diff --git a/application/urls.py b/application/urls.py index 7a51ca4..11dc3b3 100644 --- a/application/urls.py +++ b/application/urls.py @@ -22,6 +22,7 @@ urlpatterns = [ url(r'^post/(?P\d+)/remove/$', views.post_remove, name='post_remove'), url(r'^student/(?P[-\w]+)/remove/$', views.tag_remove, name='tag_remove'), url(r'^tags/', include('taggit_templatetags2.urls')), + url(r'^newsletter/', include('newsletter.urls')), ] if settings.DEBUG: diff --git a/log.txt b/log.txt index 29bb3f4..1776ed2 100644 --- a/log.txt +++ b/log.txt @@ -400,3 +400,6 @@ [24/Oct/2018 19:03:28] INFO [mysite:191] ]> [24/Oct/2018 19:03:45] INFO [mysite:189] bamberg [24/Oct/2018 19:03:45] INFO [mysite:191] , ]> +[25/Oct/2018 10:33:13] INFO [mysite:189] bamberg +[25/Oct/2018 15:28:52] INFO [mysite:189] hi +[25/Oct/2018 15:28:54] INFO [mysite:189] diff --git a/mysite/settings.py b/mysite/settings.py index dc0e8b4..a7b586e 100644 --- a/mysite/settings.py +++ b/mysite/settings.py @@ -44,11 +44,14 @@ INSTALLED_APPS = [ 'django.contrib.sessions', 'django.contrib.messages', 'django.contrib.staticfiles', + 'django.contrib.sites', 'application', 'taggit', 'taggit_templatetags2', 'djcelery', 'kombu.transport.django', + 'sorl.thumbnail', + 'newsletter', ] MIDDLEWARE = [ @@ -261,4 +264,6 @@ CELERY_TASK_SERIALIZER = 'json' CELERY_RESULT_SERIALIZER = 'json' CELERY_RESULT_BACKEND = 'djcelery.backends.database:DatabaseBackend' CELERYBEAT_SCHEDULER = "djcelery.schedulers.DatabaseScheduler" -djcelery.setup_loader() \ No newline at end of file +djcelery.setup_loader() + +SITE_ID = 1 \ No newline at end of file diff --git a/thesisenv/bin/change_tz b/thesisenv/bin/change_tz new file mode 100755 index 0000000..f6f589d --- /dev/null +++ b/thesisenv/bin/change_tz @@ -0,0 +1,12 @@ +#!/Users/Esthi/thesis_ek/thesisenv/bin/python3 +# EASY-INSTALL-ENTRY-SCRIPT: 'python-card-me==0.9.3','console_scripts','change_tz' +__requires__ = 'python-card-me==0.9.3' +import re +import sys +from pkg_resources import load_entry_point + 
+if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]) + sys.exit( + load_entry_point('python-card-me==0.9.3', 'console_scripts', 'change_tz')() + ) diff --git a/thesisenv/bin/chardetect b/thesisenv/bin/chardetect new file mode 100755 index 0000000..8cc2808 --- /dev/null +++ b/thesisenv/bin/chardetect @@ -0,0 +1,11 @@ +#!/Users/Esthi/thesis_ek/thesisenv/bin/python3 + +# -*- coding: utf-8 -*- +import re +import sys + +from chardet.cli.chardetect import main + +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/thesisenv/bin/fsdump b/thesisenv/bin/fsdump new file mode 100755 index 0000000..9232d1b --- /dev/null +++ b/thesisenv/bin/fsdump @@ -0,0 +1,11 @@ +#!/Users/Esthi/thesis_ek/thesisenv/bin/python3 + +# -*- coding: utf-8 -*- +import re +import sys + +from ZODB.FileStorage.fsdump import main + +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/thesisenv/bin/fsoids b/thesisenv/bin/fsoids new file mode 100755 index 0000000..4bf15c5 --- /dev/null +++ b/thesisenv/bin/fsoids @@ -0,0 +1,11 @@ +#!/Users/Esthi/thesis_ek/thesisenv/bin/python3 + +# -*- coding: utf-8 -*- +import re +import sys + +from ZODB.scripts.fsoids import main + +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/thesisenv/bin/fsrefs b/thesisenv/bin/fsrefs new file mode 100755 index 0000000..b1449c0 --- /dev/null +++ b/thesisenv/bin/fsrefs @@ -0,0 +1,11 @@ +#!/Users/Esthi/thesis_ek/thesisenv/bin/python3 + +# -*- coding: utf-8 -*- +import re +import sys + +from ZODB.scripts.fsrefs import main + +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/thesisenv/bin/fstail b/thesisenv/bin/fstail new file mode 100755 index 0000000..7174ddb --- /dev/null +++ 
b/thesisenv/bin/fstail @@ -0,0 +1,11 @@ +#!/Users/Esthi/thesis_ek/thesisenv/bin/python3 + +# -*- coding: utf-8 -*- +import re +import sys + +from ZODB.scripts.fstail import Main + +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]) + sys.exit(Main()) diff --git a/thesisenv/bin/ics_diff b/thesisenv/bin/ics_diff new file mode 100755 index 0000000..1d08143 --- /dev/null +++ b/thesisenv/bin/ics_diff @@ -0,0 +1,12 @@ +#!/Users/Esthi/thesis_ek/thesisenv/bin/python3 +# EASY-INSTALL-ENTRY-SCRIPT: 'python-card-me==0.9.3','console_scripts','ics_diff' +__requires__ = 'python-card-me==0.9.3' +import re +import sys +from pkg_resources import load_entry_point + +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]) + sys.exit( + load_entry_point('python-card-me==0.9.3', 'console_scripts', 'ics_diff')() + ) diff --git a/thesisenv/bin/repozo b/thesisenv/bin/repozo new file mode 100755 index 0000000..c0198fd --- /dev/null +++ b/thesisenv/bin/repozo @@ -0,0 +1,11 @@ +#!/Users/Esthi/thesis_ek/thesisenv/bin/python3 + +# -*- coding: utf-8 -*- +import re +import sys + +from ZODB.scripts.repozo import main + +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/thesisenv/bin/runzeo b/thesisenv/bin/runzeo new file mode 100755 index 0000000..585e191 --- /dev/null +++ b/thesisenv/bin/runzeo @@ -0,0 +1,12 @@ +#!/Users/Esthi/thesis_ek/thesisenv/bin/python3 +# EASY-INSTALL-ENTRY-SCRIPT: 'ZEO==5.2.0','console_scripts','runzeo' +__requires__ = 'ZEO==5.2.0' +import re +import sys +from pkg_resources import load_entry_point + +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]) + sys.exit( + load_entry_point('ZEO==5.2.0', 'console_scripts', 'runzeo')() + ) diff --git a/thesisenv/bin/surlex2regex.py b/thesisenv/bin/surlex2regex.py new file mode 100755 index 0000000..700a254 --- /dev/null +++ 
b/thesisenv/bin/surlex2regex.py @@ -0,0 +1,17 @@ +#!/Users/Esthi/thesis_ek/thesisenv/bin/python3 +from surlex import Surlex +import sys +from optparse import OptionParser + +def main(): + parser = OptionParser() + parser.set_usage('surlex2regex.py ') + if len(sys.argv) == 1: + argv = ['-h'] + else: + argv = sys.argv[1:] + options, args = parser.parse_args(argv) + print (Surlex(args[0]).translate()) + +if __name__ == '__main__': + main() diff --git a/thesisenv/bin/zconfig b/thesisenv/bin/zconfig new file mode 100755 index 0000000..aa1e661 --- /dev/null +++ b/thesisenv/bin/zconfig @@ -0,0 +1,11 @@ +#!/Users/Esthi/thesis_ek/thesisenv/bin/python3 + +# -*- coding: utf-8 -*- +import re +import sys + +from ZConfig.validator import main + +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/thesisenv/bin/zconfig_schema2html b/thesisenv/bin/zconfig_schema2html new file mode 100755 index 0000000..13293f6 --- /dev/null +++ b/thesisenv/bin/zconfig_schema2html @@ -0,0 +1,11 @@ +#!/Users/Esthi/thesis_ek/thesisenv/bin/python3 + +# -*- coding: utf-8 -*- +import re +import sys + +from ZConfig.schema2html import main + +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/thesisenv/bin/zdaemon b/thesisenv/bin/zdaemon new file mode 100755 index 0000000..e05ee0b --- /dev/null +++ b/thesisenv/bin/zdaemon @@ -0,0 +1,12 @@ +#!/Users/Esthi/thesis_ek/thesisenv/bin/python3 +# EASY-INSTALL-ENTRY-SCRIPT: 'zdaemon==4.2.0','console_scripts','zdaemon' +__requires__ = 'zdaemon==4.2.0' +import re +import sys +from pkg_resources import load_entry_point + +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]) + sys.exit( + load_entry_point('zdaemon==4.2.0', 'console_scripts', 'zdaemon')() + ) diff --git a/thesisenv/bin/zeo-nagios b/thesisenv/bin/zeo-nagios new file mode 100755 index 0000000..270af21 --- /dev/null 
+++ b/thesisenv/bin/zeo-nagios @@ -0,0 +1,12 @@ +#!/Users/Esthi/thesis_ek/thesisenv/bin/python3 +# EASY-INSTALL-ENTRY-SCRIPT: 'ZEO==5.2.0','console_scripts','zeo-nagios' +__requires__ = 'ZEO==5.2.0' +import re +import sys +from pkg_resources import load_entry_point + +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]) + sys.exit( + load_entry_point('ZEO==5.2.0', 'console_scripts', 'zeo-nagios')() + ) diff --git a/thesisenv/bin/zeoctl b/thesisenv/bin/zeoctl new file mode 100755 index 0000000..491d05f --- /dev/null +++ b/thesisenv/bin/zeoctl @@ -0,0 +1,12 @@ +#!/Users/Esthi/thesis_ek/thesisenv/bin/python3 +# EASY-INSTALL-ENTRY-SCRIPT: 'ZEO==5.2.0','console_scripts','zeoctl' +__requires__ = 'ZEO==5.2.0' +import re +import sys +from pkg_resources import load_entry_point + +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]) + sys.exit( + load_entry_point('ZEO==5.2.0', 'console_scripts', 'zeoctl')() + ) diff --git a/thesisenv/bin/zeopack b/thesisenv/bin/zeopack new file mode 100755 index 0000000..91d18d7 --- /dev/null +++ b/thesisenv/bin/zeopack @@ -0,0 +1,12 @@ +#!/Users/Esthi/thesis_ek/thesisenv/bin/python3 +# EASY-INSTALL-ENTRY-SCRIPT: 'ZEO==5.2.0','console_scripts','zeopack' +__requires__ = 'ZEO==5.2.0' +import re +import sys +from pkg_resources import load_entry_point + +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]) + sys.exit( + load_entry_point('ZEO==5.2.0', 'console_scripts', 'zeopack')() + ) diff --git a/thesisenv/include/site/python3.6/persistent/cPersistence.h b/thesisenv/include/site/python3.6/persistent/cPersistence.h new file mode 100644 index 0000000..ac9a885 --- /dev/null +++ b/thesisenv/include/site/python3.6/persistent/cPersistence.h @@ -0,0 +1,156 @@ +/***************************************************************************** + + Copyright (c) 2001, 2002 Zope Foundation and Contributors. + All Rights Reserved. 
+ + This software is subject to the provisions of the Zope Public License, + Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. + THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED + WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED + WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS + FOR A PARTICULAR PURPOSE + + ****************************************************************************/ + +#ifndef CPERSISTENCE_H +#define CPERSISTENCE_H + +#include "_compat.h" +#include "bytesobject.h" + +#include "ring.h" + +#define CACHE_HEAD \ + PyObject_HEAD \ + CPersistentRing ring_home; \ + int non_ghost_count; \ + Py_ssize_t total_estimated_size; + +struct ccobject_head_struct; + +typedef struct ccobject_head_struct PerCache; + +/* How big is a persistent object? + + 12 PyGC_Head is two pointers and an int + 8 PyObject_HEAD is an int and a pointer + + 12 jar, oid, cache pointers + 8 ring struct + 8 serialno + 4 state + extra + 4 size info + + (56) so far + + 4 dict ptr + 4 weaklist ptr + ------------------------- + 68 only need 62, but obmalloc rounds up to multiple of eight + + Even a ghost requires 64 bytes. It's possible to make a persistent + instance with slots and no dict, which changes the storage needed. + +*/ + +#define cPersistent_HEAD \ + PyObject_HEAD \ + PyObject *jar; \ + PyObject *oid; \ + PerCache *cache; \ + CPersistentRing ring; \ + char serial[8]; \ + signed state:8; \ + unsigned estimated_size:24; + +/* We recently added estimated_size. We originally added it as a new + unsigned long field after a signed char state field and a + 3-character reserved field. This didn't work because there + are packages in the wild that have their own copies of cPersistence.h + that didn't see the update. + + To get around this, we used the reserved space by making + estimated_size a 24-bit bit field in the space occupied by the old + 3-character reserved field. 
To fit in 24 bits, we made the units + of estimated_size 64-character blocks. This allows is to handle up + to a GB. We should never see that, but to be paranoid, we also + truncate sizes greater than 1GB. We also set the minimum size to + 64 bytes. + + We use the _estimated_size_in_24_bits and _estimated_size_in_bytes + macros both to avoid repetition and to make intent a little clearer. +*/ +#define _estimated_size_in_24_bits(I) ((I) > 1073741696 ? 16777215 : (I)/64+1) +#define _estimated_size_in_bytes(I) ((I)*64) + +#define cPersistent_GHOST_STATE -1 +#define cPersistent_UPTODATE_STATE 0 +#define cPersistent_CHANGED_STATE 1 +#define cPersistent_STICKY_STATE 2 + +typedef struct { + cPersistent_HEAD +} cPersistentObject; + +typedef void (*percachedelfunc)(PerCache *, PyObject *); + +typedef struct { + PyTypeObject *pertype; + getattrofunc getattro; + setattrofunc setattro; + int (*changed)(cPersistentObject*); + void (*accessed)(cPersistentObject*); + void (*ghostify)(cPersistentObject*); + int (*setstate)(PyObject*); + percachedelfunc percachedel; + int (*readCurrent)(cPersistentObject*); +} cPersistenceCAPIstruct; + +#define cPersistenceType cPersistenceCAPI->pertype + +#ifndef DONT_USE_CPERSISTENCECAPI +static cPersistenceCAPIstruct *cPersistenceCAPI; +#endif + +#define cPersistanceModuleName "cPersistence" + +#define PER_TypeCheck(O) PyObject_TypeCheck((O), cPersistenceCAPI->pertype) + +#define PER_USE_OR_RETURN(O,R) {if((O)->state==cPersistent_GHOST_STATE && cPersistenceCAPI->setstate((PyObject*)(O)) < 0) return (R); else if ((O)->state==cPersistent_UPTODATE_STATE) (O)->state=cPersistent_STICKY_STATE;} + +#define PER_CHANGED(O) (cPersistenceCAPI->changed((cPersistentObject*)(O))) + +#define PER_READCURRENT(O, E) \ + if (cPersistenceCAPI->readCurrent((cPersistentObject*)(O)) < 0) { E; } + +#define PER_GHOSTIFY(O) (cPersistenceCAPI->ghostify((cPersistentObject*)(O))) + +/* If the object is sticky, make it non-sticky, so that it can be ghostified. 
+ The value is not meaningful + */ +#define PER_ALLOW_DEACTIVATION(O) ((O)->state==cPersistent_STICKY_STATE && ((O)->state=cPersistent_UPTODATE_STATE)) + +#define PER_PREVENT_DEACTIVATION(O) ((O)->state==cPersistent_UPTODATE_STATE && ((O)->state=cPersistent_STICKY_STATE)) + +/* + Make a persistent object usable from C by: + + - Making sure it is not a ghost + + - Making it sticky. + + IMPORTANT: If you call this and don't call PER_ALLOW_DEACTIVATION, + your object will not be ghostified. + + PER_USE returns a 1 on success and 0 failure, where failure means + error. + */ +#define PER_USE(O) \ +(((O)->state != cPersistent_GHOST_STATE \ + || (cPersistenceCAPI->setstate((PyObject*)(O)) >= 0)) \ + ? (((O)->state==cPersistent_UPTODATE_STATE) \ + ? ((O)->state=cPersistent_STICKY_STATE) : 1) : 0) + +#define PER_ACCESSED(O) (cPersistenceCAPI->accessed((cPersistentObject*)(O))) + +#endif diff --git a/thesisenv/include/site/python3.6/persistent/ring.h b/thesisenv/include/site/python3.6/persistent/ring.h new file mode 100644 index 0000000..df8e822 --- /dev/null +++ b/thesisenv/include/site/python3.6/persistent/ring.h @@ -0,0 +1,66 @@ +/***************************************************************************** + + Copyright (c) 2003 Zope Foundation and Contributors. + All Rights Reserved. + + This software is subject to the provisions of the Zope Public License, + Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. + THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED + WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED + WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS + FOR A PARTICULAR PURPOSE + + ****************************************************************************/ + +/* Support routines for the doubly-linked list of cached objects. + +The cache stores a headed, doubly-linked, circular list of persistent +objects, with space for the pointers allocated in the objects themselves. 
+The cache stores the distinguished head of the list, which is not a valid +persistent object. The other list members are non-ghost persistent +objects, linked in LRU (least-recently used) order. + +The r_next pointers traverse the ring starting with the least recently used +object. The r_prev pointers traverse the ring starting with the most +recently used object. + +Obscure: While each object is pointed at twice by list pointers (once by +its predecessor's r_next, again by its successor's r_prev), the refcount +on the object is bumped only by 1. This leads to some possibly surprising +sequences of incref and decref code. Note that since the refcount is +bumped at least once, the list does hold a strong reference to each +object in it. +*/ + +typedef struct CPersistentRing_struct +{ + struct CPersistentRing_struct *r_prev; + struct CPersistentRing_struct *r_next; +} CPersistentRing; + +/* The list operations here take constant time independent of the + * number of objects in the list: + */ + +/* Add elt as the most recently used object. elt must not already be + * in the list, although this isn't checked. + */ +void ring_add(CPersistentRing *ring, CPersistentRing *elt); + +/* Remove elt from the list. elt must already be in the list, although + * this isn't checked. + */ +void ring_del(CPersistentRing *elt); + +/* elt must already be in the list, although this isn't checked. It's + * unlinked from its current position, and relinked into the list as the + * most recently used object (which is arguably the tail of the list + * instead of the head -- but the name of this function could be argued + * either way). This is equivalent to + * + * ring_del(elt); + * ring_add(ring, elt); + * + * but may be a little quicker. 
+ */ +void ring_move_to_head(CPersistentRing *ring, CPersistentRing *elt); diff --git a/thesisenv/include/site/python3.6/zope.proxy/proxy.h b/thesisenv/include/site/python3.6/zope.proxy/proxy.h new file mode 100644 index 0000000..509564e --- /dev/null +++ b/thesisenv/include/site/python3.6/zope.proxy/proxy.h @@ -0,0 +1,59 @@ +#ifndef _proxy_H_ +#define _proxy_H_ 1 + +typedef struct { + PyObject_HEAD + PyObject *proxy_object; +} ProxyObject; + +#define Proxy_GET_OBJECT(ob) (((ProxyObject *)(ob))->proxy_object) + +typedef struct { + PyTypeObject *proxytype; + int (*check)(PyObject *obj); + PyObject *(*create)(PyObject *obj); + PyObject *(*getobject)(PyObject *proxy); +} ProxyInterface; + +#ifndef PROXY_MODULE + +/* These are only defined in the public interface, and are not + * available within the module implementation. There we use the + * classic Python/C API only. + */ + +static ProxyInterface *_proxy_api = NULL; + +static int +Proxy_Import(void) +{ + if (_proxy_api == NULL) { + PyObject *m = PyImport_ImportModule("zope.proxy"); + if (m != NULL) { + PyObject *tmp = PyObject_GetAttrString(m, "_CAPI"); + if (tmp != NULL) { +#if PY_VERSION_HEX < 0x02070000 + if (PyCObject_Check(tmp)) + _proxy_api = (ProxyInterface *) + PyCObject_AsVoidPtr(tmp); +#else + if (PyCapsule_CheckExact(tmp)) + _proxy_api = (ProxyInterface *) + PyCapsule_GetPointer(tmp, NULL); +#endif + Py_DECREF(tmp); + } + } + } + return (_proxy_api == NULL) ? 
-1 : 0; +} + +#define ProxyType (*_proxy_api->proxytype) +#define Proxy_Check(obj) (_proxy_api->check((obj))) +#define Proxy_CheckExact(obj) ((obj)->ob_type == ProxyType) +#define Proxy_New(obj) (_proxy_api->create((obj))) +#define Proxy_GetObject(proxy) (_proxy_api->getobject((proxy))) + +#endif /* PROXY_MODULE */ + +#endif /* _proxy_H_ */ diff --git a/thesisenv/lib/python3.6/site-packages/Acquisition-4.5-py3.6.egg-info/PKG-INFO b/thesisenv/lib/python3.6/site-packages/Acquisition-4.5-py3.6.egg-info/PKG-INFO new file mode 100644 index 0000000..c884b42 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/Acquisition-4.5-py3.6.egg-info/PKG-INFO @@ -0,0 +1,697 @@ +Metadata-Version: 1.1 +Name: Acquisition +Version: 4.5 +Summary: Acquisition is a mechanism that allows objects to obtain attributes from the containment hierarchy they're in. +Home-page: https://github.com/zopefoundation/Acquisition +Author: Zope Foundation and Contributors +Author-email: zope-dev@zope.org +License: ZPL 2.1 +Description: Environmental Acquisiton + ======================== + + This package implements "environmental acquisiton" for Python, as + proposed in the OOPSLA96_ paper by Joseph Gil and David H. Lorenz: + + We propose a new programming paradigm, environmental acquisition in + the context of object aggregation, in which objects acquire + behaviour from their current containers at runtime. The key idea is + that the behaviour of a component may depend upon its enclosing + composite(s). In particular, we propose a form of feature sharing in + which an object "inherits" features from the classes of objects in + its environment. By examining the declaration of classes, it is + possible to determine which kinds of classes may contain a + component, and which components must be contained in a given kind of + composite. These relationships are the basis for language constructs + that supports acquisition. + + .. _OOPSLA96: http://www.cs.virginia.edu/~lorenz/papers/oopsla96/>`_: + + .. 
contents:: + + Introductory Example + -------------------- + + Zope implements acquisition with "Extension Class" mix-in classes. To + use acquisition your classes must inherit from an acquisition base + class. For example:: + + >>> import ExtensionClass, Acquisition + + >>> class C(ExtensionClass.Base): + ... color = 'red' + + >>> class A(Acquisition.Implicit): + ... def report(self): + ... print(self.color) + ... + >>> a = A() + >>> c = C() + >>> c.a = a + + >>> c.a.report() + red + + >>> d = C() + >>> d.color = 'green' + >>> d.a = a + + >>> d.a.report() + green + + >>> try: + ... a.report() + ... except AttributeError: + ... pass + ... else: + ... raise AssertionError('AttributeError not raised.') + + The class ``A`` inherits acquisition behavior from + ``Acquisition.Implicit``. The object, ``a``, "has" the color of + objects ``c`` and d when it is accessed through them, but it has no + color by itself. The object ``a`` obtains attributes from its + environment, where its environment is defined by the access path used + to reach ``a``. + + Acquisition Wrappers + -------------------- + + When an object that supports acquisition is accessed through an + extension class instance, a special object, called an acquisition + wrapper, is returned. In the example above, the expression ``c.a`` + returns an acquisition wrapper that contains references to both ``c`` + and ``a``. It is this wrapper that performs attribute lookup in ``c`` + when an attribute cannot be found in ``a``. + + Acquisition wrappers provide access to the wrapped objects through the + attributes ``aq_parent``, ``aq_self``, ``aq_base``. Continue the + example from above:: + + >>> c.a.aq_parent is c + True + >>> c.a.aq_self is a + True + + Explicit and Implicit Acquisition + --------------------------------- + + Two styles of acquisition are supported: implicit and explicit + acquisition. 
+ + Implicit acquisition + -------------------- + + Implicit acquisition is so named because it searches for attributes + from the environment automatically whenever an attribute cannot be + obtained directly from an object or through inheritance. + + An attribute can be implicitly acquired if its name does not begin + with an underscore. + + To support implicit acquisition, your class should inherit from the + mix-in class ``Acquisition.Implicit``. + + Explicit Acquisition + -------------------- + + When explicit acquisition is used, attributes are not automatically + obtained from the environment. Instead, the method aq_acquire must be + used. For example:: + + >>> print(c.a.aq_acquire('color')) + red + + To support explicit acquisition, your class should inherit from the + mix-in class ``Acquisition.Explicit``. + + Controlling Acquisition + ----------------------- + + A class (or instance) can provide attribute by attribute control over + acquisition. Your should subclass from ``Acquisition.Explicit``, and set + all attributes that should be acquired to the special value + ``Acquisition.Acquired``. Setting an attribute to this value also allows + inherited attributes to be overridden with acquired ones. For example:: + + >>> class C(Acquisition.Explicit): + ... id = 1 + ... secret = 2 + ... color = Acquisition.Acquired + ... __roles__ = Acquisition.Acquired + + The only attributes that are automatically acquired from containing + objects are color, and ``__roles__``. Note that the ``__roles__`` + attribute is acquired even though its name begins with an + underscore. In fact, the special ``Acquisition.Acquired`` value can be + used in ``Acquisition.Implicit`` objects to implicitly acquire + selected objects that smell like private objects. + + Sometimes, you want to dynamically make an implicitly acquiring object + acquire explicitly. You can do this by getting the object's + aq_explicit attribute. 
This attribute provides the object with an + explicit wrapper that replaces the original implicit wrapper. + + Filtered Acquisition + -------------------- + + The acquisition method, ``aq_acquire``, accepts two optional + arguments. The first of the additional arguments is a "filtering" + function that is used when considering whether to acquire an + object. The second of the additional arguments is an object that is + passed as extra data when calling the filtering function and which + defaults to ``None``. The filter function is called with five + arguments: + + * The object that the aq_acquire method was called on, + + * The object where an object was found, + + * The name of the object, as passed to aq_acquire, + + * The object found, and + + * The extra data passed to aq_acquire. + + If the filter returns a true object that the object found is returned, + otherwise, the acquisition search continues. + + Here's an example:: + + >>> from Acquisition import Explicit + + >>> class HandyForTesting(object): + ... def __init__(self, name): + ... self.name = name + ... def __str__(self): + ... return "%s(%s)" % (self.name, self.__class__.__name__) + ... __repr__=__str__ + ... + >>> class E(Explicit, HandyForTesting): pass + ... + >>> class Nice(HandyForTesting): + ... isNice = 1 + ... def __str__(self): + ... return HandyForTesting.__str__(self)+' and I am nice!' + ... __repr__ = __str__ + ... + >>> a = E('a') + >>> a.b = E('b') + >>> a.b.c = E('c') + >>> a.p = Nice('spam') + >>> a.b.p = E('p') + + >>> def find_nice(self, ancestor, name, object, extra): + ... return hasattr(object,'isNice') and object.isNice + + >>> print(a.b.c.aq_acquire('p', find_nice)) + spam(Nice) and I am nice! + + The filtered acquisition in the last line skips over the first + attribute it finds with the name ``p``, because the attribute doesn't + satisfy the condition given in the filter. + + Filtered acquisition is rarely used in Zope. 
+ + Acquiring from Context + ---------------------- + + Normally acquisition allows objects to acquire data from their + containers. However an object can acquire from objects that aren't its + containers. + + Most of the examples we've seen so far show establishing of an + acquisition context using getattr semantics. For example, ``a.b`` is a + reference to ``b`` in the context of ``a``. + + You can also manually set acquisition context using the ``__of__`` + method. For example:: + + >>> from Acquisition import Implicit + >>> class C(Implicit): pass + ... + >>> a = C() + >>> b = C() + >>> a.color = "red" + >>> print(b.__of__(a).color) + red + + In this case, ``a`` does not contain ``b``, but it is put in ``b``'s + context using the ``__of__`` method. + + Here's another subtler example that shows how you can construct an + acquisition context that includes non-container objects:: + + >>> from Acquisition import Implicit + + >>> class C(Implicit): + ... def __init__(self, name): + ... self.name = name + + >>> a = C("a") + >>> a.b = C("b") + >>> a.b.color = "red" + >>> a.x = C("x") + + >>> print(a.b.x.color) + red + + Even though ``b`` does not contain ``x``, ``x`` can acquire the color + attribute from ``b``. This works because in this case, ``x`` is accessed + in the context of ``b`` even though it is not contained by ``b``. + + Here acquisition context is defined by the objects used to access + another object. + + Containment Before Context + -------------------------- + + If in the example above suppose both a and b have an color attribute:: + + >>> a = C("a") + >>> a.color = "green" + >>> a.b = C("b") + >>> a.b.color = "red" + >>> a.x = C("x") + + >>> print(a.b.x.color) + green + + Why does ``a.b.x.color`` acquire color from ``a`` and not from ``b``? + The answer is that an object acquires from its containers before + non-containers in its context. 
+ + To see why consider this example in terms of expressions using the + ``__of__`` method:: + + a.x -> x.__of__(a) + + a.b -> b.__of__(a) + + a.b.x -> x.__of__(a).__of__(b.__of__(a)) + + Keep in mind that attribute lookup in a wrapper is done by trying to + look up the attribute in the wrapped object first and then in the + parent object. So in the expressions above proceeds from left to + right. + + The upshot of these rules is that attributes are looked up by + containment before context. + + This rule holds true also for more complex examples. For example, + ``a.b.c.d.e.f.g.attribute`` would search for attribute in ``g`` and + all its containers first. (Containers are searched in order from the + innermost parent to the outermost container.) If the attribute is not + found in ``g`` or any of its containers, then the search moves to + ``f`` and all its containers, and so on. + + Additional Attributes and Methods + --------------------------------- + + You can use the special method ``aq_inner`` to access an object + wrapped only by containment. So in the example above, + ``a.b.x.aq_inner`` is equivalent to ``a.x``. + + You can find out the acquisition context of an object using the + aq_chain method like so: + + >>> [obj.name for obj in a.b.x.aq_chain] + ['x', 'b', 'a'] + + You can find out if an object is in the containment context of another + object using the ``aq_inContextOf`` method. For example: + + >>> a.b.aq_inContextOf(a) + True + + .. Note: as of this writing the aq_inContextOf examples don't work the + way they should be working. According to Jim, this is because + aq_inContextOf works by comparing object pointer addresses, which + (because they are actually different wrapper objects) doesn't give + you the expected results. He acknowledges that this behavior is + controversial, and says that there is a collector entry to change + it so that you would get the answer you expect in the above. (We + just need to get to it). 
+ + Acquisition Module Functions + ---------------------------- + + In addition to using acquisition attributes and methods directly on + objects you can use similar functions defined in the ``Acquisition`` + module. These functions have the advantage that you don't need to + check to make sure that the object has the method or attribute before + calling it. + + ``aq_acquire(object, name [, filter, extra, explicit, default, containment])`` + Acquires an object with the given name. + + This function can be used to explictly acquire when using explicit + acquisition and to acquire names that wouldn't normally be + acquired. + + The function accepts a number of optional arguments: + + ``filter`` + A callable filter object that is used to decide if an object + should be acquired. + + The filter is called with five arguments: + + * The object that the aq_acquire method was called on, + + * The object where an object was found, + + * The name of the object, as passed to aq_acquire, + + * The object found, and + + * The extra argument passed to aq_acquire. + + If the filter returns a true object that the object found is + returned, otherwise, the acquisition search continues. + + ``extra`` + Extra data to be passed as the last argument to the filter. + + ``explicit`` + A flag (boolean value) indicating whether explicit acquisition + should be used. The default value is true. If the flag is + true, then acquisition will proceed regardless of whether + wrappers encountered in the search of the acquisition + hierarchy are explicit or implicit wrappers. If the flag is + false, then parents of explicit wrappers are not searched. + + This argument is useful if you want to apply a filter without + overriding explicit wrappers. + + ``default`` + A default value to return if no value can be acquired. + + ``containment`` + A flag indicating whether the search should be limited to the + containment hierarchy. + + In addition, arguments can be provided as keywords. 
+ + ``aq_base(object)`` + Return the object with all wrapping removed. + + ``aq_chain(object [, containment])`` + Return a list containing the object and it's acquisition + parents. The optional argument, containment, controls whether the + containment or access hierarchy is used. + + ``aq_get(object, name [, default, containment])`` + Acquire an attribute, name. A default value can be provided, as + can a flag that limits search to the containment hierarchy. + + ``aq_inner(object)`` + Return the object with all but the innermost layer of wrapping + removed. + + ``aq_parent(object)`` + Return the acquisition parent of the object or None if the object + is unwrapped. + + ``aq_self(object)`` + Return the object with one layer of wrapping removed, unless the + object is unwrapped, in which case the object is returned. + + In most cases it is more convenient to use these module functions + instead of the acquisition attributes and methods directly. + + Acquisition and Methods + ----------------------- + + Python methods of objects that support acquisition can use acquired + attributes. When a Python method is called on an object that is + wrapped by an acquisition wrapper, the wrapper is passed to the method + as the first argument. This rule also applies to user-defined method + types and to C methods defined in pure mix-in classes. + + Unfortunately, C methods defined in extension base classes that define + their own data structures, cannot use aquired attributes at this + time. This is because wrapper objects do not conform to the data + structures expected by these methods. In practice, you will seldom + find this a problem. + + Conclusion + ---------- + + Acquisition provides a powerful way to dynamically share information + between objects. Zope 2 uses acquisition for a number of its key + features including security, object publishing, and DTML variable + lookup. 
Acquisition also provides an elegant solution to the problem + of circular references for many classes of problems. While acquisition + is powerful, you should take care when using acquisition in your + applications. The details can get complex, especially with the + differences between acquiring from context and acquiring from + containment. + + + Changelog + ========= + + 4.5 (2018-10-05) + ---------------- + + - Avoid deprecation warnings by using current API. + + - Add support for Python 3.7. + + 4.4.4 (2017-11-24) + ------------------ + + - Add Appveyor configuration to automate building Windows eggs. + + 4.4.3 (2017-11-23) + ------------------ + + - Fix the extremely rare potential for a crash when the C extensions + are in use. See `issue 21 `_. + + 4.4.2 (2017-05-12) + ------------------ + + - Fix C capsule name to fix import errors. + + - Ensure our dependencies match our expactations about C extensions. + + 4.4.1 (2017-05-04) + ------------------ + + - Fix C code under Python 3.4, with missing Py_XSETREF. + + 4.4.0 (2017-05-04) + ------------------ + + - Enable the C extension under Python 3. + + - Drop support for Python 3.3. + + 4.3.0 (2017-01-20) + ------------------ + + - Make tests compatible with ExtensionClass 4.2.0. + + - Drop support for Python 2.6 and 3.2. + + - Add support for Python 3.5 and 3.6. + + 4.2.2 (2015-05-19) + ------------------ + + - Make the pure-Python Acquirer objects cooperatively use the + superclass ``__getattribute__`` method, like the C implementation. + See https://github.com/zopefoundation/Acquisition/issues/7. + + - The pure-Python implicit acquisition wrapper allows wrapped objects + to use ``object.__getattribute__(self, name)``. This differs from + the C implementation, but is important for compatibility with the + pure-Python versions of libraries like ``persistent``. See + https://github.com/zopefoundation/Acquisition/issues/9. 
+ + 4.2.1 (2015-04-23) + ------------------ + + - Correct several dangling pointer uses in the C extension, + potentially fixing a few interpreter crashes. See + https://github.com/zopefoundation/Acquisition/issues/5. + + 4.2 (2015-04-04) + ---------------- + + - Add support for PyPy, PyPy3, and Python 3.2, 3.3, and 3.4. + + 4.1 (2014-12-18) + ---------------- + + - Bump dependency on ``ExtensionClass`` to match current release. + + 4.0.3 (2014-11-02) + ------------------ + + - Skip readme.rst tests when tests are run outside a source checkout. + + 4.0.2 (2014-11-02) + ------------------ + + - Include ``*.rst`` files in the release. + + 4.0.1 (2014-10-30) + ------------------ + + - Tolerate Unicode attribute names (ASCII only). LP #143358. + + - Make module-level ``aq_acquire`` API respect the ``default`` parameter. + LP #1387363. + + - Don't raise an attribute error for ``__iter__`` if the fallback to + ``__getitem__`` succeeds. LP #1155760. + + + 4.0 (2013-02-24) + ---------------- + + - Added trove classifiers to project metadata. + + 4.0a1 (2011-12-13) + ------------------ + + - Raise `RuntimeError: Recursion detected in acquisition wrapper` if an object + with a `__parent__` pointer points to a wrapper that in turn points to the + original object. + + - Prevent wrappers to be created while accessing `__parent__` on types derived + from Explicit or Implicit base classes. + + 2.13.9 (2015-02-17) + ------------------- + + - Tolerate Unicode attribute names (ASCII only). LP #143358. + + - Make module-level ``aq_acquire`` API respect the ``default`` parameter. + LP #1387363. + + - Don't raise an attribute error for ``__iter__`` if the fallback to + ``__getitem__`` succeeds. LP #1155760. + + 2.13.8 (2011-06-11) + ------------------- + + - Fixed a segfault on 64bit platforms when providing the `explicit` argument to + the aq_acquire method of an Acquisition wrapper. Thx to LP #675064 for the + hint to the solution. 
The code passed an int instead of a pointer into a + function. + + 2.13.7 (2011-03-02) + ------------------- + + - Fixed bug: When an object did not implement ``__unicode__``, calling + ``unicode(wrapped)`` was calling ``__str__`` with an unwrapped ``self``. + + 2.13.6 (2011-02-19) + ------------------- + + - Add ``aq_explicit`` to ``IAcquisitionWrapper``. + + - Fixed bug: ``unicode(wrapped)`` was not calling a ``__unicode__`` + method on wrapped objects. + + 2.13.5 (2010-09-29) + ------------------- + + - Fixed unit tests that failed on 64bit Python on Windows machines. + + 2.13.4 (2010-08-31) + ------------------- + + - LP 623665: Fixed typo in Acquisition.h. + + 2.13.3 (2010-04-19) + ------------------- + + - Use the doctest module from the standard library and no longer depend on + zope.testing. + + 2.13.2 (2010-04-04) + ------------------- + + - Give both wrapper classes a ``__getnewargs__`` method, which causes the ZODB + optimization to fail and create persistent references using the ``_p_oid`` + alone. This happens to be the persistent oid of the wrapped object. This lets + these objects to be persisted correctly, even though they are passed to the + ZODB in a wrapped state. + + - Added failing tests for http://dev.plone.org/plone/ticket/10318. This shows + an edge-case where AQ wrappers can be pickled using the specific combination + of cPickle, pickle protocol one and a custom Pickler class with an + ``inst_persistent_id`` hook. Unfortunately this is the exact combination used + by ZODB3. + + 2.13.1 (2010-02-23) + ------------------- + + - Update to include ExtensionClass 2.13.0. + + - Fix the ``tp_name`` of the ImplicitAcquisitionWrapper and + ExplicitAcquisitionWrapper to match their Python visible names and thus have + a correct ``__name__``. + + - Expand the ``tp_name`` of our extension types to hold the fully qualified + name. This ensures classes have their ``__module__`` set correctly. 
+ + 2.13.0 (2010-02-14) + ------------------- + + - Added support for method cache in Acquisition. Patch contributed by + Yoshinori K. Okuji. See https://bugs.launchpad.net/zope2/+bug/486182. + + 2.12.4 (2009-10-29) + ------------------- + + - Fix iteration proxying to pass `self` acquisition-wrapped into both + `__iter__` as well as `__getitem__` (this fixes + https://bugs.launchpad.net/zope2/+bug/360761). + + - Add tests for the __getslice__ proxying, including open-ended slicing. + + 2.12.3 (2009-08-08) + ------------------- + + - More 64-bit fixes in Py_BuildValue calls. + + - More 64-bit issues fixed: Use correct integer size for slice operations. + + 2.12.2 (2009-08-02) + ------------------- + + - Fixed 64-bit compatibility issues for Python 2.5.x / 2.6.x. See + http://www.python.org/dev/peps/pep-0353/ for details. + + 2.12.1 (2009-04-15) + ------------------- + + - Update for iteration proxying: The proxy for `__iter__` must not rely on the + object to have an `__iter__` itself, but also support fall-back iteration via + `__getitem__` (this fixes https://bugs.launchpad.net/zope2/+bug/360761). + + 2.12 (2009-01-25) + ----------------- + + - Release as separate package. 
+ +Platform: UNKNOWN +Classifier: Development Status :: 6 - Mature +Classifier: Environment :: Web Environment +Classifier: Framework :: Zope2 +Classifier: License :: OSI Approved :: Zope Public License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy diff --git a/thesisenv/lib/python3.6/site-packages/Acquisition-4.5-py3.6.egg-info/SOURCES.txt b/thesisenv/lib/python3.6/site-packages/Acquisition-4.5-py3.6.egg-info/SOURCES.txt new file mode 100644 index 0000000..1ac9253 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/Acquisition-4.5-py3.6.egg-info/SOURCES.txt @@ -0,0 +1,25 @@ +.coveragerc +.gitignore +CHANGES.rst +COPYRIGHT.txt +LICENSE.txt +MANIFEST.in +README.rst +buildout.cfg +pip-delete-this-directory.txt +setup.cfg +setup.py +tox.ini +include/ExtensionClass/ExtensionClass.h +include/ExtensionClass/_compat.h +src/Acquisition/Acquisition.h +src/Acquisition/_Acquisition.c +src/Acquisition/__init__.py +src/Acquisition/interfaces.py +src/Acquisition/tests.py +src/Acquisition.egg-info/PKG-INFO +src/Acquisition.egg-info/SOURCES.txt +src/Acquisition.egg-info/dependency_links.txt +src/Acquisition.egg-info/not-zip-safe +src/Acquisition.egg-info/requires.txt +src/Acquisition.egg-info/top_level.txt \ No newline at end of file diff --git a/thesisenv/lib/python3.6/site-packages/Acquisition-4.5-py3.6.egg-info/dependency_links.txt b/thesisenv/lib/python3.6/site-packages/Acquisition-4.5-py3.6.egg-info/dependency_links.txt new file mode 
100644 index 0000000..8b13789 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/Acquisition-4.5-py3.6.egg-info/dependency_links.txt @@ -0,0 +1 @@ + diff --git a/thesisenv/lib/python3.6/site-packages/Acquisition-4.5-py3.6.egg-info/installed-files.txt b/thesisenv/lib/python3.6/site-packages/Acquisition-4.5-py3.6.egg-info/installed-files.txt new file mode 100644 index 0000000..598182c --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/Acquisition-4.5-py3.6.egg-info/installed-files.txt @@ -0,0 +1,15 @@ +../Acquisition/Acquisition.h +../Acquisition/_Acquisition.c +../Acquisition/_Acquisition.cpython-36m-darwin.so +../Acquisition/__init__.py +../Acquisition/__pycache__/__init__.cpython-36.pyc +../Acquisition/__pycache__/interfaces.cpython-36.pyc +../Acquisition/__pycache__/tests.cpython-36.pyc +../Acquisition/interfaces.py +../Acquisition/tests.py +PKG-INFO +SOURCES.txt +dependency_links.txt +not-zip-safe +requires.txt +top_level.txt diff --git a/thesisenv/lib/python3.6/site-packages/Acquisition-4.5-py3.6.egg-info/not-zip-safe b/thesisenv/lib/python3.6/site-packages/Acquisition-4.5-py3.6.egg-info/not-zip-safe new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/Acquisition-4.5-py3.6.egg-info/not-zip-safe @@ -0,0 +1 @@ + diff --git a/thesisenv/lib/python3.6/site-packages/Acquisition-4.5-py3.6.egg-info/requires.txt b/thesisenv/lib/python3.6/site-packages/Acquisition-4.5-py3.6.egg-info/requires.txt new file mode 100644 index 0000000..a244c36 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/Acquisition-4.5-py3.6.egg-info/requires.txt @@ -0,0 +1,2 @@ +ExtensionClass>=4.2.0 +zope.interface diff --git a/thesisenv/lib/python3.6/site-packages/Acquisition-4.5-py3.6.egg-info/top_level.txt b/thesisenv/lib/python3.6/site-packages/Acquisition-4.5-py3.6.egg-info/top_level.txt new file mode 100644 index 0000000..6940737 --- /dev/null +++ 
b/thesisenv/lib/python3.6/site-packages/Acquisition-4.5-py3.6.egg-info/top_level.txt @@ -0,0 +1 @@ +Acquisition diff --git a/thesisenv/lib/python3.6/site-packages/Acquisition/Acquisition.h b/thesisenv/lib/python3.6/site-packages/Acquisition/Acquisition.h new file mode 100644 index 0000000..ec36659 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/Acquisition/Acquisition.h @@ -0,0 +1,53 @@ +/***************************************************************************** + + Copyright (c) 1996-2002 Zope Foundation and Contributors. + All Rights Reserved. + + This software is subject to the provisions of the Zope Public License, + Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. + THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED + WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED + WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS + FOR A PARTICULAR PURPOSE + + ****************************************************************************/ + +#ifndef __ACQUISITION_H_ +#define __ACQUISITION_H_ + +typedef struct { + PyObject *(*AQ_Acquire) (PyObject *obj, PyObject *name, PyObject *filter, + PyObject *extra, int explicit, PyObject *deflt, + int containment); + PyObject *(*AQ_Get) (PyObject *obj, PyObject *name, PyObject *deflt, + int containment); + int (*AQ_IsWrapper) (PyObject *obj); + PyObject *(*AQ_Base) (PyObject *obj); + PyObject *(*AQ_Parent) (PyObject *obj); + PyObject *(*AQ_Self) (PyObject *obj); + PyObject *(*AQ_Inner) (PyObject *obj); + PyObject *(*AQ_Chain) (PyObject *obj, int containment); +} ACQUISITIONCAPI; + +#ifndef _IN_ACQUISITION_C + +#define aq_Acquire(obj, name, filter, extra, explicit, deflt, containment ) (AcquisitionCAPI == NULL ? NULL : (AcquisitionCAPI->AQ_Acquire(obj, name, filter, extra, explicit, deflt, containment))) +#define aq_acquire(obj, name) (AcquisitionCAPI == NULL ? 
NULL : (AcquisitionCAPI->AQ_Acquire(obj, name, NULL, NULL, 1, NULL, 0))) +#define aq_get(obj, name, deflt, containment) (AcquisitionCAPI == NULL ? NULL : (AcquisitionCAPI->AQ_Get(obj, name, deflt, containment))) +#define aq_isWrapper(obj) (AcquisitionCAPI == NULL ? -1 : (AcquisitionCAPI->AQ_IsWrapper(obj))) +#define aq_base(obj) (AcquisitionCAPI == NULL ? NULL : (AcquisitionCAPI->AQ_Base(obj))) +#define aq_parent(obj) (AcquisitionCAPI == NULL ? NULL : (AcquisitionCAPI->AQ_Parent(obj))) +#define aq_self(obj) (AcquisitionCAPI == NULL ? NULL : (AcquisitionCAPI->AQ_Self(obj))) +#define aq_inner(obj) (AcquisitionCAPI == NULL ? NULL : (AcquisitionCAPI->AQ_Inner(obj))) +#define aq_chain(obj, containment) (AcquisitionCAPI == NULL ? NULL : (AcquisitionCAPI->AQ_CHain(obj, containment))) + +static ACQUISITIONCAPI *AcquisitionCAPI = NULL; + +#define aq_init() { \ + AcquisitionCAPI = PyCapsule_Import("Acquisition.AcquisitionCAPI", 0); \ +} + + +#endif + +#endif diff --git a/thesisenv/lib/python3.6/site-packages/Acquisition/_Acquisition.c b/thesisenv/lib/python3.6/site-packages/Acquisition/_Acquisition.c new file mode 100644 index 0000000..fbeaeb8 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/Acquisition/_Acquisition.c @@ -0,0 +1,2072 @@ +/***************************************************************************** + + Copyright (c) 1996-2003 Zope Foundation and Contributors. + All Rights Reserved. + + This software is subject to the provisions of the Zope Public License, + Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+ THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED + WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED + WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS + FOR A PARTICULAR PURPOSE + + ****************************************************************************/ + +#include "ExtensionClass/ExtensionClass.h" +#include "ExtensionClass/_compat.h" + +#define _IN_ACQUISITION_C +#include "Acquisition/Acquisition.h" + +static ACQUISITIONCAPI AcquisitionCAPI; + +// Py_XSETREF is undefined in Python 3.4 only, it's present in 2.7 and 3.5 +#ifndef Py_XSETREF + +#define Py_XSETREF(op, op2) \ + do { \ + PyObject *_py_tmp = (PyObject *)(op); \ + (op) = (op2); \ + Py_XDECREF(_py_tmp); \ + } while (0) + +#endif + +#define ASSIGN(dst, src) Py_XSETREF(dst, src) +#define OBJECT(O) ((PyObject*)(O)) + +/* sizeof("x") == 2 because of the '\0' byte. */ +#define STR_STARTSWITH(ob, pattern) ((strncmp(ob, pattern, sizeof(pattern) - 1) == 0)) +#define STR_EQ(ob, pattern) ((strcmp(ob, pattern) == 0)) + +static PyObject *py__add__, *py__sub__, *py__mul__, *py__div__, + *py__mod__, *py__pow__, *py__divmod__, *py__lshift__, *py__rshift__, + *py__and__, *py__or__, *py__xor__, *py__coerce__, *py__neg__, + *py__pos__, *py__abs__, *py__nonzero__, *py__invert__, *py__int__, + *py__long__, *py__float__, *py__oct__, *py__hex__, + *py__getitem__, *py__setitem__, *py__delitem__, + *py__getslice__, *py__setslice__, *py__delslice__, *py__contains__, + *py__len__, *py__of__, *py__call__, *py__repr__, *py__str__, *py__unicode__, + *py__cmp__, *py__parent__, *py__iter__, *py__bool__, *py__index__, *py__iadd__, + *py__isub__, *py__imul__, *py__imod__, *py__ipow__, *py__ilshift__, *py__irshift__, + *py__iand__, *py__ixor__, *py__ior__, *py__floordiv__, *py__truediv__, + *py__ifloordiv__, *py__itruediv__, *py__matmul__, *py__imatmul__, *py__idiv__; + +static PyObject *Acquired = NULL; + +static void +init_py_names(void) +{ +#define INIT_PY_NAME(N) py 
## N = NATIVE_FROM_STRING(#N) + INIT_PY_NAME(__add__); + INIT_PY_NAME(__sub__); + INIT_PY_NAME(__mul__); + INIT_PY_NAME(__div__); + INIT_PY_NAME(__mod__); + INIT_PY_NAME(__pow__); + INIT_PY_NAME(__divmod__); + INIT_PY_NAME(__lshift__); + INIT_PY_NAME(__rshift__); + INIT_PY_NAME(__and__); + INIT_PY_NAME(__or__); + INIT_PY_NAME(__xor__); + INIT_PY_NAME(__coerce__); + INIT_PY_NAME(__neg__); + INIT_PY_NAME(__pos__); + INIT_PY_NAME(__abs__); + INIT_PY_NAME(__nonzero__); + INIT_PY_NAME(__bool__); + INIT_PY_NAME(__invert__); + INIT_PY_NAME(__int__); + INIT_PY_NAME(__long__); + INIT_PY_NAME(__float__); + INIT_PY_NAME(__oct__); + INIT_PY_NAME(__hex__); + INIT_PY_NAME(__getitem__); + INIT_PY_NAME(__setitem__); + INIT_PY_NAME(__delitem__); + INIT_PY_NAME(__getslice__); + INIT_PY_NAME(__setslice__); + INIT_PY_NAME(__delslice__); + INIT_PY_NAME(__contains__); + INIT_PY_NAME(__len__); + INIT_PY_NAME(__of__); + INIT_PY_NAME(__call__); + INIT_PY_NAME(__repr__); + INIT_PY_NAME(__str__); + INIT_PY_NAME(__unicode__); + INIT_PY_NAME(__cmp__); + INIT_PY_NAME(__parent__); + INIT_PY_NAME(__iter__); + INIT_PY_NAME(__index__); + INIT_PY_NAME(__iadd__); + INIT_PY_NAME(__isub__); + INIT_PY_NAME(__imul__); + INIT_PY_NAME(__imod__); + INIT_PY_NAME(__ipow__); + INIT_PY_NAME(__ilshift__); + INIT_PY_NAME(__irshift__); + INIT_PY_NAME(__iand__); + INIT_PY_NAME(__ixor__); + INIT_PY_NAME(__ior__); + INIT_PY_NAME(__floordiv__); + INIT_PY_NAME(__truediv__); + INIT_PY_NAME(__ifloordiv__); + INIT_PY_NAME(__itruediv__); + INIT_PY_NAME(__matmul__); + INIT_PY_NAME(__imatmul__); + INIT_PY_NAME(__idiv__); +#undef INIT_PY_NAME +} + +static PyObject * +CallMethod(PyObject *self, PyObject *name, PyObject *args, PyObject *kwargs) +{ + PyObject *callable, *result; + + if ((callable = PyObject_GetAttr(self, name)) == NULL) { + return NULL; + } + + result = PyEval_CallObjectWithKeywords(callable, args, kwargs); + + Py_DECREF(callable); + return result; +} + +static PyObject * +CallMethodArgs(PyObject *self, PyObject 
*name, char *format, ...) +{ + va_list args; + PyObject *py_args, *result; + + va_start(args, format); + py_args = Py_VaBuildValue(format, args); + va_end(args); + + if (py_args == NULL) { + return NULL; + } + + result = CallMethod(self, name, py_args, NULL); + + Py_DECREF(py_args); + return result; +} + +/* For obscure reasons, we need to use tp_richcompare instead of tp_compare. + * The comparisons here all most naturally compute a cmp()-like result. + * This little helper turns that into a bool result for rich comparisons. + */ +static PyObject * +diff_to_bool(int diff, int op) +{ + PyObject *result; + int istrue; + + switch (op) { + case Py_EQ: istrue = diff == 0; break; + case Py_NE: istrue = diff != 0; break; + case Py_LE: istrue = diff <= 0; break; + case Py_GE: istrue = diff >= 0; break; + case Py_LT: istrue = diff < 0; break; + case Py_GT: istrue = diff > 0; break; + default: + assert(! "op unknown"); + istrue = 0; /* To shut up compiler */ + } + + result = istrue ? Py_True : Py_False; + Py_INCREF(result); + return result; +} + +static PyObject* +convert_name(PyObject *name) +{ +#ifdef Py_USING_UNICODE + if (PyUnicode_Check(name)) { + name = PyUnicode_AsEncodedString(name, NULL, NULL); + } + else +#endif + if (!PyBytes_Check(name)) { + PyErr_SetString(PyExc_TypeError, "attribute name must be a string"); + return NULL; + } + else { + Py_INCREF(name); + } + return name; +} + +/* Returns 1 if the current exception set is AttributeError otherwise 0. + * On 1 the AttributeError is removed from the global error indicator. + * On 0 the global error indactor is still set. 
+ */ +static int +swallow_attribute_error(void) +{ + PyObject* error; + + if ((error = PyErr_Occurred()) == NULL) { + return 0; + } + + if (PyErr_GivenExceptionMatches(error, PyExc_AttributeError)) { + PyErr_Clear(); + return 1; + } + + return 0; +} + +/* Declarations for objects of type Wrapper */ + +typedef struct { + PyObject_HEAD + PyObject *obj; + PyObject *container; +} Wrapper; + +static PyExtensionClass Wrappertype, XaqWrappertype; + +#define isImplicitWrapper(o) (Py_TYPE(o) == (PyTypeObject*)&Wrappertype) +#define isExplicitWrapper(o) (Py_TYPE(o) == (PyTypeObject*)&XaqWrappertype) + +#define isWrapper(o) (isImplicitWrapper(o) || isExplicitWrapper(o)) + +/* Same as isWrapper but does a check for NULL pointer. */ +#define XisWrapper(o) ((o) ? isWrapper(o) : 0) + +#define WRAPPER(O) ((Wrapper*)(O)) + +#define newWrapper(obj, container, Wrappertype) \ + PyObject_CallFunctionObjArgs(OBJECT(Wrappertype), obj, container, NULL) + +static char *init_kwlist[] = {"obj", "container", NULL}; + +static int +Wrapper_init(Wrapper *self, PyObject *args, PyObject *kwargs) +{ + int rc; + PyObject *obj, *container; + + rc = PyArg_ParseTupleAndKeywords( + args, kwargs, "OO:__init__", init_kwlist, &obj, &container); + + if (!rc) { + return -1; + } + + if (self == WRAPPER(obj)) { + PyErr_SetString(PyExc_ValueError, + "Cannot wrap acquisition wrapper " + "in itself (Wrapper__init__)"); + return -1; + } + + /* Avoid memory leak if __init__ is called multiple times. 
*/ + Py_CLEAR(self->obj); + Py_CLEAR(self->container); + + Py_INCREF(obj); + self->obj = obj; + + if (container != Py_None) { + Py_INCREF(container); + self->container = container; + } + + return 0; +} + +static PyObject * +Wrapper__new__(PyTypeObject *type, PyObject *args, PyObject *kwargs) +{ + Wrapper *self = WRAPPER(type->tp_alloc(type, 0)); + if (Wrapper_init(self, args, kwargs) == -1) { + Py_DECREF(self); + return NULL; + } + + return OBJECT(self); +} + + +static int +Wrapper__init__(Wrapper *self, PyObject *args, PyObject *kwargs) +{ + return Wrapper_init(self, args, kwargs); +} + +/* ---------------------------------------------------------------- */ + +/* Creates a new Wrapper object with the values from the old one. + * Steals a reference from 'ob' (also in the error case). + * Returns a new reference. + * Returns NULL on error. + */ +static PyObject * +clone_wrapper(Wrapper *ob) +{ + PyObject *tmp; + + /* Only clone if its shared with others. */ + if (Py_REFCNT(ob) == 1) { + return (PyObject*) ob; + } + + tmp = newWrapper(ob->obj, ob->container, Py_TYPE(ob)); + Py_DECREF(ob); + return tmp; +} + +static PyObject * +__of__(PyObject *inst, PyObject *parent) +{ + PyObject *result; + result = PyObject_CallMethodObjArgs(inst, py__of__, parent, NULL); + + if (XisWrapper(result) && XisWrapper(WRAPPER(result)->container)) { + while (XisWrapper(WRAPPER(result)->obj) && + (WRAPPER(WRAPPER(result)->obj)->container == + WRAPPER(WRAPPER(result)->container)->obj)) { + + /* Copy it, because the result could be shared with others. 
*/ + if ((result = clone_wrapper(WRAPPER(result))) == NULL) { + return NULL; + } + + /* Simplify wrapper */ + Py_XINCREF(WRAPPER(WRAPPER(result)->obj)->obj); + ASSIGN(WRAPPER(result)->obj, WRAPPER(WRAPPER(result)->obj)->obj); + } + } + + return result; +} + +static PyObject * +apply__of__(PyObject *self, PyObject *inst) +{ + PyObject *r; + + if (!self) { + r = self; + } else if (has__of__(self)) { + r = __of__(self, inst); + Py_DECREF(self); + } else { + r = self; + } + + return r; +} + +static PyObject * +get_inner(PyObject *ob) +{ + if (isWrapper(ob)) { + while (isWrapper(WRAPPER(ob)->obj)) { + ob = WRAPPER(ob)->obj; + } + } + + return ob; +} + +static PyObject * +get_base(PyObject *ob) +{ + while (isWrapper(ob)) { + ob = WRAPPER(ob)->obj; + } + return ob; +} + +static PyObject * +Wrapper_descrget(Wrapper *self, PyObject *inst, PyObject *cls) +{ + if (inst == NULL) { + Py_INCREF(self); + return OBJECT(self); + } + + return __of__(OBJECT(self), inst); +} + +static int +Wrapper_traverse(Wrapper *self, visitproc visit, void *arg) +{ + Py_VISIT(self->obj); + Py_VISIT(self->container); + return 0; +} + +static int +Wrapper_clear(Wrapper *self) +{ + Py_CLEAR(self->obj); + Py_CLEAR(self->container); + return 0; +} + +static void +Wrapper_dealloc(Wrapper *self) +{ + PyObject_GC_UnTrack(OBJECT(self)); + Wrapper_clear(self); + Py_TYPE(self)->tp_free(OBJECT(self)); +} + +static PyObject * +Wrapper_special(Wrapper *self, char *name, PyObject *oname) +{ + + PyObject *r = NULL; + + switch(*name) { + case 'b': + if (STR_EQ(name, "base")) { + r = get_base(OBJECT(self)); + Py_INCREF(r); + return r; + } + break; + + case 'p': + if (STR_EQ(name, "parent")) { + r = self->container ? 
self->container : Py_None; + Py_INCREF(r); + return r; + } + break; + + case 's': + if (STR_EQ(name, "self")) { + Py_INCREF(self->obj); + return self->obj; + } + break; + + case 'e': + if (STR_EQ(name, "explicit")) { + if (isExplicitWrapper(self)) { + Py_INCREF(self); + return OBJECT(self); + } + + return newWrapper(self->obj, self->container, &XaqWrappertype); + } + break; + + case 'a': + if (STR_EQ(name, "acquire")) { + return Py_FindAttr(OBJECT(self), oname); + } + break; + + case 'c': + if (STR_EQ(name, "chain")) { + if ((r = PyList_New(0)) == NULL) { + return NULL; + } + + while (PyList_Append(r, OBJECT(self)) == 0) { + if (isWrapper(self) && self->container) { + self = WRAPPER(self->container); + } else { + return r; + } + } + + Py_DECREF(r); + return NULL; + } + break; + + case 'i': + if (STR_EQ(name, "inContextOf")) { + return Py_FindAttr(OBJECT(self), oname); + } else if (STR_EQ(name, "inner")) { + r = get_inner(OBJECT(self)); + Py_INCREF(r); + return r; + } + break; + + case 'u': + if (STR_EQ(name, "uncle")) { + return NATIVE_FROM_STRING("Bob"); + } + break; + } + + return NULL; +} + +static int +apply_filter(PyObject *filter, PyObject *inst, PyObject *oname, PyObject *r, + PyObject *extra, PyObject *orig) +{ + /* Calls the filter, passing arguments. + + Returns 1 if the filter accepts the value, 0 if not, -1 if an + exception occurred. + + Note the special reference counting rule: This function decrements + the refcount of 'r' when it returns 0 or -1. When it returns 1, it + leaves the refcount unchanged. 
+ */ + + PyObject *py_res; + int res; + + py_res = PyObject_CallFunctionObjArgs(filter, orig, inst, oname, r, extra, NULL); + if (py_res == NULL) { + Py_DECREF(r); + return -1; + } + + res = PyObject_IsTrue(py_res); + Py_DECREF(py_res); + + if (res == 0 || res == -1) { + Py_DECREF(r); + return res; + } + + return 1; +} + +static PyObject * +Wrapper_acquire(Wrapper *self, PyObject *oname, + PyObject *filter, PyObject *extra, PyObject *orig, + int explicit, int containment); + +static PyObject * +Wrapper_findattr_name(Wrapper *self, char* name, PyObject *oname, + PyObject *filter, PyObject *extra, PyObject *orig, + int sob, int sco, int explicit, int containment); + +static PyObject * +Wrapper_findattr(Wrapper *self, PyObject *oname, + PyObject *filter, PyObject *extra, PyObject *orig, + int sob, int sco, int explicit, int containment) +/* + Parameters: + + sob + Search self->obj for the 'oname' attribute + + sco + Search self->container for the 'oname' attribute + + explicit + Explicitly acquire 'oname' attribute from container (assumed with + implicit acquisition wrapper) + + containment + Use the innermost wrapper ("aq_inner") for looking up the 'oname' + attribute. +*/ +{ + PyObject *tmp, *result; + + if ((tmp = convert_name(oname)) == NULL) { + return NULL; + } + + result = Wrapper_findattr_name(self, PyBytes_AS_STRING(tmp), oname, filter, + extra, orig, sob, sco, explicit, containment); + Py_XDECREF(tmp); + return result; +} + +static PyObject * +Wrapper_findattr_name(Wrapper *self, char* name, PyObject *oname, + PyObject *filter, PyObject *extra, PyObject *orig, + int sob, int sco, int explicit, int containment) +/* + Exactly the same as Wrapper_findattr, except that the incoming + Python name string/unicode object has already been decoded + into a C string. This helper function lets us more easily manage + the lifetime of any temporary allocations. + + This function uses Wrapper_acquire, which only takes the original + oname value, not the decoded value. 
That function can call back into + this one (via Wrapper_findattr). Although that may lead to a few + temporary allocations as we walk through the containment hierarchy, + it is correct: This function may modify its internal view of the + `name` value, and if that were propagated up the hierarchy + the incorrect name may be looked up. +*/ +{ + PyObject *r; + + if (STR_STARTSWITH(name, "aq_") || STR_EQ(name, "__parent__")) { + /* __parent__ is an alias to aq_parent */ + name = STR_EQ(name, "__parent__") ? "parent" : name + 3; + + if ((r = Wrapper_special(self, name, oname))) { + if (filter) { + switch(apply_filter(filter, OBJECT(self), oname, r, extra, orig)) { + case -1: return NULL; + case 1: return r; + } + } else { + return r; + } + } else { + PyErr_Clear(); + } + } else if (STR_STARTSWITH(name, "__") && + (STR_EQ(name, "__reduce__") || + STR_EQ(name, "__reduce_ex__") || + STR_EQ(name, "__getstate__"))) { + + return PyObject_GenericGetAttr(OBJECT(self), oname); + } + + /* If we are doing a containment search, then replace self with aq_inner */ + self = containment ? 
WRAPPER(get_inner(OBJECT(self))) : self; + + if (sob) { + if (isWrapper(self->obj)) { + if (self == WRAPPER(self->obj)) { + PyErr_SetString(PyExc_RuntimeError, + "Recursion detected in acquisition wrapper"); + return NULL; + } + + r = Wrapper_findattr( + WRAPPER(self->obj), + oname, + filter, + extra, + orig, + 1, + /* Search object container if explicit, + or object is implicit acquirer */ + explicit || isImplicitWrapper(self->obj), + explicit, + containment); + + if (r) { + if (PyECMethod_Check(r) && PyECMethod_Self(r) == self->obj) { + ASSIGN(r, PyECMethod_New(r, OBJECT(self))); + } + return apply__of__(r, OBJECT(self)); + + } else if (!swallow_attribute_error()) { + return NULL; + } + } + + /* Deal with mixed __parent__ / aq_parent circles */ + else if (self->container && + isWrapper(self->container) && + WRAPPER(self->container)->container && + self == WRAPPER(WRAPPER(self->container)->container)) + { + PyErr_SetString(PyExc_RuntimeError, + "Recursion detected in acquisition wrapper"); + return NULL; + } + + /* normal attribute lookup */ + else if ((r = PyObject_GetAttr(self->obj, oname))) { + if (r == Acquired) { + Py_DECREF(r); + return Wrapper_acquire( + self, oname, filter, extra, orig, 1, containment); + } + + if (PyECMethod_Check(r) && PyECMethod_Self(r) == self->obj) { + ASSIGN(r, PyECMethod_New(r, OBJECT(self))); + } + + r = apply__of__(r, OBJECT(self)); + + if (r && filter) { + switch(apply_filter(filter, OBJECT(self), oname, r, extra, orig)) { + case -1: return NULL; + case 1: return r; + } + } else { + return r; + } + } else if (!swallow_attribute_error()) { + return NULL; + } + + PyErr_Clear(); + } + + /* Lookup has failed, acquire it from parent. 
*/ + if (sco && (*name != '_' || explicit)) { + return Wrapper_acquire( + self, oname, filter, extra, orig, explicit, containment); + } + + PyErr_SetObject(PyExc_AttributeError, oname); + return NULL; +} + +static PyObject * +Wrapper_acquire( + Wrapper *self, + PyObject *oname, + PyObject *filter, + PyObject *extra, + PyObject *orig, + int explicit, + int containment) +{ + PyObject *r; + int sob = 1; + int sco = 1; + + if (!self->container) { + PyErr_SetObject(PyExc_AttributeError, oname); + return NULL; + } + + /* If the container has an acquisition wrapper itself, + * we'll use Wrapper_findattr to progress further. + */ + if (isWrapper(self->container)) { + if (isWrapper(self->obj)) { + /* Try to optimize search by recognizing repeated + * objects in path. + */ + if (WRAPPER(self->obj)->container == WRAPPER(self->container)->container) { + sco = 0; + } else if (WRAPPER(self->obj)->container == WRAPPER(self->container)->obj) { + sob = 0; + } + } + + /* Don't search the container when the container of the + * container is the same object as 'self'. + */ + if (WRAPPER(self->container)->container == WRAPPER(self)->obj) { + sco = 0; + containment = 1; + } + + r = Wrapper_findattr(WRAPPER(self->container), oname, filter, extra, + orig, sob, sco, explicit, containment); + + return apply__of__(r, OBJECT(self)); + } + + /* If the container has a __parent__ pointer, we create an + * acquisition wrapper for it accordingly. Then we can proceed + * with Wrapper_findattr, just as if the container had an + * acquisition wrapper in the first place (see above). 
+ */ + else if ((r = PyObject_GetAttr(self->container, py__parent__))) { + /* Don't search the container when the parent of the parent + * is the same object as 'self' + */ + if (r == WRAPPER(self)->obj) { + sco = 0; + } + else if (WRAPPER(r)->obj == WRAPPER(self)->obj) { + sco = 0; + } + + ASSIGN(self->container, newWrapper(self->container, r, &Wrappertype)); + + /* don't need __parent__ anymore */ + Py_DECREF(r); + + r = Wrapper_findattr(WRAPPER(self->container), oname, filter, extra, + orig, sob, sco, explicit, containment); + + /* There's no need to DECREF the wrapper here because it's + * not stored in self->container, thus 'self' owns its + * reference now + */ + return r; + } + + /* The container is the end of the acquisition chain; if we + * can't look up the attribute here, we can't look it up at all. + */ + else { + /* We need to clean up the AttributeError from the previous + * getattr (because it has clearly failed). + */ + if(!swallow_attribute_error()) { + return NULL; + } + + if ((r = PyObject_GetAttr(self->container, oname)) == NULL) { + /* May be AttributeError or some other kind of error */ + return NULL; + } + + if (r == Acquired) { + Py_DECREF(r); + } else if (filter) { + switch(apply_filter(filter, self->container, oname, r, extra, orig)) { + case -1: return NULL; + case 1: return apply__of__(r, OBJECT(self)); + } + } else { + return apply__of__(r, OBJECT(self)); + } + } + + PyErr_SetObject(PyExc_AttributeError, oname); + return NULL; +} + +static PyObject * +Wrapper_getattro(Wrapper *self, PyObject *oname) +{ + return Wrapper_findattr(self, oname, NULL, NULL, NULL, 1, 1, 0, 0); +} + +static PyObject * +Xaq_getattro(Wrapper *self, PyObject *oname) +{ + PyObject *tmp, *result; + + if ((tmp = convert_name(oname)) == NULL) { + return NULL; + } + + /* Special case backward-compatible acquire method. 
*/ + if (STR_EQ(PyBytes_AS_STRING(tmp), "acquire")) { + result = Py_FindAttr(OBJECT(self), oname); + } else { + result = Wrapper_findattr(self, oname, NULL, NULL, NULL, 1, 0, 0, 0); + } + + Py_DECREF(tmp); + return result; +} + +static int +Wrapper_setattro(Wrapper *self, PyObject *oname, PyObject *v) +{ + + PyObject *tmp = NULL; + char *name = ""; + int result; + + if ((tmp = convert_name(oname)) == NULL) { + return -1; + } + + name = PyBytes_AS_STRING(tmp); + + if (STR_EQ(name, "aq_parent") || STR_EQ(name, "__parent__")) { + Py_XINCREF(v); + ASSIGN(self->container, v); + result = 0; + } else { + if (v) { + result = PyObject_SetAttr(self->obj, oname, get_base(v)); + } + else { + result = PyObject_DelAttr(self->obj, oname); + } + } + + Py_DECREF(tmp); + return result; +} + +static int +Wrapper_compare(Wrapper *self, PyObject *w) +{ + + PyObject *obj, *wobj; + PyObject *m; + int r; + + if (OBJECT(self) == w) { + return 0; + } + + if ((m = PyObject_GetAttr(OBJECT(self), py__cmp__)) == NULL) { + PyErr_Clear(); + + /* Unwrap self completely -> obj. */ + obj = get_base(OBJECT(self)); + + /* Unwrap w completely -> wobj. 
*/ + wobj = get_base(w); + + if (obj == wobj) { + return 0; + } else if (obj < w) { + return -1; + } else { + return 1; + } + } + + ASSIGN(m, PyObject_CallFunction(m, "O", w)); + if (m == NULL) { + return -1; + } + + r = PyLong_AsLong(m); + Py_DECREF(m); + return r; +} + +static PyObject * +Wrapper_richcompare(Wrapper *self, PyObject *w, int op) +{ + return diff_to_bool(Wrapper_compare(self, w), op); +} + +static PyObject * +Wrapper_repr(Wrapper *self) +{ + PyObject *r; + + if ((r = PyObject_GetAttr(OBJECT(self), py__repr__))) { + ASSIGN(r, PyObject_CallFunction(r, NULL, NULL)); + return r; + } else { + PyErr_Clear(); + return PyObject_Repr(self->obj); + } +} + +static PyObject * +Wrapper_str(Wrapper *self) +{ + PyObject *r; + + if ((r = PyObject_GetAttr(OBJECT(self), py__str__))) { + ASSIGN(r, PyObject_CallFunction(r,NULL,NULL)); + return r; + } else { + PyErr_Clear(); + return PyObject_Str(self->obj); + } +} + +static PyObject * +Wrapper_unicode(Wrapper *self) +{ + PyObject *r; + + if ((r = PyObject_GetAttr(OBJECT(self), py__unicode__))) { + ASSIGN(r, PyObject_CallFunction(r, NULL, NULL)); + return r; + } else { + PyErr_Clear(); + return Wrapper_str(self); + } +} + +static long +Wrapper_hash(Wrapper *self) +{ + return PyObject_Hash(self->obj); +} + +static PyObject * +Wrapper_call(PyObject *self, PyObject *args, PyObject *kw) +{ + return CallMethod(self, py__call__, args, kw); +} + +/* Code to handle accessing Wrapper objects as sequence objects */ +static Py_ssize_t +Wrapper_length(PyObject* self) +{ + PyObject *result; + PyObject *callable; + PyObject *tres; + Py_ssize_t res; + + callable = PyObject_GetAttr(self, py__len__); + if (callable == NULL) { + if (PyErr_ExceptionMatches(PyExc_AttributeError)) { + /* PyObject_LengthHint in Python3 catches only TypeError. 
+ * Python2 catches both (type and attribute error) + */ + PyErr_SetString(PyExc_TypeError, "object has no len()"); + } + return -1; + } + + result = PyObject_CallObject(callable, NULL); + Py_DECREF(callable); + + if (result == NULL) { + return -1; + } + + /* PyLong_AsSsize_t can only be called on long objects. */ + tres = PyNumber_Long(result); + Py_DECREF(result); + + if (tres == NULL) { + return -1; + } + + res = PyLong_AsSsize_t(tres); + Py_DECREF(tres); + + if (res == -1 && PyErr_Occurred()) { + return -1; + } + + return res; +} + +static PyObject * +Wrapper_add(PyObject *self, PyObject *bb) +{ + return CallMethodArgs(self, py__add__, "(O)", bb); +} + +static PyObject * +Wrapper_repeat(PyObject *self, Py_ssize_t n) +{ + return CallMethodArgs(self, py__mul__, "(n)", n); +} + +static PyObject * +Wrapper_item(PyObject *self, Py_ssize_t i) +{ + return CallMethodArgs(self, py__getitem__, "(n)", i); +} + +static PyObject * +Wrapper_slice(PyObject *self, Py_ssize_t ilow, Py_ssize_t ihigh) +{ + return CallMethodArgs(self, py__getslice__, "(nn)", ilow, ihigh); +} + +static int +Wrapper_ass_item(PyObject *self, Py_ssize_t i, PyObject *v) +{ + if (v) { + v = CallMethodArgs(self, py__setitem__, "(nO)", i, v); + } else { + v = CallMethodArgs(self, py__delitem__, "(n)", i); + } + + if (v == NULL) { + return -1; + } + + Py_DECREF(v); + return 0; +} + +static int +Wrapper_ass_slice(PyObject *self, Py_ssize_t ilow, Py_ssize_t ihigh, PyObject *v) +{ + if (v) { + v = CallMethodArgs(self, py__setslice__, "(nnO)", ilow, ihigh, v); + } else { + v = CallMethodArgs(self, py__delslice__, "(nn)", ilow, ihigh); + } + + if (v == NULL) { + return -1; + } + + Py_DECREF(v); + return 0; +} + +static int +Wrapper_contains(PyObject *self, PyObject *v) +{ + long result; + + if ((v = CallMethodArgs(self, py__contains__, "(O)", v)) == NULL) { + return -1; + } + + result = PyLong_AsLong(v); + Py_DECREF(v); + return result; +} + +/* Support for iteration cannot rely on the internal implementation 
of + `PyObject_GetIter`, since the `self` passed into `__iter__` and + `__getitem__` should be acquisition-wrapped (also see LP 360761): The + wrapper obviously supports the iterator protocol so simply calling + `PyObject_GetIter(OBJECT(self))` results in an infinite recursion. + Instead the base object needs to be checked and the wrapper must only + be used when actually calling `__getitem__` or setting up a sequence + iterator. */ +static PyObject * +Wrapper_iter(Wrapper *self) +{ + PyObject *obj = self->obj; + PyObject *res; + if ((res=PyObject_GetAttr(OBJECT(self),py__iter__))) { + ASSIGN(res,PyObject_CallFunction(res,NULL,NULL)); + if (res != NULL && !PyIter_Check(res)) { + PyErr_Format(PyExc_TypeError, + "iter() returned non-iterator " + "of type '%.100s'", + Py_TYPE(res)->tp_name); + Py_DECREF(res); + res = NULL; + } + } else if (PySequence_Check(obj)) { + PyErr_Clear(); + ASSIGN(res,PySeqIter_New(OBJECT(self))); + } else { + res = PyErr_Format(PyExc_TypeError, "iteration over non-sequence"); + } + return res; +} + +static PySequenceMethods Wrapper_as_sequence = { + (lenfunc)Wrapper_length, /*sq_length*/ + Wrapper_add, /*sq_concat*/ + (ssizeargfunc)Wrapper_repeat, /*sq_repeat*/ + (ssizeargfunc)Wrapper_item, /*sq_item*/ + (ssizessizeargfunc)Wrapper_slice, /*sq_slice*/ + (ssizeobjargproc)Wrapper_ass_item, /*sq_ass_item*/ + (ssizessizeobjargproc)Wrapper_ass_slice, /*sq_ass_slice*/ + (objobjproc)Wrapper_contains, /*sq_contains*/ +}; + +/* -------------------------------------------------------------- */ + +/* Code to access Wrapper objects as mappings */ + + +static PyObject * +Wrapper_subscript(PyObject *self, PyObject *key) +{ + return CallMethodArgs(self, py__getitem__, "(O)", key); +} + +static int +Wrapper_ass_sub(PyObject *self, PyObject *key, PyObject *v) +{ + if (v) { + v = CallMethodArgs(self, py__setitem__, "(OO)", key, v); + } else { + v = CallMethodArgs(self, py__delitem__, "(O)", key); + } + + if (v == NULL) { + return -1; + } + + Py_DECREF(v); + 
return 0; +} + +static PyMappingMethods Wrapper_as_mapping = { + (lenfunc)Wrapper_length, /*mp_length*/ + (binaryfunc)Wrapper_subscript, /*mp_subscript*/ + (objobjargproc)Wrapper_ass_sub, /*mp_ass_subscript*/ +}; + +/* -------------------------------------------------------------- */ + +/* Code to access Wrapper objects as numbers */ + +#define WRAP_UNARYOP(OPNAME) \ + static PyObject* Wrapper_##OPNAME(PyObject* self) { \ + return PyObject_CallMethodObjArgs(self, py__##OPNAME##__, NULL); \ + } + +#define WRAP_BINOP(OPNAME) \ + static PyObject* Wrapper_##OPNAME(PyObject* self, PyObject* o1) { \ + return CallMethodArgs(self, py__##OPNAME##__, "(O)", o1); \ + } + +#define WRAP_TERNARYOP(OPNAME) \ + static PyObject* Wrapper_##OPNAME(PyObject* self, PyObject* o1, PyObject* o2) { \ + return CallMethodArgs(self, py__##OPNAME##__, "(OO)", o1, o2); \ + } + +WRAP_BINOP(sub); +WRAP_BINOP(mul); + +#ifndef PY3K +WRAP_BINOP(div); +#endif + +WRAP_BINOP(mod); +WRAP_BINOP(divmod); +WRAP_TERNARYOP(pow); +WRAP_UNARYOP(neg); +WRAP_UNARYOP(pos); +WRAP_UNARYOP(abs); +WRAP_UNARYOP(invert); +WRAP_BINOP(lshift); +WRAP_BINOP(rshift); +WRAP_BINOP(and); +WRAP_BINOP(xor); +WRAP_BINOP(or); + +WRAP_UNARYOP(int); + +#ifndef PY3K +WRAP_UNARYOP(long); +#endif + +WRAP_UNARYOP(float); + +#ifndef PY3K +WRAP_UNARYOP(oct); +WRAP_UNARYOP(hex); +#endif + +WRAP_BINOP(iadd); +WRAP_BINOP(isub); +WRAP_BINOP(imul); + +#ifndef PY3K +WRAP_BINOP(idiv); +#endif + +WRAP_BINOP(imod); +WRAP_TERNARYOP(ipow); +WRAP_BINOP(ilshift); +WRAP_BINOP(irshift); +WRAP_BINOP(iand); +WRAP_BINOP(ixor); +WRAP_BINOP(ior); +WRAP_BINOP(floordiv); +WRAP_BINOP(truediv); +WRAP_BINOP(ifloordiv); +WRAP_BINOP(itruediv); +WRAP_UNARYOP(index); + +#if ((PY_MAJOR_VERSION == 3) && (PY_MINOR_VERSION > 4)) +WRAP_BINOP(matmul); +WRAP_BINOP(imatmul); +#endif + +static int +Wrapper_nonzero(PyObject *self) +{ + int res; + PyObject* result = NULL; + PyObject* callable = NULL; + +#ifdef PY3K + callable = PyObject_GetAttr(self, py__bool__); +#else + 
callable = PyObject_GetAttr(self, py__nonzero__); +#endif + + if (callable == NULL) { + PyErr_Clear(); + + callable = PyObject_GetAttr(self, py__len__); + if (callable == NULL) { + PyErr_Clear(); + return 1; + } + } + + result = PyObject_CallObject(callable, NULL); + Py_DECREF(callable); + + if (result == NULL) { + return -1; + } + + res = PyObject_IsTrue(result); + Py_DECREF(result); + + return res; + +} + +#ifndef PY3K +static int +Wrapper_coerce(PyObject **self, PyObject **o) +{ + PyObject *m; + + if ((m=PyObject_GetAttr(*self, py__coerce__)) == NULL) { + PyErr_Clear(); + Py_INCREF(*self); + Py_INCREF(*o); + return 0; + } + + ASSIGN(m, PyObject_CallFunction(m, "O", *o)); + if (m == NULL) { + return -1; + } + + if (!PyArg_ParseTuple(m, "OO", self, o)) { + Py_DECREF(m); + return -1; + } + + Py_INCREF(*self); + Py_INCREF(*o); + Py_DECREF(m); + return 0; +} +#endif + +static PyNumberMethods Wrapper_as_number = { + Wrapper_add, /* nb_add */ + Wrapper_sub, /* nb_subtract */ + Wrapper_mul, /* nb_multiply */ +#ifndef PY3K + Wrapper_div, /* nb_divide */ +#endif + Wrapper_mod, /* nb_remainder */ + Wrapper_divmod, /* nb_divmod */ + Wrapper_pow, /* nb_power */ + Wrapper_neg, /* nb_negative */ + Wrapper_pos, /* nb_positive */ + Wrapper_abs, /* nb_absolute */ + Wrapper_nonzero, /* nb_nonzero */ + Wrapper_invert, /* nb_invert */ + Wrapper_lshift, /* nb_lshift */ + Wrapper_rshift, /* nb_rshift */ + Wrapper_and, /* nb_and */ + Wrapper_xor, /* nb_xor */ + Wrapper_or, /* nb_or */ + +#ifndef PY3K + Wrapper_coerce, /* nb_coerce */ +#endif + + Wrapper_int, /* nb_int */ + +#ifdef PY3K + NULL, +#else + Wrapper_long, /* nb_long */ +#endif + + Wrapper_float, /* nb_float */ + +#ifndef PY3K + Wrapper_oct, /* nb_oct*/ + Wrapper_hex, /* nb_hex*/ +#endif + + Wrapper_iadd, /* nb_inplace_add */ + Wrapper_isub, /* nb_inplace_subtract */ + Wrapper_imul, /* nb_inplace_multiply */ + +#ifndef PY3K + Wrapper_idiv, /* nb_inplace_divide */ +#endif + + Wrapper_imod, /* nb_inplace_remainder */ + 
Wrapper_ipow, /* nb_inplace_power */ + Wrapper_ilshift, /* nb_inplace_lshift */ + Wrapper_irshift, /* nb_inplace_rshift */ + Wrapper_iand, /* nb_inplace_and */ + Wrapper_ixor, /* nb_inplace_xor */ + Wrapper_ior, /* nb_inplace_or */ + Wrapper_floordiv, /* nb_floor_divide */ + Wrapper_truediv, /* nb_true_divide */ + Wrapper_ifloordiv, /* nb_inplace_floor_divide */ + Wrapper_itruediv, /* nb_inplace_true_divide */ + Wrapper_index, /* nb_index */ + +#if ((PY_MAJOR_VERSION == 3) && (PY_MINOR_VERSION > 4)) + Wrapper_matmul, /* nb_matrix_multiply */ + Wrapper_imatmul, /* nb_inplace_matrix_multiply */ +#endif +}; + + + +/* -------------------------------------------------------- */ + + +static char *acquire_args[] = {"object", "name", "filter", "extra", "explicit", + "default", "containment", NULL}; + +static PyObject * +Wrapper_acquire_method(Wrapper *self, PyObject *args, PyObject *kw) +{ + PyObject *name, *filter = NULL, *extra = Py_None; + PyObject *expl = NULL, *defalt = NULL; + int explicit = 1; + int containment = 0; + PyObject *result; + + if (!PyArg_ParseTupleAndKeywords(args, kw, "O|OOOOi", acquire_args+1, + &name, &filter, &extra, &expl, + &defalt, &containment)) + { + return NULL; + } + + if (expl) { + explicit = PyObject_IsTrue(expl); + } + + if (filter == Py_None) { + filter = NULL; + } + + result = Wrapper_findattr(self, name, filter, extra, OBJECT(self), 1, + explicit || isImplicitWrapper(self), + explicit, containment); + + if (result == NULL && defalt != NULL) { + /* as "Python/bltinmodule.c:builtin_getattr" turn + * only 'AttributeError' into a default value, such + * that e.g. "ConflictError" and errors raised by the filter + * are not mapped to the default value. 
+ */ + if (swallow_attribute_error()) { + Py_INCREF(defalt); + result = defalt; + } + } + + return result; +} + +/* forward declaration so that we can use it in Wrapper_inContextOf */ +static PyObject * capi_aq_inContextOf(PyObject *self, PyObject *o, int inner); + +static PyObject * +Wrapper_inContextOf(Wrapper *self, PyObject *args) +{ + PyObject *o; + int inner = 1; + if (!PyArg_ParseTuple(args, "O|i", &o, &inner)) { + return NULL; + } + + return capi_aq_inContextOf(OBJECT(self), o, inner); +} + +PyObject * +Wrappers_are_not_picklable(PyObject *wrapper, PyObject *args) +{ + PyErr_SetString(PyExc_TypeError, + "Can't pickle objects in acquisition wrappers."); + return NULL; +} + +static PyObject * +Wrapper___getnewargs__(PyObject *self) +{ + return PyTuple_New(0); +} + +static struct PyMethodDef Wrapper_methods[] = { + {"acquire", (PyCFunction)Wrapper_acquire_method, + METH_VARARGS|METH_KEYWORDS, + "Get an attribute, acquiring it if necessary"}, + {"aq_acquire", (PyCFunction)Wrapper_acquire_method, + METH_VARARGS|METH_KEYWORDS, + "Get an attribute, acquiring it if necessary"}, + {"aq_inContextOf", (PyCFunction)Wrapper_inContextOf, METH_VARARGS, + "Test whether the object is currently in the context of the argument"}, + {"__getnewargs__", (PyCFunction)Wrapper___getnewargs__, METH_NOARGS, + "Get arguments to be passed to __new__"}, + {"__getstate__", (PyCFunction)Wrappers_are_not_picklable, METH_VARARGS, + "Wrappers are not picklable"}, + {"__reduce__", (PyCFunction)Wrappers_are_not_picklable, METH_VARARGS, + "Wrappers are not picklable"}, + {"__reduce_ex__", (PyCFunction)Wrappers_are_not_picklable, METH_VARARGS, + "Wrappers are not picklable"}, + {"__unicode__", (PyCFunction)Wrapper_unicode, METH_NOARGS, + "Unicode"}, + {NULL, NULL} +}; + +static PyExtensionClass Wrappertype = { + PyVarObject_HEAD_INIT(NULL, 0) + "Acquisition.ImplicitAcquisitionWrapper", /* tp_name */ + sizeof(Wrapper), /* tp_basicsize */ + 0, /* tp_itemsize */ + (destructor)Wrapper_dealloc, /* 
tp_dealloc */ + (printfunc)0, /* tp_print */ + (getattrfunc)0, /* tp_getattr */ + (setattrfunc)0, /* tp_setattr */ + 0, /* tp_compare */ + (reprfunc)Wrapper_repr, /* tp_repr */ + &Wrapper_as_number, /* tp_as_number */ + &Wrapper_as_sequence, /* tp_as_sequence */ + &Wrapper_as_mapping, /* tp_as_mapping */ + (hashfunc)Wrapper_hash, /* tp_hash */ + (ternaryfunc)Wrapper_call, /* tp_call */ + (reprfunc)Wrapper_str, /* tp_str */ + (getattrofunc)Wrapper_getattro, /* tp_getattro */ + (setattrofunc)Wrapper_setattro, /* tp_setattro */ + 0, /* tp_as_buffer */ + Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE | + Py_TPFLAGS_HAVE_GC | Py_TPFLAGS_HAVE_VERSION_TAG, /* tp_flags */ + "Wrapper object for implicit acquisition", /* tp_doc */ + (traverseproc)Wrapper_traverse, /* tp_traverse */ + (inquiry)Wrapper_clear, /* tp_clear */ + (richcmpfunc)Wrapper_richcompare, /* tp_richcompare */ + 0, /* tp_weaklistoffset */ + (getiterfunc)Wrapper_iter, /* tp_iter */ + 0, /* tp_iternext */ + Wrapper_methods, /* tp_methods */ + 0, /* tp_members */ + 0, /* tp_getset */ + 0, /* tp_base */ + 0, /* tp_dict */ + (descrgetfunc)Wrapper_descrget, /* tp_descr_get */ + 0, /* tp_descr_set */ + 0, /* tp_dictoffset */ + (initproc)Wrapper__init__, /* tp_init */ + 0, /* tp_alloc */ + Wrapper__new__ /* tp_new */ +}; + +static PyExtensionClass XaqWrappertype = { + PyVarObject_HEAD_INIT(NULL, 0) + "Acquisition.ExplicitAcquisitionWrapper", /*tp_name*/ + sizeof(Wrapper), /* tp_basicsize */ + 0, /* tp_itemsize */ + (destructor)Wrapper_dealloc, /* tp_dealloc */ + (printfunc)0, /* tp_print */ + (getattrfunc)0, /* tp_getattr */ + (setattrfunc)0, /* tp_setattr */ + 0, /* tp_compare */ + (reprfunc)Wrapper_repr, /* tp_repr */ + &Wrapper_as_number, /* tp_as_number */ + &Wrapper_as_sequence, /* tp_as_sequence */ + &Wrapper_as_mapping, /* tp_as_mapping */ + (hashfunc)Wrapper_hash, /* tp_hash */ + (ternaryfunc)Wrapper_call, /* tp_call */ + (reprfunc)Wrapper_str, /* tp_str */ + (getattrofunc)Xaq_getattro, /* tp_getattro */ + 
(setattrofunc)Wrapper_setattro, /* tp_setattro */ + 0, /* tp_as_buffer */ + Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE | + Py_TPFLAGS_HAVE_GC | Py_TPFLAGS_HAVE_VERSION_TAG, /* tp_flags */ + "Wrapper object for explicit acquisition", /* tp_doc */ + (traverseproc)Wrapper_traverse, /* tp_traverse */ + (inquiry)Wrapper_clear, /* tp_clear */ + (richcmpfunc)Wrapper_richcompare, /* tp_richcompare */ + 0, /* tp_weaklistoffset */ + (getiterfunc)Wrapper_iter, /* tp_iter */ + 0, /* tp_iternext */ + Wrapper_methods, /* tp_methods */ + 0, /* tp_members */ + 0, /* tp_getset */ + 0, /* tp_base */ + 0, /* tp_dict */ + (descrgetfunc)Wrapper_descrget, /* tp_descr_get */ + 0, /* tp_descr_set */ + 0, /* tp_dictoffset */ + (initproc)Wrapper__init__, /* tp_init */ + 0, /* tp_alloc */ + Wrapper__new__ /* tp_new */ +}; + +static PyObject * +acquire_of(PyObject *self, PyObject *inst, PyExtensionClass *target) +{ + if (!PyExtensionInstance_Check(inst)) { + PyErr_SetString(PyExc_TypeError, + "attempt to wrap extension method using an object that" + " is not an extension class instance."); + return NULL; + } + + return newWrapper(self, inst, target); + +} + +static PyObject * +aq__of__(PyObject *self, PyObject *inst) +{ + return acquire_of(self, inst, &Wrappertype); +} + +static PyObject * +xaq__of__(PyObject *self, PyObject *inst) +{ + return acquire_of(self, inst, &XaqWrappertype); +} + +static struct PyMethodDef Acquirer_methods[] = { + {"__of__",(PyCFunction)aq__of__, METH_O, + "__of__(context) -- return the object in a context"}, + + {NULL, NULL} +}; + +static struct PyMethodDef ExplicitAcquirer_methods[] = { + {"__of__",(PyCFunction)xaq__of__, METH_O, + "__of__(context) -- return the object in a context"}, + + {NULL, NULL} +}; + +static PyObject * +capi_aq_acquire( + PyObject *self, + PyObject *name, + PyObject *filter, + PyObject *extra, + int explicit, + PyObject *defalt, + int containment) +{ + PyObject *result; + + if (filter == Py_None) { + filter = NULL; + } + + /* We got a wrapped 
object, so business as usual */ + if (isWrapper(self)) { + result = Wrapper_findattr(WRAPPER(self), name, filter, extra, + OBJECT(self), 1, + explicit || isImplicitWrapper(self), + explicit, containment); + } + + /* Not wrapped; check if we have a __parent__ pointer. If that's + * the case, create a wrapper and pretend it's business as usual. + */ + else if ((result = PyObject_GetAttr(self, py__parent__))) { + self = newWrapper(self, result, &Wrappertype); + + /* don't need __parent__ anymore */ + Py_DECREF(result); + + result = Wrapper_findattr(WRAPPER(self), name, filter, extra, + OBJECT(self), 1, 1, explicit, containment); + + /* Get rid of temporary wrapper */ + Py_DECREF(self); + } + + /* No wrapper and no __parent__, so just getattr. */ + else { + /* Clean up the AttributeError from the previous getattr + * (because it has clearly failed). + */ + if (!swallow_attribute_error()) { + return NULL; + } + + if (!filter) { + result = PyObject_GetAttr(self, name); + } else { + /* Construct a wrapper so we can use Wrapper_findattr */ + if ((self = newWrapper(self, Py_None, &Wrappertype)) == NULL) { + return NULL; + } + + result = Wrapper_findattr(WRAPPER(self), name, filter, extra, + OBJECT(self), 1, 1, explicit, containment); + + /* Get rid of temporary wrapper */ + Py_DECREF(self); + } + } + + if (result == NULL && defalt != NULL) { + /* Python/bltinmodule.c:builtin_getattr turns only 'AttributeError' + * into a default value. 
+ */ + if (swallow_attribute_error()) { + Py_INCREF(defalt); + result = defalt; + } + } + + return result; +} + +static PyObject * +module_aq_acquire(PyObject *ignored, PyObject *args, PyObject *kw) +{ + PyObject *self; + PyObject *name, *filter = NULL, *extra = Py_None; + PyObject *expl = NULL, *defalt = NULL; + int explicit = 1, containment = 0; + + if (!PyArg_ParseTupleAndKeywords(args, kw, "OO|OOOOi", acquire_args, + &self, &name, &filter, &extra, &expl, + &defalt, &containment)) + { + return NULL; + } + + if (expl) { + explicit = PyObject_IsTrue(expl); + } + + return capi_aq_acquire(self, name, filter, extra, + explicit, defalt, containment); +} + +static PyObject * +capi_aq_get(PyObject *self, PyObject *name, PyObject *defalt, int containment) +{ + PyObject *result; + + result = capi_aq_acquire(self, name, NULL, NULL, 1, defalt, containment); + + if (result == NULL && defalt) { + PyErr_Clear(); + Py_INCREF(defalt); + return defalt; + } else { + return result; + } +} + +static PyObject * +module_aq_get(PyObject *r, PyObject *args) +{ + PyObject *self, *name, *defalt = NULL; + int containment = 0; + + if (!PyArg_ParseTuple(args, "OO|Oi", &self, &name, &defalt, &containment)) { + return NULL; + } + + return capi_aq_get(self, name, defalt, containment); +} + +static int +capi_aq_iswrapper(PyObject *self) { + return isWrapper(self); +} + +static PyObject * +capi_aq_base(PyObject *self) +{ + PyObject *result = get_base(self); + Py_INCREF(result); + return result; +} + +static PyObject * +module_aq_base(PyObject *ignored, PyObject *self) +{ + return capi_aq_base(self); +} + +static PyObject * +capi_aq_parent(PyObject *self) +{ + PyObject *result; + + if (isWrapper(self) && WRAPPER(self)->container) { + Py_INCREF(WRAPPER(self)->container); + return WRAPPER(self)->container; + } + else if ((result = PyObject_GetAttr(self, py__parent__))) { + /* We already own the reference to result (PyObject_GetAttr gives + * it to us), no need to INCREF here. 
+ */ + return result; + } else { + /* We need to clean up the AttributeError from the previous + * getattr (because it has clearly failed). + */ + if (!swallow_attribute_error()) { + return NULL; + } + + Py_RETURN_NONE; + } +} + +static PyObject * +module_aq_parent(PyObject *ignored, PyObject *self) +{ + return capi_aq_parent(self); +} + +static PyObject * +capi_aq_self(PyObject *self) +{ + PyObject *result; + + if (!isWrapper(self)) { + result = self; + } else { + result = WRAPPER(self)->obj; + } + + Py_INCREF(result); + return result; +} + +static PyObject * +module_aq_self(PyObject *ignored, PyObject *self) +{ + return capi_aq_self(self); +} + +static PyObject * +capi_aq_inner(PyObject *self) +{ + self = get_inner(self); + Py_INCREF(self); + return self; +} + +static PyObject * +module_aq_inner(PyObject *ignored, PyObject *self) +{ + return capi_aq_inner(self); +} + +static PyObject * +capi_aq_chain(PyObject *self, int containment) +{ + PyObject *result; + + /* This allows Py_XDECREF at the end. + * Needed, because the result of PyObject_GetAttr(self, py__parent__) must + * be kept alive until not needed anymore. It could be that the refcount of + * its return value is 1 => calling Py_DECREF too early leads to segfault. 
+ */ + Py_INCREF(self); + + if ((result = PyList_New(0)) == NULL) { + return NULL; + } + + while (1) { + if (isWrapper(self)) { + if (containment) { + ASSIGN(self, get_inner(self)); + Py_INCREF(self); + } + + if (PyList_Append(result, OBJECT(self)) < 0) { + goto err; + } + + if (WRAPPER(self)->container) { + ASSIGN(self, WRAPPER(self)->container); + Py_INCREF(self); + continue; + } + } else { + if (PyList_Append(result, self) < 0) { + goto err; + } + + ASSIGN(self, PyObject_GetAttr(self, py__parent__)); + if (self) { + if (self != Py_None) { + continue; + } + } else if (!swallow_attribute_error()) { + goto err; + } + } + break; + } + + Py_XDECREF(self); + return result; +err: + Py_XDECREF(self); + Py_DECREF(result); + return NULL; +} + +static PyObject * +module_aq_chain(PyObject *ignored, PyObject *args) +{ + PyObject *self; + int containment = 0; + + if (!PyArg_ParseTuple(args, "O|i", &self, &containment)) { + return NULL; + } + + return capi_aq_chain(self, containment); +} + +static PyObject * +capi_aq_inContextOf(PyObject *self, PyObject *o, int inner) +{ + PyObject *result = Py_False; + + o = get_base(o); + + /* This allows Py_DECREF at the end, if the while loop did nothing. 
*/ + Py_INCREF(self); + + while (1) { + /* if aq_base(self) is o: return 1 */ + if (get_base(self) == o) { + result = Py_True; + break; + } + + if (inner) { + ASSIGN(self, capi_aq_inner(self)); + if (self == NULL) { + return NULL; + } else if (self == Py_None) { + result = Py_False; + break; + } + } + + ASSIGN(self, capi_aq_parent(self)); + if (self == NULL) { + return NULL; + } else if (self == Py_None) { + result = Py_False; + break; + } + } + + Py_DECREF(self); + Py_INCREF(result); + return result; +} + +static PyObject * +module_aq_inContextOf(PyObject *ignored, PyObject *args) +{ + PyObject *self, *o; + int inner = 1; + + if (!PyArg_ParseTuple(args, "OO|i", &self, &o, &inner)) { + return NULL; + } + + return capi_aq_inContextOf(self, o, inner); +} + +static struct PyMethodDef methods[] = { + {"aq_acquire", (PyCFunction)module_aq_acquire, METH_VARARGS|METH_KEYWORDS, + "aq_acquire(ob, name [, filter, extra, explicit]) -- " + "Get an attribute, acquiring it if necessary" + }, + {"aq_get", (PyCFunction)module_aq_get, METH_VARARGS, + "aq_get(ob, name [, default]) -- " + "Get an attribute, acquiring it if necessary." 
+ }, + {"aq_base", (PyCFunction)module_aq_base, METH_O, + "aq_base(ob) -- Get the object unwrapped"}, + {"aq_parent", (PyCFunction)module_aq_parent, METH_O, + "aq_parent(ob) -- Get the parent of an object"}, + {"aq_self", (PyCFunction)module_aq_self, METH_O, + "aq_self(ob) -- Get the object with the outermost wrapper removed"}, + {"aq_inner", (PyCFunction)module_aq_inner, METH_O, + "aq_inner(ob) -- " + "Get the object with all but the innermost wrapper removed"}, + {"aq_chain", (PyCFunction)module_aq_chain, METH_VARARGS, + "aq_chain(ob [, containment]) -- " + "Get a list of objects in the acquisition environment"}, + {"aq_inContextOf", (PyCFunction)module_aq_inContextOf, METH_VARARGS, + "aq_inContextOf(base, ob [, inner]) -- " + "Determine whether the object is in the acquisition context of base."}, + {NULL, NULL} +}; + +#ifdef PY3K +static struct PyModuleDef moduledef = +{ + PyModuleDef_HEAD_INIT, + "_Acquisition", /* m_name */ + "Provide base classes for acquiring objects", /* m_doc */ + -1, /* m_size */ + methods, /* m_methods */ + NULL, /* m_reload */ + NULL, /* m_traverse */ + NULL, /* m_clear */ + NULL, /* m_free */ +}; +#endif + + +static PyObject* +module_init(void) +{ + PyObject *m, *d; + PyObject *api; + + PURE_MIXIN_CLASS(Acquirer, + "Base class for objects that implicitly" + " acquire attributes from containers\n", + Acquirer_methods); + + PURE_MIXIN_CLASS(ExplicitAcquirer, + "Base class for objects that explicitly" + " acquire attributes from containers\n", + ExplicitAcquirer_methods); + + if (!ExtensionClassImported) { + return NULL; + } + + Acquired = NATIVE_FROM_STRING(""); + if (Acquired == NULL) { + return NULL; + } + +#ifdef PY3K + m = PyModule_Create(&moduledef); +#else + m = Py_InitModule3("_Acquisition", + methods, + "Provide base classes for acquiring objects\n\n"); +#endif + + d = PyModule_GetDict(m); + init_py_names(); + PyExtensionClass_Export(d,"Acquirer", AcquirerType); + PyExtensionClass_Export(d,"ImplicitAcquisitionWrapper", 
Wrappertype); + PyExtensionClass_Export(d,"ExplicitAcquirer", ExplicitAcquirerType); + PyExtensionClass_Export(d,"ExplicitAcquisitionWrapper", XaqWrappertype); + + /* Create aliases */ + PyDict_SetItemString(d,"Implicit", OBJECT(&AcquirerType)); + PyDict_SetItemString(d,"Explicit", OBJECT(&ExplicitAcquirerType)); + PyDict_SetItemString(d,"Acquired", Acquired); + + AcquisitionCAPI.AQ_Acquire = capi_aq_acquire; + AcquisitionCAPI.AQ_Get = capi_aq_get; + AcquisitionCAPI.AQ_IsWrapper = capi_aq_iswrapper; + AcquisitionCAPI.AQ_Base = capi_aq_base; + AcquisitionCAPI.AQ_Parent = capi_aq_parent; + AcquisitionCAPI.AQ_Self = capi_aq_self; + AcquisitionCAPI.AQ_Inner = capi_aq_inner; + AcquisitionCAPI.AQ_Chain = capi_aq_chain; + + api = PyCapsule_New(&AcquisitionCAPI, "Acquisition.AcquisitionCAPI", NULL); + + PyDict_SetItemString(d, "AcquisitionCAPI", api); + Py_DECREF(api); + + return m; +} + +#ifdef PY3K +PyMODINIT_FUNC PyInit__Acquisition(void) +{ + return module_init(); +} +#else +PyMODINIT_FUNC init_Acquisition(void) +{ + module_init(); +} +#endif diff --git a/thesisenv/lib/python3.6/site-packages/Acquisition/_Acquisition.cpython-36m-darwin.so b/thesisenv/lib/python3.6/site-packages/Acquisition/_Acquisition.cpython-36m-darwin.so new file mode 100755 index 0000000..5a96230 Binary files /dev/null and b/thesisenv/lib/python3.6/site-packages/Acquisition/_Acquisition.cpython-36m-darwin.so differ diff --git a/thesisenv/lib/python3.6/site-packages/Acquisition/__init__.py b/thesisenv/lib/python3.6/site-packages/Acquisition/__init__.py new file mode 100644 index 0000000..dbe6a79 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/Acquisition/__init__.py @@ -0,0 +1,953 @@ +from __future__ import absolute_import, print_function + +# pylint:disable=W0212,R0911,R0912 + + +import os +import operator +import platform +import sys +import types +import weakref + +import ExtensionClass + +from zope.interface import classImplements + +from .interfaces import IAcquirer +from .interfaces 
import IAcquisitionWrapper + +IS_PYPY = getattr(platform, 'python_implementation', lambda: None)() == 'PyPy' +IS_PURE = 'PURE_PYTHON' in os.environ + + +class Acquired(object): + "Marker for explicit acquisition" + + +_NOT_FOUND = object() # marker + +### +# Helper functions +### + + +def _has__of__(obj): + """Check whether an object has an __of__ method for returning itself + in the context of a container.""" + # It is necessary to check both the type (or we get into cycles) + # as well as the presence of the method (or mixins of Base pre- or + # post-class-creation as done in, e.g., + # zopefoundation/Persistence) can fail. + return (isinstance(obj, ExtensionClass.Base) and + hasattr(type(obj), '__of__')) + + +def _apply_filter(predicate, inst, name, result, extra, orig): + return predicate(orig, inst, name, result, extra) + + +if sys.version_info < (3,): + import copy_reg + + def _rebound_method(method, wrapper): + """Returns a version of the method with self bound to `wrapper`""" + if isinstance(method, types.MethodType): + method = types.MethodType(method.im_func, wrapper, method.im_class) + return method + exec("""def _reraise(tp, value, tb=None): + raise tp, value, tb +""") +else: # pragma: no cover (python 2 is currently our reference) + import copyreg as copy_reg + + def _rebound_method(method, wrapper): + """Returns a version of the method with self bound to `wrapper`""" + if isinstance(method, types.MethodType): + method = types.MethodType(method.__func__, wrapper) + return method + + def _reraise(tp, value, tb=None): + if value is None: + value = tp() + if value.__traceback__ is not tb: + raise value.with_traceback(tb) + raise value + +### +# Wrapper object protocol, mostly ported from C directly +### + + +def _Wrapper_findspecial(wrapper, name): + """ + Looks up the special acquisition attributes of an object. + :param str name: The attribute to find, with 'aq' already stripped. 
+ """ + + result = _NOT_FOUND + + if name == 'base': + result = wrapper._obj + while isinstance(result, _Wrapper) and result._obj is not None: + result = result._obj + elif name == 'parent': + result = wrapper._container + elif name == 'self': + result = wrapper._obj + elif name == 'explicit': + if type(wrapper)._IS_IMPLICIT: + result = ExplicitAcquisitionWrapper( + wrapper._obj, wrapper._container) + else: + result = wrapper + elif name == 'acquire': + result = object.__getattribute__(wrapper, 'aq_acquire') + elif name == 'chain': + # XXX: C has a second implementation here + result = aq_chain(wrapper) + elif name == 'inContextOf': + result = object.__getattribute__(wrapper, 'aq_inContextOf') + elif name == 'inner': + # XXX: C has a second implementation here + result = aq_inner(wrapper) + elif name == 'uncle': + result = 'Bob' + + return result + + +def _Wrapper_acquire(wrapper, name, + predicate=None, predicate_extra=None, + orig_object=None, + explicit=True, containment=True): + """ + Attempt to acquire the `name` from the parent of the wrapper. + + :raises AttributeError: If the wrapper has no parent or the + attribute cannot be found. 
+ """ + + if wrapper._container is None: + raise AttributeError(name) + + search_self = True + search_parent = True + + # If the container has an acquisition wrapper itself, we'll use + # _Wrapper_findattr to progress further + if isinstance(wrapper._container, _Wrapper): + if isinstance(wrapper._obj, _Wrapper): + # try to optimize search by recognizing repeated objects in path + if wrapper._obj._container is wrapper._container._container: + search_parent = False + elif wrapper._obj._container is wrapper._container._obj: + search_self = False + + # Don't search the container when the container of the container + # is the same object as `wrapper` + if wrapper._container._container is wrapper._obj: + search_parent = False + containment = True + result = _Wrapper_findattr(wrapper._container, name, + predicate=predicate, + predicate_extra=predicate_extra, + orig_object=orig_object, + search_self=search_self, + search_parent=search_parent, + explicit=explicit, + containment=containment) + # XXX: Why does this branch of the C code check __of__, + # but the next one doesn't? + if _has__of__(result): + result = result.__of__(wrapper) + return result + + # If the container has a __parent__ pointer, we create an + # acquisition wrapper for it accordingly. Then we can proceed + # with Wrapper_findattr, just as if the container had an + # acquisition wrapper in the first place (see above). 
+ # NOTE: This mutates the wrapper + elif hasattr(wrapper._container, '__parent__'): + parent = wrapper._container.__parent__ + # Don't search the container when the parent of the parent + # is the same object as 'self' + if parent is wrapper._obj: + search_parent = False + elif isinstance(parent, _Wrapper) and parent._obj is wrapper._obj: + # XXX: C code just does parent._obj, assumes its a wrapper + search_parent = False + + wrapper._container = ImplicitAcquisitionWrapper( + wrapper._container, parent) + return _Wrapper_findattr(wrapper._container, name, + predicate=predicate, + predicate_extra=predicate_extra, + orig_object=orig_object, + search_self=search_self, + search_parent=search_parent, + explicit=explicit, + containment=containment) + else: + # The container is the end of the acquisition chain; if we + # can't look up the attributes here, we can't look it up at all + result = getattr(wrapper._container, name) + if result is not Acquired: + if predicate: + if _apply_filter(predicate, wrapper._container, name, + result, predicate_extra, orig_object): + return (result.__of__(wrapper) + if _has__of__(result) else result) + else: + raise AttributeError(name) + else: + if _has__of__(result): + result = result.__of__(wrapper) + return result + + # this line cannot be reached + raise AttributeError(name) # pragma: no cover + + +def _Wrapper_findattr(wrapper, name, + predicate=None, predicate_extra=None, + orig_object=None, + search_self=True, search_parent=True, + explicit=True, containment=True): + """ + Search the `wrapper` object for the attribute `name`. + + :param bool search_self: Search `wrapper.aq_self` for the attribute. + :param bool search_parent: Search `wrapper.aq_parent` for the attribute. + :param bool explicit: Explicitly acquire the attribute from the parent + (should be assumed with implicit wrapper) + :param bool containment: Use the innermost wrapper (`aq_inner`) + for looking up the attribute. 
+ """ + + orig_name = name + if orig_object is None: + orig_object = wrapper + + # First, special names + if name.startswith('aq') or name == '__parent__': + # __parent__ is an alias of aq_parent + if name == '__parent__': + name = 'parent' + else: + name = name[3:] + + result = _Wrapper_findspecial(wrapper, name) + if result is not _NOT_FOUND: + if predicate: + if _apply_filter(predicate, wrapper, orig_name, + result, predicate_extra, orig_object): + return result + else: + raise AttributeError(orig_name) + return result + elif name in ('__reduce__', '__reduce_ex__', '__getstate__', + '__of__', '__cmp__', '__eq__', '__ne__', '__lt__', + '__le__', '__gt__', '__ge__'): + return object.__getattribute__(wrapper, orig_name) + + # If we're doing a containment search, replace the wrapper with aq_inner + if containment: + while isinstance(wrapper._obj, _Wrapper): + wrapper = wrapper._obj + + if search_self and wrapper._obj is not None: + if isinstance(wrapper._obj, _Wrapper): + if wrapper is wrapper._obj: + raise RuntimeError("Recursion detected in acquisition wrapper") + try: + result = _Wrapper_findattr(wrapper._obj, orig_name, + predicate=predicate, + predicate_extra=predicate_extra, + orig_object=orig_object, + search_self=True, + search_parent=explicit or isinstance(wrapper._obj, ImplicitAcquisitionWrapper), # NOQA + explicit=explicit, + containment=containment) + if isinstance(result, types.MethodType): + result = _rebound_method(result, wrapper) + elif _has__of__(result): + result = result.__of__(wrapper) + return result + except AttributeError: + pass + + # deal with mixed __parent__ / aq_parent circles + elif (isinstance(wrapper._container, _Wrapper) and + wrapper._container._container is wrapper): + raise RuntimeError("Recursion detected in acquisition wrapper") + else: + # normal attribute lookup + try: + result = getattr(wrapper._obj, orig_name) + except AttributeError: + pass + else: + if result is Acquired: + return _Wrapper_acquire(wrapper, orig_name, + 
predicate=predicate, + predicate_extra=predicate_extra, + orig_object=orig_object, + explicit=True, + containment=containment) + + if isinstance(result, types.MethodType): + result = _rebound_method(result, wrapper) + elif _has__of__(result): + result = result.__of__(wrapper) + + if predicate: + if _apply_filter(predicate, wrapper, orig_name, + result, predicate_extra, orig_object): + return result + else: + return result + + # lookup has failed, acquire from the parent + if search_parent and (not name.startswith('_') or explicit): + return _Wrapper_acquire(wrapper, orig_name, + predicate=predicate, + predicate_extra=predicate_extra, + orig_object=orig_object, + explicit=explicit, + containment=containment) + + raise AttributeError(orig_name) + + +_NOT_GIVEN = object() # marker +_OGA = object.__getattribute__ + +# Map from object types with slots to their generated, derived +# types (or None if no derived type is needed) +_wrapper_subclass_cache = weakref.WeakKeyDictionary() + + +def _make_wrapper_subclass_if_needed(cls, obj, container): + # If the type of an object to be wrapped has __slots__, then we + # must create a wrapper subclass that has descriptors for those + # same slots. 
In this way, its methods that use object.__getattribute__ + # directly will continue to work, even when given an instance of _Wrapper + if getattr(cls, '_Wrapper__DERIVED', False): + return None + type_obj = type(obj) + wrapper_subclass = _wrapper_subclass_cache.get(type_obj, _NOT_GIVEN) + if wrapper_subclass is _NOT_GIVEN: + slotnames = copy_reg._slotnames(type_obj) + if slotnames and not isinstance(obj, _Wrapper): + new_type_dict = {'_Wrapper__DERIVED': True} + + def _make_property(slotname): + return property(lambda s: getattr(s._obj, slotname), + lambda s, v: setattr(s._obj, slotname, v), + lambda s: delattr(s._obj, slotname)) + for slotname in slotnames: + new_type_dict[slotname] = _make_property(slotname) + new_type = type(cls.__name__ + '_' + type_obj.__name__, + (cls,), + new_type_dict) + else: + new_type = None + wrapper_subclass = _wrapper_subclass_cache[type_obj] = new_type + + return wrapper_subclass + + +class _Wrapper(ExtensionClass.Base): + __slots__ = ('_obj', '_container', '__dict__') + _IS_IMPLICIT = None + + def __new__(cls, obj, container): + wrapper_subclass = _make_wrapper_subclass_if_needed(cls, obj, container) # NOQA + if wrapper_subclass: + inst = wrapper_subclass(obj, container) + else: + inst = super(_Wrapper, cls).__new__(cls) + inst._obj = obj + inst._container = container + if hasattr(obj, '__dict__') and not isinstance(obj, _Wrapper): + # Make our __dict__ refer to the same dict as the other object, + # so that if it has methods that use `object.__getattribute__` + # they still work. Note that because we have slots, + # we won't interfere with the contents of that dict. 
+ object.__setattr__(inst, '__dict__', obj.__dict__) + return inst + + def __init__(self, obj, container): + super(_Wrapper, self).__init__() + self._obj = obj + self._container = container + + def __setattr__(self, name, value): + if name == '__parent__' or name == 'aq_parent': + object.__setattr__(self, '_container', value) + return + if name == '_obj' or name == '_container': + # should only happen at init time + object.__setattr__(self, name, value) + return + + # If we are wrapping something, unwrap passed in wrappers + if self._obj is None: + raise AttributeError( + 'Attempt to set attribute on empty acquisition wrapper') + + while value is not None and isinstance(value, _Wrapper): + value = value._obj + + setattr(self._obj, name, value) + + def __delattr__(self, name): + if name == '__parent__' or name == 'aq_parent': + self._container = None + else: + delattr(self._obj, name) + + def __getattribute__(self, name): + if name in ('_obj', '_container'): + return _OGA(self, name) + if (_OGA(self, '_obj') is not None or + _OGA(self, '_container') is not None): + return _Wrapper_findattr(self, name, None, None, None, True, + type(self)._IS_IMPLICIT, False, False) + return _OGA(self, name) + + def __of__(self, parent): + # Based on __of__ in the C code; + # simplify a layer of wrapping. + + # We have to call the raw __of__ method or we recurse on our + # own lookup (the C code does not have this issue, it can use + # the wrapped __of__ method because it gets here via the + # descriptor code path)... + wrapper = self._obj.__of__(parent) + if (not isinstance(wrapper, _Wrapper) or + not isinstance(wrapper._container, _Wrapper)): + return wrapper + # but the returned wrapper should be based on this object's + # wrapping chain + wrapper._obj = self + + while (isinstance(wrapper._obj, _Wrapper) and + (wrapper._obj._container is wrapper._container._obj)): + # Since we mutate the wrapper as we walk up, we must copy + # XXX: This comes from the C implementation. 
Do we really need to + # copy? + wrapper = type(wrapper)(wrapper._obj, wrapper._container) + wrapper._obj = wrapper._obj._obj + return wrapper + + def aq_acquire(self, name, + filter=None, extra=None, + explicit=True, + default=_NOT_GIVEN, + containment=False): + try: + return _Wrapper_findattr(self, name, + predicate=filter, + predicate_extra=extra, + orig_object=self, + search_self=True, + search_parent=explicit or type(self)._IS_IMPLICIT, # NOQA + explicit=explicit, + containment=containment) + except AttributeError: + if default is _NOT_GIVEN: + raise + return default + + acquire = aq_acquire + + def aq_inContextOf(self, o, inner=True): + return aq_inContextOf(self, o, inner=inner) + + # Wrappers themselves are not picklable, but if the underlying + # object has a _p_oid, then the __getnewargs__ method is allowed + def __reduce__(self, *args): + raise TypeError("Can't pickle objects in acquisition wrappers.") + __reduce_ex__ = __reduce__ + __getstate__ = __reduce__ + + def __getnewargs__(self): + return () + + # Equality and comparisons + + def __hash__(self): + # The C implementation doesn't pass the wrapper + # to any __hash__ that the object implements, + # so it can't access derived attributes. + # (If that changes, just add this to __unary_special_methods__ + # and remove this method) + return hash(self._obj) + + # The C implementation forces all comparisons through the + # __cmp__ method, if it's implemented. If it's not implemented, + # then comparisons are based strictly on the memory addresses + # of the underlying object (aq_base). We could mostly emulate + # this behaviour on Python 2, but on Python 3 __cmp__ is gone, + # so users won't have an expectation to write it. + # Because users have never had an expectation that the rich comparison + # methods would be called on their wrapped objects (and so would not be + # accessing acquired attributes there), we can't/don't want to start + # proxying to them? 
+ # For the moment, we settle for an emulation of the C behaviour: + # define __cmp__ the same way, and redirect the rich comparison operators + # to it. (Note that these attributes are also hardcoded in getattribute) + def __cmp__(self, other): + aq_self = self._obj + if hasattr(type(aq_self), '__cmp__'): + return _rebound_method(aq_self.__cmp__, self)(other) + + my_base = aq_base(self) + other_base = aq_base(other) + if my_base is other_base: + return 0 + return -1 if id(my_base) < id(other_base) else 1 + + def __eq__(self, other): + return self.__cmp__(other) == 0 + + def __ne__(self, other): + return self.__cmp__(other) != 0 + + def __lt__(self, other): + return self.__cmp__(other) < 0 + + def __le__(self, other): + return self.__cmp__(other) <= 0 + + def __gt__(self, other): + return self.__cmp__(other) > 0 + + def __ge__(self, other): + return self.__cmp__(other) >= 0 + + # Special methods looked up by the type of self._obj, + # but which must have the wrapper as self when called + + def __nonzero__(self): + aq_self = self._obj + type_aq_self = type(aq_self) + nonzero = getattr(type_aq_self, '__nonzero__', None) + if nonzero is None: + # Py3 bool? + nonzero = getattr(type_aq_self, '__bool__', None) + if nonzero is None: + # a len? 
+ nonzero = getattr(type_aq_self, '__len__', None) + if nonzero: + return bool(nonzero(self)) # Py3 is strict about the return type + # If nothing was defined, then it's true + return True + __bool__ = __nonzero__ + + def __unicode__(self): + f = getattr(self.aq_self, '__unicode__', + getattr(self.aq_self, '__str__', object.__str__)) + return _rebound_method(f, self)() + + def __repr__(self): + aq_self = self._obj + try: + return _rebound_method(aq_self.__repr__, self)() + except (AttributeError, TypeError): + return repr(aq_self) + + def __str__(self): + aq_self = self._obj + try: + return _rebound_method(aq_self.__str__, self)() + except (AttributeError, TypeError): # pragma: no cover (Only Py3) + return str(aq_self) + + __binary_special_methods__ = [ + # general numeric + '__add__', + '__sub__', + '__mul__', + '__matmul__', + '__floordiv__', # not implemented in C + '__mod__', + '__divmod__', + '__pow__', + '__lshift__', + '__rshift__', + '__and__', + '__xor__', + '__or__', + + # division; only one of these will be used at any one time + '__truediv__', + '__div__', + + # reflected numeric + '__radd__', + '__rsub__', + '__rmul__', + '__rdiv__', + '__rtruediv__', + '__rfloordiv__', + '__rmod__', + '__rdivmod__', + '__rpow__', + '__rlshift__', + '__rrshift__', + '__rand__', + '__rxor__', + '__ror__', + + # in place numeric + '__iadd__', + '__isub__', + '__imul__', + '__imatmul__', + '__idiv__', + '__itruediv__', + '__ifloordiv__', + '__imod__', + '__idivmod__', + '__ipow__', + '__ilshift__', + '__irshift__', + '__iand__', + '__ixor__', + '__ior__', + + # conversion + '__coerce__', + + # container + '__delitem__', + ] + + __unary_special_methods__ = [ + # arithmetic + '__neg__', + '__pos__', + '__abs__', + '__invert__', + + # conversion + '__complex__', + '__int__', + '__long__', + '__float__', + '__oct__', + '__hex__', + '__index__', + # '__len__', + + # strings are special + # '__repr__', + # '__str__', + ] + + for _name in __binary_special_methods__: + def 
_make_op(_name): + def op(self, other): + aq_self = self._obj + return getattr(type(aq_self), _name)(self, other) + return op + locals()[_name] = _make_op(_name) + + for _name in __unary_special_methods__: + def _make_op(_name): + def op(self): + aq_self = self._obj + return getattr(type(aq_self), _name)(self) + return op + locals()[_name] = _make_op(_name) + + del _make_op + del _name + + # Container protocol + + def __len__(self): + # if len is missing, it should raise TypeError + # (AttributeError is acceptable under Py2, but Py3 + # breaks list conversion if AttributeError is raised) + try: + l = getattr(type(self._obj), '__len__') + except AttributeError: + raise TypeError('object has no len()') + else: + return l(self) + + def __iter__(self): + # For things that provide either __iter__ or just __getitem__, + # we need to be sure that the wrapper is provided as self + if hasattr(self._obj, '__iter__'): + return _rebound_method(self._obj.__iter__, self)() + if hasattr(self._obj, '__getitem__'): + # Unfortunately we cannot simply call iter(self._obj) + # and rebind im_self like we do above: the Python runtime + # complains: + # (TypeError: 'sequenceiterator' expected, got 'Wrapper' instead) + + class WrapperIter(object): + __slots__ = ('_wrapper',) + + def __init__(self, o): + self._wrapper = o + + def __getitem__(self, i): + return self._wrapper.__getitem__(i) + it = WrapperIter(self) + return iter(it) + + return iter(self._obj) + + def __contains__(self, item): + # First, if the type of the object defines __contains__ then + # use it + aq_self = self._obj + aq_contains = getattr(type(aq_self), '__contains__', None) + if aq_contains: + return aq_contains(self, item) + # Next, we should attempt to iterate like the interpreter; + # but the C code doesn't do this, so we don't either. 
+ # return item in iter(self) + raise AttributeError('__contains__') + + def __setitem__(self, key, value): + aq_self = self._obj + try: + setter = type(aq_self).__setitem__ + except AttributeError: + raise AttributeError("__setitem__") # doctests care about the name + else: + setter(self, key, value) + + def __getitem__(self, key): + if isinstance(key, slice) and hasattr(operator, 'getslice'): + # Only on Python 2 + # XXX: This is probably not proxying correctly, but the existing + # tests pass with this behaviour + return operator.getslice( + self._obj, + key.start if key.start is not None else 0, + key.stop if key.stop is not None else sys.maxint) + + aq_self = self._obj + try: + getter = type(aq_self).__getitem__ + except AttributeError: + raise AttributeError("__getitem__") # doctests care about the name + else: + return getter(self, key) + + def __call__(self, *args, **kwargs): + try: + # Note we look this up on the completely unwrapped + # object, so as not to get a class + call = getattr(self.aq_base, '__call__') + except AttributeError: # pragma: no cover + # A TypeError is what the interpreter raises; + # AttributeError is allowed to percolate through the + # C proxy + raise TypeError('object is not callable') + else: + return _rebound_method(call, self)(*args, **kwargs) + + +class ImplicitAcquisitionWrapper(_Wrapper): + _IS_IMPLICIT = True + + +class ExplicitAcquisitionWrapper(_Wrapper): + _IS_IMPLICIT = False + + def __getattribute__(self, name): + # Special case backwards-compatible acquire method + if name == 'acquire': + return object.__getattribute__(self, name) + + return _Wrapper.__getattribute__(self, name) + + +class _Acquirer(ExtensionClass.Base): + + def __getattribute__(self, name): + try: + return super(_Acquirer, self).__getattribute__(name) + except AttributeError: + # the doctests have very specific error message + # requirements (but at least we can preserve the traceback) + _, _, tb = sys.exc_info() + try: + _reraise(AttributeError, 
AttributeError(name), tb) + finally: + del tb + + def __of__(self, context): + return type(self)._Wrapper(self, context) + + +class Implicit(_Acquirer): + _Wrapper = ImplicitAcquisitionWrapper + + +ImplicitAcquisitionWrapper._Wrapper = ImplicitAcquisitionWrapper + + +class Explicit(_Acquirer): + _Wrapper = ExplicitAcquisitionWrapper + + +ExplicitAcquisitionWrapper._Wrapper = ExplicitAcquisitionWrapper + +### +# Exported module functions +### + + +def aq_acquire(obj, name, + filter=None, extra=None, + explicit=True, + default=_NOT_GIVEN, + containment=False): + if isinstance(obj, _Wrapper): + return obj.aq_acquire(name, + filter=filter, extra=extra, + default=default, + explicit=explicit or type(obj)._IS_IMPLICIT, + containment=containment) + + # Does it have a parent, or do we have a filter? + # Then go through the acquisition code + if hasattr(obj, '__parent__') or filter is not None: + parent = getattr(obj, '__parent__', None) + return aq_acquire(ImplicitAcquisitionWrapper(obj, parent), + name, + filter=filter, extra=extra, + default=default, + explicit=explicit, + containment=containment) + + # no parent and no filter, simple case + try: + return getattr(obj, name) + except AttributeError: + if default is _NOT_GIVEN: + raise AttributeError(name) # doctests are strict + return default + + +def aq_parent(obj): + # needs to be safe to call from __getattribute__ of a wrapper + # and reasonably fast + if isinstance(obj, _Wrapper): + return object.__getattribute__(obj, '_container') + # if not a wrapper, deal with the __parent__ + return getattr(obj, '__parent__', None) + + +def aq_chain(obj, containment=False): + result = [] + + while True: + if isinstance(obj, _Wrapper): + if obj._obj is not None: + if containment: + while isinstance(obj._obj, _Wrapper): + obj = obj._obj + result.append(obj) + if obj._container is not None: + obj = obj._container + continue + else: + result.append(obj) + obj = getattr(obj, '__parent__', None) + if obj is not None: + continue + + 
break + + return result + + +def aq_base(obj): + result = obj + while isinstance(result, _Wrapper): + result = result._obj + return result + + +def aq_get(obj, name, default=_NOT_GIVEN, containment=False): + + # Not wrapped. If we have a __parent__ pointer, create a wrapper + # and go as usual + if not isinstance(obj, _Wrapper) and hasattr(obj, '__parent__'): + obj = ImplicitAcquisitionWrapper(obj, obj.__parent__) + + try: + # We got a wrapped object, business as usual + return (_Wrapper_findattr(obj, name, None, None, obj, + True, True, True, containment) + if isinstance(obj, _Wrapper) + # ok, plain getattr + else getattr(obj, name)) + except AttributeError: + if default is _NOT_GIVEN: + raise + return default + + +def aq_inner(obj): + if not isinstance(obj, _Wrapper): + return obj + + result = obj._obj + while isinstance(result, _Wrapper): + obj = result + result = result._obj + result = obj + return result + + +def aq_self(obj): + if isinstance(obj, _Wrapper): + return obj.aq_self + return obj + + +def aq_inContextOf(self, o, inner=True): + next = self + o = aq_base(o) + + while True: + if aq_base(next) is o: + return True + + if inner: + self = aq_inner(next) + if self is None: # pragma: no cover + # This branch is normally impossible to hit, + # it just mirrors a check in C + break + else: + self = next + + next = aq_parent(self) + if next is None: + break + + return False + + +if not (IS_PYPY or IS_PURE): # pragma: no cover + # Make sure we can import the C extension of our dependency. 
+ from ExtensionClass import _ExtensionClass # NOQA + from ._Acquisition import * # NOQA + +classImplements(Explicit, IAcquirer) +classImplements(ExplicitAcquisitionWrapper, IAcquisitionWrapper) +classImplements(Implicit, IAcquirer) +classImplements(ImplicitAcquisitionWrapper, IAcquisitionWrapper) diff --git a/thesisenv/lib/python3.6/site-packages/Acquisition/interfaces.py b/thesisenv/lib/python3.6/site-packages/Acquisition/interfaces.py new file mode 100644 index 0000000..1069dc1 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/Acquisition/interfaces.py @@ -0,0 +1,62 @@ +############################################################################## +# +# Copyright (c) 2005 Zope Foundation and Contributors. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Acquisition z3 interfaces. + +$Id$ +""" + +from zope.interface import Attribute +from zope.interface import Interface + + +class IAcquirer(Interface): + + """Acquire attributes from containers. + """ + + def __of__(context): + """Get the object in a context. + """ + + +class IAcquisitionWrapper(Interface): + + """Wrapper object for acquisition. + """ + + def aq_acquire(name, filter=None, extra=None, explicit=True, default=0, + containment=0): + """Get an attribute, acquiring it if necessary. + """ + + def aq_inContextOf(obj, inner=1): + """Test whether the object is currently in the context of the argument. 
+ """ + + aq_base = Attribute( + """Get the object unwrapped.""") + + aq_parent = Attribute( + """Get the parent of an object.""") + + aq_self = Attribute( + """Get the object with the outermost wrapper removed.""") + + aq_inner = Attribute( + """Get the object with all but the innermost wrapper removed.""") + + aq_chain = Attribute( + """Get a list of objects in the acquisition environment.""") + + aq_explicit = Attribute( + """Get the object with an explicit acquisition wrapper.""") diff --git a/thesisenv/lib/python3.6/site-packages/Acquisition/tests.py b/thesisenv/lib/python3.6/site-packages/Acquisition/tests.py new file mode 100644 index 0000000..af5ab4a --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/Acquisition/tests.py @@ -0,0 +1,3349 @@ +############################################################################## +# +# Copyright (c) 2003 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. 
+# +############################################################################## +"""Acquisition test cases (and useful examples) +""" + +from __future__ import print_function +import gc +import unittest +import sys +import operator +from doctest import DocTestSuite, DocFileSuite + +import ExtensionClass + +import Acquisition +from Acquisition import ( # NOQA + aq_acquire, + aq_base, + aq_chain, + aq_get, + aq_inContextOf, + aq_inner, + aq_parent, + aq_self, + Explicit, + Implicit, + IS_PYPY, + IS_PURE, +) + +if sys.version_info >= (3,): + PY3 = True + PY2 = False + + def unicode(self): + # For test purposes, redirect the unicode + # to the type of the object, just like Py2 did + try: + return type(self).__unicode__(self) + except AttributeError as e: + return type(self).__str__(self) + long = int +else: + PY2 = True + PY3 = False + +if 'Acquisition._Acquisition' not in sys.modules: + CAPI = False +else: + CAPI = True + +MIXIN_POST_CLASS_DEFINITION = True +try: + class Plain(object): + pass + Plain.__bases__ = (ExtensionClass.Base, ) +except TypeError: + # Not supported + MIXIN_POST_CLASS_DEFINITION = False + +AQ_PARENT = unicode('aq_parent') +UNICODE_WAS_CALLED = unicode('unicode was called') +STR_WAS_CALLED = unicode('str was called') +TRUE = unicode('True') + + +class I(Implicit): + + def __init__(self, id): + self.id = id + + def __repr__(self): + return self.id + + +class E(Explicit): + + def __init__(self, id): + self.id = id + + def __repr__(self): + return self.id + + +class Location(object): + __parent__ = None + + +class ECLocation(ExtensionClass.Base): + __parent__ = None + + +def show(x): + print(showaq(x).strip()) + + +def showaq(m_self, indent=''): + rval = '' + obj = m_self + base = getattr(obj, 'aq_base', obj) + try: + id = base.id + except Exception: + id = str(base) + try: + id = id() + except Exception: + pass + + if hasattr(obj, 'aq_self'): + if hasattr(obj.aq_self, 'aq_self'): + rval = rval + indent + "(" + id + ")\n" + rval = rval + indent + 
"| \\\n" + rval = rval + showaq(obj.aq_self, '| ' + indent) + rval = rval + indent + "|\n" + rval = rval + showaq(obj.aq_parent, indent) + elif hasattr(obj, 'aq_parent'): + rval = rval + indent + id + "\n" + rval = rval + indent + "|\n" + rval = rval + showaq(obj.aq_parent, indent) + else: + rval = rval + indent + id + "\n" + return rval + + +class TestStory(unittest.TestCase): + + def test_story(self): + # Acquisition is a mechanism that allows objects to obtain + # attributes from their environment. It is similar to inheritence, + # except that, rather than traversing an inheritence hierarchy + # to obtain attributes, a containment hierarchy is traversed. + + # The "ExtensionClass":ExtensionClass.html. release includes mix-in + # extension base classes that can be used to add acquisition as a + # feature to extension subclasses. These mix-in classes use the + # context-wrapping feature of ExtensionClasses to implement + # acquisition. Consider the following example: + + class C(ExtensionClass.Base): + color = 'red' + + class A(Implicit): + def report(self): + return self.color + + a = A() + c = C() + c.a = a + self.assertEqual(c.a.report(), 'red') + + d = C() + d.color = 'green' + d.a = a + self.assertEqual(d.a.report(), 'green') + + with self.assertRaises(AttributeError): + a.report() + + # The class 'A' inherits acquisition behavior from 'Implicit'. + # The object, 'a', "has" the color of objects 'c' and 'd' + # when it is accessed through them, but it has no color by itself. + # The object 'a' obtains attributes from it's environment, where + # it's environment is defined by the access path used to reach 'a'. + + # Acquisition wrappers + + # When an object that supports acquisition is accessed through + # an extension class instance, a special object, called an + # acquisition wrapper, is returned. In the example above, the + # expression 'c.a' returns an acquisition wrapper that + # contains references to both 'c' and 'a'. 
It is this wrapper + # that performs attribute lookup in 'c' when an attribute + # cannot be found in 'a'. + + # Aquisition wrappers provide access to the wrapped objects + # through the attributes 'aq_parent', 'aq_self', 'aq_base'. + # In the example above, the expressions: + self.assertIs(c.a.aq_parent, c) + + # and: + self.assertIs(c.a.aq_self, a) + + # both evaluate to true, but the expression: + self.assertIsNot(c.a, a) + + # evaluates to false, because the expression 'c.a' evaluates + # to an acquisition wrapper around 'c' and 'a', not 'a' itself. + + # The attribute 'aq_base' is similar to 'aq_self'. Wrappers may be + # nested and 'aq_self' may be a wrapped object. The 'aq_base' + # attribute is the underlying object with all wrappers removed. + + # Acquisition Control + + # Two styles of acquisition are supported in the current + # ExtensionClass release, implicit and explicit aquisition. + + # Implicit acquisition + + # Implicit acquisition is so named because it searches for + # attributes from the environment automatically whenever an + # attribute cannot be obtained directly from an object or + # through inheritence. + + # An attribute may be implicitly acquired if it's name does + # not begin with an underscore, '_'. + # To support implicit acquisition, an object should inherit + # from the mix-in class 'Implicit'. + + # Explicit Acquisition + + # When explicit acquisition is used, attributes are not + # automatically obtained from the environment. Instead, the + # method 'aq_aquire' must be used, as in: + # print(c.a.aq_acquire('color')) + + # To support explicit acquisition, an object should inherit + # from the mix-in class 'Explicit'. + + # Controlled Acquisition + + # A class (or instance) can provide attribute by attribute control + # over acquisition. This is done by: + # - subclassing from 'Explicit', and + # - setting all attributes that should be acquired to the special + # value: 'Acquisition.Acquired'. 
Setting an attribute to this + # value also allows inherited attributes to be overridden with + # acquired ones. + # For example, in: + + class E(Explicit): + id = 1 + secret = 2 + color = Acquisition.Acquired + __roles__ = Acquisition.Acquired + + # The *only* attributes that are automatically acquired from + # containing objects are 'color', and '__roles__'. + + c = C() + c.foo = 'foo' + c.e = E() + self.assertEqual(c.e.color, 'red') + with self.assertRaises(AttributeError): + c.e.foo + + # Note also that the '__roles__' attribute is acquired even + # though it's name begins with an underscore: + + c.__roles__ = 'Manager', 'Member' + self.assertEqual(c.e.__roles__, ('Manager', 'Member')) + + # In fact, the special 'Acquisition.Acquired' value can be used + # in 'Implicit' objects to implicitly acquire + # selected objects that smell like private objects. + + class I(Implicit): + __roles__ = Acquisition.Acquired + + c.x = C() + with self.assertRaises(AttributeError): + c.x.__roles__ + + c.x = I() + self.assertEqual(c.x.__roles__, ('Manager', 'Member')) + + # Filtered Acquisition + + # The acquisition method, 'aq_acquire', accepts two optional + # arguments. The first of the additional arguments is a + # "filtering" function that is used when considering whether to + # acquire an object. The second of the additional arguments is an + # object that is passed as extra data when calling the filtering + # function and which defaults to 'None'. + + # The filter function is called with five arguments: + # - The object that the 'aq_acquire' method was called on, + # - The object where an object was found, + # - The name of the object, as passed to 'aq_acquire', + # - The object found, and + # - The extra data passed to 'aq_acquire'. + + # If the filter returns a true object that the object found is + # returned, otherwise, the acquisition search continues. 
+ # For example, in: + + class HandyForTesting(object): + def __init__(self, name): + self.name = name + + def __str__(self): + return "%s(%s)" % (self.name, self.__class__.__name__) + + __repr__ = __str__ + + class E(Explicit, HandyForTesting): + pass + + class Nice(HandyForTesting): + isNice = 1 + + def __str__(self): + return HandyForTesting.__str__(self) + ' and I am nice!' + + __repr__ = __str__ + + a = E('a') + a.b = E('b') + a.b.c = E('c') + a.p = Nice('spam') + a.b.p = E('p') + + def find_nice(self, ancestor, name, object, extra): + return hasattr(object, 'isNice') and object.isNice + + self.assertEqual(str(a.b.c.aq_acquire('p', find_nice)), + 'spam(Nice) and I am nice!') + + # The filtered acquisition in the last line skips over the first + # attribute it finds with the name 'p', because the attribute + # doesn't satisfy the condition given in the filter. + + # Acquisition and methods + + # Python methods of objects that support acquisition can use + # acquired attributes as in the 'report' method of the first example + # above. When a Python method is called on an object that is + # wrapped by an acquisition wrapper, the wrapper is passed to the + # method as the first argument. This rule also applies to + # user-defined method types and to C methods defined in pure mix-in + # classes. + + # Unfortunately, C methods defined in extension base classes that + # define their own data structures, cannot use aquired attributes at + # this time. This is because wrapper objects do not conform to the + # data structures expected by these methods. 
+ + # Acquiring Acquiring objects + + # Consider the following example: + class C(Implicit): + def __init__(self, name): + self.name = name + + def __str__(self): + return "%s(%s)" % (self.name, self.__class__.__name__) + + __repr__ = __str__ + + a = C("a") + a.b = C("b") + a.b.pref = "spam" + a.b.c = C("c") + a.b.c.color = "red" + a.b.c.pref = "eggs" + a.x = C("x") + o = a.b.c.x + + # The expression 'o.color' might be expected to return '"red"'. In + # earlier versions of ExtensionClass, however, this expression + # failed. Acquired acquiring objects did not acquire from the + # environment they were accessed in, because objects were only + # wrapped when they were first found, and were not rewrapped as they + # were passed down the acquisition tree. + + # In the current release of ExtensionClass, the expression "o.color" + # does indeed return '"red"'. + self.assertEqual(o.color, 'red') + + # When searching for an attribute in 'o', objects are searched in + # the order 'x', 'a', 'b', 'c'. So, for example, the expression, + # 'o.pref' returns '"spam"', not '"eggs"': + self.assertEqual(o.pref, 'spam') + + # In earlier releases of ExtensionClass, the attempt to get the + # 'pref' attribute from 'o' would have failed. + + # If desired, the current rules for looking up attributes in complex + # expressions can best be understood through repeated application of + # the '__of__' method: + # 'a.x' -- 'x.__of__(a)' + # 'a.b' -- 'b.__of__(a)' + # 'a.b.x' -- 'x.__of__(a).__of__(b.__of__(a))' + # 'a.b.c' -- 'c.__of__(b.__of__(a))' + # 'a.b.c.x' -- + # 'x.__of__(a).__of__(b.__of__(a)).__of__(c.__of__(b.__of__(a)))' + + # and by keeping in mind that attribute lookup in a wrapper + # is done by trying to lookup the attribute in the wrapped object + # first and then in the parent object. In the expressions above + # involving the '__of__' method, lookup proceeds from left to right. + + # Note that heuristics are used to avoid most of the repeated + # lookups. 
For example, in the expression: 'a.b.c.x.foo', the object + # 'a' is searched no more than once, even though it is wrapped three + # times. + + +def test_unwrapped(): + """ + >>> c = I('unwrapped') + >>> show(c) + unwrapped + + >>> try: + ... c.aq_parent + ... except AttributeError: + ... pass + ... else: + ... raise AssertionError('AttributeError not raised.') + + >>> try: + ... c.__parent__ + ... except AttributeError: + ... pass + ... else: + ... raise AssertionError('AttributeError not raised.') + + >>> aq_acquire(c, 'id') + 'unwrapped' + + >>> try: + ... aq_acquire(c, 'x') + ... except AttributeError: + ... pass + ... else: + ... raise AssertionError('AttributeError not raised.') + + >>> aq_acquire(c, 'id', + ... lambda searched, parent, name, ob, extra: extra) + Traceback (most recent call last): + ... + AttributeError: id + + >>> aq_acquire(c, 'id', + ... lambda searched, parent, name, ob, extra: extra, + ... 1) + 'unwrapped' + + >>> aq_base(c) is c + 1 + + >>> aq_chain(c) + [unwrapped] + + >>> aq_chain(c, 1) + [unwrapped] + + >>> aq_get(c, 'id') + 'unwrapped' + + >>> try: + ... aq_get(c, 'x') + ... except AttributeError: + ... pass + ... else: + ... 
raise AssertionError('AttributeError not raised.') + + >>> aq_get(c, 'x', 'foo') + 'foo' + >>> aq_get(c, 'x', 'foo', 1) + 'foo' + + >>> aq_inner(c) is c + 1 + + >>> aq_parent(c) + + >>> aq_self(c) is c + 1 + + """ + + +def test_simple(): + """ + >>> a = I('a') + >>> a.y = 42 + >>> a.b = I('b') + >>> a.b.c = I('c') + >>> show(a.b.c) + c + | + b + | + a + + >>> show(a.b.c.aq_parent) + b + | + a + + >>> show(a.b.c.aq_self) + c + + >>> show(a.b.c.aq_base) + c + + >>> show(a.b.c.aq_inner) + c + | + b + | + a + + >>> a.b.c.y + 42 + + >>> a.b.c.aq_chain + [c, b, a] + + >>> a.b.c.aq_inContextOf(a) + 1 + >>> a.b.c.aq_inContextOf(a.b) + 1 + >>> a.b.c.aq_inContextOf(a.b.c) + 1 + + >>> aq_inContextOf(a.b.c, a) + 1 + >>> aq_inContextOf(a.b.c, a.b) + 1 + >>> aq_inContextOf(a.b.c, a.b.c) + 1 + + + >>> a.b.c.aq_acquire('y') + 42 + + >>> a.b.c.aq_acquire('id') + 'c' + + >>> try: + ... a.b.c.aq_acquire('x') + ... except AttributeError: + ... pass + ... else: + ... raise AssertionError('AttributeError not raised.') + + >>> a.b.c.aq_acquire('id', + ... lambda searched, parent, name, ob, extra: extra) + Traceback (most recent call last): + ... + AttributeError: id + + >>> aq_acquire(a.b.c, 'id', + ... lambda searched, parent, name, ob, extra: extra, + ... 1) + 'c' + + >>> aq_acquire(a.b.c, 'id') + 'c' + + >>> try: + ... aq_acquire(a.b.c, 'x') + ... except AttributeError: + ... pass + ... else: + ... raise AssertionError('AttributeError not raised.') + + >>> aq_acquire(a.b.c, 'y') + 42 + + >>> aq_acquire(a.b.c, 'id', + ... lambda searched, parent, name, ob, extra: extra) + Traceback (most recent call last): + ... + AttributeError: id + + >>> aq_acquire(a.b.c, 'id', + ... lambda searched, parent, name, ob, extra: extra, + ... 1) + 'c' + + >>> show(aq_base(a.b.c)) + c + + >>> aq_chain(a.b.c) + [c, b, a] + + >>> aq_chain(a.b.c, 1) + [c, b, a] + + >>> aq_get(a.b.c, 'id') + 'c' + + >>> try: + ... aq_get(a.b.c, 'x') + ... except AttributeError: + ... pass + ... else: + ... 
raise AssertionError('AttributeError not raised.') + + >>> aq_get(a.b.c, 'x', 'foo') + 'foo' + >>> aq_get(a.b.c, 'x', 'foo', 1) + 'foo' + + >>> show(aq_inner(a.b.c)) + c + | + b + | + a + + >>> show(aq_parent(a.b.c)) + b + | + a + + >>> show(aq_self(a.b.c)) + c + + A wrapper's __parent__ attribute (which is equivalent to its + aq_parent attribute) points to the Acquisition parent. + + >>> a.b.c.__parent__ == a.b.c.aq_parent + True + >>> a.b.c.__parent__ == a.b + True + """ + + +def test_muliple(): + r""" + >>> a = I('a') + >>> a.color = 'red' + >>> a.a1 = I('a1') + >>> a.a1.color = 'green' + >>> a.a1.a11 = I('a11') + >>> a.a2 = I('a2') + >>> a.a2.a21 = I('a21') + >>> show(a.a1.a11.a2.a21) + a21 + | + (a2) + | \ + | (a2) + | | \ + | | a2 + | | | + | | a + | | + | a1 + | | + | a + | + a11 + | + a1 + | + a + + >>> a.a1.a11.a2.a21.color + 'red' + + >>> show(a.a1.a11.a2.a21.aq_parent) + (a2) + | \ + | (a2) + | | \ + | | a2 + | | | + | | a + | | + | a1 + | | + | a + | + a11 + | + a1 + | + a + + >>> show(a.a1.a11.a2.a21.aq_parent.aq_parent) + a11 + | + a1 + | + a + + >>> show(a.a1.a11.a2.a21.aq_self) + a21 + + >>> show(a.a1.a11.a2.a21.aq_parent.aq_self) + (a2) + | \ + | a2 + | | + | a + | + a1 + | + a + + >>> show(a.a1.a11.a2.a21.aq_base) + a21 + + >>> show(a.a1.a11.a2.a21.aq_inner) + a21 + | + (a2) + | \ + | (a2) + | | \ + | | a2 + | | | + | | a + | | + | a1 + | | + | a + | + a11 + | + a1 + | + a + + >>> show(a.a1.a11.a2.a21.aq_inner.aq_parent.aq_inner) + a2 + | + a + + >>> show(a.a1.a11.a2.a21.aq_inner.aq_parent.aq_inner.aq_parent) + a + + >>> a.a1.a11.a2.a21.aq_chain + [a21, a2, a11, a1, a] + + >>> a.a1.a11.a2.a21.aq_inContextOf(a) + 1 + + >>> a.a1.a11.a2.a21.aq_inContextOf(a.a2) + 1 + + >>> a.a1.a11.a2.a21.aq_inContextOf(a.a1) + 0 + + >>> a.a1.a11.a2.a21.aq_acquire('color') + 'red' + >>> a.a1.a11.a2.a21.aq_acquire('id') + 'a21' + + >>> a.a1.a11.a2.a21.aq_acquire('color', + ... lambda ob, parent, name, v, extra: extra) + Traceback (most recent call last): + ... 
+ AttributeError: color + + >>> a.a1.a11.a2.a21.aq_acquire('color', + ... lambda ob, parent, name, v, extra: extra, 1) + 'red' + + >>> a.a1.y = 42 + >>> a.a1.a11.a2.a21.aq_acquire('y') + 42 + + >>> try: + ... a.a1.a11.a2.a21.aq_acquire('y', containment=1) + ... except AttributeError: + ... pass + ... else: + ... raise AssertionError('AttributeError not raised.') + + Much of the same, but with methods: + + >>> show(aq_parent(a.a1.a11.a2.a21)) + (a2) + | \ + | (a2) + | | \ + | | a2 + | | | + | | a + | | + | a1 + | | + | a + | + a11 + | + a1 + | + a + + >>> show(aq_parent(a.a1.a11.a2.a21.aq_parent)) + a11 + | + a1 + | + a + + >>> show(aq_self(a.a1.a11.a2.a21)) + a21 + + >>> show(aq_self(a.a1.a11.a2.a21.aq_parent)) + (a2) + | \ + | a2 + | | + | a + | + a1 + | + a + + >>> show(aq_base(a.a1.a11.a2.a21)) + a21 + + >>> show(aq_inner(a.a1.a11.a2.a21)) + a21 + | + (a2) + | \ + | (a2) + | | \ + | | a2 + | | | + | | a + | | + | a1 + | | + | a + | + a11 + | + a1 + | + a + + >>> show(aq_inner(a.a1.a11.a2.a21.aq_inner.aq_parent)) + a2 + | + a + + >>> show(aq_parent( + ... a.a1.a11.a2.a21.aq_inner.aq_parent.aq_inner)) + a + + >>> aq_chain(a.a1.a11.a2.a21) + [a21, a2, a11, a1, a] + + >>> aq_chain(a.a1.a11.a2.a21, 1) + [a21, a2, a] + + >>> aq_acquire(a.a1.a11.a2.a21, 'color') + 'red' + >>> aq_acquire(a.a1.a11.a2.a21, 'id') + 'a21' + + >>> aq_acquire(a.a1.a11.a2.a21, 'color', + ... lambda ob, parent, name, v, extra: extra) + Traceback (most recent call last): + ... + AttributeError: color + + >>> aq_acquire(a.a1.a11.a2.a21, 'color', + ... lambda ob, parent, name, v, extra: extra, 1) + 'red' + + >>> a.a1.y = 42 + >>> aq_acquire(a.a1.a11.a2.a21, 'y') + 42 + + >>> try: + ... aq_acquire(a.a1.a11.a2.a21, 'y', containment=1) + ... except AttributeError: + ... pass + ... else: + ... 
raise AssertionError('AttributeError not raised.') + """ + + +def test_pinball(): + r""" + >>> a = I('a') + >>> a.a1 = I('a1') + >>> a.a1.a11 = I('a11') + >>> a.a1.a12 = I('a12') + >>> a.a2 = I('a2') + >>> a.a2.a21 = I('a21') + >>> a.a2.a22 = I('a22') + >>> show(a.a1.a11.a1.a12.a2.a21.a2.a22) + a22 + | + (a2) + | \ + | (a2) + | | \ + | | a2 + | | | + | | a + | | + | (a2) + | | \ + | | (a2) + | | | \ + | | | a2 + | | | | + | | | a + | | | + | | (a1) + | | | \ + | | | (a1) + | | | | \ + | | | | a1 + | | | | | + | | | | a + | | | | + | | | a1 + | | | | + | | | a + | | | + | | a11 + | | | + | | a1 + | | | + | | a + | | + | a12 + | | + | (a1) + | | \ + | | (a1) + | | | \ + | | | a1 + | | | | + | | | a + | | | + | | a1 + | | | + | | a + | | + | a11 + | | + | a1 + | | + | a + | + a21 + | + (a2) + | \ + | (a2) + | | \ + | | a2 + | | | + | | a + | | + | (a1) + | | \ + | | (a1) + | | | \ + | | | a1 + | | | | + | | | a + | | | + | | a1 + | | | + | | a + | | + | a11 + | | + | a1 + | | + | a + | + a12 + | + (a1) + | \ + | (a1) + | | \ + | | a1 + | | | + | | a + | | + | a1 + | | + | a + | + a11 + | + a1 + | + a + + """ + + +def test_explicit(): + """ + >>> a = E('a') + >>> a.y = 42 + >>> a.b = E('b') + >>> a.b.c = E('c') + >>> show(a.b.c) + c + | + b + | + a + + >>> show(a.b.c.aq_parent) + b + | + a + + >>> show(a.b.c.aq_self) + c + + >>> show(a.b.c.aq_base) + c + + >>> show(a.b.c.aq_inner) + c + | + b + | + a + + >>> a.b.c.y + Traceback (most recent call last): + ... + AttributeError: y + + >>> a.b.c.aq_chain + [c, b, a] + + >>> a.b.c.aq_inContextOf(a) + 1 + >>> a.b.c.aq_inContextOf(a.b) + 1 + >>> a.b.c.aq_inContextOf(a.b.c) + 1 + + + >>> a.b.c.aq_acquire('y') + 42 + + >>> a.b.c.aq_acquire('id') + 'c' + + >>> try: + ... a.b.c.aq_acquire('x') + ... except AttributeError: + ... pass + ... else: + ... raise AssertionError('AttributeError not raised.') + + >>> a.b.c.aq_acquire('id', + ... lambda searched, parent, name, ob, extra: extra) + Traceback (most recent call last): + ... 
+ AttributeError: id + + >>> aq_acquire(a.b.c, 'id', + ... lambda searched, parent, name, ob, extra: extra, + ... 1) + 'c' + + >>> aq_acquire(a.b.c, 'id') + 'c' + + >>> try: + ... aq_acquire(a.b.c, 'x') + ... except AttributeError: + ... pass + ... else: + ... raise AssertionError('AttributeError not raised.') + + >>> aq_acquire(a.b.c, 'y') + 42 + + >>> aq_acquire(a.b.c, 'id', + ... lambda searched, parent, name, ob, extra: extra) + Traceback (most recent call last): + ... + AttributeError: id + + >>> aq_acquire(a.b.c, 'id', + ... lambda searched, parent, name, ob, extra: extra, + ... 1) + 'c' + + >>> show(aq_base(a.b.c)) + c + + >>> aq_chain(a.b.c) + [c, b, a] + + >>> aq_chain(a.b.c, 1) + [c, b, a] + + >>> aq_get(a.b.c, 'id') + 'c' + + >>> try: + ... aq_get(a.b.c, 'x') + ... except AttributeError: + ... pass + ... else: + ... raise AssertionError('AttributeError not raised.') + + >>> aq_get(a.b.c, 'y') + 42 + + >>> aq_get(a.b.c, 'x', 'foo') + 'foo' + >>> aq_get(a.b.c, 'x', 'foo', 1) + 'foo' + + >>> show(aq_inner(a.b.c)) + c + | + b + | + a + + >>> show(aq_parent(a.b.c)) + b + | + a + + >>> show(aq_self(a.b.c)) + c + + """ + + +def test_mixed_explicit_and_explicit(): + """ + >>> a = I('a') + >>> a.y = 42 + >>> a.b = E('b') + >>> a.b.z = 3 + >>> a.b.c = I('c') + >>> show(a.b.c) + c + | + b + | + a + + >>> show(a.b.c.aq_parent) + b + | + a + + >>> show(a.b.c.aq_self) + c + + >>> show(a.b.c.aq_base) + c + + >>> show(a.b.c.aq_inner) + c + | + b + | + a + + >>> a.b.c.y + 42 + + >>> a.b.c.z + 3 + + >>> a.b.c.aq_chain + [c, b, a] + + >>> a.b.c.aq_inContextOf(a) + 1 + >>> a.b.c.aq_inContextOf(a.b) + 1 + >>> a.b.c.aq_inContextOf(a.b.c) + 1 + + >>> a.b.c.aq_acquire('y') + 42 + + >>> a.b.c.aq_acquire('z') + 3 + + >>> a.b.c.aq_acquire('id') + 'c' + + >>> try: + ... a.b.c.aq_acquire('x') + ... except AttributeError: + ... pass + ... else: + ... raise AssertionError('AttributeError not raised.') + + >>> a.b.c.aq_acquire('id', + ... 
lambda searched, parent, name, ob, extra: extra) + Traceback (most recent call last): + ... + AttributeError: id + + >>> aq_acquire(a.b.c, 'id', + ... lambda searched, parent, name, ob, extra: extra, + ... 1) + 'c' + + >>> aq_acquire(a.b.c, 'id') + 'c' + + >>> try: + ... aq_acquire(a.b.c, 'x') + ... except AttributeError: + ... pass + ... else: + ... raise AssertionError('AttributeError not raised.') + + >>> aq_acquire(a.b.c, 'y') + 42 + + >>> aq_acquire(a.b.c, 'id', + ... lambda searched, parent, name, ob, extra: extra) + Traceback (most recent call last): + ... + AttributeError: id + + >>> aq_acquire(a.b.c, 'id', + ... lambda searched, parent, name, ob, extra: extra, + ... 1) + 'c' + + >>> show(aq_base(a.b.c)) + c + + >>> aq_chain(a.b.c) + [c, b, a] + + >>> aq_chain(a.b.c, 1) + [c, b, a] + + >>> aq_get(a.b.c, 'id') + 'c' + + >>> try: + ... aq_get(a.b.c, 'x') + ... except AttributeError: + ... pass + ... else: + ... raise AssertionError('AttributeError not raised.') + + >>> aq_get(a.b.c, 'y') + 42 + + >>> aq_get(a.b.c, 'x', 'foo') + 'foo' + >>> aq_get(a.b.c, 'x', 'foo', 1) + 'foo' + + >>> show(aq_inner(a.b.c)) + c + | + b + | + a + + >>> show(aq_parent(a.b.c)) + b + | + a + + >>> show(aq_self(a.b.c)) + c + + """ + + +class TestAqAlgorithm(unittest.TestCase): + + def test_AqAlg(self): + A = I('A') + B = I('B') + A.B = B + A.B.color = 'red' + C = I('C') + A.C = C + D = I('D') + A.C.D = D + + self.assertEqual(aq_chain(A), [A]) + self.assertEqual(aq_chain(A, 1), [A]) + self.assertEqual(list(map(aq_base, aq_chain(A, 1))), [A]) + + self.assertEqual(str(aq_chain(A.C)), str([C, A])) + self.assertEqual(aq_chain(A.C, 1), [C, A]) + self.assertEqual(list(map(aq_base, aq_chain(A.C, 1))), [C, A]) + + self.assertEqual(str(aq_chain(A.C.D)), str([D, C, A])) + self.assertEqual(aq_chain(A.C.D, 1), [D, C, A]) + self.assertEqual(list(map(aq_base, aq_chain(A.C.D, 1))), [D, C, A]) + + self.assertEqual(str(aq_chain(A.B.C)), str([C, B, A])) + self.assertEqual(aq_chain(A.B.C, 1), [C, A]) + 
self.assertEqual(list(map(aq_base, aq_chain(A.B.C, 1))), [C, A]) + + self.assertEqual(str(aq_chain(A.B.C.D)), str([D, C, B, A])) + self.assertEqual(aq_chain(A.B.C.D, 1), [D, C, A]) + self.assertEqual(list(map(aq_base, aq_chain(A.B.C.D, 1))), [D, C, A]) + + self.assertEqual(A.B.C.D.color, 'red') + self.assertEqual(aq_get(A.B.C.D, "color", None), 'red') + self.assertIsNone(aq_get(A.B.C.D, "color", None, 1)) + + +class TestExplicitAcquisition(unittest.TestCase): + + def test_explicit_acquisition(self): + from ExtensionClass import Base + + class B(Base): + color = 'red' + + class A(Explicit): + def hi(self): + return self.acquire('color') + + b = B() + b.a = A() + self.assertEqual(b.a.hi(), 'red') + + b.a.color = 'green' + self.assertEqual(b.a.hi(), 'green') + + with self.assertRaises(AttributeError): + A().hi() + + +class TestCreatingWrappers(unittest.TestCase): + + def test_creating_wrappers_directly(self): + from ExtensionClass import Base + from Acquisition import ImplicitAcquisitionWrapper + + class B(Base): + pass + + a = B() + a.color = 'red' + a.b = B() + w = ImplicitAcquisitionWrapper(a.b, a) + self.assertEqual(w.color, 'red') + + with self.assertRaises(TypeError): + ImplicitAcquisitionWrapper(a.b) + + # We can reassign aq_parent / __parent__ on a wrapper: + + x = B() + x.color = 'green' + w.aq_parent = x + self.assertEqual(w.color, 'green') + + y = B() + y.color = 'blue' + w.__parent__ = y + self.assertEqual(w.color, 'blue') + + # Note that messing with the wrapper won't in any way affect the + # wrapped object: + + with self.assertRaises(AttributeError): + aq_base(w).__parent__ + + with self.assertRaises(TypeError): + ImplicitAcquisitionWrapper() + + with self.assertRaises(TypeError): + ImplicitAcquisitionWrapper(obj=1) + + +class TestPickle(unittest.TestCase): + + def test_cant_pickle_acquisition_wrappers_classic(self): + import pickle + + class X(object): + def __getstate__(self): + return 1 + + # We shouldn't be able to pickle wrappers: + + from 
Acquisition import ImplicitAcquisitionWrapper + w = ImplicitAcquisitionWrapper(X(), X()) + with self.assertRaises(TypeError): + pickle.dumps(w) + + # But that's not enough. We need to defeat persistence as well. :) + # This is tricky. We want to generate the error in __getstate__, not + # in the attr access, as attribute errors are too-often hidden: + + getstate = w.__getstate__ + with self.assertRaises(TypeError): + getstate() + + # We shouldn't be able to pickle wrappers: + + from Acquisition import ExplicitAcquisitionWrapper + w = ExplicitAcquisitionWrapper(X(), X()) + with self.assertRaises(TypeError): + pickle.dumps(w) + + # But that's not enough. We need to defeat persistence as well. :) + # This is tricky. We want to generate the error in __getstate__, not + # in the attr access, as attribute errors are too-often hidden: + + getstate = w.__getstate__ + with self.assertRaises(TypeError): + getstate() + + def test_cant_pickle_acquisition_wrappers_newstyle(self): + import pickle + + class X(object): + def __getstate__(self): + return 1 + + # We shouldn't be able to pickle wrappers: + + from Acquisition import ImplicitAcquisitionWrapper + w = ImplicitAcquisitionWrapper(X(), X()) + with self.assertRaises(TypeError): + pickle.dumps(w) + + # But that's not enough. We need to defeat persistence as well. :) + # This is tricky. We want to generate the error in __getstate__, not + # in the attr access, as attribute errors are too-often hidden: + + getstate = w.__getstate__ + with self.assertRaises(TypeError): + getstate() + + # We shouldn't be able to pickle wrappers: + + from Acquisition import ExplicitAcquisitionWrapper + w = ExplicitAcquisitionWrapper(X(), X()) + with self.assertRaises(TypeError): + pickle.dumps(w) + + # But that's not enough. We need to defeat persistence as well. :) + # This is tricky. 
We want to generate the error in __getstate__, not + # in the attr access, as attribute errors are too-often hidden: + + getstate = w.__getstate__ + with self.assertRaises(TypeError): + getstate() + + def test_cant_persist_acquisition_wrappers_classic(self): + try: + import cPickle + except ImportError: + import pickle as cPickle + + class X(object): + _p_oid = '1234' + + def __getstate__(self): + return 1 + + # We shouldn't be able to pickle wrappers: + + from Acquisition import ImplicitAcquisitionWrapper + w = ImplicitAcquisitionWrapper(X(), X()) + with self.assertRaises(TypeError): + cPickle.dumps(w) + + # Check for pickle protocol one: + + with self.assertRaises(TypeError): + cPickle.dumps(w, 1) + + # Check custom pickler: + + from io import BytesIO + file = BytesIO() + pickler = cPickle.Pickler(file, 1) + + with self.assertRaises(TypeError): + pickler.dump(w) + + # Check custom pickler with a persistent_id method matching + # the semantics in ZODB.serialize.ObjectWriter.persistent_id: + + file = BytesIO() + pickler = cPickle.Pickler(file, 1) + + def persistent_id(obj): + if not hasattr(obj, '_p_oid'): + return None + klass = type(obj) + oid = obj._p_oid + if hasattr(klass, '__getnewargs__'): + # Coverage, make sure it can be called + assert klass.__getnewargs__(obj) == () + return oid + return 'class_and_oid', klass + + try: + pickler.inst_persistent_id = persistent_id + except AttributeError: + pass + pickler.persistent_id = persistent_id # PyPy and Py3k + pickler.dump(w) + state = file.getvalue() + self.assertTrue(b'1234' in state) + self.assertFalse(b'class_and_oid' in state) + + def test_cant_persist_acquisition_wrappers_newstyle(self): + try: + import cPickle + except ImportError: + import pickle as cPickle + + class X(object): + _p_oid = '1234' + + def __getstate__(self): + return 1 + + # We shouldn't be able to pickle wrappers: + + from Acquisition import ImplicitAcquisitionWrapper + w = ImplicitAcquisitionWrapper(X(), X()) + with 
self.assertRaises(TypeError): + cPickle.dumps(w) + + # Check for pickle protocol one: + + with self.assertRaises(TypeError): + cPickle.dumps(w, 1) + + # Check custom pickler: + + from io import BytesIO + file = BytesIO() + pickler = cPickle.Pickler(file, 1) + + with self.assertRaises(TypeError): + pickler.dump(w) + + # Check custom pickler with a persistent_id method matching + # the semantics in ZODB.serialize.ObjectWriter.persistent_id: + + file = BytesIO() + pickler = cPickle.Pickler(file, 1) + + def persistent_id(obj): + if not hasattr(obj, '_p_oid'): + return None + klass = type(obj) + oid = obj._p_oid + if hasattr(klass, '__getnewargs__'): + return oid + return 'class_and_oid', klass + + try: + pickler.inst_persistent_id = persistent_id + except AttributeError: + pass + + pickler.persistent_id = persistent_id # PyPy and Py3k + pickler.dump(w) + state = file.getvalue() + self.assertTrue(b'1234' in state) + self.assertFalse(b'class_and_oid' in state) + + +class TestInterfaces(unittest.TestCase): + + def test_interfaces(self): + from zope.interface.verify import verifyClass + + # Explicit and Implicit implement IAcquirer: + from Acquisition.interfaces import IAcquirer + self.assertTrue(verifyClass(IAcquirer, Explicit)) + self.assertTrue(verifyClass(IAcquirer, Implicit)) + + # ExplicitAcquisitionWrapper and ImplicitAcquisitionWrapper implement + # IAcquisitionWrapper: + + from Acquisition import ExplicitAcquisitionWrapper + from Acquisition import ImplicitAcquisitionWrapper + from Acquisition.interfaces import IAcquisitionWrapper + self.assertTrue( + verifyClass(IAcquisitionWrapper, ExplicitAcquisitionWrapper)) + self.assertTrue( + verifyClass(IAcquisitionWrapper, ImplicitAcquisitionWrapper)) + + +class TestMixin(unittest.TestCase): + + @unittest.skipUnless(MIXIN_POST_CLASS_DEFINITION, + 'Changing __bases__ is not supported.') + def test_mixin_post_class_definition(self): + # Assigning to __bases__ is difficult under some versions of python. 
+ # PyPy usually lets it, but CPython (3 esp) may not. + # In this example, you get: + # "TypeError: __bases__ assignment: + # 'Base' deallocator differs from 'object'" + # I don't know what the workaround is; the old one of using a dummy + # superclass no longer works. See http://bugs.python.org/issue672115 + + # Mixing in Base after class definition doesn't break anything, + # but also doesn't result in any wrappers. + from ExtensionClass import Base + + class Plain(object): + pass + + self.assertEqual(Plain.__bases__, (object, )) + Plain.__bases__ = (Base, ) + self.assertIsInstance(Plain(), Base) + + # Even after mixing in that base, when we request such an object + # from an implicit acquiring base, it doesn't come out wrapped: + class I(Implicit): + pass + + root = I() + root.a = I() + root.a.b = Plain() + self.assertIsInstance(root.a.b, Plain) + + # This is because after the mixin, even though Plain is-a Base, + # it's still not an Explicit/Implicit acquirer and provides + # neither the `__of__` nor `__get__` methods necessary. + # `__get__` is added as a consequence of + # `__of__` at class creation time): + self.assertFalse(hasattr(Plain, '__get__')) + self.assertFalse(hasattr(Plain, '__of__')) + + def test_mixin_base(self): + # We can mix-in Base as part of multiple inheritance. 
+ from ExtensionClass import Base + + class MyBase(object): + pass + + class MixedIn(Base, MyBase): + pass + + self.assertEqual(MixedIn.__bases__, (Base, MyBase)) + self.assertIsInstance(MixedIn(), Base) + + # Because it's not an acquiring object and doesn't provide `__of__` + # or `__get__`, when accessed from implicit contexts it doesn't come + # out wrapped: + class I(Implicit): + pass + + root = I() + root.a = I() + root.a.b = MixedIn() + self.assertIsInstance(root.a.b, MixedIn) + + # This is because after the mixin, even though Plain is-a Base, + # it doesn't provide the `__of__` method used for wrapping, and so + # the class definition code that would add the `__get__` method also + # doesn't run: + self.assertFalse(hasattr(MixedIn, '__of__')) + self.assertFalse(hasattr(MixedIn, '__get__')) + + +class TestGC(unittest.TestCase): + # Tests both `__del__` being called and GC collection. + # Note that PyPy always reports 0 collected objects even + # though we can see its finalizers run. + + # Not PyPy + SUPPORTS_GC_THRESHOLD = hasattr(gc, 'get_threshold') + + if SUPPORTS_GC_THRESHOLD: + def setUp(self): + self.thresholds = gc.get_threshold() + gc.set_threshold(0) + + def tearDown(self): + gc.set_threshold(*self.thresholds) + + def test_Basic_gc(self): + # Test to make sure that EC instances participate in GC. + from ExtensionClass import Base + + for B in I, E: + counter = [0] + + class C1(B): + pass + + class C2(Base): + def __del__(self, counter=counter): + counter[0] += 1 + + a = C1('a') + a.b = C1('a.b') + a.b.a = a + a.b.c = C2() + gc.collect() + del a + removed = gc.collect() + if self.SUPPORTS_GC_THRESHOLD: + self.assertTrue(removed > 0) + self.assertEqual(counter[0], 1) + + def test_Wrapper_gc(self): + # Test to make sure that EC instances participate in GC. 
+ for B in I, E: + counter = [0] + + class C(object): + def __del__(self, counter=counter): + counter[0] += 1 + + a = B('a') + a.b = B('b') + a.a_b = a.b # circular reference through wrapper + a.b.c = C() + gc.collect() + del a + removed = gc.collect() + if self.SUPPORTS_GC_THRESHOLD: + self.assertTrue(removed > 0) + self.assertEqual(counter[0], 1) + + +def test_container_proxying(): + """Make sure that recent python container-related slots are proxied. + + >>> import sys + >>> class Impl(Implicit): + ... pass + + >>> class C(Implicit): + ... def __getitem__(self, key): + ... if isinstance(key, slice): + ... print('slicing...') + ... return (key.start,key.stop) + ... print('getitem', key) + ... if key == 4: + ... raise IndexError + ... return key + ... def __contains__(self, key): + ... print('contains', repr(key)) + ... return key == 5 + ... def __iter__(self): + ... print('iterating...') + ... return iter((42,)) + ... def __getslice__(self, start, end): + ... print('slicing...') + ... return (start, end) + + The naked class behaves like this: + + >>> c = C() + >>> 3 in c + contains 3 + False + >>> 5 in c + contains 5 + True + >>> list(c) + iterating... + [42] + >>> c[5:10] + slicing... + (5, 10) + >>> c[5:] == (5, sys.maxsize if PY2 else None) + slicing... + True + + Let's put c in the context of i: + + >>> i = Impl() + >>> i.c = c + + Now check that __contains__ is properly used: + + >>> 3 in i.c # c.__of__(i) + contains 3 + False + >>> 5 in i.c + contains 5 + True + >>> list(i.c) + iterating... + [42] + >>> i.c[5:10] + slicing... + (5, 10) + >>> i.c[5:] == (5, sys.maxsize if PY2 else None) + slicing... + True + + Let's let's test the same again with an explicit wrapper: + + >>> class Impl(Explicit): + ... pass + + >>> class C(Explicit): + ... def __getitem__(self, key): + ... if isinstance(key, slice): + ... print('slicing...') + ... return (key.start,key.stop) + ... print('getitem', key) + ... if key == 4: + ... raise IndexError + ... return key + ... 
def __contains__(self, key): + ... print('contains', repr(key)) + ... return key == 5 + ... def __iter__(self): + ... print('iterating...') + ... return iter((42,)) + ... def __getslice__(self, start, end): + ... print('slicing...') + ... return (start, end) + + The naked class behaves like this: + + >>> c = C() + >>> 3 in c + contains 3 + False + >>> 5 in c + contains 5 + True + >>> list(c) + iterating... + [42] + >>> c[5:10] + slicing... + (5, 10) + >>> c[5:] == (5, sys.maxsize if PY2 else None) + slicing... + True + + Let's put c in the context of i: + + >>> i = Impl() + >>> i.c = c + + Now check that __contains__ is properly used: + + >>> 3 in i.c # c.__of__(i) + contains 3 + False + >>> 5 in i.c + contains 5 + True + >>> list(i.c) + iterating... + [42] + >>> i.c[5:10] + slicing... + (5, 10) + >>> i.c[5:] == (5, sys.maxsize if PY2 else None) + slicing... + True + + Next let's check that the wrapper's __iter__ proxy falls back + to using the object's __getitem__ if it has no __iter__. See + https://bugs.launchpad.net/zope2/+bug/360761 . + + >>> class C(Implicit): + ... l=[1,2,3] + ... def __getitem__(self, i): + ... return self.l[i] + + >>> c1 = C() + >>> type(iter(c1)) #doctest: +ELLIPSIS + <... '...iterator'> + >>> list(c1) + [1, 2, 3] + + >>> c2 = C().__of__(c1) + >>> type(iter(c2)) #doctest: +ELLIPSIS + <... '...iterator'> + >>> list(c2) + [1, 2, 3] + + The __iter__proxy should also pass the wrapped object as self to + the __iter__ of objects defining __iter__: + + >>> class C(Implicit): + ... def __iter__(self): + ... print('iterating...') + ... for i in range(5): + ... yield i, self.aq_parent.name + >>> c = C() + >>> i = Impl() + >>> i.c = c + >>> i.name = 'i' + >>> list(i.c) + iterating... + [(0, 'i'), (1, 'i'), (2, 'i'), (3, 'i'), (4, 'i')] + + And it should pass the wrapped object as self to + the __getitem__ of objects without an __iter__: + + >>> class C(Implicit): + ... def __getitem__(self, i): + ... 
return self.aq_parent.l[i] + >>> c = C() + >>> i = Impl() + >>> i.c = c + >>> i.l = range(5) + >>> list(i.c) + [0, 1, 2, 3, 4] + + Finally let's make sure errors are still correctly raised after having + to use a modified version of `PyObject_GetIter` for iterator support: + + >>> class C(Implicit): + ... pass + >>> c = C() + >>> i = Impl() + >>> i.c = c + >>> list(i.c) #doctest: +ELLIPSIS + Traceback (most recent call last): + ... + TypeError: ...iter... + + >>> class C(Implicit): + ... def __iter__(self): + ... return [42] + >>> c = C() + >>> i = Impl() + >>> i.c = c + >>> list(i.c) #doctest: +ELLIPSIS + Traceback (most recent call last): + ... + TypeError: iter() returned non-iterator... + + """ + + +class TestAqParentParentInteraction(unittest.TestCase): + + def test___parent__no_wrappers(self): + # Acquisition also works with objects that aren't wrappers, as long + # as they have __parent__ pointers. Let's take a hierarchy like + # z --isParent--> y --isParent--> x: + x = Location() + y = Location() + z = Location() + x.__parent__ = y + y.__parent__ = z + + # and some attributes that we want to acquire: + x.hello = 'world' + y.foo = 42 + z.foo = 43 # this should not be found + z.bar = 3.145 + + # ``aq_acquire`` works as we know it from implicit/acquisition + # wrappers: + self.assertEqual(aq_acquire(x, 'hello'), 'world') + self.assertEqual(aq_acquire(x, 'foo'), 42) + self.assertEqual(aq_acquire(x, 'bar'), 3.145) + + # as does ``aq_get``: + self.assertEqual(aq_get(x, 'hello'), 'world') + self.assertEqual(aq_get(x, 'foo'), 42) + self.assertEqual(aq_get(x, 'bar'), 3.145) + + # and ``aq_parent``: + self.assertIs(aq_parent(x), y) + self.assertIs(aq_parent(y), z) + + # as well as ``aq_chain``: + self.assertEqual(aq_chain(x), [x, y, z]) + + def test_implicit_wrapper_as___parent__(self): + # Let's do the same test again, only now not all objects are of the + # same kind and link to each other via __parent__ pointers. 
The + # root is a stupid ExtensionClass object: + class Root(ExtensionClass.Base): + bar = 3.145 + z = Root() + + # The intermediate parent is an object that supports implicit + # acquisition. We bind it to the root via the __of__ protocol: + class Impl(Implicit): + foo = 42 + y = Impl().__of__(z) + + # The child object is again a simple object with a simple __parent__ + # pointer: + x = Location() + x.hello = 'world' + x.__parent__ = y + + # ``aq_acquire`` works as expected from implicit/acquisition + # wrappers: + self.assertEqual(aq_acquire(x, 'hello'), 'world') + self.assertEqual(aq_acquire(x, 'foo'), 42) + self.assertEqual(aq_acquire(x, 'bar'), 3.145) + + # as does ``aq_get``: + self.assertEqual(aq_get(x, 'hello'), 'world') + self.assertEqual(aq_get(x, 'foo'), 42) + self.assertEqual(aq_get(x, 'bar'), 3.145) + + # and ``aq_parent``: + self.assertIs(aq_parent(x), y) + self.assertIs(aq_parent(y), z) + + # as well as ``aq_chain``: + self.assertEqual(aq_chain(x), [x, y, z]) + + # Note that also the (implicit) acquisition wrapper has a __parent__ + # pointer, which is automatically computed from the acquisition + # container (it's identical to aq_parent): + self.assertIs(y.__parent__, z) + + # Just as much as you can assign to aq_parent, you can also assign + # to __parent__ to change the acquisition context of the wrapper: + + newroot = Root() + y.__parent__ = newroot + self.assertIsNot(y.__parent__, z) + self.assertIs(y.__parent__, newroot) + + # Note that messing with the wrapper won't in any way affect the + # wrapped object: + with self.assertRaises(AttributeError): + aq_base(y).__parent__ + + def test_explicit_wrapper_as___parent__(self): + # Let's do this test yet another time, with an explicit wrapper: + class Root(ExtensionClass.Base): + bar = 3.145 + z = Root() + + # The intermediate parent is an object that supports implicit + # acquisition. 
We bind it to the root via the __of__ protocol: + class Expl(Explicit): + foo = 42 + y = Expl().__of__(z) + + # The child object is again a simple object with a simple __parent__ + # pointer: + x = Location() + x.hello = 'world' + x.__parent__ = y + + # ``aq_acquire`` works as expected from implicit/acquisition + # wrappers: + self.assertEqual(aq_acquire(x, 'hello'), 'world') + self.assertEqual(aq_acquire(x, 'foo'), 42) + self.assertEqual(aq_acquire(x, 'bar'), 3.145) + + # as does ``aq_get``: + self.assertEqual(aq_get(x, 'hello'), 'world') + self.assertEqual(aq_get(x, 'foo'), 42) + self.assertEqual(aq_get(x, 'bar'), 3.145) + + # and ``aq_parent``: + self.assertIs(aq_parent(x), y) + self.assertIs(aq_parent(y), z) + + # as well as ``aq_chain``: + self.assertEqual(aq_chain(x), [x, y, z]) + + # Note that also the (explicit) acquisition wrapper has a __parent__ + # pointer, which is automatically computed from the acquisition + # container (it's identical to aq_parent): + self.assertIs(y.__parent__, z) + + # Just as much as you can assign to aq_parent, you can also assign + # to __parent__ to change the acquisition context of the wrapper: + newroot = Root() + y.__parent__ = newroot + self.assertIsNot(y.__parent__, z) + self.assertIs(y.__parent__, newroot) + + # Note that messing with the wrapper won't in any way affect the + # wrapped object: + with self.assertRaises(AttributeError): + aq_base(y).__parent__ + + def test_implicit_wrapper_has_nonwrapper_as_aq_parent(self): + # Let's do this the other way around: The root and the + # intermediate parent is an object that doesn't support acquisition, + y = ECLocation() + z = Location() + y.__parent__ = z + y.foo = 42 + z.foo = 43 # this should not be found + z.bar = 3.145 + + # only the outmost object does: + class Impl(Implicit): + hello = 'world' + x = Impl().__of__(y) + + # Again, acquiring objects works as usual: + self.assertEqual(aq_acquire(x, 'hello'), 'world') + self.assertEqual(aq_acquire(x, 'foo'), 42) + 
self.assertEqual(aq_acquire(x, 'bar'), 3.145) + + # as does ``aq_get``: + self.assertEqual(aq_get(x, 'hello'), 'world') + self.assertEqual(aq_get(x, 'foo'), 42) + self.assertEqual(aq_get(x, 'bar'), 3.145) + + # and ``aq_parent``: + self.assertEqual(aq_parent(x), y) + self.assertIs(aq_parent(y), z) + self.assertEqual(x.aq_parent, y) + self.assertEqual(x.aq_parent.aq_parent, z) + + # as well as ``aq_chain``: + self.assertEqual(aq_chain(x), [x, y, z]) + self.assertEqual(x.aq_chain, [x, y, z]) + + # Because the outmost object, ``x``, is wrapped in an implicit + # acquisition wrapper, we can also use direct attribute access: + self.assertEqual(x.hello, 'world') + self.assertEqual(x.foo, 42) + self.assertEqual(x.bar, 3.145) + + def test_explicit_wrapper_has_nonwrapper_as_aq_parent(self): + # Let's do this the other way around: The root and the + # intermediate parent is an object that doesn't support acquisition, + + y = ECLocation() + z = Location() + y.__parent__ = z + y.foo = 42 + z.foo = 43 # this should not be found + z.bar = 3.145 + + # only the outmost object does: + class Expl(Explicit): + hello = 'world' + x = Expl().__of__(y) + + # Again, acquiring objects works as usual: + self.assertEqual(aq_acquire(x, 'hello'), 'world') + self.assertEqual(aq_acquire(x, 'foo'), 42) + self.assertEqual(aq_acquire(x, 'bar'), 3.145) + + # as does ``aq_get``: + self.assertEqual(aq_get(x, 'hello'), 'world') + self.assertEqual(aq_get(x, 'foo'), 42) + self.assertEqual(aq_get(x, 'bar'), 3.145) + + # and ``aq_parent``: + self.assertEqual(aq_parent(x), y) + self.assertIs(aq_parent(y), z) + self.assertEqual(x.aq_parent, y) + self.assertEqual(x.aq_parent.aq_parent, z) + + # as well as ``aq_chain``: + self.assertEqual(aq_chain(x), [x, y, z]) + self.assertEqual(x.aq_chain, [x, y, z]) + + +class TestParentCircles(unittest.TestCase): + + def test___parent__aq_parent_circles(self): + # As a general safety belt, Acquisition won't follow a mixture of + # circular __parent__ pointers and 
aq_parent wrappers. These can + # occurr when code that uses implicit acquisition wrappers meets + # code that uses __parent__ pointers. + class Impl(Implicit): + hello = 'world' + + class Impl2(Implicit): + hello = 'world2' + only = 'here' + + x = Impl() + y = Impl2().__of__(x) + x.__parent__ = y + + self.assertTrue(x.__parent__.aq_base is y.aq_base) + self.assertTrue(aq_parent(x) is y) + self.assertTrue(x.__parent__.__parent__ is x) + + self.assertEqual(x.hello, 'world') + self.assertEqual(aq_acquire(x, 'hello'), 'world') + + with self.assertRaises(AttributeError): + x.only + + self.assertEqual(aq_acquire(x, 'only'), 'here') + + with self.assertRaises(AttributeError): + aq_acquire(x, 'non_existant_attr') + + with self.assertRaises(RuntimeError): + aq_acquire(y, 'non_existant_attr') + + with self.assertRaises(AttributeError): + x.non_existant_attr + + with self.assertRaises(RuntimeError): + y.non_existant_attr + + @unittest.skipUnless( + hasattr(Acquisition.ImplicitAcquisitionWrapper, '_obj'), + 'Pure Python specific test') + def test_python_impl_cycle(self): + # An extra safety belt, specific to the Python implementation + # because it's not clear how one could arrive in this situation + # naturally. + class Impl(Implicit): + pass + + root = Impl() + root.child = Impl() + child_wrapper = root.child + + # Now set up the python specific boo-boo: + child_wrapper._obj = child_wrapper + + # Now nothing works: + + with self.assertRaises(RuntimeError): + child_wrapper.non_existant_attr + + with self.assertRaises(RuntimeError): + aq_acquire(child_wrapper, 'non_existant_attr') + + def test_unwrapped_implicit_acquirer_unwraps__parent__(self): + # Set up an implicit acquirer with a parent: + class Impl(Implicit): + pass + + y = Impl() + x = Impl() + x.__parent__ = y + + # Now if we retrieve the parent from the (unwrapped) instance, the + # parent should not be wrapped in the instance's acquisition chain. 
+ self.assertIs(x.__parent__, y) + + +class TestBugs(unittest.TestCase): + + def test__iter__after_AttributeError(self): + # See https://bugs.launchpad.net/zope2/+bug/1155760 + import time + + class C(Implicit): + l = [0, 1, 2, 3, 4] + + def __getitem__(self, i): + return self.l[i] + + a = C() + b = C().__of__(a) + try: + for n in b: + time.gmtime() + except AttributeError: + raise + + +class TestSpecialNames(unittest.TestCase): + + def test_special_names(self): + # This test captures some aq_special names that are not otherwise + # tested for. + class Impl(Implicit): + pass + + root = Impl() + root.child = Impl() + + # First, the 'aq_explicit' name returns an explicit wrapper + # instead of an explicit wrapper: + ex_wrapper = root.child.aq_explicit + self.assertIsInstance( + ex_wrapper, Acquisition.ExplicitAcquisitionWrapper) + + # If we ask an explicit wrapper to be explicit, we get back + # the same object: + self.assertIs(ex_wrapper.aq_explicit, ex_wrapper.aq_explicit) + + # These special names can also be filtered: + self.assertIsNone( + aq_acquire(root.child, 'aq_explicit', + lambda searched, parent, name, ob, extra: None, + default=None)) + + self.assertIsNotNone( + aq_acquire(root.child, 'aq_explicit', + lambda searched, parent, name, ob, extra: True, + default=None)) + + # Last, a value that can be used for testing that you have a wrapper: + self.assertEqual(root.child.aq_uncle, 'Bob') + + +class TestWrapper(unittest.TestCase): + + def test_deleting_parent_attrs(self): + # We can detach a wrapper object from its chain by deleting its + # parent. 
+ + class Impl(Implicit): + pass + + root = Impl() + root.a = 42 + root.child = Impl() + + # Initially, a wrapped object has the parent we expect: + child_wrapper = root.child + self.assertIs(child_wrapper.__parent__, root) + self.assertIs(child_wrapper.aq_parent, root) + + # Even though we acquired the 'a' attribute, we can't delete it: + self.assertEqual(child_wrapper.a, 42) + with self.assertRaises(AttributeError): + del child_wrapper.a + + # Now if we delete it (as many times as we want) + # we lose access to the parent and acquired attributes: + del child_wrapper.__parent__ + del child_wrapper.aq_parent + + self.assertIs(child_wrapper.__parent__, None) + self.assertIs(child_wrapper.aq_parent, None) + self.assertFalse(hasattr(child_wrapper, 'a')) + + def test__cmp__is_called_on_wrapped_object(self): + # If we define an object that implements `__cmp__`: + class Impl(Implicit): + def __cmp__(self, other): + return self.a + + # Then it gets called when a wrapper is compared (we call it + # directly to avoid any Python2/3 issues): + root = Impl() + root.a = 42 + root.child = Impl() + self.assertEqual(root.child.a, 42) + self.assertEqual(root.child.__cmp__(None), 42) + + def test_wrapped_methods_have_correct_self(self): + # Getting a method from a wrapper returns an object that uses the + # wrapper as its `__self__`, no matter how many layers deep we go; + # this makes acquisition work in that code. 
+ + class Impl(Implicit): + def method(self): + return self.a + + root = Impl() + root.a = 42 + root.child = Impl() + root.child.child = Impl() + + # We explicitly construct a wrapper to bypass some of the optimizations + # that remove redundant wrappers and thus get more full code coverage: + child_wrapper = Acquisition.ImplicitAcquisitionWrapper( + root.child.child, root.child) + method = child_wrapper.method + self.assertIs(method.__self__, child_wrapper) + self.assertEqual(method(), 42) + + def test_cannot_set_attributes_on_empty_wrappers(self): + # If a wrapper is around None, no attributes can be set on it: + wrapper = Acquisition.ImplicitAcquisitionWrapper(None, None) + + with self.assertRaises(AttributeError): + wrapper.a = 42 + + # Likewise, we can't really get any attributes on such an empty wrapper + with self.assertRaises(AttributeError): + wrapper.a + + def test_getitem_setitem_not_implemented(self): + # If a wrapper wraps something that doesn't implement get/setitem, + # those failures propagate up. + class Impl(Implicit): + pass + + root = Impl() + root.child = Impl() + + # We can't set anything: + with self.assertRaises(AttributeError): + root.child['key'] = 42 + + # We can't get anything: + with self.assertRaises(AttributeError): + root.child['key'] + + def test_getitem_setitem_implemented(self): + # The wrapper delegates to get/set item. + class Root(Implicit): + pass + + class Impl(Implicit): + def __getitem__(self, i): + return self.a + + def __setitem__(self, key, value): + self.a[key] = value + + root = Root() + root.a = dict() + root.child = Impl() + self.assertEqual(root.child[1], {}) + + root.child['a'] = 'b' + self.assertEqual(root.child[1], {'a': 'b'}) + + def test_wrapped_objects_are_unwrapped_on_set(self): + # A wrapper is not passed to the base object during `setattr`. 
+ class Impl(Implicit): + pass + + # Given two different wrappers: + root = Impl() + child = Impl() + child2 = Impl() + root.child = child + root.child2 = child2 + + # If we pass one to the other as an attribute: + root.child.child2 = root.child2 + + # By the time it gets there, it's not wrapped: + self.assertIs(type(child.__dict__['child2']), Impl) + + +class TestOf(unittest.TestCase): + + def test__of__exception(self): + # Wrapper_findattr did't check for an exception in a user defined + # __of__ method before passing the result to the filter. In this + # case the 'value' argument of the filter was NULL, which caused + # a segfault when being accessed. + + class X(Implicit): + def __of__(self, parent): + if aq_base(parent) is not parent: + raise NotImplementedError('ack') + return X.inheritedAttribute('__of__')(self, parent) + + a = I('a') + a.b = I('b') + a.b.x = X('x') + with self.assertRaises(NotImplementedError): + aq_acquire(a.b, 'x', + lambda self, object, name, value, extra: repr(value)) + + def test_wrapper_calls_of_on_non_wrapper(self): + # The ExtensionClass protocol is respected even for non-Acquisition + # objects. 
+ + class MyBase(ExtensionClass.Base): + call_count = 0 + + def __of__(self, other): + self.call_count += 1 + return 42 + + class Impl(Implicit): + pass + + # If we have a wrapper around an object that is an extension class, + # but not an Acquisition wrapper: + root = Impl() + base = MyBase() + wrapper = Acquisition.ImplicitAcquisitionWrapper(base, root) + + # And access that object itself through a wrapper: + root.child = Impl() + root.child.wrapper = wrapper + + # The `__of__` protocol is respected implicitly: + self.assertEqual(root.child.wrapper, 42) + self.assertEqual(base.call_count, 1) + + # Here it is explicitly: + self.assertEqual(wrapper.__of__(root.child), 42) + self.assertEqual(base.call_count, 2) + + +class TestAQInContextOf(unittest.TestCase): + + def test_aq_inContextOf(self): + from ExtensionClass import Base + + class B(Base): + color = 'red' + + class A(Implicit): + def hi(self): + return self.color + + class Location(object): + __parent__ = None + + b = B() + b.a = A() + self.assertEqual(b.a.hi(), 'red') + + b.a.color = 'green' + self.assertEqual(b.a.hi(), 'green') + + with self.assertRaises(AttributeError): + A().hi() + + # New test for wrapper comparisons. 
+ foo = b.a + bar = b.a + self.assertEqual(foo, bar) + + c = A() + b.c = c + b.c.d = c + self.assertEqual(b.c.d, c) + self.assertEqual(b.c.d, b.c) + self.assertEqual(b.c, c) + + l = Location() + l.__parent__ = b.c + + def checkContext(self, o): + # Python equivalent to aq_inContextOf + next = self + o = aq_base(o) + while 1: + if aq_base(next) is o: + return True + self = aq_inner(next) + if self is None: + break + next = aq_parent(self) + if next is None: + break + return False + + self.assertTrue(checkContext(b.c, b)) + self.assertFalse(checkContext(b.c, b.a)) + + self.assertTrue(checkContext(l, b)) + self.assertTrue(checkContext(l, b.c)) + self.assertFalse(checkContext(l, b.a)) + + # aq_inContextOf works the same way: + self.assertTrue(aq_inContextOf(b.c, b)) + self.assertFalse(aq_inContextOf(b.c, b.a)) + + self.assertTrue(aq_inContextOf(l, b)) + self.assertTrue(aq_inContextOf(l, b.c)) + self.assertFalse(aq_inContextOf(l, b.a)) + + self.assertTrue(b.a.aq_inContextOf(b)) + self.assertTrue(b.c.aq_inContextOf(b)) + self.assertTrue(b.c.d.aq_inContextOf(b)) + self.assertTrue(b.c.d.aq_inContextOf(c)) + self.assertTrue(b.c.d.aq_inContextOf(b.c)) + self.assertFalse(b.c.aq_inContextOf(foo)) + self.assertFalse(b.c.aq_inContextOf(b.a)) + self.assertFalse(b.a.aq_inContextOf('somestring')) + + def test_aq_inContextOf_odd_cases(self): + # The aq_inContextOf function still works in some artificial cases. 
+ root = object() + wrapper_around_none = Acquisition.ImplicitAcquisitionWrapper( + None, None) + self.assertEqual(aq_inContextOf(wrapper_around_none, root), 0) + + # If we don't ask for inner objects, the same thing happens + # in this case: + self.assertEqual(aq_inContextOf(wrapper_around_none, root, False), 0) + + # Somewhat surprisingly, the `aq_inner` of this wrapper + # is itself a wrapper: + self.assertIsInstance(aq_inner(wrapper_around_none), + Acquisition.ImplicitAcquisitionWrapper) + + # If we manipulate the Python implementation + # to make this no longer true, nothing breaks: + if hasattr(wrapper_around_none, '_obj'): + setattr(wrapper_around_none, '_obj', None) + + self.assertEqual(aq_inContextOf(wrapper_around_none, root), 0) + self.assertIsInstance(wrapper_around_none, + Acquisition.ImplicitAcquisitionWrapper) + + # Following parent pointers in weird circumstances works too: + class WithParent(object): + __parent__ = None + + self.assertEqual(aq_inContextOf(WithParent(), root), 0) + + +class TestCircles(unittest.TestCase): + + def test_search_repeated_objects(self): + # If an acquisition wrapper object is wrapping another wrapper, and + # also has another wrapper as its parent, and both of *those* + # wrappers have the same object (one as its direct object, one as + # its parent), then acquisition proceeds as normal: we don't get + # into any cycles or fail to acquire expected attributes. In fact, + # we actually can optimize out a level of the search in that case. + + # This is a bit of a convoluted scenario to set up when the code is + # written out all in one place, but it may occur organically when + # spread across a project. + + # We begin with some simple setup, importing the objects we'll use + # and setting up the object that we'll repeat. 
This particular test + # is specific to the Python implementation, so we're using low-level + # functions from that module: + + from Acquisition import _Wrapper as Wrapper + from Acquisition import _Wrapper_acquire + + class Repeated(object): + hello = "world" + + def __repr__(self): + return 'repeated' + + repeated = Repeated() + + # Now the tricky part, creating the repeating pattern. To rephrase + # the opening sentence, we need a wrapper whose object and parent + # (container) are themselves both wrappers, and the object's parent is + # the same object as the wrapper's parent's object. That might be a + # bit more clear in code: + wrappers_object = Wrapper('a', repeated) + wrappers_parent = Wrapper(repeated, 'b') + wrapper = Wrapper(wrappers_object, wrappers_parent) + self.assertIs(wrapper._obj._container, wrapper._container._obj) + + # Using the low-level function on the wrapper fails to find the + # desired attribute. This is because of the optimization that cuts + # out a level of the search (it is assumed that the higher level + # `_Wrapper_findattr` function is driving the search and will take + # the appropriate steps): + with self.assertRaises(AttributeError): + _Wrapper_acquire(wrapper, 'hello') + + # In fact, if we go through the public interface of the high-level + # functions, we do find the attribute as expected: + self.assertEqual(aq_acquire(wrapper, 'hello'), 'world') + + def test_parent_parent_circles(self): + + class Impl(Implicit): + hello = 'world' + + class Impl2(Implicit): + hello = 'world2' + only = 'here' + + x = Impl() + y = Impl2() + x.__parent__ = y + y.__parent__ = x + + self.assertTrue(x.__parent__.__parent__ is x) + self.assertEqual(aq_acquire(x, 'hello'), 'world') + self.assertEqual(aq_acquire(x, 'only'), 'here') + + self.assertRaises(AttributeError, aq_acquire, x, 'non_existant_attr') + self.assertRaises(AttributeError, aq_acquire, y, 'non_existant_attr') + + def test_parent_parent_parent_circles(self): + + class Impl(Implicit): + 
hello = 'world' + + class Impl2(Implicit): + hello = 'world' + + class Impl3(Implicit): + hello = 'world2' + only = 'here' + + a = Impl() + b = Impl2() + c = Impl3() + a.__parent__ = b + b.__parent__ = c + c.__parent__ = a + + self.assertTrue(a.__parent__.__parent__ is c) + self.assertTrue( + aq_base(a.__parent__.__parent__.__parent__) is a) + self.assertTrue(b.__parent__.__parent__ is a) + self.assertTrue(c.__parent__.__parent__ is b) + + self.assertEqual(aq_acquire(a, 'hello'), 'world') + self.assertEqual(aq_acquire(b, 'hello'), 'world') + self.assertEqual(aq_acquire(c, 'hello'), 'world2') + + self.assertEqual(aq_acquire(a, 'only'), 'here') + self.assertEqual(aq_acquire(b, 'only'), 'here') + self.assertEqual(aq_acquire(c, 'only'), 'here') + + self.assertRaises(AttributeError, getattr, a, 'non_existant_attr') + self.assertRaises(AttributeError, getattr, b, 'non_existant_attr') + self.assertRaises(AttributeError, getattr, c, 'non_existant_attr') + + +class TestAcquire(unittest.TestCase): + + def setUp(self): + + class Impl(Implicit): + pass + + class Expl(Explicit): + pass + + a = Impl('a') + a.y = 42 + a.b = Expl('b') + a.b.z = 3 + a.b.c = Impl('c') + self.a = a + + def test_explicit_module_default(self): + self.assertEqual(aq_acquire(self.a.b.c, 'z'), 3) + + def test_explicit_module_true(self): + self.assertEqual(aq_acquire(self.a.b.c, 'z', explicit=True), 3) + + def test_explicit_module_false(self): + self.assertEqual(aq_acquire(self.a.b.c, 'z', explicit=False), 3) + + def test_explicit_wrapper_default(self): + self.assertEqual(self.a.b.c.aq_acquire('z'), 3) + + def test_explicit_wrapper_true(self): + self.assertEqual(self.a.b.c.aq_acquire('z', explicit=True), 3) + + def test_explicit_wrapper_false(self): + self.assertEqual(self.a.b.c.aq_acquire('z', explicit=False), 3) + + def test_wrapper_falls_back_to_default(self): + self.assertEqual(aq_acquire(self.a.b.c, 'nonesuch', default=4), 4) + + def test_no_wrapper_but___parent___falls_back_to_default(self): + class 
NotWrapped(object): + pass + child = NotWrapped() + child.__parent__ = NotWrapped() + self.assertEqual(aq_acquire(child, 'nonesuch', default=4), 4) + + def test_unwrapped_falls_back_to_default(self): + self.assertEqual(aq_acquire(object(), 'nonesuch', default=4), 4) + + def test_w_unicode_attr_name(self): + # See https://bugs.launchpad.net/acquisition/+bug/143358 + found = aq_acquire(self.a.b.c, AQ_PARENT) + self.assertTrue(found.aq_self is self.a.b.aq_self) + + +class TestCooperativeBase(unittest.TestCase): + + def _make_acquirer(self, kind): + from ExtensionClass import Base + + class ExtendsBase(Base): + def __getattribute__(self, name): + if name == 'magic': + return 42 + return super(ExtendsBase, self).__getattribute__(name) + + class Acquirer(kind, ExtendsBase): + pass + + return Acquirer() + + def _check___getattribute___is_cooperative(self, acquirer): + self.assertEqual(getattr(acquirer, 'magic'), 42) + + def test_implicit___getattribute__is_cooperative(self): + self._check___getattribute___is_cooperative( + self._make_acquirer(Implicit)) + + def test_explicit___getattribute__is_cooperative(self): + self._check___getattribute___is_cooperative( + self._make_acquirer(Explicit)) + + +class TestImplicitWrappingGetattribute(unittest.TestCase): + # Implicitly wrapping an object that uses object.__getattribute__ + # in its implementation of __getattribute__ doesn't break. + # This can arise with the `persistent` library or other + # "base" classes. + + # The C implementation doesn't directly support this; however, + # it is used heavily in the Python implementation of Persistent. 
+ + @unittest.skipIf(CAPI, 'Pure Python test.') + def test_object_getattribute_in_rebound_method_with_slots(self): + + class Persistent(object): + __slots__ = ('__flags',) + + def __init__(self): + self.__flags = 42 + + def get_flags(self): + return object.__getattribute__(self, '_Persistent__flags') + + wrapped = Persistent() + wrapper = Acquisition.ImplicitAcquisitionWrapper(wrapped, None) + + self.assertEqual(wrapped.get_flags(), wrapper.get_flags()) + + # Changing it is not reflected in the wrapper's dict (this is an + # implementation detail) + wrapper._Persistent__flags = -1 + self.assertEqual(wrapped.get_flags(), -1) + self.assertEqual(wrapped.get_flags(), wrapper.get_flags()) + + wrapper_dict = object.__getattribute__(wrapper, '__dict__') + self.assertFalse('_Persistent__flags' in wrapper_dict) + + @unittest.skipIf(CAPI, 'Pure Python test.') + def test_type_with_slots_reused(self): + + class Persistent(object): + __slots__ = ('__flags',) + + def __init__(self): + self.__flags = 42 + + def get_flags(self): + return object.__getattribute__(self, '_Persistent__flags') + + wrapped = Persistent() + wrapper = Acquisition.ImplicitAcquisitionWrapper(wrapped, None) + wrapper2 = Acquisition.ImplicitAcquisitionWrapper(wrapped, None) + + self.assertTrue(type(wrapper) is type(wrapper2)) + + @unittest.skipIf(CAPI, 'Pure Python test.') + def test_object_getattribute_in_rebound_method_with_dict(self): + + class Persistent(object): + def __init__(self): + self.__flags = 42 + + def get_flags(self): + return object.__getattribute__(self, '_Persistent__flags') + + wrapped = Persistent() + wrapper = Acquisition.ImplicitAcquisitionWrapper(wrapped, None) + + self.assertEqual(wrapped.get_flags(), wrapper.get_flags()) + + # Changing it is also reflected in both dicts (this is an + # implementation detail) + wrapper._Persistent__flags = -1 + self.assertEqual(wrapped.get_flags(), -1) + self.assertEqual(wrapped.get_flags(), wrapper.get_flags()) + + wrapper_dict = 
object.__getattribute__(wrapper, '__dict__') + self.assertTrue('_Persistent__flags' in wrapper_dict) + + @unittest.skipIf(CAPI, 'Pure Python test.') + def test_object_getattribute_in_rebound_method_with_slots_and_dict(self): + + class Persistent(object): + __slots__ = ('__flags', '__dict__') + + def __init__(self): + self.__flags = 42 + self.__oid = 'oid' + + def get_flags(self): + return object.__getattribute__(self, '_Persistent__flags') + + def get_oid(self): + return object.__getattribute__(self, '_Persistent__oid') + + wrapped = Persistent() + wrapper = Acquisition.ImplicitAcquisitionWrapper(wrapped, None) + + self.assertEqual(wrapped.get_flags(), wrapper.get_flags()) + self.assertEqual(wrapped.get_oid(), wrapper.get_oid()) + + +class TestUnicode(unittest.TestCase): + + def test_implicit_aq_unicode_should_be_called(self): + class A(Implicit): + def __unicode__(self): + return UNICODE_WAS_CALLED + wrapped = A().__of__(A()) + self.assertEqual(UNICODE_WAS_CALLED, unicode(wrapped)) + self.assertEqual(str(wrapped), repr(wrapped)) + + def test_explicit_aq_unicode_should_be_called(self): + class A(Explicit): + def __unicode__(self): + return UNICODE_WAS_CALLED + wrapped = A().__of__(A()) + self.assertEqual(UNICODE_WAS_CALLED, unicode(wrapped)) + self.assertEqual(str(wrapped), repr(wrapped)) + + def test_implicit_should_fall_back_to_str(self): + class A(Implicit): + def __str__(self): + return 'str was called' + wrapped = A().__of__(A()) + self.assertEqual(STR_WAS_CALLED, unicode(wrapped)) + self.assertEqual('str was called', str(wrapped)) + + def test_explicit_should_fall_back_to_str(self): + class A(Explicit): + def __str__(self): + return 'str was called' + wrapped = A().__of__(A()) + self.assertEqual(STR_WAS_CALLED, unicode(wrapped)) + self.assertEqual('str was called', str(wrapped)) + + def test_str_fallback_should_be_called_with_wrapped_self(self): + class A(Implicit): + def __str__(self): + return str(self.aq_parent == outer) + outer = A() + inner = 
A().__of__(outer) + self.assertEqual(TRUE, unicode(inner)) + + def test_unicode_should_be_called_with_wrapped_self(self): + class A(Implicit): + def __unicode__(self): + return str(self.aq_parent == outer) + outer = A() + inner = A().__of__(outer) + self.assertEqual(TRUE, unicode(inner)) + + +class TestProxying(unittest.TestCase): + + __binary_numeric_methods__ = [ + '__add__', + '__sub__', + '__mul__', + # '__floordiv__', # not implemented in C + '__mod__', + '__divmod__', + '__pow__', + '__lshift__', + '__rshift__', + '__and__', + '__xor__', + '__or__', + # division + '__truediv__', + '__div__', + # reflected + '__radd__', + '__rsub__', + '__rmul__', + '__rdiv__', + '__rtruediv__', + '__rfloordiv__', + '__rmod__', + '__rdivmod__', + '__rpow__', + '__rlshift__', + '__rrshift__', + '__rand__', + '__rxor__', + '__ror__', + # in place + '__iadd__', + '__isub__', + '__imul__', + '__idiv__', + '__itruediv__', + '__ifloordiv__', + '__imod__', + '__idivmod__', + '__ipow__', + '__ilshift__', + '__irshift__', + '__iand__', + '__ixor__', + '__ior__', + # conversion + # implementing it messes up all the arithmetic tests + # '__coerce__', + ] + + if PY3 and sys.version_info.minor >= 5: + __binary_numeric_methods__.extend([ + '__matmul__', + '__imatmul__' + ]) + + __unary_special_methods__ = [ + # arithmetic + '__neg__', + '__pos__', + '__abs__', + '__invert__', + ] + + __unary_conversion_methods__ = { + # conversion + '__complex__': complex, + '__int__': int, + '__long__': long, + '__float__': float, + '__oct__': oct, + '__hex__': hex, + '__len__': lambda o: o if isinstance(o, int) else len(o), + # '__index__': operator.index, # not implemented in C + } + + def _check_special_methods(self, base_class=Implicit): + # Check that special methods are proxied + # when called implicitly by the interpreter + + def binary_acquired_func(self, other, modulo=None): + return self.value + + def unary_acquired_func(self): + return self.value + + acquire_meths = {} + for k in 
self.__binary_numeric_methods__: + acquire_meths[k] = binary_acquired_func + for k in self.__unary_special_methods__: + acquire_meths[k] = unary_acquired_func + + def make_converter(f): + def converter(self, *args): + return f(self.value) + return converter + for k, convert in self.__unary_conversion_methods__.items(): + acquire_meths[k] = make_converter(convert) + + acquire_meths['__len__'] = lambda self: self.value + + if PY3: + # Under Python 3, oct() and hex() call __index__ directly + acquire_meths['__index__'] = acquire_meths['__int__'] + + if base_class == Explicit: + acquire_meths['value'] = Acquisition.Acquired + AcquireValue = type('AcquireValue', (base_class,), acquire_meths) + + class B(Implicit): + pass + + base = B() + base.value = 42 + base.derived = AcquireValue() + + # one syntax check for the heck of it + self.assertEqual(base.value, base.derived + 1) + # divmod is not in the operator module + self.assertEqual(base.value, divmod(base.derived, 1)) + + _found_at_least_one_div = False + + for meth in self.__binary_numeric_methods__: + op = getattr(operator, meth, None) + if op is not None: + # called on the instance + self.assertEqual(base.value, op(base.derived, -1)) + + # called on the type, as the interpreter does + # Note that the C version can only implement either __truediv__ + # or __div__, not both + op = getattr(operator, meth, None) + if op is not None: + try: + self.assertEqual(base.value, + op(base.derived, 1)) + if meth in ('__div__', '__truediv__'): + _found_at_least_one_div = True + except TypeError: + if meth in ('__div__', '__truediv__'): + pass + + self.assertTrue( + _found_at_least_one_div, + "Must implement at least one of __div__ and __truediv__") + + # Unary methods + for meth in self.__unary_special_methods__: + self.assertEqual(base.value, getattr(base.derived, meth)()) + op = getattr(operator, meth) + self.assertEqual(base.value, op(base.derived)) + + # Conversion functions + for meth, converter in 
self.__unary_conversion_methods__.items(): + if not converter: + continue + self.assertEqual(converter(base.value), + getattr(base.derived, meth)()) + + self.assertEqual(converter(base.value), + converter(base.derived)) + + def test_implicit_proxy_special_meths(self): + self._check_special_methods() + + def test_explicit_proxy_special_meths(self): + self._check_special_methods(base_class=Explicit) + + def _check_contains(self, base_class=Implicit): + # Contains has lots of fallback behaviour + class B(Implicit): + pass + base = B() + base.value = 42 + + # The simple case is if the object implements contains itself + class ReallyContains(base_class): + if base_class is Explicit: + value = Acquisition.Acquired + + def __contains__(self, item): + return self.value == item + + base.derived = ReallyContains() + + self.assertTrue(42 in base.derived) + self.assertFalse(24 in base.derived) + + # Iterable objects are NOT iterated + # XXX: Is this a bug in the C code? Shouldn't it do + # what the interpreter does and fallback to iteration? 
+ class IterContains(base_class): + if base_class is Explicit: + value = Acquisition.Acquired + + def __iter__(self): + return iter((42,)) + base.derived = IterContains() + + self.assertRaises(AttributeError, operator.contains, base.derived, 42) + + def test_implicit_proxy_contains(self): + self._check_contains() + + def test_explicit_proxy_contains(self): + self._check_contains(base_class=Explicit) + + def _check_call(self, base_class=Implicit): + class B(Implicit): + pass + base = B() + base.value = 42 + + class Callable(base_class): + if base_class is Explicit: + value = Acquisition.Acquired + + def __call__(self, arg, k=None): + return self.value, arg, k + + base.derived = Callable() + + self.assertEqual(base.derived(1, k=2), (42, 1, 2)) + + if not IS_PYPY: + # XXX: This test causes certain versions + # of PyPy to segfault (at least 2.6.0-alpha1) + class NotCallable(base_class): + pass + + base.derived = NotCallable() + try: + base.derived() + self.fail("Not callable") + except (TypeError, AttributeError): + pass + + def test_implicit_proxy_call(self): + self._check_call() + + def test_explicit_proxy_call(self): + self._check_call(base_class=Explicit) + + def _check_hash(self, base_class=Implicit): + class B(Implicit): + pass + base = B() + base.value = B() + base.value.hash = 42 + + class NoAcquired(base_class): + def __hash__(self): + return 1 + + hashable = NoAcquired() + base.derived = hashable + self.assertEqual(1, hash(hashable)) + self.assertEqual(1, hash(base.derived)) + + # cannot access acquired attributes during + # __hash__ + + class CannotAccessAcquiredAttributesAtHash(base_class): + if base_class is Explicit: + value = Acquisition.Acquired + + def __hash__(self): + return self.value.hash + + hashable = CannotAccessAcquiredAttributesAtHash() + base.derived = hashable + self.assertRaises(AttributeError, hash, hashable) + self.assertRaises(AttributeError, hash, base.derived) + + def test_implicit_proxy_hash(self): + self._check_hash() + + def 
test_explicit_proxy_hash(self): + self._check_hash(base_class=Explicit) + + def _check_comparison(self, base_class=Implicit): + # Comparison behaviour is complex; see notes in _Wrapper + class B(Implicit): + pass + base = B() + base.value = 42 + + rich_cmp_methods = ['__lt__', '__gt__', '__eq__', + '__ne__', '__ge__', '__le__'] + + def _never_called(self, other): + raise RuntimeError("This should never be called") + + class RichCmpNeverCalled(base_class): + for _name in rich_cmp_methods: + locals()[_name] = _never_called + + base.derived = RichCmpNeverCalled() + base.derived2 = RichCmpNeverCalled() + # We can access all of the operators, but only because + # they are masked + for name in rich_cmp_methods: + getattr(operator, name)(base.derived, base.derived2) + + self.assertFalse(base.derived2 == base.derived) + self.assertEqual(base.derived, base.derived) + + def test_implicit_proxy_comporison(self): + self._check_comparison() + + def test_explicit_proxy_comporison(self): + self._check_comparison(base_class=Explicit) + + def _check_bool(self, base_class=Implicit): + class B(Implicit): + pass + base = B() + base.value = 42 + + class WithBool(base_class): + if base_class is Explicit: + value = Acquisition.Acquired + + def __nonzero__(self): + return bool(self.value) + __bool__ = __nonzero__ + + class WithLen(base_class): + if base_class is Explicit: + value = Acquisition.Acquired + + def __len__(self): + return self.value + + class WithNothing(base_class): + pass + + base.wbool = WithBool() + base.wlen = WithLen() + base.wnothing = WithNothing() + + self.assertEqual(bool(base.wbool), True) + self.assertEqual(bool(base.wlen), True) + self.assertEqual(bool(base.wnothing), True) + + base.value = 0 + self.assertFalse(base.wbool) + self.assertFalse(base.wlen) + + def test_implicit_proxy_bool(self): + self._check_bool() + + def test_explicit_proxy_bool(self): + self._check_bool(base_class=Explicit) + + +class TestCompilation(unittest.TestCase): + + def test_compile(self): 
+ if IS_PYPY or IS_PURE: + with self.assertRaises((AttributeError, ImportError)): + from Acquisition import _Acquisition + else: + from Acquisition import _Acquisition + self.assertTrue(hasattr(_Acquisition, 'AcquisitionCAPI')) + + +def test_suite(): + import os.path + here = os.path.dirname(__file__) + root = os.path.join(here, os.pardir, os.pardir) + readme = os.path.join(root, 'README.rst') + + suites = [ + DocTestSuite(), + unittest.defaultTestLoader.loadTestsFromName(__name__), + ] + + # This file is only available in a source checkout, skip it + # when tests are run for an installed version. + if os.path.isfile(readme): + suites.append(DocFileSuite(readme, module_relative=False)) + + return unittest.TestSuite(suites) diff --git a/thesisenv/lib/python3.6/site-packages/AuthEncoding-4.0.0.dist-info/DESCRIPTION.rst b/thesisenv/lib/python3.6/site-packages/AuthEncoding-4.0.0.dist-info/DESCRIPTION.rst new file mode 100644 index 0000000..0d9ad4e --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/AuthEncoding-4.0.0.dist-info/DESCRIPTION.rst @@ -0,0 +1,25 @@ +Overview +======== + +AuthEncoding is a framework for handling LDAP style password hashes. + +It is used in Zope2 but does not depend on any other Zope package. + +Changelog +========= + +4.0.0 (2015-09-30) +------------------ + +- Supporting Python 3.3 up to 3.5 and PyPy2. + +- Added ``SHA256DigestScheme``. 
+ + +3.0.0 (2015-09-28) +------------------ + +- Extracted from ``AccessControl 3.0.11`` + + + diff --git a/thesisenv/lib/python3.6/site-packages/AuthEncoding-4.0.0.dist-info/INSTALLER b/thesisenv/lib/python3.6/site-packages/AuthEncoding-4.0.0.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/AuthEncoding-4.0.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/thesisenv/lib/python3.6/site-packages/AuthEncoding-4.0.0.dist-info/METADATA b/thesisenv/lib/python3.6/site-packages/AuthEncoding-4.0.0.dist-info/METADATA new file mode 100644 index 0000000..d4617d4 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/AuthEncoding-4.0.0.dist-info/METADATA @@ -0,0 +1,54 @@ +Metadata-Version: 2.0 +Name: AuthEncoding +Version: 4.0.0 +Summary: Framework for handling LDAP style password hashes. +Home-page: https://github.com/zopefoundation/AuthEncoding +Author: Zope Foundation and Contributors +Author-email: zope-dev@zope.org +License: ZPL 2.1 +Platform: UNKNOWN +Classifier: Development Status :: 6 - Mature +Classifier: Environment :: Web Environment +Classifier: Framework :: Zope2 +Classifier: License :: OSI Approved :: Zope Public License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.6 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Topic :: System :: Systems Administration :: Authentication/Directory :: LDAP +Requires-Dist: six +Provides-Extra: test +Requires-Dist: pytest; extra == 'test' + +Overview 
+======== + +AuthEncoding is a framework for handling LDAP style password hashes. + +It is used in Zope2 but does not depend on any other Zope package. + +Changelog +========= + +4.0.0 (2015-09-30) +------------------ + +- Supporting Python 3.3 up to 3.5 and PyPy2. + +- Added ``SHA256DigestScheme``. + + +3.0.0 (2015-09-28) +------------------ + +- Extracted from ``AccessControl 3.0.11`` + + + diff --git a/thesisenv/lib/python3.6/site-packages/AuthEncoding-4.0.0.dist-info/RECORD b/thesisenv/lib/python3.6/site-packages/AuthEncoding-4.0.0.dist-info/RECORD new file mode 100644 index 0000000..520f923 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/AuthEncoding-4.0.0.dist-info/RECORD @@ -0,0 +1,18 @@ +AuthEncoding-4.0.0.dist-info/DESCRIPTION.rst,sha256=UKoEVLqA6eoGuaGlkVcj4mZJUnzBr3IdyZ4rLj9tBY4,381 +AuthEncoding-4.0.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +AuthEncoding-4.0.0.dist-info/METADATA,sha256=LMz4ZjZ94m7OrJJAs4_0fs1X4C-gvWPTEw8uGwX-LvY,1588 +AuthEncoding-4.0.0.dist-info/RECORD,, +AuthEncoding-4.0.0.dist-info/RECORD.jws,, +AuthEncoding-4.0.0.dist-info/WHEEL,sha256=5wvfB7GvgZAbKBSE9uX9Zbi6LCL-_KgezgHblXhCRnM,113 +AuthEncoding-4.0.0.dist-info/metadata.json,sha256=yBATAjh7I5YnUKtIQHGhhActRCg60BSsaAeM0r5dPFY,1294 +AuthEncoding-4.0.0.dist-info/top_level.txt,sha256=r_cFzzt28gdOYPoRX7HjrbsJE6G-ZPK0n3sWbqE4PdQ,13 +AuthEncoding/AuthEncoding.py,sha256=A82pAZQA-r1NSG7jBf1qDJn4utAQXMHMSF09Do35t0w,7265 +AuthEncoding/__init__.py,sha256=9fdxLPEIHDUW2ig-pE92y7BkZAHItLvvFpgXTs4byiU,791 +AuthEncoding/__pycache__/AuthEncoding.cpython-36.pyc,, +AuthEncoding/__pycache__/__init__.cpython-36.pyc,, +AuthEncoding/__pycache__/compat.cpython-36.pyc,, +AuthEncoding/compat.py,sha256=o2nbexNN16x67B8xLKZRQQo_F4LheBnRtKgO---vuPY,347 +AuthEncoding/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +AuthEncoding/tests/__pycache__/__init__.cpython-36.pyc,, +AuthEncoding/tests/__pycache__/test_AuthEncoding.cpython-36.pyc,, 
+AuthEncoding/tests/test_AuthEncoding.py,sha256=zEj6Uz2eC3VPxmafNcy2ZvdSQLfVSu_Yf9X63X2XHfM,3814 diff --git a/thesisenv/lib/python3.6/site-packages/AuthEncoding-4.0.0.dist-info/RECORD.jws b/thesisenv/lib/python3.6/site-packages/AuthEncoding-4.0.0.dist-info/RECORD.jws new file mode 100644 index 0000000..0756de9 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/AuthEncoding-4.0.0.dist-info/RECORD.jws @@ -0,0 +1 @@ +{"payload": "eyJoYXNoIjogInNoYTI1Nj1FbUpVbjFTUEdYeXhKc2FnMWpxNGJkMnlnYW80TlhzY2hORW5iSEFISHdnIn0", "recipients": [{"header": "eyJhbGciOiAiRWQyNTUxOSIsICJqd2siOiB7Imt0eSI6ICJFZDI1NTE5IiwgInZrIjogInZUZU1lalVPMTV1SkQzY203enZPLV9oc0dMSld4ZlFNMEFFd2R5SUQyeTQifX0", "signature": "D1i5q57sD-x3nU_8L_49SwW7b--DjetQhUpnMDwDDui6PVsLfsAr65teCK_GbT4txrxkqzCHEMV8byYKzz4XDw"}]} \ No newline at end of file diff --git a/thesisenv/lib/python3.6/site-packages/AuthEncoding-4.0.0.dist-info/WHEEL b/thesisenv/lib/python3.6/site-packages/AuthEncoding-4.0.0.dist-info/WHEEL new file mode 100644 index 0000000..7bf9daa --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/AuthEncoding-4.0.0.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.30.0.a0) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/thesisenv/lib/python3.6/site-packages/AuthEncoding-4.0.0.dist-info/metadata.json b/thesisenv/lib/python3.6/site-packages/AuthEncoding-4.0.0.dist-info/metadata.json new file mode 100644 index 0000000..4293f0a --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/AuthEncoding-4.0.0.dist-info/metadata.json @@ -0,0 +1 @@ +{"classifiers": ["Development Status :: 6 - Mature", "Environment :: Web Environment", "Framework :: Zope2", "License :: OSI Approved :: Zope Public License", "Operating System :: OS Independent", "Programming Language :: Python", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming 
Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: Implementation :: CPython", "Programming Language :: Python :: Implementation :: PyPy", "Topic :: System :: Systems Administration :: Authentication/Directory :: LDAP"], "extensions": {"python.details": {"contacts": [{"email": "zope-dev@zope.org", "name": "Zope Foundation and Contributors", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst"}, "project_urls": {"Home": "https://github.com/zopefoundation/AuthEncoding"}}}, "extras": ["test"], "generator": "bdist_wheel (0.30.0.a0)", "license": "ZPL 2.1", "metadata_version": "2.0", "name": "AuthEncoding", "run_requires": [{"extra": "test", "requires": ["pytest"]}, {"requires": ["six"]}], "summary": "Framework for handling LDAP style password hashes.", "version": "4.0.0"} \ No newline at end of file diff --git a/thesisenv/lib/python3.6/site-packages/AuthEncoding-4.0.0.dist-info/top_level.txt b/thesisenv/lib/python3.6/site-packages/AuthEncoding-4.0.0.dist-info/top_level.txt new file mode 100644 index 0000000..c70df06 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/AuthEncoding-4.0.0.dist-info/top_level.txt @@ -0,0 +1 @@ +AuthEncoding diff --git a/thesisenv/lib/python3.6/site-packages/AuthEncoding/AuthEncoding.py b/thesisenv/lib/python3.6/site-packages/AuthEncoding/AuthEncoding.py new file mode 100644 index 0000000..4e9dede --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/AuthEncoding/AuthEncoding.py @@ -0,0 +1,253 @@ +############################################################################## +# +# Copyright (c) 2002, 2015 Zope Foundation and Contributors. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## + +import binascii +import six +from binascii import b2a_base64, a2b_base64 +from hashlib import sha1 as sha +from hashlib import sha256 +from os import getpid +import time +from .compat import long, b, u + + +# Use the system PRNG if possible +import random +try: + random = random.SystemRandom() + using_sysrandom = True +except NotImplementedError: + using_sysrandom = False + + +def _reseed(): + if not using_sysrandom: + # This is ugly, and a hack, but it makes things better than + # the alternative of predictability. This re-seeds the PRNG + # using a value that is hard for an attacker to predict, every + # time a random string is required. This may change the + # properties of the chosen random sequence slightly, but this + # is better than absolute predictability. + random.seed(sha256( + "%s%s%s" % (random.getstate(), time.time(), getpid()) + ).digest()) + + +def _choice(c): + _reseed() + return random.choice(c) + + +def _randrange(r): + _reseed() + return random.randrange(r) + + +def constant_time_compare(val1, val2): + """ + Returns True if the two strings are equal, False otherwise. + + The time taken is independent of the number of characters that match. + """ + if len(val1) != len(val2): + return False + result = 0 + for x, y in zip(six.iterbytes(val1), six.iterbytes(val2)): + result |= x ^ y + return result == 0 + + +class PasswordEncryptionScheme: # An Interface + + def encrypt(pw): + """ + Encrypt the provided plain text password. + """ + + def validate(reference, attempt): + """ + Validate the provided password string. Reference is the + correct password, which may be encrypted; attempt is clear text + password attempt. 
+ """ + + +_schemes = [] + + +def registerScheme(id, s): + ''' + Registers an LDAP password encoding scheme. + ''' + _schemes.append((id, u'{%s}' % id, s)) + + +def listSchemes(): + return [id for id, prefix, scheme in _schemes] + + +class SSHADigestScheme: + ''' + SSHA is a modification of the SHA digest scheme with a salt + starting at byte 20 of the base64-encoded string. + ''' + # Source: http://developer.netscape.com/docs/technote/ldap/pass_sha.html + + def generate_salt(self): + # Salt can be any length, but not more than about 37 characters + # because of limitations of the binascii module. + # 7 is what Netscape's example used and should be enough. + # All 256 characters are available. + salt = b'' + for n in range(7): + salt += six.int2byte(_randrange(256)) + return salt + + def encrypt(self, pw): + return self._encrypt_with_salt(pw, self.generate_salt()) + + def validate(self, reference, attempt): + try: + ref = a2b_base64(reference) + except binascii.Error: + # Not valid base64. 
+ return 0 + salt = ref[20:] + compare = self._encrypt_with_salt(attempt, salt) + return constant_time_compare(compare, reference) + + def _encrypt_with_salt(self, pw, salt): + pw = b(pw) + return b2a_base64(sha(pw + salt).digest() + salt)[:-1] + +registerScheme(u'SSHA', SSHADigestScheme()) + + +class SHADigestScheme: + + def encrypt(self, pw): + return self._encrypt(pw) + + def validate(self, reference, attempt): + compare = self._encrypt(attempt) + return constant_time_compare(compare, reference) + + def _encrypt(self, pw): + pw = b(pw) + return b2a_base64(sha(pw).digest())[:-1] + + +registerScheme(u'SHA', SHADigestScheme()) + + +class SHA256DigestScheme: + + def encrypt(self, pw): + return b(sha256(b(pw)).hexdigest()) + + def validate(self, reference, attempt): + a = self.encrypt(attempt) + return constant_time_compare(a, reference) + +registerScheme(u'SHA256', SHA256DigestScheme()) + + +# Bogosity on various platforms due to ITAR restrictions +try: + from crypt import crypt +except ImportError: + crypt = None + +if crypt is not None: + + class CryptDigestScheme: + + def generate_salt(self): + choices = (u"ABCDEFGHIJKLMNOPQRSTUVWXYZ" + u"abcdefghijklmnopqrstuvwxyz" + u"0123456789./") + return _choice(choices) + _choice(choices) + + def encrypt(self, pw): + return b(crypt(self._recode_password(pw), self.generate_salt())) + + def validate(self, reference, attempt): + attempt = self._recode_password(attempt) + a = b(crypt(attempt, reference[:2].decode('ascii'))) + return constant_time_compare(a, reference) + + def _recode_password(self, pw): + # crypt always requires `str` which has a different meaning among + # the Python versions: + if six.PY3: + return u(pw) + return b(pw) + + registerScheme(u'CRYPT', CryptDigestScheme()) + + +class MySQLDigestScheme: + + def encrypt(self, pw): + pw = u(pw) + nr = long(1345345333) + add = 7 + nr2 = long(0x12345671) + for i in pw: + if i == ' ' or i == '\t': + continue + nr ^= (((nr & 63) + add) * ord(i)) + (nr << 8) + nr2 += 
(nr2 << 8) ^ nr + add += ord(i) + r0 = nr & ((long(1) << 31) - long(1)) + r1 = nr2 & ((long(1) << 31) - long(1)) + return (u"%08lx%08lx" % (r0, r1)).encode('ascii') + + def validate(self, reference, attempt): + a = self.encrypt(attempt) + return constant_time_compare(a, reference) + +registerScheme(u'MYSQL', MySQLDigestScheme()) + + +def pw_validate(reference, attempt): + """Validate the provided password string, which uses LDAP-style encoding + notation. Reference is the correct password, attempt is clear text + password attempt.""" + reference = b(reference) + for id, prefix, scheme in _schemes: + lp = len(prefix) + if reference[:lp] == b(prefix): + return scheme.validate(reference[lp:], attempt) + # Assume cleartext. + return constant_time_compare(reference, b(attempt)) + + +def is_encrypted(pw): + for id, prefix, scheme in _schemes: + lp = len(prefix) + if pw[:lp] == b(prefix): + return 1 + return 0 + + +def pw_encrypt(pw, encoding=u'SSHA'): + """Encrypt the provided plain text password using the encoding if provided + and return it in an LDAP-style representation.""" + encoding = u(encoding) + for id, prefix, scheme in _schemes: + if encoding == id: + return b(prefix) + scheme.encrypt(pw) + raise ValueError('Not supported: %s' % encoding) + +pw_encode = pw_encrypt # backward compatibility diff --git a/thesisenv/lib/python3.6/site-packages/AuthEncoding/__init__.py b/thesisenv/lib/python3.6/site-packages/AuthEncoding/__init__.py new file mode 100644 index 0000000..c6ff3fd --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/AuthEncoding/__init__.py @@ -0,0 +1,17 @@ +############################################################################## +# +# Copyright (c) 2002,2015 Zope Foundation and Contributors. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## + + +from .AuthEncoding import (is_encrypted, pw_encrypt, pw_validate, + registerScheme, listSchemes, + constant_time_compare) diff --git a/thesisenv/lib/python3.6/site-packages/AuthEncoding/compat.py b/thesisenv/lib/python3.6/site-packages/AuthEncoding/compat.py new file mode 100644 index 0000000..b44f965 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/AuthEncoding/compat.py @@ -0,0 +1,20 @@ +import six + +if six.PY3: + long = int +else: + long = long + + +def b(arg): + """Convert `arg` to bytes.""" + if isinstance(arg, six.text_type): + arg = arg.encode("latin-1") + return arg + + +def u(arg): + """Convert `arg` to text.""" + if isinstance(arg, six.binary_type): + arg = arg.decode('ascii', 'replace') + return arg diff --git a/thesisenv/lib/python3.6/site-packages/AuthEncoding/tests/__init__.py b/thesisenv/lib/python3.6/site-packages/AuthEncoding/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/thesisenv/lib/python3.6/site-packages/AuthEncoding/tests/test_AuthEncoding.py b/thesisenv/lib/python3.6/site-packages/AuthEncoding/tests/test_AuthEncoding.py new file mode 100644 index 0000000..f6e2df3 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/AuthEncoding/tests/test_AuthEncoding.py @@ -0,0 +1,101 @@ +# -*- coding: utf-8 -*- +############################################################################## +# +# Copyright (c) 2002, 2015 Zope Foundation and Contributors. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## +"""Test of AuthEncoding +""" + +from AuthEncoding import AuthEncoding +from ..compat import b, u +import pytest + + +def testListSchemes(): + assert len(AuthEncoding.listSchemes()) > 0 # At least one must exist! + + +@pytest.mark.parametrize('schema_id', AuthEncoding.listSchemes()) +@pytest.mark.parametrize('password', [u'good_pw', u'gööd_pw', b(u'gööd_pw')]) +def testGoodPassword(schema_id, password): + enc = AuthEncoding.pw_encrypt(password, schema_id) + assert enc != password + assert AuthEncoding.pw_validate(enc, password) + assert AuthEncoding.pw_validate(u(enc), password) + assert AuthEncoding.is_encrypted(enc) + assert not AuthEncoding.is_encrypted(password) + + +@pytest.mark.parametrize('schema_id', AuthEncoding.listSchemes()) +@pytest.mark.parametrize( + 'password', [u'OK_pa55w0rd \n', u'OK_pä55w0rd \n', b(u'OK_pä55w0rd \n')]) +def testBadPassword(schema_id, password): + enc = AuthEncoding.pw_encrypt(password, schema_id) + assert enc != password + assert not AuthEncoding.pw_validate(enc, u'xxx') + assert not AuthEncoding.pw_validate(enc, b'xxx') + assert not AuthEncoding.pw_validate(u(enc), u'xxx') + assert not AuthEncoding.pw_validate(enc, enc) + if schema_id != u'CRYPT': + # crypt truncates passwords and would fail this test. 
+ assert not AuthEncoding.pw_validate(enc, password[:-1]) + assert not AuthEncoding.pw_validate(enc, password[1:]) + assert AuthEncoding.pw_validate(enc, password) + + +@pytest.mark.parametrize('schema_id', AuthEncoding.listSchemes()) +def testShortPassword(schema_id): + pw = u'1' + enc = AuthEncoding.pw_encrypt(pw, schema_id) + assert AuthEncoding.pw_validate(enc, pw) + assert not AuthEncoding.pw_validate(enc, enc) + assert not AuthEncoding.pw_validate(enc, u'xxx') + + +@pytest.mark.parametrize('schema_id', AuthEncoding.listSchemes()) +def testLongPassword(schema_id): + pw = u'Pw' * 2000 + enc = AuthEncoding.pw_encrypt(pw, schema_id) + assert AuthEncoding.pw_validate(enc, pw) + assert not AuthEncoding.pw_validate(enc, enc) + assert not AuthEncoding.pw_validate(enc, u'xxx') + if u'CRYPT' not in schema_id: + # crypt and bcrypt truncates passwords and would fail these tests. + assert not AuthEncoding.pw_validate(enc, pw[:-2]) + assert not AuthEncoding.pw_validate(enc, pw[2:]) + + +@pytest.mark.parametrize('schema_id', AuthEncoding.listSchemes()) +def testBlankPassword(schema_id): + pw = u'' + enc = AuthEncoding.pw_encrypt(pw, schema_id) + assert enc != pw + assert AuthEncoding.pw_validate(enc, pw) + assert not AuthEncoding.pw_validate(enc, enc) + assert not AuthEncoding.pw_validate(enc, u'xxx') + + +def testUnencryptedPassword(): + # Sanity check + pw = u'my-password' + assert AuthEncoding.pw_validate(pw, pw) + assert not AuthEncoding.pw_validate(pw, pw + u'asdf') + + +def testEncryptWithNotSupportedScheme(): + with pytest.raises(ValueError) as err: + AuthEncoding.pw_encrypt(u'asdf', 'MD1') + assert 'Not supported: MD1' == str(err.value) + + +def testEncryptAcceptsTextAndBinaryEncodingNames(): + assert (AuthEncoding.pw_encrypt(u'asdf', b'SHA') == + AuthEncoding.pw_encrypt(u'asdf', u'SHA')) diff --git a/thesisenv/lib/python3.6/site-packages/BTrees-4.5.1.dist-info/INSTALLER b/thesisenv/lib/python3.6/site-packages/BTrees-4.5.1.dist-info/INSTALLER new file mode 100644 
index 0000000..a1b589e --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/BTrees-4.5.1.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/thesisenv/lib/python3.6/site-packages/BTrees-4.5.1.dist-info/METADATA b/thesisenv/lib/python3.6/site-packages/BTrees-4.5.1.dist-info/METADATA new file mode 100644 index 0000000..e9ba69c --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/BTrees-4.5.1.dist-info/METADATA @@ -0,0 +1,348 @@ +Metadata-Version: 2.1 +Name: BTrees +Version: 4.5.1 +Summary: Scalable persistent object containers +Home-page: https://github.com/zopefoundation/BTrees +Author: Zope Foundation +Author-email: zodb-dev@zope.org +License: ZPL 2.1 +Platform: any +Classifier: Development Status :: 6 - Mature +Classifier: License :: OSI Approved :: Zope Public License +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Framework :: ZODB +Classifier: Topic :: Database +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Classifier: Operating System :: Microsoft :: Windows +Classifier: Operating System :: Unix +Provides-Extra: test +Provides-Extra: ZODB +Provides-Extra: docs +Requires-Dist: persistent (>=4.1.0) +Requires-Dist: zope.interface +Provides-Extra: ZODB +Requires-Dist: ZODB; extra == 'ZODB' +Provides-Extra: docs +Requires-Dist: Sphinx; extra == 'docs' +Requires-Dist: repoze.sphinx.autointerface; extra == 'docs' +Provides-Extra: test +Requires-Dist: transaction; extra == 'test' +Requires-Dist: zope.testrunner; extra == 'test' + 
+``BTrees``: scalable persistent components +=========================================== + +.. image:: https://travis-ci.org/zopefoundation/BTrees.svg?branch=master + :target: https://travis-ci.org/zopefoundation/BTrees + +.. image:: https://ci.appveyor.com/api/projects/status/github/zopefoundation/BTrees?branch=master&svg=true + :target: https://ci.appveyor.com/project/mgedmin/BTrees + +.. image:: https://coveralls.io/repos/github/zopefoundation/BTrees/badge.svg?branch=master + :target: https://coveralls.io/github/zopefoundation/BTrees?branch=master + +.. image:: https://img.shields.io/pypi/v/BTrees.svg + :target: https://pypi.org/project/BTrees/ + :alt: Current version on PyPI + +.. image:: https://img.shields.io/pypi/pyversions/BTrees.svg + :target: https://pypi.org/project/BTrees/ + :alt: Supported Python versions + + +This package contains a set of persistent object containers built around +a modified BTree data structure. The trees are optimized for use inside +ZODB's "optimistic concurrency" paradigm, and include explicit resolution +of conflicts detected by that mechanism. + +Please see `the Sphinx documentation `_ for further +information. + + +``BTrees`` Changelog +==================== + +4.5.1 (2018-08-09) +------------------ + +- Produce binary wheels for Python 3.7. + +- Use pyproject.toml to specify build dependencies. This requires pip + 18 or later to build from source. + + +4.5.0 (2018-04-23) +------------------ + +- Add support for Python 3.6 and 3.7. +- Drop support for Python 3.3. +- Raise an ``ImportError`` consistently on Python 3 if the C extension for + BTrees is used but the ``persistent`` C extension is not available. + Previously this could result in an odd ``AttributeError``. See + https://github.com/zopefoundation/BTrees/pull/55 +- Fix the possibility of a rare crash in the C extension when + deallocating items. 
See https://github.com/zopefoundation/BTrees/issues/75 +- Respect the ``PURE_PYTHON`` environment variable at runtime even if + the C extensions are available. See + https://github.com/zopefoundation/BTrees/issues/78 +- Always attempt to build the C extensions, but make their success + optional. +- Fix a ``DeprecationWarning`` that could come from I and L objects in + Python 2 in pure-Python mode. See https://github.com/zopefoundation/BTrees/issues/79 + +4.4.1 (2017-01-24) +------------------ + +Fixed a packaging bug that caused extra files to be included (some of +which caused problems in some platforms). + +4.4.0 (2017-01-11) +------------------ + +- Allow None as a special key (sorted smaller than all others). + + This is a bit of a return to BTrees 3 behavior in that Nones are + allowed as keys again. Other objects with default ordering are + still not allowed as keys. + +4.3.2 (2017-01-05) +------------------ + +- Make the CPython implementation consistent with the pure-Python + implementation and only check object keys for default comparison + when setting keys. In Python 2 this makes it possible to remove keys + that were added using a less restrictive version of BTrees. (In + Python 3 keys that are unorderable still cannot be removed.) + Likewise, all versions can unpickle trees that already had such + keys. See: https://github.com/zopefoundation/BTrees/issues/53 and + https://github.com/zopefoundation/BTrees/issues/51 + +- Make the Python implementation consistent with the CPython + implementation and check object key identity before checking + equality and performing comparisons. This can allow fixing trees + that have keys that now have broken comparison functions. 
See + https://github.com/zopefoundation/BTrees/issues/50 + +- Make the CPython implementation consistent with the pure-Python + implementation and no longer raise ``TypeError`` for an object key + (in object-keyed trees) with default comparison on ``__getitem__``, + ``get`` or ``in`` operations. Instead, the results will be a + ``KeyError``, the default value, and ``False``, respectively. + Previously, CPython raised a ``TypeError`` in those cases, while the + Python implementation behaved as specified. + + Likewise, non-integer keys in integer-keyed trees + will raise ``KeyError``, return the default and return ``False``, + respectively, in both implementations. Previously, pure-Python + raised a ``KeyError``, returned the default, and raised a + ``TypeError``, while CPython raised ``TypeError`` in all three cases. + +4.3.1 (2016-05-16) +------------------ + +- Packaging: fix password used to automate wheel creation on Travis. + +4.3.0 (2016-05-10) +------------------ + +- Fix unexpected ``OverflowError`` when passing 64bit values to long + keys / values on Win64. See: + https://github.com/zopefoundation/BTrees/issues/32 + +- When testing ``PURE_PYTHON`` environments under ``tox``, avoid poisoning + the user's global wheel cache. + +- Ensure that the pure-Python implementation, used on PyPy and when a C + compiler isn't available for CPython, pickles identically to the C + version. Unpickling will choose the best available implementation. + This change prevents interoperability problems and database corruption if + both implementations are in use. While it is no longer possible to + pickle a Python implementation and have it unpickle to the Python + implementation if the C implementation is available, existing Python + pickles will still unpickle to the Python implementation (until + pickled again). See: + https://github.com/zopefoundation/BTrees/issues/19 + +- Avoid creating invalid objects when unpickling empty BTrees in a pure-Python + environment. 
+ +- Drop support for Python 2.6 and 3.2. + +4.2.0 (2015-11-13) +------------------ + +- Add support for Python 3.5. + +4.1.4 (2015-06-02) +------------------ + +- Ensure that pure-Python Bucket and Set objects have a human readable + ``__repr__`` like the C versions. + +4.1.3 (2015-05-19) +------------------ + +- Fix ``_p_changed`` when removing items from small pure-Python + BTrees/TreeSets and when adding items to small pure-Python Sets. See: + https://github.com/zopefoundation/BTrees/issues/13 + + +4.1.2 (2015-04-07) +------------------ + +- Suppress testing 64-bit values in OLBTrees on 32 bit machines. + See: https://github.com/zopefoundation/BTrees/issues/9 + +- Fix ``_p_changed`` when adding items to small pure-Python + BTrees/TreeSets. See: + https://github.com/zopefoundation/BTrees/issues/11 + + +4.1.1 (2014-12-27) +------------------ + +- Accomodate long values in pure-Python OLBTrees. + + +4.1.0 (2014-12-26) +------------------ + +- Add support for PyPy and PyPy3. + +- Add support for Python 3.4. + +- BTree subclasses can define ``max_leaf_size`` or ``max_internal_size`` + to control maximum sizes for Bucket/Set and BTree/TreeSet nodes. + +- Detect integer overflow on 32-bit machines correctly under Python 3. + +- Update pure-Python and C trees / sets to accept explicit None to indicate + max / min value for ``minKey``, ``maxKey``. (PR #3) + +- Update pure-Python trees / sets to accept explicit None to indicate + open ranges for ``keys``, ``values``, ``items``. (PR #3) + + +4.0.8 (2013-05-25) +------------------ + +- Fix value-based comparison for objects under Py3k: addresses invalid + merges of ``[OLI]OBTrees/OBuckets``. + +- Ensure that pure-Python implementation of ``OOBTree.byValue`` matches + semantics (reversed-sort) of C implementation. + + +4.0.7 (2013-05-22) +------------------ + +- Issue #2: compilation error on 32-bit mode of OS/X. 
+ +- Test ``PURE_PYTHON`` environment variable support: if set, the C + extensions will not be built, imported, or tested. + + +4.0.6 (2013-05-14) +------------------ + +- Changed the ``ZODB`` extra to require only the real ``ZODB`` package, + rather than the ``ZODB3`` metapackage: depending on the version used, + the metapackage could pull in stale versions of **this** package and + ``persistent``. + +- Fixed Python version check in ``setup.py``. + + +4.0.5 (2013-01-15) +------------------ + +- Fit the ``repr`` of bucket objects, which could contain garbage + characters. + + +4.0.4 (2013-01-12) +------------------ + +- Emulate the (private) iterators used by the C extension modules from + pure Python. This change is "cosmetic" only: it prevents the ZCML + ``zope.app.security:permission.zcml`` from failing. The emulated + classes are **not** functional, and should be considered implementation + details. + +- Accomodate buildout to the fact that we no longer bundle a copy + of 'persistent.h'. + +- Fix test failures on Windows: no longer rely on overflows from + ``sys.maxint``. + + +4.0.3 (2013-01-04) +------------------ + +- Added ``setup_requires==['persistent']``. + + +4.0.2 (2013-01-03) +------------------ + +- Updated Trove classifiers. + +- Added explicit support for Python 3.2, Python 3.3, and PyPy. + Note that the C extensions are not (yet) available on PyPy. + +- Python reference implementations now tested separately from the C + verions on all platforms. + +- 100% unit test coverage. + + +4.0.1 (2012-10-21) +------------------ + +- Provide local fallback for persistent C header inclusion if the + persistent distribution isn't installed. This makes the winbot happy. + + +4.0.0 (2012-10-20) +------------------ + +Platform Changes +################ + +- Dropped support for Python < 2.6. + +- Factored ``BTrees`` as a separate distribution. + +Testing Changes +############### + +- All covered platforms tested under ``tox``. 
+ +- Added support for continuous integration using ``tox`` and ``jenkins``. + +- Added ``setup.py dev`` alias (installs ``nose`` and ``coverage``). + +- Dropped dependency on ``zope.testing`` / ``zope.testrunner``: tests now + run with ``setup.py test``. + +Documentation Changes +##################### + +- Added API reference, generated via Spinx' autodoc. + +- Added Sphinx documentation based on ZODB Guide (snippets are exercised + via 'tox'). + +- Added ``setup.py docs`` alias (installs ``Sphinx`` and + ``repoze.sphinx.autointerface``). + + diff --git a/thesisenv/lib/python3.6/site-packages/BTrees-4.5.1.dist-info/RECORD b/thesisenv/lib/python3.6/site-packages/BTrees-4.5.1.dist-info/RECORD new file mode 100644 index 0000000..fbfa73f --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/BTrees-4.5.1.dist-info/RECORD @@ -0,0 +1,134 @@ +BTrees-4.5.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +BTrees-4.5.1.dist-info/METADATA,sha256=Nb2ZkIBPwlBQFnLSaGd_6XWqJwn1DIzl5dOR5nri1Mk,11329 +BTrees-4.5.1.dist-info/RECORD,, +BTrees-4.5.1.dist-info/WHEEL,sha256=lx06sLWNl7U6OABNP9EWDqGNT2LbGyG1l5xyFnYYudo,109 +BTrees-4.5.1.dist-info/entry_points.txt,sha256=OZFBvh0wrCZW2J7tzw2NztqnLWwpv5WcriQ9x7FELPY,6 +BTrees-4.5.1.dist-info/top_level.txt,sha256=tyKTCytaCG_dQ7OVIUdgV-PZM9ltjveiOYk5u5FesbU,15 +BTrees/BTreeItemsTemplate.c,sha256=u5_bDWVQljKp87eX_FIQbm1HoVK9QztsB4UkcVZhVKU,24825 +BTrees/BTreeModuleTemplate.c,sha256=kO71qy4qU0nzS4PzTDzkvdn6cf-8DpzRFSt16WFp1O0,20611 +BTrees/BTreeTemplate.c,sha256=qJJnOLO9GJZNNzbBLRO1AxloA62cGDtC-2QFFnupbUY,71551 +BTrees/BucketTemplate.c,sha256=5sSFK-XD19B13bpIVN-rFS8TTcf6OCAYNecFbVdybqw,53068 +BTrees/Development.txt,sha256=IJCMs8N362vMCr4H67mznKIr5Lb5BuYh5YRBzHDq8xk,16958 +BTrees/IFBTree.py,sha256=xTd4UoaUmBhvf0wtco-_AZI2G_2bqymFWKRPDKIHX5g,3452 +BTrees/IIBTree.py,sha256=43-TgRVrwHKzhwZ7myrbRbvUfRIL-EpXWmhKmYfXuZs,3426 +BTrees/IOBTree.py,sha256=1PKMEHbm6Q0tOT-nEE66p6oiWUYtRXGs4ca6kVTtGck,2776 
+BTrees/Interfaces.py,sha256=-KxR8M0K5NVfWcvKbo5HkcjHZmPQQ0OXvLxXoaysX08,17831 +BTrees/LFBTree.py,sha256=4qVsFCP6dm-ocZUBMTM97yNkI4MjB14xoBVJPnIpNLk,3453 +BTrees/LLBTree.py,sha256=EyzLlsd-vdy12c6tVr57dshtR7yBRSq4ovXLbWnMoXw,3446 +BTrees/LOBTree.py,sha256=yLR9OWzuytSDeb16vQjoJTCTpmZrAq1d5GMxXhRs9tk,2777 +BTrees/Length.py,sha256=JX5_JLeRIwh79j_r87GnjFosExLwrj8H2eBjdvfb2IY,1937 +BTrees/MergeTemplate.c,sha256=oLbW1QqeERrfnkg9YXfDUXU4I4OAJZ7jQ1OJcWQ7mqM,12070 +BTrees/OIBTree.py,sha256=KvFHcH_80Cxy_Qclv8SlGrUVWapudsSGEKsEhOfcvcE,3338 +BTrees/OLBTree.py,sha256=SXe4IJ0NOAfd3YJbV5L68VJkfdC3ZqzFOv58SRBXiKQ,3330 +BTrees/OOBTree.py,sha256=n2dZCvzq83FJZ45lTywvYBvT2ozhJZZDraS6jGDZUaA,2524 +BTrees/SetOpTemplate.c,sha256=T5hDNXFqSW6i1rKtCVErg6W-H8HOYkc5hCh1j6-8uW8,15571 +BTrees/SetTemplate.c,sha256=GIW_9986HcjjwCXG2XnDz_BNHgsygf1umnv3FD92Gr8,10119 +BTrees/TreeSetTemplate.c,sha256=z7sOs0RDVCgoufuDIiMb1oBKEKyJpgGtaUY063cF7ZE,8455 +BTrees/_IFBTree.c,sha256=BIJrf9LcDM_DRKCtnXJqoAnwvDwhnh_3x_l7CQWzZ3g,1136 +BTrees/_IFBTree.cpython-36m-darwin.so,sha256=-pDtbOXMyRLnyEzGPESPtNezHMVYjIkcI4_8LjbfQ90,186992 +BTrees/_IIBTree.c,sha256=DXzS4WGNkVYneiEGtX9F0olC750rxFQfcAVzUlaBCT0,1130 +BTrees/_IIBTree.cpython-36m-darwin.so,sha256=fY6YmOcdTiBl3DeGES-_KBdMLxDgCcGq7zFNfg8_dfc,182652 +BTrees/_IOBTree.c,sha256=nL6yr4-u6rPo_y8A8TD1irmPVG0t4szMVh4NKumypgI,1141 +BTrees/_IOBTree.cpython-36m-darwin.so,sha256=vNJgmpO6m0rYNxQfpSe5Qse1d5BNGEIovI72szY6Ogk,182500 +BTrees/_LFBTree.c,sha256=vZ8O4_6sYU54cSwbcyy1FXUfbypQzuE59tYN6Po10GE,1208 +BTrees/_LFBTree.cpython-36m-darwin.so,sha256=KuOsXfRQGAFZZjIKv9RCs_EEiiUXmrhZoVOSLZQQMms,187080 +BTrees/_LLBTree.c,sha256=P31gcA6clVlSgAlBLzJ4hqdZMPqwV6q9pKfEexXRH9Q,1199 +BTrees/_LLBTree.cpython-36m-darwin.so,sha256=WOhNkol3_nyCdjN7eUsLfz7hYw1ybbSi4Xz0AtvjmSc,186820 +BTrees/_LOBTree.c,sha256=-bq0twqbE0oqlxyMS6chYBR5ObvLvFQAa0IyFK-l4ro,1178 +BTrees/_LOBTree.cpython-36m-darwin.so,sha256=wLbc18-MMpOeFIrdQVm932uKHq-nb_2h3c25OfOieO4,182588 
+BTrees/_OIBTree.c,sha256=6SLJqZskZHEQB4OqCIlZZwGRwADi7MOvu-8W3hRJBo4,1141 +BTrees/_OIBTree.cpython-36m-darwin.so,sha256=E4ToIjjUlcb1BXQ2XexI17hzxGpLkAhzFdTjZoOi4hA,178356 +BTrees/_OLBTree.c,sha256=Hu0ylTuFWEPivGsL0ClS1YLdvx3fTIS9phtMpSt9zXo,1178 +BTrees/_OLBTree.cpython-36m-darwin.so,sha256=qoKSvSbbNErXIMNVKYxyapIqDRNiU9M7VSXNxDE19m4,178428 +BTrees/_OOBTree.c,sha256=e-Wd39TQyfwnX9X-o1mp9S_hIUpWGmUjmGvdH4f3__Q,1150 +BTrees/_OOBTree.cpython-36m-darwin.so,sha256=7jOL1UtqyRz7YKvmBGma5asRslm4GlRKpf9sLxteCEk,174148 +BTrees/__init__.py,sha256=SuUBN3IfVFkSbKLece09uEDFg76AwWWD2qx37AaYXxg,1871 +BTrees/__pycache__/IFBTree.cpython-36.pyc,, +BTrees/__pycache__/IIBTree.cpython-36.pyc,, +BTrees/__pycache__/IOBTree.cpython-36.pyc,, +BTrees/__pycache__/Interfaces.cpython-36.pyc,, +BTrees/__pycache__/LFBTree.cpython-36.pyc,, +BTrees/__pycache__/LLBTree.cpython-36.pyc,, +BTrees/__pycache__/LOBTree.cpython-36.pyc,, +BTrees/__pycache__/Length.cpython-36.pyc,, +BTrees/__pycache__/OIBTree.cpython-36.pyc,, +BTrees/__pycache__/OLBTree.cpython-36.pyc,, +BTrees/__pycache__/OOBTree.cpython-36.pyc,, +BTrees/__pycache__/__init__.cpython-36.pyc,, +BTrees/__pycache__/_base.cpython-36.pyc,, +BTrees/__pycache__/_compat.cpython-36.pyc,, +BTrees/__pycache__/check.cpython-36.pyc,, +BTrees/__pycache__/fsBTree.cpython-36.pyc,, +BTrees/__pycache__/utils.cpython-36.pyc,, +BTrees/_base.py,sha256=h6CFY1XeQHNJ9lkH6UWwMSTkTWOoOXNLUdUqvNbo1O4,49411 +BTrees/_compat.h,sha256=7Juyq0L7R692-QxH7QDk3HD7AA3N0eAnwfP4zoqoGJg,1392 +BTrees/_compat.py,sha256=30krwQo7chtAhl6V-LY0G4723rA43bZSHiERonPCHQc,2902 +BTrees/_fsBTree.c,sha256=R2xeZeyvlUvJObF-66R2HJOFoPHLO87p3rVpe9byssY,4805 +BTrees/_fsBTree.cpython-36m-darwin.so,sha256=3YzUd8gVpOREfkVJzM2NaY_cIYgZgmCULvLXSv01Rg8,174228 +BTrees/check.py,sha256=73Wkf_42KfDQ_LjUlTC9hYC_v3e2gUXOJFkGbqwAAXM,15481 +BTrees/floatvaluemacros.h,sha256=8--_OkpCuflybs5jaP3B54BWxw38u8vESph4IM545AI,899 +BTrees/fsBTree.py,sha256=r1_iTqRLCBqKGPPsvj1D0mYoRzN85amNnrJJbjiHWj0,3185 
+BTrees/intkeymacros.h,sha256=M0lCFColrnPHK6qRbbgwqwAITplrCVfIDCcHp7bSE-I,1547 +BTrees/intvaluemacros.h,sha256=oL_1993h7sS6O716DaKHytbC094xcBO4L1llPqX20Mo,1704 +BTrees/objectkeymacros.h,sha256=BGhqG4nV122anWw1CNJE3dj2c5LhqKwDcvvx-Y_8QE0,1285 +BTrees/objectvaluemacros.h,sha256=duuNoUjv-IwKbzEVxZjvvmDCETmzzASeQp1N2188BuM,460 +BTrees/sorters.c,sha256=_RFLxqdW3ZhfJnMfo7tS0cQ1OuftmTmdwMLoJmH29p4,15218 +BTrees/tests/__init__.py,sha256=as-9vgC96TSaF9bHM7EBWBANqSJOgJjwWtUT3lvHQZg,52 +BTrees/tests/__pycache__/__init__.cpython-36.pyc,, +BTrees/tests/__pycache__/common.cpython-36.pyc,, +BTrees/tests/__pycache__/testBTrees.cpython-36.pyc,, +BTrees/tests/__pycache__/testBTreesUnicode.cpython-36.pyc,, +BTrees/tests/__pycache__/testConflict.cpython-36.pyc,, +BTrees/tests/__pycache__/test_IFBTree.cpython-36.pyc,, +BTrees/tests/__pycache__/test_IIBTree.cpython-36.pyc,, +BTrees/tests/__pycache__/test_IOBTree.cpython-36.pyc,, +BTrees/tests/__pycache__/test_LFBTree.cpython-36.pyc,, +BTrees/tests/__pycache__/test_LLBTree.cpython-36.pyc,, +BTrees/tests/__pycache__/test_LOBTree.cpython-36.pyc,, +BTrees/tests/__pycache__/test_Length.cpython-36.pyc,, +BTrees/tests/__pycache__/test_OIBTree.cpython-36.pyc,, +BTrees/tests/__pycache__/test_OLBTree.cpython-36.pyc,, +BTrees/tests/__pycache__/test_OOBTree.cpython-36.pyc,, +BTrees/tests/__pycache__/test__base.cpython-36.pyc,, +BTrees/tests/__pycache__/test_btreesubclass.cpython-36.pyc,, +BTrees/tests/__pycache__/test_check.cpython-36.pyc,, +BTrees/tests/__pycache__/test_fsBTree.cpython-36.pyc,, +BTrees/tests/__pycache__/test_utils.cpython-36.pyc,, +BTrees/tests/common.py,sha256=ypzQE17QBOIpZNQmiRBtLMUA3nN68ZpMsjhjcnJmrhU,86610 +BTrees/tests/testBTrees.py,sha256=9kHPN0-V9ZTYE05-KPocMChxWsrG3YtdGDxy_wfmZAg,17407 +BTrees/tests/testBTreesUnicode.py,sha256=mPQraEu9VPV4mDlMcJRhpKJC0hvK2-IwJ-aZASP-DsM,2554 +BTrees/tests/testConflict.py,sha256=O6bjf7ApdPaYzYCpmPcMC4yoEeS07dfvO280sKuNBZU,21323 
+BTrees/tests/test_IFBTree.py,sha256=lDac4rWXBtzaIYcbWWPcEshJKvLlwcJRzPzpltUC4rU,10641 +BTrees/tests/test_IIBTree.py,sha256=ois9MwkJ33ZoryShqIRt6FE51rGPwBRT-fw8ceoZtmo,14197 +BTrees/tests/test_IOBTree.py,sha256=LwwX727SNAWCf3tP18ZdY1rxNIYUseCKP2O1I860QE0,11531 +BTrees/tests/test_LFBTree.py,sha256=RE2Fwo1jPPhSE4WI5R8Q0Q65wyRRydEJ5wMKcBcLK2w,8957 +BTrees/tests/test_LLBTree.py,sha256=ECYj7Fn_em3RMwpd7PYb7Na5IR0FM5_lkrFbP1IkjP8,11760 +BTrees/tests/test_LOBTree.py,sha256=RnAkGsl6prphDGpd-twb562MDlhYnn8Ka_EagQb1sEI,10124 +BTrees/tests/test_Length.py,sha256=gyEXtVvSw7O_AMA9Yd2usemR_Qgie3sdr4lsCjkM0wY,3502 +BTrees/tests/test_OIBTree.py,sha256=eeB1T_nlMb5DQMbD1ASmEwj-TEC7Pb8fSXwpVh-nyEU,10947 +BTrees/tests/test_OLBTree.py,sha256=Bm6QqM9NdoqhrRqRykjurAqO9Sg8YzUQ4BrZWA8cc8A,10293 +BTrees/tests/test_OOBTree.py,sha256=bfnhr0zQzvQVMW9FtL1QWT6J38Lt-b1nVYC4r7hZrS8,12573 +BTrees/tests/test__base.py,sha256=7teTiKJfi-jc-SG1RL4KVhAYmy_H6oC-bCjY3J-2I2Q,114377 +BTrees/tests/test_btreesubclass.py,sha256=bT0TwSCViQGoG8hRSzNLC6VAw3wlzaP1n80OZGjwREA,1646 +BTrees/tests/test_check.py,sha256=UdeT_QGMF5ole6fya4853XcxrJSzeW3GSnz2J4RmngQ,12948 +BTrees/tests/test_fsBTree.py,sha256=88hNt0wIFTizjPaUgHkz2msFO6-eFPOyHSEQKZKci-0,2136 +BTrees/tests/test_utils.py,sha256=8B0lqFwYoHZalmEUyogHfDlN6IoERBaPjlr0TD-xu50,2703 +BTrees/utils.py,sha256=Vnpv1X63hp3aBPUeS7EPuZo0NoGCBmuogfu8pAcLgSk,1442 +terryfy/__init__.py,sha256=RBtPV4__9gzLYGnKiTMv1tGSXLxzFpIuVjARdyHHi0w,31 +terryfy/__pycache__/__init__.cpython-36.pyc,, +terryfy/__pycache__/bdist_wheel.cpython-36.pyc,, +terryfy/__pycache__/cp_suff_real_libs.cpython-36.pyc,, +terryfy/__pycache__/fuse_suff_real_libs.cpython-36.pyc,, +terryfy/__pycache__/monkeyexec.cpython-36.pyc,, +terryfy/__pycache__/repath_lib_names.cpython-36.pyc,, +terryfy/__pycache__/test_travisparse.cpython-36.pyc,, +terryfy/__pycache__/travisparse.cpython-36.pyc,, +terryfy/__pycache__/wafutils.cpython-36.pyc,, +terryfy/bdist_wheel.py,sha256=uMS-xIbyD81dC8nyKh6NuFNA3qVWY6TVXyzzsV5DBTc,811 
+terryfy/cp_suff_real_libs.py,sha256=11dwW6AePQ2sVB0C4wurs-tr4z1ytRgiHz_PsXGpFiw,720 +terryfy/fuse_suff_real_libs.py,sha256=wfsxzciiEf1FgTga7MSJ0KXsulUyWvYaul5Z4lIfM0E,1099 +terryfy/monkeyexec.py,sha256=QFZ5IB5JwsJK-Pc7IFnwrOD1pygE812zYn98kV-6V8A,1708 +terryfy/repath_lib_names.py,sha256=kv2dHTRRYHjIwNbcfbFNna_6dHC6yyd-orO2BWMfNOk,1018 +terryfy/test_travisparse.py,sha256=eGxYZSYdzwkxI6B9oYhZB-iGu4ifVla_F4i3Nkq7efo,2148 +terryfy/travisparse.py,sha256=Io2vjJJz6iBHtSqZGMUm0YZ8RUO3d4y06bgYMC0qWT4,1763 +terryfy/wafutils.py,sha256=FIxHuY991jWwdrGB_PVjSHLf3aeZpqKWoK_9rJbH8Vk,9118 diff --git a/thesisenv/lib/python3.6/site-packages/BTrees-4.5.1.dist-info/WHEEL b/thesisenv/lib/python3.6/site-packages/BTrees-4.5.1.dist-info/WHEEL new file mode 100644 index 0000000..63e588c --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/BTrees-4.5.1.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.31.1) +Root-Is-Purelib: false +Tag: cp36-cp36m-macosx_10_6_intel + diff --git a/thesisenv/lib/python3.6/site-packages/BTrees-4.5.1.dist-info/entry_points.txt b/thesisenv/lib/python3.6/site-packages/BTrees-4.5.1.dist-info/entry_points.txt new file mode 100644 index 0000000..5c979ee --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/BTrees-4.5.1.dist-info/entry_points.txt @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/thesisenv/lib/python3.6/site-packages/BTrees-4.5.1.dist-info/top_level.txt b/thesisenv/lib/python3.6/site-packages/BTrees-4.5.1.dist-info/top_level.txt new file mode 100644 index 0000000..440e27d --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/BTrees-4.5.1.dist-info/top_level.txt @@ -0,0 +1,2 @@ +BTrees +terryfy diff --git a/thesisenv/lib/python3.6/site-packages/BTrees/BTreeItemsTemplate.c b/thesisenv/lib/python3.6/site-packages/BTrees/BTreeItemsTemplate.c new file mode 100644 index 0000000..148480a --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/BTrees/BTreeItemsTemplate.c @@ -0,0 +1,790 @@ 
+/***************************************************************************** + + Copyright (c) 2001, 2002 Zope Foundation and Contributors. + All Rights Reserved. + + This software is subject to the provisions of the Zope Public License, + Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. + THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED + WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED + WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS + FOR A PARTICULAR PURPOSE + + ****************************************************************************/ + +#define BTREEITEMSTEMPLATE_C "$Id$\n" + +/* A BTreeItems struct is returned from calling .items(), .keys() or + * .values() on a BTree-based data structure, and is also the result of + * taking slices of those. It represents a contiguous slice of a BTree. + * + * The start of the slice is in firstbucket, at offset first. The end of + * the slice is in lastbucket, at offset last. Both endpoints are inclusive. + * It must possible to get from firstbucket to lastbucket via following + * bucket 'next' pointers zero or more times. firstbucket, first, lastbucket, + * and last are readonly after initialization. An empty slice is represented + * by firstbucket == lastbucket == currentbucket == NULL. + * + * 'kind' determines whether this slice represents 'k'eys alone, 'v'alues + * alone, or 'i'items (key+value pairs). 'kind' is also readonly after + * initialization. + * + * The combination of currentbucket, currentoffset and pseudoindex acts as + * a search finger. Offset currentoffset in bucket currentbucket is at index + * pseudoindex, where pseudoindex==0 corresponds to offset first in bucket + * firstbucket, and pseudoindex==-1 corresponds to offset last in bucket + * lastbucket. 
The function BTreeItems_seek() can be used to set this combo + * correctly for any in-bounds index, and uses this combo on input to avoid + * needing to search from the start (or end) on each call. Calling + * BTreeItems_seek() with consecutive larger positions is very efficent. + * Calling it with consecutive smaller positions is more efficient than if + * a search finger weren't being used at all, but is still quadratic time + * in the number of buckets in the slice. + */ +typedef struct +{ + PyObject_HEAD + Bucket *firstbucket; /* First bucket */ + Bucket *currentbucket; /* Current bucket (search finger) */ + Bucket *lastbucket; /* Last bucket */ + int currentoffset; /* Offset in currentbucket */ + int pseudoindex; /* search finger index */ + int first; /* Start offset in firstbucket */ + int last; /* End offset in lastbucket */ + char kind; /* 'k', 'v', 'i' */ +} BTreeItems; + +#define ITEMS(O)((BTreeItems*)(O)) + +static PyObject * +newBTreeItems(char kind, + Bucket *lowbucket, int lowoffset, + Bucket *highbucket, int highoffset); + +static void +BTreeItems_dealloc(BTreeItems *self) +{ + Py_XDECREF(self->firstbucket); + Py_XDECREF(self->lastbucket); + Py_XDECREF(self->currentbucket); + PyObject_DEL(self); +} + +static Py_ssize_t +BTreeItems_length_or_nonzero(BTreeItems *self, int nonzero) +{ + Py_ssize_t r; + Bucket *b, *next; + + b = self->firstbucket; + if (b == NULL) + return 0; + + r = self->last + 1 - self->first; + + if (nonzero && r > 0) + /* Short-circuit if all we care about is nonempty */ + return 1; + + if (b == self->lastbucket) + return r; + + Py_INCREF(b); + PER_USE_OR_RETURN(b, -1); + while ((next = b->next)) + { + r += b->len; + if (nonzero && r > 0) + /* Short-circuit if all we care about is nonempty */ + break; + + if (next == self->lastbucket) + break; /* we already counted the last bucket */ + + Py_INCREF(next); + PER_UNUSE(b); + Py_DECREF(b); + b = next; + PER_USE_OR_RETURN(b, -1); + } + PER_UNUSE(b); + Py_DECREF(b); + + return r >= 0 ? 
r : 0; +} + +static Py_ssize_t +BTreeItems_length(BTreeItems *self) +{ + return BTreeItems_length_or_nonzero(self, 0); +} + +/* +** BTreeItems_seek +** +** Find the ith position in the BTreeItems. +** +** Arguments: self The BTree +** i the index to seek to, in 0 .. len(self)-1, or in +** -len(self) .. -1, as for indexing a Python sequence. +** +** +** Returns 0 if successful, -1 on failure to seek (like out-of-bounds). +** Upon successful return, index i is at offset self->currentoffset in bucket +** self->currentbucket. +*/ +static int +BTreeItems_seek(BTreeItems *self, Py_ssize_t i) +{ + int delta, pseudoindex, currentoffset; + Bucket *b, *currentbucket; + int error; + + pseudoindex = self->pseudoindex; + currentoffset = self->currentoffset; + currentbucket = self->currentbucket; + if (currentbucket == NULL) + goto no_match; + + delta = i - pseudoindex; + while (delta > 0) /* move right */ + { + int max; + /* Want to move right delta positions; the most we can move right in + * this bucket is currentbucket->len - currentoffset - 1 positions. + */ + PER_USE_OR_RETURN(currentbucket, -1); + max = currentbucket->len - currentoffset - 1; + b = currentbucket->next; + PER_UNUSE(currentbucket); + if (delta <= max) + { + currentoffset += delta; + pseudoindex += delta; + if (currentbucket == self->lastbucket + && currentoffset > self->last) + goto no_match; + break; + } + /* Move to start of next bucket. */ + if (currentbucket == self->lastbucket || b == NULL) + goto no_match; + currentbucket = b; + pseudoindex += max + 1; + delta -= max + 1; + currentoffset = 0; + } + while (delta < 0) /* move left */ + { + int status; + /* Want to move left -delta positions; the most we can move left in + * this bucket is currentoffset positions. + */ + if ((-delta) <= currentoffset) + { + currentoffset += delta; + pseudoindex += delta; + if (currentbucket == self->firstbucket + && currentoffset < self->first) + goto no_match; + break; + } + /* Move to end of previous bucket. 
*/ + if (currentbucket == self->firstbucket) + goto no_match; + status = PreviousBucket(¤tbucket, self->firstbucket); + if (status == 0) + goto no_match; + else if (status < 0) + return -1; + pseudoindex -= currentoffset + 1; + delta += currentoffset + 1; + PER_USE_OR_RETURN(currentbucket, -1); + currentoffset = currentbucket->len - 1; + PER_UNUSE(currentbucket); + } + + assert(pseudoindex == i); + + /* Alas, the user may have mutated the bucket since the last time we + * were called, and if they deleted stuff, we may be pointing into + * trash memory now. + */ + PER_USE_OR_RETURN(currentbucket, -1); + error = currentoffset < 0 || currentoffset >= currentbucket->len; + PER_UNUSE(currentbucket); + if (error) + { + PyErr_SetString(PyExc_RuntimeError, + "the bucket being iterated changed size"); + return -1; + } + + Py_INCREF(currentbucket); + Py_DECREF(self->currentbucket); + self->currentbucket = currentbucket; + self->currentoffset = currentoffset; + self->pseudoindex = pseudoindex; + return 0; + +no_match: + IndexError(i); + return -1; +} + + +/* Return the right kind ('k','v','i') of entry from bucket b at offset i. + * b must be activated. Returns NULL on error. 
+ */ +static PyObject * +getBucketEntry(Bucket *b, int i, char kind) +{ + PyObject *result = NULL; + + assert(b); + assert(0 <= i && i < b->len); + + switch (kind) + { + case 'k': + COPY_KEY_TO_OBJECT(result, b->keys[i]); + break; + + case 'v': + COPY_VALUE_TO_OBJECT(result, b->values[i]); + break; + + case 'i': + { + PyObject *key; + PyObject *value;; + + COPY_KEY_TO_OBJECT(key, b->keys[i]); + if (!key) + break; + + COPY_VALUE_TO_OBJECT(value, b->values[i]); + if (!value) + { + Py_DECREF(key); + break; + } + + result = PyTuple_New(2); + if (result) + { + PyTuple_SET_ITEM(result, 0, key); + PyTuple_SET_ITEM(result, 1, value); + } + else + { + Py_DECREF(key); + Py_DECREF(value); + } + break; + } + + default: + PyErr_SetString(PyExc_AssertionError, + "getBucketEntry: unknown kind"); + break; + } + return result; +} + +/* +** BTreeItems_item +** +** Arguments: self a BTreeItems structure +** i Which item to inspect +** +** Returns: the BTreeItems_item_BTree of self->kind, i +** (ie pulls the ith item out) +*/ +static PyObject * +BTreeItems_item(BTreeItems *self, Py_ssize_t i) +{ + PyObject *result; + + if (BTreeItems_seek(self, i) < 0) + return NULL; + + PER_USE_OR_RETURN(self->currentbucket, NULL); + result = getBucketEntry(self->currentbucket, self->currentoffset, + self->kind); + PER_UNUSE(self->currentbucket); + return result; +} + +/* +** BTreeItems_slice +** +** Creates a new BTreeItems structure representing the slice +** between the low and high range +** +** Arguments: self The old BTreeItems structure +** ilow The start index +** ihigh The end index +** +** Returns: BTreeItems item +*/ +static PyObject * +BTreeItems_slice(BTreeItems *self, Py_ssize_t ilow, Py_ssize_t ihigh) +{ + Bucket *lowbucket; + Bucket *highbucket; + int lowoffset; + int highoffset; + Py_ssize_t length = -1; /* len(self), but computed only if needed */ + + /* Complications: + * A Python slice never raises IndexError, but BTreeItems_seek does. 
+ * Python did only part of index normalization before calling this: + * ilow may be < 0 now, and ihigh may be arbitrarily large. It's + * our responsibility to clip them. + * A Python slice is exclusive of the high index, but a BTreeItems + * struct is inclusive on both ends. + */ + + /* First adjust ilow and ihigh to be legit endpoints in the Python + * sense (ilow inclusive, ihigh exclusive). This block duplicates the + * logic from Python's list_slice function (slicing for builtin lists). + */ + if (ilow < 0) + ilow = 0; + else + { + if (length < 0) + length = BTreeItems_length(self); + if (ilow > length) + ilow = length; + } + + if (ihigh < ilow) + ihigh = ilow; + else + { + if (length < 0) + length = BTreeItems_length(self); + if (ihigh > length) + ihigh = length; + } + assert(0 <= ilow && ilow <= ihigh); + assert(length < 0 || ihigh <= length); + + /* Now adjust for that our struct is inclusive on both ends. This is + * easy *except* when the slice is empty: there's no good way to spell + * that in an inclusive-on-both-ends scheme. For example, if the + * slice is btree.items([:0]), ilow == ihigh == 0 at this point, and if + * we were to subtract 1 from ihigh that would get interpreted by + * BTreeItems_seek as meaning the *entire* set of items. Setting ilow==1 + * and ihigh==0 doesn't work either, as BTreeItems_seek raises IndexError + * if we attempt to seek to ilow==1 when the underlying sequence is empty. + * It seems simplest to deal with empty slices as a special case here. 
+ */ + if (ilow == ihigh) /* empty slice */ + { + lowbucket = highbucket = NULL; + lowoffset = 1; + highoffset = 0; + } + else + { + assert(ilow < ihigh); + --ihigh; /* exclusive -> inclusive */ + + if (BTreeItems_seek(self, ilow) < 0) + return NULL; + lowbucket = self->currentbucket; + lowoffset = self->currentoffset; + + if (BTreeItems_seek(self, ihigh) < 0) + return NULL; + + highbucket = self->currentbucket; + highoffset = self->currentoffset; + } + return newBTreeItems(self->kind, + lowbucket, lowoffset, highbucket, highoffset); +} + +static PyObject * +BTreeItems_subscript(BTreeItems *self, PyObject* subscript) +{ + Py_ssize_t len = BTreeItems_length_or_nonzero(self, 0); + + if (PyIndex_Check(subscript)) + { + Py_ssize_t i = PyNumber_AsSsize_t(subscript, PyExc_IndexError); + if (i == -1 && PyErr_Occurred()) + return NULL; + if (i < 0) + i += len; + return BTreeItems_item(self, i); + } + if (PySlice_Check(subscript)) + { + Py_ssize_t start, stop, step, slicelength; + +#ifdef PY3K +#define SLICEOBJ(x) (x) +#else +#define SLICEOBJ(x) (PySliceObject*)(x) +#endif + + if (PySlice_GetIndicesEx(SLICEOBJ(subscript), len, + &start, &stop, &step, &slicelength) < 0) + { + return NULL; + } + + if (step != 1) + { + PyErr_SetString(PyExc_RuntimeError, + "slices must have step size of 1"); + return NULL; + } + return BTreeItems_slice(self, start, stop); + } + PyErr_SetString(PyExc_RuntimeError, + "Unknown index type: must be int or slice"); + return NULL; +} + +/* Py3K doesn't honor sequence slicing, so implement via mapping */ +static PyMappingMethods BTreeItems_as_mapping = { + (lenfunc)BTreeItems_length, /* mp_length */ + (binaryfunc)BTreeItems_subscript, /* mp_subscript */ +}; + +static PySequenceMethods BTreeItems_as_sequence = +{ + (lenfunc) BTreeItems_length, /* sq_length */ + (binaryfunc)0, /* sq_concat */ + (ssizeargfunc)0, /* sq_repeat */ + (ssizeargfunc) BTreeItems_item, /* sq_item */ +#ifndef PY3K + /* Py3K doesn't honor this slot */ + (ssizessizeargfunc) 
BTreeItems_slice, /* sq_slice */ +#endif +}; + +/* Number Method items (just for nb_nonzero!) */ + +static int +BTreeItems_nonzero(BTreeItems *self) +{ + return BTreeItems_length_or_nonzero(self, 1); +} + +static PyNumberMethods BTreeItems_as_number_for_nonzero = { + 0, /* nb_add */ + 0, /* nb_subtract */ + 0, /* nb_multiply */ +#ifndef PY3K + 0, /* nb_divide */ +#endif + 0, /* nb_remainder */ + 0, /* nb_divmod */ + 0, /* nb_power */ + 0, /* nb_negative */ + 0, /* nb_positive */ + 0, /* nb_absolute */ + (inquiry)BTreeItems_nonzero /* nb_nonzero */ +}; + +static PyTypeObject BTreeItemsType = { + PyVarObject_HEAD_INIT(NULL, 0) + MOD_NAME_PREFIX "BTreeItems", /* tp_name */ + sizeof(BTreeItems), /* tp_basicsize */ + 0, /* tp_itemsize */ + /* methods */ + (destructor) BTreeItems_dealloc, /* tp_dealloc */ + 0, /* tp_print */ + 0, /* obsolete tp_getattr */ + 0, /* obsolete tp_setattr */ + 0, /* tp_compare */ + 0, /* tp_repr */ + &BTreeItems_as_number_for_nonzero, /* tp_as_number */ + &BTreeItems_as_sequence, /* tp_as_sequence */ + &BTreeItems_as_mapping, /* tp_as_mapping */ + (hashfunc)0, /* tp_hash */ + (ternaryfunc)0, /* tp_call */ + (reprfunc)0, /* tp_str */ + 0, /* tp_getattro */ + 0, /* tp_setattro */ + /* Space for future expansion */ + 0L,0L, + "Sequence type used to iterate over BTree items." /* Documentation string */ +}; + +/* Returns a new BTreeItems object representing the contiguous slice from + * offset lowoffset in bucket lowbucket through offset highoffset in bucket + * highbucket, inclusive. Pass lowbucket == NULL for an empty slice. + * The currentbucket is set to lowbucket, currentoffset ot lowoffset, and + * pseudoindex to 0. kind is 'k', 'v' or 'i' (see BTreeItems struct docs). 
+ */ +static PyObject * +newBTreeItems(char kind, + Bucket *lowbucket, int lowoffset, + Bucket *highbucket, int highoffset) +{ + BTreeItems *self; + + UNLESS (self = PyObject_NEW(BTreeItems, &BTreeItemsType)) + return NULL; + self->kind=kind; + + self->first=lowoffset; + self->last=highoffset; + + if (! lowbucket || ! highbucket + || (lowbucket == highbucket && lowoffset > highoffset)) + { + self->firstbucket = 0; + self->lastbucket = 0; + self->currentbucket = 0; + } + else + { + Py_INCREF(lowbucket); + self->firstbucket = lowbucket; + Py_INCREF(highbucket); + self->lastbucket = highbucket; + Py_INCREF(lowbucket); + self->currentbucket = lowbucket; + } + + self->currentoffset = lowoffset; + self->pseudoindex = 0; + + return OBJECT(self); +} + +static int +nextBTreeItems(SetIteration *i) +{ + if (i->position >= 0) + { + if (i->position) + { + DECREF_KEY(i->key); + DECREF_VALUE(i->value); + } + + if (BTreeItems_seek(ITEMS(i->set), i->position) >= 0) + { + Bucket *currentbucket; + + currentbucket = BUCKET(ITEMS(i->set)->currentbucket); + UNLESS(PER_USE(currentbucket)) + { + /* Mark iteration terminated, so that finiSetIteration doesn't + * try to redundantly decref the key and value + */ + i->position = -1; + return -1; + } + + COPY_KEY(i->key, currentbucket->keys[ITEMS(i->set)->currentoffset]); + INCREF_KEY(i->key); + + COPY_VALUE(i->value, + currentbucket->values[ITEMS(i->set)->currentoffset]); + INCREF_VALUE(i->value); + + i->position ++; + + PER_UNUSE(currentbucket); + } + else + { + i->position = -1; + PyErr_Clear(); + } + } + return 0; +} + +static int +nextTreeSetItems(SetIteration *i) +{ + if (i->position >= 0) + { + if (i->position) + { + DECREF_KEY(i->key); + } + + if (BTreeItems_seek(ITEMS(i->set), i->position) >= 0) + { + Bucket *currentbucket; + + currentbucket = BUCKET(ITEMS(i->set)->currentbucket); + UNLESS(PER_USE(currentbucket)) + { + /* Mark iteration terminated, so that finiSetIteration doesn't + * try to redundantly decref the key and value + */ + 
i->position = -1; + return -1; + } + + COPY_KEY(i->key, currentbucket->keys[ITEMS(i->set)->currentoffset]); + INCREF_KEY(i->key); + + i->position ++; + + PER_UNUSE(currentbucket); + } + else + { + i->position = -1; + PyErr_Clear(); + } + } + return 0; +} + +/* Support for the iteration protocol new in Python 2.2. */ + +static PyTypeObject BTreeIter_Type; + +/* The type of iterator objects, returned by e.g. iter(IIBTree()). */ +typedef struct +{ + PyObject_HEAD + /* We use a BTreeItems object because it's convenient and flexible. + * We abuse it two ways: + * 1. We set currentbucket to NULL when the iteration is finished. + * 2. We don't bother keeping pseudoindex in synch. + */ + BTreeItems *pitems; +} BTreeIter; + +/* Return a new iterator object, to traverse the keys and/or values + * represented by pitems. pitems must not be NULL. Returns NULL if error. + */ +static BTreeIter * +BTreeIter_new(BTreeItems *pitems) +{ + BTreeIter *result; + + assert(pitems != NULL); + result = PyObject_New(BTreeIter, &BTreeIter_Type); + if (result) + { + Py_INCREF(pitems); + result->pitems = pitems; + } + return result; +} + +/* The iterator's tp_dealloc slot. */ +static void +BTreeIter_dealloc(BTreeIter *bi) +{ + Py_DECREF(bi->pitems); + PyObject_Del(bi); +} + +/* The implementation of the iterator's tp_iternext slot. Returns "the next" + * item; returns NULL if error; returns NULL without setting an error if the + * iteration is exhausted (that's the way to terminate the iteration protocol). + */ +static PyObject * +BTreeIter_next(BTreeIter *bi, PyObject *args) +{ + PyObject *result = NULL; /* until proven innocent */ + BTreeItems *items = bi->pitems; + int i = items->currentoffset; + Bucket *bucket = items->currentbucket; + + if (bucket == NULL) /* iteration termination is sticky */ + return NULL; + + PER_USE_OR_RETURN(bucket, NULL); + if (i >= bucket->len) + { + /* We never leave this routine normally with i >= len: somebody + * else mutated the current bucket. 
+ */ + PyErr_SetString(PyExc_RuntimeError, + "the bucket being iterated changed size"); + /* Arrange for that this error is sticky too. */ + items->currentoffset = INT_MAX; + goto Done; + } + + /* Build the result object, from bucket at offset i. */ + result = getBucketEntry(bucket, i, items->kind); + + /* Advance position for next call. */ + if (bucket == items->lastbucket && i >= items->last) + { + /* Next call should terminate the iteration. */ + Py_DECREF(items->currentbucket); + items->currentbucket = NULL; + } + else + { + ++i; + if (i >= bucket->len) + { + Py_XINCREF(bucket->next); + items->currentbucket = bucket->next; + Py_DECREF(bucket); + i = 0; + } + items->currentoffset = i; + } + +Done: + PER_UNUSE(bucket); + return result; +} + +static PyObject * +BTreeIter_getiter(PyObject *it) +{ + Py_INCREF(it); + return it; +} + +static PyTypeObject BTreeIter_Type = { + PyVarObject_HEAD_INIT(NULL, 0) + MODULE_NAME MOD_NAME_PREFIX "TreeIterator", /* tp_name */ + sizeof(BTreeIter), /* tp_basicsize */ + 0, /* tp_itemsize */ + /* methods */ + (destructor)BTreeIter_dealloc, /* tp_dealloc */ + 0, /* tp_print */ + 0, /* tp_getattr */ + 0, /* tp_setattr */ + 0, /* tp_compare */ + 0, /* tp_repr */ + 0, /* tp_as_number */ + 0, /* tp_as_sequence */ + 0, /* tp_as_mapping */ + 0, /* tp_hash */ + 0, /* tp_call */ + 0, /* tp_str */ + 0, /*PyObject_GenericGetAttr,*/ /* tp_getattro */ + 0, /* tp_setattro */ + 0, /* tp_as_buffer */ + Py_TPFLAGS_DEFAULT, /* tp_flags */ + 0, /* tp_doc */ + 0, /* tp_traverse */ + 0, /* tp_clear */ + 0, /* tp_richcompare */ + 0, /* tp_weaklistoffset */ + (getiterfunc)BTreeIter_getiter, /* tp_iter */ + (iternextfunc)BTreeIter_next, /* tp_iternext */ + 0, /* tp_methods */ + 0, /* tp_members */ + 0, /* tp_getset */ + 0, /* tp_base */ + 0, /* tp_dict */ + 0, /* tp_descr_get */ + 0, /* tp_descr_set */ +}; diff --git a/thesisenv/lib/python3.6/site-packages/BTrees/BTreeModuleTemplate.c b/thesisenv/lib/python3.6/site-packages/BTrees/BTreeModuleTemplate.c new 
file mode 100644 index 0000000..367cc50 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/BTrees/BTreeModuleTemplate.c @@ -0,0 +1,671 @@ +/***************************************************************************** + + Copyright (c) 2001, 2002 Zope Foundation and Contributors. + All Rights Reserved. + + This software is subject to the provisions of the Zope Public License, + Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. + THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED + WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED + WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS + FOR A PARTICULAR PURPOSE + + ****************************************************************************/ + +#include "Python.h" +/* include structmember.h for offsetof */ +#include "structmember.h" +#include "bytesobject.h" + +#ifdef PERSISTENT +#include "persistent/cPersistence.h" +#else +#define PER_USE_OR_RETURN(self, NULL) +#define PER_ALLOW_DEACTIVATION(self) +#define PER_PREVENT_DEACTIVATION(self) +#define PER_DEL(self) +#define PER_USE(O) 1 +#define PER_ACCESSED(O) 1 +#endif + +#include "_compat.h" + +/* So sue me. This pair gets used all over the place, so much so that it + * interferes with understanding non-persistence parts of algorithms. + * PER_UNUSE can be used after a successul PER_USE or PER_USE_OR_RETURN. + * It allows the object to become ghostified, and tells the persistence + * machinery that the object's fields were used recently. + */ +#define PER_UNUSE(OBJ) do { \ + PER_ALLOW_DEACTIVATION(OBJ); \ + PER_ACCESSED(OBJ); \ +} while (0) + +/* The tp_name slots of the various BTree types contain the fully + * qualified names of the types, e.g. zodb.btrees.OOBTree.OOBTree. + * The full name is usd to support pickling and because it is not + * possible to modify the __module__ slot of a type dynamically. (This + * may be a bug in Python 2.2). 
+ * + * The MODULE_NAME here used to be "BTrees._". We actually want the module + * name to point to the Python module rather than the C, so the underline + * is now removed. + */ +#define MODULE_NAME "BTrees." MOD_NAME_PREFIX "BTree." + +static PyObject *sort_str, *reverse_str, *__setstate___str; +static PyObject *_bucket_type_str, *max_internal_size_str, *max_leaf_size_str; +static PyObject *ConflictError = NULL; + +static void PyVar_Assign(PyObject **v, PyObject *e) { Py_XDECREF(*v); *v=e;} +#define ASSIGN(V,E) PyVar_Assign(&(V),(E)) +#define UNLESS(E) if (!(E)) +#define OBJECT(O) ((PyObject*)(O)) + +#define MIN_BUCKET_ALLOC 16 + +#define SameType_Check(O1, O2) (Py_TYPE((O1))==Py_TYPE((O2))) + +#define ASSERT(C, S, R) if (! (C)) { \ + PyErr_SetString(PyExc_AssertionError, (S)); return (R); } + + +#ifdef NEED_LONG_LONG_SUPPORT +/* Helper code used to support long long instead of int. */ + +#ifndef PY_LONG_LONG +#error "PY_LONG_LONG required but not defined" +#endif + +#ifdef NEED_LONG_LONG_KEYS +static int +longlong_check(PyObject *ob) +{ + if (INT_CHECK(ob)) + return 1; + + if (PyLong_Check(ob)) { + int overflow; + (void)PyLong_AsLongLongAndOverflow(ob, &overflow); + if (overflow) + goto overflow; + return 1; + } + return 0; +overflow: + PyErr_SetString(PyExc_ValueError, + "longlong_check: long integer out of range"); + return 0; +} +#endif + +static PyObject * +longlong_as_object(PY_LONG_LONG val) +{ + if ((val > LONG_MAX) || (val < LONG_MIN)) + return PyLong_FromLongLong(val); + return INT_FROM_LONG((long)val); +} + + +static int +longlong_convert(PyObject *ob, PY_LONG_LONG *value) +{ +#ifndef PY3K + if (PyInt_Check(ob)) + { + (*value) = (PY_LONG_LONG)PyInt_AS_LONG(ob); + return 1; + } +#endif + + if (!PyLong_Check(ob)) + { + PyErr_SetString(PyExc_TypeError, "expected integer key"); + return 0; + } + else + { + PY_LONG_LONG val; + int overflow; + val = PyLong_AsLongLongAndOverflow(ob, &overflow); + if (overflow) + goto overflow; + (*value) = val; + return 1; + 
} +overflow: + PyErr_SetString(PyExc_ValueError, "long integer out of range"); + return 0; +} +#endif /* NEED_LONG_LONG_SUPPORT */ + + +/* Various kinds of BTree and Bucket structs are instances of + * "sized containers", and have a common initial layout: + * The stuff needed for all Python objects, or all Persistent objects. + * int size: The maximum number of things that could be contained + * without growing the container. + * int len: The number of things currently contained. + * + * Invariant: 0 <= len <= size. + * + * A sized container typically goes on to declare one or more pointers + * to contiguous arrays with 'size' elements each, the initial 'len' of + * which are currently in use. + */ +#ifdef PERSISTENT +#define sizedcontainer_HEAD \ + cPersistent_HEAD \ + int size; \ + int len; +#else +#define sizedcontainer_HEAD \ + PyObject_HEAD \ + int size; \ + int len; +#endif + +/* Nothing is actually of type Sized, but (pointers to) BTree nodes and + * Buckets can be cast to Sized* in contexts that only need to examine + * the members common to all sized containers. + */ +typedef struct Sized_s { + sizedcontainer_HEAD +} Sized; + +#define SIZED(O) ((Sized*)(O)) + +/* A Bucket wraps contiguous vectors of keys and values. Keys are unique, + * and stored in sorted order. The 'values' pointer may be NULL if the + * Bucket is used to implement a set. Buckets serving as leafs of BTrees + * are chained together via 'next', so that the entire BTree contents + * can be traversed in sorted order quickly and easily. + */ +typedef struct Bucket_s { + sizedcontainer_HEAD + struct Bucket_s *next; /* the bucket with the next-larger keys */ + KEY_TYPE *keys; /* 'len' keys, in increasing order */ + VALUE_TYPE *values; /* 'len' corresponding values; NULL if a set */ +} Bucket; + +#define BUCKET(O) ((Bucket*)(O)) + +/* A BTree is complicated. See Maintainer.txt. 
+ */ + +typedef struct BTreeItem_s { + KEY_TYPE key; + Sized *child; /* points to another BTree, or to a Bucket of some sort */ +} BTreeItem; + +typedef struct BTree_s { + sizedcontainer_HEAD + + /* firstbucket points to the bucket containing the smallest key in + * the BTree. This is found by traversing leftmost child pointers + * (data[0].child) until reaching a Bucket. + */ + Bucket *firstbucket; + + /* The BTree points to 'len' children, via the "child" fields of the data + * array. There are len-1 keys in the 'key' fields, stored in increasing + * order. data[0].key is unused. For i in 0 .. len-1, all keys reachable + * from data[i].child are >= data[i].key and < data[i+1].key, at the + * endpoints pretending that data[0].key is minus infinity and + * data[len].key is positive infinity. + */ + BTreeItem *data; + long max_internal_size; + long max_leaf_size; +} BTree; + +static PyTypeObject BTreeType; +static PyTypeObject BucketType; + +#define BTREE(O) ((BTree*)(O)) + +/* Use BTREE_SEARCH to find which child pointer to follow. + * RESULT An int lvalue to hold the index i such that SELF->data[i].child + * is the correct node to search next. + * SELF A pointer to a BTree node. + * KEY The key you're looking for, of type KEY_TYPE. + * ONERROR What to do if key comparison raises an exception; for example, + * perhaps 'return NULL'. + * + * See Maintainer.txt for discussion: this is optimized in subtle ways. + * It's recommended that you call this at the start of a routine, waiting + * to check for self->len == 0 after. + */ +#define BTREE_SEARCH(RESULT, SELF, KEY, ONERROR) { \ + int _lo = 0; \ + int _hi = (SELF)->len; \ + int _i, _cmp; \ + for (_i = _hi >> 1; _i > _lo; _i = (_lo + _hi) >> 1) { \ + TEST_KEY_SET_OR(_cmp, (SELF)->data[_i].key, (KEY)) \ + ONERROR; \ + if (_cmp < 0) _lo = _i; \ + else if (_cmp > 0) _hi = _i; \ + else /* equal */ break; \ + } \ + (RESULT) = _i; \ +} + +/* SetIteration structs are used in the internal set iteration protocol. 
+ * When you want to iterate over a set or bucket or BTree (even an + * individual key!), + * 1. Declare a new iterator: + * SetIteration si = {0,0,0}; + * Using "{0,0,0}" or "{0,0}" appear most common. Only one {0} is + * necssary. At least one must be given so that finiSetIteration() works + * correctly even if you don't get around to calling initSetIteration(). + * 2. Initialize it via + * initSetIteration(&si, PyObject *s, useValues) + * It's an error if that returns an int < 0. In case of error on the + * init call, calling finiSetIteration(&si) is optional. But if the + * init call succeeds, you must eventually call finiSetIteration(), + * and whether or not subsequent calls to si.next() fail. + * 3. Get the first element: + * if (si.next(&si) < 0) { there was an error } + * If the set isn't empty, this sets si.position to an int >= 0, + * si.key to the element's key (of type KEY_TYPE), and maybe si.value to + * the element's value (of type VALUE_TYPE). si.value is defined + * iff si.usesValue is true. + * 4. Process all the elements: + * while (si.position >= 0) { + * do something with si.key and/or si.value; + * if (si.next(&si) < 0) { there was an error; } + * } + * 5. Finalize the SetIterator: + * finiSetIteration(&si); + * This is mandatory! si may contain references to iterator objects, + * keys and values, and they must be cleaned up else they'll leak. If + * this were C++ we'd hide that in the destructor, but in C you have to + * do it by hand. + */ +typedef struct SetIteration_s +{ + PyObject *set; /* the set, bucket, BTree, ..., being iterated */ + int position; /* initialized to 0; set to -1 by next() when done */ + int usesValue; /* true iff 'set' has values & we iterate them */ + KEY_TYPE key; /* next() sets to next key */ + VALUE_TYPE value; /* next() may set to next value */ + int (*next)(struct SetIteration_s*); /* function to get next key+value */ +} SetIteration; + +/* Finish the set iteration protocol. 
This MUST be called by everyone + * who starts a set iteration, unless the initial call to initSetIteration + * failed; in that case, and only that case, calling finiSetIteration is + * optional. + */ +static void +finiSetIteration(SetIteration *i) +{ + assert(i != NULL); + if (i->set == NULL) + return; + Py_DECREF(i->set); + i->set = NULL; /* so it doesn't hurt to call this again */ + + if (i->position > 0) { + /* next() was called at least once, but didn't finish iterating + * (else position would be negative). So the cached key and + * value need to be cleaned up. + */ + DECREF_KEY(i->key); + if (i->usesValue) { + DECREF_VALUE(i->value); + } + } + i->position = -1; /* stop any stray next calls from doing harm */ +} + +static PyObject * +IndexError(int i) +{ + PyObject *v; + + v = INT_FROM_LONG(i); + if (!v) { + v = Py_None; + Py_INCREF(v); + } + PyErr_SetObject(PyExc_IndexError, v); + Py_DECREF(v); + return NULL; +} + +/* Search for the bucket immediately preceding *current, in the bucket chain + * starting at first. current, *current and first must not be NULL. 
+ * + * Return: + * 1 *current holds the correct bucket; this is a borrowed reference + * 0 no such bucket exists; *current unaltered + * -1 error; *current unaltered + */ +static int +PreviousBucket(Bucket **current, Bucket *first) +{ + Bucket *trailing = NULL; /* first travels; trailing follows it */ + int result = 0; + + assert(current && *current && first); + if (first == *current) + return 0; + + do { + trailing = first; + PER_USE_OR_RETURN(first, -1); + first = first->next; + + ((trailing)->state==cPersistent_STICKY_STATE + && + ((trailing)->state=cPersistent_UPTODATE_STATE)); + + PER_ACCESSED(trailing); + + if (first == *current) { + *current = trailing; + result = 1; + break; + } + } while (first); + + return result; +} + +static void * +BTree_Malloc(size_t sz) +{ + void *r; + + ASSERT(sz > 0, "non-positive size malloc", NULL); + + r = malloc(sz); + if (r) + return r; + + PyErr_NoMemory(); + return NULL; +} + +static void * +BTree_Realloc(void *p, size_t sz) +{ + void *r; + + ASSERT(sz > 0, "non-positive size realloc", NULL); + + if (p) + r = realloc(p, sz); + else + r = malloc(sz); + + UNLESS (r) + PyErr_NoMemory(); + + return r; +} + +/* Shared keyword-argument list for BTree/Bucket + * (iter)?(keys|values|items) + */ +static char *search_keywords[] = {"min", "max", + "excludemin", "excludemax", + 0}; + +#include "BTreeItemsTemplate.c" +#include "BucketTemplate.c" +#include "SetTemplate.c" +#include "BTreeTemplate.c" +#include "TreeSetTemplate.c" +#include "SetOpTemplate.c" +#include "MergeTemplate.c" + +static struct PyMethodDef module_methods[] = { + {"difference", (PyCFunction) difference_m, METH_VARARGS, + "difference(o1, o2) -- " + "compute the difference between o1 and o2" + }, + {"union", (PyCFunction) union_m, METH_VARARGS, + "union(o1, o2) -- compute the union of o1 and o2\n" + }, + {"intersection", (PyCFunction) intersection_m, METH_VARARGS, + "intersection(o1, o2) -- " + "compute the intersection of o1 and o2" + }, +#ifdef MERGE + 
{"weightedUnion", (PyCFunction) wunion_m, METH_VARARGS, + "weightedUnion(o1, o2 [, w1, w2]) -- compute the union of o1 and o2\n" + "\nw1 and w2 are weights." + }, + {"weightedIntersection", (PyCFunction) wintersection_m, METH_VARARGS, + "weightedIntersection(o1, o2 [, w1, w2]) -- " + "compute the intersection of o1 and o2\n" + "\nw1 and w2 are weights." + }, +#endif +#ifdef MULTI_INT_UNION + {"multiunion", (PyCFunction) multiunion_m, METH_VARARGS, + "multiunion(seq) -- compute union of a sequence of integer sets.\n" + "\n" + "Each element of seq must be an integer set, or convertible to one\n" + "via the set iteration protocol. The union returned is an IISet." + }, +#endif + {NULL, NULL} /* sentinel */ +}; + +static char BTree_module_documentation[] = +"\n" +MASTER_ID +BTREEITEMSTEMPLATE_C +"$Id$\n" +BTREETEMPLATE_C +BUCKETTEMPLATE_C +KEYMACROS_H +MERGETEMPLATE_C +SETOPTEMPLATE_C +SETTEMPLATE_C +TREESETTEMPLATE_C +VALUEMACROS_H +BTREEITEMSTEMPLATE_C +; + +int +init_persist_type(PyTypeObject *type) +{ +#ifdef PY3K + ((PyObject*)type)->ob_type = &PyType_Type; +#else + type->ob_type = &PyType_Type; +#endif + type->tp_base = cPersistenceCAPI->pertype; + + if (PyType_Ready(type) < 0) + return 0; + + return 1; +} + +#ifdef PY3K +static struct PyModuleDef moduledef = { + PyModuleDef_HEAD_INIT, + "_" MOD_NAME_PREFIX "BTree", /* m_name */ + BTree_module_documentation, /* m_doc */ + -1, /* m_size */ + module_methods, /* m_methods */ + NULL, /* m_reload */ + NULL, /* m_traverse */ + NULL, /* m_clear */ + NULL, /* m_free */ + }; + +#endif + +static PyObject* +module_init(void) +{ + PyObject *module, *mod_dict, *interfaces, *conflicterr; + +#ifdef KEY_TYPE_IS_PYOBJECT + object_ = PyTuple_GetItem(Py_TYPE(Py_None)->tp_bases, 0); + if (object_ == NULL) + return NULL; +#endif + + sort_str = INTERN("sort"); + if (!sort_str) + return NULL; + reverse_str = INTERN("reverse"); + if (!reverse_str) + return NULL; + __setstate___str = INTERN("__setstate__"); + if (!__setstate___str) + 
return NULL; + _bucket_type_str = INTERN("_bucket_type"); + if (!_bucket_type_str) + return NULL; + + max_internal_size_str = INTERN("max_internal_size"); + if (! max_internal_size_str) + return NULL; + max_leaf_size_str = INTERN("max_leaf_size"); + if (! max_leaf_size_str) + return NULL; + + /* Grab the ConflictError class */ + interfaces = PyImport_ImportModule("BTrees.Interfaces"); + if (interfaces != NULL) + { + conflicterr = PyObject_GetAttrString(interfaces, "BTreesConflictError"); + if (conflicterr != NULL) + ConflictError = conflicterr; + Py_DECREF(interfaces); + } + + if (ConflictError == NULL) + { + Py_INCREF(PyExc_ValueError); + ConflictError=PyExc_ValueError; + } + + /* Initialize the PyPersist_C_API and the type objects. */ +#ifdef PY3K + cPersistenceCAPI = (cPersistenceCAPIstruct *)PyCapsule_Import( + "persistent.cPersistence.CAPI", 0); +#else + cPersistenceCAPI = (cPersistenceCAPIstruct *)PyCObject_Import( + "persistent.cPersistence", "CAPI"); +#endif + if (cPersistenceCAPI == NULL) { + /* The Capsule API attempts to import 'persistent' and then + * walk down to the specified attribute using getattr. If the C + * extensions aren't available, this can result in an + * AttributeError being raised. Let that percolate up as an + * ImportError so it can be caught in the expected way. 
+ */ + if (PyErr_Occurred() && !PyErr_ExceptionMatches(PyExc_ImportError)) { + PyErr_SetString(PyExc_ImportError, "persistent C extension unavailable"); + } + return NULL; + } + +#ifdef PY3K +#define _SET_TYPE(typ) ((PyObject*)(&typ))->ob_type = &PyType_Type +#else +#define _SET_TYPE(typ) (typ).ob_type = &PyType_Type +#endif + _SET_TYPE(BTreeItemsType); + _SET_TYPE(BTreeIter_Type); + BTreeIter_Type.tp_getattro = PyObject_GenericGetAttr; + BucketType.tp_new = PyType_GenericNew; + SetType.tp_new = PyType_GenericNew; + BTreeType.tp_new = PyType_GenericNew; + TreeSetType.tp_new = PyType_GenericNew; + if (!init_persist_type(&BucketType)) + return NULL; + if (!init_persist_type(&BTreeType)) + return NULL; + if (!init_persist_type(&SetType)) + return NULL; + if (!init_persist_type(&TreeSetType)) + return NULL; + + if (PyDict_SetItem(BTreeType.tp_dict, _bucket_type_str, + (PyObject *)&BucketType) < 0) + { + fprintf(stderr, "btree failed\n"); + return NULL; + } + if (PyDict_SetItem(TreeSetType.tp_dict, _bucket_type_str, + (PyObject *)&SetType) < 0) + { + fprintf(stderr, "bucket failed\n"); + return NULL; + } + + /* Create the module and add the functions */ +#ifdef PY3K + module = PyModule_Create(&moduledef); +#else + module = Py_InitModule4("_" MOD_NAME_PREFIX "BTree", + module_methods, BTree_module_documentation, + (PyObject *)NULL, PYTHON_API_VERSION); +#endif + + /* Add some symbolic constants to the module */ + mod_dict = PyModule_GetDict(module); + if (PyDict_SetItemString(mod_dict, MOD_NAME_PREFIX "Bucket", + (PyObject *)&BucketType) < 0) + return NULL; + if (PyDict_SetItemString(mod_dict, MOD_NAME_PREFIX "BTree", + (PyObject *)&BTreeType) < 0) + return NULL; + if (PyDict_SetItemString(mod_dict, MOD_NAME_PREFIX "Set", + (PyObject *)&SetType) < 0) + return NULL; + if (PyDict_SetItemString(mod_dict, MOD_NAME_PREFIX "TreeSet", + (PyObject *)&TreeSetType) < 0) + return NULL; + if (PyDict_SetItemString(mod_dict, MOD_NAME_PREFIX "TreeIterator", + (PyObject 
*)&BTreeIter_Type) < 0) + return NULL; + /* We also want to be able to access these constants without the prefix + * so that code can more easily exchange modules (particularly the integer + * and long modules, but also others). The TreeIterator is only internal, + * so we don't bother to expose that. + */ + if (PyDict_SetItemString(mod_dict, "Bucket", + (PyObject *)&BucketType) < 0) + return NULL; + if (PyDict_SetItemString(mod_dict, "BTree", + (PyObject *)&BTreeType) < 0) + return NULL; + if (PyDict_SetItemString(mod_dict, "Set", + (PyObject *)&SetType) < 0) + return NULL; + if (PyDict_SetItemString(mod_dict, "TreeSet", + (PyObject *)&TreeSetType) < 0) + return NULL; +#if defined(ZODB_64BIT_INTS) && defined(NEED_LONG_LONG_SUPPORT) + if (PyDict_SetItemString(mod_dict, "using64bits", Py_True) < 0) + return NULL; +#else + if (PyDict_SetItemString(mod_dict, "using64bits", Py_False) < 0) + return NULL; +#endif + return module; +} + +#ifdef PY3K +PyMODINIT_FUNC INITMODULE(void) +{ + return module_init(); +} +#else +PyMODINIT_FUNC INITMODULE(void) +{ + module_init(); +} +#endif diff --git a/thesisenv/lib/python3.6/site-packages/BTrees/BTreeTemplate.c b/thesisenv/lib/python3.6/site-packages/BTrees/BTreeTemplate.c new file mode 100644 index 0000000..cd61adb --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/BTrees/BTreeTemplate.c @@ -0,0 +1,2474 @@ +/***************************************************************************** + + Copyright (c) 2001, 2002 Zope Foundation and Contributors. + All Rights Reserved. + + This software is subject to the provisions of the Zope Public License, + Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+ THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED + WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED + WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS + FOR A PARTICULAR PURPOSE + +****************************************************************************/ +#include "_compat.h" + +#define BTREETEMPLATE_C "$Id$\n" + +static long +_get_max_size(BTree *self, PyObject *name, long default_max) +{ + PyObject *size; + long isize; + + size = PyObject_GetAttr(OBJECT(OBJECT(self)->ob_type), name); + if (size == NULL) { + PyErr_Clear(); + return default_max; + } +#ifdef PY3K + isize = PyLong_AsLong(size); +#else + isize = PyInt_AsLong(size); +#endif + + Py_DECREF(size); + if (isize <= 0 && ! PyErr_Occurred()) { + PyErr_SetString(PyExc_ValueError, + "non-positive max size in BTree subclass"); + return -1; + } + + return isize; +} + +static int +_max_internal_size(BTree *self) +{ + long isize; + + if (self->max_internal_size > 0) return self->max_internal_size; + isize = _get_max_size(self, max_internal_size_str, DEFAULT_MAX_BTREE_SIZE); + self->max_internal_size = isize; + return isize; +} + +static int +_max_leaf_size(BTree *self) +{ + long isize; + + if (self->max_leaf_size > 0) return self->max_leaf_size; + isize = _get_max_size(self, max_leaf_size_str, DEFAULT_MAX_BUCKET_SIZE); + self->max_leaf_size = isize; + return isize; +} + +/* Sanity-check a BTree. This is a private helper for BTree_check. Return: + * -1 Error. If it's an internal inconsistency in the BTree, + * AssertionError is set. + * 0 No problem found. + * + * nextbucket is the bucket "one beyond the end" of the BTree; the last bucket + * directly reachable from following right child pointers *should* be linked + * to nextbucket (and this is checked). 
+ */ +static int +BTree_check_inner(BTree *self, Bucket *nextbucket) +{ + int i; + Bucket *bucketafter; + Sized *child; + char *errormsg = "internal error"; /* someone should have overriden */ + Sized *activated_child = NULL; + int result = -1; /* until proved innocent */ + +#define CHECK(CONDITION, ERRORMSG) \ + if (!(CONDITION)) { \ + errormsg = (ERRORMSG); \ + goto Error; \ + } + + PER_USE_OR_RETURN(self, -1); + CHECK(self->len >= 0, "BTree len < 0"); + CHECK(self->len <= self->size, "BTree len > size"); + if (self->len == 0) /* Empty BTree. */ + { + CHECK(self->firstbucket == NULL, + "Empty BTree has non-NULL firstbucket"); + result = 0; + goto Done; + } + /* Non-empty BTree. */ + CHECK(self->firstbucket != NULL, "Non-empty BTree has NULL firstbucket"); + + /* Obscure: The first bucket is pointed to at least by self->firstbucket + * and data[0].child of whichever BTree node it's a child of. However, + * if persistence is enabled then the latter BTree node may be a ghost + * at this point, and so its pointers "don't count": we can only rely + * on self's pointers being intact. + */ +#ifdef PERSISTENT + CHECK(Py_REFCNT(self->firstbucket) >= 1, + "Non-empty BTree firstbucket has refcount < 1"); +#else + CHECK(Py_REFCNT(self->firstbucket) >= 2, + "Non-empty BTree firstbucket has refcount < 2"); +#endif + + for (i = 0; i < self->len; ++i) + { + CHECK(self->data[i].child != NULL, "BTree has NULL child"); + } + + if (SameType_Check(self, self->data[0].child)) + { + /* Our children are also BTrees. 
*/ + child = self->data[0].child; + UNLESS (PER_USE(child)) + goto Done; + activated_child = child; + CHECK(self->firstbucket == BTREE(child)->firstbucket, + "BTree has firstbucket different than " + "its first child's firstbucket"); + PER_ALLOW_DEACTIVATION(child); + activated_child = NULL; + for (i = 0; i < self->len; ++i) + { + child = self->data[i].child; + CHECK(SameType_Check(self, child), + "BTree children have different types"); + if (i == self->len - 1) + bucketafter = nextbucket; + else + { + BTree *child2 = BTREE(self->data[i+1].child); + UNLESS (PER_USE(child2)) + goto Done; + bucketafter = child2->firstbucket; + PER_ALLOW_DEACTIVATION(child2); + } + if (BTree_check_inner(BTREE(child), bucketafter) < 0) + goto Done; + } + } + else /* Our children are buckets. */ + { + CHECK(self->firstbucket == BUCKET(self->data[0].child), + "Bottom-level BTree node has inconsistent firstbucket belief"); + for (i = 0; i < self->len; ++i) + { + child = self->data[i].child; + UNLESS (PER_USE(child)) + goto Done; + activated_child = child; + CHECK(!SameType_Check(self, child), + "BTree children have different types"); + CHECK(child->len >= 1, "Bucket length < 1");/* no empty buckets! */ + CHECK(child->len <= child->size, "Bucket len > size"); +#ifdef PERSISTENT + CHECK(Py_REFCNT(child) >= 1, "Bucket has refcount < 1"); +#else + CHECK(Py_REFCNT(child) >= 2, "Bucket has refcount < 2"); +#endif + if (i == self->len - 1) + bucketafter = nextbucket; + else + bucketafter = BUCKET(self->data[i+1].child); + CHECK(BUCKET(child)->next == bucketafter, + "Bucket next pointer is damaged"); + PER_ALLOW_DEACTIVATION(child); + activated_child = NULL; + } + } + result = 0; + goto Done; + +Error: + PyErr_SetString(PyExc_AssertionError, errormsg); + result = -1; + +Done: + /* No point updating access time -- this isn't a "real" use. 
*/ + PER_ALLOW_DEACTIVATION(self); + if (activated_child) + { + PER_ALLOW_DEACTIVATION(activated_child); + } + return result; + +#undef CHECK +} + +/* Sanity-check a BTree. This is the ._check() method. Return: + * NULL Error. If it's an internal inconsistency in the BTree, + * AssertionError is set. + * Py_None No problem found. + */ +static PyObject* +BTree_check(BTree *self) +{ + PyObject *result = NULL; + int i = BTree_check_inner(self, NULL); + + if (i >= 0) + { + result = Py_None; + Py_INCREF(result); + } + return result; +} + +#define _BGET_REPLACE_TYPE_ERROR 1 +#define _BGET_ALLOW_TYPE_ERROR 0 +/* +** _BTree_get +** +** Search a BTree. +** +** Arguments +** self a pointer to a BTree +** keyarg the key to search for, as a Python object +** has_key true/false; when false, try to return the associated +** value; when true, return a boolean +** replace_type_err true/false: When true, ignore the TypeError from +** a key conversion issue, instead +** transforming it into a KeyError set. If +** you are just reading/searching, set to +** true. If you will be adding/updating, +** however, set to false. Or use +** _BGET_REPLACE_TYPE_ERROR +** and _BGET_ALLOW_TYPE_ERROR, respectively. +** Return +** When has_key false: +** If key exists, its associated value. +** If key doesn't exist, NULL and KeyError is set. +** When has_key true: +** A Python int is returned in any case. +** If key exists, the depth of the bucket in which it was found. +** If key doesn't exist, 0. 
+*/ +static PyObject * +_BTree_get(BTree *self, PyObject *keyarg, int has_key, int replace_type_err) +{ + KEY_TYPE key; + PyObject *result = NULL; /* guilty until proved innocent */ + int copied = 1; + + COPY_KEY_FROM_ARG(key, keyarg, copied); + UNLESS (copied) + { + if (replace_type_err && PyErr_ExceptionMatches(PyExc_TypeError)) + { + PyErr_Clear(); + PyErr_SetObject(PyExc_KeyError, keyarg); + } + return NULL; + } + + PER_USE_OR_RETURN(self, NULL); + if (self->len == 0) + { + /* empty BTree */ + if (has_key) + result = INT_FROM_LONG(0); + else + PyErr_SetObject(PyExc_KeyError, keyarg); + } + else + { + for (;;) + { + int i; + Sized *child; + + BTREE_SEARCH(i, self, key, goto Done); + child = self->data[i].child; + has_key += has_key != 0; /* bump depth counter, maybe */ + if (SameType_Check(self, child)) + { + PER_UNUSE(self); + self = BTREE(child); + PER_USE_OR_RETURN(self, NULL); + } + else + { + result = _bucket_get(BUCKET(child), keyarg, has_key); + break; + } + } + } + +Done: + PER_UNUSE(self); + return result; +} + +static PyObject * +BTree_get(BTree *self, PyObject *key) +{ + return _BTree_get(self, key, 0, _BGET_REPLACE_TYPE_ERROR); +} + +/* Create a new bucket for the BTree or TreeSet using the class attribute + _bucket_type, which is normally initialized to BucketType or SetType + as appropriate. +*/ +static Sized * +BTree_newBucket(BTree *self) +{ + PyObject *factory; + Sized *result; + + /* _bucket_type_str defined in BTreeModuleTemplate.c */ + factory = PyObject_GetAttr((PyObject *)Py_TYPE(self), _bucket_type_str); + if (factory == NULL) + return NULL; + /* TODO: Should we check that the factory actually returns something + of the appropriate type? How? The C code here is going to + depend on any custom bucket type having the same layout at the + C level. 
+ */ + result = SIZED(PyObject_CallObject(factory, NULL)); + Py_DECREF(factory); + return result; +} + +/* + * Move data from the current BTree, from index onward, to the newly created + * BTree 'next'. self and next must both be activated. If index is OOB (< 0 + * or >= self->len), use self->len / 2 as the index (i.e., split at the + * midpoint). self must have at least 2 children on entry, and index must + * be such that self and next each have at least one child at exit. self's + * accessed time is updated. + * + * Return: + * -1 error + * 0 OK + */ +static int +BTree_split(BTree *self, int index, BTree *next) +{ + int next_size; + Sized *child; + + if (index < 0 || index >= self->len) + index = self->len / 2; + + next_size = self->len - index; + ASSERT(index > 0, "split creates empty tree", -1); + ASSERT(next_size > 0, "split creates empty tree", -1); + + next->data = BTree_Malloc(sizeof(BTreeItem) * next_size); + if (!next->data) + return -1; + memcpy(next->data, self->data + index, sizeof(BTreeItem) * next_size); + next->size = next_size; /* but don't set len until we succeed */ + + /* Set next's firstbucket. self->firstbucket is still correct. */ + child = next->data[0].child; + if (SameType_Check(self, child)) + { + PER_USE_OR_RETURN(child, -1); + next->firstbucket = BTREE(child)->firstbucket; + PER_UNUSE(child); + } + else + next->firstbucket = BUCKET(child); + Py_INCREF(next->firstbucket); + + next->len = next_size; + self->len = index; + return PER_CHANGED(self) >= 0 ? 0 : -1; +} + + +/* Fwd decl -- BTree_grow and BTree_split_root reference each other. */ +static int BTree_grow(BTree *self, int index, int noval); + +/* Split the root. This is a little special because the root isn't a child + * of anything else, and the root needs to retain its object identity. So + * this routine moves the root's data into a new child, and splits the + * latter. This leaves the root with two children. 
+ * + * Return: + * 0 OK + * -1 error + * + * CAUTION: The caller must call PER_CHANGED on self. + */ +static int +BTree_split_root(BTree *self, int noval) +{ + BTree *child; + BTreeItem *d; + + /* Create a child BTree, and a new data vector for self. */ + child = BTREE(PyObject_CallObject(OBJECT(Py_TYPE(self)), NULL)); + if (!child) + return -1; + + d = BTree_Malloc(sizeof(BTreeItem) * 2); + if (!d) { + Py_DECREF(child); + return -1; + } + + /* Move our data to new BTree. */ + child->size = self->size; + child->len = self->len; + child->data = self->data; + child->firstbucket = self->firstbucket; + Py_INCREF(child->firstbucket); + + /* Point self to child and split the child. */ + self->data = d; + self->len = 1; + self->size = 2; + self->data[0].child = SIZED(child); /* transfers reference ownership */ + return BTree_grow(self, 0, noval); +} + +/* +** BTree_grow +** +** Grow a BTree +** +** Arguments: self The BTree +** index self->data[index].child needs to be split. index +** must be 0 if self is empty (len == 0), and a new +** empty bucket is created then. +** noval Boolean; is this a set (true) or mapping (false)? +** +** Returns: 0 on success +** -1 on failure +** +** CAUTION: If self is empty on entry, this routine adds an empty bucket. +** That isn't a legitimate BTree; if the caller doesn't put something in +** in the bucket (say, because of a later error), the BTree must be cleared +** to get rid of the empty bucket. 
+*/ +static int +BTree_grow(BTree *self, int index, int noval) +{ + int i; + Sized *v, *e = 0; + BTreeItem *d; + + if (self->len == self->size) + { + if (self->size) + { + d = BTree_Realloc(self->data, sizeof(BTreeItem) * self->size * 2); + if (d == NULL) + return -1; + self->data = d; + self->size *= 2; + } + else + { + d = BTree_Malloc(sizeof(BTreeItem) * 2); + if (d == NULL) + return -1; + self->data = d; + self->size = 2; + } + } + + if (self->len) + { + long max_size = _max_internal_size(self); + if (max_size < 0) return -1; + + d = self->data + index; + v = d->child; + /* Create a new object of the same type as the target value */ + e = (Sized *)PyObject_CallObject((PyObject *)Py_TYPE(v), NULL); + if (e == NULL) + return -1; + + UNLESS(PER_USE(v)) + { + Py_DECREF(e); + return -1; + } + + /* Now split between the original (v) and the new (e) at the midpoint*/ + if (SameType_Check(self, v)) + i = BTree_split((BTree *)v, -1, (BTree *)e); + else + i = bucket_split((Bucket *)v, -1, (Bucket *)e); + PER_ALLOW_DEACTIVATION(v); + + if (i < 0) + { + Py_DECREF(e); + assert(PyErr_Occurred()); + return -1; + } + + index++; + d++; + if (self->len > index) /* Shift up the old values one array slot */ + memmove(d+1, d, sizeof(BTreeItem)*(self->len-index)); + + if (SameType_Check(self, v)) + { + COPY_KEY(d->key, BTREE(e)->data->key); + + /* We take the unused reference from e, so there's no + reason to INCREF! + */ + /* INCREF_KEY(self->data[1].key); */ + } + else + { + COPY_KEY(d->key, BUCKET(e)->keys[0]); + INCREF_KEY(d->key); + } + d->child = e; + self->len++; + + if (self->len >= max_size * 2) /* the root is huge */ + return BTree_split_root(self, noval); + } + else + { + /* The BTree is empty. Create an empty bucket. See CAUTION in + * the comments preceding. 
+ */ + assert(index == 0); + d = self->data; + d->child = BTree_newBucket(self); + if (d->child == NULL) + return -1; + self->len = 1; + Py_INCREF(d->child); + self->firstbucket = (Bucket *)d->child; + } + + return 0; +} + +/* Return the rightmost bucket reachable from following child pointers + * from self. The caller gets a new reference to this bucket. Note that + * bucket 'next' pointers are not followed: if self is an interior node + * of a BTree, this returns the rightmost bucket in that node's subtree. + * In case of error, returns NULL. + * + * self must not be a ghost; this isn't checked. The result may be a ghost. + * + * Pragmatics: Note that the rightmost bucket's last key is the largest + * key in self's subtree. + */ +static Bucket * +BTree_lastBucket(BTree *self) +{ + Sized *pchild; + Bucket *result; + + UNLESS (self->data && self->len) + { + IndexError(-1); /* is this the best action to take? */ + return NULL; + } + + pchild = self->data[self->len - 1].child; + if (SameType_Check(self, pchild)) + { + self = BTREE(pchild); + PER_USE_OR_RETURN(self, NULL); + result = BTree_lastBucket(self); + PER_UNUSE(self); + } + else + { + Py_INCREF(pchild); + result = BUCKET(pchild); + } + return result; +} + +static int +BTree_deleteNextBucket(BTree *self) +{ + Bucket *b; + + UNLESS (PER_USE(self)) + return -1; + + b = BTree_lastBucket(self); + if (b == NULL) + goto err; + if (Bucket_deleteNextBucket(b) < 0) + goto err; + + Py_DECREF(b); + PER_UNUSE(self); + + return 0; + +err: + Py_XDECREF(b); + PER_ALLOW_DEACTIVATION(self); + return -1; +} + +/* +** _BTree_clear +** +** Clears out all of the values in the BTree (firstbucket, keys, and children); +** leaving self an empty BTree. +** +** Arguments: self The BTree +** +** Returns: 0 on success +** -1 on failure +** +** Internal: Deallocation order is important. 
The danger is that a long +** list of buckets may get freed "at once" via decref'ing the first bucket, +** in which case a chain of consequenct Py_DECREF calls may blow the stack. +** Luckily, every bucket has a refcount of at least two, one due to being a +** BTree node's child, and another either because it's not the first bucket in +** the chain (so the preceding bucket points to it), or because firstbucket +** points to it. By clearing in the natural depth-first, left-to-right +** order, the BTree->bucket child pointers prevent Py_DECREF(bucket->next) +** calls from freeing bucket->next, and the maximum stack depth is equal +** to the height of the tree. +**/ +static int +_BTree_clear(BTree *self) +{ + const int len = self->len; + + if (self->firstbucket) + { + /* Obscure: The first bucket is pointed to at least by + * self->firstbucket and data[0].child of whichever BTree node it's + * a child of. However, if persistence is enabled then the latter + * BTree node may be a ghost at this point, and so its pointers "don't + * count": we can only rely on self's pointers being intact. + */ +#ifdef PERSISTENT + ASSERT(Py_REFCNT(self->firstbucket) > 0, + "Invalid firstbucket pointer", -1); +#else + ASSERT(Py_REFCNT(self->firstbucket) > 1, + "Invalid firstbucket pointer", -1); +#endif + Py_DECREF(self->firstbucket); + self->firstbucket = NULL; + } + + if (self->data) + { + int i; + if (len > 0) /* 0 is special because key 0 is trash */ + { + Py_DECREF(self->data[0].child); + } + + for (i = 1; i < len; i++) + { +#ifdef KEY_TYPE_IS_PYOBJECT + DECREF_KEY(self->data[i].key); +#endif + Py_DECREF(self->data[i].child); + } + free(self->data); + self->data = NULL; + } + + self->len = self->size = 0; + return 0; +} + +/* + Set (value != 0) or delete (value=0) a tree item. + + If unique is non-zero, then only change if the key is + new. + + If noval is non-zero, then don't set a value (the tree + is a set). 
+ + Return: + -1 error + 0 successful, and number of entries didn't change + >0 successful, and number of entries did change + + Internal + There are two distinct return values > 0: + + 1 Successful, number of entries changed, but firstbucket did not go away. + + 2 Successful, number of entries changed, firstbucket did go away. + This can only happen on a delete (value == NULL). The caller may + need to change its own firstbucket pointer, and in any case *someone* + needs to adjust the 'next' pointer of the bucket immediately preceding + the bucket that went away (it needs to point to the bucket immediately + following the bucket that went away). +*/ +static int +_BTree_set(BTree *self, PyObject *keyarg, PyObject *value, + int unique, int noval) +{ + int changed = 0; /* did I mutate? */ + int min; /* index of child I searched */ + BTreeItem *d; /* self->data[min] */ + int childlength; /* len(self->data[min].child) */ + int status; /* our return value; and return value from callee */ + int self_was_empty; /* was self empty at entry? */ + + KEY_TYPE key; + int copied = 1; + + COPY_KEY_FROM_ARG(key, keyarg, copied); + if (!copied) + return -1; +#ifdef KEY_CHECK_ON_SET + if (value && !KEY_CHECK_ON_SET(keyarg)) + return -1; +#endif + + PER_USE_OR_RETURN(self, -1); + + self_was_empty = self->len == 0; + if (self_was_empty) + { + /* We're empty. Make room. */ + if (value) + { + if (BTree_grow(self, 0, noval) < 0) + goto Error; + } + else + { + /* Can't delete a key from an empty BTree. */ + PyErr_SetObject(PyExc_KeyError, keyarg); + goto Error; + } + } + + /* Find the right child to search, and hand the work off to it. 
*/ + BTREE_SEARCH(min, self, key, goto Error); + d = self->data + min; + +#ifdef PERSISTENT + PER_READCURRENT(self, goto Error); +#endif + + if (SameType_Check(self, d->child)) + status = _BTree_set(BTREE(d->child), keyarg, value, unique, noval); + else + { + int bucket_changed = 0; + status = _bucket_set(BUCKET(d->child), keyarg, + value, unique, noval, &bucket_changed); +#ifdef PERSISTENT + /* If a BTree contains only a single bucket, BTree.__getstate__() + * includes the bucket's entire state, and the bucket doesn't get + * an oid of its own. So if we have a single oid-less bucket that + * changed, it's *our* oid that should be marked as changed -- the + * bucket doesn't have one. + */ + if (bucket_changed + && self->len == 1 + && self->data[0].child->oid == NULL) + { + changed = 1; + } +#endif + } + if (status == 0) + goto Done; + if (status < 0) + goto Error; + assert(status == 1 || status == 2); + + /* The child changed size. Get its new size. Note that since the tree + * rooted at the child changed size, so did the tree rooted at self: + * our status must be >= 1 too. + */ + UNLESS(PER_USE(d->child)) + goto Error; + childlength = d->child->len; + PER_UNUSE(d->child); + + if (value) + { + /* A bucket got bigger -- if it's "too big", split it. */ + int toobig; + + assert(status == 1); /* can be 2 only on deletes */ + if (SameType_Check(self, d->child)) { + long max_size = _max_internal_size(self); + if (max_size < 0) return -1; + toobig = childlength > max_size; + } + else { + long max_size = _max_leaf_size(self); + if (max_size < 0) return -1; + toobig = childlength > max_size; + } + if (toobig) { + if (BTree_grow(self, min, noval) < 0) + goto Error; + changed = 1; /* BTree_grow mutated self */ + } + goto Done; /* and status still == 1 */ + } + + /* A bucket got smaller. This is much harder, and despite that we + * don't try to rebalance the tree. + */ + + if (min && childlength) + { /* We removed a key. but the node child is non-empty. 
If the + deleted key is the node key, then update the node key using + the smallest key of the node child. + + This doesn't apply to the 0th node, whos key is unused. + */ + int _cmp = 1; + TEST_KEY_SET_OR(_cmp, key, d->key) goto Error; + if (_cmp == 0) /* Need to replace key with first key from child */ + { + Bucket *bucket; + + if (SameType_Check(self, d->child)) + { + UNLESS(PER_USE(d->child)) + goto Error; + bucket = BTREE(d->child)->firstbucket; + PER_UNUSE(d->child); + } + else + bucket = BUCKET(d->child); + + UNLESS(PER_USE(bucket)) + goto Error; + DECREF_KEY(d->key); + COPY_KEY(d->key, bucket->keys[0]); + INCREF_KEY(d->key); + PER_UNUSE(bucket); + if (PER_CHANGED(self) < 0) + goto Error; + } + } + + if (status == 2) + { + /* The child must be a BTree because bucket.set never returns 2 */ + /* Two problems to solve: May have to adjust our own firstbucket, + * and the bucket that went away needs to get unlinked. + */ + if (min) + { + /* This wasn't our firstbucket, so no need to adjust ours (note + * that it can't be the firstbucket of any node above us either). + * Tell "the tree to the left" to do the unlinking. + */ + if (BTree_deleteNextBucket(BTREE(d[-1].child)) < 0) + goto Error; + status = 1; /* we solved the child's firstbucket problem */ + } + else + { + /* This was our firstbucket. Update to new firstbucket value. */ + Bucket *nextbucket; + UNLESS(PER_USE(d->child)) + goto Error; + nextbucket = BTREE(d->child)->firstbucket; + PER_UNUSE(d->child); + + Py_XINCREF(nextbucket); + Py_DECREF(self->firstbucket); + self->firstbucket = nextbucket; + changed = 1; + + /* The caller has to do the unlinking -- we can't. Also, since + * it was our firstbucket, it may also be theirs. + */ + assert(status == 2); + } + } + + /* If the child isn't empty, we're done! We did all that was possible for + * us to do with the firstbucket problems the child gave us, and since the + * child isn't empty don't create any new firstbucket problems of our own. 
+ */ + if (childlength) + goto Done; + + /* The child became empty: we need to remove it from self->data. + * But first, if we're a bottom-level node, we've got more bucket-fiddling + * to set up. + */ + if (! SameType_Check(self, d->child)) + { + /* We're about to delete a bucket, so need to adjust bucket pointers. */ + if (min) + { + /* It's not our first bucket, so we can tell the previous + * bucket to adjust its reference to it. It can't be anyone + * else's first bucket either, so the caller needn't do anything. + */ + if (Bucket_deleteNextBucket(BUCKET(d[-1].child)) < 0) + goto Error; + /* status should be 1, and already is: if it were 2, the + * block above would have set it to 1 in its min != 0 branch. + */ + assert(status == 1); + } + else + { + Bucket *nextbucket; + /* It's our first bucket. We can't unlink it directly. */ + /* 'changed' will be set true by the deletion code following. */ + UNLESS(PER_USE(d->child)) + goto Error; + nextbucket = BUCKET(d->child)->next; + PER_UNUSE(d->child); + + Py_XINCREF(nextbucket); + Py_DECREF(self->firstbucket); + self->firstbucket = nextbucket; + + status = 2; /* we're giving our caller a new firstbucket problem */ + } + } + + /* Remove the child from self->data. */ + Py_DECREF(d->child); +#ifdef KEY_TYPE_IS_PYOBJECT + if (min) + { + DECREF_KEY(d->key); + } + else if (self->len > 1) + { + /* We're deleting the first child of a BTree with more than one + * child. The key at d+1 is about to be shifted into slot 0, + * and hence never to be referenced again (the key in slot 0 is + * trash). + */ + DECREF_KEY((d+1)->key); + } + /* Else min==0 and len==1: we're emptying the BTree entirely, and + * there is no key in need of decrefing. 
+ */ +#endif + --self->len; + if (min < self->len) + memmove(d, d+1, (self->len - min) * sizeof(BTreeItem)); + changed = 1; + +Done: +#ifdef PERSISTENT + if (changed) + { + if (PER_CHANGED(self) < 0) + goto Error; + } +#endif + PER_UNUSE(self); + return status; + +Error: + assert(PyErr_Occurred()); + if (self_was_empty) + { + /* BTree_grow may have left the BTree in an invalid state. Make + * sure the tree is a legitimate empty tree. + */ + _BTree_clear(self); + } + PER_UNUSE(self); + return -1; +} + +/* +** BTree_setitem +** +** wrapper for _BTree_set +** +** Arguments: self The BTree +** key The key to insert +** v The value to insert +** +** Returns -1 on failure +** 0 on success +*/ +static int +BTree_setitem(BTree *self, PyObject *key, PyObject *v) +{ + if (_BTree_set(self, key, v, 0, 0) < 0) + return -1; + return 0; +} + +#ifdef PERSISTENT +static PyObject * +BTree__p_deactivate(BTree *self, PyObject *args, PyObject *keywords) +{ + int ghostify = 1; + PyObject *force = NULL; + + if (args && PyTuple_GET_SIZE(args) > 0) + { + PyErr_SetString(PyExc_TypeError, + "_p_deactivate takes not positional arguments"); + return NULL; + } + if (keywords) + { + int size = PyDict_Size(keywords); + force = PyDict_GetItemString(keywords, "force"); + if (force) + size--; + if (size) + { + PyErr_SetString(PyExc_TypeError, + "_p_deactivate only accepts keyword arg force"); + return NULL; + } + } + + if (self->jar && self->oid) + { + ghostify = self->state == cPersistent_UPTODATE_STATE; + if (!ghostify && force) + { + if (PyObject_IsTrue(force)) + ghostify = 1; + if (PyErr_Occurred()) + return NULL; + } + if (ghostify) + { + if (_BTree_clear(self) < 0) + return NULL; + PER_GHOSTIFY(self); + } + } + + Py_INCREF(Py_None); + return Py_None; +} +#endif + +static PyObject * +BTree_clear(BTree *self) +{ + UNLESS (PER_USE(self)) return NULL; + + if (self->len) + { + if (_BTree_clear(self) < 0) + goto err; + if (PER_CHANGED(self) < 0) + goto err; + } + + PER_UNUSE(self); + + 
Py_INCREF(Py_None); + return Py_None; + +err: + PER_UNUSE(self); + return NULL; +} + +/* + * Return: + * + * For an empty BTree (self->len == 0), None. + * + * For a BTree with one child (self->len == 1), and that child is a bucket, + * and that bucket has a NULL oid, a one-tuple containing a one-tuple + * containing the bucket's state: + * + * ( + * ( + * child[0].__getstate__(), + * ), + * ) + * + * Else a two-tuple. The first element is a tuple interleaving the BTree's + * keys and direct children, of size 2*self->len - 1 (key[0] is unused and + * is not saved). The second element is the firstbucket: + * + * ( + * (child[0], key[1], child[1], key[2], child[2], ..., + * key[len-1], child[len-1]), + * self->firstbucket + * ) + * + * In the above, key[i] means self->data[i].key, and similarly for child[i]. + */ +static PyObject * +BTree_getstate(BTree *self) +{ + PyObject *r = NULL; + PyObject *o; + int i, l; + + UNLESS (PER_USE(self)) + return NULL; + + if (self->len) + { + r = PyTuple_New(self->len * 2 - 1); + if (r == NULL) + goto err; + + if (self->len == 1 + && Py_TYPE(self->data->child) != Py_TYPE(self) +#ifdef PERSISTENT + && BUCKET(self->data->child)->oid == NULL +#endif + ) + { + /* We have just one bucket. Save its data directly. 
*/ + o = bucket_getstate((Bucket *)self->data->child); + if (o == NULL) + goto err; + PyTuple_SET_ITEM(r, 0, o); + ASSIGN(r, Py_BuildValue("(O)", r)); + } + else + { + for (i=0, l=0; i < self->len; i++) + { + if (i) + { + COPY_KEY_TO_OBJECT(o, self->data[i].key); + PyTuple_SET_ITEM(r, l, o); + l++; + } + o = (PyObject *)self->data[i].child; + Py_INCREF(o); + PyTuple_SET_ITEM(r,l,o); + l++; + } + ASSIGN(r, Py_BuildValue("OO", r, self->firstbucket)); + } + + } + else + { + r = Py_None; + Py_INCREF(r); + } + + PER_UNUSE(self); + + return r; + +err: + PER_UNUSE(self); + Py_XDECREF(r); + return NULL; +} + +static int +_BTree_setstate(BTree *self, PyObject *state, int noval) +{ + PyObject *items, *firstbucket = NULL; + BTreeItem *d; + int len, l, i, copied=1; + + if (_BTree_clear(self) < 0) + return -1; + + /* The state of a BTree can be one of the following: + None -- an empty BTree + A one-tuple -- a single bucket btree + A two-tuple -- a BTree with more than one bucket + See comments for BTree_getstate() for the details. + */ + + if (state == Py_None) + return 0; + + if (!PyArg_ParseTuple(state, "O|O:__setstate__", &items, &firstbucket)) + return -1; + + if (!PyTuple_Check(items)) + { + PyErr_SetString(PyExc_TypeError, + "tuple required for first state element"); + return -1; + } + + len = PyTuple_Size(items); + if (len < 0) + return -1; + len = (len + 1) / 2; + + assert(len > 0); /* If the BTree is empty, it's state is None. */ + assert(self->size == 0); /* We called _BTree_clear(). 
*/ + + self->data = BTree_Malloc(sizeof(BTreeItem) * len); + if (self->data == NULL) + return -1; + self->size = len; + + for (i = 0, d = self->data, l = 0; i < len; i++, d++) + { + PyObject *v; + if (i) + { /* skip the first key slot */ + COPY_KEY_FROM_ARG(d->key, PyTuple_GET_ITEM(items, l), copied); + l++; + if (!copied) + return -1; + INCREF_KEY(d->key); + } + v = PyTuple_GET_ITEM(items, l); + if (PyTuple_Check(v)) + { + /* Handle the special case in __getstate__() for a BTree + with a single bucket. */ + d->child = BTree_newBucket(self); + if (!d->child) + return -1; + if (noval) + { + if (_set_setstate(BUCKET(d->child), v) < 0) + return -1; + } + else + { + if (_bucket_setstate(BUCKET(d->child), v) < 0) + return -1; + } + } + else + { + d->child = (Sized *)v; + Py_INCREF(v); + } + l++; + } + + if (!firstbucket) + firstbucket = (PyObject *)self->data->child; + + if (!PyObject_IsInstance(firstbucket, (PyObject *) + (noval ? &SetType : &BucketType))) + { + PyErr_SetString(PyExc_TypeError, + "No firstbucket in non-empty BTree"); + return -1; + } + self->firstbucket = BUCKET(firstbucket); + Py_INCREF(firstbucket); +#ifndef PERSISTENT + /* firstbucket is also the child of some BTree node, but that node may + * be a ghost if persistence is enabled. + */ + assert(Py_REFCNT(self->firstbucket) > 1); +#endif + self->len = len; + + return 0; +} + +static PyObject * +BTree_setstate(BTree *self, PyObject *arg) +{ + int r; + + PER_PREVENT_DEACTIVATION(self); + r = _BTree_setstate(self, arg, 0); + PER_UNUSE(self); + + if (r < 0) + return NULL; + Py_INCREF(Py_None); + return Py_None; +} + +#ifdef PERSISTENT + +/* Recognize the special cases of a BTree that's empty or contains a single + * bucket. In the former case, return a borrowed reference to Py_None. 
+ * In this single-bucket case, the bucket state is embedded directly in the + * BTree state, like so: + * + * ( + * ( + * thebucket.__getstate__(), + * ), + * ) + * + * When this obtains, return a borrowed reference to thebucket.__getstate__(). + * Else return NULL with an exception set. The exception should always be + * ConflictError then, but may be TypeError if the state makes no sense at all + * for a BTree (corrupted or hostile state). + */ +PyObject * +get_bucket_state(PyObject *t) +{ + if (t == Py_None) + return Py_None; /* an empty BTree */ + if (! PyTuple_Check(t)) + { + PyErr_SetString(PyExc_TypeError, + "_p_resolveConflict: expected tuple or None for state"); + return NULL; + } + + if (PyTuple_GET_SIZE(t) == 2) + { + /* A non-degenerate BTree. */ + return merge_error(-1, -1, -1, 11); + } + + /* We're in the one-bucket case. */ + + if (PyTuple_GET_SIZE(t) != 1) + { + PyErr_SetString(PyExc_TypeError, + "_p_resolveConflict: expected 1- or 2-tuple for state"); + return NULL; + } + + t = PyTuple_GET_ITEM(t, 0); + if (! PyTuple_Check(t) || PyTuple_GET_SIZE(t) != 1) + { + PyErr_SetString(PyExc_TypeError, + "_p_resolveConflict: expected 1-tuple containing " + "bucket state"); + return NULL; + } + + t = PyTuple_GET_ITEM(t, 0); + if (! PyTuple_Check(t)) + { + PyErr_SetString(PyExc_TypeError, + "_p_resolveConflict: expected tuple for bucket state"); + return NULL; + } + + return t; +} + +/* Tricky. The only kind of BTree conflict we can actually potentially + * resolve is the special case of a BTree containing a single bucket, + * in which case this becomes a fancy way of calling the bucket conflict + * resolution code. 
+ */ +static PyObject * +BTree__p_resolveConflict(BTree *self, PyObject *args) +{ + PyObject *s[3]; + PyObject *x, *y, *z; + + if (!PyArg_ParseTuple(args, "OOO", &x, &y, &z)) + return NULL; + + s[0] = get_bucket_state(x); + if (s[0] == NULL) + return NULL; + s[1] = get_bucket_state(y); + if (s[1] == NULL) + return NULL; + s[2] = get_bucket_state(z); + if (s[2] == NULL) + return NULL; + + if (PyObject_IsInstance((PyObject *)self, (PyObject *)&BTreeType)) + x = _bucket__p_resolveConflict(OBJECT(&BucketType), s); + else + x = _bucket__p_resolveConflict(OBJECT(&SetType), s); + + if (x == NULL) + return NULL; + + return Py_BuildValue("((N))", x); +} +#endif + +/* + BTree_findRangeEnd -- Find one end, expressed as a bucket and + position, for a range search. + + If low, return bucket and index of the smallest item >= key, + otherwise return bucket and index of the largest item <= key. + + If exclude_equal, exact matches aren't acceptable; if one is found, + move right if low, or left if !low (this is for range searches exclusive + of an endpoint). + + Return: + -1 Error; offset and bucket unchanged + 0 Not found; offset and bucket unchanged + 1 Correct bucket and offset stored; the caller owns a new reference + to the bucket. + + Internal: + We do binary searches in BTree nodes downward, at each step following + C(i) where K(i) <= key < K(i+1). As always, K(i) <= C(i) < K(i+1) too. + (See Maintainer.txt for the meaning of that notation.) That eventually + leads to a bucket where we do Bucket_findRangeEnd. That usually works, + but there are two cases where it can fail to find the correct answer: + + 1. On a low search, we find a bucket with keys >= K(i), but that doesn't + imply there are keys in the bucket >= key. For example, suppose + a bucket has keys in 1..100, its successor's keys are in 200..300, and + we're doing a low search on 150. We'll end up in the first bucket, + but there are no keys >= 150 in it. 
K(i+1) > key, though, and all + the keys in C(i+1) >= K(i+1) > key, so the first key in the next + bucket (if any) is the correct result. This is easy to find by + following the bucket 'next' pointer. + + 2. On a high search, again that the keys in the bucket are >= K(i) + doesn't imply that any key in the bucket is <= key, but it's harder + for this to fail (and an earlier version of this routine didn't + catch it): if K(i) itself is in the bucket, it works (then + K(i) <= key is *a* key in the bucket that's in the desired range). + But when keys get deleted from buckets, they aren't also deleted from + BTree nodes, so there's no guarantee that K(i) is in the bucket. + For example, delete the smallest key S from some bucket, and S + remains in the interior BTree nodes. Do a high search for S, and + the BTree nodes direct the search to the bucket S used to be in, + but all keys remaining in that bucket are > S. The largest key in + the *preceding* bucket (if any) is < K(i), though, and K(i) <= key, + so the largest key in the preceding bucket is < key and so is the + proper result. + + This is harder to get at efficiently, as buckets are linked only in + the increasing direction. While we're searching downward, + deepest_smaller is set to the node deepest in the tree where + we *could* have gone to the left of C(i). The rightmost bucket in + deepest_smaller's subtree is the bucket preceding the bucket we find + at first. This is clumsy to get at, but efficient. 
+*/ +static int +BTree_findRangeEnd(BTree *self, PyObject *keyarg, int low, int exclude_equal, + Bucket **bucket, int *offset) +{ + Sized *deepest_smaller = NULL; /* last possibility to move left */ + int deepest_smaller_is_btree = 0; /* Boolean; if false, it's a bucket */ + Bucket *pbucket; + int self_got_rebound = 0; /* Boolean; when true, deactivate self */ + int result = -1; /* Until proven innocent */ + int i; + KEY_TYPE key; + int copied = 1; + + COPY_KEY_FROM_ARG(key, keyarg, copied); + UNLESS (copied) + return -1; + + /* We don't need to: PER_USE_OR_RETURN(self, -1); + because the caller does. */ + UNLESS (self->data && self->len) + return 0; + + /* Search downward until hitting a bucket, stored in pbucket. */ + for (;;) + { + Sized *pchild; + int pchild_is_btree; + + BTREE_SEARCH(i, self, key, goto Done); + pchild = self->data[i].child; + pchild_is_btree = SameType_Check(self, pchild); + if (i) + { + deepest_smaller = self->data[i-1].child; + deepest_smaller_is_btree = pchild_is_btree; + } + + if (pchild_is_btree) + { + if (self_got_rebound) + { + PER_UNUSE(self); + } + self = BTREE(pchild); + self_got_rebound = 1; + PER_USE_OR_RETURN(self, -1); + } + else + { + pbucket = BUCKET(pchild); + break; + } + } + + /* Search the bucket for a suitable key. */ + i = Bucket_findRangeEnd(pbucket, keyarg, low, exclude_equal, offset); + if (i < 0) + goto Done; + if (i > 0) + { + Py_INCREF(pbucket); + *bucket = pbucket; + result = 1; + goto Done; + } + /* This may be one of the two difficult cases detailed in the comments. */ + if (low) + { + Bucket *next; + + UNLESS(PER_USE(pbucket)) goto Done; + next = pbucket->next; + if (next) { + result = 1; + Py_INCREF(next); + *bucket = next; + *offset = 0; + } + else + result = 0; + PER_UNUSE(pbucket); + } + /* High-end search: if it's possible to go left, do so. */ + else if (deepest_smaller) + { + if (deepest_smaller_is_btree) + { + UNLESS(PER_USE(deepest_smaller)) + goto Done; + /* We own the reference this returns. 
*/ + pbucket = BTree_lastBucket(BTREE(deepest_smaller)); + PER_UNUSE(deepest_smaller); + if (pbucket == NULL) + goto Done; /* error */ + } + else + { + pbucket = BUCKET(deepest_smaller); + Py_INCREF(pbucket); + } + UNLESS(PER_USE(pbucket)) + goto Done; + result = 1; + *bucket = pbucket; /* transfer ownership to caller */ + *offset = pbucket->len - 1; + PER_UNUSE(pbucket); + } + else + result = 0; /* simply not found */ + +Done: + if (self_got_rebound) + { + PER_UNUSE(self); + } + return result; +} + +static PyObject * +BTree_maxminKey(BTree *self, PyObject *args, int min) +{ + PyObject *key=0; + Bucket *bucket = NULL; + int offset, rc; + int empty_tree = 1; + + UNLESS (PyArg_ParseTuple(args, "|O", &key)) + return NULL; + + UNLESS (PER_USE(self)) + return NULL; + + UNLESS (self->data && self->len) + goto empty; + + /* Find the range */ + + if (key && key != Py_None) + { + if ((rc = BTree_findRangeEnd(self, key, min, 0, &bucket, &offset)) <= 0) + { + if (rc < 0) + goto err; + empty_tree = 0; + goto empty; + } + PER_UNUSE(self); + UNLESS (PER_USE(bucket)) + { + Py_DECREF(bucket); + return NULL; + } + } + else if (min) + { + bucket = self->firstbucket; + PER_UNUSE(self); + PER_USE_OR_RETURN(bucket, NULL); + Py_INCREF(bucket); + offset = 0; + } + else + { + bucket = BTree_lastBucket(self); + PER_UNUSE(self); + UNLESS (PER_USE(bucket)) + { + Py_DECREF(bucket); + return NULL; + } + assert(bucket->len); + offset = bucket->len - 1; + } + + COPY_KEY_TO_OBJECT(key, bucket->keys[offset]); + PER_UNUSE(bucket); + Py_DECREF(bucket); + + return key; + +empty: + PyErr_SetString(PyExc_ValueError, + empty_tree ? 
"empty tree" : + "no key satisfies the conditions"); +err: + PER_UNUSE(self); + if (bucket) + { + PER_UNUSE(bucket); + Py_DECREF(bucket); + } + return NULL; +} + +static PyObject * +BTree_minKey(BTree *self, PyObject *args) +{ + return BTree_maxminKey(self, args, 1); +} + +static PyObject * +BTree_maxKey(BTree *self, PyObject *args) +{ + return BTree_maxminKey(self, args, 0); +} + +/* +** BTree_rangeSearch +** +** Generates a BTreeItems object based on the two indexes passed in, +** being the range between them. +** +*/ +static PyObject * +BTree_rangeSearch(BTree *self, PyObject *args, PyObject *kw, char type) +{ + PyObject *min = Py_None; + PyObject *max = Py_None; + int excludemin = 0; + int excludemax = 0; + int rc; + Bucket *lowbucket = NULL; + Bucket *highbucket = NULL; + int lowoffset; + int highoffset; + PyObject *result; + + if (args) + { + if (! PyArg_ParseTupleAndKeywords(args, kw, "|OOii", search_keywords, + &min, + &max, + &excludemin, + &excludemax)) + return NULL; + } + + UNLESS (PER_USE(self)) + return NULL; + + UNLESS (self->data && self->len) + goto empty; + + /* Find the low range */ + if (min != Py_None) + { + if ((rc = BTree_findRangeEnd(self, min, 1, excludemin, + &lowbucket, &lowoffset)) <= 0) + { + if (rc < 0) + goto err; + goto empty; + } + } + else + { + lowbucket = self->firstbucket; + lowoffset = 0; + if (excludemin) + { + int bucketlen; + UNLESS (PER_USE(lowbucket)) + goto err; + bucketlen = lowbucket->len; + PER_UNUSE(lowbucket); + if (bucketlen > 1) + lowoffset = 1; + else if (self->len < 2) + goto empty; + else + { /* move to first item in next bucket */ + Bucket *next; + UNLESS (PER_USE(lowbucket)) + goto err; + next = lowbucket->next; + PER_UNUSE(lowbucket); + assert(next != NULL); + lowbucket = next; + /* and lowoffset is still 0 */ + assert(lowoffset == 0); + } + } + Py_INCREF(lowbucket); + } + + /* Find the high range */ + if (max != Py_None) + { + if ((rc = BTree_findRangeEnd(self, max, 0, excludemax, + &highbucket, 
&highoffset)) <= 0) + { + Py_DECREF(lowbucket); + if (rc < 0) + goto err; + goto empty; + } + } + else + { + int bucketlen; + highbucket = BTree_lastBucket(self); + assert(highbucket != NULL); /* we know self isn't empty */ + UNLESS (PER_USE(highbucket)) + goto err_and_decref_buckets; + bucketlen = highbucket->len; + PER_UNUSE(highbucket); + highoffset = bucketlen - 1; + if (excludemax) + { + if (highoffset > 0) + --highoffset; + else if (self->len < 2) + goto empty_and_decref_buckets; + else /* move to last item of preceding bucket */ + { + int status; + assert(highbucket != self->firstbucket); + Py_DECREF(highbucket); + status = PreviousBucket(&highbucket, self->firstbucket); + if (status < 0) + { + Py_DECREF(lowbucket); + goto err; + } + assert(status > 0); + Py_INCREF(highbucket); + UNLESS (PER_USE(highbucket)) + goto err_and_decref_buckets; + highoffset = highbucket->len - 1; + PER_UNUSE(highbucket); + } + } + assert(highoffset >= 0); + } + + /* It's still possible that the range is empty, even if min < max. For + * example, if min=3 and max=4, and 3 and 4 aren't in the BTree, but 2 and + * 5 are, then the low position points to the 5 now and the high position + * points to the 2 now. They're not necessarily even in the same bucket, + * so there's no trick we can play with pointer compares to get out + * cheap in general. + */ + if (lowbucket == highbucket && lowoffset > highoffset) + goto empty_and_decref_buckets; /* definitely empty */ + + /* The buckets differ, or they're the same and the offsets show a non- + * empty range. + */ + if (min != Py_None && max != Py_None && /* both args user-supplied */ + lowbucket != highbucket) /* and different buckets */ + { + KEY_TYPE first; + KEY_TYPE last; + int cmp; + + /* Have to check the hard way: see how the endpoints compare. 
*/ + UNLESS (PER_USE(lowbucket)) + goto err_and_decref_buckets; + COPY_KEY(first, lowbucket->keys[lowoffset]); + PER_UNUSE(lowbucket); + + UNLESS (PER_USE(highbucket)) + goto err_and_decref_buckets; + COPY_KEY(last, highbucket->keys[highoffset]); + PER_UNUSE(highbucket); + + TEST_KEY_SET_OR(cmp, first, last) + goto err_and_decref_buckets; + if (cmp > 0) + goto empty_and_decref_buckets; + } + + PER_UNUSE(self); + + result = newBTreeItems(type, lowbucket, lowoffset, highbucket, highoffset); + Py_DECREF(lowbucket); + Py_DECREF(highbucket); + return result; + +err_and_decref_buckets: + Py_DECREF(lowbucket); + Py_DECREF(highbucket); + +err: + PER_UNUSE(self); + return NULL; + +empty_and_decref_buckets: + Py_DECREF(lowbucket); + Py_DECREF(highbucket); + +empty: + PER_UNUSE(self); + return newBTreeItems(type, 0, 0, 0, 0); +} + +/* +** BTree_keys +*/ +static PyObject * +BTree_keys(BTree *self, PyObject *args, PyObject *kw) +{ + return BTree_rangeSearch(self, args, kw, 'k'); +} + +/* +** BTree_values +*/ +static PyObject * +BTree_values(BTree *self, PyObject *args, PyObject *kw) +{ + return BTree_rangeSearch(self, args, kw, 'v'); +} + +/* +** BTree_items +*/ +static PyObject * +BTree_items(BTree *self, PyObject *args, PyObject *kw) +{ + return BTree_rangeSearch(self, args, kw, 'i'); +} + +static PyObject * +BTree_byValue(BTree *self, PyObject *omin) +{ + PyObject *r=0, *o=0, *item=0; + VALUE_TYPE min; + VALUE_TYPE v; + int copied=1; + SetIteration it = {0, 0, 1}; + + UNLESS (PER_USE(self)) + return NULL; + + COPY_VALUE_FROM_ARG(min, omin, copied); + UNLESS(copied) + return NULL; + + UNLESS (r=PyList_New(0)) + goto err; + + it.set=BTree_rangeSearch(self, NULL, NULL, 'i'); + UNLESS(it.set) + goto err; + + if (nextBTreeItems(&it) < 0) + goto err; + + while (it.position >= 0) + { + if (TEST_VALUE(it.value, min) >= 0) + { + UNLESS (item = PyTuple_New(2)) + goto err; + + COPY_KEY_TO_OBJECT(o, it.key); + UNLESS (o) + goto err; + PyTuple_SET_ITEM(item, 1, o); + + COPY_VALUE(v, 
it.value); + NORMALIZE_VALUE(v, min); + COPY_VALUE_TO_OBJECT(o, v); + DECREF_VALUE(v); + UNLESS (o) + goto err; + PyTuple_SET_ITEM(item, 0, o); + + if (PyList_Append(r, item) < 0) + goto err; + Py_DECREF(item); + item = 0; + } + if (nextBTreeItems(&it) < 0) + goto err; + } + + item=PyObject_GetAttr(r,sort_str); + UNLESS (item) + goto err; + ASSIGN(item, PyObject_CallObject(item, NULL)); + UNLESS (item) + goto err; + ASSIGN(item, PyObject_GetAttr(r, reverse_str)); + UNLESS (item) + goto err; + ASSIGN(item, PyObject_CallObject(item, NULL)); + UNLESS (item) + goto err; + Py_DECREF(item); + + finiSetIteration(&it); + PER_UNUSE(self); + return r; + +err: + PER_UNUSE(self); + Py_XDECREF(r); + finiSetIteration(&it); + Py_XDECREF(item); + return NULL; +} + +/* +** BTree_getm +*/ +static PyObject * +BTree_getm(BTree *self, PyObject *args) +{ + PyObject *key, *d=Py_None, *r; + + UNLESS (PyArg_ParseTuple(args, "O|O", &key, &d)) + return NULL; + if ((r=_BTree_get(self, key, 0, _BGET_REPLACE_TYPE_ERROR))) + return r; + UNLESS (PyErr_ExceptionMatches(PyExc_KeyError)) + return NULL; + PyErr_Clear(); + Py_INCREF(d); + return d; +} + +static PyObject * +BTree_has_key(BTree *self, PyObject *key) +{ + return _BTree_get(self, key, 1, _BGET_REPLACE_TYPE_ERROR); +} + +static PyObject * +BTree_setdefault(BTree *self, PyObject *args) +{ + PyObject *key; + PyObject *failobj; /* default */ + PyObject *value; /* return value */ + + if (! PyArg_UnpackTuple(args, "setdefault", 2, 2, &key, &failobj)) + return NULL; + + value = _BTree_get(self, key, 0, _BGET_ALLOW_TYPE_ERROR); + if (value != NULL) + return value; + + /* The key isn't in the tree. If that's not due to a KeyError exception, + * pass back the unexpected exception. + */ + if (! PyErr_ExceptionMatches(PyExc_KeyError)) + return NULL; + PyErr_Clear(); + + /* Associate `key` with `failobj` in the tree, and return `failobj`. 
*/ + value = failobj; + if (_BTree_set(self, key, failobj, 0, 0) < 0) + value = NULL; + Py_XINCREF(value); + return value; +} + +/* forward declaration */ +static Py_ssize_t +BTree_length_or_nonzero(BTree *self, int nonzero); + +static PyObject * +BTree_pop(BTree *self, PyObject *args) +{ + PyObject *key; + PyObject *failobj = NULL; /* default */ + PyObject *value; /* return value */ + + if (! PyArg_UnpackTuple(args, "pop", 1, 2, &key, &failobj)) + return NULL; + + value = _BTree_get(self, key, 0, _BGET_ALLOW_TYPE_ERROR); + if (value != NULL) + { + /* Delete key and associated value. */ + if (_BTree_set(self, key, NULL, 0, 0) < 0) + { + Py_DECREF(value); + return NULL;; + } + return value; + } + + /* The key isn't in the tree. If that's not due to a KeyError exception, + * pass back the unexpected exception. + */ + if (! PyErr_ExceptionMatches(PyExc_KeyError)) + return NULL; + + if (failobj != NULL) + { + /* Clear the KeyError and return the explicit default. */ + PyErr_Clear(); + Py_INCREF(failobj); + return failobj; + } + + /* No default given. The only difference in this case is the error + * message, which depends on whether the tree is empty. + */ + if (BTree_length_or_nonzero(self, 1) == 0) /* tree is empty */ + PyErr_SetString(PyExc_KeyError, "pop(): BTree is empty"); + return NULL; +} + +/* Search BTree self for key. This is the sq_contains slot of the + * PySequenceMethods. + * + * Return: + * -1 error + * 0 not found + * 1 found + */ +static int +BTree_contains(BTree *self, PyObject *key) +{ + PyObject *asobj = _BTree_get(self, key, 1, _BGET_REPLACE_TYPE_ERROR); + int result = -1; + + if (asobj != NULL) + { + result = INT_AS_LONG(asobj) ? 
1 : 0; + Py_DECREF(asobj); + } + else if (PyErr_ExceptionMatches(PyExc_KeyError)) + { + PyErr_Clear(); + result = 0; + } + return result; +} + +static PyObject * +BTree_addUnique(BTree *self, PyObject *args) +{ + int grew; + PyObject *key, *v; + + UNLESS (PyArg_ParseTuple(args, "OO", &key, &v)) + return NULL; + + if ((grew=_BTree_set(self, key, v, 1, 0)) < 0) + return NULL; + return INT_FROM_LONG(grew); +} + +/**************************************************************************/ +/* Iterator support. */ + +/* A helper to build all the iterators for BTrees and TreeSets. + * If args is NULL, the iterator spans the entire structure. Else it's an + * argument tuple, with optional low and high arguments. + * kind is 'k', 'v' or 'i'. + * Returns a BTreeIter object, or NULL if error. + */ +static PyObject * +buildBTreeIter(BTree *self, PyObject *args, PyObject *kw, char kind) +{ + BTreeIter *result = NULL; + BTreeItems *items = (BTreeItems *)BTree_rangeSearch(self, args, kw, kind); + + if (items) + { + result = BTreeIter_new(items); + Py_DECREF(items); + } + return (PyObject *)result; +} + +/* The implementation of iter(BTree_or_TreeSet); the BTree tp_iter slot. */ +static PyObject * +BTree_getiter(BTree *self) +{ + return buildBTreeIter(self, NULL, NULL, 'k'); +} + +/* The implementation of BTree.iterkeys(). */ +static PyObject * +BTree_iterkeys(BTree *self, PyObject *args, PyObject *kw) +{ + return buildBTreeIter(self, args, kw, 'k'); +} + +/* The implementation of BTree.itervalues(). */ +static PyObject * +BTree_itervalues(BTree *self, PyObject *args, PyObject *kw) +{ + return buildBTreeIter(self, args, kw, 'v'); +} + +/* The implementation of BTree.iteritems(). */ +static PyObject * +BTree_iteritems(BTree *self, PyObject *args, PyObject *kw) +{ + return buildBTreeIter(self, args, kw, 'i'); +} + +/* End of iterator support. */ + + +/* Caution: Even though the _firstbucket attribute is read-only, a program + could do arbitrary damage to the btree internals. 
For example, it could + call clear() on a bucket inside a BTree. + + We need to decide if the convenience for inspecting BTrees is worth + the risk. +*/ + +static struct PyMemberDef BTree_members[] = { + {"_firstbucket", T_OBJECT, offsetof(BTree, firstbucket), READONLY}, + {NULL} +}; + +static struct PyMethodDef BTree_methods[] = { + {"__getstate__", (PyCFunction) BTree_getstate, METH_NOARGS, + "__getstate__() -> state\n\n" + "Return the picklable state of the BTree."}, + + {"__setstate__", (PyCFunction) BTree_setstate, METH_O, + "__setstate__(state)\n\n" + "Set the state of the BTree."}, + + {"has_key", (PyCFunction) BTree_has_key, METH_O, + "has_key(key)\n\n" + "Return true if the BTree contains the given key."}, + + {"keys", (PyCFunction) BTree_keys, METH_VARARGS | METH_KEYWORDS, + "keys([min, max]) -> list of keys\n\n" + "Returns the keys of the BTree. If min and max are supplied, only\n" + "keys greater than min and less than max are returned."}, + + {"values", (PyCFunction) BTree_values, METH_VARARGS | METH_KEYWORDS, + "values([min, max]) -> list of values\n\n" + "Returns the values of the BTree. If min and max are supplied, only\n" + "values corresponding to keys greater than min and less than max are\n" + "returned."}, + + {"items", (PyCFunction) BTree_items, METH_VARARGS | METH_KEYWORDS, + "items([min, max]) -> -- list of key, value pairs\n\n" + "Returns the items of the BTree. If min and max are supplied, only\n" + "items with keys greater than min and less than max are returned."}, + + {"byValue", (PyCFunction) BTree_byValue, METH_O, + "byValue(min) -> list of value, key pairs\n\n" + "Returns list of value, key pairs where the value is >= min. The\n" + "list is sorted by value. 
Note that items() returns keys in the\n" + "opposite order."}, + + {"get", (PyCFunction) BTree_getm, METH_VARARGS, + "get(key[, default=None]) -> Value for key or default\n\n" + "Return the value or the default if the key is not found."}, + + {"setdefault", (PyCFunction) BTree_setdefault, METH_VARARGS, + "D.setdefault(k, d) -> D.get(k, d), also set D[k]=d if k not in D.\n\n" + "Return the value like get() except that if key is missing, d is both\n" + "returned and inserted into the BTree as the value of k."}, + + {"pop", (PyCFunction) BTree_pop, METH_VARARGS, + "D.pop(k[, d]) -> v, remove key and return the corresponding value.\n\n" + "If key is not found, d is returned if given, otherwise KeyError\n" + "is raised."}, + + {"maxKey", (PyCFunction) BTree_maxKey, METH_VARARGS, + "maxKey([max]) -> key\n\n" + "Return the largest key in the BTree. If max is specified, return\n" + "the largest key <= max."}, + + {"minKey", (PyCFunction) BTree_minKey, METH_VARARGS, + "minKey([mi]) -> key\n\n" + "Return the smallest key in the BTree. If min is specified, return\n" + "the smallest key >= min."}, + + {"clear", (PyCFunction) BTree_clear, METH_NOARGS, + "clear()\n\nRemove all of the items from the BTree."}, + + {"insert", (PyCFunction)BTree_addUnique, METH_VARARGS, + "insert(key, value) -> 0 or 1\n\n" + "Add an item if the key is not already used. 
Return 1 if the item was\n" + "added, or 0 otherwise."}, + + {"update", (PyCFunction) Mapping_update, METH_O, + "update(collection)\n\n Add the items from the given collection."}, + + {"iterkeys", (PyCFunction) BTree_iterkeys, METH_VARARGS | METH_KEYWORDS, + "B.iterkeys([min[,max]]) -> an iterator over the keys of B"}, + + {"itervalues", (PyCFunction) BTree_itervalues, METH_VARARGS | METH_KEYWORDS, + "B.itervalues([min[,max]]) -> an iterator over the values of B"}, + + {"iteritems", (PyCFunction) BTree_iteritems, METH_VARARGS | METH_KEYWORDS, + "B.iteritems([min[,max]]) -> an iterator over the (key, value) " + "items of B"}, + + {"_check", (PyCFunction) BTree_check, METH_NOARGS, + "Perform sanity check on BTree, and raise exception if flawed."}, + +#ifdef PERSISTENT + {"_p_resolveConflict", + (PyCFunction) BTree__p_resolveConflict, METH_VARARGS, + "_p_resolveConflict() -- Reinitialize from a newly created copy"}, + + {"_p_deactivate", + (PyCFunction) BTree__p_deactivate, METH_VARARGS | METH_KEYWORDS, + "_p_deactivate()\n\nReinitialize from a newly created copy."}, +#endif + {NULL, NULL} +}; + +static int +BTree_init(PyObject *self, PyObject *args, PyObject *kwds) +{ + PyObject *v = NULL; + + BTREE(self)->max_leaf_size = 0; + BTREE(self)->max_internal_size = 0; + + if (!PyArg_ParseTuple(args, "|O:" MOD_NAME_PREFIX "BTree", &v)) + return -1; + + if (v) + return update_from_seq(self, v); + else + return 0; +} + +static void +BTree_dealloc(BTree *self) +{ + PyObject_GC_UnTrack((PyObject *)self); + if (self->state != cPersistent_GHOST_STATE) { + _BTree_clear(self); + } + cPersistenceCAPI->pertype->tp_dealloc((PyObject *)self); +} + +static int +BTree_traverse(BTree *self, visitproc visit, void *arg) +{ + int err = 0; + int i, len; + +#define VISIT(SLOT) \ + if (SLOT) { \ + err = visit((PyObject *)(SLOT), arg); \ + if (err) \ + goto Done; \ + } + + if (Py_TYPE(self) == &BTreeType) + assert(Py_TYPE(self)->tp_dictoffset == 0); + + /* Call our base type's traverse function. 
Because BTrees are + * subclasses of Peristent, there must be one. + */ + err = cPersistenceCAPI->pertype->tp_traverse((PyObject *)self, visit, arg); + if (err) + goto Done; + + /* If this is registered with the persistence system, cleaning up cycles + * is the database's problem. It would be horrid to unghostify BTree + * nodes here just to chase pointers every time gc runs. + */ + if (self->state == cPersistent_GHOST_STATE) + goto Done; + + len = self->len; +#ifdef KEY_TYPE_IS_PYOBJECT + /* Keys are Python objects so need to be traversed. Note that the + * key 0 slot is unused and should not be traversed. + */ + for (i = 1; i < len; i++) + VISIT(self->data[i].key); +#endif + + /* Children are always pointers, and child 0 is legit. */ + for (i = 0; i < len; i++) + VISIT(self->data[i].child); + + VISIT(self->firstbucket); + +Done: + return err; + +#undef VISIT +} + +static int +BTree_tp_clear(BTree *self) +{ + if (self->state != cPersistent_GHOST_STATE) + _BTree_clear(self); + return 0; +} + +/* + * Return the number of elements in a BTree. nonzero is a Boolean, and + * when true requests just a non-empty/empty result. Testing for emptiness + * is efficient (constant-time). Getting the true length takes time + * proportional to the number of leaves (buckets). + * + * Return: + * When nonzero true: + * -1 error + * 0 empty + * 1 not empty + * When nonzero false (possibly expensive!): + * -1 error + * >= 0 number of elements. 
+ */ +static Py_ssize_t +BTree_length_or_nonzero(BTree *self, int nonzero) +{ + int result; + Bucket *b; + Bucket *next; + + PER_USE_OR_RETURN(self, -1); + b = self->firstbucket; + PER_UNUSE(self); + if (nonzero) + return b != NULL; + + result = 0; + while (b) + { + PER_USE_OR_RETURN(b, -1); + result += b->len; + next = b->next; + PER_UNUSE(b); + b = next; + } + return result; +} + +static Py_ssize_t +BTree_length(BTree *self) +{ + return BTree_length_or_nonzero(self, 0); +} + +static PyMappingMethods BTree_as_mapping = { + (lenfunc)BTree_length, /* mp_length */ + (binaryfunc)BTree_get, /* mp_subscript */ + (objobjargproc)BTree_setitem, /* mp_ass_subscript */ +}; + +static PySequenceMethods BTree_as_sequence = { + (lenfunc)0, /* sq_length */ + (binaryfunc)0, /* sq_concat */ + (ssizeargfunc)0, /* sq_repeat */ + (ssizeargfunc)0, /* sq_item */ + (ssizessizeargfunc)0, /* sq_slice */ + (ssizeobjargproc)0, /* sq_ass_item */ + (ssizessizeobjargproc)0, /* sq_ass_slice */ + (objobjproc)BTree_contains, /* sq_contains */ + 0, /* sq_inplace_concat */ + 0, /* sq_inplace_repeat */ +}; + +static Py_ssize_t +BTree_nonzero(BTree *self) +{ + return BTree_length_or_nonzero(self, 1); +} + +static PyNumberMethods BTree_as_number_for_nonzero = { + 0, /* nb_add */ + 0, /* nb_subtract */ + 0, /* nb_multiply */ +#ifndef PY3K + 0, /* nb_divide */ +#endif + 0, /* nb_remainder */ + 0, /* nb_divmod */ + 0, /* nb_power */ + 0, /* nb_negative */ + 0, /* nb_positive */ + 0, /* nb_absolute */ + (inquiry)BTree_nonzero /* nb_nonzero */ +}; + +static PyTypeObject BTreeType = { + PyVarObject_HEAD_INIT(NULL, 0) + MODULE_NAME MOD_NAME_PREFIX "BTree", /* tp_name */ + sizeof(BTree), /* tp_basicsize */ + 0, /* tp_itemsize */ + (destructor)BTree_dealloc, /* tp_dealloc */ + 0, /* tp_print */ + 0, /* tp_getattr */ + 0, /* tp_setattr */ + 0, /* tp_compare */ + 0, /* tp_repr */ + &BTree_as_number_for_nonzero, /* tp_as_number */ + &BTree_as_sequence, /* tp_as_sequence */ + &BTree_as_mapping, /* tp_as_mapping */ 
+ 0, /* tp_hash */ + 0, /* tp_call */ + 0, /* tp_str */ + 0, /* tp_getattro */ + 0, /* tp_setattro */ + 0, /* tp_as_buffer */ + Py_TPFLAGS_DEFAULT | + Py_TPFLAGS_HAVE_GC | + Py_TPFLAGS_BASETYPE, /* tp_flags */ + 0, /* tp_doc */ + (traverseproc)BTree_traverse, /* tp_traverse */ + (inquiry)BTree_tp_clear, /* tp_clear */ + 0, /* tp_richcompare */ + 0, /* tp_weaklistoffset */ + (getiterfunc)BTree_getiter, /* tp_iter */ + 0, /* tp_iternext */ + BTree_methods, /* tp_methods */ + BTree_members, /* tp_members */ + 0, /* tp_getset */ + 0, /* tp_base */ + 0, /* tp_dict */ + 0, /* tp_descr_get */ + 0, /* tp_descr_set */ + 0, /* tp_dictoffset */ + BTree_init, /* tp_init */ + 0, /* tp_alloc */ + 0, /*PyType_GenericNew,*/ /* tp_new */ +}; diff --git a/thesisenv/lib/python3.6/site-packages/BTrees/BucketTemplate.c b/thesisenv/lib/python3.6/site-packages/BTrees/BucketTemplate.c new file mode 100644 index 0000000..27db39a --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/BTrees/BucketTemplate.c @@ -0,0 +1,1957 @@ +/***************************************************************************** + + Copyright (c) 2001, 2002 Zope Foundation and Contributors. + All Rights Reserved. + + This software is subject to the provisions of the Zope Public License, + Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. + THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED + WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED + WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS + FOR A PARTICULAR PURPOSE + +****************************************************************************/ + +#define BUCKETTEMPLATE_C "$Id$\n" + +/* Use BUCKET_SEARCH to find the index at which a key belongs. + * INDEX An int lvalue to hold the index i such that KEY belongs at + * SELF->keys[i]. Note that this will equal SELF->len if KEY + * is larger than the bucket's largest key. Else it's the + * smallest i such that SELF->keys[i] >= KEY. 
+ * ABSENT An int lvalue to hold a Boolean result, true (!= 0) if the + * key is absent, false (== 0) if the key is at INDEX. + * SELF A pointer to a Bucket node. + * KEY The key you're looking for, of type KEY_TYPE. + * ONERROR What to do if key comparison raises an exception; for example, + * perhaps 'return NULL'. + * + * See Maintainer.txt for discussion: this is optimized in subtle ways. + * It's recommended that you call this at the start of a routine, waiting + * to check for self->len == 0 after (if an empty bucket is special in + * context; INDEX becomes 0 and ABSENT becomes true if this macro is run + * with an empty SELF, and that may be all the invoker needs to know). + */ +#define BUCKET_SEARCH(INDEX, ABSENT, SELF, KEY, ONERROR) { \ + int _lo = 0; \ + int _hi = (SELF)->len; \ + int _i; \ + int _cmp = 1; \ + for (_i = _hi >> 1; _lo < _hi; _i = (_lo + _hi) >> 1) { \ + TEST_KEY_SET_OR(_cmp, (SELF)->keys[_i], (KEY)) \ + ONERROR; \ + if (_cmp < 0) _lo = _i + 1; \ + else if (_cmp == 0) break; \ + else _hi = _i; \ + } \ + (INDEX) = _i; \ + (ABSENT) = _cmp; \ + } + +/* +** _bucket_get +** +** Search a bucket for a given key. +** +** Arguments +** self The bucket +** keyarg The key to look for +** has_key Boolean; if true, return a true/false result; else return +** the value associated with the key. +** +** Return +** If has_key: +** Returns the Python int 0 if the key is absent, else returns +** has_key itself as a Python int. A BTree caller generally passes +** the depth of the bucket for has_key, so a true result returns +** the bucket depth then. +** Note that has_key should be true when searching set buckets. +** If not has_key: +** If the key is present, returns the associated value, and the +** caller owns the reference. Else returns NULL and sets KeyError. +** Whether or not has_key: +** If a comparison sets an exception, returns NULL. 
+*/ +static PyObject * +_bucket_get(Bucket *self, PyObject *keyarg, int has_key) +{ + int i, cmp; + KEY_TYPE key; + PyObject *r = NULL; + int copied = 1; + + COPY_KEY_FROM_ARG(key, keyarg, copied); + UNLESS (copied) return NULL; + + UNLESS (PER_USE(self)) return NULL; + + BUCKET_SEARCH(i, cmp, self, key, goto Done); + if (has_key) + r = INT_FROM_LONG(cmp ? 0 : has_key); + else + { + if (cmp == 0) + { + COPY_VALUE_TO_OBJECT(r, self->values[i]); + } + else + PyErr_SetObject(PyExc_KeyError, keyarg); + } + +Done: + PER_UNUSE(self); + return r; +} + +static PyObject * +bucket_getitem(Bucket *self, PyObject *key) +{ + return _bucket_get(self, key, 0); +} + +/* +** Bucket_grow +** +** Resize a bucket. +** +** Arguments: self The bucket. +** newsize The new maximum capacity. If < 0, double the +** current size unless the bucket is currently empty, +** in which case use MIN_BUCKET_ALLOC. +** noval Boolean; if true, allocate only key space and not +** value space +** +** Returns: -1 on error, and MemoryError exception is set +** 0 on success +*/ +static int +Bucket_grow(Bucket *self, int newsize, int noval) +{ + KEY_TYPE *keys; + VALUE_TYPE *values; + + if (self->size) + { + if (newsize < 0) + newsize = self->size * 2; + if (newsize < 0) /* int overflow */ + goto Overflow; + UNLESS (keys = BTree_Realloc(self->keys, sizeof(KEY_TYPE) * newsize)) + return -1; + + UNLESS (noval) + { + values = BTree_Realloc(self->values, sizeof(VALUE_TYPE) * newsize); + if (values == NULL) + { + free(keys); + return -1; + } + self->values = values; + } + self->keys = keys; + } + else + { + if (newsize < 0) + newsize = MIN_BUCKET_ALLOC; + UNLESS (self->keys = BTree_Malloc(sizeof(KEY_TYPE) * newsize)) + return -1; + UNLESS (noval) + { + self->values = BTree_Malloc(sizeof(VALUE_TYPE) * newsize); + if (self->values == NULL) + { + free(self->keys); + self->keys = NULL; + return -1; + } + } + } + self->size = newsize; + return 0; + +Overflow: + PyErr_NoMemory(); + return -1; +} + +/* So far, 
bucket_append is called only by multiunion_m(), so is called + * only when MULTI_INT_UNION is defined. Flavors of BTree/Bucket that + * don't support MULTI_INT_UNION don't call bucket_append (yet), and + * gcc complains if bucket_append is compiled in those cases. So only + * compile bucket_append if it's going to be used. + */ +#ifdef MULTI_INT_UNION +/* + * Append a slice of the "from" bucket to self. + * + * self Append (at least keys) to this bucket. self must be activated + * upon entry, and remains activated at exit. If copyValues + * is true, self must be empty or already have a non-NULL values + * pointer. self's access and modification times aren't updated. + * from The bucket from which to take keys, and possibly values. from + * must be activated upon entry, and remains activated at exit. + * If copyValues is true, from must have a non-NULL values + * pointer. self and from must not be the same. from's access + * time isn't updated. + * i, n The slice from[i : i+n] is appended to self. Must have + * i >= 0, n > 0 and i+n <= from->len. + * copyValues Boolean. If true, copy values from the slice as well as keys. + * In this case, from must have a non-NULL values pointer, and + * self must too (unless self is empty, in which case a values + * vector will be allocated for it). + * overallocate Boolean. If self doesn't have enough room upon entry to hold + * all the appended stuff, then if overallocate is false exactly + * enough room will be allocated to hold the new stuff, else if + * overallocate is true an excess will be allocated. overallocate + * may be a good idea if you expect to append more stuff to self + * later; else overallocate should be false. + * + * CAUTION: If self is empty upon entry (self->size == 0), and copyValues is + * false, then no space for values will get allocated. This can be a trap if + * the caller intends to copy values itself. + * + * Return + * -1 Error. + * 0 OK. 
+ */ +static int +bucket_append(Bucket *self, Bucket *from, int i, int n, + int copyValues, int overallocate) +{ + int newlen; + + assert(self && from && self != from); + assert(i >= 0); + assert(n > 0); + assert(i+n <= from->len); + + /* Make room. */ + newlen = self->len + n; + if (newlen > self->size) + { + int newsize = newlen; + if (overallocate) /* boost by 25% -- pretty arbitrary */ + newsize += newsize >> 2; + if (Bucket_grow(self, newsize, ! copyValues) < 0) + return -1; + } + assert(newlen <= self->size); + + /* Copy stuff. */ + memcpy(self->keys + self->len, from->keys + i, n * sizeof(KEY_TYPE)); + if (copyValues) + { + assert(self->values); + assert(from->values); + memcpy(self->values + self->len, from->values + i, + n * sizeof(VALUE_TYPE)); + } + self->len = newlen; + + /* Bump refcounts. */ +#ifdef KEY_TYPE_IS_PYOBJECT + { + int j; + PyObject **p = from->keys + i; + for (j = 0; j < n; ++j, ++p) + { + Py_INCREF(*p); + } + } +#endif +#ifdef VALUE_TYPE_IS_PYOBJECT + if (copyValues) + { + int j; + PyObject **p = from->values + i; + for (j = 0; j < n; ++j, ++p) + { + Py_INCREF(*p); + } + } +#endif + return 0; +} +#endif /* MULTI_INT_UNION */ + +/* +** _bucket_set: Assign a value to a key in a bucket, delete a key+value +** pair, or just insert a key. +** +** Arguments +** self The bucket +** keyarg The key to look for +** v The value to associate with key; NULL means delete the key. +** If NULL, it's an error (KeyError) if the key isn't present. +** Note that if this is a set bucket, and you want to insert +** a new set element, v must be non-NULL although its exact +** value will be ignored. Passing Py_None is good for this. +** unique Boolean; when true, don't replace the value if the key is +** already present. 
+** noval Boolean; when true, operate on keys only (ignore values) +** changed ignored on input +** +** Return +** -1 on error +** 0 on success and the # of bucket entries didn't change +** 1 on success and the # of bucket entries did change +** *changed If non-NULL, set to 1 on any mutation of the bucket. +*/ +static int +_bucket_set(Bucket *self, PyObject *keyarg, PyObject *v, + int unique, int noval, int *changed) +{ + int i, cmp; + KEY_TYPE key; + + /* Subtle: there may or may not be a value. If there is, we need to + * check its type early, so that in case of error we can get out before + * mutating the bucket. But because value isn't used on all paths, if + * we don't initialize value then gcc gives a nuisance complaint that + * value may be used initialized (it can't be, but gcc doesn't know + * that). So we initialize it. However, VALUE_TYPE can be various types, + * including int, PyObject*, and char[6], so it's a puzzle to spell + * initialization. It so happens that {0} is a valid initializer for all + * these types. + */ + VALUE_TYPE value = {0}; /* squash nuisance warning */ + int result = -1; /* until proven innocent */ + int copied = 1; + + COPY_KEY_FROM_ARG(key, keyarg, copied); + UNLESS(copied) + return -1; +#ifdef KEY_CHECK_ON_SET + if (v && !KEY_CHECK_ON_SET(keyarg)) + return -1; +#endif + + /* Copy the value early (if needed), so that in case of error a + * pile of bucket mutations don't need to be undone. + */ + if (v && !noval) { + COPY_VALUE_FROM_ARG(value, v, copied); + UNLESS(copied) + return -1; + } + + UNLESS (PER_USE(self)) + return -1; + + BUCKET_SEARCH(i, cmp, self, key, goto Done); + if (cmp == 0) + { + /* The key exists, at index i. */ + if (v) + { + /* The key exists at index i, and there's a new value. + * If unique, we're not supposed to replace it. If noval, or this + * is a set bucket (self->values is NULL), there's nothing to do. 
+ */ + if (unique || noval || self->values == NULL) + { + result = 0; + goto Done; + } + + /* The key exists at index i, and we need to replace the value. */ +#ifdef VALUE_SAME + /* short-circuit if no change */ + if (VALUE_SAME(self->values[i], value)) + { + result = 0; + goto Done; + } +#endif + if (changed) + *changed = 1; + DECREF_VALUE(self->values[i]); + COPY_VALUE(self->values[i], value); + INCREF_VALUE(self->values[i]); + if (PER_CHANGED(self) >= 0) + result = 0; + goto Done; + } + + /* The key exists at index i, and should be deleted. */ + DECREF_KEY(self->keys[i]); + self->len--; + if (i < self->len) + memmove(self->keys + i, self->keys + i+1, + sizeof(KEY_TYPE)*(self->len - i)); + + if (self->values) + { + DECREF_VALUE(self->values[i]); + if (i < self->len) + memmove(self->values + i, self->values + i+1, + sizeof(VALUE_TYPE)*(self->len - i)); + } + + if (! self->len) + { + self->size = 0; + free(self->keys); + self->keys = NULL; + if (self->values) + { + free(self->values); + self->values = NULL; + } + } + + if (changed) + *changed = 1; + if (PER_CHANGED(self) >= 0) + result = 1; + goto Done; + } + + /* The key doesn't exist, and belongs at index i. */ + if (!v) + { + /* Can't delete a non-existent key. */ + PyErr_SetObject(PyExc_KeyError, keyarg); + goto Done; + } + + /* The key doesn't exist and should be inserted at index i. */ + if (self->len == self->size && Bucket_grow(self, -1, noval) < 0) + goto Done; + + if (self->len > i) + { + memmove(self->keys + i + 1, self->keys + i, + sizeof(KEY_TYPE) * (self->len - i)); + if (self->values) + { + memmove(self->values + i + 1, self->values + i, + sizeof(VALUE_TYPE) * (self->len - i)); + } + } + + COPY_KEY(self->keys[i], key); + INCREF_KEY(self->keys[i]); + + if (! 
noval) + { + COPY_VALUE(self->values[i], value); + INCREF_VALUE(self->values[i]); + } + + self->len++; + if (changed) + *changed = 1; + if (PER_CHANGED(self) >= 0) + result = 1; + +Done: + PER_UNUSE(self); + return result; +} + +/* +** bucket_setitem +** +** wrapper for _bucket_set (eliminates +1 return code) +** +** Arguments: self The bucket +** key The key to insert under +** v The value to insert +** +** Returns 0 on success +** -1 on failure +*/ +static int +bucket_setitem(Bucket *self, PyObject *key, PyObject *v) +{ + if (_bucket_set(self, key, v, 0, 0, 0) < 0) + return -1; + return 0; +} + +/** + ** Accepts a sequence of 2-tuples, or any object with an items() + ** method that returns an iterable object producing 2-tuples. + */ +static int +update_from_seq(PyObject *map, PyObject *seq) +{ + PyObject *iter, *o, *k, *v; + int err = -1; + + /* One path creates a new seq object. The other path has an + INCREF of the seq argument. So seq must always be DECREFed on + the way out. + */ + /* Use items() if it's not a sequence. Alas, PySequence_Check() + * returns true for a PeristentMapping or PersistentDict, and we + * want to use items() in those cases too. 
+ */ +#ifdef PY3K +#define ITERITEMS "items" +#else +#define ITERITEMS "iteritems" +#endif + if (!PySequence_Check(seq) || /* or it "looks like a dict" */ + PyObject_HasAttrString(seq, ITERITEMS)) +#undef ITERITEMS + { + PyObject *items; + items = PyObject_GetAttrString(seq, "items"); + if (items == NULL) + return -1; + seq = PyObject_CallObject(items, NULL); + Py_DECREF(items); + if (seq == NULL) + return -1; + } + else + Py_INCREF(seq); + + iter = PyObject_GetIter(seq); + if (iter == NULL) + goto err; + while (1) + { + o = PyIter_Next(iter); + if (o == NULL) + { + if (PyErr_Occurred()) + goto err; + else + break; + } + if (!PyTuple_Check(o) || PyTuple_GET_SIZE(o) != 2) + { + Py_DECREF(o); + PyErr_SetString(PyExc_TypeError, + "Sequence must contain 2-item tuples"); + goto err; + } + k = PyTuple_GET_ITEM(o, 0); + v = PyTuple_GET_ITEM(o, 1); + if (PyObject_SetItem(map, k, v) < 0) + { + Py_DECREF(o); + goto err; + } + Py_DECREF(o); + } + + err = 0; +err: + Py_DECREF(iter); + Py_DECREF(seq); + return err; +} + +static PyObject * +Mapping_update(PyObject *self, PyObject *seq) +{ + if (update_from_seq(self, seq) < 0) + return NULL; + Py_INCREF(Py_None); + return Py_None; +} + +/* +** bucket_split +** +** Splits one bucket into two +** +** Arguments: self The bucket +** index the index of the key to split at (O.O.B use midpoint) +** next the new bucket to split into +** +** Returns: 0 on success +** -1 on failure +*/ +static int +bucket_split(Bucket *self, int index, Bucket *next) +{ + int next_size; + + ASSERT(self->len > 1, "split of empty bucket", -1); + + if (index < 0 || index >= self->len) + index = self->len / 2; + + next_size = self->len - index; + + next->keys = BTree_Malloc(sizeof(KEY_TYPE) * next_size); + if (!next->keys) + return -1; + memcpy(next->keys, self->keys + index, sizeof(KEY_TYPE) * next_size); + if (self->values) { + next->values = BTree_Malloc(sizeof(VALUE_TYPE) * next_size); + if (!next->values) { + free(next->keys); + next->keys = NULL; + return 
-1; + } + memcpy(next->values, self->values + index, + sizeof(VALUE_TYPE) * next_size); + } + next->size = next_size; + next->len = next_size; + self->len = index; + + next->next = self->next; + + Py_INCREF(next); + self->next = next; + + if (PER_CHANGED(self) < 0) + return -1; + + return 0; +} + +/* Set self->next to self->next->next, i.e. unlink self's successor from + * the chain. + * + * Return: + * -1 error + * 0 OK + */ +static int +Bucket_deleteNextBucket(Bucket *self) +{ + int result = -1; /* until proven innocent */ + Bucket *successor; + + PER_USE_OR_RETURN(self, -1); + successor = self->next; + if (successor) + { + Bucket *next; + /* Before: self -> successor -> next + * After: self --------------> next + */ + UNLESS (PER_USE(successor)) + goto Done; + next = successor->next; + PER_UNUSE(successor); + + Py_XINCREF(next); /* it may be NULL, of course */ + self->next = next; + Py_DECREF(successor); + if (PER_CHANGED(self) < 0) + goto Done; + } + result = 0; + +Done: + PER_UNUSE(self); + return result; +} + +/* + Bucket_findRangeEnd -- Find the index of a range endpoint + (possibly) contained in a bucket. + + Arguments: self The bucket + keyarg The key to match against + low Boolean; true for low end of range, false for high + exclude_equal Boolean; if true, don't accept an exact match, + and if there is one then move right if low and + left if !low. + offset The output offset + + If low true, *offset <- index of the smallest item >= key, + if low false the index of the largest item <= key. In either case, if there + is no such index, *offset is left alone and 0 is returned. + + Return: + 0 No suitable index exists; *offset has not been changed + 1 The correct index was stored into *offset + -1 Error + + Example: Suppose the keys are [2, 4], and exclude_equal is false. Searching + for 2 sets *offset to 0 and returns 1 regardless of low. Searching for 4 + sets *offset to 1 and returns 1 regardless of low. 
+ Searching for 1: + If low true, sets *offset to 0, returns 1. + If low false, returns 0. + Searching for 3: + If low true, sets *offset to 1, returns 1. + If low false, sets *offset to 0, returns 1. + Searching for 5: + If low true, returns 0. + If low false, sets *offset to 1, returns 1. + + The 1, 3 and 5 examples are the same when exclude_equal is true. +*/ +static int +Bucket_findRangeEnd(Bucket *self, PyObject *keyarg, int low, int exclude_equal, + int *offset) +{ + int i, cmp; + int result = -1; /* until proven innocent */ + KEY_TYPE key; + int copied = 1; + + COPY_KEY_FROM_ARG(key, keyarg, copied); + UNLESS (copied) + return -1; + + UNLESS (PER_USE(self)) + return -1; + + BUCKET_SEARCH(i, cmp, self, key, goto Done); + if (cmp == 0) { + /* exact match at index i */ + if (exclude_equal) + { + /* but we don't want an exact match */ + if (low) + ++i; + else + --i; + } + } + /* Else keys[i-1] < key < keys[i], picturing infinities at OOB indices, + * and i has the smallest item > key, which is correct for low. + */ + else if (! low) + /* i-1 has the largest item < key (unless i-1 is 0OB) */ + --i; + + result = 0 <= i && i < self->len; + if (result) + *offset = i; + +Done: + PER_UNUSE(self); + return result; +} + +static PyObject * +Bucket_maxminKey(Bucket *self, PyObject *args, int min) +{ + PyObject *key=0; + int rc, offset = 0; + int empty_bucket = 1; + + if (args && ! PyArg_ParseTuple(args, "|O", &key)) + return NULL; + + PER_USE_OR_RETURN(self, NULL); + + UNLESS (self->len) + goto empty; + + /* Find the low range */ + if (key && key != Py_None) + { + if ((rc = Bucket_findRangeEnd(self, key, min, 0, &offset)) <= 0) + { + if (rc < 0) + return NULL; + empty_bucket = 0; + goto empty; + } + } + else if (min) + offset = 0; + else + offset = self->len -1; + + COPY_KEY_TO_OBJECT(key, self->keys[offset]); + PER_UNUSE(self); + + return key; + +empty: + PyErr_SetString(PyExc_ValueError, + empty_bucket ? 
"empty bucket" : + "no key satisfies the conditions"); + PER_UNUSE(self); + return NULL; +} + +static PyObject * +Bucket_minKey(Bucket *self, PyObject *args) +{ + return Bucket_maxminKey(self, args, 1); +} + +static PyObject * +Bucket_maxKey(Bucket *self, PyObject *args) +{ + return Bucket_maxminKey(self, args, 0); +} + +static int +Bucket_rangeSearch(Bucket *self, PyObject *args, PyObject *kw, + int *low, int *high) +{ + PyObject *min = Py_None; + PyObject *max = Py_None; + int excludemin = 0; + int excludemax = 0; + int rc; + + if (args) + { + if (! PyArg_ParseTupleAndKeywords(args, kw, "|OOii", search_keywords, + &min, + &max, + &excludemin, + &excludemax)) + return -1; + } + + UNLESS (self->len) + goto empty; + + /* Find the low range */ + if (min != Py_None) + { + rc = Bucket_findRangeEnd(self, min, 1, excludemin, low); + if (rc < 0) + return -1; + if (rc == 0) + goto empty; + } + else + { + *low = 0; + if (excludemin) + { + if (self->len < 2) + goto empty; + ++*low; + } + } + + /* Find the high range */ + if (max != Py_None) + { + rc = Bucket_findRangeEnd(self, max, 0, excludemax, high); + if (rc < 0) + return -1; + if (rc == 0) + goto empty; + } + else + { + *high = self->len - 1; + if (excludemax) + { + if (self->len < 2) + goto empty; + --*high; + } + } + + /* If min < max to begin with, it's quite possible that low > high now. 
*/ + if (*low <= *high) + return 0; + +empty: + *low = 0; + *high = -1; + return 0; +} + +/* +** bucket_keys +** +** Generate a list of all keys in the bucket +** +** Arguments: self The Bucket +** args (unused) +** +** Returns: list of bucket keys +*/ +static PyObject * +bucket_keys(Bucket *self, PyObject *args, PyObject *kw) +{ + PyObject *r = NULL, *key; + int i, low, high; + + PER_USE_OR_RETURN(self, NULL); + + if (Bucket_rangeSearch(self, args, kw, &low, &high) < 0) + goto err; + + r = PyList_New(high-low+1); + if (r == NULL) + goto err; + + for (i=low; i <= high; i++) + { + COPY_KEY_TO_OBJECT(key, self->keys[i]); + if (PyList_SetItem(r, i-low , key) < 0) + goto err; + } + + PER_UNUSE(self); + return r; + +err: + PER_UNUSE(self); + Py_XDECREF(r); + return NULL; +} + +/* +** bucket_values +** +** Generate a list of all values in the bucket +** +** Arguments: self The Bucket +** args (unused) +** +** Returns list of values +*/ +static PyObject * +bucket_values(Bucket *self, PyObject *args, PyObject *kw) +{ + PyObject *r=0, *v; + int i, low, high; + + PER_USE_OR_RETURN(self, NULL); + + if (Bucket_rangeSearch(self, args, kw, &low, &high) < 0) + goto err; + + UNLESS (r=PyList_New(high-low+1)) + goto err; + + for (i=low; i <= high; i++) + { + COPY_VALUE_TO_OBJECT(v, self->values[i]); + UNLESS (v) + goto err; + if (PyList_SetItem(r, i-low, v) < 0) + goto err; + } + + PER_UNUSE(self); + return r; + +err: + PER_UNUSE(self); + Py_XDECREF(r); + return NULL; +} + +/* +** bucket_items +** +** Returns a list of all items in a bucket +** +** Arguments: self The Bucket +** args (unused) +** +** Returns: list of all items in the bucket +*/ +static PyObject * +bucket_items(Bucket *self, PyObject *args, PyObject *kw) +{ + PyObject *r=0, *o=0, *item=0; + int i, low, high; + + PER_USE_OR_RETURN(self, NULL); + + if (Bucket_rangeSearch(self, args, kw, &low, &high) < 0) + goto err; + + UNLESS (r=PyList_New(high-low+1)) + goto err; + + for (i=low; i <= high; i++) + { + UNLESS (item = 
PyTuple_New(2)) + goto err; + + COPY_KEY_TO_OBJECT(o, self->keys[i]); + UNLESS (o) + goto err; + PyTuple_SET_ITEM(item, 0, o); + + COPY_VALUE_TO_OBJECT(o, self->values[i]); + UNLESS (o) + goto err; + PyTuple_SET_ITEM(item, 1, o); + + if (PyList_SetItem(r, i-low, item) < 0) + goto err; + + item = 0; + } + + PER_UNUSE(self); + return r; + +err: + PER_UNUSE(self); + Py_XDECREF(r); + Py_XDECREF(item); + return NULL; +} + +static PyObject * +bucket_byValue(Bucket *self, PyObject *omin) +{ + PyObject *r=0, *o=0, *item=0; + VALUE_TYPE min; + VALUE_TYPE v; + int i, l, copied=1; + + PER_USE_OR_RETURN(self, NULL); + + COPY_VALUE_FROM_ARG(min, omin, copied); + UNLESS(copied) + return NULL; + + for (i=0, l=0; i < self->len; i++) + if (TEST_VALUE(self->values[i], min) >= 0) + l++; + + UNLESS (r=PyList_New(l)) + goto err; + + for (i=0, l=0; i < self->len; i++) + { + if (TEST_VALUE(self->values[i], min) < 0) + continue; + + UNLESS (item = PyTuple_New(2)) + goto err; + + COPY_KEY_TO_OBJECT(o, self->keys[i]); + UNLESS (o) + goto err; + PyTuple_SET_ITEM(item, 1, o); + + COPY_VALUE(v, self->values[i]); + NORMALIZE_VALUE(v, min); + COPY_VALUE_TO_OBJECT(o, v); + DECREF_VALUE(v); + UNLESS (o) + goto err; + PyTuple_SET_ITEM(item, 0, o); + + if (PyList_SetItem(r, l, item) < 0) + goto err; + l++; + + item = 0; + } + + item=PyObject_GetAttr(r,sort_str); + UNLESS (item) + goto err; + ASSIGN(item, PyObject_CallObject(item, NULL)); + UNLESS (item) + goto err; + ASSIGN(item, PyObject_GetAttr(r, reverse_str)); + UNLESS (item) + goto err; + ASSIGN(item, PyObject_CallObject(item, NULL)); + UNLESS (item) + goto err; + Py_DECREF(item); + + PER_UNUSE(self); + return r; + +err: + PER_UNUSE(self); + Py_XDECREF(r); + Py_XDECREF(item); + return NULL; +} + +static int +_bucket_clear(Bucket *self) +{ + const int len = self->len; + /* Don't declare i at this level. If neither keys nor values are + * PyObject*, i won't be referenced, and you'll get a nuisance compiler + * wng for declaring it here. 
+ */ + self->len = self->size = 0; + + if (self->next) + { + Py_DECREF(self->next); + self->next = NULL; + } + + /* Silence compiler warning about unused variable len for the case + when neither key nor value is an object, i.e. II. */ + (void)len; + + if (self->keys) + { +#ifdef KEY_TYPE_IS_PYOBJECT + int i; + for (i = 0; i < len; ++i) + DECREF_KEY(self->keys[i]); +#endif + free(self->keys); + self->keys = NULL; + } + + if (self->values) + { +#ifdef VALUE_TYPE_IS_PYOBJECT + int i; + for (i = 0; i < len; ++i) + DECREF_VALUE(self->values[i]); +#endif + free(self->values); + self->values = NULL; + } + return 0; +} + +#ifdef PERSISTENT +static PyObject * +bucket__p_deactivate(Bucket *self, PyObject *args, PyObject *keywords) +{ + int ghostify = 1; + PyObject *force = NULL; + + if (args && PyTuple_GET_SIZE(args) > 0) + { + PyErr_SetString(PyExc_TypeError, + "_p_deactivate takes no positional arguments"); + return NULL; + } + if (keywords) + { + int size = PyDict_Size(keywords); + force = PyDict_GetItemString(keywords, "force"); + if (force) + size--; + if (size) { + PyErr_SetString(PyExc_TypeError, + "_p_deactivate only accepts keyword arg force"); + return NULL; + } + } + + if (self->jar && self->oid) + { + ghostify = self->state == cPersistent_UPTODATE_STATE; + if (!ghostify && force) { + if (PyObject_IsTrue(force)) + ghostify = 1; + if (PyErr_Occurred()) + return NULL; + } + if (ghostify) { + if (_bucket_clear(self) < 0) + return NULL; + PER_GHOSTIFY(self); + } + } + Py_INCREF(Py_None); + return Py_None; +} +#endif + +static PyObject * +bucket_clear(Bucket *self, PyObject *args) +{ + PER_USE_OR_RETURN(self, NULL); + + if (self->len) + { + if (_bucket_clear(self) < 0) + return NULL; + if (PER_CHANGED(self) < 0) + goto err; + } + PER_UNUSE(self); + Py_INCREF(Py_None); + return Py_None; + +err: + PER_UNUSE(self); + return NULL; +} + +/* + * Return: + * + * For a set bucket (self->values is NULL), a one-tuple or two-tuple. 
The + * first element is a tuple of keys, of length self->len. The second element + * is the next bucket, present if and only if next is non-NULL: + * + * ( + * (keys[0], keys[1], ..., keys[len-1]), + * next iff non-NULL> + * ) + * + * For a mapping bucket (self->values is not NULL), a one-tuple or two-tuple. + * The first element is a tuple interleaving keys and values, of length + * 2 * self->len. The second element is the next bucket, present iff next is + * non-NULL: + * + * ( + * (keys[0], values[0], keys[1], values[1], ..., + * keys[len-1], values[len-1]), + * next iff non-NULL> + * ) + */ +static PyObject * +bucket_getstate(Bucket *self) +{ + PyObject *o = NULL, *items = NULL, *state; + int i, len, l; + + PER_USE_OR_RETURN(self, NULL); + + len = self->len; + + if (self->values) /* Bucket */ + { + items = PyTuple_New(len * 2); + if (items == NULL) + goto err; + for (i = 0, l = 0; i < len; i++) { + COPY_KEY_TO_OBJECT(o, self->keys[i]); + if (o == NULL) + goto err; + PyTuple_SET_ITEM(items, l, o); + l++; + + COPY_VALUE_TO_OBJECT(o, self->values[i]); + if (o == NULL) + goto err; + PyTuple_SET_ITEM(items, l, o); + l++; + } + } + else /* Set */ + { + items = PyTuple_New(len); + if (items == NULL) + goto err; + for (i = 0; i < len; i++) { + COPY_KEY_TO_OBJECT(o, self->keys[i]); + if (o == NULL) + goto err; + PyTuple_SET_ITEM(items, i, o); + } + } + + if (self->next) + state = Py_BuildValue("OO", items, self->next); + else + state = Py_BuildValue("(O)", items); + Py_DECREF(items); + + PER_UNUSE(self); + return state; + +err: + PER_UNUSE(self); + Py_XDECREF(items); + return NULL; +} + +static int +_bucket_setstate(Bucket *self, PyObject *state) +{ + PyObject *k, *v, *items; + Bucket *next = NULL; + int i, l, len, copied=1; + KEY_TYPE *keys; + VALUE_TYPE *values; + + if (!PyArg_ParseTuple(state, "O|O:__setstate__", &items, &next)) + return -1; + + if (!PyTuple_Check(items)) { + PyErr_SetString(PyExc_TypeError, + "tuple required for first state element"); + return -1; 
+ } + + len = PyTuple_Size(items); + if (len < 0) + return -1; + len /= 2; + + for (i = self->len; --i >= 0; ) { + DECREF_KEY(self->keys[i]); + DECREF_VALUE(self->values[i]); + } + self->len = 0; + + if (self->next) { + Py_DECREF(self->next); + self->next = NULL; + } + + if (len > self->size) { + keys = BTree_Realloc(self->keys, sizeof(KEY_TYPE)*len); + if (keys == NULL) + return -1; + values = BTree_Realloc(self->values, sizeof(VALUE_TYPE)*len); + if (values == NULL) + return -1; + self->keys = keys; + self->values = values; + self->size = len; + } + + for (i=0, l=0; i < len; i++) { + k = PyTuple_GET_ITEM(items, l); + l++; + v = PyTuple_GET_ITEM(items, l); + l++; + + COPY_KEY_FROM_ARG(self->keys[i], k, copied); + if (!copied) + return -1; + COPY_VALUE_FROM_ARG(self->values[i], v, copied); + if (!copied) + return -1; + INCREF_KEY(self->keys[i]); + INCREF_VALUE(self->values[i]); + } + + self->len = len; + + if (next) { + self->next = next; + Py_INCREF(next); + } + + return 0; +} + +static PyObject * +bucket_setstate(Bucket *self, PyObject *state) +{ + int r; + + PER_PREVENT_DEACTIVATION(self); + r = _bucket_setstate(self, state); + PER_UNUSE(self); + + if (r < 0) + return NULL; + Py_INCREF(Py_None); + return Py_None; +} + +static PyObject * +bucket_has_key(Bucket *self, PyObject *key) +{ + return _bucket_get(self, key, 1); +} + +static PyObject * +bucket_setdefault(Bucket *self, PyObject *args) +{ + PyObject *key; + PyObject *failobj; /* default */ + PyObject *value; /* return value */ + int dummy_changed; /* in order to call _bucket_set */ + + if (! PyArg_UnpackTuple(args, "setdefault", 2, 2, &key, &failobj)) + return NULL; + + value = _bucket_get(self, key, 0); + if (value != NULL) + return value; + + /* The key isn't in the bucket. If that's not due to a KeyError exception, + * pass back the unexpected exception. + */ + if (! 
PyErr_ExceptionMatches(PyExc_KeyError)) + return NULL; + PyErr_Clear(); + + /* Associate `key` with `failobj` in the bucket, and return `failobj`. */ + value = failobj; + if (_bucket_set(self, key, failobj, 0, 0, &dummy_changed) < 0) + value = NULL; + Py_XINCREF(value); + return value; +} + + +/* forward declaration */ +static int +Bucket_length(Bucket *self); + +static PyObject * +bucket_pop(Bucket *self, PyObject *args) +{ + PyObject *key; + PyObject *failobj = NULL; /* default */ + PyObject *value; /* return value */ + int dummy_changed; /* in order to call _bucket_set */ + + if (! PyArg_UnpackTuple(args, "pop", 1, 2, &key, &failobj)) + return NULL; + + value = _bucket_get(self, key, 0); + if (value != NULL) { + /* Delete key and associated value. */ + if (_bucket_set(self, key, NULL, 0, 0, &dummy_changed) < 0) { + Py_DECREF(value); + return NULL; + } + return value; + } + + /* The key isn't in the bucket. If that's not due to a KeyError exception, + * pass back the unexpected exception. + */ + if (! PyErr_ExceptionMatches(PyExc_KeyError)) + return NULL; + + if (failobj != NULL) { + /* Clear the KeyError and return the explicit default. */ + PyErr_Clear(); + Py_INCREF(failobj); + return failobj; + } + + /* No default given. The only difference in this case is the error + * message, which depends on whether the bucket is empty. + */ + if (Bucket_length(self) == 0) + PyErr_SetString(PyExc_KeyError, "pop(): Bucket is empty"); + return NULL; +} + +/* Search bucket self for key. This is the sq_contains slot of the + * PySequenceMethods. + * + * Return: + * -1 error + * 0 not found + * 1 found + */ +static int +bucket_contains(Bucket *self, PyObject *key) +{ + PyObject *asobj = _bucket_get(self, key, 1); + int result = -1; + + if (asobj != NULL) { + result = INT_AS_LONG(asobj) ? 
1 : 0; + Py_DECREF(asobj); + } + return result; +} + +/* +** bucket_getm +** +*/ +static PyObject * +bucket_getm(Bucket *self, PyObject *args) +{ + PyObject *key, *d=Py_None, *r; + + if (!PyArg_ParseTuple(args, "O|O:get", &key, &d)) + return NULL; + r = _bucket_get(self, key, 0); + if (r) + return r; + if (!PyErr_ExceptionMatches(PyExc_KeyError)) + return NULL; + PyErr_Clear(); + Py_INCREF(d); + return d; +} + +/**************************************************************************/ +/* Iterator support. */ + +/* A helper to build all the iterators for Buckets and Sets. + * If args is NULL, the iterator spans the entire structure. Else it's an + * argument tuple, with optional low and high arguments. + * kind is 'k', 'v' or 'i'. + * Returns a BTreeIter object, or NULL if error. + */ +static PyObject * +buildBucketIter(Bucket *self, PyObject *args, PyObject *kw, char kind) +{ + BTreeItems *items; + int lowoffset, highoffset; + BTreeIter *result = NULL; + + PER_USE_OR_RETURN(self, NULL); + if (Bucket_rangeSearch(self, args, kw, &lowoffset, &highoffset) < 0) + goto Done; + + items = (BTreeItems *)newBTreeItems(kind, self, lowoffset, + self, highoffset); + if (items == NULL) + goto Done; + + result = BTreeIter_new(items); /* win or lose, we're done */ + Py_DECREF(items); + +Done: + PER_UNUSE(self); + return (PyObject *)result; +} + +/* The implementation of iter(Bucket_or_Set); the Bucket tp_iter slot. */ +static PyObject * +Bucket_getiter(Bucket *self) +{ + return buildBucketIter(self, NULL, NULL, 'k'); +} + +/* The implementation of Bucket.iterkeys(). */ +static PyObject * +Bucket_iterkeys(Bucket *self, PyObject *args, PyObject *kw) +{ + return buildBucketIter(self, args, kw, 'k'); +} + +/* The implementation of Bucket.itervalues(). */ +static PyObject * +Bucket_itervalues(Bucket *self, PyObject *args, PyObject *kw) +{ + return buildBucketIter(self, args, kw, 'v'); +} + +/* The implementation of Bucket.iteritems(). 
*/ +static PyObject * +Bucket_iteritems(Bucket *self, PyObject *args, PyObject *kw) +{ + return buildBucketIter(self, args, kw, 'i'); +} + +/* End of iterator support. */ + +#ifdef PERSISTENT +static PyObject *merge_error(int p1, int p2, int p3, int reason); +static PyObject *bucket_merge(Bucket *s1, Bucket *s2, Bucket *s3); + +static PyObject * +_bucket__p_resolveConflict(PyObject *ob_type, PyObject *s[3]) +{ + PyObject *result = NULL; /* guilty until proved innocent */ + Bucket *b[3] = {NULL, NULL, NULL}; + PyObject *meth = NULL; + PyObject *a = NULL; + int i; + + for (i = 0; i < 3; i++) { + PyObject *r; + + b[i] = (Bucket*)PyObject_CallObject((PyObject *)ob_type, NULL); + if (b[i] == NULL) + goto Done; + if (s[i] == Py_None) /* None is equivalent to empty, for BTrees */ + continue; + meth = PyObject_GetAttr((PyObject *)b[i], __setstate___str); + if (meth == NULL) + goto Done; + a = PyTuple_New(1); + if (a == NULL) + goto Done; + PyTuple_SET_ITEM(a, 0, s[i]); + Py_INCREF(s[i]); + r = PyObject_CallObject(meth, a); /* b[i].__setstate__(s[i]) */ + if (r == NULL) + goto Done; + Py_DECREF(r); + Py_DECREF(a); + Py_DECREF(meth); + a = meth = NULL; + } + + if (b[0]->next != b[1]->next || b[0]->next != b[2]->next) + merge_error(-1, -1, -1, 0); + else + result = bucket_merge(b[0], b[1], b[2]); + +Done: + Py_XDECREF(meth); + Py_XDECREF(a); + Py_XDECREF(b[0]); + Py_XDECREF(b[1]); + Py_XDECREF(b[2]); + + return result; +} + +static PyObject * +bucket__p_resolveConflict(Bucket *self, PyObject *args) +{ + PyObject *s[3]; + + if (!PyArg_ParseTuple(args, "OOO", &s[0], &s[1], &s[2])) + return NULL; + + return _bucket__p_resolveConflict((PyObject *)Py_TYPE(self), s); +} +#endif + +/* Caution: Even though the _next attribute is read-only, a program could + do arbitrary damage to the btree internals. For example, it could call + clear() on a bucket inside a BTree. + + We need to decide if the convenience for inspecting BTrees is worth + the risk. 
+*/ + +static struct PyMemberDef Bucket_members[] = { + {"_next", T_OBJECT, offsetof(Bucket, next)}, + {NULL} +}; + +static struct PyMethodDef Bucket_methods[] = { + {"__getstate__", (PyCFunction) bucket_getstate, METH_NOARGS, + "__getstate__() -- Return the picklable state of the object"}, + + {"__setstate__", (PyCFunction) bucket_setstate, METH_O, + "__setstate__() -- Set the state of the object"}, + + {"keys", (PyCFunction) bucket_keys, METH_VARARGS | METH_KEYWORDS, + "keys([min, max]) -- Return the keys"}, + + {"has_key", (PyCFunction) bucket_has_key, METH_O, + "has_key(key) -- Test whether the bucket contains the given key"}, + + {"clear", (PyCFunction) bucket_clear, METH_VARARGS, + "clear() -- Remove all of the items from the bucket"}, + + {"update", (PyCFunction) Mapping_update, METH_O, + "update(collection) -- Add the items from the given collection"}, + + {"maxKey", (PyCFunction) Bucket_maxKey, METH_VARARGS, + "maxKey([key]) -- Find the maximum key\n\n" + "If an argument is given, find the maximum <= the argument"}, + + {"minKey", (PyCFunction) Bucket_minKey, METH_VARARGS, + "minKey([key]) -- Find the minimum key\n\n" + "If an argument is given, find the minimum >= the argument"}, + + {"values", (PyCFunction) bucket_values, METH_VARARGS | METH_KEYWORDS, + "values([min, max]) -- Return the values"}, + + {"items", (PyCFunction) bucket_items, METH_VARARGS | METH_KEYWORDS, + "items([min, max])) -- Return the items"}, + + {"byValue", (PyCFunction) bucket_byValue, METH_O, + "byValue(min) -- " + "Return value-keys with values >= min and reverse sorted by values"}, + + {"get", (PyCFunction) bucket_getm, METH_VARARGS, + "get(key[,default]) -- Look up a value\n\n" + "Return the default (or None) if the key is not found."}, + + {"setdefault", (PyCFunction) bucket_setdefault, METH_VARARGS, + "D.setdefault(k, d) -> D.get(k, d), also set D[k]=d if k not in D.\n\n" + "Return the value like get() except that if key is missing, d is both\n" + "returned and inserted into 
the bucket as the value of k."}, + + {"pop", (PyCFunction) bucket_pop, METH_VARARGS, + "D.pop(k[, d]) -> v, remove key and return the corresponding value.\n\n" + "If key is not found, d is returned if given, otherwise KeyError\n" + "is raised."}, + + {"iterkeys", (PyCFunction) Bucket_iterkeys, METH_VARARGS | METH_KEYWORDS, + "B.iterkeys([min[,max]]) -> an iterator over the keys of B"}, + + {"itervalues", + (PyCFunction) Bucket_itervalues, METH_VARARGS | METH_KEYWORDS, + "B.itervalues([min[,max]]) -> an iterator over the values of B"}, + + {"iteritems", (PyCFunction) Bucket_iteritems, METH_VARARGS | METH_KEYWORDS, + "B.iteritems([min[,max]]) -> an iterator over the (key, value) " + "items of B"}, + +#ifdef EXTRA_BUCKET_METHODS + EXTRA_BUCKET_METHODS +#endif + +#ifdef PERSISTENT + {"_p_resolveConflict", + (PyCFunction) bucket__p_resolveConflict, METH_VARARGS, + "_p_resolveConflict() -- Reinitialize from a newly created copy"}, + + {"_p_deactivate", + (PyCFunction) bucket__p_deactivate, METH_VARARGS | METH_KEYWORDS, + "_p_deactivate() -- Reinitialize from a newly created copy"}, +#endif + {NULL, NULL} +}; + +static int +Bucket_init(PyObject *self, PyObject *args, PyObject *kwds) +{ + PyObject *v = NULL; + + if (!PyArg_ParseTuple(args, "|O:" MOD_NAME_PREFIX "Bucket", &v)) + return -1; + + if (v) + return update_from_seq(self, v); + else + return 0; +} + +static void +bucket_dealloc(Bucket *self) +{ + PyObject_GC_UnTrack((PyObject *)self); + if (self->state != cPersistent_GHOST_STATE) { + _bucket_clear(self); + } + + cPersistenceCAPI->pertype->tp_dealloc((PyObject *)self); +} + +static int +bucket_traverse(Bucket *self, visitproc visit, void *arg) +{ + int err = 0; + int i, len; + +#define VISIT(SLOT) \ + if (SLOT) { \ + err = visit((PyObject *)(SLOT), arg); \ + if (err) \ + goto Done; \ + } + + /* Call our base type's traverse function. Because buckets are + * subclasses of Peristent, there must be one. 
+ */ + err = cPersistenceCAPI->pertype->tp_traverse((PyObject *)self, visit, arg); + if (err) + goto Done; + + /* If this is registered with the persistence system, cleaning up cycles + * is the database's problem. It would be horrid to unghostify buckets + * here just to chase pointers every time gc runs. + */ + if (self->state == cPersistent_GHOST_STATE) + goto Done; + + len = self->len; + /* if neither keys nor values are PyObject*, "i" is otherwise + unreferenced and we get a nuisance compiler wng */ + (void)i; + (void)len; +#ifdef KEY_TYPE_IS_PYOBJECT + /* Keys are Python objects so need to be traversed. */ + for (i = 0; i < len; i++) + VISIT(self->keys[i]); +#endif + +#ifdef VALUE_TYPE_IS_PYOBJECT + if (self->values != NULL) { + /* self->values exists (this is a mapping bucket, not a set bucket), + * and are Python objects, so need to be traversed. */ + for (i = 0; i < len; i++) + VISIT(self->values[i]); + } +#endif + + VISIT(self->next); + +Done: + return err; + +#undef VISIT +} + +static int +bucket_tp_clear(Bucket *self) +{ + if (self->state != cPersistent_GHOST_STATE) + _bucket_clear(self); + return 0; +} + +/* Code to access Bucket objects as mappings */ +static int +Bucket_length( Bucket *self) +{ + int r; + UNLESS (PER_USE(self)) + return -1; + r = self->len; + PER_UNUSE(self); + return r; +} + +static PyMappingMethods Bucket_as_mapping = { + (lenfunc)Bucket_length, /*mp_length*/ + (binaryfunc)bucket_getitem, /*mp_subscript*/ + (objobjargproc)bucket_setitem, /*mp_ass_subscript*/ +}; + +static PySequenceMethods Bucket_as_sequence = { + (lenfunc)0, /* sq_length */ + (binaryfunc)0, /* sq_concat */ + (ssizeargfunc)0, /* sq_repeat */ + (ssizeargfunc)0, /* sq_item */ + (ssizessizeargfunc)0, /* sq_slice */ + (ssizeobjargproc)0, /* sq_ass_item */ + (ssizessizeobjargproc)0, /* sq_ass_slice */ + (objobjproc)bucket_contains, /* sq_contains */ + 0, /* sq_inplace_concat */ + 0, /* sq_inplace_repeat */ +}; + +static PyObject * +bucket_repr(Bucket *self) +{ + 
PyObject *i, *r; +#ifdef PY3K + PyObject *rb; +#endif + char repr[10000]; + int rv; + + i = bucket_items(self, NULL, NULL); + if (!i) + { + return NULL; + } + r = PyObject_Repr(i); + Py_DECREF(i); + if (!r) + { + return NULL; + } +#ifdef PY3K + rb = PyUnicode_AsLatin1String(r); + rv = PyOS_snprintf(repr, sizeof(repr), + "%s(%s)", Py_TYPE(self)->tp_name, + PyBytes_AsString(rb)); + Py_DECREF(rb); +#else + rv = PyOS_snprintf(repr, sizeof(repr), + "%s(%s)", Py_TYPE(self)->tp_name, + PyBytes_AS_STRING(r)); +#endif + if (rv > 0 && (size_t)rv < sizeof(repr)) + { + Py_DECREF(r); +#ifdef PY3K + return PyUnicode_DecodeLatin1(repr, strlen(repr), "surrogateescape"); +#else + return PyBytes_FromStringAndSize(repr, strlen(repr)); +#endif + } + else + { + /* The static buffer wasn't big enough */ + int size; + PyObject *s; +#ifdef PY3K + PyObject *result; +#endif + /* 3 for the parens and the null byte */ + size = strlen(Py_TYPE(self)->tp_name) + PyBytes_GET_SIZE(r) + 3; + s = PyBytes_FromStringAndSize(NULL, size); + if (!s) { + Py_DECREF(r); + return r; + } + PyOS_snprintf(PyBytes_AS_STRING(s), size, + "%s(%s)", Py_TYPE(self)->tp_name, PyBytes_AS_STRING(r)); + Py_DECREF(r); +#ifdef PY3K + result = PyUnicode_FromEncodedObject(s, "latin1", "surrogateescape"); + Py_DECREF(s); + return result; +#else + return s; +#endif + } +} + +static PyTypeObject BucketType = { + PyVarObject_HEAD_INIT(NULL, 0) + MODULE_NAME MOD_NAME_PREFIX "Bucket", /* tp_name */ + sizeof(Bucket), /* tp_basicsize */ + 0, /* tp_itemsize */ + (destructor)bucket_dealloc, /* tp_dealloc */ + 0, /* tp_print */ + 0, /* tp_getattr */ + 0, /* tp_setattr */ + 0, /* tp_compare */ + (reprfunc)bucket_repr, /* tp_repr */ + 0, /* tp_as_number */ + &Bucket_as_sequence, /* tp_as_sequence */ + &Bucket_as_mapping, /* tp_as_mapping */ + 0, /* tp_hash */ + 0, /* tp_call */ + 0, /* tp_str */ + 0, /* tp_getattro */ + 0, /* tp_setattro */ + 0, /* tp_as_buffer */ + Py_TPFLAGS_DEFAULT | + Py_TPFLAGS_HAVE_GC | + Py_TPFLAGS_BASETYPE, /* 
tp_flags */ + 0, /* tp_doc */ + (traverseproc)bucket_traverse, /* tp_traverse */ + (inquiry)bucket_tp_clear, /* tp_clear */ + 0, /* tp_richcompare */ + 0, /* tp_weaklistoffset */ + (getiterfunc)Bucket_getiter, /* tp_iter */ + 0, /* tp_iternext */ + Bucket_methods, /* tp_methods */ + Bucket_members, /* tp_members */ + 0, /* tp_getset */ + 0, /* tp_base */ + 0, /* tp_dict */ + 0, /* tp_descr_get */ + 0, /* tp_descr_set */ + 0, /* tp_dictoffset */ + Bucket_init, /* tp_init */ + 0, /* tp_alloc */ + 0, /*PyType_GenericNew,*/ /* tp_new */ +}; + +static int +nextBucket(SetIteration *i) +{ + if (i->position >= 0) + { + UNLESS(PER_USE(BUCKET(i->set))) + return -1; + + if (i->position) + { + DECREF_KEY(i->key); + DECREF_VALUE(i->value); + } + + if (i->position < BUCKET(i->set)->len) + { + COPY_KEY(i->key, BUCKET(i->set)->keys[i->position]); + INCREF_KEY(i->key); + COPY_VALUE(i->value, BUCKET(i->set)->values[i->position]); + INCREF_VALUE(i->value); + i->position ++; + } + else + { + i->position = -1; + PER_ACCESSED(BUCKET(i->set)); + } + + PER_ALLOW_DEACTIVATION(BUCKET(i->set)); + } + + return 0; +} diff --git a/thesisenv/lib/python3.6/site-packages/BTrees/Development.txt b/thesisenv/lib/python3.6/site-packages/BTrees/Development.txt new file mode 100644 index 0000000..c391984 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/BTrees/Development.txt @@ -0,0 +1,429 @@ +===================== +Developer Information +===================== + +This document provides information for developers who maintain or extend +`BTrees`. + +Macros +====== + +`BTrees` are defined using a "template", roughly akin to a C++ template. To +create a new family of `BTrees`, create a source file that defines macros used +to handle differences in key and value types: + + +Configuration Macros +-------------------- + +``MASTER_ID`` + + A string to hold an RCS/CVS Id key to be included in compiled binaries. 
+ +``MOD_NAME_PREFIX`` + + A string (like "IO" or "OO") that provides the prefix used for the module. + This gets used to generate type names and the internal module name string. + +``DEFAULT_MAX_BUCKET_SIZE`` + + An int giving the maximum bucket size (number of key/value pairs). When a + bucket gets larger than this due to an insertion *into a BTREE*, it + splits. Inserting into a bucket directly doesn't split, and functions + that produce a bucket output (e.g., ``union()``) also have no bound on how + large a bucket may get. Someday this will be tunable on `BTree`. + instances. + +``DEFAULT_MAX_BTREE_SIZE`` + + An ``int`` giving the maximum size (number of children) of an internal + btree node. Someday this will be tunable on ``BTree`` instances. + + +Macros for Keys +--------------- + +``KEY_TYPE`` + + The C type declaration for keys (e.g., ``int`` or ``PyObject*``). + +``KEY_TYPE_IS_PYOBJECT`` + + Define if ``KEY_TYPE`` is a ``PyObject*`, else ``undef``. + +``KEY_CHECK(K)`` + + Tests whether the ``PyObject* K`` can be converted to the (``C``) key type + (``KEY_TYPE``). The macro should return a boolean (zero for false, + non-zero for true). When it returns false, its caller should probably set + a ``TypeError`` exception. + +``KEY_CHECK_ON_SET(K)`` + + Like ``KEY_CHECK``, but only checked during ``__setitem__``. + +``TEST_KEY_SET_OR(V, K, T)`` + + Like Python's ``cmp()``. Compares K(ey) to T(arget), where ``K`` + and ``T`` are ``C`` values of type `KEY_TYPE`. ``V`` is assigned an `int` + value depending on the outcome:: + + < 0 if K < T + == 0 if K == T + > 0 if K > T + + This macro acts like an ``if``, where the following statement is executed + only if a Python exception has been raised because the values could not be + compared. + +``DECREF_KEY(K)`` + + ``K`` is a value of ``KEY_TYPE``. If ``KEY_TYPE`` is a flavor of + ``PyObject*``, write this to do ``Py_DECREF(K)``. Else (e.g., + ``KEY_TYPE`` is ``int``) make it a nop. 
+ +``INCREF_KEY(K)`` + + ``K`` is a value of `KEY_TYPE`. If `KEY_TYPE` is a flavor of + ``PyObject*``, write this to do ``Py_INCREF(K)``. Else (e.g., `KEY_TYPE` + is ``int``) make it a nop. + +``COPY_KEY(K, E)`` + + Like ``K=E``. Copy a key from ``E`` to ``K``, both of ``KEY_TYPE``. Note + that this doesn't ``decref K`` or ``incref E`` when ``KEY_TYPE`` is a + ``PyObject*``; the caller is responsible for keeping refcounts straight. + +``COPY_KEY_TO_OBJECT(O, K)`` + + Roughly like ``O=K``. ``O`` is a ``PyObject*``, and the macro must build + a Python object form of ``K``, assign it to ``O``, and ensure that ``O`` + owns the reference to its new value. It may do this by creating a new + Python object based on ``K`` (e.g., ``PyInt_FromLong(K)`` when + ``KEY_TYPE`` is ``int``), or simply by doing ``Py_INCREF(K)`` if + ``KEY_TYPE`` is a ``PyObject*``. + +``COPY_KEY_FROM_ARG(TARGET, ARG, STATUS)`` + + Copy an argument to the target without creating a new reference to + ``ARG``. ``ARG`` is a ``PyObject*``, and ``TARGET`` is of type + ``KEY_TYPE``. If this can't be done (for example, ``KEY_CHECK(ARG)`` + returns false), set a Python error and set status to ``0``. If there is + no error, leave status alone. + + +Macros for Values +----------------- + +``VALUE_TYPE`` + + The C type declaration for values (e.g., ``int`` or ``PyObject*``). + +``VALUE_TYPE_IS_PYOBJECT`` + + Define if ``VALUE_TYPE`` is a ``PyObject*``, else ``undef``. + +``TEST_VALUE(X, Y)`` + + Like Python's ``cmp()``. Compares ``X`` to ``Y``, where ``X`` & ``Y`` are + ``C`` values of type ``VALUE_TYPE``. The macro returns an ``int``, with + value:: + + < 0 if X < Y + == 0 if X == Y + > 0 if X > Y + + Bug: There is no provision for determining whether the comparison attempt + failed (set a Python exception). + +``DECREF_VALUE(K)`` + + Like ``DECREF_KEY``, except applied to values of ``VALUE_TYPE``. + +``INCREF_VALUE(K)`` + + Like ``INCREF_KEY``, except applied to values of ``VALUE_TYPE``. 
+ +``COPY_VALUE(K, E)`` + + Like ``COPY_KEY``, except applied to values of ``VALUE_TYPE``. + +``COPY_VALUE_TO_OBJECT(O, K)`` + + Like ``COPY_KEY_TO_OBJECT``, except applied to values of ``VALUE_TYPE``. + +``COPY_VALUE_FROM_ARG(TARGET, ARG, STATUS)`` + + Like ``COPY_KEY_FROM_ARG``, except applied to values of ``VALUE_TYPE``. + +``NORMALIZE_VALUE(V, MIN)`` + + Normalize the value, ``V``, using the parameter ``MIN``. This is almost + certainly a YAGNI. It is a no-op for most types. For integers, ``V`` is + replaced by ``V/MIN`` only if ``MIN > 0``. + + +Macros for Set Operations +------------------------- + +``MERGE_DEFAULT`` + + A value of ``VALUE_TYPE`` specifying the value to associate with set + elements when sets are merged with mappings via weighed union or weighted + intersection. + +``MERGE(O1, w1, O2, w2)`` + + Performs a weighted merge of two values, ``O1`` and ``O2``, using weights + ``w1`` and ``w2``. The result must be of ``VALUE_TYPE``. Note that + weighted unions and weighted intersections are not enabled if this macro + is left undefined. + +``MERGE_WEIGHT(O, w)`` + + Computes a weighted value for ``O``. The result must be of + ``VALUE_TYPE``. This is used for "filling out" weighted unions, i.e. to + compute a weighted value for keys that appear in only one of the input + mappings. If left undefined, ``MERGE_WEIGHT`` defaults to:: + + #define MERGE_WEIGHT(O, w) (O) + +``MULTI_INT_UNION`` + + The value doesn't matter. If defined, `SetOpTemplate.c` compiles code for + a ``multiunion()`` function (compute a union of many input sets at high + speed). This currently makes sense only for structures with integer keys. + + +BTree Clues +=========== + +More or less random bits of helpful info. + ++ In papers and textbooks, this flavor of BTree is usually called a B+-Tree, + where "+" is a superscript. + ++ All keys and all values live in the bucket leaf nodes. Keys in interior + (BTree) nodes merely serve to guide a search efficiently toward the correct + leaf. 
+ ++ When a key is deleted, it's physically removed from the bucket it's in, but + this doesn't propagate back up the tree: since keys in interior nodes only + serve to guide searches, it's OK-- and saves time --to leave "stale" keys in + interior nodes. + ++ No attempt is made to rebalance the tree after a deletion, unless a bucket + thereby becomes entirely empty. "Classic BTrees" do rebalance, keeping all + buckets at least half full (provided there are enough keys in the entire + tree to fill half a bucket). The tradeoffs are murky. Pathological cases + in the presence of deletion do exist. Pathologies include trees tending + toward only one key per bucket, and buckets at differing depths (all buckets + are at the same depth in a classic BTree). + ++ ``DEFAULT_MAX_BUCKET_SIZE`` and ``DEFAULT_MAX_BTREE_SIZE`` are chosen mostly + to "even out" pickle sizes in storage. That's why, e.g., an `IIBTree` has + larger values than an `OOBTree`: pickles store ints more efficiently than + they can store arbitrary Python objects. + ++ In a non-empty BTree, every bucket node contains at least one key, and every + BTree node contains at least one child and a non-NULL firstbucket pointer. + However, a BTree node may not contain any keys. + ++ An empty BTree consists solely of a BTree node with ``len==0`` and + ``firstbucket==NULL``. + ++ Although a BTree can become unbalanced under a mix of inserts and deletes + (meaning both that there's nothing stronger that can be said about buckets + than that they're not empty, and that buckets can appear at different + depths), a BTree node always has children of the same kind: they're all + buckets, or they're all BTree nodes. 
+ + +The ``BTREE_SEARCH`` Macro +========================== + +For notational ease, consider a fixed BTree node ``x``, and let + +:: + + K(i) mean x->data.key[i] + C(i) mean all the keys reachable from x->data.child[i] + +For each ``i`` in ``0`` to ``x->len-1`` inclusive, + +:: + + K(i) <= C(i) < K(i+1) + +is a BTree node invariant, where we pretend that ``K(0)`` holds a key smaller +than any possible key, and ``K(x->len)`` holds a key larger than any possible +key. (Note that ``K(x->len)`` doesn't actually exist, and ``K(0)`` is never +used although space for it exists in non-empty BTree nodes.) + +When searching for a key ``k``, then, the child pointer we want to follow is +the one at index ``i`` such that ``K(i) <= k < K(i+1)``. There can be at most +one such ``i``, since the ``K(i)`` are strictly increasing. And there is at +least one such ``i`` provided the tree isn't empty (so that ``0 < len``). For +the moment, assume the tree isn't empty (we'll get back to that later). + +The macro's chief loop invariant is + +:: + + K(lo) < k < K(hi) + +This holds trivially at the start, since ``lo`` is set to ``0``, and ``hi`` to +``x->len``, and we pretend ``K(0)`` is minus infinity and ``K(len)`` is plus +infinity. Inside the loop, if ``K(i) < k`` we set ``lo`` to ``i``, and if +``K(i) > k`` we set ``hi`` to ``i``. These obviously preserve the invariant. +If ``K(i) == k``, the loop breaks and sets the result to ``i``, and since +``K(i) == k`` in that case ``i`` is obviously the correct result. + +Other cases depend on how ``i = floor((lo + hi)/2)`` works, exactly. Suppose +``lo + d = hi`` for some ``d >= 0``. Then ``i = floor((lo + lo + d)/2) = +floor(lo + d/2) = lo + floor(d/2)``. So: + +a. ``[d == 0] (lo == i == hi)`` if and only if ``(lo == hi)``. +b. ``[d == 1] (lo == i < hi)`` if and only if ``(lo+1 == hi)``. +c. ``[d > 1] (lo < i < hi)`` if and only if ``(lo+1 < hi)``. 
+ +If the node is empty ``(x->len == 0)``, then ``lo==i==hi==0`` at the start, +and the loop exits immediately (the first ``i > lo`` test fails), without +entering the body. + +Else ``lo < hi`` at the start, and the invariant ``K(lo) < k < K(hi)`` holds. + +If ``lo+1 < hi``, we're in case (c): ``i`` is strictly between ``lo`` and +``hi``, so the loop body is entered, and regardless of whether the body sets +the new ``lo`` or the new ``hi`` to ``i``, the new ``lo`` is strictly less +than the new ``hi``, and the difference between the new ``lo`` and new ``hi`` +is strictly less than the difference between the old ``lo`` and old ``hi``. +So long as the new ``lo + 1`` remains < the new ``hi``, we stay in this case. +We can't stay in this case forever, though: because ``hi-lo`` decreases on +each trip but remains > ``0``, ``lo+1 == hi`` must eventually become true. +(In fact, it becomes true quickly, in about ``log2(x->len)`` trips; the point +is more that ``lo`` doesn't equal ``hi`` when the loop ends, it has to end +with ``lo+1==hi`` and ``i==lo``). + +Then we're in case (b): ``i==lo==hi-1`` then, and the loop exits. The +invariant still holds, with ``lo==i`` and ``hi==lo+1==i+1``:: + + K(i) < k < K(i+1) + +so ``i`` is again the correct answer. + + +Optimization points: +-------------------- + ++ Division by 2 is done via shift rather via "/2". These are signed ints, and + almost all C compilers treat signed int division as truncating, and shifting + is not the same as truncation for signed int division. The compiler has no + way to know these values aren't negative, so has to generate longer-winded + code for "/2". But we know these values aren't negative, and exploit it. + ++ The order of _cmp comparisons matters. We're in an interior BTree node, and + are looking at only a tiny fraction of all the keys that exist. So finding + the key exactly in this node is unlikely, and checking ``_cmp == 0`` is a + waste of time to the same extent. 
It doesn't matter whether we check for + ``_cmp < 0`` or ``_cmp > 0`` first, so long as we do both before worrying + about equality. + ++ At the start of a routine, it's better to run this macro even if ``x->len`` + is ``0`` (check for that afterwards). We just called a function and so + probably drained the pipeline. If the first thing we do then is read up + ``self->len`` and check it against ``0``, we just sit there waiting for the + data to get read up, and then another immediate test-and-branch, and for a + very unlikely case (BTree nodes are rarely empty). It's better to get into + the loop right away so the normal case makes progress ASAP. + + +The ``BUCKET_SEARCH`` Macro +=========================== + +This has a different job than ``BTREE_SEARCH``: the key ``0`` slot is +legitimate in a bucket, and we want to find the index at which the key +belongs. If the key is larger than the bucket's largest key, a new slot at +index len is where it belongs, else it belongs at the smallest ``i`` with +``keys[i]`` >= the key we're looking for. We also need to know whether or not +the key is present (``BTREE_SEARCH`` didn't care; it only wanted to find the +next node to search). + +The mechanics of the search are quite similar, though. The primary +loop invariant changes to (say we're searching for key ``k``):: + + K(lo-1) < k < K(hi) + +where ``K(i)`` means ``keys[i]``, and we pretend ``K(-1)`` is minus infinity +and ``K(len)`` is plus infinity. + +If the bucket is empty, ``lo=hi=i=0`` at the start, the loop body is never +entered, and the macro sets ``INDEX`` to 0 and ``ABSENT`` to true. That's why +``_cmp`` is initialized to 1 (``_cmp`` becomes ``ABSENT``). + +Else the bucket is not empty, lok``, ``hi`` is set to ``i``, preserving that ``K[hi] = K[i] > k``. + +If the loop exits after either of those, ``_cmp != 0``, so ``ABSENT`` becomes +true. + +If ``K[i]=k``, the loop breaks, so that ``INDEX`` becomes ``i``, and +``ABSENT`` becomes false (``_cmp=0`` in this case). 
+ +The same case analysis for ``BTREE_SEARCH`` on ``lo`` and ``hi`` holds here: + +a. ``(lo == i == hi)`` if and only if ``(lo == hi)``. +b. ``(lo == i < hi)`` if and only if ``(lo+1 == hi)``. +c. ``(lo < i < hi)`` if and only if ``(lo+1 < hi)``. + +So long as ``lo+1 < hi``, we're in case (c), and either break with equality +(in which case the right results are obviously computed) or narrow the range. +If equality doesn't obtain, the range eventually narrows to cases (a) or (b). + +To go from (c) to (a), we must have ``lo+2==hi`` at the start, and +``K[i]=K[lo+1] + key``), because when it pays it narrows the range more (we get a little + boost from setting ``lo=i+1`` in this case; the other case sets ``hi=i``, + which isn't as much of a narrowing). diff --git a/thesisenv/lib/python3.6/site-packages/BTrees/IFBTree.py b/thesisenv/lib/python3.6/site-packages/BTrees/IFBTree.py new file mode 100644 index 0000000..2c091e8 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/BTrees/IFBTree.py @@ -0,0 +1,112 @@ +############################################################################## +# +# Copyright (c) 2001-2012 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## + +__all__ = ('Bucket', 'Set', 'BTree', 'TreeSet', + 'IFBucket', 'IFSet', 'IFBTree', 'IFTreeSet', + 'union', 'intersection', 'difference', + 'weightedUnion', 'weightedIntersection', 'multiunion', + ) + +from zope.interface import moduleProvides + +from .Interfaces import IIntegerFloatBTreeModule +from ._base import Bucket +from ._base import MERGE +from ._base import MERGE_WEIGHT_numeric +from ._base import MERGE_DEFAULT_float +from ._base import Set +from ._base import Tree as BTree +from ._base import TreeSet +from ._base import _TreeIterator +from ._base import difference as _difference +from ._base import intersection as _intersection +from ._base import multiunion as _multiunion +from ._base import set_operation as _set_operation +from ._base import to_int as _to_key +from ._base import to_float as _to_value +from ._base import union as _union +from ._base import weightedIntersection as _weightedIntersection +from ._base import weightedUnion as _weightedUnion +from ._base import _fix_pickle +from ._compat import import_c_extension + +_BUCKET_SIZE = 120 +_TREE_SIZE = 500 +using64bits = False + +class IFBucketPy(Bucket): + _to_key = _to_key + _to_value = _to_value + MERGE = MERGE + MERGE_WEIGHT = MERGE_WEIGHT_numeric + MERGE_DEFAULT = MERGE_DEFAULT_float + + +class IFSetPy(Set): + _to_key = _to_key + MERGE = MERGE + MERGE_WEIGHT = MERGE_WEIGHT_numeric + MERGE_DEFAULT = MERGE_DEFAULT_float + + +class IFBTreePy(BTree): + max_leaf_size = _BUCKET_SIZE + max_internal_size = _TREE_SIZE + _to_key = _to_key + _to_value = _to_value + MERGE = MERGE + MERGE_WEIGHT = MERGE_WEIGHT_numeric + MERGE_DEFAULT = MERGE_DEFAULT_float + + +class 
IFTreeSetPy(TreeSet): + max_leaf_size = _BUCKET_SIZE + max_internal_size = _TREE_SIZE + _to_key = _to_key + MERGE = MERGE + MERGE_WEIGHT = MERGE_WEIGHT_numeric + MERGE_DEFAULT = MERGE_DEFAULT_float + + +class IFTreeIteratorPy(_TreeIterator): + pass + + +# Can't declare forward refs, so fix up afterwards: + +IFBucketPy._mapping_type = IFBucketPy._bucket_type = IFBucketPy +IFBucketPy._set_type = IFSetPy + +IFSetPy._mapping_type = IFBucketPy +IFSetPy._set_type = IFSetPy._bucket_type = IFSetPy + +IFBTreePy._mapping_type = IFBTreePy._bucket_type = IFBucketPy +IFBTreePy._set_type = IFSetPy + +IFTreeSetPy._mapping_type = IFBucketPy +IFTreeSetPy._set_type = IFTreeSetPy._bucket_type = IFSetPy + + +differencePy = _set_operation(_difference, IFSetPy) +unionPy = _set_operation(_union, IFSetPy) +intersectionPy = _set_operation(_intersection, IFSetPy) +multiunionPy = _set_operation(_multiunion, IFSetPy) +weightedUnionPy = _set_operation(_weightedUnion, IFSetPy) +weightedIntersectionPy = _set_operation(_weightedIntersection, IFSetPy) + +import_c_extension(globals()) + +_fix_pickle(globals(), __name__) + +moduleProvides(IIntegerFloatBTreeModule) diff --git a/thesisenv/lib/python3.6/site-packages/BTrees/IIBTree.py b/thesisenv/lib/python3.6/site-packages/BTrees/IIBTree.py new file mode 100644 index 0000000..f05709d --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/BTrees/IIBTree.py @@ -0,0 +1,113 @@ +############################################################################## +# +# Copyright (c) 2001-2012 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## + +__all__ = ('Bucket', 'Set', 'BTree', 'TreeSet', + 'IIBucket', 'IISet', 'IIBTree', 'IITreeSet', + 'union', 'intersection', 'difference', + 'weightedUnion', 'weightedIntersection', 'multiunion', + ) + +from zope.interface import moduleProvides + +from .Interfaces import IIntegerIntegerBTreeModule +from ._base import Bucket +from ._base import MERGE +from ._base import MERGE_WEIGHT_numeric +from ._base import MERGE_DEFAULT_int +from ._base import Set +from ._base import Tree as BTree +from ._base import TreeSet +from ._base import _TreeIterator +from ._base import difference as _difference +from ._base import intersection as _intersection +from ._base import multiunion as _multiunion +from ._base import set_operation as _set_operation +from ._base import to_int as _to_key +_to_value = _to_key +from ._base import union as _union +from ._base import weightedIntersection as _weightedIntersection +from ._base import weightedUnion as _weightedUnion +from ._base import _fix_pickle +from ._compat import import_c_extension + +_BUCKET_SIZE = 120 +_TREE_SIZE = 500 +using64bits = False + + +class IIBucketPy(Bucket): + _to_key = _to_key + _to_value = _to_value + MERGE = MERGE + MERGE_WEIGHT = MERGE_WEIGHT_numeric + MERGE_DEFAULT = MERGE_DEFAULT_int + + +class IISetPy(Set): + _to_key = _to_key + MERGE = MERGE + MERGE_WEIGHT = MERGE_WEIGHT_numeric + MERGE_DEFAULT = MERGE_DEFAULT_int + + +class IIBTreePy(BTree): + max_leaf_size = _BUCKET_SIZE + max_internal_size = _TREE_SIZE + _to_key = _to_key + _to_value = _to_value + MERGE = MERGE + MERGE_WEIGHT = MERGE_WEIGHT_numeric + MERGE_DEFAULT = MERGE_DEFAULT_int + + +class IITreeSetPy(TreeSet): + max_leaf_size 
= _BUCKET_SIZE + max_internal_size = _TREE_SIZE + _to_key = _to_key + MERGE = MERGE + MERGE_WEIGHT = MERGE_WEIGHT_numeric + MERGE_DEFAULT = MERGE_DEFAULT_int + + +class IITreeIteratorPy(_TreeIterator): + pass + + +# Can't declare forward refs, so fix up afterwards: + +IIBucketPy._mapping_type = IIBucketPy._bucket_type = IIBucketPy +IIBucketPy._set_type = IISetPy + +IISetPy._mapping_type = IIBucketPy +IISetPy._set_type = IISetPy._bucket_type = IISetPy + +IIBTreePy._mapping_type = IIBTreePy._bucket_type = IIBucketPy +IIBTreePy._set_type = IISetPy + +IITreeSetPy._mapping_type = IIBucketPy +IITreeSetPy._set_type = IITreeSetPy._bucket_type = IISetPy + + +differencePy = _set_operation(_difference, IISetPy) +unionPy = _set_operation(_union, IISetPy) +intersectionPy = _set_operation(_intersection, IISetPy) +multiunionPy = _set_operation(_multiunion, IISetPy) +weightedUnionPy = _set_operation(_weightedUnion, IISetPy) +weightedIntersectionPy = _set_operation(_weightedIntersection, IISetPy) + +import_c_extension(globals()) + +_fix_pickle(globals(), __name__) + +moduleProvides(IIntegerIntegerBTreeModule) diff --git a/thesisenv/lib/python3.6/site-packages/BTrees/IOBTree.py b/thesisenv/lib/python3.6/site-packages/BTrees/IOBTree.py new file mode 100644 index 0000000..328c25e --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/BTrees/IOBTree.py @@ -0,0 +1,95 @@ +############################################################################## +# +# Copyright (c) 2001-2012 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## + +__all__ = ('Bucket', 'Set', 'BTree', 'TreeSet', + 'IOBucket', 'IOSet', 'IOBTree', 'IOTreeSet', + 'union', 'intersection', 'difference', 'multiunion', + ) + +from zope.interface import moduleProvides + +from .Interfaces import IIntegerObjectBTreeModule +from ._base import Bucket +from ._base import MERGE_WEIGHT_default +from ._base import Set +from ._base import Tree as BTree +from ._base import TreeSet +from ._base import _TreeIterator +from ._base import difference as _difference +from ._base import intersection as _intersection +from ._base import multiunion as _multiunion +from ._base import set_operation as _set_operation +from ._base import to_int as _to_key +from ._base import to_ob as _to_value +from ._base import union as _union +from ._base import _fix_pickle +from ._compat import import_c_extension + +_BUCKET_SIZE = 60 +_TREE_SIZE = 500 +using64bits = False + + +class IOBucketPy(Bucket): + _to_key = _to_key + _to_value = _to_value + MERGE_WEIGHT = MERGE_WEIGHT_default + + +class IOSetPy(Set): + _to_key = _to_key + + +class IOBTreePy(BTree): + max_leaf_size = _BUCKET_SIZE + max_internal_size = _TREE_SIZE + _to_key = _to_key + _to_value = _to_value + MERGE_WEIGHT = MERGE_WEIGHT_default + + +class IOTreeSetPy(TreeSet): + max_leaf_size = _BUCKET_SIZE + max_internal_size = _TREE_SIZE + _to_key = _to_key + +class IOTreeIteratorPy(_TreeIterator): + pass + + +# Can't declare forward refs, so fix up afterwards: + +IOBucketPy._mapping_type = IOBucketPy._bucket_type = IOBucketPy +IOBucketPy._set_type = IOSetPy + +IOSetPy._mapping_type = IOBucketPy +IOSetPy._set_type = IOSetPy._bucket_type = IOSetPy + +IOBTreePy._mapping_type = 
IOBTreePy._bucket_type = IOBucketPy +IOBTreePy._set_type = IOSetPy + +IOTreeSetPy._mapping_type = IOBucketPy +IOTreeSetPy._set_type = IOTreeSetPy._bucket_type = IOSetPy + + +differencePy = _set_operation(_difference, IOSetPy) +unionPy = _set_operation(_union, IOSetPy) +intersectionPy = _set_operation(_intersection, IOSetPy) +multiunionPy = _set_operation(_multiunion, IOSetPy) + +import_c_extension(globals()) + +_fix_pickle(globals(), __name__) + +moduleProvides(IIntegerObjectBTreeModule) diff --git a/thesisenv/lib/python3.6/site-packages/BTrees/Interfaces.py b/thesisenv/lib/python3.6/site-packages/BTrees/Interfaces.py new file mode 100644 index 0000000..cc4dcce --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/BTrees/Interfaces.py @@ -0,0 +1,527 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## + +from zope.interface import Interface, Attribute + + +class ICollection(Interface): + + def clear(): + """Remove all of the items from the collection.""" + + def __nonzero__(): + """Check if the collection is non-empty. + + Return a true value if the collection is non-empty and a + false value otherwise. + """ + + +class IReadSequence(Interface): + + def __getitem__(index): + """Return the value at the given index. + + An IndexError is raised if the index cannot be found. + """ + + def __getslice__(index1, index2): + """Return a subsequence from the original sequence. 
+ + The subsequence includes the items from index1 up to, but not + including, index2. + """ + +class IKeyed(ICollection): + + def has_key(key): + """Check whether the object has an item with the given key. + + Return a true value if the key is present, else a false value. + """ + + def keys(min=None, max=None, excludemin=False, excludemax=False): + """Return an IReadSequence containing the keys in the collection. + + The type of the IReadSequence is not specified. It could be a list + or a tuple or some other type. + + All arguments are optional, and may be specified as keyword + arguments, or by position. + + If a min is specified, then output is constrained to keys greater + than or equal to the given min, and, if excludemin is specified and + true, is further constrained to keys strictly greater than min. A + min value of None is ignored. If min is None or not specified, and + excludemin is true, the smallest key is excluded. + + If a max is specified, then output is constrained to keys less than + or equal to the given max, and, if excludemax is specified and + true, is further constrained to keys strictly less than max. A max + value of None is ignored. If max is None or not specified, and + excludemax is true, the largest key is excluded. + """ + + def maxKey(key=None): + """Return the maximum key. + + If a key argument if provided and not None, return the largest key + that is less than or equal to the argument. Raise an exception if + no such key exists. + """ + + def minKey(key=None): + """Return the minimum key. + + If a key argument if provided and not None, return the smallest key + that is greater than or equal to the argument. Raise an exception + if no such key exists. + """ + + +class ISetMutable(IKeyed): + + def insert(key): + """Add the key (value) to the set. + + If the key was already in the set, return 0, otherwise return 1. + """ + + def remove(key): + """Remove the key from the set. + + Raises KeyError if key is not in the set. 
+ """ + + def update(seq): + """Add the items from the given sequence to the set.""" + + +class ISized(Interface): + """An object that supports __len__.""" + + def __len__(): + """Return the number of items in the container.""" + + +class IKeySequence(IKeyed, ISized): + + def __getitem__(index): + """Return the key in the given index position. + + This allows iteration with for loops and use in functions, + like map and list, that read sequences. + """ + + +class ISet(IKeySequence, ISetMutable): + pass + + +class ITreeSet(IKeyed, ISetMutable): + pass + +class IMinimalDictionary(ISized, IKeyed): + + def get(key, default): + """Get the value associated with the given key. + + Return the default if has_key(key) is false. + """ + + def __getitem__(key): + """Get the value associated with the given key. + + Raise KeyError if has_key(key) is false. + """ + + def __setitem__(key, value): + """Set the value associated with the given key.""" + + def __delitem__(key): + """Delete the value associated with the given key. + + Raise KeyError if has_key(key) is false. + """ + + def values(min=None, max=None, excludemin=False, excludemax=False): + """Return an IReadSequence containing the values in the collection. + + The type of the IReadSequence is not specified. It could be a list + or a tuple or some other type. + + All arguments are optional, and may be specified as keyword + arguments, or by position. + + If a min is specified, then output is constrained to values whose + keys are greater than or equal to the given min, and, if excludemin + is specified and true, is further constrained to values whose keys + are strictly greater than min. A min value of None is ignored. If + min is None or not specified, and excludemin is true, the value + corresponding to the smallest key is excluded. 
+ + If a max is specified, then output is constrained to values whose + keys are less than or equal to the given max, and, if excludemax is + specified and true, is further constrained to values whose keys are + strictly less than max. A max value of None is ignored. If max is + None or not specified, and excludemax is true, the value + corresponding to the largest key is excluded. + """ + + def items(min=None, max=None, excludemin=False, excludemax=False): + """Return an IReadSequence containing the items in the collection. + + An item is a 2-tuple, a (key, value) pair. + + The type of the IReadSequence is not specified. It could be a list + or a tuple or some other type. + + All arguments are optional, and may be specified as keyword + arguments, or by position. + + If a min is specified, then output is constrained to items whose + keys are greater than or equal to the given min, and, if excludemin + is specified and true, is further constrained to items whose keys + are strictly greater than min. A min value of None is ignored. If + min is None or not specified, and excludemin is true, the item with + the smallest key is excluded. + + If a max is specified, then output is constrained to items whose + keys are less than or equal to the given max, and, if excludemax is + specified and true, is further constrained to items whose keys are + strictly less than max. A max value of None is ignored. If max is + None or not specified, and excludemax is true, the item with the + largest key is excluded. + """ + +class IDictionaryIsh(IMinimalDictionary): + + def update(collection): + """Add the items from the given collection object to the collection. + + The input collection must be a sequence of (key, value) 2-tuples, + or an object with an 'items' method that returns a sequence of + (key, value) pairs. + """ + + def byValue(minValue): + """Return a sequence of (value, key) pairs, sorted by value. 
+ + Values < minValue are omitted and other values are "normalized" by + the minimum value. This normalization may be a noop, but, for + integer values, the normalization is division. + """ + + def setdefault(key, d): + """D.setdefault(k, d) -> D.get(k, d), also set D[k]=d if k not in D. + + Return the value like get() except that if key is missing, d is both + returned and inserted into the dictionary as the value of k. + + Note that, unlike as for Python's dict.setdefault(), d is not + optional. Python defaults d to None, but that doesn't make sense + for mappings that can't have None as a value (for example, an + IIBTree can have only integers as values). + """ + + def pop(key, d): + """D.pop(k[, d]) -> v, remove key and return the corresponding value. + + If key is not found, d is returned if given, otherwise KeyError is + raised. + """ + + +class IBTree(IDictionaryIsh): + + def insert(key, value): + """Insert a key and value into the collection. + + If the key was already in the collection, then there is no + change and 0 is returned. + + If the key was not already in the collection, then the item is + added and 1 is returned. + + This method is here to allow one to generate random keys and + to insert and test whether the key was there in one operation. + + A standard idiom for generating new keys will be:: + + key = generate_key() + while not t.insert(key, value): + key=generate_key() + """ + + +class IMerge(Interface): + """Object with methods for merging sets, buckets, and trees. + + These methods are supplied in modules that define collection + classes with particular key and value types. The operations apply + only to collections from the same module. For example, the + IIBTree.union can only be used with IIBTree.IIBTree, + IIBTree.IIBucket, IIBTree.IISet, and IIBTree.IITreeSet. + + The implementing module has a value type. The IOBTree and OOBTree + modules have object value type. The IIBTree and OIBTree modules + have integer value types. 
Other modules may be defined in the + future that have other value types. + + The individual types are classified into set (Set and TreeSet) and + mapping (Bucket and BTree) types. + """ + + def difference(c1, c2): + """Return the keys or items in c1 for which there is no key in c2. + + If c1 is None, then None is returned. If c2 is None, then c1 + is returned. + + If neither c1 nor c2 is None, the output is a Set if c1 is a Set or + TreeSet, and is a Bucket if c1 is a Bucket or BTree. + """ + + def union(c1, c2): + """Compute the Union of c1 and c2. + + If c1 is None, then c2 is returned, otherwise, if c2 is None, + then c1 is returned. + + The output is a Set containing keys from the input + collections. + """ + + def intersection(c1, c2): + """Compute the intersection of c1 and c2. + + If c1 is None, then c2 is returned, otherwise, if c2 is None, + then c1 is returned. + + The output is a Set containing matching keys from the input + collections. + """ + + +class IBTreeModule(Interface): + """These are available in all modules (IOBTree, OIBTree, OOBTree, IIBTree, + IFBTree, LFBTree, LOBTree, OLBTree, and LLBTree). + """ + + BTree = Attribute( + """The IBTree for this module. + + Also available as [prefix]BTree, as in IOBTree.""") + + Bucket = Attribute( + """The leaf-node data buckets used by the BTree. + + (IBucket is not currently defined in this file, but is essentially + IDictionaryIsh, with the exception of __nonzero__, as of this + writing.) + + Also available as [prefix]Bucket, as in IOBucket.""") + + TreeSet = Attribute( + """The ITreeSet for this module. + + Also available as [prefix]TreeSet, as in IOTreeSet.""") + + Set = Attribute( + """The ISet for this module: the leaf-node data buckets used by the + TreeSet. + + Also available as [prefix]BTree, as in IOSet.""") + + +class IIMerge(IMerge): + """Merge collections with integer value type. 
+ + A primary intent is to support operations with no or integer + values, which are used as "scores" to rate indiviual keys. That + is, in this context, a BTree or Bucket is viewed as a set with + scored keys, using integer scores. + """ + + def weightedUnion(c1, c2, weight1=1, weight2=1): + """Compute the weighted union of c1 and c2. + + If c1 and c2 are None, the output is (0, None). + + If c1 is None and c2 is not None, the output is (weight2, c2). + + If c1 is not None and c2 is None, the output is (weight1, c1). + + Else, and hereafter, c1 is not None and c2 is not None. + + If c1 and c2 are both sets, the output is 1 and the (unweighted) + union of the sets. + + Else the output is 1 and a Bucket whose keys are the union of c1 and + c2's keys, and whose values are:: + + v1*weight1 + v2*weight2 + + where: + + v1 is 0 if the key is not in c1 + 1 if the key is in c1 and c1 is a set + c1[key] if the key is in c1 and c1 is a mapping + + v2 is 0 if the key is not in c2 + 1 if the key is in c2 and c2 is a set + c2[key] if the key is in c2 and c2 is a mapping + + Note that c1 and c2 must be collections. + """ + + def weightedIntersection(c1, c2, weight1=1, weight2=1): + """Compute the weighted intersection of c1 and c2. + + If c1 and c2 are None, the output is (0, None). + + If c1 is None and c2 is not None, the output is (weight2, c2). + + If c1 is not None and c2 is None, the output is (weight1, c1). + + Else, and hereafter, c1 is not None and c2 is not None. + + If c1 and c2 are both sets, the output is the sum of the weights + and the (unweighted) intersection of the sets. + + Else the output is 1 and a Bucket whose keys are the intersection of + c1 and c2's keys, and whose values are:: + + v1*weight1 + v2*weight2 + + where: + + v1 is 1 if c1 is a set + c1[key] if c1 is a mapping + + v2 is 1 if c2 is a set + c2[key] if c2 is a mapping + + Note that c1 and c2 must be collections. 
+ """ + + +class IMergeIntegerKey(IMerge): + """IMerge-able objects with integer keys. + + Concretely, this means the types in IOBTree and IIBTree. + """ + + def multiunion(seq): + """Return union of (zero or more) integer sets, as an integer set. + + seq is a sequence of objects each convertible to an integer set. + These objects are convertible to an integer set: + + + An integer, which is added to the union. + + + A Set or TreeSet from the same module (for example, an + IIBTree.TreeSet for IIBTree.multiunion()). The elements of the + set are added to the union. + + + A Bucket or BTree from the same module (for example, an + IOBTree.IOBTree for IOBTree.multiunion()). The keys of the + mapping are added to the union. + + The union is returned as a Set from the same module (for example, + IIBTree.multiunion() returns an IIBTree.IISet). + + The point to this method is that it can run much faster than + doing a sequence of two-input union() calls. Under the covers, + all the integers in all the inputs are sorted via a single + linear-time radix sort, then duplicates are removed in a second + linear-time pass. + """ + +class IBTreeFamily(Interface): + """the 64-bit or 32-bit family""" + IO = Attribute('The IIntegerObjectBTreeModule for this family') + OI = Attribute('The IObjectIntegerBTreeModule for this family') + II = Attribute('The IIntegerIntegerBTreeModule for this family') + IF = Attribute('The IIntegerFloatBTreeModule for this family') + OO = Attribute('The IObjectObjectBTreeModule for this family') + maxint = Attribute('The maximum integer storable in this family') + minint = Attribute('The minimum integer storable in this family') + + +class IIntegerObjectBTreeModule(IBTreeModule, IMerge): + """keys, or set values, are integers; values are objects. 
+ + describes IOBTree and LOBTree""" + + family = Attribute('The IBTreeFamily of this module') + + +class IObjectIntegerBTreeModule(IBTreeModule, IIMerge): + """keys, or set values, are objects; values are integers. + + Object keys (and set values) must sort reliably (for instance, *not* on + object id)! Homogenous key types recommended. + + describes OIBTree and LOBTree""" + + family = Attribute('The IBTreeFamily of this module') + + +class IIntegerIntegerBTreeModule(IBTreeModule, IIMerge, IMergeIntegerKey): + """keys, or set values, are integers; values are also integers. + + describes IIBTree and LLBTree""" + + family = Attribute('The IBTreeFamily of this module') + + +class IObjectObjectBTreeModule(IBTreeModule, IMerge): + """keys, or set values, are objects; values are also objects. + + Object keys (and set values) must sort reliably (for instance, *not* on + object id)! Homogenous key types recommended. + + describes OOBTree""" + + # Note that there's no ``family`` attribute; all families include + # the OO flavor of BTrees. + + +class IIntegerFloatBTreeModule(IBTreeModule, IMerge): + """keys, or set values, are integers; values are floats. + + describes IFBTree and LFBTree""" + + family = Attribute('The IBTreeFamily of this module') + + +try: + from ZODB.POSException import BTreesConflictError +except ImportError: + class BTreesConflictError(ValueError): + @property + def reason(self): + return self.args[-1] + +############################################################### +# IMPORTANT NOTE +# +# Getting the length of a BTree, TreeSet, or output of keys, +# values, or items of same is expensive. If you need to get the +# length, you need to maintain this separately. +# +# Eventually, I need to express this through the interfaces. 
+# +################################################################ diff --git a/thesisenv/lib/python3.6/site-packages/BTrees/LFBTree.py b/thesisenv/lib/python3.6/site-packages/BTrees/LFBTree.py new file mode 100644 index 0000000..810c515 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/BTrees/LFBTree.py @@ -0,0 +1,113 @@ +############################################################################## +# +# Copyright (c) 2001-2012 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## + +__all__ = ('Bucket', 'Set', 'BTree', 'TreeSet', + 'LFBucket', 'LFSet', 'LFBTree', 'LFTreeSet', + 'union', 'intersection', 'difference', + 'weightedUnion', 'weightedIntersection', 'multiunion', + ) + +from zope.interface import moduleProvides + +from .Interfaces import IIntegerFloatBTreeModule +from ._base import Bucket +from ._base import MERGE +from ._base import MERGE_WEIGHT_numeric +from ._base import MERGE_DEFAULT_float +from ._base import Set +from ._base import Tree as BTree +from ._base import TreeSet +from ._base import _TreeIterator +from ._base import difference as _difference +from ._base import intersection as _intersection +from ._base import multiunion as _multiunion +from ._base import set_operation as _set_operation +from ._base import to_long as _to_key +from ._base import to_float as _to_value +from ._base import union as _union +from ._base import weightedIntersection as _weightedIntersection +from ._base import weightedUnion as _weightedUnion +from ._base import _fix_pickle 
+from ._compat import import_c_extension + +_BUCKET_SIZE = 120 +_TREE_SIZE = 500 +using64bits = True + + +class LFBucketPy(Bucket): + _to_key = _to_key + _to_value = _to_value + MERGE = MERGE + MERGE_WEIGHT = MERGE_WEIGHT_numeric + MERGE_DEFAULT = MERGE_DEFAULT_float + + +class LFSetPy(Set): + _to_key = _to_key + MERGE = MERGE + MERGE_WEIGHT = MERGE_WEIGHT_numeric + MERGE_DEFAULT = MERGE_DEFAULT_float + + +class LFBTreePy(BTree): + max_leaf_size = _BUCKET_SIZE + max_internal_size = _TREE_SIZE + _to_key = _to_key + _to_value = _to_value + MERGE = MERGE + MERGE_WEIGHT = MERGE_WEIGHT_numeric + MERGE_DEFAULT = MERGE_DEFAULT_float + + +class LFTreeSetPy(TreeSet): + max_leaf_size = _BUCKET_SIZE + max_internal_size = _TREE_SIZE + _to_key = _to_key + MERGE = MERGE + MERGE_WEIGHT = MERGE_WEIGHT_numeric + MERGE_DEFAULT = MERGE_DEFAULT_float + + +class LFTreeIteratorPy(_TreeIterator): + pass + + +# Can't declare forward refs, so fix up afterwards: + +LFBucketPy._mapping_type = LFBucketPy._bucket_type = LFBucketPy +LFBucketPy._set_type = LFSetPy + +LFSetPy._mapping_type = LFBucketPy +LFSetPy._set_type = LFSetPy._bucket_type = LFSetPy + +LFBTreePy._mapping_type = LFBTreePy._bucket_type = LFBucketPy +LFBTreePy._set_type = LFSetPy + +LFTreeSetPy._mapping_type = LFBucketPy +LFTreeSetPy._set_type = LFTreeSetPy._bucket_type = LFSetPy + + +differencePy = _set_operation(_difference, LFSetPy) +unionPy = _set_operation(_union, LFSetPy) +intersectionPy = _set_operation(_intersection, LFSetPy) +multiunionPy = _set_operation(_multiunion, LFSetPy) +weightedUnionPy = _set_operation(_weightedUnion, LFSetPy) +weightedIntersectionPy = _set_operation(_weightedIntersection, LFSetPy) + +import_c_extension(globals()) + +_fix_pickle(globals(), __name__) + +moduleProvides(IIntegerFloatBTreeModule) diff --git a/thesisenv/lib/python3.6/site-packages/BTrees/LLBTree.py b/thesisenv/lib/python3.6/site-packages/BTrees/LLBTree.py new file mode 100644 index 0000000..490688d --- /dev/null +++ 
b/thesisenv/lib/python3.6/site-packages/BTrees/LLBTree.py @@ -0,0 +1,113 @@ +############################################################################## +# +# Copyright (c) 2001-2012 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## + +__all__ = ('Bucket', 'Set', 'BTree', 'TreeSet', + 'LLBucket', 'LLSet', 'LLBTree', 'LLTreeSet', + 'union', 'intersection', 'difference', + 'weightedUnion', 'weightedIntersection', 'multiunion', + ) + +from zope.interface import moduleProvides + +from .Interfaces import IIntegerIntegerBTreeModule +from ._base import Bucket +from ._base import MERGE +from ._base import MERGE_WEIGHT_numeric +from ._base import MERGE_DEFAULT_int +from ._base import Set +from ._base import Tree as BTree +from ._base import TreeSet +from ._base import _TreeIterator +from ._base import difference as _difference +from ._base import intersection as _intersection +from ._base import multiunion as _multiunion +from ._base import set_operation as _set_operation +from ._base import to_long as _to_key +from ._base import to_long as _to_value +from ._base import union as _union +from ._base import weightedIntersection as _weightedIntersection +from ._base import weightedUnion as _weightedUnion +from ._base import _fix_pickle +from ._compat import import_c_extension + +_BUCKET_SIZE = 120 +_TREE_SIZE = 500 +using64bits = True + + +class LLBucketPy(Bucket): + _to_key = _to_key + _to_value = _to_value + MERGE = MERGE + MERGE_WEIGHT = MERGE_WEIGHT_numeric + MERGE_DEFAULT = 
MERGE_DEFAULT_int + + +class LLSetPy(Set): + _to_key = _to_key + MERGE = MERGE + MERGE_WEIGHT = MERGE_WEIGHT_numeric + MERGE_DEFAULT = MERGE_DEFAULT_int + + +class LLBTreePy(BTree): + max_leaf_size = _BUCKET_SIZE + max_internal_size = _TREE_SIZE + _to_key = _to_key + _to_value = _to_value + MERGE = MERGE + MERGE_WEIGHT = MERGE_WEIGHT_numeric + MERGE_DEFAULT = MERGE_DEFAULT_int + + +class LLTreeSetPy(TreeSet): + max_leaf_size = _BUCKET_SIZE + max_internal_size = _TREE_SIZE + _to_key = _to_key + MERGE = MERGE + MERGE_WEIGHT = MERGE_WEIGHT_numeric + MERGE_DEFAULT = MERGE_DEFAULT_int + + +class LLTreeIteratorPy(_TreeIterator): + pass + + +# Can't declare forward refs, so fix up afterwards: + +LLBucketPy._mapping_type = LLBucketPy._bucket_type = LLBucketPy +LLBucketPy._set_type = LLSetPy + +LLSetPy._mapping_type = LLBucketPy +LLSetPy._set_type = LLSetPy._bucket_type = LLSetPy + +LLBTreePy._mapping_type = LLBTreePy._bucket_type = LLBucketPy +LLBTreePy._set_type = LLSetPy + +LLTreeSetPy._mapping_type = LLBucketPy +LLTreeSetPy._set_type = LLTreeSetPy._bucket_type = LLSetPy + + +differencePy = _set_operation(_difference, LLSetPy) +unionPy = _set_operation(_union, LLSetPy) +intersectionPy = _set_operation(_intersection, LLSetPy) +multiunionPy = _set_operation(_multiunion, LLSetPy) +weightedUnionPy = _set_operation(_weightedUnion, LLSetPy) +weightedIntersectionPy = _set_operation(_weightedIntersection, LLSetPy) + +import_c_extension(globals()) + +_fix_pickle(globals(), __name__) + +moduleProvides(IIntegerIntegerBTreeModule) diff --git a/thesisenv/lib/python3.6/site-packages/BTrees/LOBTree.py b/thesisenv/lib/python3.6/site-packages/BTrees/LOBTree.py new file mode 100644 index 0000000..6de4697 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/BTrees/LOBTree.py @@ -0,0 +1,96 @@ +############################################################################## +# +# Copyright (c) 2001-2012 Zope Foundation and Contributors. +# All Rights Reserved. 
+# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## + +__all__ = ('Bucket', 'Set', 'BTree', 'TreeSet', + 'LOBucket', 'LOSet', 'LOBTree', 'LOTreeSet', + 'union', 'intersection', 'difference', 'multiunion', + ) + +from zope.interface import moduleProvides + +from .Interfaces import IIntegerObjectBTreeModule +from ._base import Bucket +from ._base import MERGE_WEIGHT_default +from ._base import Set +from ._base import Tree as BTree +from ._base import TreeSet +from ._base import _TreeIterator +from ._base import difference as _difference +from ._base import intersection as _intersection +from ._base import multiunion as _multiunion +from ._base import set_operation as _set_operation +from ._base import to_long as _to_key +from ._base import to_ob as _to_value +from ._base import union as _union +from ._base import _fix_pickle +from ._compat import import_c_extension + +_BUCKET_SIZE = 60 +_TREE_SIZE = 500 +using64bits = True + + +class LOBucketPy(Bucket): + _to_key = _to_key + _to_value = _to_value + MERGE_WEIGHT = MERGE_WEIGHT_default + + +class LOSetPy(Set): + _to_key = _to_key + + +class LOBTreePy(BTree): + max_leaf_size = _BUCKET_SIZE + max_internal_size = _TREE_SIZE + _to_key = _to_key + _to_value = _to_value + MERGE_WEIGHT = MERGE_WEIGHT_default + + +class LOTreeSetPy(TreeSet): + max_leaf_size = _BUCKET_SIZE + max_internal_size = _TREE_SIZE + _to_key = _to_key + + +class LOTreeIteratorPy(_TreeIterator): + pass + + +# Can't declare forward refs, so fix up afterwards: + +LOBucketPy._mapping_type = LOBucketPy._bucket_type = LOBucketPy 
+LOBucketPy._set_type = LOSetPy + +LOSetPy._mapping_type = LOBucketPy +LOSetPy._set_type = LOSetPy._bucket_type = LOSetPy + +LOBTreePy._mapping_type = LOBTreePy._bucket_type = LOBucketPy +LOBTreePy._set_type = LOSetPy + +LOTreeSetPy._mapping_type = LOBucketPy +LOTreeSetPy._set_type = LOTreeSetPy._bucket_type = LOSetPy + + +differencePy = _set_operation(_difference, LOSetPy) +unionPy = _set_operation(_union, LOSetPy) +intersectionPy = _set_operation(_intersection, LOSetPy) +multiunionPy = _set_operation(_multiunion, LOSetPy) + +import_c_extension(globals()) + +_fix_pickle(globals(), __name__) + +moduleProvides(IIntegerObjectBTreeModule) diff --git a/thesisenv/lib/python3.6/site-packages/BTrees/Length.py b/thesisenv/lib/python3.6/site-packages/BTrees/Length.py new file mode 100644 index 0000000..ef72193 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/BTrees/Length.py @@ -0,0 +1,58 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## + +import persistent + +class Length(persistent.Persistent): + """BTree lengths are often too expensive to compute. + + Objects that use BTrees need to keep track of lengths themselves. + This class provides an object for doing this. + + As a bonus, the object support application-level conflict + resolution. + + It is tempting to to assign length objects to __len__ attributes + to provide instance-specific __len__ methods. 
However, this no + longer works as expected, because new-style classes cache + class-defined slot methods (like __len__) in C type slots. Thus, + instance-defined slot fillers are ignored. + """ + # class-level default required to keep copy.deepcopy happy -- see + # https://bugs.launchpad.net/zodb/+bug/516653 + value = 0 + + def __init__(self, v=0): + self.value = v + + def __getstate__(self): + return self.value + + def __setstate__(self, v): + self.value = v + + def set(self, v): + "Set the length value to v." + self.value = v + + def _p_resolveConflict(self, old, s1, s2): + return s1 + s2 - old + + def change(self, delta): + "Add delta to the length value." + self.value += delta + + def __call__(self, *args): + "Return the current length value." + return self.value diff --git a/thesisenv/lib/python3.6/site-packages/BTrees/MergeTemplate.c b/thesisenv/lib/python3.6/site-packages/BTrees/MergeTemplate.c new file mode 100644 index 0000000..57eda14 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/BTrees/MergeTemplate.c @@ -0,0 +1,349 @@ +/***************************************************************************** + + Copyright (c) 2001, 2002 Zope Foundation and Contributors. + All Rights Reserved. + + This software is subject to the provisions of the Zope Public License, + Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+ THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED + WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED + WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS + FOR A PARTICULAR PURPOSE + +****************************************************************************/ + +#define MERGETEMPLATE_C "$Id$\n" + +/**************************************************************************** + Set operations +****************************************************************************/ + +static int +merge_output(Bucket *r, SetIteration *i, int mapping) +{ + if (r->len >= r->size && Bucket_grow(r, -1, !mapping) < 0) + return -1; + COPY_KEY(r->keys[r->len], i->key); + INCREF_KEY(r->keys[r->len]); + if (mapping) { + COPY_VALUE(r->values[r->len], i->value); + INCREF_VALUE(r->values[r->len]); + } + r->len++; + return 0; +} + +/* The "reason" argument is a little integer giving "a reason" for the + * error. In the Zope3 codebase, these are mapped to explanatory strings + * via zodb/btrees/interfaces.py. + */ +static PyObject * +merge_error(int p1, int p2, int p3, int reason) +{ + PyObject *r; + + UNLESS (r=Py_BuildValue("iiii", p1, p2, p3, reason)) r=Py_None; + if (ConflictError == NULL) { + ConflictError = PyExc_ValueError; + Py_INCREF(ConflictError); + } + PyErr_SetObject(ConflictError, r); + if (r != Py_None) + { + Py_DECREF(r); + } + + return NULL; +} + +/* It's hard to explain "the rules" for bucket_merge, in large part because + * any automatic conflict-resolution scheme is going to be incorrect for + * some endcases of *some* app. The scheme here is pretty conservative, + * and should be OK for most apps. It's easier to explain what the code + * allows than what it forbids: + * + * Leaving things alone: it's OK if both s2 and s3 leave a piece of s1 + * alone (don't delete the key, and don't change the value). 
+ * + * Key deletion: a transaction (s2 or s3) can delete a key (from s1), but + * only if the other transaction (of s2 and s3) doesn't delete the same key. + * However, it's not OK for s2 and s3 to, between them, end up deleting all + * the keys. This is a higher-level constraint, due to that the caller of + * bucket_merge() doesn't have enough info to unlink the resulting empty + * bucket from its BTree correctly. It's also not OK if s2 or s3 are empty, + * because the transaction that emptied the bucket unlinked the bucket from + * the tree, and nothing we do here can get it linked back in again. + * + * Key insertion: s2 or s3 can add a new key, provided the other transaction + * doesn't insert the same key. It's not OK even if they insert the same + * pair. + * + * Mapping value modification: s2 or s3 can modify the value associated + * with a key in s1, provided the other transaction doesn't make a + * modification of the same key to a different value. It's OK if s2 and s3 + * both give the same new value to the key while it's hard to be precise about + * why, this doesn't seem consistent with that it's *not* OK for both to add + * a new key mapping to the same value). + */ +static PyObject * +bucket_merge(Bucket *s1, Bucket *s2, Bucket *s3) +{ + Bucket *r=0; + PyObject *s; + SetIteration i1 = {0,0,0}, i2 = {0,0,0}, i3 = {0,0,0}; + int cmp12, cmp13, cmp23, mapping, set; + + /* If either "after" bucket is empty, punt. 
*/ + if (s2->len == 0 || s3->len == 0) + { + merge_error(-1, -1, -1, 12); + goto err; + } + + if (initSetIteration(&i1, OBJECT(s1), 1) < 0) + goto err; + if (initSetIteration(&i2, OBJECT(s2), 1) < 0) + goto err; + if (initSetIteration(&i3, OBJECT(s3), 1) < 0) + goto err; + + mapping = i1.usesValue | i2.usesValue | i3.usesValue; + set = !mapping; + + if (mapping) + r = (Bucket *)PyObject_CallObject((PyObject *)&BucketType, NULL); + else + r = (Bucket *)PyObject_CallObject((PyObject *)&SetType, NULL); + if (r == NULL) + goto err; + + if (i1.next(&i1) < 0) + goto err; + if (i2.next(&i2) < 0) + goto err; + if (i3.next(&i3) < 0) + goto err; + + /* Consult zodb/btrees/interfaces.py for the meaning of the last + * argument passed to merge_error(). + */ + /* TODO: This isn't passing on errors raised by value comparisons. */ + while (i1.position >= 0 && i2.position >= 0 && i3.position >= 0) + { + TEST_KEY_SET_OR(cmp12, i1.key, i2.key) goto err; + TEST_KEY_SET_OR(cmp13, i1.key, i3.key) goto err; + if (cmp12==0) + { + if (cmp13==0) + { + if (set || (TEST_VALUE(i1.value, i2.value) == 0)) + { /* change in i3 value or all same */ + if (merge_output(r, &i3, mapping) < 0) goto err; + } + else if (set || (TEST_VALUE(i1.value, i3.value) == 0)) + { /* change in i2 value */ + if (merge_output(r, &i2, mapping) < 0) goto err; + } + else + { /* conflicting value changes in i2 and i3 */ + merge_error(i1.position, i2.position, i3.position, 1); + goto err; + } + if (i1.next(&i1) < 0) goto err; + if (i2.next(&i2) < 0) goto err; + if (i3.next(&i3) < 0) goto err; + } + else if (cmp13 > 0) + { /* insert i3 */ + if (merge_output(r, &i3, mapping) < 0) goto err; + if (i3.next(&i3) < 0) goto err; + } + else if (set || (TEST_VALUE(i1.value, i2.value) == 0)) + { /* deleted in i3 */ + if (i3.position == 1) + { + /* Deleted the first item. 
This will modify the + parent node, so we don't know if merging will be + safe + */ + merge_error(i1.position, i2.position, i3.position, 13); + goto err; + } + if (i1.next(&i1) < 0) goto err; + if (i2.next(&i2) < 0) goto err; + } + else + { /* conflicting del in i3 and change in i2 */ + merge_error(i1.position, i2.position, i3.position, 2); + goto err; + } + } + else if (cmp13 == 0) + { + if (cmp12 > 0) + { /* insert i2 */ + if (merge_output(r, &i2, mapping) < 0) goto err; + if (i2.next(&i2) < 0) goto err; + } + else if (set || (TEST_VALUE(i1.value, i3.value) == 0)) + { /* deleted in i2 */ + if (i2.position == 1) + { + /* Deleted the first item. This will modify the + parent node, so we don't know if merging will be + safe + */ + merge_error(i1.position, i2.position, i3.position, 13); + goto err; + } + if (i1.next(&i1) < 0) goto err; + if (i3.next(&i3) < 0) goto err; + } + else + { /* conflicting del in i2 and change in i3 */ + merge_error(i1.position, i2.position, i3.position, 3); + goto err; + } + } + else + { /* Both keys changed */ + TEST_KEY_SET_OR(cmp23, i2.key, i3.key) goto err; + if (cmp23==0) + { /* dueling inserts or deletes */ + merge_error(i1.position, i2.position, i3.position, 4); + goto err; + } + if (cmp12 > 0) + { /* insert i2 */ + if (cmp23 > 0) + { /* insert i3 first */ + if (merge_output(r, &i3, mapping) < 0) goto err; + if (i3.next(&i3) < 0) goto err; + } + else + { /* insert i2 first */ + if (merge_output(r, &i2, mapping) < 0) goto err; + if (i2.next(&i2) < 0) goto err; + } + } + else if (cmp13 > 0) + { /* Insert i3 */ + if (merge_output(r, &i3, mapping) < 0) goto err; + if (i3.next(&i3) < 0) goto err; + } + else + { /* 1<2 and 1<3: both deleted 1.key */ + merge_error(i1.position, i2.position, i3.position, 5); + goto err; + } + } + } + + while (i2.position >= 0 && i3.position >= 0) + { /* New inserts */ + TEST_KEY_SET_OR(cmp23, i2.key, i3.key) goto err; + if (cmp23==0) + { /* dueling inserts */ + merge_error(i1.position, i2.position, 
i3.position, 6); + goto err; + } + if (cmp23 > 0) + { /* insert i3 */ + if (merge_output(r, &i3, mapping) < 0) goto err; + if (i3.next(&i3) < 0) goto err; + } + else + { /* insert i2 */ + if (merge_output(r, &i2, mapping) < 0) goto err; + if (i2.next(&i2) < 0) goto err; + } + } + + while (i1.position >= 0 && i2.position >= 0) + { /* remainder of i1 deleted in i3 */ + TEST_KEY_SET_OR(cmp12, i1.key, i2.key) goto err; + if (cmp12 > 0) + { /* insert i2 */ + if (merge_output(r, &i2, mapping) < 0) goto err; + if (i2.next(&i2) < 0) goto err; + } + else if (cmp12==0 && (set || (TEST_VALUE(i1.value, i2.value) == 0))) + { /* delete i3 */ + if (i1.next(&i1) < 0) goto err; + if (i2.next(&i2) < 0) goto err; + } + else + { /* Dueling deletes or delete and change */ + merge_error(i1.position, i2.position, i3.position, 7); + goto err; + } + } + + while (i1.position >= 0 && i3.position >= 0) + { /* remainder of i1 deleted in i2 */ + TEST_KEY_SET_OR(cmp13, i1.key, i3.key) goto err; + if (cmp13 > 0) + { /* insert i3 */ + if (merge_output(r, &i3, mapping) < 0) goto err; + if (i3.next(&i3) < 0) goto err; + } + else if (cmp13==0 && (set || (TEST_VALUE(i1.value, i3.value) == 0))) + { /* delete i2 */ + if (i1.next(&i1) < 0) goto err; + if (i3.next(&i3) < 0) goto err; + } + else + { /* Dueling deletes or delete and change */ + merge_error(i1.position, i2.position, i3.position, 8); + goto err; + } + } + + if (i1.position >= 0) + { /* Dueling deletes */ + merge_error(i1.position, i2.position, i3.position, 9); + goto err; + } + + while (i2.position >= 0) + { /* Inserting i2 at end */ + if (merge_output(r, &i2, mapping) < 0) goto err; + if (i2.next(&i2) < 0) goto err; + } + + while (i3.position >= 0) + { /* Inserting i3 at end */ + if (merge_output(r, &i3, mapping) < 0) goto err; + if (i3.next(&i3) < 0) goto err; + } + + /* If the output bucket is empty, conflict resolution doesn't have + * enough info to unlink it from its containing BTree correctly. 
+ */ + if (r->len == 0) + { + merge_error(-1, -1, -1, 10); + goto err; + } + + finiSetIteration(&i1); + finiSetIteration(&i2); + finiSetIteration(&i3); + + if (s1->next) + { + Py_INCREF(s1->next); + r->next = s1->next; + } + s = bucket_getstate(r); + Py_DECREF(r); + + return s; + + err: + finiSetIteration(&i1); + finiSetIteration(&i2); + finiSetIteration(&i3); + Py_XDECREF(r); + return NULL; +} diff --git a/thesisenv/lib/python3.6/site-packages/BTrees/OIBTree.py b/thesisenv/lib/python3.6/site-packages/BTrees/OIBTree.py new file mode 100644 index 0000000..04cf57b --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/BTrees/OIBTree.py @@ -0,0 +1,110 @@ +############################################################################## +# +# Copyright (c) 2001-2012 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## + +__all__ = ('Bucket', 'Set', 'BTree', 'TreeSet', + 'OIBucket', 'OISet', 'OIBTree', 'OITreeSet', + 'union', 'intersection', 'difference', + 'weightedUnion', 'weightedIntersection', + ) + +from zope.interface import moduleProvides + +from .Interfaces import IObjectIntegerBTreeModule +from ._base import Bucket +from ._base import MERGE +from ._base import MERGE_WEIGHT_numeric +from ._base import MERGE_DEFAULT_float +from ._base import Set +from ._base import Tree as BTree +from ._base import TreeSet +from ._base import _TreeIterator +from ._base import difference as _difference +from ._base import intersection as _intersection +from ._base import set_operation as 
_set_operation +from ._base import to_ob as _to_key +from ._base import to_int as _to_value +from ._base import union as _union +from ._base import weightedIntersection as _weightedIntersection +from ._base import weightedUnion as _weightedUnion +from ._base import _fix_pickle +from ._compat import import_c_extension + +_BUCKET_SIZE = 60 +_TREE_SIZE = 250 +using64bits = True + +class OIBucketPy(Bucket): + _to_key = _to_key + _to_value = _to_value + MERGE = MERGE + MERGE_WEIGHT = MERGE_WEIGHT_numeric + MERGE_DEFAULT = MERGE_DEFAULT_float + + +class OISetPy(Set): + _to_key = _to_key + MERGE = MERGE + MERGE_WEIGHT = MERGE_WEIGHT_numeric + MERGE_DEFAULT = MERGE_DEFAULT_float + + +class OIBTreePy(BTree): + max_leaf_size = _BUCKET_SIZE + max_internal_size = _TREE_SIZE + _to_key = _to_key + _to_value = _to_value + MERGE = MERGE + MERGE_WEIGHT = MERGE_WEIGHT_numeric + MERGE_DEFAULT = MERGE_DEFAULT_float + + +class OITreeSetPy(TreeSet): + max_leaf_size = _BUCKET_SIZE + max_internal_size = _TREE_SIZE + _to_key = _to_key + MERGE = MERGE + MERGE_WEIGHT = MERGE_WEIGHT_numeric + MERGE_DEFAULT = MERGE_DEFAULT_float + + +class OITreeIteratorPy(_TreeIterator): + pass + + +# Can't declare forward refs, so fix up afterwards: + +OIBucketPy._mapping_type = OIBucketPy._bucket_type = OIBucketPy +OIBucketPy._set_type = OISetPy + +OISetPy._mapping_type = OIBucketPy +OISetPy._set_type = OISetPy._bucket_type = OISetPy + +OIBTreePy._mapping_type = OIBTreePy._bucket_type = OIBucketPy +OIBTreePy._set_type = OISetPy + +OITreeSetPy._mapping_type = OIBucketPy +OITreeSetPy._set_type = OITreeSetPy._bucket_type = OISetPy + + +differencePy = _set_operation(_difference, OISetPy) +unionPy = _set_operation(_union, OISetPy) +intersectionPy = _set_operation(_intersection, OISetPy) +weightedUnionPy = _set_operation(_weightedUnion, OISetPy) +weightedIntersectionPy = _set_operation(_weightedIntersection, OISetPy) + +import_c_extension(globals()) + +_fix_pickle(globals(), __name__) + 
+moduleProvides(IObjectIntegerBTreeModule) diff --git a/thesisenv/lib/python3.6/site-packages/BTrees/OLBTree.py b/thesisenv/lib/python3.6/site-packages/BTrees/OLBTree.py new file mode 100644 index 0000000..7c1fd9f --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/BTrees/OLBTree.py @@ -0,0 +1,111 @@ +############################################################################## +# +# Copyright (c) 2001-2012 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## + +__all__ = ('Bucket', 'Set', 'BTree', 'TreeSet', + 'OLBucket', 'OLSet', 'OLBTree', 'OLTreeSet', + 'union', 'intersection', 'difference', + 'weightedUnion', 'weightedIntersection', + ) + +from zope.interface import moduleProvides + +from .Interfaces import IObjectIntegerBTreeModule +from ._base import Bucket +from ._base import MERGE +from ._base import MERGE_WEIGHT_numeric +from ._base import MERGE_DEFAULT_int +from ._base import Set +from ._base import Tree as BTree +from ._base import TreeSet +from ._base import _TreeIterator +from ._base import difference as _difference +from ._base import intersection as _intersection +from ._base import set_operation as _set_operation +from ._base import to_ob as _to_key +from ._base import to_long as _to_value +from ._base import union as _union +from ._base import weightedIntersection as _weightedIntersection +from ._base import weightedUnion as _weightedUnion +from ._base import _fix_pickle +from ._compat import import_c_extension + +_BUCKET_SIZE = 60 +_TREE_SIZE = 250 
+using64bits = True + + +class OLBucketPy(Bucket): + _to_key = _to_key + _to_value = _to_value + MERGE = MERGE + MERGE_WEIGHT = MERGE_WEIGHT_numeric + MERGE_DEFAULT = MERGE_DEFAULT_int + + +class OLSetPy(Set): + _to_key = _to_key + MERGE = MERGE + MERGE_WEIGHT = MERGE_WEIGHT_numeric + MERGE_DEFAULT = MERGE_DEFAULT_int + + +class OLBTreePy(BTree): + max_leaf_size = _BUCKET_SIZE + max_internal_size = _TREE_SIZE + _to_key = _to_key + _to_value = _to_value + MERGE = MERGE + MERGE_WEIGHT = MERGE_WEIGHT_numeric + MERGE_DEFAULT = MERGE_DEFAULT_int + + +class OLTreeSetPy(TreeSet): + max_leaf_size = _BUCKET_SIZE + max_internal_size = _TREE_SIZE + _to_key = _to_key + MERGE = MERGE + MERGE_WEIGHT = MERGE_WEIGHT_numeric + MERGE_DEFAULT = MERGE_DEFAULT_int + + +class OLTreeIteratorPy(_TreeIterator): + pass + + +# Can't declare forward refs, so fix up afterwards: + +OLBucketPy._mapping_type = OLBucketPy._bucket_type = OLBucketPy +OLBucketPy._set_type = OLSetPy + +OLSetPy._mapping_type = OLBucketPy +OLSetPy._set_type = OLSetPy._bucket_type = OLSetPy + +OLBTreePy._mapping_type = OLBTreePy._bucket_type = OLBucketPy +OLBTreePy._set_type = OLSetPy + +OLTreeSetPy._mapping_type = OLBucketPy +OLTreeSetPy._set_type = OLTreeSetPy._bucket_type = OLSetPy + + +differencePy = _set_operation(_difference, OLSetPy) +unionPy = _set_operation(_union, OLSetPy) +intersectionPy = _set_operation(_intersection, OLSetPy) +weightedUnionPy = _set_operation(_weightedUnion, OLSetPy) +weightedIntersectionPy = _set_operation(_weightedIntersection, OLSetPy) + +import_c_extension(globals()) + +_fix_pickle(globals(), __name__) + +moduleProvides(IObjectIntegerBTreeModule) diff --git a/thesisenv/lib/python3.6/site-packages/BTrees/OOBTree.py b/thesisenv/lib/python3.6/site-packages/BTrees/OOBTree.py new file mode 100644 index 0000000..c14bcd6 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/BTrees/OOBTree.py @@ -0,0 +1,91 @@ +############################################################################## +# 
+# Copyright (c) 2001-2012 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## + +__all__ = ('Bucket', 'Set', 'BTree', 'TreeSet', + 'OOBucket', 'OOSet', 'OOBTree', 'OOTreeSet', + 'union', 'intersection','difference', + ) + +from zope.interface import moduleProvides + +from .Interfaces import IObjectObjectBTreeModule +from ._base import Bucket +from ._base import Set +from ._base import Tree as BTree +from ._base import TreeSet +from ._base import _TreeIterator +from ._base import difference as _difference +from ._base import intersection as _intersection +from ._base import set_operation as _set_operation +from ._base import to_ob as _to_key +_to_value = _to_key +from ._base import union as _union +from ._base import _fix_pickle +from ._compat import import_c_extension + +_BUCKET_SIZE = 30 +_TREE_SIZE = 250 +using64bits = False + + +class OOBucketPy(Bucket): + _to_key = _to_key + _to_value = _to_value + + +class OOSetPy(Set): + _to_key = _to_key + + +class OOBTreePy(BTree): + max_leaf_size = _BUCKET_SIZE + max_internal_size = _TREE_SIZE + _to_key = _to_key + _to_value = _to_value + + +class OOTreeSetPy(TreeSet): + max_leaf_size = _BUCKET_SIZE + max_internal_size = _TREE_SIZE + _to_key = _to_key + + +class OOTreeIteratorPy(_TreeIterator): + pass + + +# Can't declare forward refs, so fix up afterwards: + +OOBucketPy._mapping_type = OOBucketPy._bucket_type = OOBucketPy +OOBucketPy._set_type = OOSetPy + +OOSetPy._mapping_type = OOBucketPy +OOSetPy._set_type = OOSetPy._bucket_type = 
OOSetPy + +OOBTreePy._mapping_type = OOBTreePy._bucket_type = OOBucketPy +OOBTreePy._set_type = OOSetPy + +OOTreeSetPy._mapping_type = OOBucketPy +OOTreeSetPy._set_type = OOTreeSetPy._bucket_type = OOSetPy + + +differencePy = _set_operation(_difference, OOSetPy) +unionPy = _set_operation(_union, OOSetPy) +intersectionPy = _set_operation(_intersection, OOSetPy) + +import_c_extension(globals()) + +_fix_pickle(globals(), __name__) + +moduleProvides(IObjectObjectBTreeModule) diff --git a/thesisenv/lib/python3.6/site-packages/BTrees/SetOpTemplate.c b/thesisenv/lib/python3.6/site-packages/BTrees/SetOpTemplate.c new file mode 100644 index 0000000..7164ada --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/BTrees/SetOpTemplate.c @@ -0,0 +1,557 @@ +/***************************************************************************** + + Copyright (c) 2001, 2002 Zope Foundation and Contributors. + All Rights Reserved. + + This software is subject to the provisions of the Zope Public License, + Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. + THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED + WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED + WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS + FOR A PARTICULAR PURPOSE + +****************************************************************************/ + +/**************************************************************************** + Set operations +****************************************************************************/ + +#define SETOPTEMPLATE_C "$Id$\n" + +#ifdef KEY_CHECK +static int +nextKeyAsSet(SetIteration *i) +{ + if (i->position >= 0) { + if (i->position) { + DECREF_KEY(i->key); + i->position = -1; + } + else + i->position = 1; + } + return 0; +} +#endif + +/* initSetIteration + * + * Start the set iteration protocol. See the comments at struct SetIteration. + * + * Arguments + * i The address of a SetIteration control struct. 
+ * s The address of the set, bucket, BTree, ..., to be iterated. + * useValues Boolean; if true, and s has values (is a mapping), copy + * them into i->value each time i->next() is called; else + * ignore s's values even if s is a mapping. + * + * Return + * 0 on success; -1 and an exception set if error. + * i.usesValue is set to 1 (true) if s has values and useValues was + * true; else usesValue is set to 0 (false). + * i.set gets a new reference to s, or to some other object used to + * iterate over s. + * i.position is set to 0. + * i.next is set to an appropriate iteration function. + * i.key and i.value are left alone. + * + * Internal + * i.position < 0 means iteration terminated. + * i.position = 0 means iteration hasn't yet begun (next() hasn't + * been called yet). + * In all other cases, i.key, and possibly i.value, own references. + * These must be cleaned up, either by next() routines, or by + * finiSetIteration. + * next() routines must ensure the above. They should return without + * doing anything when i.position < 0. + * It's the responsibility of {init, fini}setIteration to clean up + * the reference in i.set, and to ensure that no stale references + * live in i.key or i.value if iteration terminates abnormally. + * A SetIteration struct has been cleaned up iff i.set is NULL. 
+ */ +static int +initSetIteration(SetIteration *i, PyObject *s, int useValues) +{ + i->set = NULL; + i->position = -1; /* set to 0 only on normal return */ + i->usesValue = 0; /* assume it's a set or that values aren't iterated */ + + if (PyObject_IsInstance(s, (PyObject *)&BucketType)) + { + i->set = s; + Py_INCREF(s); + + if (useValues) + { + i->usesValue = 1; + i->next = nextBucket; + } + else + i->next = nextSet; + } + else if (PyObject_IsInstance(s, (PyObject *)&SetType)) + { + i->set = s; + Py_INCREF(s); + i->next = nextSet; + } + else if (PyObject_IsInstance(s, (PyObject *)&BTreeType)) + { + i->set = BTree_rangeSearch(BTREE(s), NULL, NULL, 'i'); + UNLESS(i->set) return -1; + + if (useValues) + { + i->usesValue = 1; + i->next = nextBTreeItems; + } + else + i->next = nextTreeSetItems; + } + else if (PyObject_IsInstance(s, (PyObject *)&TreeSetType)) + { + i->set = BTree_rangeSearch(BTREE(s), NULL, NULL, 'k'); + UNLESS(i->set) return -1; + i->next = nextTreeSetItems; + } +#ifdef KEY_CHECK + else if (KEY_CHECK(s)) + { + int copied = 1; + COPY_KEY_FROM_ARG(i->key, s, copied); + UNLESS (copied) return -1; + + INCREF_KEY(i->key); + i->set = s; + Py_INCREF(s); + i->next = nextKeyAsSet; + } +#endif + else + { + PyErr_SetString(PyExc_TypeError, "invalid argument"); + return -1; + } + + i->position = 0; + + return 0; +} + +#ifndef MERGE_WEIGHT +#define MERGE_WEIGHT(O, w) (O) +#endif + +static int +copyRemaining(Bucket *r, SetIteration *i, int merge, + + /* See comment # 42 */ +#ifdef MERGE + VALUE_TYPE w) +#else + int w) +#endif +{ + while (i->position >= 0) + { + if(r->len >= r->size && Bucket_grow(r, -1, ! 
merge) < 0) return -1; + COPY_KEY(r->keys[r->len], i->key); + INCREF_KEY(r->keys[r->len]); + + if (merge) + { + COPY_VALUE(r->values[r->len], MERGE_WEIGHT(i->value, w)); + INCREF_VALUE(r->values[r->len]); + } + r->len++; + if (i->next(i) < 0) return -1; + } + + return 0; +} + +/* This is the workhorse for all set merge operations: the weighted and + * unweighted flavors of union and intersection, and set difference. The + * algorithm is conceptually simple but the code is complicated due to all + * the options. + * + * s1, s2 + * The input collections to be merged. + * + * usevalues1, usevalues2 + * Booleans. In the output, should values from s1 (or s2) be used? This + * only makes sense when an operation intends to support mapping outputs; + * these should both be false for operations that want pure set outputs. + * + * w1, w2 + * If usevalues1(2) are true, these are the weights to apply to the + * input values. + * + * c1 + * Boolean. Should keys that appear in c1 but not c2 appear in the output? + * c12 + * Boolean. Should keys that appear in both inputs appear in the output? + * c2 + * Boolean. Should keys that appear in c2 but not c1 appear in the output? + * + * Returns NULL if error, else a Set or Bucket, depending on whether a set or + * mapping was requested. + */ +static PyObject * +set_operation(PyObject *s1, PyObject *s2, + int usevalues1, int usevalues2, + + /* Comment # 42 + + The following ifdef works around a template/type problem + + Weights are passed as integers. In particular, the weight passed by + difference is one. This works fine in the int value and float value + cases but makes no sense in the object value case. In the object + value case, we don't do merging, so we don't use the weights, so it + doesn't matter what they are. 
+ */ +#ifdef MERGE + VALUE_TYPE w1, VALUE_TYPE w2, +#else + int w1, int w2, +#endif + int c1, int c12, int c2) + + +{ + Bucket *r=0; + SetIteration i1 = {0,0,0}, i2 = {0,0,0}; + int cmp, merge; + + if (initSetIteration(&i1, s1, usevalues1) < 0) goto err; + if (initSetIteration(&i2, s2, usevalues2) < 0) goto err; + merge = i1.usesValue | i2.usesValue; + + if (merge) + { +#ifndef MERGE + if (c12 && i1.usesValue && i2.usesValue) goto invalid_set_operation; +#endif + if (! i1.usesValue&& i2.usesValue) + { + SetIteration t; + int i; + + /* See comment # 42 above */ +#ifdef MERGE + VALUE_TYPE v; +#else + int v; +#endif + + t=i1; i1=i2; i2=t; + i=c1; c1=c2; c2=i; + v=w1; w1=w2; w2=v; + } +#ifdef MERGE_DEFAULT + i1.value=MERGE_DEFAULT; + i2.value=MERGE_DEFAULT; +#else + if (i1.usesValue) + { + if (! i2.usesValue && c2) goto invalid_set_operation; + } + else + { + if (c1 || c12) goto invalid_set_operation; + } +#endif + + UNLESS(r=BUCKET(PyObject_CallObject(OBJECT(&BucketType), NULL))) + goto err; + } + else + { + UNLESS(r=BUCKET(PyObject_CallObject(OBJECT(&SetType), NULL))) + goto err; + } + + if (i1.next(&i1) < 0) goto err; + if (i2.next(&i2) < 0) goto err; + + while (i1.position >= 0 && i2.position >= 0) + { + TEST_KEY_SET_OR(cmp, i1.key, i2.key) goto err; + if(cmp < 0) + { + if(c1) + { + if(r->len >= r->size && Bucket_grow(r, -1, ! merge) < 0) goto err; + COPY_KEY(r->keys[r->len], i1.key); + INCREF_KEY(r->keys[r->len]); + if (merge) + { + COPY_VALUE(r->values[r->len], MERGE_WEIGHT(i1.value, w1)); + INCREF_VALUE(r->values[r->len]); + } + r->len++; + } + if (i1.next(&i1) < 0) goto err; + } + else if(cmp==0) + { + if(c12) + { + if(r->len >= r->size && Bucket_grow(r, -1, ! 
merge) < 0) goto err; + COPY_KEY(r->keys[r->len], i1.key); + INCREF_KEY(r->keys[r->len]); + if (merge) + { +#ifdef MERGE + r->values[r->len] = MERGE(i1.value, w1, i2.value, w2); +#else + COPY_VALUE(r->values[r->len], i1.value); + INCREF_VALUE(r->values[r->len]); +#endif + } + r->len++; + } + if (i1.next(&i1) < 0) goto err; + if (i2.next(&i2) < 0) goto err; + } + else + { + if(c2) + { + if(r->len >= r->size && Bucket_grow(r, -1, ! merge) < 0) goto err; + COPY_KEY(r->keys[r->len], i2.key); + INCREF_KEY(r->keys[r->len]); + if (merge) + { + COPY_VALUE(r->values[r->len], MERGE_WEIGHT(i2.value, w2)); + INCREF_VALUE(r->values[r->len]); + } + r->len++; + } + if (i2.next(&i2) < 0) goto err; + } + } + if(c1 && copyRemaining(r, &i1, merge, w1) < 0) goto err; + if(c2 && copyRemaining(r, &i2, merge, w2) < 0) goto err; + + + finiSetIteration(&i1); + finiSetIteration(&i2); + + return OBJECT(r); + +#ifndef MERGE_DEFAULT + invalid_set_operation: + PyErr_SetString(PyExc_TypeError, "invalid set operation"); +#endif + + err: + finiSetIteration(&i1); + finiSetIteration(&i2); + Py_XDECREF(r); + return NULL; +} + +static PyObject * +difference_m(PyObject *ignored, PyObject *args) +{ + PyObject *o1, *o2; + + UNLESS(PyArg_ParseTuple(args, "OO", &o1, &o2)) return NULL; + + + if (o1 == Py_None || o2 == Py_None) + { + /* difference(None, X) -> None; difference(X, None) -> X */ + Py_INCREF(o1); + return o1; + } + + return set_operation(o1, o2, 1, 0, /* preserve values from o1, ignore o2's */ + 1, 0, /* o1's values multiplied by 1 */ + 1, 0, 0); /* take only keys unique to o1 */ +} + +static PyObject * +union_m(PyObject *ignored, PyObject *args) +{ + PyObject *o1, *o2; + + UNLESS(PyArg_ParseTuple(args, "OO", &o1, &o2)) return NULL; + + if (o1 == Py_None) + { + Py_INCREF(o2); + return o2; + } + else if (o2 == Py_None) + { + Py_INCREF(o1); + return o1; + } + + return set_operation(o1, o2, 0, 0, /* ignore values in both */ + 1, 1, /* the weights are irrelevant */ + 1, 1, 1); /* take all keys */ +} 
+ +static PyObject * +intersection_m(PyObject *ignored, PyObject *args) +{ + PyObject *o1, *o2; + + UNLESS(PyArg_ParseTuple(args, "OO", &o1, &o2)) return NULL; + + if (o1 == Py_None) + { + Py_INCREF(o2); + return o2; + } + else if (o2 == Py_None) + { + Py_INCREF(o1); + return o1; + } + + return set_operation(o1, o2, 0, 0, /* ignore values in both */ + 1, 1, /* the weights are irrelevant */ + 0, 1, 0); /* take only keys common to both */ +} + +#ifdef MERGE + +static PyObject * +wunion_m(PyObject *ignored, PyObject *args) +{ + PyObject *o1, *o2; + VALUE_TYPE w1 = 1, w2 = 1; + + UNLESS(PyArg_ParseTuple(args, "OO|" VALUE_PARSE VALUE_PARSE, + &o1, &o2, &w1, &w2) + ) return NULL; + + if (o1 == Py_None) + return Py_BuildValue(VALUE_PARSE "O", (o2 == Py_None ? 0 : w2), o2); + else if (o2 == Py_None) + return Py_BuildValue(VALUE_PARSE "O", w1, o1); + + o1 = set_operation(o1, o2, 1, 1, w1, w2, 1, 1, 1); + if (o1) + ASSIGN(o1, Py_BuildValue(VALUE_PARSE "O", (VALUE_TYPE)1, o1)); + + return o1; +} + +static PyObject * +wintersection_m(PyObject *ignored, PyObject *args) +{ + PyObject *o1, *o2; + VALUE_TYPE w1 = 1, w2 = 1; + + UNLESS(PyArg_ParseTuple(args, "OO|" VALUE_PARSE VALUE_PARSE, + &o1, &o2, &w1, &w2) + ) return NULL; + + if (o1 == Py_None) + return Py_BuildValue(VALUE_PARSE "O", (o2 == Py_None ? 0 : w2), o2); + else if (o2 == Py_None) + return Py_BuildValue(VALUE_PARSE "O", w1, o1); + + o1 = set_operation(o1, o2, 1, 1, w1, w2, 0, 1, 0); + if (o1) + ASSIGN(o1, Py_BuildValue(VALUE_PARSE "O", + ((o1->ob_type == (PyTypeObject*)(&SetType)) ? w2+w1 : 1), + o1)); + + return o1; +} + +#endif + +#ifdef MULTI_INT_UNION +#include "sorters.c" + +/* Input is a sequence of integer sets (or convertible to sets by the + set iteration protocol). Output is the union of the sets. The point + is to run much faster than doing pairs of unions. 
+*/ +static PyObject * +multiunion_m(PyObject *ignored, PyObject *args) +{ + PyObject *seq; /* input sequence */ + int n; /* length of input sequence */ + PyObject *set = NULL; /* an element of the input sequence */ + Bucket *result; /* result set */ + SetIteration setiter = {0}; + int i; + + UNLESS(PyArg_ParseTuple(args, "O", &seq)) + return NULL; + + n = PyObject_Length(seq); + if (n < 0) + return NULL; + + /* Construct an empty result set. */ + result = BUCKET(PyObject_CallObject(OBJECT(&SetType), NULL)); + if (result == NULL) + return NULL; + + /* For each set in the input sequence, append its elements to the result + set. At this point, we ignore the possibility of duplicates. */ + for (i = 0; i < n; ++i) { + set = PySequence_GetItem(seq, i); + if (set == NULL) + goto Error; + + /* If set is a bucket, do a straight resize + memcpy. */ + if (set->ob_type == (PyTypeObject*)&SetType || + set->ob_type == (PyTypeObject*)&BucketType) + { + Bucket *b = BUCKET(set); + int status = 0; + + UNLESS (PER_USE(b)) goto Error; + if (b->len) + status = bucket_append(result, b, 0, b->len, 0, i < n-1); + PER_UNUSE(b); + if (status < 0) goto Error; + } + else { + /* No cheap way: iterate over set's elements one at a time. */ + if (initSetIteration(&setiter, set, 0) < 0) goto Error; + if (setiter.next(&setiter) < 0) goto Error; + while (setiter.position >= 0) { + if (result->len >= result->size && Bucket_grow(result, -1, 1) < 0) + goto Error; + COPY_KEY(result->keys[result->len], setiter.key); + ++result->len; + /* We know the key is an int, so no need to incref it. */ + if (setiter.next(&setiter) < 0) goto Error; + } + finiSetIteration(&setiter); + } + Py_DECREF(set); + set = NULL; + } + + /* Combine, sort, remove duplicates, and reset the result's len. + If the set shrinks (which happens if and only if there are + duplicates), no point to realloc'ing the set smaller, as we + expect the result set to be short-lived. 
+ */ + if (result->len > 0) { + size_t newlen; /* number of elements in final result set */ + newlen = sort_int_nodups(result->keys, (size_t)result->len); + result->len = (int)newlen; + } + return (PyObject *)result; + + Error: + Py_DECREF(result); + Py_XDECREF(set); + finiSetIteration(&setiter); + return NULL; +} +#endif diff --git a/thesisenv/lib/python3.6/site-packages/BTrees/SetTemplate.c b/thesisenv/lib/python3.6/site-packages/BTrees/SetTemplate.c new file mode 100644 index 0000000..bec7f96 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/BTrees/SetTemplate.c @@ -0,0 +1,381 @@ +/***************************************************************************** + + Copyright (c) 2001, 2002 Zope Foundation and Contributors. + All Rights Reserved. + + This software is subject to the provisions of the Zope Public License, + Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. + THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED + WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED + WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS + FOR A PARTICULAR PURPOSE + + ****************************************************************************/ +#include "_compat.h" + +#define SETTEMPLATE_C "$Id$\n" + +static PyObject * +Set_insert(Bucket *self, PyObject *args) +{ + PyObject *key; + int i; + + UNLESS (PyArg_ParseTuple(args, "O", &key)) + return NULL; + if ( (i=_bucket_set(self, key, Py_None, 1, 1, 0)) < 0) + return NULL; + return INT_FROM_LONG(i); +} + +/* _Set_update and _TreeSet_update are identical except for the + function they call to add the element to the set. 
+*/
+
+static int
+_Set_update(Bucket *self, PyObject *seq)
+{
+    int n=0, ind=0;
+    PyObject *iter, *v;
+
+    iter = PyObject_GetIter(seq);
+    if (iter == NULL)
+        return -1;
+
+    while (1) {
+        v = PyIter_Next(iter);
+        if (v == NULL) {
+            if (PyErr_Occurred())
+                goto err;
+            else
+                break;
+        }
+        ind = _bucket_set(self, v, Py_None, 1, 1, 0);
+        Py_DECREF(v);
+        if (ind < 0)
+            goto err;
+        else
+            n += ind;
+    }
+
+err:
+    Py_DECREF(iter);
+    if (ind < 0)
+        return -1;
+    return n;
+}
+
+static PyObject *
+Set_update(Bucket *self, PyObject *args)
+{
+    PyObject *seq = NULL;
+    int n = 0;
+
+    if (!PyArg_ParseTuple(args, "|O:update", &seq))
+        return NULL;
+
+    if (seq) {
+        n = _Set_update(self, seq);
+        if (n < 0)
+            return NULL;
+    }
+
+    return INT_FROM_LONG(n);
+}
+
+static PyObject *
+Set_remove(Bucket *self, PyObject *args)
+{
+    PyObject *key;
+
+    UNLESS (PyArg_ParseTuple(args, "O", &key))
+        return NULL;
+    if (_bucket_set(self, key, NULL, 0, 1, 0) < 0)
+        return NULL;
+
+    Py_INCREF(Py_None);
+    return Py_None;
+}
+
+static int
+_set_setstate(Bucket *self, PyObject *args)
+{
+  PyObject *k, *items;
+  Bucket *next=0;
+  int i, l, copied=1;
+  KEY_TYPE *keys;
+
+  UNLESS (PyArg_ParseTuple(args, "O|O", &items, &next))
+    return -1;
+
+  if (!PyTuple_Check(items)) {
+    PyErr_SetString(PyExc_TypeError,
+                    "tuple required for first state element");
+    return -1;
+  }
+
+  if ((l=PyTuple_Size(items)) < 0)
+    return -1;
+
+  for (i=self->len; --i >= 0; )
+    {
+      DECREF_KEY(self->keys[i]);
+    }
+  self->len=0;
+
+  if (self->next)
+    {
+      Py_DECREF(self->next);
+      self->next=0;
+    }
+
+  if (l > self->size)
+    {
+      UNLESS (keys=BTree_Realloc(self->keys, sizeof(KEY_TYPE)*l))
+        return -1;
+      self->keys=keys;
+      self->size=l;
+    }
+
+  for (i=0; i<l; i++)
+    {
+      k=PyTuple_GET_ITEM(items, i);
+      COPY_KEY_FROM_ARG(self->keys[i], k, copied);
+      UNLESS (copied)
+        return -1;
+      INCREF_KEY(self->keys[i]);
+    }
+
+  self->len=l;
+
+  if (next)
+    {
+      self->next=next;
+      Py_INCREF(next);
+    }
+
+  return 0;
+}
+
+static PyObject *
+set_setstate(Bucket *self, PyObject *args)
+{
+  int r;
+
+  UNLESS (PyArg_ParseTuple(args, 
"O", &args)) + return NULL; + + PER_PREVENT_DEACTIVATION(self); + r=_set_setstate(self, args); + PER_UNUSE(self); + + if (r < 0) + return NULL; + Py_INCREF(Py_None); + return Py_None; +} + +static struct PyMethodDef Set_methods[] = { + {"__getstate__", (PyCFunction) bucket_getstate, METH_VARARGS, + "__getstate__() -- Return the picklable state of the object"}, + + {"__setstate__", (PyCFunction) set_setstate, METH_VARARGS, + "__setstate__() -- Set the state of the object"}, + + {"keys", (PyCFunction) bucket_keys, METH_VARARGS | METH_KEYWORDS, + "keys() -- Return the keys"}, + + {"has_key", (PyCFunction) bucket_has_key, METH_O, + "has_key(key) -- Test whether the bucket contains the given key"}, + + {"clear", (PyCFunction) bucket_clear, METH_VARARGS, + "clear() -- Remove all of the items from the bucket"}, + + {"maxKey", (PyCFunction) Bucket_maxKey, METH_VARARGS, + "maxKey([key]) -- Find the maximum key\n\n" + "If an argument is given, find the maximum <= the argument"}, + + {"minKey", (PyCFunction) Bucket_minKey, METH_VARARGS, + "minKey([key]) -- Find the minimum key\n\n" + "If an argument is given, find the minimum >= the argument"}, + +#ifdef PERSISTENT + {"_p_resolveConflict", + (PyCFunction) bucket__p_resolveConflict, METH_VARARGS, + "_p_resolveConflict() -- Reinitialize from a newly created copy"}, + + {"_p_deactivate", + (PyCFunction) bucket__p_deactivate, METH_VARARGS | METH_KEYWORDS, + "_p_deactivate() -- Reinitialize from a newly created copy"}, +#endif + + {"add", (PyCFunction)Set_insert, METH_VARARGS, + "add(id) -- Add a key to the set"}, + + {"insert", (PyCFunction)Set_insert, METH_VARARGS, + "insert(id) -- Add a key to the set"}, + + {"update", (PyCFunction)Set_update, METH_VARARGS, + "update(seq) -- Add the items from the given sequence to the set"}, + + {"remove", (PyCFunction)Set_remove, METH_VARARGS, + "remove(id) -- Remove an id from the set"}, + + {NULL, NULL} /* sentinel */ +}; + +static int +Set_init(PyObject *self, PyObject *args, PyObject 
*kwds) +{ + PyObject *v = NULL; + + if (!PyArg_ParseTuple(args, "|O:" MOD_NAME_PREFIX "Set", &v)) + return -1; + + if (v) + return _Set_update((Bucket *)self, v); + else + return 0; +} + + + +static PyObject * +set_repr(Bucket *self) +{ + static PyObject *format; + PyObject *r, *t; + + if (!format) + format = TEXT_FROM_STRING(MOD_NAME_PREFIX "Set(%s)"); + UNLESS (t = PyTuple_New(1)) + return NULL; + UNLESS (r = bucket_keys(self, NULL, NULL)) + goto err; + PyTuple_SET_ITEM(t, 0, r); + r = t; + ASSIGN(r, TEXT_FORMAT(format, r)); + return r; +err: + Py_DECREF(t); + return NULL; +} + +static Py_ssize_t +set_length(Bucket *self) +{ + int r; + + PER_USE_OR_RETURN(self, -1); + r = self->len; + PER_UNUSE(self); + + return r; +} + +static PyObject * +set_item(Bucket *self, Py_ssize_t index) +{ + PyObject *r=0; + + PER_USE_OR_RETURN(self, NULL); + if (index >= 0 && index < self->len) + { + COPY_KEY_TO_OBJECT(r, self->keys[index]); + } + else + IndexError(index); + + PER_UNUSE(self); + + return r; +} + +static PySequenceMethods set_as_sequence = { + (lenfunc)set_length, /* sq_length */ + (binaryfunc)0, /* sq_concat */ + (ssizeargfunc)0, /* sq_repeat */ + (ssizeargfunc)set_item, /* sq_item */ + (ssizessizeargfunc)0, /* sq_slice */ + (ssizeobjargproc)0, /* sq_ass_item */ + (ssizessizeobjargproc)0, /* sq_ass_slice */ + (objobjproc)bucket_contains, /* sq_contains */ + 0, /* sq_inplace_concat */ + 0, /* sq_inplace_repeat */ +}; + +static PyTypeObject SetType = { + PyVarObject_HEAD_INIT(NULL, 0) /* PyPersist_Type */ + MODULE_NAME MOD_NAME_PREFIX "Set", /* tp_name */ + sizeof(Bucket), /* tp_basicsize */ + 0, /* tp_itemsize */ + (destructor)bucket_dealloc, /* tp_dealloc */ + 0, /* tp_print */ + 0, /* tp_getattr */ + 0, /* tp_setattr */ + 0, /* tp_compare */ + (reprfunc)set_repr, /* tp_repr */ + 0, /* tp_as_number */ + &set_as_sequence, /* tp_as_sequence */ + 0, /* tp_as_mapping */ + 0, /* tp_hash */ + 0, /* tp_call */ + 0, /* tp_str */ + 0, /* tp_getattro */ + 0, /* tp_setattro */ + 
0, /* tp_as_buffer */ + Py_TPFLAGS_DEFAULT | + Py_TPFLAGS_HAVE_GC | + Py_TPFLAGS_BASETYPE, /* tp_flags */ + 0, /* tp_doc */ + (traverseproc)bucket_traverse, /* tp_traverse */ + (inquiry)bucket_tp_clear, /* tp_clear */ + 0, /* tp_richcompare */ + 0, /* tp_weaklistoffset */ + (getiterfunc)Bucket_getiter, /* tp_iter */ + 0, /* tp_iternext */ + Set_methods, /* tp_methods */ + Bucket_members, /* tp_members */ + 0, /* tp_getset */ + 0, /* tp_base */ + 0, /* tp_dict */ + 0, /* tp_descr_get */ + 0, /* tp_descr_set */ + 0, /* tp_dictoffset */ + Set_init, /* tp_init */ + 0, /* tp_alloc */ + 0, /*PyType_GenericNew,*/ /* tp_new */ +}; + +static int +nextSet(SetIteration *i) +{ + + if (i->position >= 0) + { + UNLESS(PER_USE(BUCKET(i->set))) + return -1; + + if (i->position) + { + DECREF_KEY(i->key); + } + + if (i->position < BUCKET(i->set)->len) + { + COPY_KEY(i->key, BUCKET(i->set)->keys[i->position]); + INCREF_KEY(i->key); + i->position ++; + } + else + { + i->position = -1; + PER_ACCESSED(BUCKET(i->set)); + } + + PER_ALLOW_DEACTIVATION(BUCKET(i->set)); + } + + + return 0; +} diff --git a/thesisenv/lib/python3.6/site-packages/BTrees/TreeSetTemplate.c b/thesisenv/lib/python3.6/site-packages/BTrees/TreeSetTemplate.c new file mode 100644 index 0000000..7ad878a --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/BTrees/TreeSetTemplate.c @@ -0,0 +1,254 @@ +/***************************************************************************** + + Copyright (c) 2001, 2002 Zope Foundation and Contributors. + All Rights Reserved. + + This software is subject to the provisions of the Zope Public License, + Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+ THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED + WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED + WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS + FOR A PARTICULAR PURPOSE + + ****************************************************************************/ +#include "_compat.h" + +#define TREESETTEMPLATE_C "$Id$\n" + +static PyObject * +TreeSet_insert(BTree *self, PyObject *args) +{ + PyObject *key; + int i; + + if (!PyArg_ParseTuple(args, "O:insert", &key)) + return NULL; + i = _BTree_set(self, key, Py_None, 1, 1); + if (i < 0) + return NULL; + return INT_FROM_LONG(i); +} + +/* _Set_update and _TreeSet_update are identical except for the + function they call to add the element to the set. +*/ + +static int +_TreeSet_update(BTree *self, PyObject *seq) +{ + int n=0, ind=0; + PyObject *iter, *v; + + iter = PyObject_GetIter(seq); + if (iter == NULL) + return -1; + + while (1) + { + v = PyIter_Next(iter); + if (v == NULL) + { + if (PyErr_Occurred()) + goto err; + else + break; + } + ind = _BTree_set(self, v, Py_None, 1, 1); + Py_DECREF(v); + if (ind < 0) + goto err; + else + n += ind; + } + +err: + Py_DECREF(iter); + if (ind < 0) + return -1; + return n; +} + +static PyObject * +TreeSet_update(BTree *self, PyObject *args) +{ + PyObject *seq = NULL; + int n = 0; + + if (!PyArg_ParseTuple(args, "|O:update", &seq)) + return NULL; + + if (seq) + { + n = _TreeSet_update(self, seq); + if (n < 0) + return NULL; + } + + return INT_FROM_LONG(n); +} + + +static PyObject * +TreeSet_remove(BTree *self, PyObject *args) +{ + PyObject *key; + + UNLESS (PyArg_ParseTuple(args, "O", &key)) + return NULL; + if (_BTree_set(self, key, NULL, 0, 1) < 0) + return NULL; + Py_INCREF(Py_None); + return Py_None; +} + +static PyObject * +TreeSet_setstate(BTree *self, PyObject *args) +{ + int r; + + if (!PyArg_ParseTuple(args,"O",&args)) + return NULL; + + PER_PREVENT_DEACTIVATION(self); + r=_BTree_setstate(self, args, 1); + 
PER_UNUSE(self); + + if (r < 0) + return NULL; + Py_INCREF(Py_None); + return Py_None; +} + +static struct PyMethodDef TreeSet_methods[] = +{ + {"__getstate__", (PyCFunction) BTree_getstate, METH_NOARGS, + "__getstate__() -> state\n\n" + "Return the picklable state of the TreeSet."}, + + {"__setstate__", (PyCFunction) TreeSet_setstate, METH_VARARGS, + "__setstate__(state)\n\n" + "Set the state of the TreeSet."}, + + {"has_key", (PyCFunction) BTree_has_key, METH_O, + "has_key(key)\n\n" + "Return true if the TreeSet contains the given key."}, + + {"keys", (PyCFunction) BTree_keys, METH_VARARGS | METH_KEYWORDS, + "keys([min, max]) -> list of keys\n\n" + "Returns the keys of the TreeSet. If min and max are supplied, only\n" + "keys greater than min and less than max are returned."}, + + {"maxKey", (PyCFunction) BTree_maxKey, METH_VARARGS, + "maxKey([max]) -> key\n\n" + "Return the largest key in the BTree. If max is specified, return\n" + "the largest key <= max."}, + + {"minKey", (PyCFunction) BTree_minKey, METH_VARARGS, + "minKey([mi]) -> key\n\n" + "Return the smallest key in the BTree. 
If min is specified, return\n" + "the smallest key >= min."}, + + {"clear", (PyCFunction) BTree_clear, METH_NOARGS, + "clear()\n\nRemove all of the items from the BTree."}, + + {"add", (PyCFunction)TreeSet_insert, METH_VARARGS, + "add(id) -- Add an item to the set"}, + + {"insert", (PyCFunction)TreeSet_insert, METH_VARARGS, + "insert(id) -- Add an item to the set"}, + + {"update", (PyCFunction)TreeSet_update, METH_VARARGS, + "update(collection)\n\n Add the items from the given collection."}, + + {"remove", (PyCFunction)TreeSet_remove, METH_VARARGS, + "remove(id) -- Remove a key from the set"}, + + {"_check", (PyCFunction) BTree_check, METH_NOARGS, + "Perform sanity check on TreeSet, and raise exception if flawed."}, + +#ifdef PERSISTENT + {"_p_resolveConflict", + (PyCFunction) BTree__p_resolveConflict, METH_VARARGS, + "_p_resolveConflict() -- Reinitialize from a newly created copy"}, + + {"_p_deactivate", + (PyCFunction) BTree__p_deactivate, METH_VARARGS | METH_KEYWORDS, + "_p_deactivate()\n\nReinitialize from a newly created copy."}, +#endif + {NULL, NULL} /* sentinel */ +}; + +static PyMappingMethods TreeSet_as_mapping = { + (lenfunc)BTree_length, /*mp_length*/ +}; + +static PySequenceMethods TreeSet_as_sequence = { + (lenfunc)0, /* sq_length */ + (binaryfunc)0, /* sq_concat */ + (ssizeargfunc)0, /* sq_repeat */ + (ssizeargfunc)0, /* sq_item */ + (ssizessizeargfunc)0, /* sq_slice */ + (ssizeobjargproc)0, /* sq_ass_item */ + (ssizessizeobjargproc)0, /* sq_ass_slice */ + (objobjproc)BTree_contains, /* sq_contains */ + 0, /* sq_inplace_concat */ + 0, /* sq_inplace_repeat */ +}; + +static int +TreeSet_init(PyObject *self, PyObject *args, PyObject *kwds) +{ + PyObject *v = NULL; + + if (!PyArg_ParseTuple(args, "|O:" MOD_NAME_PREFIX "TreeSet", &v)) + return -1; + + if (v) + return _TreeSet_update((BTree *)self, v); + else + return 0; +} + +static PyTypeObject TreeSetType = +{ + PyVarObject_HEAD_INIT(NULL, 0) + MODULE_NAME MOD_NAME_PREFIX "TreeSet", /* tp_name */ + 
sizeof(BTree), /* tp_basicsize */ + 0, /* tp_itemsize */ + (destructor)BTree_dealloc, /* tp_dealloc */ + 0, /* tp_print */ + 0, /* tp_getattr */ + 0, /* tp_setattr */ + 0, /* tp_compare */ + 0, /* tp_repr */ + &BTree_as_number_for_nonzero, /* tp_as_number */ + &TreeSet_as_sequence, /* tp_as_sequence */ + &TreeSet_as_mapping, /* tp_as_mapping */ + 0, /* tp_hash */ + 0, /* tp_call */ + 0, /* tp_str */ + 0, /* tp_getattro */ + 0, /* tp_setattro */ + 0, /* tp_as_buffer */ + Py_TPFLAGS_DEFAULT | + Py_TPFLAGS_HAVE_GC | + Py_TPFLAGS_BASETYPE, /* tp_flags */ + 0, /* tp_doc */ + (traverseproc)BTree_traverse, /* tp_traverse */ + (inquiry)BTree_tp_clear, /* tp_clear */ + 0, /* tp_richcompare */ + 0, /* tp_weaklistoffset */ + (getiterfunc)BTree_getiter, /* tp_iter */ + 0, /* tp_iternext */ + TreeSet_methods, /* tp_methods */ + BTree_members, /* tp_members */ + 0, /* tp_getset */ + 0, /* tp_base */ + 0, /* tp_dict */ + 0, /* tp_descr_get */ + 0, /* tp_descr_set */ + 0, /* tp_dictoffset */ + TreeSet_init, /* tp_init */ + 0, /* tp_alloc */ + 0, /*PyType_GenericNew,*/ /* tp_new */ +}; diff --git a/thesisenv/lib/python3.6/site-packages/BTrees/_IFBTree.c b/thesisenv/lib/python3.6/site-packages/BTrees/_IFBTree.c new file mode 100644 index 0000000..e272bf1 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/BTrees/_IFBTree.c @@ -0,0 +1,41 @@ +/*############################################################################ +# +# Copyright (c) 2004 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. 
+# +############################################################################*/ + +#define MASTER_ID "$Id$\n" + +/* IFBTree - int key, float value BTree + + Implements a collection using int type keys + and float type values +*/ + +/* Setup template macros */ + +#define PERSISTENT + +#define MOD_NAME_PREFIX "IF" + +#define DEFAULT_MAX_BUCKET_SIZE 120 +#define DEFAULT_MAX_BTREE_SIZE 500 + +#include "_compat.h" +#include "intkeymacros.h" +#include "floatvaluemacros.h" + +#ifdef PY3K +#define INITMODULE PyInit__IFBTree +#else +#define INITMODULE init_IFBTree +#endif +#include "BTreeModuleTemplate.c" diff --git a/thesisenv/lib/python3.6/site-packages/BTrees/_IFBTree.cpython-36m-darwin.so b/thesisenv/lib/python3.6/site-packages/BTrees/_IFBTree.cpython-36m-darwin.so new file mode 100755 index 0000000..b46c2c4 Binary files /dev/null and b/thesisenv/lib/python3.6/site-packages/BTrees/_IFBTree.cpython-36m-darwin.so differ diff --git a/thesisenv/lib/python3.6/site-packages/BTrees/_IIBTree.c b/thesisenv/lib/python3.6/site-packages/BTrees/_IIBTree.c new file mode 100644 index 0000000..ba5e58e --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/BTrees/_IIBTree.c @@ -0,0 +1,41 @@ +/*############################################################################ +# +# Copyright (c) 2004 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. 
+# +############################################################################*/ + +#define MASTER_ID "$Id$\n" + +/* IIBTree - int key, int value BTree + + Implements a collection using int type keys + and int type values +*/ + +/* Setup template macros */ + +#define PERSISTENT + +#define MOD_NAME_PREFIX "II" + +#define DEFAULT_MAX_BUCKET_SIZE 120 +#define DEFAULT_MAX_BTREE_SIZE 500 + +#include "_compat.h" +#include "intkeymacros.h" +#include "intvaluemacros.h" + +#ifdef PY3K +#define INITMODULE PyInit__IIBTree +#else +#define INITMODULE init_IIBTree +#endif +#include "BTreeModuleTemplate.c" diff --git a/thesisenv/lib/python3.6/site-packages/BTrees/_IIBTree.cpython-36m-darwin.so b/thesisenv/lib/python3.6/site-packages/BTrees/_IIBTree.cpython-36m-darwin.so new file mode 100755 index 0000000..0f1964d Binary files /dev/null and b/thesisenv/lib/python3.6/site-packages/BTrees/_IIBTree.cpython-36m-darwin.so differ diff --git a/thesisenv/lib/python3.6/site-packages/BTrees/_IOBTree.c b/thesisenv/lib/python3.6/site-packages/BTrees/_IOBTree.c new file mode 100644 index 0000000..f2bf096 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/BTrees/_IOBTree.c @@ -0,0 +1,39 @@ +/*############################################################################ +# +# Copyright (c) 2004 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. 
+# +############################################################################*/ + +#define MASTER_ID "$Id$\n" + +/* IOBTree - int key, object value BTree + + Implements a collection using int type keys + and object type values +*/ + +#define PERSISTENT + +#define MOD_NAME_PREFIX "IO" + +#define DEFAULT_MAX_BUCKET_SIZE 60 +#define DEFAULT_MAX_BTREE_SIZE 500 + +#include "_compat.h" +#include "intkeymacros.h" +#include "objectvaluemacros.h" + +#ifdef PY3K +#define INITMODULE PyInit__IOBTree +#else +#define INITMODULE init_IOBTree +#endif +#include "BTreeModuleTemplate.c" diff --git a/thesisenv/lib/python3.6/site-packages/BTrees/_IOBTree.cpython-36m-darwin.so b/thesisenv/lib/python3.6/site-packages/BTrees/_IOBTree.cpython-36m-darwin.so new file mode 100755 index 0000000..96afcdc Binary files /dev/null and b/thesisenv/lib/python3.6/site-packages/BTrees/_IOBTree.cpython-36m-darwin.so differ diff --git a/thesisenv/lib/python3.6/site-packages/BTrees/_LFBTree.c b/thesisenv/lib/python3.6/site-packages/BTrees/_LFBTree.c new file mode 100644 index 0000000..8fcd226 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/BTrees/_LFBTree.c @@ -0,0 +1,43 @@ +/*############################################################################ +# +# Copyright (c) 2004 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. 
+# +############################################################################*/ + +#define MASTER_ID "$Id: _IFBTree.c 67074 2006-04-17 19:13:39Z fdrake $\n" + +/* IFBTree - int key, float value BTree + + Implements a collection using int type keys + and float type values +*/ + +/* Setup template macros */ + +#define PERSISTENT + +#define MOD_NAME_PREFIX "LF" + +#define DEFAULT_MAX_BUCKET_SIZE 120 +#define DEFAULT_MAX_BTREE_SIZE 500 + +#define ZODB_64BIT_INTS + +#include "_compat.h" +#include "intkeymacros.h" +#include "floatvaluemacros.h" + +#ifdef PY3K +#define INITMODULE PyInit__LFBTree +#else +#define INITMODULE init_LFBTree +#endif +#include "BTreeModuleTemplate.c" diff --git a/thesisenv/lib/python3.6/site-packages/BTrees/_LFBTree.cpython-36m-darwin.so b/thesisenv/lib/python3.6/site-packages/BTrees/_LFBTree.cpython-36m-darwin.so new file mode 100755 index 0000000..d77737b Binary files /dev/null and b/thesisenv/lib/python3.6/site-packages/BTrees/_LFBTree.cpython-36m-darwin.so differ diff --git a/thesisenv/lib/python3.6/site-packages/BTrees/_LLBTree.c b/thesisenv/lib/python3.6/site-packages/BTrees/_LLBTree.c new file mode 100644 index 0000000..f3d237c --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/BTrees/_LLBTree.c @@ -0,0 +1,43 @@ +/*############################################################################ +# +# Copyright (c) 2004 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. 
+# +############################################################################*/ + +#define MASTER_ID "$Id: _IIBTree.c 25186 2004-06-02 15:07:33Z jim $\n" + +/* IIBTree - int key, int value BTree + + Implements a collection using int type keys + and int type values +*/ + +/* Setup template macros */ + +#define PERSISTENT + +#define MOD_NAME_PREFIX "LL" + +#define DEFAULT_MAX_BUCKET_SIZE 120 +#define DEFAULT_MAX_BTREE_SIZE 500 + +#define ZODB_64BIT_INTS + +#include "_compat.h" +#include "intkeymacros.h" +#include "intvaluemacros.h" + +#ifdef PY3K +#define INITMODULE PyInit__LLBTree +#else +#define INITMODULE init_LLBTree +#endif +#include "BTreeModuleTemplate.c" diff --git a/thesisenv/lib/python3.6/site-packages/BTrees/_LLBTree.cpython-36m-darwin.so b/thesisenv/lib/python3.6/site-packages/BTrees/_LLBTree.cpython-36m-darwin.so new file mode 100755 index 0000000..452ec0a Binary files /dev/null and b/thesisenv/lib/python3.6/site-packages/BTrees/_LLBTree.cpython-36m-darwin.so differ diff --git a/thesisenv/lib/python3.6/site-packages/BTrees/_LOBTree.c b/thesisenv/lib/python3.6/site-packages/BTrees/_LOBTree.c new file mode 100644 index 0000000..d9fc0e0 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/BTrees/_LOBTree.c @@ -0,0 +1,41 @@ +/*############################################################################ +# +# Copyright (c) 2004 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. 
+# +############################################################################*/ + +#define MASTER_ID "$Id: _IOBTree.c 25186 2004-06-02 15:07:33Z jim $\n" + +/* IOBTree - int key, object value BTree + + Implements a collection using int type keys + and object type values +*/ + +#define PERSISTENT + +#define MOD_NAME_PREFIX "LO" + +#define DEFAULT_MAX_BUCKET_SIZE 60 +#define DEFAULT_MAX_BTREE_SIZE 500 + +#define ZODB_64BIT_INTS + +#include "_compat.h" +#include "intkeymacros.h" +#include "objectvaluemacros.h" + +#ifdef PY3K +#define INITMODULE PyInit__LOBTree +#else +#define INITMODULE init_LOBTree +#endif +#include "BTreeModuleTemplate.c" diff --git a/thesisenv/lib/python3.6/site-packages/BTrees/_LOBTree.cpython-36m-darwin.so b/thesisenv/lib/python3.6/site-packages/BTrees/_LOBTree.cpython-36m-darwin.so new file mode 100755 index 0000000..06e9ad7 Binary files /dev/null and b/thesisenv/lib/python3.6/site-packages/BTrees/_LOBTree.cpython-36m-darwin.so differ diff --git a/thesisenv/lib/python3.6/site-packages/BTrees/_OIBTree.c b/thesisenv/lib/python3.6/site-packages/BTrees/_OIBTree.c new file mode 100644 index 0000000..73b28f5 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/BTrees/_OIBTree.c @@ -0,0 +1,39 @@ +/*############################################################################ +# +# Copyright (c) 2004 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. 
+# +############################################################################*/ + +#define MASTER_ID "$Id$\n" + +/* OIBTree - object key, int value BTree + + Implements a collection using object type keys + and int type values +*/ + +#define PERSISTENT + +#define MOD_NAME_PREFIX "OI" + +#define DEFAULT_MAX_BUCKET_SIZE 60 +#define DEFAULT_MAX_BTREE_SIZE 250 + +#include "_compat.h" +#include "objectkeymacros.h" +#include "intvaluemacros.h" + +#ifdef PY3K +#define INITMODULE PyInit__OIBTree +#else +#define INITMODULE init_OIBTree +#endif +#include "BTreeModuleTemplate.c" diff --git a/thesisenv/lib/python3.6/site-packages/BTrees/_OIBTree.cpython-36m-darwin.so b/thesisenv/lib/python3.6/site-packages/BTrees/_OIBTree.cpython-36m-darwin.so new file mode 100755 index 0000000..6a8b7bc Binary files /dev/null and b/thesisenv/lib/python3.6/site-packages/BTrees/_OIBTree.cpython-36m-darwin.so differ diff --git a/thesisenv/lib/python3.6/site-packages/BTrees/_OLBTree.c b/thesisenv/lib/python3.6/site-packages/BTrees/_OLBTree.c new file mode 100644 index 0000000..9f279fb --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/BTrees/_OLBTree.c @@ -0,0 +1,41 @@ +/*############################################################################ +# +# Copyright (c) 2004 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. 
+# +############################################################################*/ + +#define MASTER_ID "$Id: _OIBTree.c 25186 2004-06-02 15:07:33Z jim $\n" + +/* OIBTree - object key, int value BTree + + Implements a collection using object type keys + and int type values +*/ + +#define PERSISTENT + +#define MOD_NAME_PREFIX "OL" + +#define DEFAULT_MAX_BUCKET_SIZE 60 +#define DEFAULT_MAX_BTREE_SIZE 250 + +#define ZODB_64BIT_INTS + +#include "_compat.h" +#include "objectkeymacros.h" +#include "intvaluemacros.h" + +#ifdef PY3K +#define INITMODULE PyInit__OLBTree +#else +#define INITMODULE init_OLBTree +#endif +#include "BTreeModuleTemplate.c" diff --git a/thesisenv/lib/python3.6/site-packages/BTrees/_OLBTree.cpython-36m-darwin.so b/thesisenv/lib/python3.6/site-packages/BTrees/_OLBTree.cpython-36m-darwin.so new file mode 100755 index 0000000..2508397 Binary files /dev/null and b/thesisenv/lib/python3.6/site-packages/BTrees/_OLBTree.cpython-36m-darwin.so differ diff --git a/thesisenv/lib/python3.6/site-packages/BTrees/_OOBTree.c b/thesisenv/lib/python3.6/site-packages/BTrees/_OOBTree.c new file mode 100644 index 0000000..1ce3322 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/BTrees/_OOBTree.c @@ -0,0 +1,39 @@ +/*############################################################################ +# +# Copyright (c) 2004 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. 
+# +############################################################################*/ + +#define MASTER_ID "$Id$\n" + +/* OOBTree - object key, object value BTree + + Implements a collection using object type keys + and object type values +*/ + +#define PERSISTENT + +#define MOD_NAME_PREFIX "OO" + +#define DEFAULT_MAX_BUCKET_SIZE 30 +#define DEFAULT_MAX_BTREE_SIZE 250 + +#include "_compat.h" +#include "objectkeymacros.h" +#include "objectvaluemacros.h" + +#ifdef PY3K +#define INITMODULE PyInit__OOBTree +#else +#define INITMODULE init_OOBTree +#endif +#include "BTreeModuleTemplate.c" diff --git a/thesisenv/lib/python3.6/site-packages/BTrees/_OOBTree.cpython-36m-darwin.so b/thesisenv/lib/python3.6/site-packages/BTrees/_OOBTree.cpython-36m-darwin.so new file mode 100755 index 0000000..02ac2e1 Binary files /dev/null and b/thesisenv/lib/python3.6/site-packages/BTrees/_OOBTree.cpython-36m-darwin.so differ diff --git a/thesisenv/lib/python3.6/site-packages/BTrees/__init__.py b/thesisenv/lib/python3.6/site-packages/BTrees/__init__.py new file mode 100644 index 0000000..ab65d67 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/BTrees/__init__.py @@ -0,0 +1,69 @@ +############################################################################# +# +# Copyright (c) 2007 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. 
+# +############################################################################# + +import zope.interface +import BTrees.Interfaces + + +@zope.interface.implementer(BTrees.Interfaces.IBTreeFamily) +class _Family(object): + + from BTrees import OOBTree as OO + +class _Family32(_Family): + from BTrees import OIBTree as OI + from BTrees import IIBTree as II + from BTrees import IOBTree as IO + from BTrees import IFBTree as IF + + maxint = int(2**31-1) + minint = -maxint - 1 + + def __reduce__(self): + return _family32, () + +class _Family64(_Family): + from BTrees import OLBTree as OI + from BTrees import LLBTree as II + from BTrees import LOBTree as IO + from BTrees import LFBTree as IF + + maxint = 2**63-1 + minint = -maxint - 1 + + def __reduce__(self): + return _family64, () + +def _family32(): + return family32 +_family32.__safe_for_unpickling__ = True + +def _family64(): + return family64 +_family64.__safe_for_unpickling__ = True + + +family32 = _Family32() +family64 = _Family64() + + +BTrees.family64.IO.family = family64 +BTrees.family64.OI.family = family64 +BTrees.family64.IF.family = family64 +BTrees.family64.II.family = family64 + +BTrees.family32.IO.family = family32 +BTrees.family32.OI.family = family32 +BTrees.family32.IF.family = family32 +BTrees.family32.II.family = family32 diff --git a/thesisenv/lib/python3.6/site-packages/BTrees/_base.py b/thesisenv/lib/python3.6/site-packages/BTrees/_base.py new file mode 100644 index 0000000..bee2948 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/BTrees/_base.py @@ -0,0 +1,1595 @@ +############################################################################## +# +# Copyright 2011 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Python BTree implementation +""" + +from struct import Struct +from struct import error as struct_error +from operator import index + +from persistent import Persistent + +from .Interfaces import BTreesConflictError +from ._compat import PY3 +from ._compat import compare +from ._compat import int_types +from ._compat import xrange + + +_marker = object() + + +class _Base(Persistent): + + __slots__ = () + _key_type = list + + def __init__(self, items=None): + self.clear() + if items: + self.update(items) + + try: + # Detect the presence of the C extensions. + # If they're NOT around, we don't need to do any of the + # special pickle support to make Python versions look like + # C---we just rename the classes. By not defining these methods, + # we can (theoretically) avoid a bit of a slowdown. + # If the C extensions are around, we do need these methods, but + # these classes are unlikely to be used in production anyway. + __import__('BTrees._OOBTree') + except ImportError: # pragma: no cover + pass + else: + def __reduce__(self): + # Swap out the type constructor for the C version, if present. 
+ func, typ_gna, state = Persistent.__reduce__(self) + # We ignore the returned type altogether in favor of + # our calculated class (which allows subclasses but replaces our exact + # type with the C equivalent) + typ = self.__class__ + gna = typ_gna[1:] + return (func, (typ,) + gna, state) + + @property + def __class__(self): + type_self = type(self) + return type_self._BTree_reduce_as if type_self._BTree_reduce_up_bound is type_self else type_self + + @property + def _BTree_reduce_as(self): + # Return the pickle replacement class for this object. + # If the C extensions are available, this will be the + # C type (setup by _fix_pickle), otherwise it will be the real + # type of this object. + # This implementation is replaced by _fix_pickle and exists for + # testing purposes. + return type(self) # pragma: no cover + + _BTree_reduce_up_bound = _BTree_reduce_as + +class _BucketBase(_Base): + + __slots__ = ('_keys', + '_next', + '_to_key', + ) + + def clear(self): + self._keys = self._key_type() + self._next = None + + def __len__(self): + return len(self._keys) + + @property + def size(self): + return len(self._keys) + + def _deleteNextBucket(self): + next = self._next + if next is not None: + self._next = next._next + + def _search(self, key): + # Return non-negative index on success + # return -(insertion_index + 1) on fail + low = 0 + keys = self._keys + high = len(keys) + while low < high: + i = (low + high) // 2 + k = keys[i] + if k is key or k == key: + return i + + if compare(k, key) < 0: + low = i + 1 + else: + high = i + return -1 - low + + def minKey(self, key=_marker): + if key is _marker or key is None: + return self._keys[0] + key = self._to_key(key) + index = self._search(key) + if index >= 0: + return key + index = -index - 1 + if index < len(self._keys): + return self._keys[index] + else: + raise ValueError("no key satisfies the conditions") + + def maxKey(self, key=_marker): + if key is _marker or key is None: + return self._keys[-1] + key = 
self._to_key(key) + index = self._search(key) + if index >= 0: + return key + else: + index = -index-1 + if index: + return self._keys[index-1] + else: + raise ValueError("no key satisfies the conditions") + + def _range(self, min=_marker, max=_marker, + excludemin=False, excludemax=False): + if min is _marker or min is None: + start = 0 + if excludemin: + start = 1 + else: + min = self._to_key(min) + start = self._search(min) + if start >= 0: + if excludemin: + start += 1 + else: + start = -start - 1 + if max is _marker or max is None: + end = len(self._keys) + if excludemax: + end -= 1 + else: + max = self._to_key(max) + end = self._search(max) + if end >= 0: + if not excludemax: + end += 1 + else: + end = -end - 1 + + return start, end + + def keys(self, *args, **kw): + start, end = self._range(*args, **kw) + return self._keys[start:end] + + def iterkeys(self, *args, **kw): + if not (args or kw): + return iter(self._keys) + keys = self._keys + return (keys[i] for i in xrange(*self._range(*args, **kw))) + + def __iter__(self): + return iter(self._keys) + + def __contains__(self, key): + return (self._search(self._to_key(key)) >= 0) + + has_key = __contains__ + + def _repr_helper(self, items): + type_self = type(self) + mod = type_self.__module__ + name = type_self.__name__ + name = name[:-2] if name.endswith("Py") else name + return "%s.%s(%r)" % (mod, name, items) + +class _SetIteration(object): + + __slots__ = ('to_iterate', + 'useValues', + '_iter', + 'active', + 'position', + 'key', + 'value', + ) + + + def __init__(self, to_iterate, useValues=False, default=None): + if to_iterate is None: + to_iterate = () + self.to_iterate = to_iterate + if useValues: + try: + itmeth = to_iterate.iteritems + except AttributeError: + if PY3 and isinstance(to_iterate, dict): #pragma no cover Py3k + itmeth = to_iterate.items().__iter__ + else: + itmeth = to_iterate.__iter__ + useValues = False + else: + self.value = None + else: + itmeth = to_iterate.__iter__ + + 
self.useValues = useValues + self._iter = itmeth() + self.active = True + self.position = 0 + self.key = _marker + self.value = default + self.advance() + + def advance(self): + try: + if self.useValues: + self.key, self.value = next(self._iter) + else: + self.key = next(self._iter) + self.position += 1 + except StopIteration: + self.active = False + self.position = -1 + + return self + +_object_lt = getattr(object, '__lt__', _marker) + +def _no_default_comparison(key): + # Enforce test that key has non-default comparison. + if key is None: + return + if type(key) is object: + raise TypeError("Can't use object() as keys") + lt = getattr(key, '__lt__', None) + if lt is not None: + # CPython 3.x follows PEP 252, defining '__objclass__' + if getattr(lt, '__objclass__', None) is object: + lt = None # pragma: no cover Py3k + # PyPy3 doesn't follow PEP 252, but defines '__func__' + elif getattr(lt, '__func__', None) is _object_lt: + lt = None # pragma: no cover PyPy3 + if (lt is None and + getattr(key, '__cmp__', None) is None): + raise TypeError("Object has default comparison") + +class Bucket(_BucketBase): + + __slots__ = () + _value_type = list + _to_value = lambda self, x: x + VALUE_SAME_CHECK = False + + def setdefault(self, key, value): + key, value = self._to_key(key), self._to_value(value) + status, value = self._set(key, value, True) + return value + + def pop(self, key, default=_marker): + try: + status, value = self._del(self._to_key(key)) + except KeyError: + if default is _marker: + raise + return default + else: + return value + + def update(self, items): + if hasattr(items, 'iteritems'): + items = items.iteritems() + elif hasattr(items, 'items'): + items = items.items() + + _si = self.__setitem__ + try: + for key, value in items: + _si(key, value) + except ValueError: + raise TypeError('items must be a sequence of 2-tuples') + + def __setitem__(self, key, value): + _no_default_comparison(key) + self._set(self._to_key(key), self._to_value(value)) + + def 
__delitem__(self, key): + self._del(self._to_key(key)) + + def clear(self): + _BucketBase.clear(self) + self._values = self._value_type() + + def get(self, key, default=None): + index = self._search(self._to_key(key)) + if index < 0: + return default + return self._values[index] + + def __getitem__(self, key): + index = self._search(self._to_key(key)) + if index < 0: + raise KeyError(key) + return self._values[index] + + def _set(self, key, value, ifunset=False): + """Set a value + + Return: status, value + + Status is: + None if no change + 0 if change, but not size change + 1 if change and size change + """ + index = self._search(key) + if index >= 0: + if (ifunset or + self.VALUE_SAME_CHECK and value == self._values[index] + ): + return None, self._values[index] + self._p_changed = True + self._values[index] = value + return 0, value + else: + self._p_changed = True + index = -index - 1 + self._keys.insert(index, key) + self._values.insert(index, value) + return 1, value + + def _del(self, key): + index = self._search(key) + if index >= 0: + self._p_changed = True + del self._keys[index] + return 0, self._values.pop(index) + raise KeyError(key) + + def _split(self, index=-1): + if index < 0 or index >= len(self._keys): + index = len(self._keys) // 2 + new_instance = type(self)() + new_instance._keys = self._keys[index:] + new_instance._values = self._values[index:] + del self._keys[index:] + del self._values[index:] + new_instance._next = self._next + self._next = new_instance + return new_instance + + def values(self, *args, **kw): + start, end = self._range(*args, **kw) + return self._values[start:end] + + def itervalues(self, *args, **kw): + values = self._values + return (values[i] for i in xrange(*self._range(*args, **kw))) + + def items(self, *args, **kw): + keys = self._keys + values = self._values + return [(keys[i], values[i]) + for i in xrange(*self._range(*args, **kw))] + + def iteritems(self, *args, **kw): + keys = self._keys + values = self._values 
+ return ((keys[i], values[i]) + for i in xrange(*self._range(*args, **kw))) + + def __getstate__(self): + keys = self._keys + values = self._values + data = [] + for i in range(len(keys)): + data.append(keys[i]) + data.append(values[i]) + data = tuple(data) + + if self._next is not None: + return data, self._next + return (data, ) + + def __setstate__(self, state): + if not isinstance(state[0], tuple): + raise TypeError("tuple required for first state element") + + self.clear() + if len(state) == 2: + state, self._next = state + else: + self._next = None + state = state[0] + + keys = self._keys + values = self._values + for i in range(0, len(state), 2): + keys.append(state[i]) + values.append(state[i+1]) + + def _p_resolveConflict(self, s_old, s_com, s_new): + b_old = type(self)() + if s_old is not None: + b_old.__setstate__(s_old) + b_com = type(self)() + if s_com is not None: + b_com.__setstate__(s_com) + b_new = type(self)() + if s_new is not None: + b_new.__setstate__(s_new) + if (b_com._next != b_old._next or + b_new._next != b_old._next): + raise BTreesConflictError(-1, -1, -1, 0) + + if not b_com or not b_new: + raise BTreesConflictError(-1, -1, -1, 12) + + i_old = _SetIteration(b_old, True) + i_com = _SetIteration(b_com, True) + i_new = _SetIteration(b_new, True) + + def merge_error(reason): + return BTreesConflictError( + i_old.position, i_com.position, i_new.position, reason) + + result = type(self)() + + def merge_output(it): + result._keys.append(it.key) + result._values.append(it.value) + it.advance() + + while i_old.active and i_com.active and i_new.active: + cmpOC = compare(i_old.key, i_com.key) + cmpON = compare(i_old.key, i_new.key) + if cmpOC == 0: + if cmpON == 0: + if i_com.value == i_old.value: + result[i_old.key] = i_new.value + elif i_new.value == i_old.value: + result[i_old.key] = i_com.value + else: + raise merge_error(1) + i_old.advance() + i_com.advance() + i_new.advance() + elif (cmpON > 0): # insert in new + merge_output(i_new) + elif 
i_old.value == i_com.value: # deleted new + if i_new.position == 1: + # Deleted the first item. This will modify the + # parent node, so we don't know if merging will be + # safe + raise merge_error(13) + i_old.advance() + i_com.advance() + else: + raise merge_error(2) + elif cmpON == 0: + if cmpOC > 0: # insert committed + merge_output(i_com) + elif i_old.value == i_new.value: # delete committed + if i_com.position == 1: + # Deleted the first item. This will modify the + # parent node, so we don't know if merging will be + # safe + raise merge_error(13) + i_old.advance() + i_new.advance() + else: + raise merge_error(3) + else: # both keys changed + cmpCN = compare(i_com.key, i_new.key) + if cmpCN == 0: # dueling insert + raise merge_error(4) + if cmpOC > 0: # insert committed + if cmpCN > 0: # insert i_new first + merge_output(i_new) + else: + merge_output(i_com) + elif cmpON > 0: # insert i_new + merge_output(i_new) + else: + raise merge_error(5) # both deleted same key + + while i_com.active and i_new.active: # new inserts + cmpCN = compare(i_com.key, i_new.key) + if cmpCN == 0: + raise merge_error(6) # dueling insert + if cmpCN > 0: # insert new + merge_output(i_new) + else: # insert committed + merge_output(i_com) + + while i_old.active and i_com.active: # new deletes rest of original + cmpOC = compare(i_old.key, i_com.key) + if cmpOC > 0: # insert committed + merge_output(i_com) + elif cmpOC == 0 and (i_old.value == i_com.value): # del in new + i_old.advance() + i_com.advance() + else: # dueling deletes or delete and change + raise merge_error(7) + + while i_old.active and i_new.active: + # committed deletes rest of original + cmpON = compare(i_old.key, i_new.key) + if cmpON > 0: # insert new + merge_output(i_new) + elif cmpON == 0 and (i_old.value == i_new.value): + # deleted in committed + i_old.advance() + i_new.advance() + else: # dueling deletes or delete and change + raise merge_error(8) + + if i_old.active: # dueling deletes + raise merge_error(9) + + 
while i_com.active: + merge_output(i_com) + + while i_new.active: + merge_output(i_new) + + if len(result._keys) == 0: #pragma: no cover + # If the output bucket is empty, conflict resolution doesn't have + # enough info to unlink it from its containing BTree correctly. + # + # XXX TS, 2012-11-16: I don't think this is possible + # + raise merge_error(10) + + result._next = b_old._next + return result.__getstate__() + + def __repr__(self): + return self._repr_helper(self.items()) + +class Set(_BucketBase): + + __slots__ = () + + def add(self, key): + return self._set(self._to_key(key))[0] + + insert = add + + def remove(self, key): + self._del(self._to_key(key)) + + def update(self, items): + add = self.add + for i in items: + add(i) + + def __getstate__(self): + data = tuple(self._keys) + if self._next is not None: + return data, self._next + return (data, ) + + def __setstate__(self, state): + if not isinstance(state[0], tuple): + raise TypeError('tuple required for first state element') + + self.clear() + if len(state) == 2: + state, self._next = state + else: + self._next = None + state = state[0] + + self._keys.extend(state) + + + def _set(self, key, value=None, ifunset=False): + index = self._search(key) + if index < 0: + index = -index - 1 + self._p_changed = True + self._keys.insert(index, key) + return True, None + return False, None + + def _del(self, key): + index = self._search(key) + if index >= 0: + self._p_changed = True + del self._keys[index] + return 0, 0 + raise KeyError(key) + + def __getitem__(self, i): + return self._keys[i] + + def _split(self, index=-1): + if index < 0 or index >= len(self._keys): + index = len(self._keys) // 2 + new_instance = type(self)() + new_instance._keys = self._keys[index:] + del self._keys[index:] + new_instance._next = self._next + self._next = new_instance + return new_instance + + def _p_resolveConflict(self, s_old, s_com, s_new): + + b_old = type(self)() + if s_old is not None: + b_old.__setstate__(s_old) + 
b_com = type(self)() + if s_com is not None: + b_com.__setstate__(s_com) + b_new = type(self)() + if s_new is not None: + b_new.__setstate__(s_new) + + if (b_com._next != b_old._next or + b_new._next != b_old._next): # conflict: com or new changed _next + raise BTreesConflictError(-1, -1, -1, 0) + + if not b_com or not b_new: # conflict: com or new empty + raise BTreesConflictError(-1, -1, -1, 12) + + i_old = _SetIteration(b_old, True) + i_com = _SetIteration(b_com, True) + i_new = _SetIteration(b_new, True) + + def merge_error(reason): + return BTreesConflictError( + i_old.position, i_com.position, i_new.position, reason) + + result = type(self)() + + def merge_output(it): + result._keys.append(it.key) + it.advance() + + while i_old.active and i_com.active and i_new.active: + cmpOC = compare(i_old.key, i_com.key) + cmpON = compare(i_old.key, i_new.key) + if cmpOC == 0: + if cmpON == 0: # all match + merge_output(i_old) + i_com.advance() + i_new.advance() + elif cmpON > 0: # insert in new + merge_output(i_new) + else: # deleted new + if i_new.position == 1: + # Deleted the first item. This will modify the + # parent node, so we don't know if merging will be + # safe + raise merge_error(13) + i_old.advance() + i_com.advance() + elif cmpON == 0: + if cmpOC > 0: # insert committed + merge_output(i_com) + else: # delete committed + if i_com.position == 1: + # Deleted the first item. 
This will modify the + # parent node, so we don't know if merging will be + # safe + raise merge_error(13) + i_old.advance() + i_new.advance() + else: # both com and new keys changed + cmpCN = compare(i_com.key, i_new.key) + if cmpCN == 0: # both inserted same key + raise merge_error(4) + if cmpOC > 0: # insert committed + if cmpCN > 0: # insert i_new first + merge_output(i_new) + else: + merge_output(i_com) + elif cmpON > 0: # insert i_new + merge_output(i_new) + else: # both com and new deleted same key + raise merge_error(5) + + while i_com.active and i_new.active: # new inserts + cmpCN = compare(i_com.key, i_new.key) + if cmpCN == 0: # dueling insert + raise merge_error(6) + if cmpCN > 0: # insert new + merge_output(i_new) + else: # insert committed + merge_output(i_com) + + while i_old.active and i_com.active: # new deletes rest of original + cmpOC = compare(i_old.key, i_com.key) + if cmpOC > 0: # insert committed + merge_output(i_com) + elif cmpOC == 0: # del in new + i_old.advance() + i_com.advance() + else: # dueling deletes or delete and change + raise merge_error(7) + + while i_old.active and i_new.active: + # committed deletes rest of original + cmpON = compare(i_old.key, i_new.key) + if cmpON > 0: # insert new + merge_output(i_new) + elif cmpON == 0: # deleted in committed + i_old.advance() + i_new.advance() + else: # dueling deletes or delete and change + raise merge_error(8) + + if i_old.active: # dueling deletes + raise merge_error(9) + + while i_com.active: + merge_output(i_com) + + while i_new.active: + merge_output(i_new) + + if len(result._keys) == 0: #pragma: no cover + # If the output bucket is empty, conflict resolution doesn't have + # enough info to unlink it from its containing BTree correctly. 
+ # + # XXX TS, 2012-11-16: I don't think this is possible + # + raise merge_error(10) + + result._next = b_old._next + return result.__getstate__() + + def __repr__(self): + return self._repr_helper(self._keys) + +class _TreeItem(object): + + __slots__ = ('key', + 'child', + ) + + def __init__(self, key, child): + self.key = key + self.child = child + + +class _Tree(_Base): + + __slots__ = ('_data', + '_firstbucket', + ) + + def __new__(cls, *args): + value = _Base.__new__(cls, *args) + # Empty trees don't get their __setstate__ called upon + # unpickling (or __init__, obviously), so clear() is never called + # and _data and _firstbucket are never defined, unless we do it here. + value._data = [] + value._firstbucket = None + return value + + def setdefault(self, key, value): + return self._set(self._to_key(key), self._to_value(value), True)[1] + + def pop(self, key, default=_marker): + try: + return self._del(self._to_key(key))[1] + except KeyError: + if default is _marker: + raise + return default + + def update(self, items): + if hasattr(items, 'iteritems'): + items = items.iteritems() + elif hasattr(items, 'items'): + items = items.items() + + set = self.__setitem__ + for i in items: + set(*i) + + def __setitem__(self, key, value): + _no_default_comparison(key) + self._set(self._to_key(key), self._to_value(value)) + + def __delitem__(self, key): + self._del(self._to_key(key)) + + def clear(self): + if self._data: + # In the case of __init__, this was already set by __new__ + self._data = [] + self._firstbucket = None + + def __nonzero__(self): + return bool(self._data) + __bool__ = __nonzero__ #Py3k rename + + def __len__(self): + l = 0 + bucket = self._firstbucket + while bucket is not None: + l += len(bucket._keys) + bucket = bucket._next + return l + + @property + def size(self): + return len(self._data) + + def _search(self, key): + data = self._data + if data: + lo = 0 + hi = len(data) + i = hi // 2 + while i > lo: + cmp_ = compare(data[i].key, key) + if 
cmp_ < 0: + lo = i + elif cmp_ > 0: + hi = i + else: + break + i = (lo + hi) // 2 + return i + return -1 + + def _findbucket(self, key): + index = self._search(key) + if index >= 0: + child = self._data[index].child + if isinstance(child, self._bucket_type): + return child + return child._findbucket(key) + + def __contains__(self, key): + try: + tree_key = self._to_key(key) + except TypeError: + # Can't convert the key, so can't possibly be in the tree + return False + return key in (self._findbucket(tree_key) or ()) + + def has_key(self, key): + index = self._search(key) + if index < 0: + return False + r = self._data[index].child.has_key(key) + return r and r + 1 + + def keys(self, min=_marker, max=_marker, + excludemin=False, excludemax=False, + itertype='iterkeys'): + if not self._data: + return () + + if min is not _marker and min is not None: + min = self._to_key(min) + bucket = self._findbucket(min) + else: + bucket = self._firstbucket + + iterargs = min, max, excludemin, excludemax + + return _TreeItems(bucket, itertype, iterargs) + + def iterkeys(self, min=_marker, max=_marker, + excludemin=False, excludemax=False): + return iter(self.keys(min, max, excludemin, excludemax)) + + def __iter__(self): + return iter(self.keys()) + + def minKey(self, min=_marker): + if min is _marker or min is None: + bucket = self._firstbucket + else: + min = self._to_key(min) + bucket = self._findbucket(min) + if bucket is not None: + return bucket.minKey(min) + raise ValueError('empty tree') + + def maxKey(self, max=_marker): + data = self._data + if not data: + raise ValueError('empty tree') + if max is _marker or max is None: + return data[-1].child.maxKey() + + max = self._to_key(max) + index = self._search(max) + if index and compare(data[index].child.minKey(), max) > 0: + index -= 1 #pragma: no cover no idea how to provoke this + return data[index].child.maxKey(max) + + + def _set(self, key, value=None, ifunset=False): + if (self._p_jar is not None and + self._p_oid is 
not None and + self._p_serial is not None): + self._p_jar.readCurrent(self) + data = self._data + if data: + index = self._search(key) + child = data[index].child + else: + index = 0 + child = self._bucket_type() + self._firstbucket = child + data.append(_TreeItem(None, child)) + + result = child._set(key, value, ifunset) + grew = result[0] + if grew: + if type(child) is type(self): + max_size = self.max_internal_size + else: + max_size = self.max_leaf_size + if child.size > max_size: + self._grow(child, index) + + # If a BTree contains only a single bucket, BTree.__getstate__() + # includes the bucket's entire state, and the bucket doesn't get + # an oid of its own. So if we have a single oid-less bucket that + # changed, it's *our* oid that should be marked as changed -- the + # bucket doesn't have one. + if (grew is not None and + type(child) is self._bucket_type and + len(data) == 1 and + child._p_oid is None): + self._p_changed = 1 + return result + + def _grow(self, child, index): + self._p_changed = True + new_child = child._split() + self._data.insert(index+1, _TreeItem(new_child.minKey(), new_child)) + if len(self._data) >= self.max_internal_size * 2: + self._split_root() + + def _split_root(self): + child = type(self)() + child._data = self._data + child._firstbucket = self._firstbucket + self._data = [_TreeItem(None, child)] + self._grow(child, 0) + + def _split(self, index=None): + data = self._data + if index is None: + index = len(data) // 2 + + next = type(self)() + next._data = data[index:] + first = data[index] + del data[index:] + if len(data) == 0: + self._firstbucket = None # lost our bucket, can't buy no beer + if isinstance(first.child, type(self)): + next._firstbucket = first.child._firstbucket + else: + next._firstbucket = first.child; + return next + + def _del(self, key): + if (self._p_jar is not None and + self._p_oid is not None and + self._p_serial is not None): + self._p_jar.readCurrent(self) + + data = self._data + if not data: + 
raise KeyError(key) + + index = self._search(key) + child = data[index].child + + removed_first_bucket, value = child._del(key) + + # See comment in _set about small trees + if (len(data) == 1 and + type(child) is self._bucket_type and + child._p_oid is None): + self._p_changed = True + + # fix up the node key, but not for the 0'th one. + if index > 0 and child.size and compare(key, data[index].key) == 0: + self._p_changed = True + data[index].key = child.minKey() + + if removed_first_bucket: + if index: + data[index-1].child._deleteNextBucket() + removed_first_bucket = False # clear flag + else: + self._firstbucket = child._firstbucket + + if not child.size: + if type(child) is self._bucket_type: + if index: + data[index-1].child._deleteNextBucket() + else: + self._firstbucket = child._next + removed_first_bucket = True + del data[index] + + return removed_first_bucket, value + + def _deleteNextBucket(self): + self._data[-1].child._deleteNextBucket() + + def __getstate__(self): + data = self._data + + if not data: + # Note: returning None here causes our __setstate__ + # to not be called on unpickling + return None + + if (len(data) == 1 and + type(data[0].child) is not type(self) and + data[0].child._p_oid is None + ): + return ((data[0].child.__getstate__(), ), ) + + sdata = [] + for item in data: + if sdata: + sdata.append(item.key) + sdata.append(item.child) + else: + sdata.append(item.child) + + return tuple(sdata), self._firstbucket + + def __setstate__(self, state): + if state and not isinstance(state[0], tuple): + raise TypeError('tuple required for first state element') + + self.clear() + if state is None: + return + + if len(state) == 1: + bucket = self._bucket_type() + bucket.__setstate__(state[0][0]) + state = [bucket], bucket + + data, self._firstbucket = state + data = list(reversed(data)) + + self._data.append(_TreeItem(None, data.pop())) + while data: + key = data.pop() + child = data.pop() + self._data.append(_TreeItem(key, child)) + + def 
_assert(self, condition, message): + if not condition: + raise AssertionError(message) + + def _check(self, nextbucket=None): + data = self._data + assert_ = self._assert + if not data: + assert_(self._firstbucket is None, + "Empty BTree has non-NULL firstbucket") + return + assert_(self._firstbucket is not None, + "Non-empty BTree has NULL firstbucket") + + child_class = type(data[0].child) + for i in data: + assert_(i.child is not None, "BTree has NULL child") + assert_(type(i.child) is child_class, + "BTree children have different types") + assert_(i.child.size, "Bucket length < 1") + + if child_class is type(self): + assert_(self._firstbucket is data[0].child._firstbucket, + "BTree has firstbucket different than " + "its first child's firstbucket") + for i in range(len(data)-1): + data[i].child._check(data[i+1].child._firstbucket) + data[-1].child._check(nextbucket) + elif child_class is self._bucket_type: + assert_(self._firstbucket is data[0].child, + "Bottom-level BTree node has inconsistent firstbucket " + "belief") + for i in range(len(data)-1): + assert_(data[i].child._next is data[i+1].child, + "Bucket next pointer is damaged") + assert_(data[-1].child._next is nextbucket, + "Bucket next pointer is damaged") + else: + assert_(False, "Incorrect child type") + + def _p_resolveConflict(self, old, com, new): + s_old = _get_simple_btree_bucket_state(old) + s_com = _get_simple_btree_bucket_state(com) + s_new = _get_simple_btree_bucket_state(new) + return (( + self._bucket_type()._p_resolveConflict(s_old, s_com, s_new), ), ) + + def __repr__(self): + r = super(_Tree, self).__repr__() + r = r.replace('Py', '') + return r + +def _get_simple_btree_bucket_state(state): + if state is None: + return state + if not isinstance(state, tuple): + raise TypeError("_p_resolveConflict: expected tuple or None for state") + if len(state) == 2: # non-degenerate BTree, can't resolve + raise BTreesConflictError(-1, -1, -1, 11) + # Peel away wrapper to get to only-bucket state. 
+ if len(state) != 1: + raise TypeError("_p_resolveConflict: expected 1- or 2-tuple for state") + state = state[0] + if not isinstance(state, tuple) or len(state) != 1: + raise TypeError("_p_resolveConflict: expected 1-tuple containing " + "bucket state") + state = state[0] + if not isinstance(state, tuple): + raise TypeError("_p_resolveConflict: expected tuple for bucket state") + return state + + +class _TreeItems(object): + + __slots__ = ('firstbucket', + 'itertype', + 'iterargs', + 'index', + 'it', + 'v', + '_len', + ) + + def __init__(self, firstbucket, itertype, iterargs): + self.firstbucket = firstbucket + self.itertype = itertype + self.iterargs = iterargs + self.index = -1 + self.it = iter(self) + self.v = None + self._len = None + + def __getitem__(self, i): + if isinstance(i, slice): + return list(self)[i] + if i < 0: + i = len(self) + i + if i < 0: + raise IndexError(i) + + if i < self.index: + self.index = -1 + self.it = iter(self) + + while i > self.index: + try: + self.v = next(self.it) + except StopIteration: + raise IndexError(i) + else: + self.index += 1 + return self.v + + def __len__(self): + if self._len is None: + i = 0 + for _ in self: + i += 1 + self._len = i + return self._len + + def __iter__(self): + bucket = self.firstbucket + itertype = self.itertype + iterargs = self.iterargs + done = 0 + # Note that we don't mind if the first bucket yields no + # results due to an idiosyncrasy in how range searches are done. + while bucket is not None: + for k in getattr(bucket, itertype)(*iterargs): + yield k + done = 0 + if done: + return + bucket = bucket._next + done = 1 + + +class _TreeIterator(object): + """ Faux implementation for BBB only. + """ + def __init__(self, items): #pragma: no cover + raise TypeError( + "TreeIterators are private implementation details " + "of the C-based BTrees.\n\n" + "Please use 'iter(tree)', rather than instantiating " + "one directly." 
+ ) + + +class Tree(_Tree): + + __slots__ = () + + def get(self, key, default=None): + bucket = self._findbucket(key) + if bucket: + return bucket.get(key, default) + return default + + def __getitem__(self, key): + bucket = self._findbucket(key) + if bucket: + return bucket[key] + raise KeyError(key) + + def values(self, min=_marker, max=_marker, + excludemin=False, excludemax=False): + return self.keys(min, max, excludemin, excludemax, 'itervalues') + + def itervalues(self, min=_marker, max=_marker, + excludemin=False, excludemax=False): + return iter(self.values(min, max, excludemin, excludemax)) + + def items(self, min=_marker, max=_marker, + excludemin=False, excludemax=False): + return self.keys(min, max, excludemin, excludemax, 'iteritems') + + def iteritems(self, min=_marker, max=_marker, + excludemin=False, excludemax=False): + return iter(self.items(min, max, excludemin, excludemax)) + + def byValue(self, min): + return reversed( + sorted((v, k) for (k, v) in self.iteritems() if v >= min)) + + def insert(self, key, value): + return bool(self._set(key, value, True)[0]) + + +class TreeSet(_Tree): + + __slots__ = () + + def add(self, key): + return self._set(self._to_key(key))[0] + + insert = add + + def remove(self, key): + self._del(self._to_key(key)) + + def update(self, items): + add = self.add + for i in items: + add(i) + + _p_resolveConflict = _Tree._p_resolveConflict + + +class set_operation(object): + + __slots__ = ('func', + 'set_type', + ) + + def __init__(self, func, set_type): + self.func = func + self.set_type = set_type + + def __call__(self, *a, **k): + return self.func(self.set_type, *a, **k) + + +def difference(set_type, o1, o2): + if o1 is None or o2 is None: + return o1 + i1 = _SetIteration(o1, True, 0) + i2 = _SetIteration(o2, False, 0) + if i1.useValues: + result = o1._mapping_type() + def copy(i): + result._keys.append(i.key) + result._values.append(i.value) + else: + result = o1._set_type() + def copy(i): + result._keys.append(i.key) + 
while i1.active and i2.active: + cmp_ = compare(i1.key, i2.key) + if cmp_ < 0: + copy(i1) + i1.advance() + elif cmp_ == 0: + i1.advance() + i2.advance() + else: + i2.advance() + while i1.active: + copy(i1) + i1.advance() + return result + +def union(set_type, o1, o2): + if o1 is None: + return o2 + if o2 is None: + return o1 + i1 = _SetIteration(o1, False, 0) + i2 = _SetIteration(o2, False, 0) + result = o1._set_type() + def copy(i): + result._keys.append(i.key) + while i1.active and i2.active: + cmp_ = compare(i1.key, i2.key) + if cmp_ < 0: + copy(i1) + i1.advance() + elif cmp_ == 0: + copy(i1) + i1.advance() + i2.advance() + else: + copy(i2) + i2.advance() + while i1.active: + copy(i1) + i1.advance() + while i2.active: + copy(i2) + i2.advance() + return result + +def intersection(set_type, o1, o2): + if o1 is None: + return o2 + if o2 is None: + return o1 + i1 = _SetIteration(o1, False, 0) + i2 = _SetIteration(o2, False, 0) + result = o1._set_type() + def copy(i): + result._keys.append(i.key) + while i1.active and i2.active: + cmp_ = compare(i1.key, i2.key) + if cmp_ < 0: + i1.advance() + elif cmp_ == 0: + copy(i1) + i1.advance() + i2.advance() + else: + i2.advance() + return result + +def _prepMergeIterators(o1, o2): + MERGE_DEFAULT = getattr(o1, 'MERGE_DEFAULT', None) + if MERGE_DEFAULT is None: + raise TypeError("invalid set operation") + i1 = _SetIteration(o1, True, MERGE_DEFAULT) + i2 = _SetIteration(o2, True, MERGE_DEFAULT) + return i1, i2 + +def weightedUnion(set_type, o1, o2, w1=1, w2=1): + if o1 is None: + if o2 is None: + return 0, None + return w2, o2 + if o2 is None: + return w1, o1 + i1, i2 = _prepMergeIterators(o1, o2) + MERGE = getattr(o1, 'MERGE', None) + if MERGE is None and i1.useValues and i2.useValues: + raise TypeError("invalid set operation") + MERGE_WEIGHT = getattr(o1, 'MERGE_WEIGHT') + if (not i1.useValues) and i2.useValues: + i1, i2 = i2, i1 + w1, w2 = w2, w1 + _merging = i1.useValues or i2.useValues + if _merging: + result = 
o1._mapping_type() + def copy(i, w): + result._keys.append(i.key) + result._values.append(MERGE_WEIGHT(i.value, w)) + else: + result = o1._set_type() + def copy(i, w): + result._keys.append(i.key) + + while i1.active and i2.active: + cmp_ = compare(i1.key, i2.key) + if cmp_ < 0: + copy(i1, w1) + i1.advance() + elif cmp_ == 0: + result._keys.append(i1.key) + if _merging: + result._values.append(MERGE(i1.value, w1, i2.value, w2)) + i1.advance() + i2.advance() + else: + copy(i2, w2) + i2.advance() + while i1.active: + copy(i1, w1) + i1.advance() + while i2.active: + copy(i2, w2) + i2.advance() + return 1, result + +def weightedIntersection(set_type, o1, o2, w1=1, w2=1): + if o1 is None: + if o2 is None: + return 0, None + return w2, o2 + if o2 is None: + return w1, o1 + i1, i2 = _prepMergeIterators(o1, o2) + MERGE = getattr(o1, 'MERGE', None) + if MERGE is None and i1.useValues and i2.useValues: + raise TypeError("invalid set operation") + if (not i1.useValues) and i2.useValues: + i1, i2 = i2, i1 + w1, w2 = w2, w1 + _merging = i1.useValues or i2.useValues + if _merging: + result = o1._mapping_type() + else: + result = o1._set_type() + while i1.active and i2.active: + cmp_ = compare(i1.key, i2.key) + if cmp_ < 0: + i1.advance() + elif cmp_ == 0: + result._keys.append(i1.key) + if _merging: + result._values.append(MERGE(i1.value, w1, i2.value, w2)) + i1.advance() + i2.advance() + else: + i2.advance() + if isinstance(result, (Set, TreeSet)): + return w1 + w2, result + return 1, result + +def multiunion(set_type, seqs): + # XXX simple/slow implementation. Goal is just to get tests to pass. 
+ result = set_type() + for s in seqs: + try: + iter(s) + except TypeError: + s = set_type((s, )) + result.update(s) + return result + +def to_ob(self, v): + return v + +def _packer_unpacker(struct_format): + s = Struct(struct_format) + return s.pack, s.unpack + +int_pack, int_unpack = _packer_unpacker('i') + +def to_int(self, v): + try: + int_pack(index(v)) + except (struct_error, TypeError): + raise TypeError('32-bit integer expected') + + return int(v) + +float_pack = _packer_unpacker('f')[0] + +def to_float(self, v): + try: + float_pack(v) + except struct_error: + raise TypeError('float expected') + return float(v) + + +long_pack, long_unpack = _packer_unpacker('q') + +def to_long(self, v): + try: + long_pack(index(v)) + except (struct_error, TypeError): + if isinstance(v, int_types): + raise ValueError("Value out of range", v) + raise TypeError('64-bit integer expected') + + return int(v) + +def to_bytes(l): + def to(self, v): + if not (isinstance(v, bytes) and len(v) == l): + raise TypeError("%s-byte array expected" % l) + return v + return to + +tos = dict(I=to_int, L=to_long, F=to_float, O=to_ob) + +MERGE_DEFAULT_int = 1 +MERGE_DEFAULT_float = 1.0 + +def MERGE(self, value1, weight1, value2, weight2): + return (value1 * weight1) + (value2 * weight2) + +def MERGE_WEIGHT_default(self, value, weight): + return value + +def MERGE_WEIGHT_numeric(self, value, weight): + return value * weight + +def _fix_pickle(mod_dict, mod_name): + # Make the pure-Python objects pickle with the same + # class names and types as the C extensions by setting the appropriate + # _BTree_reduce_as attribute. + # If the C extensions are not available, we also change the + # __name__ attribute of the type to match the C name (otherwise + # we wind up with *Py in the pickles) + # Each module must call this as `_fix_pickle(globals(), __name__)` + # at the bottom. 
+ + mod_prefix = mod_name.split('.')[-1][:2] # BTrees.OOBTree -> 'OO' + bucket_name = mod_prefix + 'Bucket' + py_bucket_name = bucket_name + 'Py' + + have_c_extensions = mod_dict[bucket_name] is not mod_dict[py_bucket_name] + + for name in 'Bucket', 'Set', 'BTree', 'TreeSet', 'TreeIterator': + raw_name = mod_prefix + name + py_name = raw_name + 'Py' + try: + py_type = mod_dict[py_name] + except KeyError: + if name == 'TreeIterator': + # Optional + continue + raise # pragma: no cover + raw_type = mod_dict[raw_name] # Could be C or Python + + py_type._BTree_reduce_as = raw_type + py_type._BTree_reduce_up_bound = py_type + + if not have_c_extensions: # pragma: no cover + # Set FooPy to have the __name__ of simply Foo. + # We can't do this if the C extension is available, + # because then mod_dict[FooPy.__name__] is not FooPy + # and pickle refuses to save something like that. + # On the other hand (no C extension) this makes our + # Python pickle match the C version by default + py_type.__name__ = raw_name + py_type.__qualname__ = raw_name # Py 3.3+ diff --git a/thesisenv/lib/python3.6/site-packages/BTrees/_compat.h b/thesisenv/lib/python3.6/site-packages/BTrees/_compat.h new file mode 100644 index 0000000..9454da2 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/BTrees/_compat.h @@ -0,0 +1,55 @@ +/* Straddle Python 2 / 3 */ +#ifndef BTREES__COMPAT_H +#define BTREES__COMPAT_H + +#include "Python.h" + +#ifdef INTERN +#undef INTERN +#endif + +#ifdef INT_FROM_LONG +#undef INT_FROM_LONG +#endif + +#ifdef INT_CHECK +#undef INT_CHECK +#endif + +#if PY_MAJOR_VERSION >= 3 + +#define PY3K + +#define INTERN PyUnicode_InternFromString +#define INT_FROM_LONG(x) PyLong_FromLong(x) +#define INT_CHECK(x) PyLong_Check(x) +#define INT_AS_LONG(x) PyLong_AS_LONG(x) +#define TEXT_FROM_STRING PyUnicode_FromString +#define TEXT_FORMAT PyUnicode_Format + +/* Note that the second comparison is skipped if the first comparison returns: + + 1 -> There was no error and the answer is -1 
+ -1 -> There was an error, which the caller will detect with PyError_Occurred. + */ +#define COMPARE(lhs, rhs) \ + (lhs == Py_None ? (rhs == Py_None ? 0 : -1) : (rhs == Py_None ? 1 : \ + (PyObject_RichCompareBool((lhs), (rhs), Py_LT) != 0 ? -1 : \ + (PyObject_RichCompareBool((lhs), (rhs), Py_EQ) > 0 ? 0 : 1)))) + +#else + +#define INTERN PyString_InternFromString +#define INT_FROM_LONG(x) PyInt_FromLong(x) +#define INT_CHECK(x) PyInt_Check(x) +#define INT_AS_LONG(x) PyInt_AS_LONG(x) +#define TEXT_FROM_STRING PyString_FromString +#define TEXT_FORMAT PyString_Format + +#define COMPARE(lhs, rhs) \ + (lhs == Py_None ? (rhs == Py_None ? 0 : -1) : (rhs == Py_None ? 1 : \ + PyObject_Compare((lhs), (rhs)))) + +#endif + +#endif /* BTREES__COMPAT_H */ diff --git a/thesisenv/lib/python3.6/site-packages/BTrees/_compat.py b/thesisenv/lib/python3.6/site-packages/BTrees/_compat.py new file mode 100644 index 0000000..d8e4295 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/BTrees/_compat.py @@ -0,0 +1,100 @@ +############################################################################## +# +# Copyright (c) 2001-2012 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## +import os +import sys + +PYPY = hasattr(sys, 'pypy_version_info') +# We can and do build the C extensions on PyPy, but +# as of Persistent 4.2.5 the persistent C extension is not +# built on PyPy, so importing our C extension will fail anyway. 
+PURE_PYTHON = os.environ.get('PURE_PYTHON', PYPY) + + +if sys.version_info[0] < 3: #pragma NO COVER Python2 + + PY2 = True + PY3 = False + + int_types = int, long + xrange = xrange + def compare(x, y): + if x is None: + if y is None: + return 0 + else: + return -1 + elif y is None: + return 1 + else: + return cmp(x, y) + + _bytes = str + def _ascii(x): + return bytes(x) + +else: #pragma NO COVER Python3 + + PY2 = False + PY3 = True + + int_types = int, + xrange = range + + def compare(x, y): + if x is None: + if y is None: + return 0 + else: + return -1 + elif y is None: + return 1 + else: + return (x > y) - (y > x) + + _bytes = bytes + def _ascii(x): + return bytes(x, 'ascii') + + +def import_c_extension(mod_globals): + import importlib + c_module = None + module_name = mod_globals['__name__'] + assert module_name.startswith('BTrees.') + module_name = module_name.split('.')[1] + if not PURE_PYTHON: + try: + c_module = importlib.import_module('BTrees._' + module_name) + except ImportError: + pass + if c_module is not None: + new_values = dict(c_module.__dict__) + new_values.pop("__name__", None) + new_values.pop('__file__', None) + new_values.pop('__doc__', None) + mod_globals.update(new_values) + else: + # No C extension, make the Py versions available without that + # extension. The list comprehension both filters and prevents + # concurrent modification errors. 
+ for py in [k for k in mod_globals if k.endswith('Py')]: + mod_globals[py[:-2]] = mod_globals[py] + + # Assign the global aliases + prefix = module_name[:2] + for name in ('Bucket', 'Set', 'BTree', 'TreeSet'): + mod_globals[name] = mod_globals[prefix + name] + + # Cleanup + del mod_globals['import_c_extension'] diff --git a/thesisenv/lib/python3.6/site-packages/BTrees/_fsBTree.c b/thesisenv/lib/python3.6/site-packages/BTrees/_fsBTree.c new file mode 100644 index 0000000..540861a --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/BTrees/_fsBTree.c @@ -0,0 +1,164 @@ +/*############################################################################ +# +# Copyright (c) 2004 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################*/ + +#define MASTER_ID "$Id$\n" + +/* fsBTree - FileStorage index BTree + + This BTree implements a mapping from 2-character strings + to six-character strings. This allows us to efficiently store + a FileStorage index as a nested mapping of 6-character oid prefix + to mapping of 2-character oid suffix to 6-character (byte) file + positions. 
+*/ + +typedef unsigned char char2[2]; +typedef unsigned char char6[6]; + +/* Setup template macros */ + +#define PERSISTENT + +#define MOD_NAME_PREFIX "fs" + +#define DEFAULT_MAX_BUCKET_SIZE 500 +#define DEFAULT_MAX_BTREE_SIZE 500 + +#include "_compat.h" +/*#include "intkeymacros.h"*/ + +#define KEYMACROS_H "$Id$\n" +#define KEY_TYPE char2 +#undef KEY_TYPE_IS_PYOBJECT +#define KEY_CHECK(K) (PyBytes_Check(K) && PyBytes_GET_SIZE(K)==2) +#define TEST_KEY_SET_OR(V, K, T) if ( ( (V) = ((*(K) < *(T) || (*(K) == *(T) && (K)[1] < (T)[1])) ? -1 : ((*(K) == *(T) && (K)[1] == (T)[1]) ? 0 : 1)) ), 0 ) +#define DECREF_KEY(KEY) +#define INCREF_KEY(k) +#define COPY_KEY(KEY, E) (*(KEY)=*(E), (KEY)[1]=(E)[1]) +#define COPY_KEY_TO_OBJECT(O, K) O=PyBytes_FromStringAndSize((const char*)K,2) +#define COPY_KEY_FROM_ARG(TARGET, ARG, STATUS) \ + if (KEY_CHECK(ARG)) memcpy(TARGET, PyBytes_AS_STRING(ARG), 2); else { \ + PyErr_SetString(PyExc_TypeError, "expected two-character string key"); \ + (STATUS)=0; } + +/*#include "intvaluemacros.h"*/ +#define VALUEMACROS_H "$Id$\n" +#define VALUE_TYPE char6 +#undef VALUE_TYPE_IS_PYOBJECT +#define TEST_VALUE(K, T) memcmp(K,T,6) +#define DECREF_VALUE(k) +#define INCREF_VALUE(k) +#define COPY_VALUE(V, E) (memcpy(V, E, 6)) +#define COPY_VALUE_TO_OBJECT(O, K) O=PyBytes_FromStringAndSize((const char*)K,6) +#define COPY_VALUE_FROM_ARG(TARGET, ARG, STATUS) \ + if ((PyBytes_Check(ARG) && PyBytes_GET_SIZE(ARG)==6)) \ + memcpy(TARGET, PyBytes_AS_STRING(ARG), 6); else { \ + PyErr_SetString(PyExc_TypeError, "expected six-character string key"); \ + (STATUS)=0; } + +#define NORMALIZE_VALUE(V, MIN) + +#include "Python.h" + +static PyObject *bucket_toBytes(PyObject *self); + +static PyObject *bucket_fromBytes(PyObject *self, PyObject *state); + +#define EXTRA_BUCKET_METHODS \ + {"toBytes", (PyCFunction) bucket_toBytes, METH_NOARGS, \ + "toBytes() -- Return the state as a bytes array"}, \ + {"fromBytes", (PyCFunction) bucket_fromBytes, METH_O, \ + "fromSBytes(s) -- 
Set the state of the object from a bytes array"}, \ + {"toString", (PyCFunction) bucket_toBytes, METH_NOARGS, \ + "toString() -- Deprecated alias for 'toBytes'"}, \ + {"fromString", (PyCFunction) bucket_fromBytes, METH_O, \ + "fromString(s) -- Deprecated alias for 'fromBytes'"}, \ + +#ifdef PY3K +#define INITMODULE PyInit__fsBTree +#else +#define INITMODULE init_fsBTree +#endif +#include "BTreeModuleTemplate.c" + +static PyObject * +bucket_toBytes(PyObject *oself) +{ + Bucket *self = (Bucket *)oself; + PyObject *items = NULL; + int len; + + PER_USE_OR_RETURN(self, NULL); + + len = self->len; + + items = PyBytes_FromStringAndSize(NULL, len*8); + if (items == NULL) + goto err; + memcpy(PyBytes_AS_STRING(items), self->keys, len*2); + memcpy(PyBytes_AS_STRING(items)+len*2, self->values, len*6); + + PER_UNUSE(self); + return items; + + err: + PER_UNUSE(self); + Py_XDECREF(items); + return NULL; +} + +static PyObject * +bucket_fromBytes(PyObject *oself, PyObject *state) +{ + Bucket *self = (Bucket *)oself; + int len; + KEY_TYPE *keys; + VALUE_TYPE *values; + + len = PyBytes_Size(state); + if (len < 0) + return NULL; + + if (len%8) + { + PyErr_SetString(PyExc_ValueError, "state string of wrong size"); + return NULL; + } + len /= 8; + + if (self->next) { + Py_DECREF(self->next); + self->next = NULL; + } + + if (len > self->size) { + keys = BTree_Realloc(self->keys, sizeof(KEY_TYPE)*len); + if (keys == NULL) + return NULL; + values = BTree_Realloc(self->values, sizeof(VALUE_TYPE)*len); + if (values == NULL) + return NULL; + self->keys = keys; + self->values = values; + self->size = len; + } + + memcpy(self->keys, PyBytes_AS_STRING(state), len*2); + memcpy(self->values, PyBytes_AS_STRING(state)+len*2, len*6); + + self->len = len; + + Py_INCREF(self); + return (PyObject *)self; +} diff --git a/thesisenv/lib/python3.6/site-packages/BTrees/_fsBTree.cpython-36m-darwin.so b/thesisenv/lib/python3.6/site-packages/BTrees/_fsBTree.cpython-36m-darwin.so new file mode 100755 index 
0000000..f94cd1b Binary files /dev/null and b/thesisenv/lib/python3.6/site-packages/BTrees/_fsBTree.cpython-36m-darwin.so differ diff --git a/thesisenv/lib/python3.6/site-packages/BTrees/check.py b/thesisenv/lib/python3.6/site-packages/BTrees/check.py new file mode 100644 index 0000000..068bd93 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/BTrees/check.py @@ -0,0 +1,428 @@ +############################################################################## +# +# Copyright (c) 2003 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## +""" +Utilities for working with BTrees (TreeSets, Buckets, and Sets) at a low +level. + +The primary function is check(btree), which performs value-based consistency +checks of a kind btree._check() does not perform. See the function docstring +for details. + +display(btree) displays the internal structure of a BTree (TreeSet, etc) to +stdout. + +CAUTION: When a BTree node has only a single bucket child, it can be +impossible to get at the bucket from Python code (__getstate__() may squash +the bucket object out of existence, as a pickling storage optimization). In +such a case, the code here synthesizes a temporary bucket with the same keys +(and values, if the bucket is of a mapping type). This has no first-order +consequences, but can mislead if you pay close attention to reported object +addresses and/or object identity (the synthesized bucket has an address +that doesn't exist in the actual BTree). 
+""" + +from BTrees.IFBTree import IFBTree, IFBucket, IFSet, IFTreeSet +from BTrees.IFBTree import IFBTreePy, IFBucketPy, IFSetPy, IFTreeSetPy +from BTrees.IIBTree import IIBTree, IIBucket, IISet, IITreeSet +from BTrees.IIBTree import IIBTreePy, IIBucketPy, IISetPy, IITreeSetPy +from BTrees.IOBTree import IOBTree, IOBucket, IOSet, IOTreeSet +from BTrees.IOBTree import IOBTreePy, IOBucketPy, IOSetPy, IOTreeSetPy +from BTrees.LFBTree import LFBTree, LFBucket, LFSet, LFTreeSet +from BTrees.LFBTree import LFBTreePy, LFBucketPy, LFSetPy, LFTreeSetPy +from BTrees.LLBTree import LLBTree, LLBucket, LLSet, LLTreeSet +from BTrees.LLBTree import LLBTreePy, LLBucketPy, LLSetPy, LLTreeSetPy +from BTrees.LOBTree import LOBTree, LOBucket, LOSet, LOTreeSet +from BTrees.LOBTree import LOBTreePy, LOBucketPy, LOSetPy, LOTreeSetPy +from BTrees.OIBTree import OIBTree, OIBucket, OISet, OITreeSet +from BTrees.OIBTree import OIBTreePy, OIBucketPy, OISetPy, OITreeSetPy +from BTrees.OLBTree import OLBTree, OLBucket, OLSet, OLTreeSet +from BTrees.OLBTree import OLBTreePy, OLBucketPy, OLSetPy, OLTreeSetPy +from BTrees.OOBTree import OOBTree, OOBucket, OOSet, OOTreeSet +from BTrees.OOBTree import OOBTreePy, OOBucketPy, OOSetPy, OOTreeSetPy + +from BTrees.utils import positive_id +from BTrees.utils import oid_repr + +TYPE_UNKNOWN, TYPE_BTREE, TYPE_BUCKET = range(3) + +from ._compat import compare + +_type2kind = {} +for kv in ('OO', + 'II', 'IO', 'OI', 'IF', + 'LL', 'LO', 'OL', 'LF', + ): + for name, kind in ( + ('BTree', (TYPE_BTREE, True)), + ('Bucket', (TYPE_BUCKET, True)), + ('TreeSet', (TYPE_BTREE, False)), + ('Set', (TYPE_BUCKET, False)), + ): + _type2kind[globals()[kv+name]] = kind + py = kv + name + 'Py' + _type2kind[globals()[py]] = kind + +# Return pair +# +# TYPE_BTREE or TYPE_BUCKET, is_mapping + +def classify(obj): + return _type2kind[type(obj)] + + +BTREE_EMPTY, BTREE_ONE, BTREE_NORMAL = range(3) + +# If the BTree is empty, returns +# +# BTREE_EMPTY, [], [] +# +# If the BTree has 
only one bucket, sometimes returns +# +# BTREE_ONE, bucket_state, None +# +# Else returns +# +# BTREE_NORMAL, list of keys, list of kids +# +# and the list of kids has one more entry than the list of keys. +# +# BTree.__getstate__() docs: +# +# For an empty BTree (self->len == 0), None. +# +# For a BTree with one child (self->len == 1), and that child is a bucket, +# and that bucket has a NULL oid, a one-tuple containing a one-tuple +# containing the bucket's state: +# +# ( +# ( +# child[0].__getstate__(), +# ), +# ) +# +# Else a two-tuple. The first element is a tuple interleaving the BTree's +# keys and direct children, of size 2*self->len - 1 (key[0] is unused and +# is not saved). The second element is the firstbucket: +# +# ( +# (child[0], key[1], child[1], key[2], child[2], ..., +# key[len-1], child[len-1]), +# self->firstbucket +# ) + +_btree2bucket = {} +for kv in ('OO', + 'II', 'IO', 'OI', 'IF', + 'LL', 'LO', 'OL', 'LF', + ): + _btree2bucket[globals()[kv+'BTree']] = globals()[kv+'Bucket'] + py = kv + 'BTreePy' + _btree2bucket[globals()[py]] = globals()[kv+'BucketPy'] + _btree2bucket[globals()[kv+'TreeSet']] = globals()[kv+'Set'] + py = kv + 'TreeSetPy' + _btree2bucket[globals()[kv+'TreeSetPy']] = globals()[kv+'SetPy'] + + +def crack_btree(t, is_mapping): + state = t.__getstate__() + if state is None: + return BTREE_EMPTY, [], [] + + assert isinstance(state, tuple) + if len(state) == 1: + state = state[0] + assert isinstance(state, tuple) and len(state) == 1 + state = state[0] + return BTREE_ONE, state, None + + assert len(state) == 2 + data, firstbucket = state + n = len(data) + assert n & 1 + kids = [] + keys = [] + i = 0 + for x in data: + if i & 1: + keys.append(x) + else: + kids.append(x) + i += 1 + return BTREE_NORMAL, keys, kids + +# Returns +# +# keys, values # for a mapping; len(keys) == len(values) in this case +# or +# keys, [] # for a set +# +# bucket.__getstate__() docs: +# +# For a set bucket (self->values is NULL), a one-tuple or two-tuple. 
The +# first element is a tuple of keys, of length self->len. The second element +# is the next bucket, present if and only if next is non-NULL: +# +# ( +# (keys[0], keys[1], ..., keys[len-1]), +# next iff non-NULL> +# ) +# +# For a mapping bucket (self->values is not NULL), a one-tuple or two-tuple. +# The first element is a tuple interleaving keys and values, of length +# 2 * self->len. The second element is the next bucket, present iff next is +# non-NULL: +# +# ( +# (keys[0], values[0], keys[1], values[1], ..., +# keys[len-1], values[len-1]), +# next iff non-NULL> +# ) + +def crack_bucket(b, is_mapping): + state = b.__getstate__() + assert isinstance(state, tuple) + assert 1 <= len(state) <= 2 + data = state[0] + if not is_mapping: + return data, [] + keys = [] + values = [] + n = len(data) + assert n & 1 == 0 + i = 0 + for x in data: + if i & 1: + values.append(x) + else: + keys.append(x) + i += 1 + return keys, values + +def type_and_adr(obj): + if hasattr(obj, '_p_oid'): + oid = oid_repr(obj._p_oid) + else: + oid = 'None' + return "%s (0x%x oid=%s)" % (type(obj).__name__, positive_id(obj), oid) + +# Walker implements a depth-first search of a BTree (or TreeSet or Set or +# Bucket). Subclasses must implement the visit_btree() and visit_bucket() +# methods, and arrange to call the walk() method. walk() calls the +# visit_XYZ() methods once for each node in the tree, in depth-first +# left-to-right order. + +class Walker: + def __init__(self, obj): + self.obj = obj + + # obj is the BTree (BTree or TreeSet). + # path is a list of indices, from the root. For example, if a BTree node + # is child[5] of child[3] of the root BTree, [3, 5]. + # parent is the parent BTree object, or None if this is the root BTree. + # is_mapping is True for a BTree and False for a TreeSet. + # keys is a list of the BTree's internal keys. + # kids is a list of the BTree's children. + # If the BTree is an empty root node, keys == kids == []. + # Else len(kids) == len(keys) + 1. 
+ # lo and hi are slice bounds on the values the elements of keys *should* + # lie in (lo inclusive, hi exclusive). lo is None if there is no lower + # bound known, and hi is None if no upper bound is known. + + def visit_btree(self, obj, path, parent, is_mapping, + keys, kids, lo, hi): + raise NotImplementedError + + # obj is the bucket (Bucket or Set). + # path is a list of indices, from the root. For example, if a bucket + # node is child[5] of child[3] of the root BTree, [3, 5]. + # parent is the parent BTree object. + # is_mapping is True for a Bucket and False for a Set. + # keys is a list of the bucket's keys. + # values is a list of the bucket's values. + # If is_mapping is false, values == []. Else len(keys) == len(values). + # lo and hi are slice bounds on the values the elements of keys *should* + # lie in (lo inclusive, hi exclusive). lo is None if there is no lower + # bound known, and hi is None if no upper bound is known. + + def visit_bucket(self, obj, path, parent, is_mapping, + keys, values, lo, hi): + raise NotImplementedError + + def walk(self): + obj = self.obj + path = [] + stack = [(obj, path, None, None, None)] + while stack: + obj, path, parent, lo, hi = stack.pop() + kind, is_mapping = classify(obj) + if kind is TYPE_BTREE: + bkind, keys, kids = crack_btree(obj, is_mapping) + if bkind is BTREE_NORMAL: + # push the kids, in reverse order (so they're popped off + # the stack in forward order) + n = len(kids) + for i in range(len(kids)-1, -1, -1): + newlo, newhi = lo, hi + if i < n-1: + newhi = keys[i] + if i > 0: + newlo = keys[i-1] + stack.append((kids[i], + path + [i], + obj, + newlo, + newhi)) + + elif bkind is BTREE_EMPTY: + pass + else: + assert bkind is BTREE_ONE + # Yuck. There isn't a bucket object to pass on, as + # the bucket state is embedded directly in the BTree + # state. Synthesize a bucket. 
+ assert kids is None # and "keys" is really the bucket + # state + bucket = _btree2bucket[type(obj)]() + bucket.__setstate__(keys) + stack.append((bucket, + path + [0], + obj, + lo, + hi)) + keys = [] + kids = [bucket] + + self.visit_btree(obj, + path, + parent, + is_mapping, + keys, + kids, + lo, + hi) + else: + assert kind is TYPE_BUCKET + keys, values = crack_bucket(obj, is_mapping) + self.visit_bucket(obj, + path, + parent, + is_mapping, + keys, + values, + lo, + hi) + + +class Checker(Walker): + def __init__(self, obj): + Walker.__init__(self, obj) + self.errors = [] + + def check(self): + self.walk() + if self.errors: + s = "Errors found in %s:" % type_and_adr(self.obj) + self.errors.insert(0, s) + s = "\n".join(self.errors) + raise AssertionError(s) + + def visit_btree(self, obj, path, parent, is_mapping, + keys, kids, lo, hi): + self.check_sorted(obj, path, keys, lo, hi) + + def visit_bucket(self, obj, path, parent, is_mapping, + keys, values, lo, hi): + self.check_sorted(obj, path, keys, lo, hi) + + def check_sorted(self, obj, path, keys, lo, hi): + i, n = 0, len(keys) + for x in keys: + # lo or hi are ommitted by supplying None. Thus the not + # None checkes below. 
+ if lo is not None and not compare(lo, x) <= 0: + s = "key %r < lower bound %r at index %d" % (x, lo, i) + self.complain(s, obj, path) + if hi is not None and not compare(x, hi) < 0: + s = "key %r >= upper bound %r at index %d" % (x, hi, i) + self.complain(s, obj, path) + if i < n-1 and not compare(x, keys[i+1]) < 0: + s = "key %r at index %d >= key %r at index %d" % ( + x, i, keys[i+1], i+1) + self.complain(s, obj, path) + i += 1 + + def complain(self, msg, obj, path): + s = "%s, in %s, path from root %s" % ( + msg, + type_and_adr(obj), + ".".join(map(str, path))) + self.errors.append(s) + +class Printer(Walker): #pragma NO COVER + def __init__(self, obj): + Walker.__init__(self, obj) + + def display(self): + self.walk() + + def visit_btree(self, obj, path, parent, is_mapping, + keys, kids, lo, hi): + indent = " " * len(path) + print("%s%s %s with %d children" % ( + indent, + ".".join(map(str, path)), + type_and_adr(obj), + len(kids))) + indent += " " + n = len(keys) + for i in range(n): + print("%skey %d: %r" % (indent, i, keys[i])) + + def visit_bucket(self, obj, path, parent, is_mapping, + keys, values, lo, hi): + indent = " " * len(path) + print("%s%s %s with %d keys" % ( + indent, + ".".join(map(str, path)), + type_and_adr(obj), + len(keys))) + indent += " " + n = len(keys) + for i in range(n): + print("%skey %d: %r" % (indent, i, keys[i]),) + if is_mapping: + print("value %r" % (values[i],)) + +def check(btree): + """Check internal value-based invariants in a BTree or TreeSet. + + The btree._check() method checks internal C-level pointer consistency. + The check() function here checks value-based invariants: whether the + keys in leaf bucket and internal nodes are in strictly increasing order, + and whether they all lie in their expected range. The latter is a subtle + invariant that can't be checked locally -- it requires propagating + range info down from the root of the tree, and modifying it at each + level for each child. 
+ + Raises AssertionError if anything is wrong, with a string detail + explaining the problems. The entire tree is checked before + AssertionError is raised, and the string detail may be large (depending + on how much went wrong). + """ + + Checker(btree).check() + +def display(btree): #pragma NO COVER + "Display the internal structure of a BTree, Bucket, TreeSet or Set." + Printer(btree).display() diff --git a/thesisenv/lib/python3.6/site-packages/BTrees/floatvaluemacros.h b/thesisenv/lib/python3.6/site-packages/BTrees/floatvaluemacros.h new file mode 100644 index 0000000..a985e11 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/BTrees/floatvaluemacros.h @@ -0,0 +1,25 @@ + +#define VALUEMACROS_H "$Id$\n" + +#define VALUE_TYPE float +#undef VALUE_TYPE_IS_PYOBJECT +#define TEST_VALUE(K, T) (((K) < (T)) ? -1 : (((K) > (T)) ? 1: 0)) +#define VALUE_SAME(VALUE, TARGET) ( (VALUE) == (TARGET) ) +#define DECLARE_VALUE(NAME) VALUE_TYPE NAME +#define VALUE_PARSE "f" +#define DECREF_VALUE(k) +#define INCREF_VALUE(k) +#define COPY_VALUE(V, E) (V=(E)) +#define COPY_VALUE_TO_OBJECT(O, K) O=PyFloat_FromDouble(K) +#define COPY_VALUE_FROM_ARG(TARGET, ARG, STATUS) \ + if (PyFloat_Check(ARG)) TARGET = (float)PyFloat_AsDouble(ARG); \ + else if (INT_CHECK(ARG)) TARGET = (float)INT_AS_LONG(ARG); \ + else { \ + PyErr_SetString(PyExc_TypeError, "expected float or int value"); \ + (STATUS)=0; (TARGET)=0; } + +#define NORMALIZE_VALUE(V, MIN) ((MIN) > 0) ? 
((V)/=(MIN)) : 0 + +#define MERGE_DEFAULT 1.0f +#define MERGE(O1, w1, O2, w2) ((O1)*(w1)+(O2)*(w2)) +#define MERGE_WEIGHT(O, w) ((O)*(w)) diff --git a/thesisenv/lib/python3.6/site-packages/BTrees/fsBTree.py b/thesisenv/lib/python3.6/site-packages/BTrees/fsBTree.py new file mode 100644 index 0000000..5106d92 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/BTrees/fsBTree.py @@ -0,0 +1,110 @@ +############################################################################## +# +# Copyright (c) 2001-2012 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## + +# fsBTrees are data structures used for ZODB FileStorage. They are not +# expected to be "public" excpect to FileStorage. +# Each item in an fsBTree maps a two-byte key to a six-byte value. 
+ +__all__ = ('Bucket', 'Set', 'BTree', 'TreeSet', + 'fsBucket', 'fsSet', 'fsBTree', 'fsTreeSet', + 'union', 'intersection', 'difference', + ) + + +from zope.interface import moduleProvides + +from .Interfaces import IIntegerObjectBTreeModule +from ._base import Bucket +from ._base import Set +from ._base import Tree as BTree +from ._base import TreeSet +from ._base import difference as _difference +from ._base import intersection as _intersection +from ._base import set_operation as _set_operation +from ._base import to_bytes as _to_bytes +from ._base import union as _union +from ._base import _fix_pickle +from ._compat import import_c_extension + + +_BUCKET_SIZE = 500 +_TREE_SIZE = 500 +using64bits = False +_to_key = _to_bytes(2) +_to_value = _to_bytes(6) + + +class fsBucketPy(Bucket): + _to_key = _to_key + _to_value = _to_value + + def toString(self): + return b''.join(self._keys) + b''.join(self._values) + + def fromString(self, v): + length = len(v) + if length % 8 != 0: + raise ValueError() + count = length // 8 + keys, values = v[:count*2], v[count*2:] + self.clear() + while keys and values: + key, keys = keys[:2], keys[2:] + value, values = values[:6], values[6:] + self._keys.append(key) + self._values.append(value) + return self + + +class fsSetPy(Set): + _to_key = _to_key + + +class fsBTreePy(BTree): + max_leaf_size = _BUCKET_SIZE + max_internal_size = _TREE_SIZE + _to_key = _to_key + _to_value = _to_value + + +class fsTreeSetPy(TreeSet): + max_leaf_size = _BUCKET_SIZE + max_internal_size = _TREE_SIZE + _to_key = _to_key + + +# Can't declare forward refs, so fix up afterwards: + +fsBucketPy._mapping_type = fsBucketPy._bucket_type = fsBucketPy +fsBucketPy._set_type = fsSetPy + +fsSetPy._mapping_type = fsBucketPy +fsSetPy._set_type = fsSetPy._bucket_type = fsSetPy + +fsBTreePy._mapping_type = fsBTreePy._bucket_type = fsBucketPy +fsBTreePy._set_type = fsSetPy + +fsTreeSetPy._mapping_type = fsBucketPy +fsTreeSetPy._set_type = fsTreeSetPy._bucket_type = 
fsSetPy + + +differencePy = _set_operation(_difference, fsSetPy) +unionPy = _set_operation(_union, fsSetPy) +intersectionPy = _set_operation(_intersection, fsSetPy) + +import_c_extension(globals()) + +_fix_pickle(globals(), __name__) + +moduleProvides(IIntegerObjectBTreeModule) diff --git a/thesisenv/lib/python3.6/site-packages/BTrees/intkeymacros.h b/thesisenv/lib/python3.6/site-packages/BTrees/intkeymacros.h new file mode 100644 index 0000000..f9244b5 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/BTrees/intkeymacros.h @@ -0,0 +1,40 @@ + +#define KEYMACROS_H "$Id$\n" + +#ifdef ZODB_64BIT_INTS +/* PY_LONG_LONG as key */ +#define NEED_LONG_LONG_SUPPORT +#define NEED_LONG_LONG_KEYS +#define KEY_TYPE PY_LONG_LONG +#define KEY_CHECK longlong_check +#define COPY_KEY_TO_OBJECT(O, K) O=longlong_as_object(K) +#define COPY_KEY_FROM_ARG(TARGET, ARG, STATUS) \ + if (!longlong_convert((ARG), &TARGET)) \ + { \ + (STATUS)=0; (TARGET)=0; \ + } +#else +/* C int as key */ +#define KEY_TYPE int +#define KEY_CHECK INT_CHECK +#define COPY_KEY_TO_OBJECT(O, K) O=INT_FROM_LONG(K) +#define COPY_KEY_FROM_ARG(TARGET, ARG, STATUS) \ + if (INT_CHECK(ARG)) { \ + long vcopy = INT_AS_LONG(ARG); \ + if (PyErr_Occurred()) { (STATUS)=0; (TARGET)=0; } \ + else if ((int)vcopy != vcopy) { \ + PyErr_SetString(PyExc_TypeError, "integer out of range"); \ + (STATUS)=0; (TARGET)=0; \ + } \ + else TARGET = vcopy; \ + } else { \ + PyErr_SetString(PyExc_TypeError, "expected integer key"); \ + (STATUS)=0; (TARGET)=0; } +#endif + +#undef KEY_TYPE_IS_PYOBJECT +#define TEST_KEY_SET_OR(V, K, T) if ( ( (V) = (((K) < (T)) ? -1 : (((K) > (T)) ? 
1: 0)) ) , 0 ) +#define DECREF_KEY(KEY) +#define INCREF_KEY(k) +#define COPY_KEY(KEY, E) (KEY=(E)) +#define MULTI_INT_UNION 1 diff --git a/thesisenv/lib/python3.6/site-packages/BTrees/intvaluemacros.h b/thesisenv/lib/python3.6/site-packages/BTrees/intvaluemacros.h new file mode 100644 index 0000000..0253bf6 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/BTrees/intvaluemacros.h @@ -0,0 +1,46 @@ + +#define VALUEMACROS_H "$Id$\n" + +#ifdef ZODB_64BIT_INTS +#define NEED_LONG_LONG_SUPPORT +#define VALUE_TYPE PY_LONG_LONG +#define VALUE_PARSE "L" +#define COPY_VALUE_TO_OBJECT(O, K) O=longlong_as_object(K) +#define COPY_VALUE_FROM_ARG(TARGET, ARG, STATUS) \ + if (!longlong_convert((ARG), &TARGET)) \ + { \ + (STATUS)=0; (TARGET)=0; \ + } +#else +#define VALUE_TYPE int +#define VALUE_PARSE "i" +#define COPY_VALUE_TO_OBJECT(O, K) O=INT_FROM_LONG(K) + +#define COPY_VALUE_FROM_ARG(TARGET, ARG, STATUS) \ + if (INT_CHECK(ARG)) { \ + long vcopy = INT_AS_LONG(ARG); \ + if (PyErr_Occurred()) { (STATUS)=0; (TARGET)=0; } \ + else if ((int)vcopy != vcopy) { \ + PyErr_SetString(PyExc_TypeError, "integer out of range"); \ + (STATUS)=0; (TARGET)=0; \ + } \ + else TARGET = vcopy; \ + } else { \ + PyErr_SetString(PyExc_TypeError, "expected integer key"); \ + (STATUS)=0; (TARGET)=0; } + +#endif + +#undef VALUE_TYPE_IS_PYOBJECT +#define TEST_VALUE(K, T) (((K) < (T)) ? -1 : (((K) > (T)) ? 1: 0)) +#define VALUE_SAME(VALUE, TARGET) ( (VALUE) == (TARGET) ) +#define DECLARE_VALUE(NAME) VALUE_TYPE NAME +#define DECREF_VALUE(k) +#define INCREF_VALUE(k) +#define COPY_VALUE(V, E) (V=(E)) + +#define NORMALIZE_VALUE(V, MIN) ((MIN) > 0) ? 
((V)/=(MIN)) : 0 + +#define MERGE_DEFAULT 1 +#define MERGE(O1, w1, O2, w2) ((O1)*(w1)+(O2)*(w2)) +#define MERGE_WEIGHT(O, w) ((O)*(w)) diff --git a/thesisenv/lib/python3.6/site-packages/BTrees/objectkeymacros.h b/thesisenv/lib/python3.6/site-packages/BTrees/objectkeymacros.h new file mode 100644 index 0000000..8fa516e --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/BTrees/objectkeymacros.h @@ -0,0 +1,44 @@ +#define KEYMACROS_H "$Id$\n" +#define KEY_TYPE PyObject * +#define KEY_TYPE_IS_PYOBJECT + +#include "Python.h" +#include "_compat.h" + +static PyObject *object_; /* initialized in BTreeModuleTemplate init */ + +static int +check_argument_cmp(PyObject *arg) +{ + /* printf("check cmp %p %p %p %p\n", */ + /* arg->ob_type->tp_richcompare, */ + /* ((PyTypeObject *)object_)->ob_type->tp_richcompare, */ + /* arg->ob_type->tp_compare, */ + /* ((PyTypeObject *)object_)->ob_type->tp_compare); */ + if (arg == Py_None) { + return 1; + } + +#ifdef PY3K + if (Py_TYPE(arg)->tp_richcompare == Py_TYPE(object_)->tp_richcompare) +#else + if (Py_TYPE(arg)->tp_richcompare == NULL + && Py_TYPE(arg)->tp_compare == Py_TYPE(object_)->tp_compare) +#endif + { + PyErr_SetString(PyExc_TypeError, "Object has default comparison"); + return 0; + } + return 1; +} + +#define TEST_KEY_SET_OR(V, KEY, TARGET) \ +if ( ( (V) = COMPARE((KEY),(TARGET)) ), PyErr_Occurred() ) +#define INCREF_KEY(k) Py_INCREF(k) +#define DECREF_KEY(KEY) Py_DECREF(KEY) +#define COPY_KEY(KEY, E) KEY=(E) +#define COPY_KEY_TO_OBJECT(O, K) O=(K); Py_INCREF(O) +#define COPY_KEY_FROM_ARG(TARGET, ARG, S) \ + TARGET=(ARG); \ + (S) = 1; +#define KEY_CHECK_ON_SET check_argument_cmp diff --git a/thesisenv/lib/python3.6/site-packages/BTrees/objectvaluemacros.h b/thesisenv/lib/python3.6/site-packages/BTrees/objectvaluemacros.h new file mode 100644 index 0000000..5dbc80c --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/BTrees/objectvaluemacros.h @@ -0,0 +1,12 @@ +#define VALUEMACROS_H "$Id$\n" + +#define VALUE_TYPE 
PyObject * +#define VALUE_TYPE_IS_PYOBJECT +#define TEST_VALUE(VALUE, TARGET) (COMPARE((VALUE),(TARGET))) +#define DECLARE_VALUE(NAME) VALUE_TYPE NAME +#define INCREF_VALUE(k) Py_INCREF(k) +#define DECREF_VALUE(k) Py_DECREF(k) +#define COPY_VALUE(k,e) k=(e) +#define COPY_VALUE_TO_OBJECT(O, K) O=(K); Py_INCREF(O) +#define COPY_VALUE_FROM_ARG(TARGET, ARG, S) TARGET=(ARG) +#define NORMALIZE_VALUE(V, MIN) Py_INCREF(V) diff --git a/thesisenv/lib/python3.6/site-packages/BTrees/sorters.c b/thesisenv/lib/python3.6/site-packages/BTrees/sorters.c new file mode 100644 index 0000000..4cfae0b --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/BTrees/sorters.c @@ -0,0 +1,542 @@ +/***************************************************************************** + + Copyright (c) 2002 Zope Foundation and Contributors. + All Rights Reserved. + + This software is subject to the provisions of the Zope Public License, + Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. + THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED + WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED + WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS + FOR A PARTICULAR PURPOSE + + ****************************************************************************/ + +/* Revision information: $Id$ */ + +/* The only routine here intended to be used outside the file is + size_t sort_int_nodups(int *p, size_t n) + + Sort the array of n ints pointed at by p, in place, and also remove + duplicates. Return the number of unique elements remaining, which occupy + a contiguous and monotonically increasing slice of the array starting at p. + + Example: If the input array is [3, 1, 2, 3, 1, 5, 2], sort_int_nodups + returns 4, and the first 4 elements of the array are changed to + [1, 2, 3, 5]. The content of the remaining array positions is not defined. 
+ + Notes: + + + This is specific to n-byte signed ints, with endianness natural to the + platform. `n` is determined based on ZODB_64BIT_INTS. + + + 4*n bytes of available heap memory are required for best speed + (8*n when ZODB_64BIT_INTS is defined). +*/ + +#include +#include +#include +#include +#include + +/* The type of array elements to be sorted. Most of the routines don't + care about the type, and will work fine for any scalar C type (provided + they're recompiled with element_type appropriately redefined). However, + the radix sort has to know everything about the type's internal + representation. +*/ +typedef KEY_TYPE element_type; + +/* The radixsort is faster than the quicksort for large arrays, but radixsort + has high fixed overhead, making it a poor choice for small arrays. The + crossover point isn't critical, and is sensitive to things like compiler + and machine cache structure, so don't worry much about this. +*/ +#define QUICKSORT_BEATS_RADIXSORT 800U + +/* In turn, the quicksort backs off to an insertion sort for very small + slices. MAX_INSERTION is the largest slice quicksort leaves entirely to + insertion. Because this version of quicksort uses a median-of-3 rule for + selecting a pivot, MAX_INSERTION must be at least 2 (so that quicksort + has at least 3 values to look at in a slice). Again, the exact value here + isn't critical. +*/ +#define MAX_INSERTION 25U + +#if MAX_INSERTION < 2U +# error "MAX_INSERTION must be >= 2" +#endif + +/* LSB-first radix sort of the n elements in 'in'. + 'work' is work storage at least as large as 'in'. Depending on how many + swaps are done internally, the final result may come back in 'in' or 'work'; + and that pointer is returned. + + radixsort_int is specific to signed n-byte ints, with natural machine + endianness. `n` is determined based on ZODB_64BIT_INTS. 
+*/ +static element_type* +radixsort_int(element_type *in, element_type *work, size_t n) +{ + /* count[i][j] is the number of input elements that have byte value j + in byte position i, where byte position 0 is the LSB. Note that + holding i fixed, the sum of count[i][j] over all j in range(256) + is n. + */ +#ifdef ZODB_64BIT_INTS + size_t count[8][256]; +#else + size_t count[4][256]; +#endif + size_t i; + int offset, offsetinc; + + /* Which byte position are we working on now? 0=LSB, 1, 2, ... */ + size_t bytenum; + +#ifdef ZODB_64BIT_INTS + assert(sizeof(element_type) == 8); +#else + assert(sizeof(element_type) == 4); +#endif + assert(in); + assert(work); + + /* Compute all of count in one pass. */ + memset(count, 0, sizeof(count)); + for (i = 0; i < n; ++i) { + element_type const x = in[i]; + ++count[0][(x ) & 0xff]; + ++count[1][(x >> 8) & 0xff]; + ++count[2][(x >> 16) & 0xff]; + ++count[3][(x >> 24) & 0xff]; +#ifdef ZODB_64BIT_INTS + ++count[4][(x >> 32) & 0xff]; + ++count[5][(x >> 40) & 0xff]; + ++count[6][(x >> 48) & 0xff]; + ++count[7][(x >> 56) & 0xff]; +#endif + } + + /* For p an element_type* cast to char*, offset is how much farther we + have to go to get to the LSB of the element; this is 0 for little- + endian boxes and sizeof(element_type)-1 for big-endian. + offsetinc is 1 or -1, respectively, telling us which direction to go + from p+offset to get to the element's more-significant bytes. + */ + { + element_type one = 1; + if (*(char*)&one) { + /* Little endian. */ + offset = 0; + offsetinc = 1; + } + else { + /* Big endian. */ + offset = sizeof(element_type) - 1; + offsetinc = -1; + } + } + + /* The radix sort. */ + for (bytenum = 0; + bytenum < sizeof(element_type); + ++bytenum, offset += offsetinc) { + + /* Do a stable distribution sort on byte position bytenum, + from in to work. index[i] tells us the work index at which + to store the next in element with byte value i. pinbyte + points to the correct byte in the input array. 
+ */ + size_t index[256]; + unsigned char* pinbyte; + size_t total = 0; + size_t *pcount = count[bytenum]; + + /* Compute the correct output starting index for each possible + byte value. + */ + if (bytenum < sizeof(element_type) - 1) { + for (i = 0; i < 256; ++i) { + const size_t icount = pcount[i]; + index[i] = total; + total += icount; + if (icount == n) + break; + } + if (i < 256) { + /* All bytes in the current position have value + i, so there's nothing to do on this pass. + */ + continue; + } + } + else { + /* The MSB of signed ints needs to be distributed + differently than the other bytes, in order + 0x80, 0x81, ... 0xff, 0x00, 0x01, ... 0x7f + */ + for (i = 128; i < 256; ++i) { + const size_t icount = pcount[i]; + index[i] = total; + total += icount; + if (icount == n) + break; + } + if (i < 256) + continue; + for (i = 0; i < 128; ++i) { + const size_t icount = pcount[i]; + index[i] = total; + total += icount; + if (icount == n) + break; + } + if (i < 128) + continue; + } + assert(total == n); + + /* Distribute the elements according to byte value. Note that + this is where most of the time is spent. + Note: The loop is unrolled 4x by hand, for speed. This + may be a pessimization someday, but was a significant win + on my MSVC 6.0 timing tests. + */ + pinbyte = (unsigned char *)in + offset; + i = 0; + /* Reduce number of elements to copy to a multiple of 4. 
*/ + while ((n - i) & 0x3) { + unsigned char byte = *pinbyte; + work[index[byte]++] = in[i]; + ++i; + pinbyte += sizeof(element_type); + } + for (; i < n; i += 4, pinbyte += 4 * sizeof(element_type)) { + unsigned char byte1 = *(pinbyte ); + unsigned char byte2 = *(pinbyte + sizeof(element_type)); + unsigned char byte3 = *(pinbyte + 2 * sizeof(element_type)); + unsigned char byte4 = *(pinbyte + 3 * sizeof(element_type)); + + element_type in1 = in[i ]; + element_type in2 = in[i+1]; + element_type in3 = in[i+2]; + element_type in4 = in[i+3]; + + work[index[byte1]++] = in1; + work[index[byte2]++] = in2; + work[index[byte3]++] = in3; + work[index[byte4]++] = in4; + } + /* Swap in and work (just a pointer swap). */ + { + element_type *temp = in; + in = work; + work = temp; + } + } + + return in; +} + +/* Remove duplicates from sorted array in, storing exactly one of each distinct + element value into sorted array out. It's OK (and expected!) for in == out, + but otherwise the n elements beginning at in must not overlap with the n + beginning at out. + Return the number of elements in out. +*/ +static size_t +uniq(element_type *out, element_type *in, size_t n) +{ + size_t i; + element_type lastelt; + element_type *pout; + + assert(out); + assert(in); + if (n == 0) + return 0; + + /* i <- first index in 'in' that contains a duplicate. + in[0], in[1], ... in[i-1] are unique, but in[i-1] == in[i]. + Set i to n if everything is unique. + */ + for (i = 1; i < n; ++i) { + if (in[i-1] == in[i]) + break; + } + + /* in[:i] is unique; copy to out[:i] if needed. */ + assert(i > 0); + if (in != out) + memcpy(out, in, i * sizeof(element_type)); + + pout = out + i; + lastelt = in[i-1]; /* safe even when i == n */ + for (++i; i < n; ++i) { + element_type elt = in[i]; + if (elt != lastelt) + *pout++ = lastelt = elt; + } + return pout - out; +} + +#if 0 +/* insertionsort is no longer referenced directly, but I'd like to keep + * the code here just in case. 
+ */ + +/* Straight insertion sort of the n elements starting at 'in'. */ +static void +insertionsort(element_type *in, size_t n) +{ + element_type *p, *q; + element_type minimum; /* smallest seen so far */ + element_type *plimit = in + n; + + assert(in); + if (n < 2) + return; + + minimum = *in; + for (p = in+1; p < plimit; ++p) { + /* *in <= *(in+1) <= ... <= *(p-1). Slide *p into place. */ + element_type thiselt = *p; + if (thiselt < minimum) { + /* This is a new minimum. This saves p-in compares + when it happens, but should happen so rarely that + it's not worth checking for its own sake: the + point is that the far more popular 'else' branch can + exploit that thiselt is *not* the smallest so far. + */ + memmove(in+1, in, (p - in) * sizeof(*in)); + *in = minimum = thiselt; + } + else { + /* thiselt >= minimum, so the loop will find a q + with *q <= thiselt. This saves testing q >= in + on each trip. It's such a simple loop that saving + a per-trip test is a major speed win. + */ + for (q = p-1; *q > thiselt; --q) + *(q+1) = *q; + *(q+1) = thiselt; + } + } +} +#endif + +/* The maximum number of elements in the pending-work stack quicksort + maintains. The maximum stack depth is approximately log2(n), so + arrays of size up to approximately MAX_INSERTION * 2**STACKSIZE can be + sorted. The memory burden for the stack is small, so better safe than + sorry. +*/ +#define STACKSIZE 60 + +/* A _stacknode remembers a contiguous slice of an array that needs to sorted. + lo must be <= hi, and, unlike Python array slices, this includes both ends. +*/ +struct _stacknode { + element_type *lo; + element_type *hi; +}; + +static void +quicksort(element_type *plo, size_t n) +{ + element_type *phi; + + /* Swap two array elements. */ + element_type _temp; +#define SWAP(P, Q) (_temp = *(P), *(P) = *(Q), *(Q) = _temp) + + /* Stack of pending array slices to be sorted. 
*/
+	struct _stacknode stack[STACKSIZE];
+	struct _stacknode *stackfree = stack;	/* available stack slot */
+
+	/* Push an array slice on the pending-work stack. */
+#define PUSH(PLO, PHI)					\
+	do {						\
+		assert(stackfree - stack < STACKSIZE);	\
+		assert((PLO) <= (PHI));			\
+		stackfree->lo = (PLO);			\
+		stackfree->hi = (PHI);			\
+		++stackfree;				\
+	} while(0)
+
+	assert(plo);
+	phi = plo + n - 1;
+
+	for (;;) {
+		element_type pivot;
+		element_type *pi, *pj;
+
+		assert(plo <= phi);
+		n = phi - plo + 1;
+		if (n <= MAX_INSERTION) {
+			/* Do a small insertion sort.  Contra Knuth, we do
+			   this now instead of waiting until the end, because
+			   this little slice is likely still in cache now.
+			*/
+			element_type *p, *q;
+			element_type minimum = *plo;
+
+			for (p = plo+1; p <= phi; ++p) {
+				/* *plo <= *(plo+1) <= ... <= *(p-1).
+				   Slide *p into place. */
+				element_type thiselt = *p;
+				if (thiselt < minimum) {
+					/* New minimum. */
+					memmove(plo+1,
+						plo,
+						(p - plo) * sizeof(*p));
+					*plo = minimum = thiselt;
+				}
+				else {
+					/* thiselt >= minimum, so the loop will
+					   find a q with *q <= thiselt.
+					*/
+					for (q = p-1; *q > thiselt; --q)
+						*(q+1) = *q;
+					*(q+1) = thiselt;
+				}
+			}
+
+			/* Pop another slice off the stack. */
+			if (stack == stackfree)
+				break;	/* no more slices -- we're done */
+			--stackfree;
+			plo = stackfree->lo;
+			phi = stackfree->hi;
+			continue;
+		}
+
+		/* Partition the slice.
+		   For pivot, take the median of the leftmost, rightmost, and
+		   middle elements.  First sort those three; then the median
+		   is the middle one.  For technical reasons, the middle
+		   element is swapped to plo+1 first (see Knuth Vol 3 Ed 2
+		   section 5.2.2 exercise 55 -- reverse-sorted arrays can
+		   take quadratic time otherwise!).
+		*/
+		{
+			element_type *plop1 = plo + 1;
+			element_type *pmid = plo + (n >> 1);
+
+			assert(plo < pmid && pmid < phi);
+			SWAP(plop1, pmid);
+
+			/* Sort plo, plop1, phi. */
+			/* Smaller of rightmost two -> middle.
*/
+			if (*plop1 > *phi)
+				SWAP(plop1, phi);
+			/* Smallest of all -> left; if plo is already the
+			   smallest, the sort is complete.
+			*/
+			if (*plo > *plop1) {
+				SWAP(plo, plop1);
+				/* Largest of all -> right. */
+				if (*plop1 > *phi)
+					SWAP(plop1, phi);
+			}
+			pivot = *plop1;
+			pi = plop1;
+		}
+		assert(*plo <= pivot);
+		assert(*pi == pivot);
+		assert(*phi >= pivot);
+		pj = phi;
+
+		/* Partition wrt pivot.  This is the time-critical part, and
+		   nearly every decision in the routine aims at making this
+		   loop as fast as possible -- even small points like
+		   arranging that all loop tests can be done correctly at the
+		   bottoms of loops instead of the tops, and that pointers can
+		   be dereferenced directly as-is (without fiddly +1 or -1).
+		   The aim is to make the C here so simple that a compiler
+		   has a good shot at doing as well as hand-crafted assembler.
+		*/
+		for (;;) {
+			/* Invariants:
+			   1. pi < pj.
+			   2. All elements at plo, plo+1 .. pi are <= pivot.
+			   3. All elements at pj, pj+1 .. phi are >= pivot.
+			   4. There is an element >= pivot to the right of pi.
+			   5. There is an element <= pivot to the left of pj.
+
+			   Note that #4 and #5 save us from needing to check
+			   that the pointers stay in bounds.
+			*/
+			assert(pi < pj);
+
+			do { ++pi; } while (*pi < pivot);
+			assert(pi <= pj);
+
+			do { --pj; } while (*pj > pivot);
+			assert(pj >= pi - 1);
+
+			if (pi < pj)
+				SWAP(pi, pj);
+			else
+				break;
+		}
+		assert(plo+1 < pi && pi <= phi);
+		assert(plo < pj && pj < phi);
+		assert(*pi >= pivot);
+		assert( (pi == pj && *pj == pivot) ||
+			(pj + 1 == pi && *pj <= pivot) );
+
+		/* Swap pivot into its final position, pj. */
+		assert(plo[1] == pivot);
+		plo[1] = *pj;
+		*pj = pivot;
+
+		/* Subfiles are from plo to pj-1 inclusive, and pj+1 to phi
+		   inclusive.  Push the larger one, and loop back to do the
+		   smaller one directly.
+ */ + if (pj - plo >= phi - pj) { + PUSH(plo, pj-1); + plo = pj+1; + } + else { + PUSH(pj+1, phi); + phi = pj-1; + } + } + +#undef PUSH +#undef SWAP +} + +/* Sort p and remove duplicates, as fast as we can. */ +static size_t +sort_int_nodups(KEY_TYPE *p, size_t n) +{ + size_t nunique; + element_type *work; + + assert(sizeof(KEY_TYPE) == sizeof(element_type)); + assert(p); + + /* Use quicksort if the array is small, OR if malloc can't find + enough temp memory for radixsort. + */ + work = NULL; + if (n > QUICKSORT_BEATS_RADIXSORT) + work = (element_type *)malloc(n * sizeof(element_type)); + + if (work) { + element_type *out = radixsort_int(p, work, n); + nunique = uniq(p, out, n); + free(work); + } + else { + quicksort(p, n); + nunique = uniq(p, p, n); + } + + return nunique; +} diff --git a/thesisenv/lib/python3.6/site-packages/BTrees/tests/__init__.py b/thesisenv/lib/python3.6/site-packages/BTrees/tests/__init__.py new file mode 100644 index 0000000..c98a506 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/BTrees/tests/__init__.py @@ -0,0 +1 @@ +# If tests is a package, debugging is a bit easier. diff --git a/thesisenv/lib/python3.6/site-packages/BTrees/tests/common.py b/thesisenv/lib/python3.6/site-packages/BTrees/tests/common.py new file mode 100644 index 0000000..f2e7c32 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/BTrees/tests/common.py @@ -0,0 +1,2608 @@ +############################################################################## +# +# Copyright (c) 2001-2012 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## +from __future__ import division + +import unittest +import platform +from unittest import skip + + +from BTrees._compat import PY3 +from BTrees._compat import PURE_PYTHON +from BTrees._compat import PYPY + +def _no_op(test_method): + return test_method + +try: + __import__('ZODB') +except ImportError: + _skip_wo_ZODB = skip('ZODB not available') +else: + _skip_wo_ZODB = _no_op + +if PY3: + _skip_under_Py3k = skip("Not on Python 3") +else: + _skip_under_Py3k = _no_op + +if platform.architecture()[0] == '32bit': + _skip_on_32_bits = skip("32-bit platform") +else: + _skip_on_32_bits = _no_op + +if PURE_PYTHON: + skipOnPurePython = skip("Not on Pure Python") +else: + skipOnPurePython = _no_op + +def _skip_if_pure_py_and_py_test(self): + if PURE_PYTHON and 'Py' in type(self).__name__: + # No need to run this again. The "C" tests will catch it. + # This relies on the fact that we always define tests in pairs, + # one normal/C and one with Py in the name for the Py test. 
+            raise unittest.SkipTest("Redundant with the C test")
+
+class Base(object):
+    # Tests common to all types: sets, buckets, and BTrees
+
+    db = None
+
+    def _getTargetClass(self):
+        raise NotImplementedError("subclass should return the target type")
+
+    def _makeOne(self):
+        return self._getTargetClass()()
+
+    def setUp(self):
+        super(Base, self).setUp()
+        _skip_if_pure_py_and_py_test(self)
+
+    def tearDown(self):
+        if self.db is not None:
+            self.db.close()
+
+    def _getRoot(self):
+        from ZODB import DB
+        from ZODB.MappingStorage import MappingStorage
+        if self.db is None:
+            # Unclear:  On the next line, the ZODB4 flavor of this routine
+            # passes a cache_size argument:
+            #    self.db = DB(MappingStorage(), cache_size=1)
+            # If that's done here, though, testLoadAndStore() and
+            # testGhostUnghost() both nail the CPU and seemingly
+            # never finish.
+            self.db = DB(MappingStorage())
+        return self.db.open().root()
+
+    def _closeRoot(self, root):
+        import transaction
+        # If we don't commit/abort the transaction, then
+        # closing the Connection tends to fail with
+        # "Cannot close connection joined to transaction"
+        transaction.abort()
+        root._p_jar.close()
+
+    def testPersistentSubclass(self):
+        # Can we subclass this and Persistent?
+ # https://github.com/zopefoundation/BTrees/issues/78 + import persistent + + class PersistentSubclass(persistent.Persistent): + pass + + __traceback_info__ = self._getTargetClass(), persistent.Persistent + type('Subclass', (self._getTargetClass(), PersistentSubclass), {}) + + def testPurePython(self): + import importlib + kind = self._getTargetClass() + class_name = kind.__name__ + module_name = kind.__module__ + module = importlib.import_module(module_name) + + # If we're in pure python mode, our target class module + # should not have an '_' in it (fix_pickle changes the name + # to remove the 'Py') + + # If we're in the C extension mode, our target class + # module still doesn't have the _ in it, but we should be able to find + # a Py class that's different + + self.assertNotIn('_', module_name) + self.assertIs(getattr(module, class_name), kind) + + if not PURE_PYTHON and 'Py' not in type(self).__name__: + self.assertIsNot(getattr(module, class_name + 'Py'), kind) + + @_skip_wo_ZODB + def testLoadAndStore(self): + import transaction + for i in 0, 10, 1000: + t = self._makeOne() + self._populate(t, i) + root = None + root = self._getRoot() + root[i] = t + transaction.commit() + + root2 = self._getRoot() + if hasattr(t, 'items'): + self.assertEqual(list(root2[i].items()) , list(t.items())) + else: + self.assertEqual(list(root2[i].keys()) , list(t.keys())) + + self._closeRoot(root) + self._closeRoot(root2) + + def testSetstateArgumentChecking(self): + try: + self._makeOne().__setstate__(('',)) + except TypeError as v: + self.assertEqual(str(v), 'tuple required for first state element') + else: + raise AssertionError("Expected exception") + + @_skip_wo_ZODB + def testGhostUnghost(self): + import transaction + for i in 0, 10, 1000: + t = self._makeOne() + self._populate(t, i) + root = self._getRoot() + root[i] = t + transaction.commit() + + root2 = self._getRoot() + root2[i]._p_deactivate() + transaction.commit() + if hasattr(t, 'items'): + 
self.assertEqual(list(root2[i].items()) , list(t.items())) + else: + self.assertEqual(list(root2[i].keys()) , list(t.keys())) + + self._closeRoot(root) + self._closeRoot(root2) + + def testSimpleExclusiveKeyRange(self): + t = self._makeOne() + self.assertEqual(list(t.keys()), []) + self.assertEqual(list(t.keys(excludemin=True)), []) + self.assertEqual(list(t.keys(excludemax=True)), []) + self.assertEqual(list(t.keys(excludemin=True, excludemax=True)), []) + + self._populate(t, 1) + self.assertEqual(list(t.keys()), [0]) + self.assertEqual(list(t.keys(excludemin=True)), []) + self.assertEqual(list(t.keys(excludemax=True)), []) + self.assertEqual(list(t.keys(excludemin=True, excludemax=True)), []) + + t.clear() + self._populate(t, 2) + self.assertEqual(list(t.keys()), [0, 1]) + self.assertEqual(list(t.keys(excludemin=True)), [1]) + self.assertEqual(list(t.keys(excludemax=True)), [0]) + self.assertEqual(list(t.keys(excludemin=True, excludemax=True)), []) + + t.clear() + self._populate(t, 3) + self.assertEqual(list(t.keys()), [0, 1, 2]) + self.assertEqual(list(t.keys(excludemin=True)), [1, 2]) + self.assertEqual(list(t.keys(excludemax=True)), [0, 1]) + self.assertEqual(list(t.keys(excludemin=True, excludemax=True)), [1]) + + self.assertEqual(list(t.keys(-1, 3, excludemin=True, excludemax=True)), + [0, 1, 2]) + self.assertEqual(list(t.keys(0, 3, excludemin=True, excludemax=True)), + [1, 2]) + self.assertEqual(list(t.keys(-1, 2, excludemin=True, excludemax=True)), + [0, 1]) + self.assertEqual(list(t.keys(0, 2, excludemin=True, excludemax=True)), + [1]) + + @_skip_wo_ZODB + def test_UpdatesDoReadChecksOnInternalNodes(self): + import transaction + from ZODB import DB + from ZODB.MappingStorage import MappingStorage + t = self._makeOne() + if not hasattr(t, '_firstbucket'): + return + self._populate(t, 1000) + store = MappingStorage() + db = DB(store) + conn = db.open() + conn.root.t = t + transaction.commit() + + read = [] + def readCurrent(ob): + read.append(ob) + 
conn.__class__.readCurrent(conn, ob) + return 1 + + conn.readCurrent = readCurrent + + try: + add = t.add + remove = t.remove + except AttributeError: + def add(i): + t[i] = i + def remove(i): + del t[i] + + # Modifying a thing + remove(100) + self.assertTrue(t in read) + del read[:] + add(100) + self.assertTrue(t in read) + del read[:] + + transaction.abort() + conn.cacheMinimize() + list(t) + self.assertTrue(100 in t) + self.assertTrue(not read) + + def test_impl_pickle(self): + # Issue #2 + # Nothing we pickle should include the 'Py' suffix of + # implementation classes, and unpickling should give us + # back the best available type + import pickle + made_one = self._makeOne() + + for proto in range(1, pickle.HIGHEST_PROTOCOL + 1): + dumped_str = pickle.dumps(made_one, proto) + self.assertTrue(b'Py' not in dumped_str, repr(dumped_str)) + + loaded_one = pickle.loads(dumped_str) + + # If we're testing the pure-Python version, but we have the + # C extension available, then the loaded type will be the C + # extension but the made type will be the Python version. + # Otherwise, they match. (Note that if we don't have C extensions + # available, the __name__ will be altered to not have Py in it. 
See _fix_pickle) + if 'Py' in type(made_one).__name__: + self.assertTrue(type(loaded_one) is not type(made_one)) + else: + self.assertTrue(type(loaded_one) is type(made_one) is self._getTargetClass(), (type(loaded_one), type(made_one), self._getTargetClass(), repr(dumped_str))) + + dumped_str2 = pickle.dumps(loaded_one, proto) + self.assertEqual(dumped_str, dumped_str2) + + def test_pickle_empty(self): + # Issue #2 + # Pickling an empty object and unpickling it should result + # in an object that can be pickled, yielding an identical + # pickle (and not an AttributeError) + import pickle + t = self._makeOne() + + s = pickle.dumps(t) + t2 = pickle.loads(s) + + s2 = pickle.dumps(t2) + self.assertEqual(s, s2) + + if hasattr(t2, '__len__'): + # checks for _firstbucket + self.assertEqual(0, len(t2)) + + # This doesn't hold for things like Bucket and Set, sadly + # self.assertEqual(t, t2) + + def test_pickle_subclass(self): + # Issue #2: Make sure our class swizzling doesn't break + # pickling subclasses + + # We need a globally named subclass for pickle, but it needs + # to be unique in case tests run in parallel + base_class = type(self._makeOne()) + class_name = 'PickleSubclassOf' + base_class.__name__ + PickleSubclass = type(class_name, (base_class,), {}) + globals()[class_name] = PickleSubclass + + import pickle + loaded = pickle.loads(pickle.dumps(PickleSubclass())) + self.assertTrue(type(loaded) is PickleSubclass, type(loaded)) + self.assertTrue(PickleSubclass().__class__ is PickleSubclass) + + def test_isinstance_subclass(self): + # Issue #2: + # In some cases we define a __class__ attribute that gets + # invoked for isinstance and *lies*. Check that isinstance still + # works (almost) as expected. 
+
+        t = self._makeOne()
+        # It's a little bit weird, but in the fibbing case,
+        # we're an instance of two unrelated classes
+        self.assertTrue(isinstance(t, type(t)), (t, type(t)))
+        self.assertTrue(isinstance(t, t.__class__))
+
+        class Sub(type(t)):
+            pass
+
+        self.assertTrue(issubclass(Sub, type(t)))
+
+        if type(t) is not t.__class__:
+            # We're fibbing; this breaks issubclass of itself,
+            # contrary to the usual mechanism
+            self.assertFalse(issubclass(t.__class__, type(t)))
+
+
+        class NonSub(object):
+            pass
+
+        self.assertFalse(issubclass(NonSub, type(t)))
+        self.assertFalse(isinstance(NonSub(), type(t)))
+
+class MappingBase(Base):
+    # Tests common to mappings (buckets, btrees)
+
+    def _populate(self, t, l):
+        # Make some data
+        for i in range(l):
+            t[i]=i
+
+    def testShortRepr(self):
+        # test the repr because buckets have a complex repr implementation
+        # internally the cutoff from a stack allocated buffer to a heap
+        # allocated buffer is 10000.
+        t = self._makeOne()
+        for i in range(5):
+            t[i] = i
+        r = repr(t)
+        # Make sure the repr is **not** 10000 bytes long for a short bucket.
+        # (the buffer must be terminated when copied).
+ self.assertTrue(len(r) < 10000) + # Make sure the repr is human readable if it's a bucket + if 'Bucket' in r: + self.assertTrue(r.startswith("BTrees")) + self.assertTrue(r.endswith(repr(t.items()) + ')'), r) + else: + self.assertEqual(r[:8], ' 10000) + + def testGetItemFails(self): + self.assertRaises(KeyError, self._getitemfail) + + def _getitemfail(self): + return self._makeOne()[1] + + def testGetReturnsDefault(self): + self.assertEqual(self._makeOne().get(1) , None) + self.assertEqual(self._makeOne().get(1, 'foo') , 'foo') + + def testSetItemGetItemWorks(self): + t = self._makeOne() + t[1] = 1 + a = t[1] + self.assertEqual(a , 1, repr(a)) + + def testReplaceWorks(self): + t = self._makeOne() + t[1] = 1 + self.assertEqual(t[1] , 1, t[1]) + t[1] = 2 + self.assertEqual(t[1] , 2, t[1]) + + def testLen(self): + import random + t = self._makeOne() + added = {} + r = list(range(1000)) + for x in r: + k = random.choice(r) + t[k] = x + added[k] = x + addl = added.keys() + self.assertEqual(len(t) , len(addl), len(t)) + + def testHasKeyWorks(self): + from .._compat import PY2 + t = self._makeOne() + t[1] = 1 + if PY2: + self.assertTrue(t.has_key(1)) + self.assertTrue(1 in t) + self.assertTrue(0 not in t) + self.assertTrue(2 not in t) + + def testValuesWorks(self): + t = self._makeOne() + for x in range(100): + t[x] = x*x + v = t.values() + for i in range(100): + self.assertEqual(v[i], i*i) + self.assertRaises(IndexError, lambda: v[i+1]) + i = 0 + for value in t.itervalues(): + self.assertEqual(value, i*i) + i += 1 + + def testValuesWorks1(self): + t = self._makeOne() + for x in range(100): + t[99-x] = x + + for x in range(40): + lst = sorted(t.values(0+x,99-x)) + self.assertEqual(lst, list(range(0+x,99-x+1))) + + lst = sorted(t.values(max=99-x, min=0+x)) + self.assertEqual(lst, list(range(0+x,99-x+1))) + + def testValuesNegativeIndex(self): + t = self._makeOne() + L = [-3, 6, -11, 4] + for i in L: + t[i] = i + L = sorted(L) + vals = t.values() + for i in range(-1, -5, 
-1): + self.assertEqual(vals[i], L[i]) + self.assertRaises(IndexError, lambda: vals[-5]) + + def testKeysWorks(self): + t = self._makeOne() + for x in range(100): + t[x] = x + v = t.keys() + i = 0 + for x in v: + self.assertEqual(x,i) + i = i + 1 + self.assertRaises(IndexError, lambda: v[i]) + + for x in range(40): + lst = t.keys(0+x,99-x) + self.assertEqual(list(lst), list(range(0+x, 99-x+1))) + + lst = t.keys(max=99-x, min=0+x) + self.assertEqual(list(lst), list(range(0+x, 99-x+1))) + + self.assertEqual(len(v), 100) + + def testKeysNegativeIndex(self): + t = self._makeOne() + L = [-3, 6, -11, 4] + for i in L: + t[i] = i + L = sorted(L) + keys = t.keys() + for i in range(-1, -5, -1): + self.assertEqual(keys[i], L[i]) + self.assertRaises(IndexError, lambda: keys[-5]) + + def testItemsWorks(self): + t = self._makeOne() + for x in range(100): + t[x] = 2*x + v = t.items() + i = 0 + for x in v: + self.assertEqual(x[0], i) + self.assertEqual(x[1], 2*i) + i += 1 + self.assertRaises(IndexError, lambda: v[i+1]) + + i = 0 + for x in t.iteritems(): + self.assertEqual(x, (i, 2*i)) + i += 1 + + items = list(t.items(min=12, max=20)) + self.assertEqual(items, list(zip(range(12, 21), range(24, 43, 2)))) + + items = list(t.iteritems(min=12, max=20)) + self.assertEqual(items, list(zip(range(12, 21), range(24, 43, 2)))) + + def testItemsNegativeIndex(self): + t = self._makeOne() + L = [-3, 6, -11, 4] + for i in L: + t[i] = i + L = sorted(L) + items = t.items() + for i in range(-1, -5, -1): + self.assertEqual(items[i], (L[i], L[i])) + self.assertRaises(IndexError, lambda: items[-5]) + + def testDeleteInvalidKeyRaisesKeyError(self): + self.assertRaises(KeyError, self._deletefail) + + def _deletefail(self): + t = self._makeOne() + del t[1] + + def testMaxKeyMinKey(self): + t = self._makeOne() + t[7] = 6 + t[3] = 10 + t[8] = 12 + t[1] = 100 + t[5] = 200 + t[10] = 500 + t[6] = 99 + t[4] = 150 + del t[7] + self.assertEqual(t.maxKey(), 10) + self.assertEqual(t.maxKey(None), 10) + 
self.assertEqual(t.maxKey(6), 6) + self.assertEqual(t.maxKey(9), 8) + self.assertEqual(t.minKey(), 1) + self.assertEqual(t.minKey(None), 1) + self.assertEqual(t.minKey(3), 3) + self.assertEqual(t.minKey(9), 10) + + try: + t.maxKey(t.minKey() - 1) + except ValueError as err: + self.assertEqual(str(err), "no key satisfies the conditions") + else: + self.fail("expected ValueError") + + try: + t.minKey(t.maxKey() + 1) + except ValueError as err: + self.assertEqual(str(err), "no key satisfies the conditions") + else: + self.fail("expected ValueError") + + def testClear(self): + import random + t = self._makeOne() + r = list(range(100)) + for x in r: + rnd = random.choice(r) + t[rnd] = 0 + t.clear() + diff = lsubtract(list(t.keys()), []) + self.assertEqual(diff, []) + + def testUpdate(self): + import random + t = self._makeOne() + d={} + l=[] + for i in range(10000): + k=random.randrange(-2000, 2001) + d[k]=i + l.append((k, i)) + + items= sorted(d.items()) + + t.update(d) + self.assertEqual(list(t.items()), items) + + t.clear() + self.assertEqual(list(t.items()), []) + + t.update(l) + self.assertEqual(list(t.items()), items) + + # Before ZODB 3.4.2, update/construction from PersistentMapping failed. + def testUpdateFromPersistentMapping(self): + from persistent.mapping import PersistentMapping + t = self._makeOne() + pm = PersistentMapping({1: 2}) + t.update(pm) + self.assertEqual(list(t.items()), [(1, 2)]) + + # Construction goes thru the same internals as .update(). 
+ t = t.__class__(pm) + self.assertEqual(list(t.items()), [(1, 2)]) + + def testEmptyRangeSearches(self): + t = self._makeOne() + t.update([(1,1), (5,5), (9,9)]) + self.assertEqual(list(t.keys(-6,-4)), [], list(t.keys(-6,-4))) + self.assertEqual(list(t.keys(2,4)), [], list(t.keys(2,4))) + self.assertEqual(list(t.keys(6,8)), [], list(t.keys(6,8))) + self.assertEqual(list(t.keys(10,12)), [], list(t.keys(10,12))) + self.assertEqual(list(t.keys(9, 1)), [], list(t.keys(9, 1))) + + # For IITreeSets, this one was returning 31 for len(keys), and + # list(keys) produced a list with 100 elements. + t.clear() + t.update(list(zip(range(300), range(300)))) + keys = t.keys(200, 50) + self.assertEqual(len(keys), 0) + self.assertEqual(list(keys), []) + self.assertEqual(list(t.iterkeys(200, 50)), []) + + keys = t.keys(max=50, min=200) + self.assertEqual(len(keys), 0) + self.assertEqual(list(keys), []) + self.assertEqual(list(t.iterkeys(max=50, min=200)), []) + + def testSlicing(self): + # Test that slicing of .keys()/.values()/.items() works exactly the + # same way as slicing a Python list with the same contents. + # This tests fixes to several bugs in this area, starting with + # http://collector.zope.org/Zope/419, + # "BTreeItems slice contains 1 too many elements". + from .._compat import xrange + t = self._makeOne() + for n in range(10): + t.clear() + self.assertEqual(len(t), 0) + + keys = [] + values = [] + items = [] + for key in range(n): + value = -2 * key + t[key] = value + keys.append(key) + values.append(value) + items.append((key, value)) + self.assertEqual(len(t), n) + + kslice = t.keys() + vslice = t.values() + islice = t.items() + self.assertEqual(len(kslice), n) + self.assertEqual(len(vslice), n) + self.assertEqual(len(islice), n) + + # Test whole-structure slices. 
+ x = kslice[:] + self.assertEqual(list(x), keys[:]) + + x = vslice[:] + self.assertEqual(list(x), values[:]) + + x = islice[:] + self.assertEqual(list(x), items[:]) + + for lo in range(-2*n, 2*n+1): + # Test one-sided slices. + x = kslice[:lo] + self.assertEqual(list(x), keys[:lo]) + x = kslice[lo:] + self.assertEqual(list(x), keys[lo:]) + + x = vslice[:lo] + self.assertEqual(list(x), values[:lo]) + x = vslice[lo:] + self.assertEqual(list(x), values[lo:]) + + x = islice[:lo] + self.assertEqual(list(x), items[:lo]) + x = islice[lo:] + self.assertEqual(list(x), items[lo:]) + + for hi in range(-2*n, 2*n+1): + # Test two-sided slices. + x = kslice[lo:hi] + self.assertEqual(list(x), keys[lo:hi]) + + x = vslice[lo:hi] + self.assertEqual(list(x), values[lo:hi]) + + x = islice[lo:hi] + self.assertEqual(list(x), items[lo:hi]) + + # The specific test case from Zope collector 419. + t.clear() + for i in xrange(100): + t[i] = 1 + tslice = t.items()[20:80] + self.assertEqual(len(tslice), 60) + self.assertEqual(list(tslice), list(zip(range(20, 80), [1]*60))) + + def testIterators(self): + t = self._makeOne() + + for keys in [], [-2], [1, 4], list(range(-170, 2000, 6)): + t.clear() + for k in keys: + t[k] = -3 * k + + self.assertEqual(list(t), keys) + + x = [] + for k in t: + x.append(k) + self.assertEqual(x, keys) + + it = iter(t) + self.assertTrue(it is iter(it)) + x = [] + try: + while 1: + x.append(next(it)) + except StopIteration: + pass + self.assertEqual(x, keys) + + self.assertEqual(list(t.iterkeys()), keys) + self.assertEqual(list(t.itervalues()), list(t.values())) + self.assertEqual(list(t.iteritems()), list(t.items())) + + def testRangedIterators(self): + t = self._makeOne() + + for keys in [], [-2], [1, 4], list(range(-170, 2000, 13)): + t.clear() + values = [] + for k in keys: + value = -3 * k + t[k] = value + values.append(value) + items = list(zip(keys, values)) + + self.assertEqual(list(t.iterkeys()), keys) + self.assertEqual(list(t.itervalues()), values) + 
self.assertEqual(list(t.iteritems()), items) + + if not keys: + continue + + min_mid_max = (keys[0], keys[len(keys) >> 1], keys[-1]) + for key1 in min_mid_max: + for lo in range(key1 - 1, key1 + 2): + # Test one-sided range iterators. + goodkeys = [k for k in keys if lo <= k] + got = t.iterkeys(lo) + self.assertEqual(goodkeys, list(got)) + + goodvalues = [t[k] for k in goodkeys] + got = t.itervalues(lo) + self.assertEqual(goodvalues, list(got)) + + gooditems = list(zip(goodkeys, goodvalues)) + got = t.iteritems(lo) + self.assertEqual(gooditems, list(got)) + + for key2 in min_mid_max: + for hi in range(key2 - 1, key2 + 2): + goodkeys = [k for k in keys if lo <= k <= hi] + got = t.iterkeys(min=lo, max=hi) + self.assertEqual(goodkeys, list(got)) + + goodvalues = [t[k] for k in goodkeys] + got = t.itervalues(lo, max=hi) + self.assertEqual(goodvalues, list(got)) + + gooditems = list(zip(goodkeys, goodvalues)) + got = t.iteritems(max=hi, min=lo) + self.assertEqual(gooditems, list(got)) + + def testBadUpdateTupleSize(self): + # This one silently ignored the excess in Zope3. + t = self._makeOne() + self.assertRaises(TypeError, t.update, [(1, 2, 3)]) + + # This one dumped core in Zope3. + self.assertRaises(TypeError, t.update, [(1,)]) + + # This one should simply succeed. 
+ t.update([(1, 2)]) + self.assertEqual(list(t.items()), [(1, 2)]) + + def testSimpleExclusivRanges(self): + def identity(x): + return x + def dup(x): + return [(y, y) for y in x] + + for methodname, f in (("keys", identity), + ("values", identity), + ("items", dup), + ("iterkeys", identity), + ("itervalues", identity), + ("iteritems", dup)): + + t = self._makeOne() + meth = getattr(t, methodname, None) + if meth is None: + continue + + self.assertEqual(list(meth()), []) + self.assertEqual(list(meth(excludemin=True)), []) + self.assertEqual(list(meth(excludemax=True)), []) + self.assertEqual(list(meth(excludemin=True, excludemax=True)), []) + + self._populate(t, 1) + self.assertEqual(list(meth()), f([0])) + self.assertEqual(list(meth(excludemin=True)), []) + self.assertEqual(list(meth(excludemax=True)), []) + self.assertEqual(list(meth(excludemin=True, excludemax=True)), []) + + t.clear() + self._populate(t, 2) + self.assertEqual(list(meth()), f([0, 1])) + self.assertEqual(list(meth(excludemin=True)), f([1])) + self.assertEqual(list(meth(excludemax=True)), f([0])) + self.assertEqual(list(meth(excludemin=True, excludemax=True)), []) + + t.clear() + self._populate(t, 3) + self.assertEqual(list(meth()), f([0, 1, 2])) + self.assertEqual(list(meth(excludemin=True)), f([1, 2])) + self.assertEqual(list(meth(excludemax=True)), f([0, 1])) + self.assertEqual(list(meth(excludemin=True, excludemax=True)), + f([1])) + self.assertEqual(list(meth(-1, 3, excludemin=True, + excludemax=True)), + f([0, 1, 2])) + self.assertEqual(list(meth(0, 3, excludemin=True, + excludemax=True)), + f([1, 2])) + self.assertEqual(list(meth(-1, 2, excludemin=True, + excludemax=True)), + f([0, 1])) + self.assertEqual(list(meth(0, 2, excludemin=True, + excludemax=True)), + f([1])) + + def testSetdefault(self): + t = self._makeOne() + + self.assertEqual(t.setdefault(1, 2), 2) + # That should also have associated 1 with 2 in the tree. 
+ self.assertTrue(1 in t) + self.assertEqual(t[1], 2) + # And trying to change it again should have no effect. + self.assertEqual(t.setdefault(1, 666), 2) + self.assertEqual(t[1], 2) + + # Not enough arguments. + self.assertRaises(TypeError, t.setdefault) + self.assertRaises(TypeError, t.setdefault, 1) + # Too many arguments. + self.assertRaises(TypeError, t.setdefault, 1, 2, 3) + + + def testPop(self): + t = self._makeOne() + + # Empty container. + # If no default given, raises KeyError. + self.assertRaises(KeyError, t.pop, 1) + # But if default given, returns that instead. + self.assertEqual(t.pop(1, 42), 42) + + t[1] = 3 + # KeyError when key is not in container and default is not passed. + self.assertRaises(KeyError, t.pop, 5) + self.assertEqual(list(t.items()), [(1, 3)]) + # If key is in container, returns the value and deletes the key. + self.assertEqual(t.pop(1), 3) + self.assertEqual(len(t), 0) + + # If key is present, return value bypassing default. + t[1] = 3 + self.assertEqual(t.pop(1, 7), 3) + self.assertEqual(len(t), 0) + + # Pop only one item. + t[1] = 3 + t[2] = 4 + self.assertEqual(len(t), 2) + self.assertEqual(t.pop(1), 3) + self.assertEqual(len(t), 1) + self.assertEqual(t[2], 4) + self.assertEqual(t.pop(1, 3), 3) + + # Too few arguments. + self.assertRaises(TypeError, t.pop) + # Too many arguments. + self.assertRaises(TypeError, t.pop, 1, 2, 3) + +class BTreeTests(MappingBase): + # Tests common to all BTrees + + def _getTargetClass(self): + # Most of the subclasses override _makeOne and not + # _getTargetClass, so we can get the type that way. + # TODO: This could change for less repetition in the subclasses, + # using the name of the class to import the module and find + # the type. 
+ if type(self)._makeOne is not BTreeTests._makeOne: + return type(self._makeOne()) + raise NotImplementedError() + + def _makeOne(self, *args): + return self._getTargetClass()(*args) + + def _checkIt(self, t): + from BTrees.check import check + t._check() + check(t) + + def testDeleteNoChildrenWorks(self): + t = self._makeOne() + t[5] = 6 + t[2] = 10 + t[6] = 12 + t[1] = 100 + t[3] = 200 + t[10] = 500 + t[4] = 99 + del t[4] + diff = lsubtract(t.keys(), [1,2,3,5,6,10]) + self.assertEqual(diff , [], diff) + self._checkIt(t) + + def testDeleteOneChildWorks(self): + t = self._makeOne() + t[5] = 6 + t[2] = 10 + t[6] = 12 + t[1] = 100 + t[3] = 200 + t[10] = 500 + t[4] = 99 + del t[3] + diff = lsubtract(t.keys(), [1,2,4,5,6,10]) + self.assertEqual(diff , [], diff) + self._checkIt(t) + + def testDeleteTwoChildrenNoInorderSuccessorWorks(self): + t = self._makeOne() + t[5] = 6 + t[2] = 10 + t[6] = 12 + t[1] = 100 + t[3] = 200 + t[10] = 500 + t[4] = 99 + del t[2] + diff = lsubtract(t.keys(), [1,3,4,5,6,10]) + self.assertEqual(diff , [], diff) + self._checkIt(t) + + def testDeleteTwoChildrenInorderSuccessorWorks(self): + # 7, 3, 8, 1, 5, 10, 6, 4 -- del 3 + t = self._makeOne() + t[7] = 6 + t[3] = 10 + t[8] = 12 + t[1] = 100 + t[5] = 200 + t[10] = 500 + t[6] = 99 + t[4] = 150 + del t[3] + diff = lsubtract(t.keys(), [1,4,5,6,7,8,10]) + self.assertEqual(diff , [], diff) + self._checkIt(t) + + def testDeleteRootWorks(self): + # 7, 3, 8, 1, 5, 10, 6, 4 -- del 7 + t = self._makeOne() + t[7] = 6 + t[3] = 10 + t[8] = 12 + t[1] = 100 + t[5] = 200 + t[10] = 500 + t[6] = 99 + t[4] = 150 + del t[7] + diff = lsubtract(t.keys(), [1,3,4,5,6,8,10]) + self.assertEqual(diff , [], diff) + self._checkIt(t) + + def testRandomNonOverlappingInserts(self): + import random + t = self._makeOne() + added = {} + r = list(range(100)) + for x in r: + k = random.choice(r) + if k not in added: + t[k] = x + added[k] = 1 + addl = sorted(added.keys()) + diff = lsubtract(list(t.keys()), addl) + 
self.assertEqual(diff , [], (diff, addl, list(t.keys()))) + self._checkIt(t) + + def testRandomOverlappingInserts(self): + import random + t = self._makeOne() + added = {} + r = list(range(100)) + for x in r: + k = random.choice(r) + t[k] = x + added[k] = 1 + addl = sorted(added.keys()) + diff = lsubtract(t.keys(), addl) + self.assertEqual(diff , [], diff) + self._checkIt(t) + + def testRandomDeletes(self): + import random + t = self._makeOne() + r = list(range(1000)) + added = [] + for x in r: + k = random.choice(r) + t[k] = x + added.append(k) + deleted = [] + for x in r: + k = random.choice(r) + if k in t: + self.assertTrue(k in t) + del t[k] + deleted.append(k) + if k in t: + self.fail( "had problems deleting %s" % k ) + badones = [] + for x in deleted: + if x in t: + badones.append(x) + self.assertEqual(badones , [], (badones, added, deleted)) + self._checkIt(t) + + def testTargetedDeletes(self): + import random + t = self._makeOne() + r = list(range(1000)) + for x in r: + k = random.choice(r) + t[k] = x + for x in r: + try: + del t[x] + except KeyError: + pass + self.assertEqual(realseq(t.keys()) , [], realseq(t.keys())) + self._checkIt(t) + + def testPathologicalRightBranching(self): + t = self._makeOne() + r = list(range(1000)) + for x in r: + t[x] = 1 + self.assertEqual(realseq(t.keys()) , r, realseq(t.keys())) + for x in r: + del t[x] + self.assertEqual(realseq(t.keys()) , [], realseq(t.keys())) + self._checkIt(t) + + def testPathologicalLeftBranching(self): + t = self._makeOne() + r = list(range(1000)) + revr = list(reversed(r[:])) + for x in revr: + t[x] = 1 + self.assertEqual(realseq(t.keys()) , r, realseq(t.keys())) + + for x in revr: + del t[x] + self.assertEqual(realseq(t.keys()) , [], realseq(t.keys())) + self._checkIt(t) + + def testSuccessorChildParentRewriteExerciseCase(self): + t = self._makeOne() + add_order = [ + 85, 73, 165, 273, 215, 142, 233, 67, 86, 166, 235, 225, 255, + 73, 175, 171, 285, 162, 108, 28, 283, 258, 232, 199, 260, + 298, 
275, 44, 261, 291, 4, 181, 285, 289, 216, 212, 129, + 243, 97, 48, 48, 159, 22, 285, 92, 110, 27, 55, 202, 294, + 113, 251, 193, 290, 55, 58, 239, 71, 4, 75, 129, 91, 111, + 271, 101, 289, 194, 218, 77, 142, 94, 100, 115, 101, 226, + 17, 94, 56, 18, 163, 93, 199, 286, 213, 126, 240, 245, 190, + 195, 204, 100, 199, 161, 292, 202, 48, 165, 6, 173, 40, 218, + 271, 228, 7, 166, 173, 138, 93, 22, 140, 41, 234, 17, 249, + 215, 12, 292, 246, 272, 260, 140, 58, 2, 91, 246, 189, 116, + 72, 259, 34, 120, 263, 168, 298, 118, 18, 28, 299, 192, 252, + 112, 60, 277, 273, 286, 15, 263, 141, 241, 172, 255, 52, 89, + 127, 119, 255, 184, 213, 44, 116, 231, 173, 298, 178, 196, + 89, 184, 289, 98, 216, 115, 35, 132, 278, 238, 20, 241, 128, + 179, 159, 107, 206, 194, 31, 260, 122, 56, 144, 118, 283, + 183, 215, 214, 87, 33, 205, 183, 212, 221, 216, 296, 40, + 108, 45, 188, 139, 38, 256, 276, 114, 270, 112, 214, 191, + 147, 111, 299, 107, 101, 43, 84, 127, 67, 205, 251, 38, 91, + 297, 26, 165, 187, 19, 6, 73, 4, 176, 195, 90, 71, 30, 82, + 139, 210, 8, 41, 253, 127, 190, 102, 280, 26, 233, 32, 257, + 194, 263, 203, 190, 111, 218, 199, 29, 81, 207, 18, 180, + 157, 172, 192, 135, 163, 275, 74, 296, 298, 265, 105, 191, + 282, 277, 83, 188, 144, 259, 6, 173, 81, 107, 292, 231, + 129, 65, 161, 113, 103, 136, 255, 285, 289, 1 + ] + delete_order = [ + 276, 273, 12, 275, 2, 286, 127, 83, 92, 33, 101, 195, + 299, 191, 22, 232, 291, 226, 110, 94, 257, 233, 215, 184, + 35, 178, 18, 74, 296, 210, 298, 81, 265, 175, 116, 261, + 212, 277, 260, 234, 6, 129, 31, 4, 235, 249, 34, 289, 105, + 259, 91, 93, 119, 7, 183, 240, 41, 253, 290, 136, 75, 292, + 67, 112, 111, 256, 163, 38, 126, 139, 98, 56, 282, 60, 26, + 55, 245, 225, 32, 52, 40, 271, 29, 252, 239, 89, 87, 205, + 213, 180, 97, 108, 120, 218, 44, 187, 196, 251, 202, 203, + 172, 28, 188, 77, 90, 199, 297, 282, 141, 100, 161, 216, + 73, 19, 17, 189, 30, 258 + ] + for x in add_order: + t[x] = 1 + for x in delete_order: + try: + del t[x] + except 
KeyError: + if x in t: + self.assertEqual(1,2,"failed to delete %s" % x) + self._checkIt(t) + + def testRangeSearchAfterSequentialInsert(self): + t = self._makeOne() + r = range(100) + for x in r: + t[x] = 0 + diff = lsubtract(list(t.keys(0, 100)), r) + self.assertEqual(diff , [], diff) + # The same thing with no bounds + diff = lsubtract(list(t.keys(None, None)), r) + self.assertEqual(diff , [], diff) + # The same thing with each bound set and the other + # explicitly None + diff = lsubtract(list(t.keys(0, None)), r) + self.assertEqual(diff , [], diff) + diff = lsubtract(list(t.keys(None,100)), r) + self.assertEqual(diff , [], diff) + self._checkIt(t) + + def testRangeSearchAfterRandomInsert(self): + import random + t = self._makeOne() + r = range(100) + a = {} + for x in r: + rnd = random.choice(r) + t[rnd] = 0 + a[rnd] = 0 + diff = lsubtract(list(t.keys(0, 100)), a.keys()) + self.assertEqual(diff, [], diff) + self._checkIt(t) + + def testPathologicalRangeSearch(self): + t = self._makeOne() + # Build a 2-level tree with at least two buckets. + for i in range(200): + t[i] = i + items, dummy = t.__getstate__() + self.assertTrue(len(items) > 2) # at least two buckets and a key + # All values in the first bucket are < firstkey. All in the + # second bucket are >= firstkey, and firstkey is the first key in + # the second bucket. + firstkey = items[1] + therange = t.keys(-1, firstkey) + self.assertEqual(len(therange), firstkey + 1) + self.assertEqual(list(therange), list(range(firstkey + 1))) + # Now for the tricky part. If we delete firstkey, the second bucket + # loses its smallest key, but firstkey remains in the BTree node. + # If we then do a high-end range search on firstkey, the BTree node + # directs us to look in the second bucket, but there's no longer any + # key <= firstkey in that bucket. The correct answer points to the + # end of the *first* bucket. 
The algorithm has to be smart enough + # to "go backwards" in the BTree then; if it doesn't, it will + # erroneously claim that the range is empty. + del t[firstkey] + therange = t.keys(min=-1, max=firstkey) + self.assertEqual(len(therange), firstkey) + self.assertEqual(list(therange), list(range(firstkey))) + self._checkIt(t) + + def testInsertMethod(self): + t = self._makeOne() + t[0] = 1 + self.assertEqual(t.insert(0, 1) , 0) + self.assertEqual(t.insert(1, 1) , 1) + self.assertEqual(lsubtract(list(t.keys()), [0,1]) , []) + self._checkIt(t) + + def testDamagedIterator(self): + # A cute one from Steve Alexander. This caused the BTreeItems + # object to go insane, accessing memory beyond the allocated part + # of the bucket. If it fails, the symptom is either a C-level + # assertion error (if the BTree code was compiled without NDEBUG), + # or most likely a segfault (if the BTree code was compiled with + # NDEBUG). + t = self._makeOne() + self._populate(t, 10) + # In order for this to fail, it's important that k be a "lazy" + # iterator, referring to the BTree by indirect position (index) + # instead of a fully materialized list. Then the position can + # end up pointing into trash memory, if the bucket pointed to + # shrinks. + k = t.keys() + for dummy in range(20): + try: + del t[k[0]] + except RuntimeError as detail: + self.assertEqual(str(detail), "the bucket being iterated " + "changed size") + break + except KeyError as v: + # The Python implementation behaves very differently and + # gives a key error in this situation. It can't mess up + # memory and can't readily detect changes to underlying buckets + # in any sane way. 
+ self.assertEqual(str(v), str(k[0])) + self._checkIt(t) + + def testAddTwoSetsChanged(self): + # A bug in the BTree Python implementation once + # caused adding a second item to a tree to fail + # to set _p_changed (adding the first item sets it because + # the _firstbucket gets set, but the second item only grew the + # existing bucket) + t = self._makeOne() + # Note that for the property to actually hold, we have to fake a + # _p_jar and _p_oid + t._p_oid = b'\0\0\0\0\0' + class Jar(object): + def __init__(self): + self._cache = self + self.registered = None + + def mru(self, arg): + pass + def readCurrent(self, arg): + pass + def register(self, arg): + self.registered = arg + + t._p_jar = Jar() + t[1] = 3 + # reset these, setting _firstbucket triggered a change + t._p_changed = False + t._p_jar.registered = None + t[2] = 4 + self.assertTrue(t._p_changed) + self.assertEqual(t, t._p_jar.registered) + + # Setting the same key to a different value also triggers a change + t._p_changed = False + t._p_jar.registered = None + t[2] = 5 + self.assertTrue(t._p_changed) + self.assertEqual(t, t._p_jar.registered) + + # Likewise with only a single value + t = self._makeOne() + t._p_oid = b'\0\0\0\0\0' + t._p_jar = Jar() + t[1] = 3 + # reset these, setting _firstbucket triggered a change + t._p_changed = False + t._p_jar.registered = None + + t[1] = 6 + self.assertTrue(t._p_changed) + self.assertEqual(t, t._p_jar.registered) + + def testRemoveInSmallMapSetsChanged(self): + # A bug in the BTree Python implementation once caused + # deleting from a small btree to set _p_changed. 
+ # There must be at least two objects so that _firstbucket doesn't + # get set + t = self._makeOne() + # Note that for the property to actually hold, we have to fake a + # _p_jar and _p_oid + t._p_oid = b'\0\0\0\0\0' + class Jar(object): + def __init__(self): + self._cache = self + self.registered = None + + def mru(self, arg): + pass + def readCurrent(self, arg): + pass + def register(self, arg): + self.registered = arg + + t._p_jar = Jar() + t[0] = 1 + t[1] = 2 + # reset these, setting _firstbucket triggered a change + t._p_changed = False + t._p_jar.registered = None + + # now remove the second value + del t[1] + self.assertTrue(t._p_changed) + self.assertEqual(t, t._p_jar.registered) + + def test_legacy_py_pickle(self): + # Issue #2 + # If we have a pickle that includes the 'Py' suffix, + # it (unfortunately) unpickles to the python type. But + # new pickles never produce that. + import pickle + made_one = self._makeOne() + + for proto in (1, 2): + s = pickle.dumps(made_one, proto) + # It's not legacy + assert b'TreePy\n' not in s, repr(s) + # \np for protocol 1, \nq for proto 2, + assert b'Tree\np' in s or b'Tree\nq' in s, repr(s) + + # Now make it pseudo-legacy + legacys = s.replace(b'Tree\np', b'TreePy\np').replace(b'Tree\nq', b'TreePy\nq') + + # It loads up as the specified class + loaded_one = pickle.loads(legacys) + + # It still functions and can be dumped again, as the original class + s2 = pickle.dumps(loaded_one, proto) + self.assertTrue(b'Py' not in s2) + self.assertEqual(s2, s) + + +class NormalSetTests(Base): + # Test common to all set types + + def _populate(self, t, l): + # Make some data + t.update(range(l)) + + def testShortRepr(self): + t = self._makeOne() + for i in range(5): + t.add(i) + r = repr(t) + # Make sure the repr is **not* 10000 bytes long for a shrort bucket. + # (the buffer must be terminated when copied). 
+ self.assertTrue(len(r) < 10000) + # Make sure the repr is human readable, unless it's a tree + if 'TreeSet' not in r: + self.assertTrue(r.endswith("Set(%r)" % t.keys())) + else: + self.assertEqual(r[:7], '= 3: + break + + transaction.commit() + + # Now, delete the internal key and make sure it's really gone + key = data[1] + del tree[key] + data = tree.__getstate__()[0] + self.assertTrue(data[1] != key) + + # The tree should have changed: + self.assertTrue(tree._p_changed) + + # Grow the btree until we have multiple levels + while 1: + i += 1 + self.add_key(tree, i) + data = tree.__getstate__()[0] + if data[0].__class__ == tree.__class__: + assert len(data[2].__getstate__()[0]) >= 3 + break + + # Now, delete the internal key and make sure it's really gone + key = data[1] + del tree[key] + data = tree.__getstate__()[0] + self.assertTrue(data[1] != key) + + transaction.abort() + db.close() + + +class InternalKeysSetTest(object): + # There must not be any internal keys not in the TreeSet + + def add_key(self, tree, key): + tree.add(key) + + +class ModuleTest(object): + # test for presence of generic names in module + prefix = None + def _getModule(self): + pass + def testNames(self): + names = ['Bucket', 'BTree', 'Set', 'TreeSet'] + for name in names: + klass = getattr(self._getModule(), name) + self.assertEqual(klass.__module__, self._getModule().__name__) + self.assertTrue(klass is getattr(self._getModule(), + self.prefix + name)) + # BBB for zope.app.security ZCML :( + pfx_iter = self.prefix + 'TreeIterator' + klass = getattr(self._getModule(), pfx_iter) + self.assertEqual(klass.__module__, self._getModule().__name__) + + def testModuleProvides(self): + from zope.interface.verify import verifyObject + verifyObject(self._getInterface(), self._getModule()) + + def testFamily(self): + import BTrees + if self.prefix == 'OO': + self.assertTrue( + getattr(self._getModule(), 'family', self) is self) + elif 'L' in self.prefix: + self.assertTrue(self._getModule().family 
is BTrees.family64) + elif 'I' in self.prefix: + self.assertTrue(self._getModule().family is BTrees.family32) + + +class TypeTest(object): + # tests of various type errors + + def testBadTypeRaises(self): + self.assertRaises(TypeError, self._stringraises) + self.assertRaises(TypeError, self._floatraises) + self.assertRaises(TypeError, self._noneraises) + + +class I_SetsBase(object): + + def setUp(self): + super(I_SetsBase, self).setUp() + _skip_if_pure_py_and_py_test(self) + + def testBadBadKeyAfterFirst(self): + t = self._makeOne() + self.assertRaises(TypeError, t.__class__, [1, '']) + self.assertRaises(TypeError, t.update, [1, '']) + + def testNonIntegerInsertRaises(self): + self.assertRaises(TypeError,self._insertstringraises) + self.assertRaises(TypeError,self._insertfloatraises) + self.assertRaises(TypeError,self._insertnoneraises) + + def _insertstringraises(self): + self._makeOne().insert('a') + + def _insertfloatraises(self): + self._makeOne().insert(1.4) + + def _insertnoneraises(self): + self._makeOne().insert(None) + + +LARGEST_32_BITS = 2147483647 +SMALLEST_32_BITS = -LARGEST_32_BITS - 1 + +SMALLEST_POSITIVE_33_BITS = LARGEST_32_BITS + 1 +LARGEST_NEGATIVE_33_BITS = SMALLEST_32_BITS - 1 + +LARGEST_64_BITS = 0x7fffffffffffffff +SMALLEST_64_BITS = -LARGEST_64_BITS - 1 + +SMALLEST_POSITIVE_65_BITS = LARGEST_64_BITS + 1 +LARGEST_NEGATIVE_65_BITS = SMALLEST_64_BITS - 1 + + +class TestLongIntSupport(object): + + def getTwoValues(self): + # Return two distinct values; these must compare as un-equal. + # + # These values must be usable as values. + return object(), object() + + def getTwoKeys(self): + # Return two distinct values, these must compare as un-equal. + # + #These values must be usable as keys. 
+ return 0, 1 + + def _set_value(self, key, value): + t = self._makeOne() + t[key] = value + + +class TestLongIntKeys(TestLongIntSupport): + + def _makeLong(self, v): + try: + return long(v) + except NameError: #pragma NO COVER Py3k + return int(v) + + def testLongIntKeysWork(self): + from BTrees.IIBTree import using64bits + if not using64bits: + return + t = self._makeOne() + o1, o2 = self.getTwoValues() + assert o1 != o2 + + # Test some small key values first: + zero_long = self._makeLong(0) + t[zero_long] = o1 + self.assertEqual(t[0], o1) + t[0] = o2 + self.assertEqual(t[zero_long], o2) + self.assertEqual(list(t.keys()), [0]) + self.assertEqual(list(t.keys(None,None)), [0]) + + # Test some large key values too: + k1 = SMALLEST_POSITIVE_33_BITS + k2 = LARGEST_64_BITS + k3 = SMALLEST_64_BITS + t[k1] = o1 + t[k2] = o2 + t[k3] = o1 + self.assertEqual(t[k1], o1) + self.assertEqual(t[k2], o2) + self.assertEqual(t[k3], o1) + self.assertEqual(list(t.keys()), [k3, 0, k1, k2]) + self.assertEqual(list(t.keys(k3,None)), [k3, 0, k1, k2]) + self.assertEqual(list(t.keys(None,k2)), [k3, 0, k1, k2]) + + def testLongIntKeysOutOfRange(self): + from BTrees.IIBTree import using64bits + if not using64bits: + return + o1, o2 = self.getTwoValues() + self.assertRaises( + ValueError, + self._set_value, SMALLEST_POSITIVE_65_BITS, o1) + self.assertRaises( + ValueError, + self._set_value, LARGEST_NEGATIVE_65_BITS, o1) + +class TestLongIntValues(TestLongIntSupport): + + def testLongIntValuesWork(self): + from BTrees.IIBTree import using64bits + if not using64bits: + return + t = self._makeOne() + keys = sorted(self.getTwoKeys()) + k1, k2 = keys + assert k1 != k2 + + # This is the smallest positive integer that requires 33 bits: + v1 = SMALLEST_POSITIVE_33_BITS + v2 = v1 + 1 + + t[k1] = v1 + t[k2] = v2 + self.assertEqual(t[k1], v1) + self.assertEqual(t[k2], v2) + self.assertEqual(list(t.values()), [v1, v2]) + self.assertEqual(list(t.values(None,None)), [v1, v2]) + + def 
testLongIntValuesOutOfRange(self): + from BTrees.IIBTree import using64bits + if not using64bits: + return + k1, k2 = self.getTwoKeys() + self.assertRaises( + ValueError, + self._set_value, k1, SMALLEST_POSITIVE_65_BITS) + self.assertRaises( + ValueError, + self._set_value, k1, LARGEST_NEGATIVE_65_BITS) + +# Given a mapping builder (IIBTree, OOBucket, etc), return a function +# that builds an object of that type given only a list of keys. +def makeBuilder(mapbuilder): + def result(keys=[], mapbuilder=mapbuilder): + return mapbuilder(list(zip(keys, keys))) + return result + +# Subclasses have to set up: +# builders() - function returning functions to build inputs, +# each returned callable tkes an optional keys arg +# intersection, union, difference - set to the type-correct versions +class SetResult(object): + def setUp(self): + super(SetResult, self).setUp() + _skip_if_pure_py_and_py_test(self) + + self.Akeys = [1, 3, 5, 6 ] + self.Bkeys = [ 2, 3, 4, 6, 7] + self.As = [makeset(self.Akeys) for makeset in self.builders()] + self.Bs = [makeset(self.Bkeys) for makeset in self.builders()] + self.emptys = [makeset() for makeset in self.builders()] + + # Slow but obviously correct Python implementations of basic ops. + def _union(self, x, y): + result = list(x) + for e in y: + if e not in result: + result.append(e) + return sorted(result) + + def _intersection(self, x, y): + result = [] + for e in x: + if e in y: + result.append(e) + return result + + def _difference(self, x, y): + result = list(x) + for e in y: + if e in result: + result.remove(e) + # Difference preserves LHS values. 
+ if hasattr(x, "values"): + result = [(k, x[k]) for k in result] + return result + + def testNone(self): + for op in self.union, self.intersection, self.difference: + C = op(None, None) + self.assertTrue(C is None) + + for op in self.union, self.intersection, self.difference: + for A in self.As: + C = op(A, None) + self.assertTrue(C is A) + + C = op(None, A) + if op == self.difference: + self.assertTrue(C is None) + else: + self.assertTrue(C is A) + + def testEmptyUnion(self): + for A in self.As: + for E in self.emptys: + C = self.union(A, E) + self.assertTrue(not hasattr(C, "values")) + self.assertEqual(list(C), self.Akeys) + + C = self.union(E, A) + self.assertTrue(not hasattr(C, "values")) + self.assertEqual(list(C), self.Akeys) + + def testEmptyIntersection(self): + for A in self.As: + for E in self.emptys: + C = self.intersection(A, E) + self.assertTrue(not hasattr(C, "values")) + self.assertEqual(list(C), []) + + C = self.intersection(E, A) + self.assertTrue(not hasattr(C, "values")) + self.assertEqual(list(C), []) + + def testEmptyDifference(self): + for A in self.As: + for E in self.emptys: + C = self.difference(A, E) + # Difference preserves LHS values. 
+ self.assertEqual(hasattr(C, "values"), hasattr(A, "values")) + if hasattr(A, "values"): + self.assertEqual(list(C.items()), list(A.items())) + else: + self.assertEqual(list(C), self.Akeys) + + C = self.difference(E, A) + self.assertEqual(hasattr(C, "values"), hasattr(E, "values")) + self.assertEqual(list(C), []) + + def testUnion(self): + inputs = self.As + self.Bs + for A in inputs: + for B in inputs: + C = self.union(A, B) + self.assertTrue(not hasattr(C, "values")) + self.assertEqual(list(C), self._union(A, B)) + + def testIntersection(self): + inputs = self.As + self.Bs + for A in inputs: + for B in inputs: + C = self.intersection(A, B) + self.assertTrue(not hasattr(C, "values")) + self.assertEqual(list(C), self._intersection(A, B)) + + def testDifference(self): + inputs = self.As + self.Bs + for A in inputs: + for B in inputs: + C = self.difference(A, B) + # Difference preserves LHS values. + self.assertEqual(hasattr(C, "values"), hasattr(A, "values")) + want = self._difference(A, B) + if hasattr(A, "values"): + self.assertEqual(list(C.items()), want) + else: + self.assertEqual(list(C), want) + + def testLargerInputs(self): + from BTrees.IIBTree import IISet + from random import randint + MAXSIZE = 200 + MAXVAL = 400 + for i in range(3): + n = randint(0, MAXSIZE) + Akeys = [randint(1, MAXVAL) for j in range(n)] + As = [makeset(Akeys) for makeset in self.builders()] + Akeys = IISet(Akeys) + + n = randint(0, MAXSIZE) + Bkeys = [randint(1, MAXVAL) for j in range(n)] + Bs = [makeset(Bkeys) for makeset in self.builders()] + Bkeys = IISet(Bkeys) + + for op, simulator in ((self.union, self._union), + (self.intersection, self._intersection), + (self.difference, self._difference)): + for A in As: + for B in Bs: + got = op(A, B) + want = simulator(Akeys, Bkeys) + self.assertEqual(list(got), want, + (A, B, Akeys, Bkeys, list(got), want)) + +# Subclasses must set up (as class variables): +# weightedUnion, weightedIntersection +# builders -- sequence of constructors, 
taking items +# union, intersection -- the module routines of those names +# mkbucket -- the module bucket builder +class Weighted(object): + + def setUp(self): + self.Aitems = [(1, 10), (3, 30), (5, 50), (6, 60)] + self.Bitems = [(2, 21), (3, 31), (4, 41), (6, 61), (7, 71)] + + self.As = [make(self.Aitems) for make in self.builders()] + self.Bs = [make(self.Bitems) for make in self.builders()] + self.emptys = [make([]) for make in self.builders()] + + weights = [] + for w1 in -3, -1, 0, 1, 7: + for w2 in -3, -1, 0, 1, 7: + weights.append((w1, w2)) + self.weights = weights + + def testBothNone(self): + for op in self.weightedUnion(), self.weightedIntersection(): + w, C = op(None, None) + self.assertTrue(C is None) + self.assertEqual(w, 0) + + w, C = op(None, None, 42, 666) + self.assertTrue(C is None) + self.assertEqual(w, 0) + + def testLeftNone(self): + for op in self.weightedUnion(), self.weightedIntersection(): + for A in self.As + self.emptys: + w, C = op(None, A) + self.assertTrue(C is A) + self.assertEqual(w, 1) + + w, C = op(None, A, 42, 666) + self.assertTrue(C is A) + self.assertEqual(w, 666) + + def testRightNone(self): + for op in self.weightedUnion(), self.weightedIntersection(): + for A in self.As + self.emptys: + w, C = op(A, None) + self.assertTrue(C is A) + self.assertEqual(w, 1) + + w, C = op(A, None, 42, 666) + self.assertTrue(C is A) + self.assertEqual(w, 42) + + # If obj is a set, return a bucket with values all 1; else return obj. + def _normalize(self, obj): + if isaset(obj): + obj = self.mkbucket(list(zip(obj, [1] * len(obj)))) + return obj + + # Python simulation of weightedUnion. 
+ def _wunion(self, A, B, w1=1, w2=1): + if isaset(A) and isaset(B): + return 1, self.union()(A, B).keys() + A = self._normalize(A) + B = self._normalize(B) + result = [] + for key in self.union()(A, B): + v1 = A.get(key, 0) + v2 = B.get(key, 0) + result.append((key, v1*w1 + v2*w2)) + return 1, result + + def testUnion(self): + inputs = self.As + self.Bs + self.emptys + for A in inputs: + for B in inputs: + want_w, want_s = self._wunion(A, B) + got_w, got_s = self.weightedUnion()(A, B) + self.assertEqual(got_w, want_w) + if isaset(got_s): + self.assertEqual(got_s.keys(), want_s) + else: + self.assertEqual(got_s.items(), want_s) + + for w1, w2 in self.weights: + want_w, want_s = self._wunion(A, B, w1, w2) + got_w, got_s = self.weightedUnion()(A, B, w1, w2) + self.assertEqual(got_w, want_w) + if isaset(got_s): + self.assertEqual(got_s.keys(), want_s) + else: + self.assertEqual(got_s.items(), want_s) + + # Python simulation weightedIntersection. + def _wintersection(self, A, B, w1=1, w2=1): + if isaset(A) and isaset(B): + return w1 + w2, self.intersection()(A, B).keys() + A = self._normalize(A) + B = self._normalize(B) + result = [] + for key in self.intersection()(A, B): + result.append((key, A[key]*w1 + B[key]*w2)) + return 1, result + + def testIntersection(self): + inputs = self.As + self.Bs + self.emptys + for A in inputs: + for B in inputs: + want_w, want_s = self._wintersection(A, B) + got_w, got_s = self.weightedIntersection()(A, B) + self.assertEqual(got_w, want_w) + if isaset(got_s): + self.assertEqual(got_s.keys(), want_s) + else: + self.assertEqual(got_s.items(), want_s) + + for w1, w2 in self.weights: + want_w, want_s = self._wintersection(A, B, w1, w2) + got_w, got_s = self.weightedIntersection()(A, B, w1, w2) + self.assertEqual(got_w, want_w) + if isaset(got_s): + self.assertEqual(got_s.keys(), want_s) + else: + self.assertEqual(got_s.items(), want_s) + +# Given a set builder (like OITreeSet or OISet), return a function that +# takes a list of (key, 
value) pairs and builds a set out of the keys. +def itemsToSet(setbuilder): + def result(items, setbuilder=setbuilder): + return setbuilder([key for key, value in items]) + return result + +# 'thing' is a bucket, btree, set or treeset. Return true iff it's one of the +# latter two. +def isaset(thing): + return not hasattr(thing, 'values') + +# Subclasses must set up (as class variables): +# multiunion, union +# mkset, mktreeset +# mkbucket, mkbtree +class MultiUnion(object): + + def setUp(self): + super(MultiUnion, self).setUp() + _skip_if_pure_py_and_py_test(self) + + def testEmpty(self): + self.assertEqual(len(self.multiunion([])), 0) + + def testOne(self): + for sequence in ([3], + list(range(20)), + list(range(-10, 0, 2)) + list(range(1, 10, 2)), + ): + seq1 = sequence[:] + seq2 = list(reversed(sequence[:])) + seqsorted = sorted(sequence[:]) + for seq in seq1, seq2, seqsorted: + for builder in self.mkset, self.mktreeset: + input = builder(seq) + output = self.multiunion([input]) + self.assertEqual(len(seq), len(output)) + self.assertEqual(seqsorted, list(output)) + + def testValuesIgnored(self): + for builder in self.mkbucket, self.mkbtree: + input = builder([(1, 2), (3, 4), (5, 6)]) + output = self.multiunion([input]) + self.assertEqual([1, 3, 5], list(output)) + + def testBigInput(self): + N = 100000 + if (PURE_PYTHON or 'Py' in type(self).__name__) and not PYPY: + # This is extremely slow in CPython implemented in Python, + # taking 20s or more on a 2015-era laptop + N = N // 10 + input = self.mkset(list(range(N))) + output = self.multiunion([input] * 10) + self.assertEqual(len(output), N) + self.assertEqual(output.minKey(), 0) + self.assertEqual(output.maxKey(), N-1) + self.assertEqual(list(output), list(range(N))) + + def testLotsOfLittleOnes(self): + from random import shuffle + N = 5000 + inputs = [] + mkset, mktreeset = self.mkset, self.mktreeset + for i in range(N): + base = i * 4 - N + inputs.append(mkset([base, base+1])) + 
inputs.append(mktreeset([base+2, base+3])) + shuffle(inputs) + output = self.multiunion(inputs) + self.assertEqual(len(output), N*4) + self.assertEqual(list(output), list(range(-N, 3*N))) + + def testFunkyKeyIteration(self): + # The internal set iteration protocol allows "iterating over" a + # a single key as if it were a set. + N = 100 + union, mkset = self.union, self.mkset + slow = mkset() + for i in range(N): + slow = union(slow, mkset([i])) + fast = self.multiunion(list(range(N))) # like N distinct singleton sets + self.assertEqual(len(slow), N) + self.assertEqual(len(fast), N) + self.assertEqual(list(slow), list(fast)) + self.assertEqual(list(fast), list(range(N))) + + +class ConflictTestBase(object): + # Tests common to all types: sets, buckets, and BTrees + + storage = None + + def setUp(self): + super(ConflictTestBase, self).setUp() + _skip_if_pure_py_and_py_test(self) + + def tearDown(self): + import transaction + transaction.abort() + if self.storage is not None: + self.storage.close() + self.storage.cleanup() + + def _makeOne(self): + return self._getTargetClass()() + + def openDB(self): + import os + from ZODB.FileStorage import FileStorage + from ZODB.DB import DB + n = 'fs_tmp__%s' % os.getpid() + self.storage = FileStorage(n) + self.db = DB(self.storage) + return self.db + + +def _test_merge(o1, o2, o3, expect, message='failed to merge', should_fail=0): + from BTrees.Interfaces import BTreesConflictError + s1 = o1.__getstate__() + s2 = o2.__getstate__() + s3 = o3.__getstate__() + expected = expect.__getstate__() + if expected is None: + expected = ((((),),),) + + if should_fail: + try: + merged = o1._p_resolveConflict(s1, s2, s3) + except BTreesConflictError as err: + pass + else: + assert 0, message + else: + merged = o1._p_resolveConflict(s1, s2, s3) + assert merged == expected, message + + +class MappingConflictTestBase(ConflictTestBase): + # Tests common to mappings (buckets, btrees). 
+ + def _deletefail(self): + t = self._makeOne() + del t[1] + + def _setupConflict(self): + + l=[ -5124, -7377, 2274, 8801, -9901, 7327, 1565, 17, -679, + 3686, -3607, 14, 6419, -5637, 6040, -4556, -8622, 3847, 7191, + -4067] + + + e1=[(-1704, 0), (5420, 1), (-239, 2), (4024, 3), (-6984, 4)] + e2=[(7745, 0), (4868, 1), (-2548, 2), (-2711, 3), (-3154, 4)] + + + base = self._makeOne() + base.update([(i, i*i) for i in l[:20]]) + b1 = type(base)(base) + b2 = type(base)(base) + bm = type(base)(base) + + items=base.items() + + return base, b1, b2, bm, e1, e2, items + + def testMergeDelete(self): + base, b1, b2, bm, e1, e2, items = self._setupConflict() + del b1[items[1][0]] + del b2[items[5][0]] + del b1[items[-1][0]] + del b2[items[-2][0]] + del bm[items[1][0]] + del bm[items[5][0]] + del bm[items[-1][0]] + del bm[items[-2][0]] + _test_merge(base, b1, b2, bm, 'merge delete') + + def testMergeDeleteAndUpdate(self): + base, b1, b2, bm, e1, e2, items = self._setupConflict() + del b1[items[1][0]] + b2[items[5][0]]=1 + del b1[items[-1][0]] + b2[items[-2][0]]=2 + del bm[items[1][0]] + bm[items[5][0]]=1 + del bm[items[-1][0]] + bm[items[-2][0]]=2 + _test_merge(base, b1, b2, bm, 'merge update and delete') + + def testMergeUpdate(self): + base, b1, b2, bm, e1, e2, items = self._setupConflict() + b1[items[0][0]]=1 + b2[items[5][0]]=2 + b1[items[-1][0]]=3 + b2[items[-2][0]]=4 + bm[items[0][0]]=1 + bm[items[5][0]]=2 + bm[items[-1][0]]=3 + bm[items[-2][0]]=4 + _test_merge(base, b1, b2, bm, 'merge update') + + def testFailMergeDelete(self): + base, b1, b2, bm, e1, e2, items = self._setupConflict() + del b1[items[0][0]] + del b2[items[0][0]] + _test_merge(base, b1, b2, bm, 'merge conflicting delete', + should_fail=1) + + def testFailMergeUpdate(self): + base, b1, b2, bm, e1, e2, items = self._setupConflict() + b1[items[0][0]]=1 + b2[items[0][0]]=2 + _test_merge(base, b1, b2, bm, 'merge conflicting update', + should_fail=1) + + def testFailMergeDeleteAndUpdate(self): + base, b1, b2, 
bm, e1, e2, items = self._setupConflict() + del b1[items[0][0]] + b2[items[0][0]]=-9 + _test_merge(base, b1, b2, bm, 'merge conflicting update and delete', + should_fail=1) + + def testMergeInserts(self): + base, b1, b2, bm, e1, e2, items = self._setupConflict() + + b1[-99999]=-99999 + b1[e1[0][0]]=e1[0][1] + b2[99999]=99999 + b2[e1[2][0]]=e1[2][1] + + bm[-99999]=-99999 + bm[e1[0][0]]=e1[0][1] + bm[99999]=99999 + bm[e1[2][0]]=e1[2][1] + _test_merge(base, b1, b2, bm, 'merge insert') + + def testMergeInsertsFromEmpty(self): + base, b1, b2, bm, e1, e2, items = self._setupConflict() + + base.clear() + b1.clear() + b2.clear() + bm.clear() + + b1.update(e1) + bm.update(e1) + b2.update(e2) + bm.update(e2) + + _test_merge(base, b1, b2, bm, 'merge insert from empty') + + def testFailMergeEmptyAndFill(self): + base, b1, b2, bm, e1, e2, items = self._setupConflict() + + b1.clear() + bm.clear() + b2.update(e2) + bm.update(e2) + + _test_merge(base, b1, b2, bm, 'merge insert from empty', should_fail=1) + + def testMergeEmpty(self): + base, b1, b2, bm, e1, e2, items = self._setupConflict() + + b1.clear() + bm.clear() + + _test_merge(base, b1, b2, bm, 'empty one and not other', should_fail=1) + + def testFailMergeInsert(self): + base, b1, b2, bm, e1, e2, items = self._setupConflict() + b1[-99999]=-99999 + b1[e1[0][0]]=e1[0][1] + b2[99999]=99999 + b2[e1[0][0]]=e1[0][1] + _test_merge(base, b1, b2, bm, 'merge conflicting inserts', + should_fail=1) + +class SetConflictTestBase(ConflictTestBase): + "Set (as opposed to TreeSet) specific tests." 
+ + def _setupConflict(self): + l=[ -5124, -7377, 2274, 8801, -9901, 7327, 1565, 17, -679, + 3686, -3607, 14, 6419, -5637, 6040, -4556, -8622, 3847, 7191, + -4067] + + e1=[-1704, 5420, -239, 4024, -6984] + e2=[7745, 4868, -2548, -2711, -3154] + + + base = self._makeOne() + base.update(l) + b1=base.__class__(base) + b2=base.__class__(base) + bm=base.__class__(base) + + items=base.keys() + + return base, b1, b2, bm, e1, e2, items + + def testMergeDelete(self): + base, b1, b2, bm, e1, e2, items = self._setupConflict() + b1.remove(items[1]) + b2.remove(items[5]) + b1.remove(items[-1]) + b2.remove(items[-2]) + bm.remove(items[1]) + bm.remove(items[5]) + bm.remove(items[-1]) + bm.remove(items[-2]) + _test_merge(base, b1, b2, bm, 'merge delete') + + def testFailMergeDelete(self): + base, b1, b2, bm, e1, e2, items = self._setupConflict() + b1.remove(items[0]) + b2.remove(items[0]) + _test_merge(base, b1, b2, bm, 'merge conflicting delete', + should_fail=1) + + def testMergeInserts(self): + base, b1, b2, bm, e1, e2, items = self._setupConflict() + + b1.insert(-99999) + b1.insert(e1[0]) + b2.insert(99999) + b2.insert(e1[2]) + + bm.insert(-99999) + bm.insert(e1[0]) + bm.insert(99999) + bm.insert(e1[2]) + _test_merge(base, b1, b2, bm, 'merge insert') + + def testMergeInsertsFromEmpty(self): + base, b1, b2, bm, e1, e2, items = self._setupConflict() + + base.clear() + b1.clear() + b2.clear() + bm.clear() + + b1.update(e1) + bm.update(e1) + b2.update(e2) + bm.update(e2) + + _test_merge(base, b1, b2, bm, 'merge insert from empty') + + def testFailMergeEmptyAndFill(self): + base, b1, b2, bm, e1, e2, items = self._setupConflict() + + b1.clear() + bm.clear() + b2.update(e2) + bm.update(e2) + + _test_merge(base, b1, b2, bm, 'merge insert from empty', should_fail=1) + + def testMergeEmpty(self): + base, b1, b2, bm, e1, e2, items = self._setupConflict() + + b1.clear() + bm.clear() + + _test_merge(base, b1, b2, bm, 'empty one and not other', should_fail=1) + + def 
testFailMergeInsert(self): + base, b1, b2, bm, e1, e2, items = self._setupConflict() + b1.insert(-99999) + b1.insert(e1[0]) + b2.insert(99999) + b2.insert(e1[0]) + _test_merge(base, b1, b2, bm, 'merge conflicting inserts', + should_fail=1) + + +## utility functions + +def lsubtract(l1, l2): + l1 = list(l1) + l2 = list(l2) + return (list(filter(lambda x, l1=l1: x not in l1, l2)) + + list(filter(lambda x, l2=l2: x not in l2, l1))) + +def realseq(itemsob): + return [x for x in itemsob] + +def permutations(x): + # Return a list of all permutations of list x. + n = len(x) + if n <= 1: + return [x] + result = [] + x0 = x[0] + for i in range(n): + # Build the (n-1)! permutations with x[i] in the first position. + xcopy = x[:] + first, xcopy[i] = xcopy[i], x0 + result.extend([[first] + p for p in permutations(xcopy[1:])]) + return result diff --git a/thesisenv/lib/python3.6/site-packages/BTrees/tests/testBTrees.py b/thesisenv/lib/python3.6/site-packages/BTrees/tests/testBTrees.py new file mode 100644 index 0000000..7c9ac2c --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/BTrees/tests/testBTrees.py @@ -0,0 +1,534 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## +import unittest + +from BTrees.tests.common import permutations + + +class DegenerateBTree(unittest.TestCase): + # Build a degenerate tree (set). Boxes are BTree nodes. 
There are + # 5 leaf buckets, each containing a single int. Keys in the BTree + # nodes don't appear in the buckets. Seven BTree nodes are purely + # indirection nodes (no keys). Buckets aren't all at the same depth: + # + # +------------------------+ + # | 4 | + # +------------------------+ + # | | + # | v + # | +-+ + # | | | + # | +-+ + # | | + # v v + # +-------+ +-------------+ + # | 2 | | 6 10 | + # +-------+ +-------------+ + # | | | | | + # v v v v v + # +-+ +-+ +-+ +-+ +-+ + # | | | | | | | | | | + # +-+ +-+ +-+ +-+ +-+ + # | | | | | + # v v v v v + # 1 3 +-+ 7 11 + # | | + # +-+ + # | + # v + # 5 + # + # This is nasty for many algorithms. Consider a high-end range search + # for 4. The BTree nodes direct it to the 5 bucket, but the correct + # answer is the 3 bucket, which requires going in a different direction + # at the very top node already. Consider a low-end range search for + # 9. The BTree nodes direct it to the 7 bucket, but the correct answer + # is the 11 bucket. This is also a nasty-case tree for deletions. + + def _build_degenerate_tree(self): + # Build the buckets and chain them together. + from BTrees.IIBTree import IISet + from BTrees.IIBTree import IITreeSet + from BTrees.check import check + bucket11 = IISet([11]) + + bucket7 = IISet() + bucket7.__setstate__(((7,), bucket11)) + + bucket5 = IISet() + bucket5.__setstate__(((5,), bucket7)) + + bucket3 = IISet() + bucket3.__setstate__(((3,), bucket5)) + + bucket1 = IISet() + bucket1.__setstate__(((1,), bucket3)) + + # Build the deepest layers of indirection nodes. + ts = IITreeSet + tree1 = ts() + tree1.__setstate__(((bucket1,), bucket1)) + + tree3 = ts() + tree3.__setstate__(((bucket3,), bucket3)) + + tree5lower = ts() + tree5lower.__setstate__(((bucket5,), bucket5)) + tree5 = ts() + tree5.__setstate__(((tree5lower,), bucket5)) + + tree7 = ts() + tree7.__setstate__(((bucket7,), bucket7)) + + tree11 = ts() + tree11.__setstate__(((bucket11,), bucket11)) + + # Paste together the middle layers. 
+ tree13 = ts() + tree13.__setstate__(((tree1, 2, tree3), bucket1)) + + tree5711lower = ts() + tree5711lower.__setstate__(((tree5, 6, tree7, 10, tree11), bucket5)) + tree5711 = ts() + tree5711.__setstate__(((tree5711lower,), bucket5)) + + # One more. + t = ts() + t.__setstate__(((tree13, 4, tree5711), bucket1)) + t._check() + check(t) + return t, [1, 3, 5, 7, 11] + + def testBasicOps(self): + t, keys = self._build_degenerate_tree() + self.assertEqual(len(t), len(keys)) + self.assertEqual(list(t.keys()), keys) + # has_key actually returns the depth of a bucket. + self.assertEqual(t.has_key(1), 4) + self.assertEqual(t.has_key(3), 4) + self.assertEqual(t.has_key(5), 6) + self.assertEqual(t.has_key(7), 5) + self.assertEqual(t.has_key(11), 5) + for i in 0, 2, 4, 6, 8, 9, 10, 12: + self.assertTrue(i not in t) + + def _checkRanges(self, tree, keys): + self.assertEqual(len(tree), len(keys)) + sorted_keys = keys[:] + sorted_keys.sort() + self.assertEqual(list(tree.keys()), sorted_keys) + for k in keys: + self.assertTrue(k in tree) + if keys: + lokey = sorted_keys[0] + hikey = sorted_keys[-1] + self.assertEqual(lokey, tree.minKey()) + self.assertEqual(hikey, tree.maxKey()) + else: + lokey = hikey = 42 + + # Try all range searches. + for lo in range(lokey - 1, hikey + 2): + for hi in range(lo - 1, hikey + 2): + for skipmin in False, True: + for skipmax in False, True: + wantlo, wanthi = lo, hi + if skipmin: + wantlo += 1 + if skipmax: + wanthi -= 1 + want = [k for k in keys if wantlo <= k <= wanthi] + got = list(tree.keys(lo, hi, skipmin, skipmax)) + self.assertEqual(want, got) + + def testRanges(self): + t, keys = self._build_degenerate_tree() + self._checkRanges(t, keys) + + def testDeletes(self): + # Delete keys in all possible orders, checking each tree along + # the way. + + # This is a tough test. Previous failure modes included: + # 1. A variety of assertion failures in _checkRanges. + # 2. 
Assorted "Invalid firstbucket pointer" failures at + # seemingly random times, coming out of the BTree destructor. + # 3. Under Python 2.3 CVS, some baffling + # RuntimeWarning: tp_compare didn't return -1 or -2 for exception + # warnings, possibly due to memory corruption after a BTree + # goes insane. + # On CPython in PURE_PYTHON mode, this is a *slow* test, taking 15+s + # on a 2015 laptop. + from BTrees.check import check + t, keys = self._build_degenerate_tree() + for oneperm in permutations(keys): + t, keys = self._build_degenerate_tree() + for key in oneperm: + t.remove(key) + keys.remove(key) + t._check() + check(t) + self._checkRanges(t, keys) + # We removed all the keys, so the tree should be empty now. + self.assertEqual(t.__getstate__(), None) + + # A damaged tree may trigger an "invalid firstbucket pointer" + # failure at the time its destructor is invoked. Try to force + # that to happen now, so it doesn't look like a baffling failure + # at some unrelated line. + del t # trigger destructor + + +LP294788_ids = {} + + +class ToBeDeleted(object): + def __init__(self, id): + assert isinstance(id, int) #we don't want to store any object ref here + self.id = id + + global LP294788_ids + LP294788_ids[id] = 1 + + def __del__(self): + global LP294788_ids + LP294788_ids.pop(self.id, None) + + def __cmp__(self, other): + return cmp(self.id, other.id) + + def __le__(self, other): + return self.id <= other.id + + def __lt__(self, other): + return self.id < other.id + + def __eq__(self, other): + return self.id == other.id + + def __ne__(self, other): + return self.id != other.id + + def __gt__(self, other): + return self.id > other.id + + def __ge__(self, other): + return self.id >= other.id + + def __hash__(self): + return hash(self.id) + + +class BugFixes(unittest.TestCase): + + # Collector 1843. Error returns were effectively ignored in + # Bucket_rangeSearch(), leading to "delayed" errors, or worse. 
+ def testFixed1843(self): + from BTrees.IIBTree import IISet + t = IISet() + t.insert(1) + # This one used to fail to raise the TypeError when it occurred. + self.assertRaises(TypeError, t.keys, "") + # This one used to segfault. + self.assertRaises(TypeError, t.keys, 0, "") + + def test_LP294788(self): + # https://bugs.launchpad.net/bugs/294788 + # BTree keeps some deleted objects referenced + + # The logic here together with the ToBeDeleted class is that + # a separate reference dict is populated on object creation + # and removed in __del__ + # That means what's left in the reference dict is never GC'ed + # therefore referenced somewhere + # To simulate real life, some random data is used to exercise the tree + import gc + import random + from BTrees.OOBTree import OOBTree + + t = OOBTree() + + trandom = random.Random('OOBTree') + + global LP294788_ids + + # /// BTree keys are integers, value is an object + LP294788_ids = {} + ids = {} + for i in range(1024): + if trandom.random() > 0.1 or not ids: + #add + id = None + while id is None or id in ids: + id = trandom.randint(0, 1000000) + + ids[id] = 1 + t[id] = ToBeDeleted(id) + else: + #del + keys = list(ids.keys()) + if keys: + id = trandom.choice(list(ids.keys())) + del t[id] + del ids[id] + + ids = ids.keys() + trandom.shuffle(list(ids)) + for id in ids: + del t[id] + ids = None + + #to be on the safe side run a full GC + gc.collect() + + #print LP294788_ids + + self.assertEqual(len(t), 0) + self.assertEqual(len(LP294788_ids), 0) + # \\\ + + # /// BTree keys are integers, value is a tuple having an object + LP294788_ids = {} + ids = {} + for i in range(1024): + if trandom.random() > 0.1 or not ids: + #add + id = None + while id is None or id in ids: + id = trandom.randint(0, 1000000) + + ids[id] = 1 + t[id] = (id, ToBeDeleted(id), u'somename') + else: + #del + keys = list(ids.keys()) + if keys: + id = trandom.choice(keys) + del t[id] + del ids[id] + + ids = ids.keys() + trandom.shuffle(list(ids)) + for id in 
ids: + del t[id] + ids = None + + #to be on the safe side run a full GC + gc.collect() + + #print LP294788_ids + + self.assertEqual(len(t), 0) + self.assertEqual(len(LP294788_ids), 0) + # \\\ + + + # /// BTree keys are objects, value is an int + t = OOBTree() + LP294788_ids = {} + ids = {} + for i in range(1024): + if trandom.random() > 0.1 or not ids: + #add + id = None + while id is None or id in ids: + id = ToBeDeleted(trandom.randint(0, 1000000)) + + ids[id] = 1 + t[id] = 1 + else: + #del + id = trandom.choice(list(ids.keys())) + del ids[id] + del t[id] + + ids = ids.keys() + trandom.shuffle(list(ids)) + for id in ids: + del t[id] + #release all refs + ids = id = None + + #to be on the safe side run a full GC + gc.collect() + + #print LP294788_ids + + self.assertEqual(len(t), 0) + self.assertEqual(len(LP294788_ids), 0) + + # /// BTree keys are tuples having objects, value is an int + t = OOBTree() + LP294788_ids = {} + ids = {} + for i in range(1024): + if trandom.random() > 0.1 or not ids: + #add + id = None + while id is None or id in ids: + id = trandom.randint(0, 1000000) + id = (id, ToBeDeleted(id), u'somename') + + ids[id] = 1 + t[id] = 1 + else: + #del + id = trandom.choice(list(ids.keys())) + del ids[id] + del t[id] + + ids = ids.keys() + trandom.shuffle(list(ids)) + for id in ids: + del t[id] + #release all refs + ids = id = key = None + + #to be on the safe side run a full GC + gc.collect() + + #print LP294788_ids + + self.assertEqual(len(t), 0) + self.assertEqual(len(LP294788_ids), 0) + + +# cmp error propagation tests + + +class DoesntLikeBeingCompared: + + def __cmp__(self, other): + raise ValueError('incomparable') + __lt__ = __le__ = __eq__ = __ne__ = __ge__ = __gt__ = __cmp__ + +class TestCmpError(unittest.TestCase): + + def testFoo(self): + from BTrees.OOBTree import OOBTree + t = OOBTree() + t['hello world'] = None + try: + t[DoesntLikeBeingCompared()] = None + except ValueError as e: + self.assertEqual(str(e), 'incomparable') + else: + 
self.fail('incomarable objects should not be allowed into ' + 'the tree') + + +class FamilyTest(unittest.TestCase): + def test32(self): + from zope.interface.verify import verifyObject + import BTrees + from BTrees.IOBTree import IOTreeSet + verifyObject(BTrees.Interfaces.IBTreeFamily, BTrees.family32) + self.assertEqual( + BTrees.family32.IO, BTrees.IOBTree) + self.assertEqual( + BTrees.family32.OI, BTrees.OIBTree) + self.assertEqual( + BTrees.family32.II, BTrees.IIBTree) + self.assertEqual( + BTrees.family32.IF, BTrees.IFBTree) + self.assertEqual( + BTrees.family32.OO, BTrees.OOBTree) + s = IOTreeSet() + s.insert(BTrees.family32.maxint) + self.assertTrue(BTrees.family32.maxint in s) + s = IOTreeSet() + s.insert(BTrees.family32.minint) + self.assertTrue(BTrees.family32.minint in s) + s = IOTreeSet() + # this next bit illustrates an, um, "interesting feature". If + # the characteristics change to match the 64 bit version, please + # feel free to change. + with self.assertRaises((TypeError, OverflowError)): + s.insert(BTrees.family32.maxint + 1) + + with self.assertRaises((TypeError, OverflowError)): + s.insert(BTrees.family32.minint - 1) + self.check_pickling(BTrees.family32) + + def test64(self): + from zope.interface.verify import verifyObject + import BTrees + from BTrees.LOBTree import LOTreeSet + verifyObject(BTrees.Interfaces.IBTreeFamily, BTrees.family64) + self.assertEqual( + BTrees.family64.IO, BTrees.LOBTree) + self.assertEqual( + BTrees.family64.OI, BTrees.OLBTree) + self.assertEqual( + BTrees.family64.II, BTrees.LLBTree) + self.assertEqual( + BTrees.family64.IF, BTrees.LFBTree) + self.assertEqual( + BTrees.family64.OO, BTrees.OOBTree) + s = LOTreeSet() + s.insert(BTrees.family64.maxint) + self.assertTrue(BTrees.family64.maxint in s) + s = LOTreeSet() + s.insert(BTrees.family64.minint) + self.assertTrue(BTrees.family64.minint in s) + s = LOTreeSet() + # XXX why oh why do we expect ValueError here, but TypeError in test32? 
+ self.assertRaises(ValueError, s.insert, BTrees.family64.maxint + 1) + self.assertRaises(ValueError, s.insert, BTrees.family64.minint - 1) + self.check_pickling(BTrees.family64) + + def check_pickling(self, family): + # The "family" objects are singletons; they can be pickled and + # unpickled, and the same instances will always be returned on + # unpickling, whether from the same unpickler or different + # unpicklers. + import pickle + from io import BytesIO + + s = pickle.dumps((family, family)) + (f1, f2) = pickle.loads(s) + self.assertIs(f1, family) + self.assertIs(f2, family) + + # Using a single memo across multiple pickles: + sio = BytesIO() + p = pickle.Pickler(sio) + p.dump(family) + p.dump([family]) + u = pickle.Unpickler(BytesIO(sio.getvalue())) + f1 = u.load() + f2, = u.load() + self.assertTrue(f1 is family) + self.assertTrue(f2 is family) + + # Using separate memos for each pickle: + sio = BytesIO() + p = pickle.Pickler(sio) + p.dump(family) + p.clear_memo() + p.dump([family]) + u = pickle.Unpickler(BytesIO(sio.getvalue())) + f1 = u.load() + f2, = u.load() + self.assertTrue(f1 is family) + self.assertTrue(f2 is family) + + +def test_suite(): + return unittest.TestSuite(( + unittest.makeSuite(DegenerateBTree), + unittest.makeSuite(BugFixes), + unittest.makeSuite(TestCmpError), + unittest.makeSuite(FamilyTest), + )) diff --git a/thesisenv/lib/python3.6/site-packages/BTrees/tests/testBTreesUnicode.py b/thesisenv/lib/python3.6/site-packages/BTrees/tests/testBTreesUnicode.py new file mode 100644 index 0000000..a090a1b --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/BTrees/tests/testBTreesUnicode.py @@ -0,0 +1,76 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## + +import unittest +from .common import _skip_under_Py3k + +# When an OOBtree contains unicode strings as keys, +# it is neccessary accessing non-unicode strings are +# either ascii strings or encoded as unicoded using the +# corresponding encoding + +encoding = 'ISO-8859-1' + +class TestBTreesUnicode(unittest.TestCase): + """ test unicode""" + + def setUp(self): + #setup an OOBTree with some unicode strings + from BTrees.OOBTree import OOBTree + from BTrees._compat import _bytes + + self.s = b'dreit\xe4gigen'.decode('latin1') + + self.data = [(b'alien', 1), + (b'k\xf6nnten', 2), + (b'fox', 3), + (b'future', 4), + (b'quick', 5), + (b'zerst\xf6rt', 6), + (u'dreit\xe4gigen', 7), + ] + + self.tree = OOBTree() + for k, v in self.data: + if isinstance(k, _bytes): + k = k.decode('latin1') + self.tree[k] = v + + @_skip_under_Py3k + def testAllKeys(self): + # check every item of the tree + from BTrees._compat import _bytes + for k, v in self.data: + if isinstance(k, _bytes): + k = k.decode(encoding) + self.assertTrue(k in self.tree) + self.assertEqual(self.tree[k], v) + + @_skip_under_Py3k + def testUnicodeKeys(self): + # try to access unicode keys in tree + k, v = self.data[-1] + self.assertEqual(k, self.s) + self.assertEqual(self.tree[k], v) + self.assertEqual(self.tree[self.s], v) + + @_skip_under_Py3k + def testAsciiKeys(self): + # try to access some "plain ASCII" keys in the tree + for k, v in self.data[0], self.data[2]: + self.assertTrue(isinstance(k, str)) + self.assertEqual(self.tree[k], v) + +def test_suite(): + return unittest.makeSuite(TestBTreesUnicode) diff --git a/thesisenv/lib/python3.6/site-packages/BTrees/tests/testConflict.py 
b/thesisenv/lib/python3.6/site-packages/BTrees/tests/testConflict.py new file mode 100644 index 0000000..7080393 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/BTrees/tests/testConflict.py @@ -0,0 +1,573 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## +import unittest + +from .common import _skip_wo_ZODB +from .common import ConflictTestBase + + +class NastyConfictFunctionalTests(ConflictTestBase, unittest.TestCase): + # FUNCTESTS: Provoke various conflict scenarios using ZODB + transaction + + def _getTargetClass(self): + from BTrees.OOBTree import OOBTree + return OOBTree + + def openDB(self): + # The conflict tests tend to open two or more connections + # and then try to commit them. A standard FileStorage + # is not MVCC aware, and so each connection would have the same + # instance of the storage, leading to the error + # "Duplicate tpc_begin calls for same transaction" on commit; + # thus we use a MVCCMappingStorage for these tests, ensuring each + # connection has its own storage. + # Unfortunately, it wants to acquire the identically same + # non-recursive lock in each of its *its* tpc_* methods, which deadlocks. + # The solution is to give each instance its own lock, and trust in the + # serialization (ordering) of the datamanager, and the fact that these tests are + # single-threaded. 
+ import threading + from ZODB.tests.MVCCMappingStorage import MVCCMappingStorage + class _MVCCMappingStorage(MVCCMappingStorage): + def new_instance(self): + inst = MVCCMappingStorage.new_instance(self) + inst._commit_lock = threading.Lock() + return inst + from ZODB.DB import DB + self.storage = _MVCCMappingStorage() + self.db = DB(self.storage) + return self.db + + + @_skip_wo_ZODB + def testSimpleConflict(self): + # Invoke conflict resolution by committing a transaction and + # catching a conflict in the storage. + import transaction + self.openDB() + + r1 = self.db.open().root() + r1["t"] = t = self._makeOne() + transaction.commit() + + r2 = self.db.open().root() + copy = r2["t"] + list(copy) # unghostify + + self.assertEqual(t._p_serial, copy._p_serial) + + t.update({1:2, 2:3}) + transaction.commit() + + copy.update({3:4}) + transaction.commit() + + # This tests a problem that cropped up while trying to write + # testBucketSplitConflict (below): conflict resolution wasn't + # working at all in non-trivial cases. Symptoms varied from + # strange complaints about pickling (despite that the test isn't + # doing any *directly*), thru SystemErrors from Python and + # AssertionErrors inside the BTree code. + @_skip_wo_ZODB + def testResolutionBlowsUp(self): + import transaction + b = self._makeOne() + for i in range(0, 200, 4): + b[i] = i + # bucket 0 has 15 values: 0, 4 .. 56 + # bucket 1 has 15 values: 60, 64 .. 116 + # bucket 2 has 20 values: 120, 124 .. 196 + state = b.__getstate__() + # Looks like: ((bucket0, 60, bucket1, 120, bucket2), firstbucket) + # If these fail, the *preconditions* for running the test aren't + # satisfied -- the test itself hasn't been run yet. + self.assertEqual(len(state), 2) + self.assertEqual(len(state[0]), 5) + self.assertEqual(state[0][1], 60) + self.assertEqual(state[0][3], 120) + + # Invoke conflict resolution by committing a transaction. 
+ self.openDB() + + r1 = self.db.open().root() + r1["t"] = b + transaction.commit() + + r2 = self.db.open().root() + copy = r2["t"] + # Make sure all of copy is loaded. + list(copy.values()) + + self.assertEqual(b._p_serial, copy._p_serial) + + b.update({1:2, 2:3}) + transaction.commit() + + copy.update({3:4}) + transaction.commit() # if this doesn't blow up + list(copy.values()) # and this doesn't either, then fine + + @_skip_wo_ZODB + def testBucketSplitConflict(self): + # Tests that a bucket split is viewed as a conflict. + # It's (almost necessarily) a white-box test, and sensitive to + # implementation details. + import transaction + from ZODB.POSException import ConflictError + b = orig = self._makeOne() + for i in range(0, 200, 4): + b[i] = i + # bucket 0 has 15 values: 0, 4 .. 56 + # bucket 1 has 15 values: 60, 64 .. 116 + # bucket 2 has 20 values: 120, 124 .. 196 + state = b.__getstate__() + # Looks like: ((bucket0, 60, bucket1, 120, bucket2), firstbucket) + # If these fail, the *preconditions* for running the test aren't + # satisfied -- the test itself hasn't been run yet. + self.assertEqual(len(state), 2) + self.assertEqual(len(state[0]), 5) + self.assertEqual(state[0][1], 60) + self.assertEqual(state[0][3], 120) + + # Invoke conflict resolution by committing a transaction. + self.openDB() + + tm1 = transaction.TransactionManager() + r1 = self.db.open(transaction_manager=tm1).root() + r1["t"] = b + tm1.commit() + + tm2 = transaction.TransactionManager() + r2 = self.db.open(transaction_manager=tm2).root() + copy = r2["t"] + # Make sure all of copy is loaded. + list(copy.values()) + + self.assertEqual(orig._p_serial, copy._p_serial) + + # In one transaction, add 16 new keys to bucket1, to force a bucket + # split. + b = orig + numtoadd = 16 + candidate = 60 + while numtoadd: + if candidate not in b: + b[candidate] = candidate + numtoadd -= 1 + candidate += 1 + # bucket 0 has 15 values: 0, 4 .. 56 + # bucket 1 has 15 values: 60, 61 .. 
74 + # bucket 2 has 16 values: [75, 76 .. 81] + [84, 88 ..116] + # bucket 3 has 20 values: 120, 124 .. 196 + state = b.__getstate__() + # Looks like: ((b0, 60, b1, 75, b2, 120, b3), firstbucket) + # The next block is still verifying preconditions. + self.assertEqual(len(state) , 2) + self.assertEqual(len(state[0]), 7) + self.assertEqual(state[0][1], 60) + self.assertEqual(state[0][3], 75) + self.assertEqual(state[0][5], 120) + + tm1.commit() + + # In the other transaction, add 3 values near the tail end of bucket1. + # This doesn't cause a split. + b = copy + for i in range(112, 116): + b[i] = i + # bucket 0 has 15 values: 0, 4 .. 56 + # bucket 1 has 18 values: 60, 64 .. 112, 113, 114, 115, 116 + # bucket 2 has 20 values: 120, 124 .. 196 + state = b.__getstate__() + # Looks like: ((bucket0, 60, bucket1, 120, bucket2), firstbucket) + # The next block is still verifying preconditions. + self.assertEqual(len(state), 2) + self.assertEqual(len(state[0]), 5) + self.assertEqual(state[0][1], 60) + self.assertEqual(state[0][3], 120) + + self.assertRaises(ConflictError, tm2.commit) + + @_skip_wo_ZODB + def testEmptyBucketConflict(self): + # Tests that an emptied bucket *created by* conflict resolution is + # viewed as a conflict: conflict resolution doesn't have enough + # info to unlink the empty bucket from the BTree correctly. + import transaction + from ZODB.POSException import ConflictError + b = orig = self._makeOne() + for i in range(0, 200, 4): + b[i] = i + # bucket 0 has 15 values: 0, 4 .. 56 + # bucket 1 has 15 values: 60, 64 .. 116 + # bucket 2 has 20 values: 120, 124 .. 196 + state = b.__getstate__() + # Looks like: ((bucket0, 60, bucket1, 120, bucket2), firstbucket) + # If these fail, the *preconditions* for running the test aren't + # satisfied -- the test itself hasn't been run yet. 
+ self.assertEqual(len(state), 2) + self.assertEqual(len(state[0]), 5) + self.assertEqual(state[0][1], 60) + self.assertEqual(state[0][3], 120) + + # Invoke conflict resolution by committing a transaction. + self.openDB() + + tm1 = transaction.TransactionManager() + r1 = self.db.open(transaction_manager=tm1).root() + r1["t"] = b + tm1.commit() + + tm2 = transaction.TransactionManager() + r2 = self.db.open(transaction_manager=tm2).root() + copy = r2["t"] + # Make sure all of copy is loaded. + list(copy.values()) + + self.assertEqual(orig._p_serial, copy._p_serial) + + # In one transaction, delete half of bucket 1. + b = orig + for k in 60, 64, 68, 72, 76, 80, 84, 88: + del b[k] + # bucket 0 has 15 values: 0, 4 .. 56 + # bucket 1 has 7 values: 92, 96, 100, 104, 108, 112, 116 + # bucket 2 has 20 values: 120, 124 .. 196 + state = b.__getstate__() + # Looks like: ((bucket0, 60, bucket1, 120, bucket2), firstbucket) + # The next block is still verifying preconditions. + self.assertEqual(len(state) , 2) + self.assertEqual(len(state[0]), 5) + self.assertEqual(state[0][1], 92) + self.assertEqual(state[0][3], 120) + + tm1.commit() + + # In the other transaction, delete the other half of bucket 1. + b = copy + for k in 92, 96, 100, 104, 108, 112, 116: + del b[k] + # bucket 0 has 15 values: 0, 4 .. 56 + # bucket 1 has 8 values: 60, 64, 68, 72, 76, 80, 84, 88 + # bucket 2 has 20 values: 120, 124 .. 196 + state = b.__getstate__() + # Looks like: ((bucket0, 60, bucket1, 120, bucket2), firstbucket) + # The next block is still verifying preconditions. + self.assertEqual(len(state), 2) + self.assertEqual(len(state[0]), 5) + self.assertEqual(state[0][1], 60) + self.assertEqual(state[0][3], 120) + + # Conflict resolution empties bucket1 entirely. This used to + # create an "insane" BTree (a legit BTree cannot contain an empty + # bucket -- it contains NULL pointers the BTree code doesn't + # expect, and segfaults result). 
+ self.assertRaises(ConflictError, tm2.commit) + + @_skip_wo_ZODB + def testEmptyBucketNoConflict(self): + # Tests that a plain empty bucket (on input) is not viewed as a + # conflict. + import transaction + b = orig = self._makeOne() + for i in range(0, 200, 4): + b[i] = i + # bucket 0 has 15 values: 0, 4 .. 56 + # bucket 1 has 15 values: 60, 64 .. 116 + # bucket 2 has 20 values: 120, 124 .. 196 + state = b.__getstate__() + # Looks like: ((bucket0, 60, bucket1, 120, bucket2), firstbucket) + # If these fail, the *preconditions* for running the test aren't + # satisfied -- the test itself hasn't been run yet. + self.assertEqual(len(state), 2) + self.assertEqual(len(state[0]), 5) + self.assertEqual(state[0][1], 60) + self.assertEqual(state[0][3], 120) + + # Invoke conflict resolution by committing a transaction. + self.openDB() + + r1 = self.db.open().root() + r1["t"] = orig + transaction.commit() + + r2 = self.db.open().root() + copy = r2["t"] + # Make sure all of copy is loaded. + list(copy.values()) + + self.assertEqual(orig._p_serial, copy._p_serial) + + # In one transaction, just add a key. + b = orig + b[1] = 1 + # bucket 0 has 16 values: [0, 1] + [4, 8 .. 56] + # bucket 1 has 15 values: 60, 64 .. 116 + # bucket 2 has 20 values: 120, 124 .. 196 + state = b.__getstate__() + # Looks like: ((bucket0, 60, bucket1, 120, bucket2), firstbucket) + # The next block is still verifying preconditions. + self.assertEqual(len(state), 2) + self.assertEqual(len(state[0]), 5) + self.assertEqual(state[0][1], 60) + self.assertEqual(state[0][3], 120) + + transaction.commit() + + # In the other transaction, delete bucket 2. + b = copy + for k in range(120, 200, 4): + del b[k] + # bucket 0 has 15 values: 0, 4 .. 56 + # bucket 1 has 15 values: 60, 64 .. 116 + state = b.__getstate__() + # Looks like: ((bucket0, 60, bucket1), firstbucket) + # The next block is still verifying preconditions. 
+ self.assertEqual(len(state), 2) + self.assertEqual(len(state[0]), 3) + self.assertEqual(state[0][1], 60) + + # This shouldn't create a ConflictError. + transaction.commit() + # And the resulting BTree shouldn't have internal damage. + b._check() + + # The snaky control flow in _bucket__p_resolveConflict ended up trying + # to decref a NULL pointer if conflict resolution was fed 3 empty + # buckets. http://collector.zope.org/Zope/553 + def testThreeEmptyBucketsNoSegfault(self): + # Note that the conflict is raised by our C extension, rather than + # indirectly via the storage, and hence is a more specialized type. + # This test therefore does not require ZODB. + from BTrees.Interfaces import BTreesConflictError + t = self._makeOne() + t[1] = 1 + bucket = t._firstbucket + del t[1] + state1 = bucket.__getstate__() + state2 = bucket.__getstate__() + state3 = bucket.__getstate__() + self.assertTrue(state2 is not state1 and + state2 is not state3 and + state3 is not state1) + self.assertTrue(state2 == state1 and + state3 == state1) + self.assertRaises(BTreesConflictError, bucket._p_resolveConflict, + state1, state2, state3) + # When an empty BTree resolves conflicts, it computes the + # bucket state as None, so... + self.assertRaises(BTreesConflictError, bucket._p_resolveConflict, + None, None, None) + + @_skip_wo_ZODB + def testCantResolveBTreeConflict(self): + # Test that a conflict involving two different changes to + # an internal BTree node is unresolvable. An internal node + # only changes when there are enough additions or deletions + # to a child bucket that the bucket is split or removed. + # It's (almost necessarily) a white-box test, and sensitive to + # implementation details. + import transaction + from ZODB.POSException import ConflictError + b = orig = self._makeOne() + for i in range(0, 200, 4): + b[i] = i + # bucket 0 has 15 values: 0, 4 .. 56 + # bucket 1 has 15 values: 60, 64 .. 116 + # bucket 2 has 20 values: 120, 124 .. 
196 + state = b.__getstate__() + # Looks like: ((bucket0, 60, bucket1, 120, bucket2), firstbucket) + # If these fail, the *preconditions* for running the test aren't + # satisfied -- the test itself hasn't been run yet. + self.assertEqual(len(state), 2) + self.assertEqual(len(state[0]), 5) + self.assertEqual(state[0][1], 60) + self.assertEqual(state[0][3], 120) + + # Set up database connections to provoke conflict. + self.openDB() + tm1 = transaction.TransactionManager() + r1 = self.db.open(transaction_manager=tm1).root() + r1["t"] = orig + tm1.commit() + + tm2 = transaction.TransactionManager() + r2 = self.db.open(transaction_manager=tm2).root() + copy = r2["t"] + # Make sure all of copy is loaded. + list(copy.values()) + + self.assertEqual(orig._p_serial, copy._p_serial) + + # Now one transaction should add enough keys to cause a split, + # and another should remove all the keys in one bucket. + + for k in range(200, 300, 4): + orig[k] = k + tm1.commit() + + for k in range(0, 60, 4): + del copy[k] + + self.assertRaises(ConflictError, tm2.commit) + + @_skip_wo_ZODB + def testConflictWithOneEmptyBucket(self): + # If one transaction empties a bucket, while another adds an item + # to the bucket, all the changes "look resolvable": bucket conflict + # resolution returns a bucket containing (only) the item added by + # the latter transaction, but changes from the former transaction + # removing the bucket are uncontested: the bucket is removed from + # the BTree despite that resolution thinks it's non-empty! This + # was first reported by Dieter Maurer, to zodb-dev on 22 Mar 2005. + import transaction + from ZODB.POSException import ConflictError + b = orig = self._makeOne() + for i in range(0, 200, 4): + b[i] = i + # bucket 0 has 15 values: 0, 4 .. 56 + # bucket 1 has 15 values: 60, 64 .. 116 + # bucket 2 has 20 values: 120, 124 .. 
196 + state = b.__getstate__() + # Looks like: ((bucket0, 60, bucket1, 120, bucket2), firstbucket) + # If these fail, the *preconditions* for running the test aren't + # satisfied -- the test itself hasn't been run yet. + self.assertEqual(len(state), 2) + self.assertEqual(len(state[0]), 5) + self.assertEqual(state[0][1], 60) + self.assertEqual(state[0][3], 120) + + # Set up database connections to provoke conflict. + self.openDB() + tm1 = transaction.TransactionManager() + r1 = self.db.open(transaction_manager=tm1).root() + r1["t"] = orig + tm1.commit() + + tm2 = transaction.TransactionManager() + r2 = self.db.open(transaction_manager=tm2).root() + copy = r2["t"] + # Make sure all of copy is loaded. + list(copy.values()) + + self.assertEqual(orig._p_serial, copy._p_serial) + + # Now one transaction empties the first bucket, and another adds a + # key to the first bucket. + + for k in range(0, 60, 4): + del orig[k] + tm1.commit() + + copy[1] = 1 + + self.assertRaises(ConflictError, tm2.commit) + + # Same thing, except commit the transactions in the opposite order. + b = self._makeOne() + for i in range(0, 200, 4): + b[i] = i + + tm1 = transaction.TransactionManager() + r1 = self.db.open(transaction_manager=tm1).root() + r1["t"] = b + tm1.commit() + + tm2 = transaction.TransactionManager() + r2 = self.db.open(transaction_manager=tm2).root() + copy = r2["t"] + # Make sure all of copy is loaded. + list(copy.values()) + + self.assertEqual(b._p_serial, copy._p_serial) + + # Now one transaction empties the first bucket, and another adds a + # key to the first bucket. + b[1] = 1 + tm1.commit() + + for k in range(0, 60, 4): + del copy[k] + + self.assertRaises(ConflictError, tm2.commit) + + @_skip_wo_ZODB + def testConflictOfInsertAndDeleteOfFirstBucketItem(self): + # Recently, BTrees became careful about removing internal keys + # (keys in internal aka BTree nodes) when they were deleted from + # buckets. This poses a problem for conflict resolution. 
+ + # We want to guard against a case in which the first key in a + # bucket is removed in one transaction while a key is added + # after that key but before the next key in another transaction + # with the result that the added key is unreachable. + + # original: + + # Bucket(...), k1, Bucket((k1, v1), (k3, v3), ...) + + # tran1 + + # Bucket(...), k3, Bucket(k3, v3), ...) + + # tran2 + + # Bucket(...), k1, Bucket((k1, v1), (k2, v2), (k3, v3), ...) + + # where k1 < k2 < k3 + + # We don't want: + + # Bucket(...), k3, Bucket((k2, v2), (k3, v3), ...) + + # as k2 would be unfindable, so we want a conflict. + + import transaction + from ZODB.POSException import ConflictError + mytype = self._getTargetClass() + db = self.openDB() + tm1 = transaction.TransactionManager() + conn1 = db.open(tm1) + conn1.root.t = t = mytype() + for i in range(0, 200, 2): + t[i] = i + tm1.commit() + k = t.__getstate__()[0][1] + assert t.__getstate__()[0][2].keys()[0] == k + + tm2 = transaction.TransactionManager() + conn2 = db.open(tm2) + + t[k+1] = k+1 + del conn2.root.t[k] + for i in range(200,300): + conn2.root.t[i] = i + + tm1.commit() + self.assertRaises(ConflictError, tm2.commit) + tm2.abort() + + k = t.__getstate__()[0][1] + t[k+1] = k+1 + del conn2.root.t[k] + + tm2.commit() + self.assertRaises(ConflictError, tm1.commit) + tm1.abort() + + +def test_suite(): + return unittest.TestSuite(( + unittest.makeSuite(NastyConfictFunctionalTests), + )) diff --git a/thesisenv/lib/python3.6/site-packages/BTrees/tests/test_IFBTree.py b/thesisenv/lib/python3.6/site-packages/BTrees/tests/test_IFBTree.py new file mode 100644 index 0000000..be6b52a --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/BTrees/tests/test_IFBTree.py @@ -0,0 +1,378 @@ +############################################################################## +# +# Copyright (c) 2001-2012 Zope Foundation and Contributors. +# All Rights Reserved. 
+# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## +import unittest + +from .common import BTreeTests +from .common import ExtendedSetTests +from .common import InternalKeysMappingTest +from .common import InternalKeysSetTest +from .common import MappingBase +from .common import MappingConflictTestBase +from .common import ModuleTest +from .common import MultiUnion +from .common import NormalSetTests +from .common import SetConflictTestBase +from .common import SetResult +from .common import TestLongIntKeys +from .common import makeBuilder +from BTrees.IIBTree import using64bits #XXX Ugly, but unavoidable + + +class IFBTreeInternalKeyTest(InternalKeysMappingTest, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.IFBTree import IFBTree + return IFBTree + + +class IFBTreePyInternalKeyTest(InternalKeysMappingTest, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.IFBTree import IFBTreePy + return IFBTreePy + + +class IFTreeSetInternalKeyTest(InternalKeysSetTest, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.IFBTree import IFTreeSet + return IFTreeSet + + +class IFTreeSetPyInternalKeyTest(InternalKeysSetTest, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.IFBTree import IFTreeSetPy + return IFTreeSetPy + + +class IFBucketTest(MappingBase, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.IFBTree import IFBucket + return IFBucket + + +class IFBucketPyTest(MappingBase, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.IFBTree import IFBucketPy + 
return IFBucketPy + + +class IFTreeSetTest(NormalSetTests, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.IFBTree import IFTreeSet + return IFTreeSet + + +class IFTreeSetPyTest(NormalSetTests, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.IFBTree import IFTreeSetPy + return IFTreeSetPy + + +class IFSetTest(ExtendedSetTests, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.IFBTree import IFSet + return IFSet + + +class IFSetPyTest(ExtendedSetTests, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.IFBTree import IFSetPy + return IFSetPy + + +class IFBTreeTest(BTreeTests, unittest.TestCase): + + def _makeOne(self): + from BTrees.IFBTree import IFBTree + return IFBTree() + + +class IFBTreePyTest(BTreeTests, unittest.TestCase): + + def _makeOne(self): + from BTrees.IFBTree import IFBTreePy + return IFBTreePy() + +if using64bits: + + class IFBTreeTest(BTreeTests, TestLongIntKeys, unittest.TestCase): + + def _makeOne(self): + from BTrees.IFBTree import IFBTree + return IFBTree() + + def getTwoValues(self): + return 0.5, 1.5 + + class IFBTreePyTest(BTreeTests, TestLongIntKeys, unittest.TestCase): + + def _makeOne(self): + from BTrees.IFBTree import IFBTreePy + return IFBTreePy() + + def getTwoValues(self): + return 0.5, 1.5 + + +class _TestIFBTreesBase(object): + + def testNonIntegerKeyRaises(self): + self.assertRaises(TypeError, self._stringraiseskey) + self.assertRaises(TypeError, self._floatraiseskey) + self.assertRaises(TypeError, self._noneraiseskey) + + def testNonNumericValueRaises(self): + self.assertRaises(TypeError, self._stringraisesvalue) + self.assertRaises(TypeError, self._noneraisesvalue) + self._makeOne()[1] = 1 + self._makeOne()[1] = 1.0 + + def _stringraiseskey(self): + self._makeOne()['c'] = 1 + + def _floatraiseskey(self): + self._makeOne()[2.5] = 1 + + def _noneraiseskey(self): + self._makeOne()[None] = 1 + + def _stringraisesvalue(self): + self._makeOne()[1] = 'c' + + def 
_floatraisesvalue(self): + self._makeOne()[1] = 1.4 + + def _noneraisesvalue(self): + self._makeOne()[1] = None + + +class TestIFBTrees(_TestIFBTreesBase, unittest.TestCase): + + def _makeOne(self): + from BTrees.IFBTree import IFBTree + return IFBTree() + + +class TestIFBTreesPy(_TestIFBTreesBase, unittest.TestCase): + + def _makeOne(self): + from BTrees.IFBTree import IFBTreePy + return IFBTreePy() + + +class TestIFMultiUnion(MultiUnion, unittest.TestCase): + + def multiunion(self, *args): + from BTrees.IFBTree import multiunion + return multiunion(*args) + + def union(self, *args): + from BTrees.IFBTree import union + return union(*args) + + def mkset(self, *args): + from BTrees.IFBTree import IFSet as mkset + return mkset(*args) + + def mktreeset(self, *args): + from BTrees.IFBTree import IFTreeSet as mktreeset + return mktreeset(*args) + + def mkbucket(self, *args): + from BTrees.IFBTree import IFBucket as mkbucket + return mkbucket(*args) + + def mkbtree(self, *args): + from BTrees.IFBTree import IFBTree as mkbtree + return mkbtree(*args) + + +class TestIFMultiUnionPy(MultiUnion, unittest.TestCase): + + def multiunion(self, *args): + from BTrees.IFBTree import multiunionPy + return multiunionPy(*args) + + def union(self, *args): + from BTrees.IFBTree import unionPy + return unionPy(*args) + + def mkset(self, *args): + from BTrees.IFBTree import IFSetPy as mkset + return mkset(*args) + + def mktreeset(self, *args): + from BTrees.IFBTree import IFTreeSetPy as mktreeset + return mktreeset(*args) + + def mkbucket(self, *args): + from BTrees.IFBTree import IFBucketPy as mkbucket + return mkbucket(*args) + + def mkbtree(self, *args): + from BTrees.IFBTree import IFBTreePy as mkbtree + return mkbtree(*args) + + +class PureIF(SetResult, unittest.TestCase): + + def union(self, *args): + from BTrees.IFBTree import union + return union(*args) + + def intersection(self, *args): + from BTrees.IFBTree import intersection + return intersection(*args) + + def 
difference(self, *args): + from BTrees.IFBTree import difference + return difference(*args) + + def builders(self): + from BTrees.IFBTree import IFBTree + from BTrees.IFBTree import IFBucket + from BTrees.IFBTree import IFTreeSet + from BTrees.IFBTree import IFSet + return IFSet, IFTreeSet, makeBuilder(IFBTree), makeBuilder(IFBucket) + + +class PureIFPy(SetResult, unittest.TestCase): + + def union(self, *args): + from BTrees.IFBTree import unionPy + return unionPy(*args) + + def intersection(self, *args): + from BTrees.IFBTree import intersectionPy + return intersectionPy(*args) + + def difference(self, *args): + from BTrees.IFBTree import differencePy + return differencePy(*args) + + def builders(self): + from BTrees.IFBTree import IFBTreePy + from BTrees.IFBTree import IFBucketPy + from BTrees.IFBTree import IFTreeSetPy + from BTrees.IFBTree import IFSetPy + return (IFSetPy, IFTreeSetPy, + makeBuilder(IFBTreePy), makeBuilder(IFBucketPy)) + + +class IFBTreeConflictTests(MappingConflictTestBase, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.IFBTree import IFBTree + return IFBTree + + +class IFBTreePyConflictTests(MappingConflictTestBase, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.IFBTree import IFBTreePy + return IFBTreePy + + +class IFBucketConflictTests(MappingConflictTestBase, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.IFBTree import IFBucket + return IFBucket + + +class IFBucketPyConflictTests(MappingConflictTestBase, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.IFBTree import IFBucketPy + return IFBucketPy + + +class IFTreeSetConflictTests(SetConflictTestBase, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.IFBTree import IFTreeSet + return IFTreeSet + + +class IFTreeSetPyConflictTests(SetConflictTestBase, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.IFBTree import IFTreeSetPy + return IFTreeSetPy + + +class 
IFSetConflictTests(SetConflictTestBase, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.IFBTree import IFSet + return IFSet + + +class IFSetPyConflictTests(SetConflictTestBase, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.IFBTree import IFSetPy + return IFSetPy + + +class IFModuleTest(ModuleTest, unittest.TestCase): + + prefix = 'IF' + + def _getModule(self): + import BTrees + return BTrees.IFBTree + + def _getInterface(self): + import BTrees.Interfaces + return BTrees.Interfaces.IIntegerFloatBTreeModule + +def test_suite(): + return unittest.TestSuite(( + unittest.makeSuite(IFBTreeInternalKeyTest), + unittest.makeSuite(IFBTreePyInternalKeyTest), + unittest.makeSuite(IFTreeSetInternalKeyTest), + unittest.makeSuite(IFTreeSetPyInternalKeyTest), + unittest.makeSuite(IFBucketTest), + unittest.makeSuite(IFBucketPyTest), + unittest.makeSuite(IFTreeSetTest), + unittest.makeSuite(IFTreeSetPyTest), + unittest.makeSuite(IFSetTest), + unittest.makeSuite(IFSetPyTest), + unittest.makeSuite(IFBTreeTest), + unittest.makeSuite(IFBTreePyTest), + unittest.makeSuite(TestIFBTrees), + unittest.makeSuite(TestIFBTreesPy), + unittest.makeSuite(TestIFMultiUnion), + unittest.makeSuite(TestIFMultiUnionPy), + unittest.makeSuite(PureIF), + unittest.makeSuite(PureIFPy), + unittest.makeSuite(IFBTreeConflictTests), + unittest.makeSuite(IFBTreePyConflictTests), + unittest.makeSuite(IFBucketConflictTests), + unittest.makeSuite(IFBucketPyConflictTests), + unittest.makeSuite(IFTreeSetConflictTests), + unittest.makeSuite(IFTreeSetPyConflictTests), + unittest.makeSuite(IFSetConflictTests), + unittest.makeSuite(IFSetPyConflictTests), + unittest.makeSuite(IFModuleTest), + )) diff --git a/thesisenv/lib/python3.6/site-packages/BTrees/tests/test_IIBTree.py b/thesisenv/lib/python3.6/site-packages/BTrees/tests/test_IIBTree.py new file mode 100644 index 0000000..fe776b8 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/BTrees/tests/test_IIBTree.py @@ -0,0 +1,507 @@ 
+############################################################################## +# +# Copyright (c) 2001-2012 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## +import unittest + +from .common import BTreeTests +from .common import ExtendedSetTests +from .common import I_SetsBase +from .common import InternalKeysMappingTest +from .common import InternalKeysSetTest +from .common import MappingBase +from .common import MappingConflictTestBase +from .common import ModuleTest +from .common import MultiUnion +from .common import NormalSetTests +from .common import SetConflictTestBase +from .common import SetResult +from .common import TestLongIntKeys +from .common import TestLongIntValues +from .common import Weighted +from .common import itemsToSet +from .common import makeBuilder +from BTrees.IIBTree import using64bits #XXX Ugly, but unavoidable + + +class IIBTreeInternalKeyTest(InternalKeysMappingTest, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.IIBTree import IIBTree + return IIBTree + + +class IIBTreePyInternalKeyTest(InternalKeysMappingTest, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.IIBTree import IIBTreePy + return IIBTreePy + + +class IITreeSetInternalKeyTest(InternalKeysSetTest, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.IIBTree import IITreeSet + return IITreeSet + + +class IITreeSetPyInternalKeyTest(InternalKeysSetTest, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.IIBTree import IITreeSetPy + 
return IITreeSetPy + + +class IIBucketTest(MappingBase, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.IIBTree import IIBucket + return IIBucket + + +class IIBucketPyTest(MappingBase, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.IIBTree import IIBucketPy + return IIBucketPy + + +class IITreeSetTest(NormalSetTests, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.IIBTree import IITreeSet + return IITreeSet + + +class IITreeSetPyTest(NormalSetTests, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.IIBTree import IITreeSetPy + return IITreeSetPy + + +class IISetTest(ExtendedSetTests, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.IIBTree import IISet + return IISet + + +class IISetPyTest(ExtendedSetTests, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.IIBTree import IISetPy + return IISetPy + + +class _IIBTreeTestBase(BTreeTests): + + def testIIBTreeOverflow(self): + good = set() + b = self._makeOne() + + def trial(i): + i = int(i) + try: + b[i] = 0 + except OverflowError: + self.assertRaises(OverflowError, b.__setitem__, 0, i) + except TypeError: + self.assertRaises(TypeError, b.__setitem__, 0, i) + else: + good.add(i) + b[0] = i + self.assertEqual(b[0], i) + + for i in range((1<<31) - 3, (1<<31) + 3): + trial(i) + trial(-i) + + del b[0] + self.assertEqual(sorted(good), sorted(b)) + + +class IIBTreeTest(_IIBTreeTestBase, unittest.TestCase): + + def _makeOne(self): + from BTrees.IIBTree import IIBTree + return IIBTree() + + +class IIBTreeTestPy(_IIBTreeTestBase, unittest.TestCase): + + def _makeOne(self): + from BTrees.IIBTree import IIBTreePy + return IIBTreePy() + + +if using64bits: + + class IIBTreeTest(BTreeTests, TestLongIntKeys, TestLongIntValues, + unittest.TestCase): + + def _makeOne(self): + from BTrees.IIBTree import IIBTree + return IIBTree() + + def getTwoValues(self): + return 1, 2 + + class IIBTreeTest(BTreeTests, TestLongIntKeys, TestLongIntValues, 
+ unittest.TestCase): + + def _makeOne(self): + from BTrees.IIBTree import IIBTreePy + return IIBTreePy() + + def getTwoValues(self): + return 1, 2 + + +class _TestIIBTreesBase(object): + + def testNonIntegerKeyRaises(self): + self.assertRaises(TypeError, self._stringraiseskey) + self.assertRaises(TypeError, self._floatraiseskey) + self.assertRaises(TypeError, self._noneraiseskey) + + def testNonIntegerValueRaises(self): + self.assertRaises(TypeError, self._stringraisesvalue) + self.assertRaises(TypeError, self._floatraisesvalue) + self.assertRaises(TypeError, self._noneraisesvalue) + + def _stringraiseskey(self): + self._makeOne()['c'] = 1 + + def _floatraiseskey(self): + self._makeOne()[2.5] = 1 + + def _noneraiseskey(self): + self._makeOne()[None] = 1 + + def _stringraisesvalue(self): + self._makeOne()[1] = 'c' + + def _floatraisesvalue(self): + self._makeOne()[1] = 1.4 + + def _noneraisesvalue(self): + self._makeOne()[1] = None + + +class TestIIBTrees(_TestIIBTreesBase, unittest.TestCase): + + def _makeOne(self): + from BTrees.IIBTree import IIBTree + return IIBTree() + + +class TestIIBTreesPy(_TestIIBTreesBase, unittest.TestCase): + + def _makeOne(self): + from BTrees.IIBTree import IIBTreePy + return IIBTreePy() + + +class TestIISets(I_SetsBase, unittest.TestCase): + + def _makeOne(self): + from BTrees.IIBTree import IISet + return IISet() + + +class TestIISetsPy(I_SetsBase, unittest.TestCase): + + def _makeOne(self): + from BTrees.IIBTree import IISetPy + return IISetPy() + + +class TestIITreeSets(I_SetsBase, unittest.TestCase): + + def _makeOne(self): + from BTrees.IIBTree import IITreeSet + return IITreeSet() + + +class TestIITreeSetsPy(I_SetsBase, unittest.TestCase): + + def _makeOne(self): + from BTrees.IIBTree import IITreeSetPy + return IITreeSetPy() + + +class PureII(SetResult, unittest.TestCase): + + def union(self, *args): + from BTrees.IIBTree import union + return union(*args) + + def intersection(self, *args): + from BTrees.IIBTree import 
intersection + return intersection(*args) + + def difference(self, *args): + from BTrees.IIBTree import difference + return difference(*args) + + def builders(self): + from BTrees.IIBTree import IIBTree + from BTrees.IIBTree import IIBucket + from BTrees.IIBTree import IITreeSet + from BTrees.IIBTree import IISet + return IISet, IITreeSet, makeBuilder(IIBTree), makeBuilder(IIBucket) + + +class PureIIPy(SetResult, unittest.TestCase): + + def union(self, *args): + from BTrees.IIBTree import unionPy + return unionPy(*args) + + def intersection(self, *args): + from BTrees.IIBTree import intersectionPy + return intersectionPy(*args) + + def difference(self, *args): + from BTrees.IIBTree import differencePy + return differencePy(*args) + + def builders(self): + from BTrees.IIBTree import IIBTreePy + from BTrees.IIBTree import IIBucketPy + from BTrees.IIBTree import IITreeSetPy + from BTrees.IIBTree import IISetPy + return (IISetPy, IITreeSetPy, + makeBuilder(IIBTreePy), makeBuilder(IIBucketPy)) + + +class TestIIMultiUnion(MultiUnion, unittest.TestCase): + + def multiunion(self, *args): + from BTrees.IIBTree import multiunion + return multiunion(*args) + + def union(self, *args): + from BTrees.IIBTree import union + return union(*args) + + def mkset(self, *args): + from BTrees.IIBTree import IISet as mkset + return mkset(*args) + + def mktreeset(self, *args): + from BTrees.IIBTree import IITreeSet as mktreeset + return mktreeset(*args) + + def mkbucket(self, *args): + from BTrees.IIBTree import IIBucket as mkbucket + return mkbucket(*args) + + def mkbtree(self, *args): + from BTrees.IIBTree import IIBTree as mkbtree + return mkbtree(*args) + + +class TestIIMultiUnionPy(MultiUnion, unittest.TestCase): + + def multiunion(self, *args): + from BTrees.IIBTree import multiunionPy + return multiunionPy(*args) + + def union(self, *args): + from BTrees.IIBTree import unionPy + return unionPy(*args) + + def mkset(self, *args): + from BTrees.IIBTree import IISetPy as mkset + return 
mkset(*args) + + def mktreeset(self, *args): + from BTrees.IIBTree import IITreeSetPy as mktreeset + return mktreeset(*args) + + def mkbucket(self, *args): + from BTrees.IIBTree import IIBucketPy as mkbucket + return mkbucket(*args) + + def mkbtree(self, *args): + from BTrees.IIBTree import IIBTreePy as mkbtree + return mkbtree(*args) + + +class TestWeightedII(Weighted, unittest.TestCase): + + def weightedUnion(self): + from BTrees.IIBTree import weightedUnion + return weightedUnion + + def weightedIntersection(self): + from BTrees.IIBTree import weightedIntersection + return weightedIntersection + + def union(self): + from BTrees.IIBTree import union + return union + + def intersection(self): + from BTrees.IIBTree import intersection + return intersection + + def mkbucket(self, *args): + from BTrees.IIBTree import IIBucket as mkbucket + return mkbucket(*args) + + def builders(self): + from BTrees.IIBTree import IIBTree + from BTrees.IIBTree import IIBucket + from BTrees.IIBTree import IITreeSet + from BTrees.IIBTree import IISet + return IIBucket, IIBTree, itemsToSet(IISet), itemsToSet(IITreeSet) + + +class TestWeightedIIPy(Weighted, unittest.TestCase): + + def weightedUnion(self): + from BTrees.IIBTree import weightedUnionPy + return weightedUnionPy + + def weightedIntersection(self): + from BTrees.IIBTree import weightedIntersectionPy + return weightedIntersectionPy + + def union(self): + from BTrees.IIBTree import unionPy + return unionPy + + def intersection(self): + from BTrees.IIBTree import intersectionPy + return intersectionPy + + def mkbucket(self, *args): + from BTrees.IIBTree import IIBucketPy as mkbucket + return mkbucket(*args) + + def builders(self): + from BTrees.IIBTree import IIBTreePy + from BTrees.IIBTree import IIBucketPy + from BTrees.IIBTree import IITreeSetPy + from BTrees.IIBTree import IISetPy + return (IIBucketPy, IIBTreePy, + itemsToSet(IISetPy), itemsToSet(IITreeSetPy)) + + +class IIBTreeConflictTests(MappingConflictTestBase, 
unittest.TestCase): + + def _getTargetClass(self): + from BTrees.IIBTree import IIBTree + return IIBTree + + +class IIBTreeConflictTestsPy(MappingConflictTestBase, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.IIBTree import IIBTreePy + return IIBTreePy + + +class IIBucketConflictTests(MappingConflictTestBase, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.IIBTree import IIBucket + return IIBucket + + +class IIBucketConflictTestsPy(MappingConflictTestBase, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.IIBTree import IIBucketPy + return IIBucketPy + + +class IITreeSetConflictTests(SetConflictTestBase, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.IIBTree import IITreeSet + return IITreeSet + + +class IITreeSetConflictTestsPy(SetConflictTestBase, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.IIBTree import IITreeSetPy + return IITreeSetPy + + +class IISetConflictTests(SetConflictTestBase, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.IIBTree import IISet + return IISet + + +class IISetConflictTestsPy(SetConflictTestBase, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.IIBTree import IISetPy + return IISetPy + + +class IIModuleTest(ModuleTest, unittest.TestCase): + + prefix = 'II' + + def _getModule(self): + import BTrees + return BTrees.IIBTree + def _getInterface(self): + import BTrees.Interfaces + return BTrees.Interfaces.IIntegerIntegerBTreeModule + + + +def test_suite(): + return unittest.TestSuite(( + unittest.makeSuite(IIBTreeInternalKeyTest), + unittest.makeSuite(IIBTreePyInternalKeyTest), + unittest.makeSuite(IITreeSetInternalKeyTest), + unittest.makeSuite(IITreeSetPyInternalKeyTest), + unittest.makeSuite(IIBucketTest), + unittest.makeSuite(IIBucketPyTest), + unittest.makeSuite(IITreeSetTest), + unittest.makeSuite(IITreeSetPyTest), + unittest.makeSuite(IISetTest), + unittest.makeSuite(IISetPyTest), + 
unittest.makeSuite(IIBTreeTest), + unittest.makeSuite(IIBTreeTestPy), + unittest.makeSuite(TestIIBTrees), + unittest.makeSuite(TestIIBTreesPy), + unittest.makeSuite(TestIISets), + unittest.makeSuite(TestIISetsPy), + unittest.makeSuite(TestIITreeSets), + unittest.makeSuite(TestIITreeSetsPy), + unittest.makeSuite(TestIIMultiUnion), + unittest.makeSuite(TestIIMultiUnionPy), + unittest.makeSuite(PureII), + unittest.makeSuite(PureIIPy), + unittest.makeSuite(TestWeightedII), + unittest.makeSuite(TestWeightedIIPy), + unittest.makeSuite(IIBTreeConflictTests), + unittest.makeSuite(IIBTreeConflictTestsPy), + unittest.makeSuite(IIBucketConflictTests), + unittest.makeSuite(IIBucketConflictTestsPy), + unittest.makeSuite(IITreeSetConflictTests), + unittest.makeSuite(IITreeSetConflictTestsPy), + unittest.makeSuite(IISetConflictTests), + unittest.makeSuite(IISetConflictTestsPy), + unittest.makeSuite(IIModuleTest), + )) diff --git a/thesisenv/lib/python3.6/site-packages/BTrees/tests/test_IOBTree.py b/thesisenv/lib/python3.6/site-packages/BTrees/tests/test_IOBTree.py new file mode 100644 index 0000000..aa14c4a --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/BTrees/tests/test_IOBTree.py @@ -0,0 +1,414 @@ +############################################################################## +# +# Copyright (c) 2001-2012 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## +import unittest + +from .common import BTreeTests +from .common import ExtendedSetTests +from .common import I_SetsBase +from .common import InternalKeysMappingTest +from .common import InternalKeysSetTest +from .common import MappingBase +from .common import MappingConflictTestBase +from .common import ModuleTest +from .common import MultiUnion +from .common import NormalSetTests +from .common import SetConflictTestBase +from .common import SetResult +from .common import TypeTest +from .common import TestLongIntKeys +from .common import makeBuilder +from BTrees.IIBTree import using64bits #XXX Ugly, but unavoidable + + +class IOBTreeInternalKeyTest(InternalKeysMappingTest, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.IOBTree import IOBTree + return IOBTree + + +class IOBTreePyInternalKeyTest(InternalKeysMappingTest, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.IOBTree import IOBTreePy + return IOBTreePy + + +class IOTreeSetInternalKeyTest(InternalKeysSetTest, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.IOBTree import IOTreeSet + return IOTreeSet + + +class IOTreeSetPyInternalKeyTest(InternalKeysSetTest, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.IOBTree import IOTreeSetPy + return IOTreeSetPy + + +class IOBucketTest(MappingBase, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.IOBTree import IOBucket + return IOBucket + + +class IOBucketPyTest(MappingBase, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.IOBTree import IOBucketPy + return IOBucketPy + + +class IOTreeSetTest(NormalSetTests, unittest.TestCase): + + def 
_getTargetClass(self): + from BTrees.IOBTree import IOTreeSet + return IOTreeSet + + +class IOTreeSetPyTest(NormalSetTests, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.IOBTree import IOTreeSetPy + return IOTreeSetPy + + +class IOSetTest(ExtendedSetTests, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.IOBTree import IOSet + return IOSet + + +class IOSetPyTest(ExtendedSetTests, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.IOBTree import IOSetPy + return IOSetPy + + +class IOBTreeTest(BTreeTests, unittest.TestCase): + + def _makeOne(self): + from BTrees.IOBTree import IOBTree + return IOBTree() + + +class IOBTreePyTest(BTreeTests, unittest.TestCase): + + def _makeOne(self): + from BTrees.IOBTree import IOBTreePy + return IOBTreePy() + + +if using64bits: + + + class IOBTreeTest(BTreeTests, TestLongIntKeys, unittest.TestCase): + + def _makeOne(self): + from BTrees.IOBTree import IOBTree + return IOBTree() + + + class IOBTreePyTest(BTreeTests, TestLongIntKeys, unittest.TestCase): + + def _makeOne(self): + from BTrees.IOBTree import IOBTreePy + return IOBTreePy() + + +class _TestIOBTreesBase(TypeTest): + + def _stringraises(self): + self._makeOne()['c'] = 1 + + def _floatraises(self): + self._makeOne()[2.5] = 1 + + def _noneraises(self): + self._makeOne()[None] = 1 + + def testStringAllowedInContains(self): + self.assertFalse('key' in self._makeOne()) + + def testStringKeyRaisesKeyErrorWhenMissing(self): + self.assertRaises(KeyError, self._makeOne().__getitem__, 'key') + + def testStringKeyReturnsDefaultFromGetWhenMissing(self): + self.assertEqual(self._makeOne().get('key', 42), 42) + +class TestIOBTrees(_TestIOBTreesBase, unittest.TestCase): + + def _makeOne(self): + from BTrees.IOBTree import IOBTree + return IOBTree() + + +class TestIOBTreesPy(_TestIOBTreesBase, unittest.TestCase): + + def _makeOne(self): + from BTrees.IOBTree import IOBTreePy + return IOBTreePy() + + +class TestIOSets(I_SetsBase, 
unittest.TestCase): + + def _makeOne(self): + from BTrees.IOBTree import IOSet + return IOSet() + + +class TestIOSetsPy(I_SetsBase, unittest.TestCase): + + def _makeOne(self): + from BTrees.IOBTree import IOSetPy + return IOSetPy() + + +class TestIOTreeSets(I_SetsBase, unittest.TestCase): + + def _makeOne(self): + from BTrees.IOBTree import IOTreeSet + return IOTreeSet() + + +class TestIOTreeSetsPy(I_SetsBase, unittest.TestCase): + + def _makeOne(self): + from BTrees.IOBTree import IOTreeSetPy + return IOTreeSetPy() + + +class PureIO(SetResult, unittest.TestCase): + + def union(self, *args): + from BTrees.IOBTree import union + return union(*args) + + def intersection(self, *args): + from BTrees.IOBTree import intersection + return intersection(*args) + + def difference(self, *args): + from BTrees.IOBTree import difference + return difference(*args) + + def builders(self): + from BTrees.IOBTree import IOBTree + from BTrees.IOBTree import IOBucket + from BTrees.IOBTree import IOTreeSet + from BTrees.IOBTree import IOSet + return IOSet, IOTreeSet, makeBuilder(IOBTree), makeBuilder(IOBucket) + + +class PureIOPy(SetResult, unittest.TestCase): + + def union(self, *args): + from BTrees.IOBTree import unionPy + return unionPy(*args) + + def intersection(self, *args): + from BTrees.IOBTree import intersectionPy + return intersectionPy(*args) + + def difference(self, *args): + from BTrees.IOBTree import differencePy + return differencePy(*args) + + def builders(self): + from BTrees.IOBTree import IOBTreePy + from BTrees.IOBTree import IOBucketPy + from BTrees.IOBTree import IOTreeSetPy + from BTrees.IOBTree import IOSetPy + return (IOSetPy, IOTreeSetPy, + makeBuilder(IOBTreePy), makeBuilder(IOBucketPy)) + + +class TestIOMultiUnion(MultiUnion, unittest.TestCase): + + def multiunion(self, *args): + from BTrees.IOBTree import multiunion + return multiunion(*args) + + def union(self, *args): + from BTrees.IOBTree import union + return union(*args) + + def mkset(self, *args): + 
from BTrees.IOBTree import IOSet as mkset + return mkset(*args) + + def mktreeset(self, *args): + from BTrees.IOBTree import IOTreeSet as mktreeset + return mktreeset(*args) + + def mkbucket(self, *args): + from BTrees.IOBTree import IOBucket as mkbucket + return mkbucket(*args) + + def mkbtree(self, *args): + from BTrees.IOBTree import IOBTree as mkbtree + return mkbtree(*args) + + +class TestIOMultiUnionPy(MultiUnion, unittest.TestCase): + + def multiunion(self, *args): + from BTrees.IOBTree import multiunionPy + return multiunionPy(*args) + + def union(self, *args): + from BTrees.IOBTree import unionPy + return unionPy(*args) + + def mkset(self, *args): + from BTrees.IOBTree import IOSetPy as mkset + return mkset(*args) + + def mktreeset(self, *args): + from BTrees.IOBTree import IOTreeSetPy as mktreeset + return mktreeset(*args) + + def mkbucket(self, *args): + from BTrees.IOBTree import IOBucketPy as mkbucket + return mkbucket(*args) + + def mkbtree(self, *args): + from BTrees.IOBTree import IOBTreePy as mkbtree + return mkbtree(*args) + + +class IOBTreeConflictTests(MappingConflictTestBase, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.IOBTree import IOBTree + return IOBTree + + +class IOBTreeConflictTestsPy(MappingConflictTestBase, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.IOBTree import IOBTreePy + return IOBTreePy + + +class IOBucketConflictTests(MappingConflictTestBase, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.IOBTree import IOBucket + return IOBucket + + +class IOBucketConflictTestsPy(MappingConflictTestBase, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.IOBTree import IOBucketPy + return IOBucketPy + + +class IOTreeSetConflictTests(SetConflictTestBase, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.IOBTree import IOTreeSet + return IOTreeSet + + +class IOTreeSetConflictTestsPy(SetConflictTestBase, unittest.TestCase): + + def _getTargetClass(self): + 
from BTrees.IOBTree import IOTreeSetPy + return IOTreeSetPy + + +class IOSetConflictTests(SetConflictTestBase, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.IOBTree import IOSet + return IOSet + + +class IOSetConflictTestsPy(SetConflictTestBase, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.IOBTree import IOSetPy + return IOSetPy + + +class IOModuleTest(ModuleTest, unittest.TestCase): + + prefix = 'IO' + + def _getModule(self): + import BTrees + return BTrees.IOBTree + + def _getInterface(self): + import BTrees.Interfaces + return BTrees.Interfaces.IIntegerObjectBTreeModule + + def test_weightedUnion_not_present(self): + try: + from BTrees.IOBTree import weightedUnion + except ImportError: + pass + else: + self.fail("IOBTree shouldn't have weightedUnion") + + def test_weightedIntersection_not_present(self): + try: + from BTrees.IOBTree import weightedIntersection + except ImportError: + pass + else: + self.fail("IOBTree shouldn't have weightedIntersection") + + +def test_suite(): + return unittest.TestSuite(( + unittest.makeSuite(IOBTreeInternalKeyTest), + unittest.makeSuite(IOBTreePyInternalKeyTest), + unittest.makeSuite(IOTreeSetInternalKeyTest), + unittest.makeSuite(IOTreeSetPyInternalKeyTest), + unittest.makeSuite(IOBucketTest), + unittest.makeSuite(IOBucketPyTest), + unittest.makeSuite(IOTreeSetTest), + unittest.makeSuite(IOTreeSetPyTest), + unittest.makeSuite(IOSetTest), + unittest.makeSuite(IOSetPyTest), + unittest.makeSuite(IOBTreeTest), + unittest.makeSuite(IOBTreePyTest), + unittest.makeSuite(TestIOBTrees), + unittest.makeSuite(TestIOBTreesPy), + unittest.makeSuite(TestIOSets), + unittest.makeSuite(TestIOSetsPy), + unittest.makeSuite(TestIOTreeSets), + unittest.makeSuite(TestIOTreeSetsPy), + unittest.makeSuite(TestIOMultiUnion), + unittest.makeSuite(TestIOMultiUnionPy), + unittest.makeSuite(PureIO), + unittest.makeSuite(PureIOPy), + unittest.makeSuite(IOBTreeConflictTests), + 
unittest.makeSuite(IOBTreeConflictTestsPy), + unittest.makeSuite(IOBucketConflictTests), + unittest.makeSuite(IOBucketConflictTestsPy), + unittest.makeSuite(IOTreeSetConflictTests), + unittest.makeSuite(IOTreeSetConflictTestsPy), + unittest.makeSuite(IOSetConflictTests), + unittest.makeSuite(IOSetConflictTestsPy), + unittest.makeSuite(IOModuleTest), + )) diff --git a/thesisenv/lib/python3.6/site-packages/BTrees/tests/test_LFBTree.py b/thesisenv/lib/python3.6/site-packages/BTrees/tests/test_LFBTree.py new file mode 100644 index 0000000..b72abee --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/BTrees/tests/test_LFBTree.py @@ -0,0 +1,316 @@ +############################################################################## +# +# Copyright (c) 2001-2012 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## +import unittest + +from .common import BTreeTests +from .common import ExtendedSetTests +from .common import InternalKeysMappingTest +from .common import InternalKeysSetTest +from .common import MappingBase +from .common import MappingConflictTestBase +from .common import ModuleTest +from .common import MultiUnion +from .common import NormalSetTests +from .common import SetConflictTestBase +from .common import SetResult +from .common import TestLongIntKeys +from .common import makeBuilder + + +class LFBTreeInternalKeyTest(InternalKeysMappingTest, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.LFBTree import LFBTree + return LFBTree + + +class 
LFBTreePyInternalKeyTest(InternalKeysMappingTest, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.LFBTree import LFBTreePy + return LFBTreePy + + +class LFTreeSetInternalKeyTest(InternalKeysSetTest, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.LFBTree import LFTreeSet + return LFTreeSet + + +class LFTreeSetPyInternalKeyTest(InternalKeysSetTest, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.LFBTree import LFTreeSetPy + return LFTreeSetPy + + +class LFBucketTest(MappingBase, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.LFBTree import LFBucket + return LFBucket + + +class LFBucketPyTest(MappingBase, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.LFBTree import LFBucketPy + return LFBucketPy + + +class LFTreeSetTest(NormalSetTests, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.LFBTree import LFTreeSet + return LFTreeSet + + +class LFTreeSetPyTest(NormalSetTests, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.LFBTree import LFTreeSetPy + return LFTreeSetPy + + +class LFSetTest(ExtendedSetTests, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.LFBTree import LFSet + return LFSet + + +class LFSetPyTest(ExtendedSetTests, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.LFBTree import LFSetPy + return LFSetPy + + +class LFBTreeTest(BTreeTests, TestLongIntKeys, unittest.TestCase): + + def _makeOne(self): + from BTrees.LFBTree import LFBTree + return LFBTree() + + def getTwoValues(self): + return 0.5, 1.5 + + +class LFBTreePyTest(BTreeTests, TestLongIntKeys, unittest.TestCase): + + def _makeOne(self): + from BTrees.LFBTree import LFBTreePy + return LFBTreePy() + + def getTwoValues(self): + return 0.5, 1.5 + + +class TestLFMultiUnion(MultiUnion, unittest.TestCase): + + def multiunion(self, *args): + from BTrees.LFBTree import multiunion + return multiunion(*args) + + def union(self, *args): + from BTrees.LFBTree 
import union + return union(*args) + + def mkset(self, *args): + from BTrees.LFBTree import LFSet as mkset + return mkset(*args) + + def mktreeset(self, *args): + from BTrees.LFBTree import LFTreeSet as mktreeset + return mktreeset(*args) + + def mkbucket(self, *args): + from BTrees.LFBTree import LFBucket as mkbucket + return mkbucket(*args) + + def mkbtree(self, *args): + from BTrees.LFBTree import LFBTree as mkbtree + return mkbtree(*args) + + +class TestLFMultiUnionPy(MultiUnion, unittest.TestCase): + + def multiunion(self, *args): + from BTrees.LFBTree import multiunionPy + return multiunionPy(*args) + + def union(self, *args): + from BTrees.LFBTree import unionPy + return unionPy(*args) + + def mkset(self, *args): + from BTrees.LFBTree import LFSetPy as mkset + return mkset(*args) + + def mktreeset(self, *args): + from BTrees.LFBTree import LFTreeSetPy as mktreeset + return mktreeset(*args) + + def mkbucket(self, *args): + from BTrees.LFBTree import LFBucketPy as mkbucket + return mkbucket(*args) + + def mkbtree(self, *args): + from BTrees.LFBTree import LFBTreePy as mkbtree + return mkbtree(*args) + + +class PureLF(SetResult, unittest.TestCase): + + def union(self, *args): + from BTrees.LFBTree import union + return union(*args) + + def intersection(self, *args): + from BTrees.LFBTree import intersection + return intersection(*args) + + def difference(self, *args): + from BTrees.LFBTree import difference + return difference(*args) + + def builders(self): + from BTrees.LFBTree import LFBTree + from BTrees.LFBTree import LFBucket + from BTrees.LFBTree import LFTreeSet + from BTrees.LFBTree import LFSet + return LFSet, LFTreeSet, makeBuilder(LFBTree), makeBuilder(LFBucket) + + +class PureLFPy(SetResult, unittest.TestCase): + + def union(self, *args): + from BTrees.LFBTree import unionPy + return unionPy(*args) + + def intersection(self, *args): + from BTrees.LFBTree import intersectionPy + return intersectionPy(*args) + + def difference(self, 
*args): + from BTrees.LFBTree import differencePy + return differencePy(*args) + + def builders(self): + from BTrees.LFBTree import LFBTreePy + from BTrees.LFBTree import LFBucketPy + from BTrees.LFBTree import LFTreeSetPy + from BTrees.LFBTree import LFSetPy + return (LFSetPy, LFTreeSetPy, + makeBuilder(LFBTreePy), makeBuilder(LFBucketPy)) + + +class LFBTreeConflictTests(MappingConflictTestBase, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.LFBTree import LFBTree + return LFBTree + + +class LFBTreeConflictTestsPy(MappingConflictTestBase, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.LFBTree import LFBTreePy + return LFBTreePy + + +class LFBucketConflictTests(MappingConflictTestBase, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.LFBTree import LFBucket + return LFBucket + + +class LFBucketConflictTestsPy(MappingConflictTestBase, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.LFBTree import LFBucketPy + return LFBucketPy + + +class LFTreeSetConflictTests(SetConflictTestBase, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.LFBTree import LFTreeSet + return LFTreeSet + + +class LFTreeSetConflictTestsPy(SetConflictTestBase, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.LFBTree import LFTreeSetPy + return LFTreeSetPy + + +class LFSetConflictTests(SetConflictTestBase, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.LFBTree import LFSet + return LFSet + + +class LFSetConflictTestsPy(SetConflictTestBase, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.LFBTree import LFSetPy + return LFSetPy + + +class LFModuleTest(ModuleTest, unittest.TestCase): + + prefix = 'LF' + + def _getModule(self): + import BTrees + return BTrees.LFBTree + + def _getInterface(self): + import BTrees.Interfaces + return BTrees.Interfaces.IIntegerFloatBTreeModule + + +def test_suite(): + return unittest.TestSuite(( + unittest.makeSuite(LFBTreeInternalKeyTest), 
+ unittest.makeSuite(LFBTreePyInternalKeyTest), + unittest.makeSuite(LFTreeSetInternalKeyTest), + unittest.makeSuite(LFTreeSetPyInternalKeyTest), + unittest.makeSuite(LFBucketTest), + unittest.makeSuite(LFBucketPyTest), + unittest.makeSuite(LFTreeSetTest), + unittest.makeSuite(LFTreeSetPyTest), + unittest.makeSuite(LFSetTest), + unittest.makeSuite(LFSetPyTest), + unittest.makeSuite(LFBTreeTest), + unittest.makeSuite(LFBTreePyTest), + unittest.makeSuite(TestLFMultiUnion), + unittest.makeSuite(TestLFMultiUnionPy), + unittest.makeSuite(PureLF), + unittest.makeSuite(PureLFPy), + unittest.makeSuite(LFBTreeConflictTests), + unittest.makeSuite(LFBTreeConflictTestsPy), + unittest.makeSuite(LFBucketConflictTests), + unittest.makeSuite(LFBucketConflictTestsPy), + unittest.makeSuite(LFTreeSetConflictTests), + unittest.makeSuite(LFTreeSetConflictTestsPy), + unittest.makeSuite(LFSetConflictTests), + unittest.makeSuite(LFSetConflictTestsPy), + unittest.makeSuite(LFModuleTest), + )) diff --git a/thesisenv/lib/python3.6/site-packages/BTrees/tests/test_LLBTree.py b/thesisenv/lib/python3.6/site-packages/BTrees/tests/test_LLBTree.py new file mode 100644 index 0000000..c2a6bdc --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/BTrees/tests/test_LLBTree.py @@ -0,0 +1,415 @@ +############################################################################## +# +# Copyright (c) 2001-2012 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## +import unittest + +from .common import BTreeTests +from .common import ExtendedSetTests +from .common import I_SetsBase +from .common import InternalKeysMappingTest +from .common import InternalKeysSetTest +from .common import MappingBase +from .common import MappingConflictTestBase +from .common import ModuleTest +from .common import MultiUnion +from .common import NormalSetTests +from .common import SetConflictTestBase +from .common import SetResult +from .common import TestLongIntKeys +from .common import TestLongIntValues +from .common import Weighted +from .common import itemsToSet +from .common import makeBuilder + + +class LLBTreeInternalKeyTest(InternalKeysMappingTest, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.LLBTree import LLBTree + return LLBTree + + +class LLBTreePyInternalKeyTest(InternalKeysMappingTest, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.LLBTree import LLBTreePy + return LLBTreePy + + +class LLTreeSetInternalKeyTest(InternalKeysSetTest, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.LLBTree import LLTreeSet + return LLTreeSet + + +class LLTreeSetPyInternalKeyTest(InternalKeysSetTest, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.LLBTree import LLTreeSetPy + return LLTreeSetPy + + +class LLBucketTest(MappingBase, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.LLBTree import LLBucket + return LLBucket + + +class LLBucketTestPy(MappingBase, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.LLBTree import LLBucketPy + return LLBucketPy + + +class LLTreeSetTest(NormalSetTests, unittest.TestCase): + + def 
_getTargetClass(self): + from BTrees.LLBTree import LLTreeSet + return LLTreeSet + + +class LLTreeSetTestPy(NormalSetTests, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.LLBTree import LLTreeSetPy + return LLTreeSetPy + + +class LLSetTest(ExtendedSetTests, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.LLBTree import LLSet + return LLSet + + +class LLSetTestPy(ExtendedSetTests, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.LLBTree import LLSetPy + return LLSetPy + + +class LLBTreeTest(BTreeTests, TestLongIntKeys, TestLongIntValues, + unittest.TestCase): + + def _makeOne(self): + from BTrees.LLBTree import LLBTree + return LLBTree() + def getTwoValues(self): + return 1, 2 + + +class LLBTreeTestPy(BTreeTests, TestLongIntKeys, TestLongIntValues, + unittest.TestCase): + + def _makeOne(self): + from BTrees.LLBTree import LLBTreePy + return LLBTreePy() + def getTwoValues(self): + return 1, 2 + + +class TestLLSets(I_SetsBase, unittest.TestCase): + + def _makeOne(self): + from BTrees.LLBTree import LLSet + return LLSet() + + +class TestLLSetsPy(I_SetsBase, unittest.TestCase): + + def _makeOne(self): + from BTrees.LLBTree import LLSetPy + return LLSetPy() + + +class TestLLTreeSets(I_SetsBase, unittest.TestCase): + + def _makeOne(self): + from BTrees.LLBTree import LLTreeSet + return LLTreeSet() + + +class TestLLTreeSetsPy(I_SetsBase, unittest.TestCase): + + def _makeOne(self): + from BTrees.LLBTree import LLTreeSetPy + return LLTreeSetPy() + + +class PureLL(SetResult, unittest.TestCase): + + def union(self, *args): + from BTrees.LLBTree import union + return union(*args) + + def intersection(self, *args): + from BTrees.LLBTree import intersection + return intersection(*args) + + def difference(self, *args): + from BTrees.LLBTree import difference + return difference(*args) + + def builders(self): + from BTrees.LLBTree import LLBTree + from BTrees.LLBTree import LLBucket + from BTrees.LLBTree import LLTreeSet + from 
BTrees.LLBTree import LLSet + return LLSet, LLTreeSet, makeBuilder(LLBTree), makeBuilder(LLBucket) + + +class PureLLPy(SetResult, unittest.TestCase): + + def union(self, *args): + from BTrees.LLBTree import unionPy + return unionPy(*args) + + def intersection(self, *args): + from BTrees.LLBTree import intersectionPy + return intersectionPy(*args) + + def difference(self, *args): + from BTrees.LLBTree import differencePy + return differencePy(*args) + + def builders(self): + from BTrees.LLBTree import LLBTreePy + from BTrees.LLBTree import LLBucketPy + from BTrees.LLBTree import LLTreeSetPy + from BTrees.LLBTree import LLSetPy + return (LLSetPy, LLTreeSetPy, + makeBuilder(LLBTreePy), makeBuilder(LLBucketPy)) + + +class TestLLMultiUnion(MultiUnion, unittest.TestCase): + + def multiunion(self, *args): + from BTrees.LLBTree import multiunion + return multiunion(*args) + + def union(self, *args): + from BTrees.LLBTree import union + return union(*args) + + def mkset(self, *args): + from BTrees.LLBTree import LLSet as mkset + return mkset(*args) + + def mktreeset(self, *args): + from BTrees.LLBTree import LLTreeSet as mktreeset + return mktreeset(*args) + + def mkbucket(self, *args): + from BTrees.LLBTree import LLBucket as mkbucket + return mkbucket(*args) + + def mkbtree(self, *args): + from BTrees.LLBTree import LLBTree as mkbtree + return mkbtree(*args) + + +class TestLLMultiUnionPy(MultiUnion, unittest.TestCase): + + def multiunion(self, *args): + from BTrees.LLBTree import multiunionPy + return multiunionPy(*args) + + def union(self, *args): + from BTrees.LLBTree import unionPy + return unionPy(*args) + + def mkset(self, *args): + from BTrees.LLBTree import LLSetPy as mkset + return mkset(*args) + + def mktreeset(self, *args): + from BTrees.LLBTree import LLTreeSetPy as mktreeset + return mktreeset(*args) + + def mkbucket(self, *args): + from BTrees.LLBTree import LLBucketPy as mkbucket + return mkbucket(*args) + + def mkbtree(self, *args): + from BTrees.LLBTree 
import LLBTreePy as mkbtree + return mkbtree(*args) + + +class TestWeightedLL(Weighted, unittest.TestCase): + + def weightedUnion(self): + from BTrees.LLBTree import weightedUnion + return weightedUnion + + def weightedIntersection(self): + from BTrees.LLBTree import weightedIntersection + return weightedIntersection + + def union(self): + from BTrees.LLBTree import union + return union + + def intersection(self): + from BTrees.LLBTree import intersection + return intersection + + def mkbucket(self, *args): + from BTrees.LLBTree import LLBucket as mkbucket + return mkbucket(*args) + + def builders(self): + from BTrees.LLBTree import LLBTree + from BTrees.LLBTree import LLBucket + from BTrees.LLBTree import LLTreeSet + from BTrees.LLBTree import LLSet + return LLBucket, LLBTree, itemsToSet(LLSet), itemsToSet(LLTreeSet) + + +class TestWeightedLLPy(Weighted, unittest.TestCase): + + def weightedUnion(self): + from BTrees.LLBTree import weightedUnionPy + return weightedUnionPy + + def weightedIntersection(self): + from BTrees.LLBTree import weightedIntersectionPy + return weightedIntersectionPy + + def union(self): + from BTrees.LLBTree import unionPy + return unionPy + + def intersection(self): + from BTrees.LLBTree import intersectionPy + return intersectionPy + + def mkbucket(self, *args): + from BTrees.LLBTree import LLBucketPy as mkbucket + return mkbucket(*args) + + def builders(self): + from BTrees.LLBTree import LLBTreePy + from BTrees.LLBTree import LLBucketPy + from BTrees.LLBTree import LLTreeSetPy + from BTrees.LLBTree import LLSetPy + return (LLBucketPy, LLBTreePy, + itemsToSet(LLSetPy), itemsToSet(LLTreeSetPy)) + + +class LLBTreeConflictTests(MappingConflictTestBase, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.LLBTree import LLBTree + return LLBTree + + +class LLBTreeConflictTestsPy(MappingConflictTestBase, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.LLBTree import LLBTreePy + return LLBTreePy + + +class 
LLBucketConflictTests(MappingConflictTestBase, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.LLBTree import LLBucket + return LLBucket + + +class LLBucketConflictTestsPy(MappingConflictTestBase, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.LLBTree import LLBucketPy + return LLBucketPy + + +class LLTreeSetConflictTests(SetConflictTestBase, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.LLBTree import LLTreeSet + return LLTreeSet + + +class LLTreeSetConflictTestsPy(SetConflictTestBase, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.LLBTree import LLTreeSetPy + return LLTreeSetPy + + +class LLSetConflictTests(SetConflictTestBase, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.LLBTree import LLSet + return LLSet + + +class LLSetConflictTestsPy(SetConflictTestBase, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.LLBTree import LLSetPy + return LLSetPy + + +class LLModuleTest(ModuleTest, unittest.TestCase): + + prefix = 'LL' + + def _getModule(self): + import BTrees + return BTrees.LLBTree + + def _getInterface(self): + import BTrees.Interfaces + return BTrees.Interfaces.IIntegerIntegerBTreeModule + + +def test_suite(): + return unittest.TestSuite(( + unittest.makeSuite(LLBTreeInternalKeyTest), + unittest.makeSuite(LLBTreeInternalKeyTest), + unittest.makeSuite(LLTreeSetInternalKeyTest), + unittest.makeSuite(LLTreeSetInternalKeyTest), + unittest.makeSuite(LLBucketTest), + unittest.makeSuite(LLBucketTest), + unittest.makeSuite(LLTreeSetTest), + unittest.makeSuite(LLTreeSetTest), + unittest.makeSuite(LLSetTest), + unittest.makeSuite(LLSetTest), + unittest.makeSuite(LLBTreeTest), + unittest.makeSuite(LLBTreeTest), + unittest.makeSuite(TestLLSets), + unittest.makeSuite(TestLLSets), + unittest.makeSuite(TestLLTreeSets), + unittest.makeSuite(TestLLTreeSets), + unittest.makeSuite(TestLLMultiUnion), + unittest.makeSuite(TestLLMultiUnion), + unittest.makeSuite(PureLL), + 
unittest.makeSuite(PureLL), + unittest.makeSuite(TestWeightedLL), + unittest.makeSuite(TestWeightedLL), + unittest.makeSuite(LLBTreeConflictTests), + unittest.makeSuite(LLBTreeConflictTests), + unittest.makeSuite(LLBucketConflictTests), + unittest.makeSuite(LLBucketConflictTests), + unittest.makeSuite(LLTreeSetConflictTests), + unittest.makeSuite(LLTreeSetConflictTests), + unittest.makeSuite(LLSetConflictTests), + unittest.makeSuite(LLSetConflictTests), + unittest.makeSuite(LLModuleTest), + )) diff --git a/thesisenv/lib/python3.6/site-packages/BTrees/tests/test_LOBTree.py b/thesisenv/lib/python3.6/site-packages/BTrees/tests/test_LOBTree.py new file mode 100644 index 0000000..98cb5ec --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/BTrees/tests/test_LOBTree.py @@ -0,0 +1,359 @@ +############################################################################## +# +# Copyright (c) 2001-2012 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## +import unittest + +from .common import BTreeTests +from .common import ExtendedSetTests +from .common import I_SetsBase +from .common import InternalKeysMappingTest +from .common import InternalKeysSetTest +from .common import MappingBase +from .common import MappingConflictTestBase +from .common import ModuleTest +from .common import MultiUnion +from .common import NormalSetTests +from .common import SetConflictTestBase +from .common import SetResult +from .common import TestLongIntKeys +from .common import makeBuilder + + +class LOBTreeInternalKeyTest(InternalKeysMappingTest, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.LOBTree import LOBTree + return LOBTree + + +class LOBTreePyInternalKeyTest(InternalKeysMappingTest, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.LOBTree import LOBTreePy + return LOBTreePy + + +class LOTreeSetInternalKeyTest(InternalKeysSetTest, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.LOBTree import LOTreeSet + return LOTreeSet + + +class LOTreeSetPyInternalKeyTest(InternalKeysSetTest, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.LOBTree import LOTreeSetPy + return LOTreeSetPy + + +class LOBucketTest(MappingBase, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.LOBTree import LOBucket + return LOBucket + + +class LOBucketPyTest(MappingBase, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.LOBTree import LOBucketPy + return LOBucketPy + + +class LOTreeSetTest(NormalSetTests, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.LOBTree import LOTreeSet + return LOTreeSet + + +class 
LOTreeSetPyTest(NormalSetTests, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.LOBTree import LOTreeSetPy + return LOTreeSetPy + + +class LOSetTest(ExtendedSetTests, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.LOBTree import LOSet + return LOSet + + +class LOSetPyTest(ExtendedSetTests, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.LOBTree import LOSetPy + return LOSetPy + + +class LOBTreeTest(BTreeTests, TestLongIntKeys, unittest.TestCase): + + def _makeOne(self): + from BTrees.LOBTree import LOBTree + return LOBTree() + + +class LOBTreePyTest(BTreeTests, TestLongIntKeys, unittest.TestCase): + + def _makeOne(self): + from BTrees.LOBTree import LOBTreePy + return LOBTreePy() + + +class TestLOSets(I_SetsBase, unittest.TestCase): + + def _makeOne(self): + from BTrees.LOBTree import LOSet + return LOSet() + + +class TestLOSetsPy(I_SetsBase, unittest.TestCase): + + def _makeOne(self): + from BTrees.LOBTree import LOSetPy + return LOSetPy() + + +class TestLOTreeSets(I_SetsBase, unittest.TestCase): + + def _makeOne(self): + from BTrees.LOBTree import LOTreeSet + return LOTreeSet() + + +class TestLOTreeSetsPy(I_SetsBase, unittest.TestCase): + + def _makeOne(self): + from BTrees.LOBTree import LOTreeSetPy + return LOTreeSetPy() + + +class TestLOMultiUnion(MultiUnion, unittest.TestCase): + + def multiunion(self, *args): + from BTrees.LOBTree import multiunion + return multiunion(*args) + + def union(self, *args): + from BTrees.LOBTree import union + return union(*args) + + def mkset(self, *args): + from BTrees.LOBTree import LOSet as mkset + return mkset(*args) + + def mktreeset(self, *args): + from BTrees.LOBTree import LOTreeSet as mktreeset + return mktreeset(*args) + + def mkbucket(self, *args): + from BTrees.LOBTree import LOBucket as mkbucket + return mkbucket(*args) + + def mkbtree(self, *args): + from BTrees.LOBTree import LOBTree as mkbtree + return mkbtree(*args) + + +class TestLOMultiUnionPy(MultiUnion, 
unittest.TestCase): + + def multiunion(self, *args): + from BTrees.LOBTree import multiunionPy + return multiunionPy(*args) + + def union(self, *args): + from BTrees.LOBTree import unionPy + return unionPy(*args) + + def mkset(self, *args): + from BTrees.LOBTree import LOSetPy as mkset + return mkset(*args) + + def mktreeset(self, *args): + from BTrees.LOBTree import LOTreeSetPy as mktreeset + return mktreeset(*args) + + def mkbucket(self, *args): + from BTrees.LOBTree import LOBucketPy as mkbucket + return mkbucket(*args) + + def mkbtree(self, *args): + from BTrees.LOBTree import LOBTreePy as mkbtree + return mkbtree(*args) + + +class PureLO(SetResult, unittest.TestCase): + + def union(self, *args): + from BTrees.LOBTree import union + return union(*args) + + def intersection(self, *args): + from BTrees.LOBTree import intersection + return intersection(*args) + + def difference(self, *args): + from BTrees.LOBTree import difference + return difference(*args) + + def builders(self): + from BTrees.LOBTree import LOBTree + from BTrees.LOBTree import LOBucket + from BTrees.LOBTree import LOTreeSet + from BTrees.LOBTree import LOSet + return LOSet, LOTreeSet, makeBuilder(LOBTree), makeBuilder(LOBucket) + + +class PureLOPy(SetResult, unittest.TestCase): + + def union(self, *args): + from BTrees.LOBTree import unionPy + return unionPy(*args) + + def intersection(self, *args): + from BTrees.LOBTree import intersectionPy + return intersectionPy(*args) + + def difference(self, *args): + from BTrees.LOBTree import differencePy + return differencePy(*args) + + def builders(self): + from BTrees.LOBTree import LOBTreePy + from BTrees.LOBTree import LOBucketPy + from BTrees.LOBTree import LOTreeSetPy + from BTrees.LOBTree import LOSetPy + return (LOSetPy, LOTreeSetPy, + makeBuilder(LOBTreePy), makeBuilder(LOBucketPy)) + + +class LOBTreeConflictTests(MappingConflictTestBase, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.LOBTree import LOBTree + return LOBTree + 
+ +class LOBTreeConflictTestsPy(MappingConflictTestBase, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.LOBTree import LOBTreePy + return LOBTreePy + + +class LOBucketConflictTests(MappingConflictTestBase, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.LOBTree import LOBucket + return LOBucket + + +class LOBucketConflictTestsPy(MappingConflictTestBase, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.LOBTree import LOBucketPy + return LOBucketPy + + +class LOTreeSetConflictTests(SetConflictTestBase, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.LOBTree import LOTreeSet + return LOTreeSet + + +class LOTreeSetConflictTestsPy(SetConflictTestBase, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.LOBTree import LOTreeSetPy + return LOTreeSetPy + + +class LOSetConflictTests(SetConflictTestBase, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.LOBTree import LOSet + return LOSet + + +class LOSetConflictTestsPy(SetConflictTestBase, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.LOBTree import LOSetPy + return LOSetPy + + +class LOModuleTest(ModuleTest, unittest.TestCase): + + prefix = 'LO' + + def _getModule(self): + import BTrees + return BTrees.LOBTree + + def _getInterface(self): + import BTrees.Interfaces + return BTrees.Interfaces.IIntegerObjectBTreeModule + + def test_weightedUnion_not_present(self): + try: + from BTrees.LOBTree import weightedUnion + except ImportError: + pass + else: + self.fail("LOBTree shouldn't have weightedUnion") + + def test_weightedIntersection_not_present(self): + try: + from BTrees.LOBTree import weightedIntersection + except ImportError: + pass + else: + self.fail("LOBTree shouldn't have weightedIntersection") + + +def test_suite(): + return unittest.TestSuite(( + unittest.makeSuite(LOBTreeInternalKeyTest), + unittest.makeSuite(LOBTreePyInternalKeyTest), + unittest.makeSuite(LOTreeSetInternalKeyTest), + 
unittest.makeSuite(LOTreeSetPyInternalKeyTest), + unittest.makeSuite(LOBucketTest), + unittest.makeSuite(LOBucketPyTest), + unittest.makeSuite(LOTreeSetTest), + unittest.makeSuite(LOTreeSetPyTest), + unittest.makeSuite(LOSetTest), + unittest.makeSuite(LOSetPyTest), + unittest.makeSuite(LOBTreeTest), + unittest.makeSuite(LOBTreePyTest), + unittest.makeSuite(TestLOSets), + unittest.makeSuite(TestLOSetsPy), + unittest.makeSuite(TestLOTreeSets), + unittest.makeSuite(TestLOTreeSetsPy), + unittest.makeSuite(TestLOMultiUnion), + unittest.makeSuite(TestLOMultiUnionPy), + unittest.makeSuite(PureLO), + unittest.makeSuite(PureLOPy), + unittest.makeSuite(LOBTreeConflictTests), + unittest.makeSuite(LOBTreeConflictTestsPy), + unittest.makeSuite(LOBucketConflictTests), + unittest.makeSuite(LOBucketConflictTestsPy), + unittest.makeSuite(LOTreeSetConflictTests), + unittest.makeSuite(LOTreeSetConflictTestsPy), + unittest.makeSuite(LOSetConflictTests), + unittest.makeSuite(LOSetConflictTestsPy), + unittest.makeSuite(LOModuleTest), + )) diff --git a/thesisenv/lib/python3.6/site-packages/BTrees/tests/test_Length.py b/thesisenv/lib/python3.6/site-packages/BTrees/tests/test_Length.py new file mode 100644 index 0000000..bdbdf9d --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/BTrees/tests/test_Length.py @@ -0,0 +1,110 @@ +############################################################################## +# +# Copyright (c) 2008 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## +import unittest + + +_marker = object() + +class LengthTestCase(unittest.TestCase): + + def _getTargetClass(self): + from BTrees.Length import Length + return Length + + def _makeOne(self, value=_marker): + if value is _marker: + return self._getTargetClass()() + return self._getTargetClass()(value) + + def test_ctor_default(self): + length = self._makeOne() + self.assertEqual(length.value, 0) + + def test_ctor_explict(self): + length = self._makeOne(42) + self.assertEqual(length.value, 42) + + def test___getstate___(self): + length = self._makeOne(42) + self.assertEqual(length.__getstate__(), 42) + + def test___setstate__(self): + length = self._makeOne() + length.__setstate__(42) + self.assertEqual(length.value, 42) + + def test_set(self): + length = self._makeOne() + length.set(42) + self.assertEqual(length.value, 42) + + def test__p_resolveConflict(self): + length = self._makeOne() + self.assertEqual(length._p_resolveConflict(5, 7, 9), 11) + + def test_change_w_positive_delta(self): + length = self._makeOne() + length.change(3) + self.assertEqual(length.value, 3) + + def test_change_w_negative_delta(self): + length = self._makeOne() + length.change(-3) + self.assertEqual(length.value, -3) + + def test_change_overflows_to_long(self): + import sys + try: + length = self._makeOne(sys.maxint) + except AttributeError: #pragma NO COVER Py3k + return + else: #pragma NO COVER Py2 + self.assertEqual(length(), sys.maxint) + self.assertTrue(type(length()) is int) + length.change(+1) + self.assertEqual(length(), sys.maxint + 1) + self.assertTrue(type(length()) is long) + + def test_change_underflows_to_long(self): + import sys + try: + minint = 
(-sys.maxint) - 1 + except AttributeError: #pragma NO COVER Py3k + return + else: #pragma NO COVER Py2 + length = self._makeOne(minint) + self.assertEqual(length(), minint) + self.assertTrue(type(length()) is int) + length.change(-1) + self.assertEqual(length(), minint - 1) + self.assertTrue(type(length()) is long) + + def test___call___no_args(self): + length = self._makeOne(42) + self.assertEqual(length(), 42) + + def test___call___w_args(self): + length = self._makeOne(42) + self.assertEqual(length(0, None, (), [], {}), 42) + + def test_lp_516653(self): + # Test for https://bugs.launchpad.net/zodb/+bug/516653 + import copy + length = self._makeOne() + other = copy.copy(length) + self.assertEqual(other(), 0) + + +def test_suite(): + return unittest.makeSuite(LengthTestCase) diff --git a/thesisenv/lib/python3.6/site-packages/BTrees/tests/test_OIBTree.py b/thesisenv/lib/python3.6/site-packages/BTrees/tests/test_OIBTree.py new file mode 100644 index 0000000..f52791d --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/BTrees/tests/test_OIBTree.py @@ -0,0 +1,383 @@ +############################################################################## +# +# Copyright (c) 2001-2012 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## +import unittest + +from .common import BTreeTests +from .common import ExtendedSetTests +from .common import InternalKeysMappingTest +from .common import InternalKeysSetTest +from .common import MappingBase +from .common import MappingConflictTestBase +from .common import ModuleTest +from .common import NormalSetTests +from .common import SetConflictTestBase +from .common import SetResult +from .common import TestLongIntValues +from .common import TypeTest +from .common import Weighted +from .common import itemsToSet +from .common import makeBuilder +from BTrees.IIBTree import using64bits #XXX Ugly, but necessary + + +class OIBTreeInternalKeyTest(InternalKeysMappingTest, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.OIBTree import OIBTree + return OIBTree + + +class OIBTreePyInternalKeyTest(InternalKeysMappingTest, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.OIBTree import OIBTreePy + return OIBTreePy + + +class OITreeSetInternalKeyTest(InternalKeysSetTest, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.OIBTree import OITreeSet + return OITreeSet + + +class OITreeSetPyInternalKeyTest(InternalKeysSetTest, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.OIBTree import OITreeSetPy + return OITreeSetPy + + +class OIBucketTest(MappingBase, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.OIBTree import OIBucket + return OIBucket + + +class OIBucketPyTest(MappingBase, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.OIBTree import OIBucketPy + return OIBucketPy + + +class OITreeSetTest(NormalSetTests, unittest.TestCase): + + def 
_getTargetClass(self): + from BTrees.OIBTree import OITreeSet + return OITreeSet + + +class OITreeSetPyTest(NormalSetTests, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.OIBTree import OITreeSetPy + return OITreeSetPy + + +class OISetTest(ExtendedSetTests, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.OIBTree import OISet + return OISet + + +class OISetPyTest(ExtendedSetTests, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.OIBTree import OISetPy + return OISetPy + + +class OIBTreeTest(BTreeTests, unittest.TestCase): + + def _makeOne(self): + from BTrees.OIBTree import OIBTree + return OIBTree() + + +class OIBTreePyTest(BTreeTests, unittest.TestCase): + def _makeOne(self): + from BTrees.OIBTree import OIBTreePy + return OIBTreePy() + + +if using64bits: + + class OIBTreeTest(BTreeTests, TestLongIntValues, unittest.TestCase): + def _makeOne(self): + from BTrees.OIBTree import OIBTree + return OIBTree() + def getTwoKeys(self): + return object(), object() + + class OIBTreePyTest(BTreeTests, TestLongIntValues, unittest.TestCase): + def _makeOne(self): + from BTrees.OIBTree import OIBTreePy + return OIBTreePy() + def getTwoKeys(self): + return object(), object() + + +class _TestOIBTreesBase(TypeTest): + + def _stringraises(self): + self._makeOne()[1] = 'c' + + def _floatraises(self): + self._makeOne()[1] = 1.4 + + def _noneraises(self): + self._makeOne()[1] = None + + def testEmptyFirstBucketReportedByGuido(self): + from .._compat import xrange + b = self._makeOne() + for i in xrange(29972): # reduce to 29971 and it works + b[i] = i + for i in xrange(30): # reduce to 29 and it works + del b[i] + b[i+40000] = i + + self.assertEqual(b.keys()[0], 30) + + +class TestOIBTrees(_TestOIBTreesBase, unittest.TestCase): + + def _makeOne(self): + from BTrees.OIBTree import OIBTree + return OIBTree() + + +class TestOIBTreesPy(_TestOIBTreesBase, unittest.TestCase): + + def _makeOne(self): + from BTrees.OIBTree import OIBTreePy + 
return OIBTreePy() + + +class PureOI(SetResult, unittest.TestCase): + + def union(self, *args): + from BTrees.OIBTree import union + return union(*args) + + def intersection(self, *args): + from BTrees.OIBTree import intersection + return intersection(*args) + + def difference(self, *args): + from BTrees.OIBTree import difference + return difference(*args) + + def builders(self): + from BTrees.OIBTree import OIBTree + from BTrees.OIBTree import OIBucket + from BTrees.OIBTree import OITreeSet + from BTrees.OIBTree import OISet + return OISet, OITreeSet, makeBuilder(OIBTree), makeBuilder(OIBucket) + + +class PureOIPy(SetResult, unittest.TestCase): + + def union(self, *args): + from BTrees.OIBTree import unionPy + return unionPy(*args) + + def intersection(self, *args): + from BTrees.OIBTree import intersectionPy + return intersectionPy(*args) + + def difference(self, *args): + from BTrees.OIBTree import differencePy + return differencePy(*args) + + def builders(self): + from BTrees.OIBTree import OIBTreePy + from BTrees.OIBTree import OIBucketPy + from BTrees.OIBTree import OITreeSetPy + from BTrees.OIBTree import OISetPy + return (OISetPy, OITreeSetPy, + makeBuilder(OIBTreePy), makeBuilder(OIBucketPy)) + + +class TestWeightedOI(Weighted, unittest.TestCase): + + def weightedUnion(self): + from BTrees.OIBTree import weightedUnion + return weightedUnion + + def weightedIntersection(self): + from BTrees.OIBTree import weightedIntersection + return weightedIntersection + + def union(self): + from BTrees.OIBTree import union + return union + + def intersection(self): + from BTrees.OIBTree import intersection + return intersection + + def mkbucket(self, *args): + from BTrees.OIBTree import OIBucket as mkbucket + return mkbucket(*args) + + def builders(self): + from BTrees.OIBTree import OIBTree + from BTrees.OIBTree import OIBucket + from BTrees.OIBTree import OITreeSet + from BTrees.OIBTree import OISet + return OIBucket, OIBTree, itemsToSet(OISet), itemsToSet(OITreeSet) 
+ + +class TestWeightedOIPy(Weighted, unittest.TestCase): + + def weightedUnion(self): + from BTrees.OIBTree import weightedUnionPy + return weightedUnionPy + + def weightedIntersection(self): + from BTrees.OIBTree import weightedIntersectionPy + return weightedIntersectionPy + + def union(self): + from BTrees.OIBTree import unionPy + return unionPy + + def intersection(self): + from BTrees.OIBTree import intersectionPy + return intersectionPy + + def mkbucket(self, *args): + from BTrees.OIBTree import OIBucketPy as mkbucket + return mkbucket(*args) + + def builders(self): + from BTrees.OIBTree import OIBTreePy + from BTrees.OIBTree import OIBucketPy + from BTrees.OIBTree import OITreeSetPy + from BTrees.OIBTree import OISetPy + return (OIBucketPy, OIBTreePy, + itemsToSet(OISetPy), itemsToSet(OITreeSetPy)) + + +class OIBucketConflictTests(MappingConflictTestBase, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.OIBTree import OIBucket + return OIBucket + + +class OIBucketConflictTestsPy(MappingConflictTestBase, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.OIBTree import OIBucketPy + return OIBucketPy + + +class OISetConflictTests(SetConflictTestBase, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.OIBTree import OISet + return OISet + + +class OISetConflictTestsPy(SetConflictTestBase, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.OIBTree import OISetPy + return OISetPy + + +class OIBTreeConflictTests(MappingConflictTestBase, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.OIBTree import OIBTree + return OIBTree + + +class OIBTreeConflictTestsPy(MappingConflictTestBase, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.OIBTree import OIBTreePy + return OIBTreePy + + +class OITreeSetConflictTests(SetConflictTestBase, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.OIBTree import OITreeSet + return OITreeSet + + +class 
OITreeSetConflictTestsPy(SetConflictTestBase, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.OIBTree import OITreeSetPy + return OITreeSetPy + + +class OIModuleTest(ModuleTest, unittest.TestCase): + + prefix = 'OI' + + def _getModule(self): + import BTrees + return BTrees.OIBTree + + def _getInterface(self): + import BTrees.Interfaces + return BTrees.Interfaces.IObjectIntegerBTreeModule + + def test_multiunion_not_present(self): + try: + from BTrees.OIBTree import multiunion + except ImportError: + pass + else: + self.fail("OIBTree shouldn't have multiunion") + + +def test_suite(): + return unittest.TestSuite(( + unittest.makeSuite(OIBTreeInternalKeyTest), + unittest.makeSuite(OIBTreePyInternalKeyTest), + unittest.makeSuite(OITreeSetInternalKeyTest), + unittest.makeSuite(OITreeSetPyInternalKeyTest), + unittest.makeSuite(OIBucketTest), + unittest.makeSuite(OIBucketPyTest), + unittest.makeSuite(OITreeSetTest), + unittest.makeSuite(OITreeSetPyTest), + unittest.makeSuite(OISetTest), + unittest.makeSuite(OISetPyTest), + unittest.makeSuite(OIBTreeTest), + unittest.makeSuite(OIBTreePyTest), + unittest.makeSuite(TestOIBTrees), + unittest.makeSuite(TestOIBTreesPy), + unittest.makeSuite(PureOI), + unittest.makeSuite(PureOIPy), + unittest.makeSuite(TestWeightedOI), + unittest.makeSuite(TestWeightedOIPy), + unittest.makeSuite(OIBucketConflictTests), + unittest.makeSuite(OIBucketConflictTestsPy), + unittest.makeSuite(OISetConflictTests), + unittest.makeSuite(OISetConflictTestsPy), + unittest.makeSuite(OIBTreeConflictTests), + unittest.makeSuite(OIBTreeConflictTestsPy), + unittest.makeSuite(OITreeSetConflictTests), + unittest.makeSuite(OITreeSetConflictTestsPy), + unittest.makeSuite(OIModuleTest), + )) diff --git a/thesisenv/lib/python3.6/site-packages/BTrees/tests/test_OLBTree.py b/thesisenv/lib/python3.6/site-packages/BTrees/tests/test_OLBTree.py new file mode 100644 index 0000000..1ed1aba --- /dev/null +++ 
b/thesisenv/lib/python3.6/site-packages/BTrees/tests/test_OLBTree.py @@ -0,0 +1,348 @@ +############################################################################## +# +# Copyright (c) 2001-2012 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## +import unittest + +from .common import BTreeTests +from .common import ExtendedSetTests +from .common import InternalKeysMappingTest +from .common import InternalKeysSetTest +from .common import MappingBase +from .common import MappingConflictTestBase +from .common import ModuleTest +from .common import NormalSetTests +from .common import SetConflictTestBase +from .common import SetResult +from .common import TestLongIntValues +from .common import Weighted +from .common import itemsToSet +from .common import makeBuilder +from .common import _skip_on_32_bits + + +class OLBTreeInternalKeyTest(InternalKeysMappingTest, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.OLBTree import OLBTree + return OLBTree + + +class OLBTreePyInternalKeyTest(InternalKeysMappingTest, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.OLBTree import OLBTreePy + return OLBTreePy + + +class OLTreeSetInternalKeyTest(InternalKeysSetTest, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.OLBTree import OLTreeSet + return OLTreeSet + + +class OLTreeSetPyInternalKeyTest(InternalKeysSetTest, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.OLBTree import OLTreeSetPy + return OLTreeSetPy + + +class 
OLBucketTest(MappingBase, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.OLBTree import OLBucket + return OLBucket + + +class OLBucketPyTest(MappingBase, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.OLBTree import OLBucketPy + return OLBucketPy + + +class OLTreeSetTest(NormalSetTests, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.OLBTree import OLTreeSet + return OLTreeSet + + +class OLTreeSetPyTest(NormalSetTests, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.OLBTree import OLTreeSetPy + return OLTreeSetPy + + +class OLSetTest(ExtendedSetTests, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.OLBTree import OLSet + return OLSet + + +class OLSetPyTest(ExtendedSetTests, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.OLBTree import OLSetPy + return OLSetPy + + +class OLBTreeTest(BTreeTests, TestLongIntValues, unittest.TestCase): + + def _makeOne(self): + from BTrees.OLBTree import OLBTree + return OLBTree() + + def getTwoKeys(self): + return "abc", "def" + + @_skip_on_32_bits + def test_extremes(self): + from BTrees.tests.common import SMALLEST_64_BITS + from BTrees.tests.common import SMALLEST_POSITIVE_65_BITS + from BTrees.tests.common import LARGEST_64_BITS + from BTrees.tests.common import LARGEST_NEGATIVE_65_BITS + btree = self._makeOne() + btree['ZERO'] = 0 + btree['SMALLEST_64_BITS'] = SMALLEST_64_BITS + btree['LARGEST_64_BITS'] = LARGEST_64_BITS + self.assertRaises((ValueError, OverflowError), btree.__setitem__, + 'SMALLEST_POSITIVE_65_BITS', SMALLEST_POSITIVE_65_BITS) + self.assertRaises((ValueError, OverflowError), btree.__setitem__, + 'LARGEST_NEGATIVE_65_BITS', LARGEST_NEGATIVE_65_BITS) + + +class OLBTreePyTest(BTreeTests, TestLongIntValues, unittest.TestCase): + + def _makeOne(self): + from BTrees.OLBTree import OLBTreePy + return OLBTreePy() + + def getTwoKeys(self): + return "abc", "def" + + +class PureOL(SetResult, unittest.TestCase): + + def 
union(self, *args): + from BTrees.OLBTree import union + return union(*args) + + def intersection(self, *args): + from BTrees.OLBTree import intersection + return intersection(*args) + + def difference(self, *args): + from BTrees.OLBTree import difference + return difference(*args) + + def builders(self): + from BTrees.OLBTree import OLBTree + from BTrees.OLBTree import OLBucket + from BTrees.OLBTree import OLTreeSet + from BTrees.OLBTree import OLSet + return OLSet, OLTreeSet, makeBuilder(OLBTree), makeBuilder(OLBucket) + + +class PureOLPy(SetResult, unittest.TestCase): + + def union(self, *args): + from BTrees.OLBTree import unionPy + return unionPy(*args) + + def intersection(self, *args): + from BTrees.OLBTree import intersectionPy + return intersectionPy(*args) + + def difference(self, *args): + from BTrees.OLBTree import differencePy + return differencePy(*args) + + def builders(self): + from BTrees.OLBTree import OLBTreePy + from BTrees.OLBTree import OLBucketPy + from BTrees.OLBTree import OLTreeSetPy + from BTrees.OLBTree import OLSetPy + return (OLSetPy, OLTreeSetPy, + makeBuilder(OLBTreePy), makeBuilder(OLBucketPy)) + + +class TestWeightedOL(Weighted, unittest.TestCase): + + def weightedUnion(self): + from BTrees.OLBTree import weightedUnion + return weightedUnion + + def weightedIntersection(self): + from BTrees.OLBTree import weightedIntersection + return weightedIntersection + + def union(self): + from BTrees.OLBTree import union + return union + + def intersection(self): + from BTrees.OLBTree import intersection + return intersection + + def mkbucket(self, *args): + from BTrees.OLBTree import OLBucket as mkbucket + return mkbucket(*args) + + def builders(self): + from BTrees.OLBTree import OLBTree + from BTrees.OLBTree import OLBucket + from BTrees.OLBTree import OLTreeSet + from BTrees.OLBTree import OLSet + return OLBucket, OLBTree, itemsToSet(OLSet), itemsToSet(OLTreeSet) + + +class TestWeightedOLPy(Weighted, unittest.TestCase): + + def 
weightedUnion(self): + from BTrees.OLBTree import weightedUnionPy + return weightedUnionPy + + def weightedIntersection(self): + from BTrees.OLBTree import weightedIntersectionPy + return weightedIntersectionPy + + def union(self): + from BTrees.OLBTree import unionPy + return unionPy + + def intersection(self): + from BTrees.OLBTree import intersectionPy + return intersectionPy + + def mkbucket(self, *args): + from BTrees.OLBTree import OLBucketPy as mkbucket + return mkbucket(*args) + + def builders(self): + from BTrees.OLBTree import OLBTreePy + from BTrees.OLBTree import OLBucketPy + from BTrees.OLBTree import OLTreeSetPy + from BTrees.OLBTree import OLSetPy + return (OLBucketPy, OLBTreePy, + itemsToSet(OLSetPy), itemsToSet(OLTreeSetPy)) + + +class OLBucketConflictTests(MappingConflictTestBase, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.OLBTree import OLBucket + return OLBucket + + +class OLBucketPyConflictTests(MappingConflictTestBase, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.OLBTree import OLBucketPy + return OLBucketPy + + +class OLSetConflictTests(SetConflictTestBase, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.OLBTree import OLSet + return OLSet + + +class OLSetPyConflictTests(SetConflictTestBase, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.OLBTree import OLSetPy + return OLSetPy + + +class OLBTreeConflictTests(MappingConflictTestBase, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.OLBTree import OLBTree + return OLBTree + + +class OLBTreePyConflictTests(MappingConflictTestBase, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.OLBTree import OLBTreePy + return OLBTreePy + + +class OLTreeSetConflictTests(SetConflictTestBase, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.OLBTree import OLTreeSet + return OLTreeSet + + +class OLTreeSetPyConflictTests(SetConflictTestBase, unittest.TestCase): + + def _getTargetClass(self): 
+ from BTrees.OLBTree import OLTreeSetPy + return OLTreeSetPy + + +class OLModuleTest(ModuleTest, unittest.TestCase): + + prefix = 'OL' + + def _getModule(self): + import BTrees + return BTrees.OLBTree + + def _getInterface(self): + import BTrees.Interfaces + return BTrees.Interfaces.IObjectIntegerBTreeModule + + def test_multiunion_not_present(self): + try: + from BTrees.OLBTree import multiunion + except ImportError: + pass + else: + self.fail("OLBTree shouldn't have multiunion") + + +def test_suite(): + return unittest.TestSuite(( + unittest.makeSuite(OLBTreeInternalKeyTest), + unittest.makeSuite(OLBTreePyInternalKeyTest), + unittest.makeSuite(OLTreeSetInternalKeyTest), + unittest.makeSuite(OLTreeSetPyInternalKeyTest), + unittest.makeSuite(OLBucketTest), + unittest.makeSuite(OLBucketPyTest), + unittest.makeSuite(OLTreeSetTest), + unittest.makeSuite(OLTreeSetPyTest), + unittest.makeSuite(OLSetTest), + unittest.makeSuite(OLSetPyTest), + unittest.makeSuite(OLBTreeTest), + unittest.makeSuite(OLBTreePyTest), + unittest.makeSuite(PureOL), + unittest.makeSuite(PureOLPy), + unittest.makeSuite(TestWeightedOL), + unittest.makeSuite(TestWeightedOLPy), + unittest.makeSuite(OLBucketConflictTests), + unittest.makeSuite(OLBucketPyConflictTests), + unittest.makeSuite(OLSetConflictTests), + unittest.makeSuite(OLSetPyConflictTests), + unittest.makeSuite(OLBTreeConflictTests), + unittest.makeSuite(OLBTreePyConflictTests), + unittest.makeSuite(OLTreeSetConflictTests), + unittest.makeSuite(OLTreeSetPyConflictTests), + unittest.makeSuite(OLModuleTest), + )) diff --git a/thesisenv/lib/python3.6/site-packages/BTrees/tests/test_OOBTree.py b/thesisenv/lib/python3.6/site-packages/BTrees/tests/test_OOBTree.py new file mode 100644 index 0000000..6a0fe63 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/BTrees/tests/test_OOBTree.py @@ -0,0 +1,419 @@ +############################################################################## +# +# Copyright (c) 2001-2012 Zope Foundation and 
Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## +import unittest + +from .common import _skip_under_Py3k +from .common import BTreeTests +from .common import ExtendedSetTests +from .common import InternalKeysMappingTest +from .common import InternalKeysSetTest +from .common import MappingBase +from .common import MappingConflictTestBase +from .common import ModuleTest +from .common import NormalSetTests +from .common import SetResult +from .common import SetConflictTestBase +from .common import makeBuilder + + + +class OOBTreeInternalKeyTest(InternalKeysMappingTest, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.OOBTree import OOBTreePy + return OOBTreePy + +class OOBTreePyInternalKeyTest(InternalKeysMappingTest, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.OOBTree import OOBTree + return OOBTree + + +class OOTreeSetInternalKeyTest(InternalKeysSetTest, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.OOBTree import OOTreeSet + return OOTreeSet + + +class OOTreeSetPyInternalKeyTest(InternalKeysSetTest, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.OOBTree import OOTreeSetPy + return OOTreeSetPy + + +class OOBucketTest(MappingBase, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.OOBTree import OOBucket + return OOBucket + + +class OOBucketPyTest(MappingBase, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.OOBTree import OOBucketPy + return OOBucketPy + + +class OOTreeSetTest(NormalSetTests, 
unittest.TestCase): + + def _getTargetClass(self): + from BTrees.OOBTree import OOTreeSet + return OOTreeSet + + +class OOTreeSetPyTest(NormalSetTests, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.OOBTree import OOTreeSetPy + return OOTreeSetPy + + +class OOSetTest(ExtendedSetTests, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.OOBTree import OOSet + return OOSet + + +class OOSetPyTest(ExtendedSetTests, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.OOBTree import OOSetPy + return OOSetPy + + + +class OOBTreeTest(BTreeTests, unittest.TestCase): + + def _makeOne(self, *args): + from BTrees.OOBTree import OOBTree + return OOBTree(*args) + + def test_byValue(self): + ITEMS = [(y, x) for x, y in enumerate('abcdefghijklmnopqrstuvwxyz')] + tree = self._makeOne(ITEMS) + self.assertEqual(list(tree.byValue(22)), + [(y, x) for x, y in reversed(ITEMS[22:])]) + + def testRejectDefaultComparisonOnSet(self): + # Check that passing int keys w default comparison fails. + # Only applies to new-style class instances. Old-style + # instances are too hard to introspect. + + # This is white box because we know that the check is being + # used in a function that's used in lots of places. + # Otherwise, there are many permutations that would have to be + # checked. 
+ from .._compat import PY2 + t = self._makeOne() + + class C(object): + pass + + self.assertRaises(TypeError, lambda : t.__setitem__(C(), 1)) + + with self.assertRaises(TypeError) as raising: + t[C()] = 1 + + self.assertEqual(raising.exception.args[0], "Object has default comparison") + + if PY2: # we only check for __cmp__ on Python2 + + class With___cmp__(object): + def __cmp__(*args): + return 1 + c = With___cmp__() + t[c] = 1 + + t.clear() + + class With___lt__(object): + def __lt__(*args): + return 1 + + c = With___lt__() + t[c] = 1 + + t.clear() + + def testAcceptDefaultComparisonOnGet(self): + # Issue #42 + t = self._makeOne() + class C(object): + pass + + self.assertEqual(t.get(C(), 42), 42) + self.assertRaises(KeyError, t.__getitem__, C()) + self.assertFalse(C() in t) + + def test_None_is_smallest(self): + t = self._makeOne() + for i in range(999): # Make sure we multiple buckets + t[i] = i*i + t[None] = -1 + for i in range(-99,0): # Make sure we multiple buckets + t[i] = i*i + self.assertEqual(list(t), [None] + list(range(-99, 999))) + self.assertEqual(list(t.values()), + [-1] + [i*i for i in range(-99, 999)]) + self.assertEqual(t[2], 4) + self.assertEqual(t[-2], 4) + self.assertEqual(t[None], -1) + t[None] = -2 + self.assertEqual(t[None], -2) + t2 = t.__class__(t) + del t[None] + self.assertEqual(list(t), list(range(-99, 999))) + + if 'Py' in self.__class__.__name__: + return + from BTrees.OOBTree import difference, union, intersection + self.assertEqual(list(difference(t2, t).items()), [(None, -2)]) + self.assertEqual(list(union(t, t2)), list(t2)) + self.assertEqual(list(intersection(t, t2)), list(t)) + + @_skip_under_Py3k + def testDeleteNoneKey(self): + # Check that a None key can be deleted in Python 2. + # This doesn't work on Python 3 because None is unorderable, + # so the tree can't be searched. But None also can't be inserted, + # and we don't support migrating Python 2 databases to Python 3. 
+ t = self._makeOne() + bucket_state = ((None, 42),) + tree_state = ((bucket_state,),) + t.__setstate__(tree_state) + + self.assertEqual(t[None], 42) + del t[None] + + def testUnpickleNoneKey(self): + # All versions (py2 and py3, C and Python) can unpickle + # data that looks like this: {None: 42}, even though None + # is unorderable.. + # This pickle was captured in BTree/ZODB3 3.10.7 + data = b'ccopy_reg\n__newobj__\np0\n(cBTrees.OOBTree\nOOBTree\np1\ntp2\nRp3\n((((NI42\ntp4\ntp5\ntp6\ntp7\nb.' + + import pickle + t = pickle.loads(data) + keys = list(t) + self.assertEqual([None], keys) + + def testIdentityTrumpsBrokenComparison(self): + # Identical keys always match, even if their comparison is + # broken. See https://github.com/zopefoundation/BTrees/issues/50 + from functools import total_ordering + + @total_ordering + class Bad(object): + def __eq__(self, other): + return False + + def __cmp__(self, other): + return 1 + + def __lt__(self, other): + return False + + t = self._makeOne() + bad_key = Bad() + t[bad_key] = 42 + + self.assertIn(bad_key, t) + self.assertEqual(list(t), [bad_key]) + + del t[bad_key] + self.assertNotIn(bad_key, t) + self.assertEqual(list(t), []) + + +class OOBTreePyTest(OOBTreeTest): +# +# Right now, we can't match the C extension's test / prohibition of the +# default 'object' comparison semantics. 
+#class OOBTreePyTest(BTreeTests, unittest.TestCase): + + def _makeOne(self, *args): + from BTrees.OOBTree import OOBTreePy + return OOBTreePy(*args) + + + +class PureOO(SetResult, unittest.TestCase): + + def union(self, *args): + from BTrees.OOBTree import union + return union(*args) + + def intersection(self, *args): + from BTrees.OOBTree import intersection + return intersection(*args) + + def difference(self, *args): + from BTrees.OOBTree import difference + return difference(*args) + + def builders(self): + from BTrees.OOBTree import OOBTree + from BTrees.OOBTree import OOBucket + from BTrees.OOBTree import OOTreeSet + from BTrees.OOBTree import OOSet + return OOSet, OOTreeSet, makeBuilder(OOBTree), makeBuilder(OOBucket) + + +class PureOOPy(SetResult, unittest.TestCase): + + def union(self, *args): + from BTrees.OOBTree import unionPy + return unionPy(*args) + + def intersection(self, *args): + from BTrees.OOBTree import intersectionPy + return intersectionPy(*args) + + def difference(self, *args): + from BTrees.OOBTree import differencePy + return differencePy(*args) + + def builders(self): + from BTrees.OOBTree import OOBTreePy + from BTrees.OOBTree import OOBucketPy + from BTrees.OOBTree import OOTreeSetPy + from BTrees.OOBTree import OOSetPy + return (OOSetPy, OOTreeSetPy, + makeBuilder(OOBTreePy), makeBuilder(OOBucketPy)) + + +class OOBucketConflictTests(MappingConflictTestBase, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.OOBTree import OOBucket + return OOBucket + + +class OOBucketPyConflictTests(MappingConflictTestBase, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.OOBTree import OOBucketPy + return OOBucketPy + + +class OOSetConflictTests(SetConflictTestBase, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.OOBTree import OOSet + return OOSet + + +class OOSetPyConflictTests(SetConflictTestBase, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.OOBTree import OOSetPy + return OOSetPy 
+ + +class OOBTreeConflictTests(MappingConflictTestBase, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.OOBTree import OOBTree + return OOBTree + + +class OOBTreePyConflictTests(MappingConflictTestBase, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.OOBTree import OOBTreePy + return OOBTreePy + + +class OOTreeSetConflictTests(SetConflictTestBase, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.OOBTree import OOTreeSet + return OOTreeSet + + +class OOTreeSetPyConflictTests(SetConflictTestBase, unittest.TestCase): + + def _getTargetClass(self): + from BTrees.OOBTree import OOTreeSetPy + return OOTreeSetPy + + +class OOModuleTest(ModuleTest, unittest.TestCase): + + prefix = 'OO' + + def _getModule(self): + import BTrees + return BTrees.OOBTree + + def _getInterface(self): + import BTrees.Interfaces + return BTrees.Interfaces.IObjectObjectBTreeModule + + def test_weightedUnion_not_present(self): + try: + from BTrees.OOBTree import weightedUnion + except ImportError: + pass + else: + self.fail("OOBTree shouldn't have weightedUnion") + + def test_weightedIntersection_not_present(self): + try: + from BTrees.OOBTree import weightedIntersection + except ImportError: + pass + else: + self.fail("OOBTree shouldn't have weightedIntersection") + + def test_multiunion_not_present(self): + try: + from BTrees.OOBTree import multiunion + except ImportError: + pass + else: + self.fail("OOBTree shouldn't have multiunion") + + +def test_suite(): + return unittest.TestSuite(( + unittest.makeSuite(OOBTreeInternalKeyTest), + unittest.makeSuite(OOBTreePyInternalKeyTest), + unittest.makeSuite(OOTreeSetInternalKeyTest), + unittest.makeSuite(OOTreeSetPyInternalKeyTest), + unittest.makeSuite(OOBucketTest), + unittest.makeSuite(OOBucketPyTest), + unittest.makeSuite(OOTreeSetTest), + unittest.makeSuite(OOTreeSetPyTest), + unittest.makeSuite(OOSetTest), + unittest.makeSuite(OOSetPyTest), + unittest.makeSuite(OOBTreeTest), + 
unittest.makeSuite(OOBTreePyTest), + unittest.makeSuite(PureOO), + unittest.makeSuite(PureOOPy), + unittest.makeSuite(OOBucketConflictTests), + unittest.makeSuite(OOBucketPyConflictTests), + unittest.makeSuite(OOSetConflictTests), + unittest.makeSuite(OOSetPyConflictTests), + unittest.makeSuite(OOBTreeConflictTests), + unittest.makeSuite(OOBTreePyConflictTests), + unittest.makeSuite(OOTreeSetConflictTests), + unittest.makeSuite(OOTreeSetPyConflictTests), + unittest.makeSuite(OOModuleTest), + )) diff --git a/thesisenv/lib/python3.6/site-packages/BTrees/tests/test__base.py b/thesisenv/lib/python3.6/site-packages/BTrees/tests/test__base.py new file mode 100644 index 0000000..5dc7014 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/BTrees/tests/test__base.py @@ -0,0 +1,3086 @@ +############################################################################## +# +# Copyright 2012 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. 
+# +############################################################################## +import unittest + + +def _assertRaises(self, e_type, checked, *args, **kw): + try: + checked(*args, **kw) + except e_type as e: + return e + self.fail("Didn't raise: %s" % e_type.__name__) + + +class Test_Base(unittest.TestCase): + + def _getTargetClass(self): + from .._base import _Base + return _Base + + def _makeOne(self, items=None): + class _Test(self._getTargetClass()): + max_leaf_size = 10 + max_internal_size = 15 + def clear(self): + self._data = {} + def update(self, d): + self._data.update(d) + return _Test(items) + + def test_ctor_wo_items(self): + base = self._makeOne() + self.assertEqual(base._data, {}) + + def test_ctor_w_items(self): + base = self._makeOne({'a': 'b'}) + self.assertEqual(base._data, {'a': 'b'}) + + +class Test_BucketBase(unittest.TestCase): + + def _getTargetClass(self): + from .._base import _BucketBase + return _BucketBase + + def _makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + def test_ctor_defaults(self): + bucket = self._makeOne() + self.assertEqual(bucket._keys, []) + self.assertEqual(bucket._next, None) + self.assertEqual(len(bucket), 0) + self.assertEqual(bucket.size, 0) + + def test__deleteNextBucket_none(self): + bucket = self._makeOne() + bucket._deleteNextBucket() # no raise + self.assertTrue(bucket._next is None) + + def test__deleteNextBucket_one(self): + bucket1 = self._makeOne() + bucket2 = bucket1._next = self._makeOne() + bucket1._deleteNextBucket() # no raise + self.assertTrue(bucket1._next is None) + + def test__deleteNextBucket_two(self): + bucket1 = self._makeOne() + bucket2 = bucket1._next = self._makeOne() + bucket3 = bucket2._next = self._makeOne() + bucket1._deleteNextBucket() # no raise + self.assertTrue(bucket1._next is bucket3) + + def test__search_empty(self): + bucket = self._makeOne() + self.assertEqual(bucket._search('nonesuch'), -1) + + def test__search_nonempty_miss(self): + bucket = 
self._makeOne() + bucket._keys = ['alpha', 'bravo', 'charlie', 'delta', 'echo'] + self.assertEqual(bucket._search('candy'), -3) + + def test__search_nonempty_hit(self): + bucket = self._makeOne() + bucket._keys = ['alpha', 'bravo', 'charlie', 'delta', 'echo'] + self.assertEqual(bucket._search('charlie'), 2) + + def test_minKey_empty(self): + bucket = self._makeOne() + self.assertRaises(IndexError, bucket.minKey) + + def test_minKey_no_bound(self): + bucket = self._makeOne() + bucket._keys = ['alpha', 'bravo', 'charlie', 'delta', 'echo'] + self.assertEqual(bucket.minKey(), 'alpha') + + def test_minKey_w_bound_hit(self): + bucket = self._makeOne() + bucket._to_key = lambda x: x + bucket._keys = ['alpha', 'bravo', 'charlie', 'delta', 'echo'] + self.assertEqual(bucket.minKey('bravo'), 'bravo') + + def test_minKey_w_bound_miss(self): + bucket = self._makeOne() + bucket._to_key = lambda x: x + bucket._keys = ['alpha', 'bravo', 'charlie', 'delta', 'echo'] + self.assertEqual(bucket.minKey('candy'), 'charlie') + + def test_minKey_w_bound_fail(self): + bucket = self._makeOne() + bucket._to_key = lambda x: x + bucket._keys = ['alpha', 'bravo', 'charlie', 'delta', 'echo'] + self.assertRaises(ValueError, bucket.minKey, 'foxtrot') + + def test_maxKey_empty(self): + bucket = self._makeOne() + self.assertRaises(IndexError, bucket.maxKey) + + def test_maxKey_no_bound(self): + bucket = self._makeOne() + bucket._keys = ['alpha', 'bravo', 'charlie', 'delta', 'echo'] + self.assertEqual(bucket.maxKey(), 'echo') + + def test_maxKey_w_bound_hit(self): + bucket = self._makeOne() + bucket._to_key = lambda x: x + bucket._keys = ['alpha', 'bravo', 'charlie', 'delta', 'echo'] + self.assertEqual(bucket.maxKey('bravo'), 'bravo') + + def test_maxKey_w_bound_miss(self): + bucket = self._makeOne() + bucket._to_key = lambda x: x + bucket._keys = ['alpha', 'bravo', 'charlie', 'delta', 'echo'] + self.assertEqual(bucket.maxKey('candy'), 'bravo') + + def test_maxKey_w_bound_fail(self): + bucket = 
self._makeOne() + bucket._to_key = lambda x: x + bucket._keys = ['alpha', 'bravo', 'charlie', 'delta', 'echo'] + self.assertRaises(ValueError, bucket.maxKey, 'abacus') + + def test__range_defaults_empty(self): + bucket = self._makeOne() + self.assertEqual(bucket._range(), (0, 0)) + + def test__range_defaults_filled(self): + bucket = self._makeOne() + bucket._keys = ['alpha', 'bravo', 'charlie', 'delta', 'echo'] + self.assertEqual(bucket._range(), (0, 5)) + + def test__range_defaults_exclude_min(self): + bucket = self._makeOne() + bucket._keys = ['alpha', 'bravo', 'charlie', 'delta', 'echo'] + self.assertEqual(bucket._range(excludemin=True), (1, 5)) + + def test__range_defaults_exclude_max(self): + bucket = self._makeOne() + bucket._keys = ['alpha', 'bravo', 'charlie', 'delta', 'echo'] + self.assertEqual(bucket._range(excludemax=True), (0, 4)) + + def test__range_w_min_hit(self): + bucket = self._makeOne() + bucket._to_key = lambda x: x + bucket._keys = ['alpha', 'bravo', 'charlie', 'delta', 'echo'] + self.assertEqual(bucket._range(min='bravo'), (1, 5)) + + def test__range_w_min_miss(self): + bucket = self._makeOne() + bucket._to_key = lambda x: x + bucket._keys = ['alpha', 'bravo', 'charlie', 'delta', 'echo'] + self.assertEqual(bucket._range(min='candy'), (2, 5)) + + def test__range_w_min_hit_w_exclude_min(self): + bucket = self._makeOne() + bucket._to_key = lambda x: x + bucket._keys = ['alpha', 'bravo', 'charlie', 'delta', 'echo'] + self.assertEqual(bucket._range(min='bravo', excludemin=True), (2, 5)) + + def test__range_w_min_miss_w_exclude_min(self): + bucket = self._makeOne() + bucket._to_key = lambda x: x + bucket._keys = ['alpha', 'bravo', 'charlie', 'delta', 'echo'] + # 'excludemin' doesn't fire on miss + self.assertEqual(bucket._range(min='candy', excludemin=True), (2, 5)) + + def test__range_w_max_hit(self): + bucket = self._makeOne() + bucket._to_key = lambda x: x + bucket._keys = ['alpha', 'bravo', 'charlie', 'delta', 'echo'] + 
self.assertEqual(bucket._range(max='delta'), (0, 4)) + + def test__range_w_max_miss(self): + bucket = self._makeOne() + bucket._to_key = lambda x: x + bucket._keys = ['alpha', 'bravo', 'charlie', 'delta', 'echo'] + self.assertEqual(bucket._range(max='dandy'), (0, 3)) + + def test__range_w_max_hit_w_exclude_max(self): + bucket = self._makeOne() + bucket._to_key = lambda x: x + bucket._keys = ['alpha', 'bravo', 'charlie', 'delta', 'echo'] + self.assertEqual(bucket._range(max='delta', excludemax=True), (0, 3)) + + def test__range_w_max_miss_w_exclude_max(self): + bucket = self._makeOne() + bucket._to_key = lambda x: x + bucket._keys = ['alpha', 'bravo', 'charlie', 'delta', 'echo'] + # 'excludemax' doesn't fire on miss + self.assertEqual(bucket._range(max='dandy', excludemax=True), (0, 3)) + + def test_keys_defaults_empty(self): + bucket = self._makeOne() + self.assertEqual(bucket.keys(), []) + + def test_keys_defaults_filled(self): + bucket = self._makeOne() + KEYS = bucket._keys = ['alpha', 'bravo', 'charlie', 'delta', 'echo'] + self.assertEqual(bucket.keys(), KEYS[0: 5]) + + def test_keys_defaults_exclude_min(self): + bucket = self._makeOne() + KEYS = bucket._keys = ['alpha', 'bravo', 'charlie', 'delta', 'echo'] + self.assertEqual(bucket.keys(excludemin=True), KEYS[1: 5]) + + def test_keys_defaults_exclude_max(self): + bucket = self._makeOne() + KEYS = bucket._keys = ['alpha', 'bravo', 'charlie', 'delta', 'echo'] + self.assertEqual(bucket.keys(excludemax=True), KEYS[0: 4]) + + def test_keys_w_min_hit(self): + bucket = self._makeOne() + bucket._to_key = lambda x: x + KEYS = bucket._keys = ['alpha', 'bravo', 'charlie', 'delta', 'echo'] + self.assertEqual(bucket.keys(min='bravo'), KEYS[1: 5]) + + def test_keys_w_min_miss(self): + bucket = self._makeOne() + bucket._to_key = lambda x: x + KEYS = bucket._keys = ['alpha', 'bravo', 'charlie', 'delta', 'echo'] + self.assertEqual(bucket.keys(min='candy'), KEYS[2: 5]) + + def test_keys_w_min_hit_w_exclude_min(self): + bucket = 
self._makeOne() + bucket._to_key = lambda x: x + KEYS = bucket._keys = ['alpha', 'bravo', 'charlie', 'delta', 'echo'] + self.assertEqual(bucket.keys(min='bravo', excludemin=True), KEYS[2: 5]) + + def test_keys_w_min_miss_w_exclude_min(self): + bucket = self._makeOne() + bucket._to_key = lambda x: x + KEYS = bucket._keys = ['alpha', 'bravo', 'charlie', 'delta', 'echo'] + # 'excludemin' doesn't fire on miss + self.assertEqual(bucket.keys(min='candy', excludemin=True), KEYS[2: 5]) + + def test_keys_w_max_hit(self): + bucket = self._makeOne() + bucket._to_key = lambda x: x + KEYS = bucket._keys = ['alpha', 'bravo', 'charlie', 'delta', 'echo'] + self.assertEqual(bucket.keys(max='delta'), KEYS[0: 4]) + + def test_keys_w_max_miss(self): + bucket = self._makeOne() + bucket._to_key = lambda x: x + KEYS = bucket._keys = ['alpha', 'bravo', 'charlie', 'delta', 'echo'] + self.assertEqual(bucket.keys(max='dandy'), KEYS[0: 3]) + + def test_keys_w_max_hit_w_exclude_max(self): + bucket = self._makeOne() + bucket._to_key = lambda x: x + KEYS = bucket._keys = ['alpha', 'bravo', 'charlie', 'delta', 'echo'] + self.assertEqual(bucket.keys(max='delta', excludemax=True), KEYS[0: 3]) + + def test_keys_w_max_miss_w_exclude_max(self): + bucket = self._makeOne() + bucket._to_key = lambda x: x + KEYS = bucket._keys = ['alpha', 'bravo', 'charlie', 'delta', 'echo'] + # 'excludemax' doesn't fire on miss + self.assertEqual(bucket.keys(max='dandy', excludemax=True), KEYS[0: 3]) + + def test_iterkeys_defaults_empty(self): + bucket = self._makeOne() + self.assertEqual(list(bucket.iterkeys()), []) + + def test_iterkeys_defaults_filled(self): + bucket = self._makeOne() + KEYS = bucket._keys = ['alpha', 'bravo', 'charlie', 'delta', 'echo'] + self.assertEqual(list(bucket.iterkeys()), KEYS[0: 5]) + + def test_iterkeys_defaults_exclude_min(self): + bucket = self._makeOne() + KEYS = bucket._keys = ['alpha', 'bravo', 'charlie', 'delta', 'echo'] + self.assertEqual(list(bucket.iterkeys(excludemin=True)), 
KEYS[1: 5]) + + def test_iterkeys_defaults_exclude_max(self): + bucket = self._makeOne() + KEYS = bucket._keys = ['alpha', 'bravo', 'charlie', 'delta', 'echo'] + self.assertEqual(list(bucket.iterkeys(excludemax=True)), KEYS[0: 4]) + + def test_iterkeys_w_min_hit(self): + bucket = self._makeOne() + bucket._to_key = lambda x: x + KEYS = bucket._keys = ['alpha', 'bravo', 'charlie', 'delta', 'echo'] + self.assertEqual(list(bucket.iterkeys(min='bravo')), KEYS[1: 5]) + + def test_iterkeys_w_min_miss(self): + bucket = self._makeOne() + bucket._to_key = lambda x: x + KEYS = bucket._keys = ['alpha', 'bravo', 'charlie', 'delta', 'echo'] + self.assertEqual(list(bucket.iterkeys(min='candy')), KEYS[2: 5]) + + def test_iterkeys_w_min_hit_w_exclude_min(self): + bucket = self._makeOne() + bucket._to_key = lambda x: x + KEYS = bucket._keys = ['alpha', 'bravo', 'charlie', 'delta', 'echo'] + self.assertEqual(list(bucket.iterkeys(min='bravo', excludemin=True)), + KEYS[2: 5]) + + def test_iterkeys_w_min_miss_w_exclude_min(self): + bucket = self._makeOne() + bucket._to_key = lambda x: x + KEYS = bucket._keys = ['alpha', 'bravo', 'charlie', 'delta', 'echo'] + # 'excludemin' doesn't fire on miss + self.assertEqual(list(bucket.iterkeys(min='candy', excludemin=True)), + KEYS[2: 5]) + + def test_iterkeys_w_max_hit(self): + bucket = self._makeOne() + bucket._to_key = lambda x: x + KEYS = bucket._keys = ['alpha', 'bravo', 'charlie', 'delta', 'echo'] + self.assertEqual(list(bucket.iterkeys(max='delta')), KEYS[0: 4]) + + def test_iterkeys_w_max_miss(self): + bucket = self._makeOne() + bucket._to_key = lambda x: x + KEYS = bucket._keys = ['alpha', 'bravo', 'charlie', 'delta', 'echo'] + self.assertEqual(list(bucket.iterkeys(max='dandy')), KEYS[0: 3]) + + def test_iterkeys_w_max_hit_w_exclude_max(self): + bucket = self._makeOne() + bucket._to_key = lambda x: x + KEYS = bucket._keys = ['alpha', 'bravo', 'charlie', 'delta', 'echo'] + self.assertEqual(list(bucket.keys(max='delta', excludemax=True)), + 
KEYS[0: 3]) + + def test_iterkeys_w_max_miss_w_exclude_max(self): + bucket = self._makeOne() + bucket._to_key = lambda x: x + KEYS = bucket._keys = ['alpha', 'bravo', 'charlie', 'delta', 'echo'] + # 'excludemax' doesn't fire on miss + self.assertEqual(list(bucket.iterkeys(max='dandy', excludemax=True)), + KEYS[0: 3]) + + def test___iter___empty(self): + bucket = self._makeOne() + self.assertEqual([x for x in bucket], []) + + def test___iter___filled(self): + bucket = self._makeOne() + KEYS = bucket._keys = ['alpha', 'bravo', 'charlie', 'delta', 'echo'] + self.assertEqual([x for x in bucket], KEYS[0: 5]) + + def test___contains___empty(self): + bucket = self._makeOne() + bucket._to_key = lambda x: x + self.assertFalse('nonesuch' in bucket) + + def test___contains___filled_miss(self): + bucket = self._makeOne() + bucket._to_key = lambda x: x + KEYS = bucket._keys = ['alpha', 'bravo', 'charlie', 'delta', 'echo'] + self.assertFalse('nonesuch' in bucket) + + def test___contains___filled_hit(self): + bucket = self._makeOne() + bucket._to_key = lambda x: x + KEYS = bucket._keys = ['alpha', 'bravo', 'charlie', 'delta', 'echo'] + for key in KEYS: + self.assertTrue(key in bucket) + + +class Test_SetIteration(unittest.TestCase): + + assertRaises = _assertRaises + + def _getTargetClass(self): + from .._base import _SetIteration + return _SetIteration + + def _makeOne(self, to_iterate, useValues=False, default=None): + return self._getTargetClass()(to_iterate, useValues, default) + + def test_ctor_w_None(self): + from .._base import _marker + si = self._makeOne(None) + self.assertEqual(si.useValues, False) + self.assertTrue(si.key is _marker) + self.assertEqual(si.value, None) + self.assertEqual(si.active, False) + self.assertEqual(si.position, -1) + + def test_ctor_w_non_empty_list(self): + si = self._makeOne(['a', 'b', 'c']) + self.assertEqual(si.useValues, False) + self.assertEqual(si.key, 'a') + self.assertEqual(si.value, None) + self.assertEqual(si.active, True) + 
self.assertEqual(si.position, 1) + + +class BucketTests(unittest.TestCase): + + assertRaises = _assertRaises + + def _getTargetClass(self): + from .._base import Bucket + return Bucket + + def _makeOne(self): + class _Bucket(self._getTargetClass()): + def _to_key(self, x): + return x + def _to_value(self, x): + return x + return _Bucket() + + def test_ctor_defaults(self): + bucket = self._makeOne() + self.assertEqual(bucket._keys, []) + self.assertEqual(bucket._values, []) + + def test_setdefault_miss(self): + bucket = self._makeOne() + self.assertEqual(bucket.setdefault('a', 'b'), 'b') + self.assertEqual(bucket._keys, ['a']) + self.assertEqual(bucket._values, ['b']) + + def test_setdefault_hit(self): + bucket = self._makeOne() + bucket._keys.append('a') + bucket._values.append('b') + self.assertEqual(bucket.setdefault('a', 'b'), 'b') + self.assertEqual(bucket._keys, ['a']) + self.assertEqual(bucket._values, ['b']) + + def test_pop_miss_no_default(self): + bucket = self._makeOne() + self.assertRaises(KeyError, bucket.pop, 'nonesuch') + + def test_pop_miss_w_default(self): + bucket = self._makeOne() + self.assertEqual(bucket.pop('nonesuch', 'b'), 'b') + + def test_pop_hit(self): + bucket = self._makeOne() + bucket._keys.append('a') + bucket._values.append('b') + self.assertEqual(bucket.pop('a'), 'b') + self.assertEqual(bucket._keys, []) + self.assertEqual(bucket._values, []) + + def test_update_value_w_iteritems(self): + bucket = self._makeOne() + bucket.update({'a': 'b'}) + self.assertEqual(bucket._keys, ['a']) + self.assertEqual(bucket._values, ['b']) + + def test_update_value_w_items(self): + bucket = self._makeOne() + class Foo(object): + def items(self): + return [('a', 'b')] + bucket.update(Foo()) + self.assertEqual(bucket._keys, ['a']) + self.assertEqual(bucket._values, ['b']) + + def test_update_value_w_invalid_items(self): + bucket = self._makeOne() + class Foo(object): + def items(self): + return ('a', 'b', 'c') + self.assertRaises(TypeError, 
bucket.update, Foo()) + + def test_update_sequence(self): + bucket = self._makeOne() + bucket.update([('a', 'b')]) + self.assertEqual(bucket._keys, ['a']) + self.assertEqual(bucket._values, ['b']) + + def test_update_replacing(self): + bucket = self._makeOne() + bucket['a'] = 'b' + bucket.update([('a', 'c')]) + self.assertEqual(bucket['a'], 'c') + + def test___setitem___incomparable(self): + bucket = self._makeOne() + def _should_error(): + bucket[object()] = 'b' + self.assertRaises(TypeError, _should_error) + + def test___setitem___comparable(self): + bucket = self._makeOne() + bucket['a'] = 'b' + self.assertEqual(bucket['a'], 'b') + + def test___setitem___replace(self): + bucket = self._makeOne() + bucket['a'] = 'b' + bucket['a'] = 'c' + self.assertEqual(bucket['a'], 'c') + + def test___delitem___miss(self): + bucket = self._makeOne() + def _should_error(): + del bucket['nonesuch'] + self.assertRaises(KeyError, _should_error) + + def test___delitem___hit(self): + bucket = self._makeOne() + bucket._keys.append('a') + bucket._values.append('b') + del bucket['a'] + self.assertEqual(bucket._keys, []) + self.assertEqual(bucket._values, []) + + def test_clear_filled(self): + bucket = self._makeOne() + bucket['a'] = 'b' + bucket['c'] = 'd' + bucket.clear() + self.assertEqual(len(bucket._keys), 0) + self.assertEqual(len(bucket._values), 0) + + def test_clear_empty(self): + bucket = self._makeOne() + bucket.clear() + self.assertEqual(len(bucket._keys), 0) + self.assertEqual(len(bucket._values), 0) + + def test_get_miss_no_default(self): + bucket = self._makeOne() + self.assertEqual(bucket.get('nonesuch'), None) + + def test_get_miss_w_default(self): + bucket = self._makeOne() + self.assertEqual(bucket.get('nonesuch', 'b'), 'b') + + def test_get_hit(self): + bucket = self._makeOne() + bucket._keys.append('a') + bucket._values.append('b') + self.assertEqual(bucket.get('a'), 'b') + + def test___getitem___miss(self): + bucket = self._makeOne() + def _should_error(): + return 
bucket['nonesuch'] + self.assertRaises(KeyError, _should_error) + + def test___getitem___hit(self): + bucket = self._makeOne() + bucket._keys.append('a') + bucket._values.append('b') + self.assertEqual(bucket['a'], 'b') + + def test__split_empty(self): + bucket = self._makeOne() + next_b = bucket._next = self._makeOne() + new_b = bucket._split() + self.assertEqual(len(bucket._keys), 0) + self.assertEqual(len(bucket._values), 0) + self.assertEqual(len(new_b._keys), 0) + self.assertEqual(len(new_b._values), 0) + self.assertTrue(bucket._next is new_b) + self.assertTrue(new_b._next is next_b) + + def test__split_filled_default_index(self): + bucket = self._makeOne() + next_b = bucket._next = self._makeOne() + for i, c in enumerate('abcdef'): + bucket[c] = i + new_b = bucket._split() + self.assertEqual(list(bucket._keys), ['a', 'b', 'c']) + self.assertEqual(list(bucket._values), [0, 1, 2]) + self.assertEqual(list(new_b._keys), ['d', 'e', 'f']) + self.assertEqual(list(new_b._values), [3, 4, 5]) + self.assertTrue(bucket._next is new_b) + self.assertTrue(new_b._next is next_b) + + def test__split_filled_explicit_index(self): + bucket = self._makeOne() + next_b = bucket._next = self._makeOne() + for i, c in enumerate('abcdef'): + bucket[c] = i + new_b = bucket._split(2) + self.assertEqual(list(bucket._keys), ['a', 'b']) + self.assertEqual(list(bucket._values), [0, 1]) + self.assertEqual(list(new_b._keys), ['c', 'd', 'e', 'f']) + self.assertEqual(list(new_b._values), [2, 3, 4, 5]) + self.assertTrue(bucket._next is new_b) + self.assertTrue(new_b._next is next_b) + + def test_keys_empty_no_args(self): + bucket = self._makeOne() + self.assertEqual(bucket.keys(), []) + + def test_keys_filled_no_args(self): + bucket = self._makeOne() + for i, c in enumerate('abcdef'): + bucket[c] = i + self.assertEqual(bucket.keys(), + ['a', 'b', 'c', 'd', 'e', 'f']) + + def test_keys_filled_w_args(self): + bucket = self._makeOne() + for i, c in enumerate('abcdef'): + bucket[c] = i + 
self.assertEqual(bucket.keys(min='b', excludemin=True, + max='f', excludemax=True), ['c', 'd', 'e']) + + def test_iterkeys_empty_no_args(self): + bucket = self._makeOne() + self.assertEqual(list(bucket.iterkeys()), []) + + def test_iterkeys_filled_no_args(self): + bucket = self._makeOne() + for i, c in enumerate('abcdef'): + bucket[c] = i + self.assertEqual(list(bucket.iterkeys()), + ['a', 'b', 'c', 'd', 'e', 'f']) + + def test_iterkeys_filled_w_args(self): + bucket = self._makeOne() + for i, c in enumerate('abcdef'): + bucket[c] = i + self.assertEqual(list(bucket.iterkeys( + min='b', excludemin=True, + max='f', excludemax=True)), ['c', 'd', 'e']) + + def test_values_empty_no_args(self): + bucket = self._makeOne() + self.assertEqual(bucket.values(), []) + + def test_values_filled_no_args(self): + bucket = self._makeOne() + for i, c in enumerate('abcdef'): + bucket[c] = i + self.assertEqual(bucket.values(), list(range(6))) + + def test_values_filled_w_args(self): + bucket = self._makeOne() + for i, c in enumerate('abcdef'): + bucket[c] = i + self.assertEqual(bucket.values(min='b', excludemin=True, + max='f', excludemax=True), [2, 3, 4]) + + def test_itervalues_empty_no_args(self): + bucket = self._makeOne() + self.assertEqual(list(bucket.itervalues()), []) + + def test_itervalues_filled_no_args(self): + bucket = self._makeOne() + for i, c in enumerate('abcdef'): + bucket[c] = i + self.assertEqual(list(bucket.itervalues()), list(range(6))) + + def test_itervalues_filled_w_args(self): + bucket = self._makeOne() + for i, c in enumerate('abcdef'): + bucket[c] = i + self.assertEqual(list(bucket.itervalues( + min='b', excludemin=True, + max='f', excludemax=True)), [2, 3, 4]) + + def test_items_empty_no_args(self): + bucket = self._makeOne() + self.assertEqual(bucket.items(), []) + + def test_items_filled_no_args(self): + bucket = self._makeOne() + EXPECTED = [] + for i, c in enumerate('abcdef'): + bucket[c] = i + EXPECTED.append((c, i)) + self.assertEqual(bucket.items(), 
EXPECTED) + + def test_items_filled_w_args(self): + bucket = self._makeOne() + EXPECTED = [] + for i, c in enumerate('abcdef'): + bucket[c] = i + EXPECTED.append((c, i)) + self.assertEqual(bucket.items(min='b', excludemin=True, + max='f', excludemax=True), + EXPECTED[2:5]) + + def test_iteritems_empty_no_args(self): + bucket = self._makeOne() + self.assertEqual(list(bucket.iteritems()), []) + + def test_iteritems_filled_no_args(self): + bucket = self._makeOne() + EXPECTED = [] + for i, c in enumerate('abcdef'): + bucket[c] = i + EXPECTED.append((c, i)) + self.assertEqual(list(bucket.iteritems()), EXPECTED) + + def test_iteritems_filled_w_args(self): + bucket = self._makeOne() + EXPECTED = [] + for i, c in enumerate('abcdef'): + bucket[c] = i + EXPECTED.append((c, i)) + self.assertEqual(list(bucket.iteritems(min='b', excludemin=True, + max='f', excludemax=True)), + EXPECTED[2:5]) + + def test___getstate___empty_no_next(self): + bucket = self._makeOne() + self.assertEqual(bucket.__getstate__(), ((),)) + + def test___getstate___empty_w_next(self): + bucket = self._makeOne() + bucket._next = next_b = self._makeOne() + self.assertEqual(bucket.__getstate__(), ((), next_b)) + + def test___getstate___non_empty_no_next(self): + bucket = self._makeOne() + EXPECTED = () + for i, c in enumerate('abcdef'): + bucket[c] = i + EXPECTED += (c, i) + self.assertEqual(bucket.__getstate__(), (EXPECTED,)) + + def test___getstate___non_empty_w_next(self): + bucket = self._makeOne() + bucket._next = next_b = self._makeOne() + EXPECTED = () + for i, c in enumerate('abcdef'): + bucket[c] = i + EXPECTED += (c, i) + self.assertEqual(bucket.__getstate__(), (EXPECTED, next_b)) + + def test___setstate___w_non_tuple(self): + bucket = self._makeOne() + self.assertRaises(TypeError, bucket.__setstate__, (None,)) + + def test___setstate___w_empty_no_next(self): + bucket = self._makeOne() + bucket._next = next_b = self._makeOne() + for i, c in enumerate('abcdef'): + bucket[c] = i + 
bucket.__setstate__(((),)) + self.assertEqual(len(bucket.keys()), 0) + self.assertTrue(bucket._next is None) + + def test___setstate___w_non_empty_w_next(self): + bucket = self._makeOne() + next_b = self._makeOne() + ITEMS = () + EXPECTED = [] + for i, c in enumerate('abcdef'): + ITEMS += (c, i) + EXPECTED.append((c, i)) + bucket.__setstate__((ITEMS, next_b)) + self.assertEqual(bucket.items(), EXPECTED) + self.assertTrue(bucket._next is next_b) + + def test__p_resolveConflict_x_on_com_next_old_new_None(self): + from ..Interfaces import BTreesConflictError + bucket = self._makeOne() + N_NEW = object() + s_old = None + s_com = ((), N_NEW) + s_new = None + e = self.assertRaises(BTreesConflictError, + bucket._p_resolveConflict, s_old, s_com, s_new) + self.assertEqual(e.reason, 0) + + def test__p_resolveConflict_x_on_com_next(self): + from ..Interfaces import BTreesConflictError + bucket = self._makeOne() + N_NEW = object() + s_old = ((), None) + s_com = ((), N_NEW) + s_new = ((), None) + e = self.assertRaises(BTreesConflictError, + bucket._p_resolveConflict, s_old, s_com, s_new) + self.assertEqual(e.reason, 0) + + def test__p_resolveConflict_x_on_new_next_old_com_None(self): + from ..Interfaces import BTreesConflictError + bucket = self._makeOne() + N_NEW = object() + s_old = None + s_com = None + s_new = ((), N_NEW) + e = self.assertRaises(BTreesConflictError, + bucket._p_resolveConflict, s_old, s_com, s_new) + self.assertEqual(e.reason, 0) + + def test__p_resolveConflict_x_on_new_next(self): + from ..Interfaces import BTreesConflictError + bucket = self._makeOne() + N_NEW = object() + s_old = ((), None) + s_com = ((), None) + s_new = ((), N_NEW) + e = self.assertRaises(BTreesConflictError, + bucket._p_resolveConflict, s_old, s_com, s_new) + self.assertEqual(e.reason, 0) + + def test__p_resolveConflict_x_on_com_empty(self): + from ..Interfaces import BTreesConflictError + bucket = self._makeOne() + s_old = (('a', 'b', 'c', 'd'), None) + s_com = ((), None) + s_new = 
(('a', 'b'), None) + e = self.assertRaises(BTreesConflictError, + bucket._p_resolveConflict, s_old, s_com, s_new) + self.assertEqual(e.reason, 12) + + def test__p_resolveConflict_x_on_new_empty(self): + from ..Interfaces import BTreesConflictError + bucket = self._makeOne() + s_old = (('a', 0, 'b', 1), None) + s_com = (('a', 0), None) + s_new = ((), None) + e = self.assertRaises(BTreesConflictError, + bucket._p_resolveConflict, s_old, s_com, s_new) + self.assertEqual(e.reason, 12) + + def test__p_resolveConflict_x_both_update_same_key(self): + from ..Interfaces import BTreesConflictError + bucket = self._makeOne() + s_old = (('a', 0), None) + s_com = (('a', 5, 'b', 1, 'c', 2), None) + s_new = (('a', 6, 'd', 3), None) + e = self.assertRaises(BTreesConflictError, + bucket._p_resolveConflict, s_old, s_com, s_new) + self.assertEqual(e.reason, 1) + + def test__p_resolveConflict_x_on_del_first_com_x(self): + from ..Interfaces import BTreesConflictError + bucket = self._makeOne() + s_old = (('a', 0, 'b', 1, 'c', 2), None) + s_com = (('b', 1), None) + s_new = (('a', 0, 'b', 1), None) + e = self.assertRaises(BTreesConflictError, + bucket._p_resolveConflict, s_old, s_com, s_new) + self.assertEqual(e.reason, 13) + + def test__p_resolveConflict_x_on_del_first_new_x(self): + from ..Interfaces import BTreesConflictError + bucket = self._makeOne() + s_old = (('a', 0, 'b', 1, 'c', 2), None) + s_com = (('a', 0, 'b', 1), None) + s_new = (('b', 1), None) + e = self.assertRaises(BTreesConflictError, + bucket._p_resolveConflict, s_old, s_com, s_new) + self.assertEqual(e.reason, 13) + + def test__p_resolveConflict_x_on_del_first_new(self): + from ..Interfaces import BTreesConflictError + bucket = self._makeOne() + s_old = (('a', 0, 'b', 1), None) + s_com = (('a', 1, 'b', 2, 'c', 3), None) + s_new = (('b', 4), None) + e = self.assertRaises(BTreesConflictError, + bucket._p_resolveConflict, s_old, s_com, s_new) + self.assertEqual(e.reason, 2) + + def 
test__p_resolveConflict_x_on_del_first_com(self): + from ..Interfaces import BTreesConflictError + bucket = self._makeOne() + s_old = (('a', 0, 'b', 1), None) + s_com = (('b', 4), None) + s_new = (('a', 1, 'b', 2, 'c', 3), None) + e = self.assertRaises(BTreesConflictError, + bucket._p_resolveConflict, s_old, s_com, s_new) + self.assertEqual(e.reason, 3) + + def test__p_resolveConflict_x_on_ins_same_after_del(self): + from ..Interfaces import BTreesConflictError + bucket = self._makeOne() + s_old = (('a', 0, 'b', 1), None) + s_com = (('a', 0, 'c', 2), None) + s_new = (('a', 0, 'c', 2, 'd', 3), None) + e = self.assertRaises(BTreesConflictError, + bucket._p_resolveConflict, s_old, s_com, s_new) + self.assertEqual(e.reason, 4) + + def test__p_resolveConflict_x_on_del_same(self): + from ..Interfaces import BTreesConflictError + bucket = self._makeOne() + s_old = (('a', 0, 'b', 1, 'c', 2), None) + s_com = (('a', 0, 'c', 2), None) + s_new = (('a', 0, 'd', 3, 'e', 4), None) + e = self.assertRaises(BTreesConflictError, + bucket._p_resolveConflict, s_old, s_com, s_new) + self.assertEqual(e.reason, 5) + + def test__p_resolveConflict_x_on_append_same(self): + from ..Interfaces import BTreesConflictError + bucket = self._makeOne() + s_old = (('a', 0, ), None) + s_com = (('a', 0, 'b', 1), None) + s_new = (('a', 0, 'b', 1, 'c', 2), None) + e = self.assertRaises(BTreesConflictError, + bucket._p_resolveConflict, s_old, s_com, s_new) + self.assertEqual(e.reason, 6) + + def test__p_resolveConflict_x_on_new_deletes_all_com_adds(self): + from ..Interfaces import BTreesConflictError + bucket = self._makeOne() + s_old = (('a', 0, 'b', 1, 'c', 2), None) + s_com = (('a', 0, 'd', 3, 'e', 4, 'f', 5), None) + s_new = (('a', 0, ), None) + e = self.assertRaises(BTreesConflictError, + bucket._p_resolveConflict, s_old, s_com, s_new) + self.assertEqual(e.reason, 7) + + def test__p_resolveConflict_x_on_com_deletes_all_new_adds(self): + from ..Interfaces import BTreesConflictError + bucket = 
self._makeOne() + s_old = (('a', 0, 'b', 1, 'c', 2), None) + s_com = (('a', 0, ), None) + s_new = (('a', 0, 'd', 3, 'e', 4, 'f', 5), None) + e = self.assertRaises(BTreesConflictError, + bucket._p_resolveConflict, s_old, s_com, s_new) + self.assertEqual(e.reason, 8) + + def test__p_resolveConflict_x_on_com_deletes_all_new_deletes(self): + from ..Interfaces import BTreesConflictError + bucket = self._makeOne() + s_old = (('a', 0, 'b', 1, 'c', 2), None) + s_com = (('a', 0, ), None) + s_new = (('a', 0, 'b', 1), None) + e = self.assertRaises(BTreesConflictError, + bucket._p_resolveConflict, s_old, s_com, s_new) + self.assertEqual(e.reason, 9) + + def test__p_resolveConflict_ok_both_add_new_max(self): + bucket = self._makeOne() + s_old = (('a', 0), None) + s_com = (('a', 0, 'b', 1), None) + s_new = (('a', 0, 'c', 2), None) + result = bucket._p_resolveConflict(s_old, s_com, s_new) + self.assertEqual(result, (('a', 0, 'b', 1, 'c', 2),)) + + def test__p_resolveConflict_ok_com_updates(self): + bucket = self._makeOne() + s_old = (('a', 0), None) + s_com = (('a', 5), None) + s_new = (('a', 0, 'd', 3), None) + result = bucket._p_resolveConflict(s_old, s_com, s_new) + self.assertEqual(result, (('a', 5, 'd', 3),)) + + def test__p_resolveConflict_ok_new_updates(self): + bucket = self._makeOne() + s_old = (('a', 0), None) + s_com = (('a', 0, 'd', 3), None) + s_new = (('a', 5), None) + result = bucket._p_resolveConflict(s_old, s_com, s_new) + self.assertEqual(result, (('a', 5, 'd', 3),)) + + def test__p_resolveConflict_ok_com_inserts_new_adds(self): + bucket = self._makeOne() + s_old = (('a', 0, 'c', 2), None) + s_com = (('a', 0, 'b', 1, 'c', 2), None) + s_new = (('a', 0, 'c', 2, 'd', 3), None) + result = bucket._p_resolveConflict(s_old, s_com, s_new) + self.assertEqual(result, (('a', 0, 'b', 1, 'c', 2, 'd', 3),)) + + def test__p_resolveConflict_ok_com_adds_new_inserts(self): + bucket = self._makeOne() + s_old = (('a', 0, 'c', 2), None) + s_com = (('a', 0, 'c', 2, 'd', 3), None) + 
s_new = (('a', 0, 'b', 1, 'c', 2), None) + result = bucket._p_resolveConflict(s_old, s_com, s_new) + self.assertEqual(result, (('a', 0, 'b', 1, 'c', 2, 'd', 3),)) + + def test__p_resolveConflict_ok_com_adds_new_deletes(self): + bucket = self._makeOne() + s_old = (('a', 0, 'b', 1, 'c', 2), None) + s_com = (('a', 0, 'b', 1, 'c', 2, 'd', 3), None) + s_new = (('a', 0, 'e', 4), None) + result = bucket._p_resolveConflict(s_old, s_com, s_new) + self.assertEqual(result, (('a', 0, 'd', 3, 'e', 4),)) + + def test__p_resolveConflict_ok_com_deletes_new_adds(self): + bucket = self._makeOne() + s_old = (('a', 0, 'b', 1, 'c', 2), None) + s_com = (('a', 0, 'e', 4), None) + s_new = (('a', 0, 'b', 1, 'c', 2, 'd', 3), None) + result = bucket._p_resolveConflict(s_old, s_com, s_new) + self.assertEqual(result, (('a', 0, 'd', 3, 'e', 4),)) + + def test__p_resolveConflict_ok_both_insert_new_lt_com(self): + bucket = self._makeOne() + s_old = (('a', 0, 'd', 3), None) + s_com = (('a', 0, 'c', 2, 'd', 3), None) + s_new = (('a', 0, 'b', 1, 'd', 3), None) + result = bucket._p_resolveConflict(s_old, s_com, s_new) + self.assertEqual(result, (('a', 0, 'b', 1, 'c', 2, 'd', 3),)) + + def test__p_resolveConflict_ok_both_insert_new_gt_com(self): + bucket = self._makeOne() + s_old = (('a', 0, 'd', 3), None) + s_com = (('a', 0, 'b', 1, 'd', 3), None) + s_new = (('a', 0, 'c', 2, 'd', 3), None) + result = bucket._p_resolveConflict(s_old, s_com, s_new) + self.assertEqual(result, (('a', 0, 'b', 1, 'c', 2, 'd', 3),)) + + def test__p_resolveConflict_ok_new_insert_then_com_append(self): + bucket = self._makeOne() + s_old = (('a', 0, 'd', 3), None) + s_com = (('a', 0, 'e', 4), None) + s_new = (('a', 0, 'b', 1, 'd', 3), None) + result = bucket._p_resolveConflict(s_old, s_com, s_new) + self.assertEqual(result, (('a', 0, 'b', 1, 'e', 4),)) + + def test__p_resolveConflict_ok_com_insert_then_new_append(self): + bucket = self._makeOne() + s_old = (('a', 0, 'd', 3), None) + s_com = (('a', 0, 'b', 1, 'd', 3), None) + 
s_new = (('a', 0, 'e', 4), None) + result = bucket._p_resolveConflict(s_old, s_com, s_new) + self.assertEqual(result, (('a', 0, 'b', 1, 'e', 4),)) + + def test__p_resolveConflict_ok_new_deletes_tail_com_inserts(self): + bucket = self._makeOne() + s_old = (('a', 0, 'b', 1, 'd', 3), None) + s_com = (('a', 0, 'b', 1, 'c', 2, 'd', 3), None) + s_new = (('a', 0), None) + result = bucket._p_resolveConflict(s_old, s_com, s_new) + self.assertEqual(result, (('a', 0, 'c', 2),)) + + def test__p_resolveConflict_ok_com_deletes_tail_new_inserts(self): + bucket = self._makeOne() + s_old = (('a', 0, 'b', 1, 'd', 3), None) + s_com = (('a', 0), None) + s_new = (('a', 0, 'b', 1, 'c', 2, 'd', 3), None) + result = bucket._p_resolveConflict(s_old, s_com, s_new) + self.assertEqual(result, (('a', 0, 'c', 2),)) + + +class SetTests(unittest.TestCase): + + assertRaises = _assertRaises + + def _getTargetClass(self): + from .._base import Set + return Set + + def _makeOne(self): + class _Set(self._getTargetClass()): + def _to_key(self, x): + return x + return _Set() + + def test_add_not_extant(self): + _set = self._makeOne() + _set.add('not_extant') + self.assertEqual(list(_set), ['not_extant']) + + def test_add_extant(self): + _set = self._makeOne() + _set.add('extant') + _set.add('extant') + self.assertEqual(list(_set), ['extant']) + + def test_insert(self): + _set = self._makeOne() + _set.insert('inserted') + self.assertEqual(list(_set), ['inserted']) + + def test_remove_miss(self): + _set = self._makeOne() + self.assertRaises(KeyError, _set.remove, 'not_extant') + + def test_remove_extant(self): + _set = self._makeOne() + _set.add('one') + _set.add('another') + _set.remove('one') + self.assertEqual(list(_set), ['another']) + + def test_update(self): + _set = self._makeOne() + _set.update(['one', 'after', 'another']) + self.assertEqual(sorted(_set), ['after', 'another', 'one']) + + def test___getstate___empty_no_next(self): + _set = self._makeOne() + self.assertEqual(_set.__getstate__(), 
((),)) + + def test___getstate___empty_w_next(self): + _set = self._makeOne() + _set._next = next_s = self._makeOne() + self.assertEqual(_set.__getstate__(), ((), next_s)) + + def test___getstate___non_empty_no_next(self): + _set = self._makeOne() + EXPECTED = () + for c in 'abcdef': + _set.add(c) + EXPECTED += (c,) + self.assertEqual(_set.__getstate__(), (EXPECTED,)) + + def test___getstate___non_empty_w_next(self): + _set = self._makeOne() + _set._next = next_s = self._makeOne() + EXPECTED = () + for c in 'abcdef': + _set.add(c) + EXPECTED += (c,) + self.assertEqual(_set.__getstate__(), (EXPECTED, next_s)) + + def test___setstate___w_non_tuple(self): + _set = self._makeOne() + self.assertRaises(TypeError, _set.__setstate__, (None,)) + + def test___setstate___w_empty_no_next(self): + _set = self._makeOne() + _set._next = next_s = self._makeOne() + for c in 'abcdef': + _set.add(c) + _set.__setstate__(((),)) + self.assertEqual(len(_set), 0) + self.assertTrue(_set._next is None) + + def test___setstate___w_non_empty_w_next(self): + _set = self._makeOne() + next_s = self._makeOne() + ITEMS = () + EXPECTED = [] + for c in 'abcdef': + ITEMS += (c,) + EXPECTED.append(c) + _set.__setstate__((ITEMS, next_s)) + self.assertEqual(sorted(_set), EXPECTED) + self.assertTrue(_set._next is next_s) + + def test___getitem___out_of_bounds(self): + _set = self._makeOne() + self.assertRaises(IndexError, _set.__getitem__, 1) + + def test___getitem___hit_bounds(self): + _set = self._makeOne() + _set.add('b') + _set.add('a') + _set.add('c') + self.assertEqual(_set[0], 'a') + self.assertEqual(_set[1], 'b') + self.assertEqual(_set[2], 'c') + + def test__split_empty(self): + _set = self._makeOne() + next_b = _set._next = self._makeOne() + new_b = _set._split() + self.assertEqual(len(_set._keys), 0) + self.assertEqual(len(new_b._keys), 0) + self.assertTrue(_set._next is new_b) + self.assertTrue(new_b._next is next_b) + + def test__split_filled_default_index(self): + _set = self._makeOne() + 
next_b = _set._next = self._makeOne() + for c in 'abcdef': + _set.add(c) + new_b = _set._split() + self.assertEqual(list(_set._keys), ['a', 'b', 'c']) + self.assertEqual(list(new_b._keys), ['d', 'e', 'f']) + self.assertTrue(_set._next is new_b) + self.assertTrue(new_b._next is next_b) + + def test__split_filled_explicit_index(self): + _set = self._makeOne() + next_b = _set._next = self._makeOne() + for c in 'abcdef': + _set.add(c) + new_b = _set._split(2) + self.assertEqual(list(_set._keys), ['a', 'b']) + self.assertEqual(list(new_b._keys), ['c', 'd', 'e', 'f']) + self.assertTrue(_set._next is new_b) + self.assertTrue(new_b._next is next_b) + + def test__p_resolveConflict_x_on_com_next_old_new_None(self): + from ..Interfaces import BTreesConflictError + _set = self._makeOne() + N_NEW = object() + s_old = None + s_com = ((), N_NEW) + s_new = None + e = self.assertRaises(BTreesConflictError, + _set._p_resolveConflict, s_old, s_com, s_new) + self.assertEqual(e.reason, 0) + + def test__p_resolveConflict_x_on_com_next(self): + from ..Interfaces import BTreesConflictError + _set = self._makeOne() + N_NEW = object() + s_old = ((), None) + s_com = ((), N_NEW) + s_new = ((), None) + e = self.assertRaises(BTreesConflictError, + _set._p_resolveConflict, s_old, s_com, s_new) + self.assertEqual(e.reason, 0) + + def test__p_resolveConflict_x_on_new_next_old_com_None(self): + from ..Interfaces import BTreesConflictError + _set = self._makeOne() + N_NEW = object() + s_old = None + s_com = None + s_new = ((), N_NEW) + e = self.assertRaises(BTreesConflictError, + _set._p_resolveConflict, s_old, s_com, s_new) + self.assertEqual(e.reason, 0) + + def test__p_resolveConflict_x_on_new_next(self): + from ..Interfaces import BTreesConflictError + _set = self._makeOne() + N_NEW = object() + s_old = ((), None) + s_com = ((), None) + s_new = ((), N_NEW) + e = self.assertRaises(BTreesConflictError, + _set._p_resolveConflict, s_old, s_com, s_new) + self.assertEqual(e.reason, 0) + + def 
test__p_resolveConflict_x_on_com_empty(self): + from ..Interfaces import BTreesConflictError + _set = self._makeOne() + s_old = (('a', 'b'), None) + s_com = ((), None) + s_new = (('a',), None) + e = self.assertRaises(BTreesConflictError, + _set._p_resolveConflict, s_old, s_com, s_new) + self.assertEqual(e.reason, 12) + + def test__p_resolveConflict_x_on_new_empty(self): + from ..Interfaces import BTreesConflictError + _set = self._makeOne() + s_old = (('a', 'b'), None) + s_com = (('a',), None) + s_new = ((), None) + e = self.assertRaises(BTreesConflictError, + _set._p_resolveConflict, s_old, s_com, s_new) + self.assertEqual(e.reason, 12) + + def test__p_resolveConflict_x_on_del_first_com(self): + from ..Interfaces import BTreesConflictError + _set = self._makeOne() + s_old = (('a','b'), None) + s_com = (('b',), None) + s_new = (('a', 'b', 'c'), None) + e = self.assertRaises(BTreesConflictError, + _set._p_resolveConflict, s_old, s_com, s_new) + self.assertEqual(e.reason, 13) + + def test__p_resolveConflict_x_on_del_first_new(self): + from ..Interfaces import BTreesConflictError + _set = self._makeOne() + s_old = (('a', 'b'), None) + s_com = (('a', 'b', 'c'), None) + s_new = (('b',), None) + e = self.assertRaises(BTreesConflictError, + _set._p_resolveConflict, s_old, s_com, s_new) + self.assertEqual(e.reason, 13) + + def test__p_resolveConflict_x_on_ins_same_after_del(self): + from ..Interfaces import BTreesConflictError + _set = self._makeOne() + s_old = (('a', 'b'), None) + s_com = (('a', 'c'), None) + s_new = (('a', 'c', 'd'), None) + e = self.assertRaises(BTreesConflictError, + _set._p_resolveConflict, s_old, s_com, s_new) + self.assertEqual(e.reason, 4) + + def test__p_resolveConflict_x_on_del_same(self): + from ..Interfaces import BTreesConflictError + _set = self._makeOne() + s_old = (('a', 'b', 'c'), None) + s_com = (('a', 'c'), None) + s_new = (('a', 'd', 'e'), None) + e = self.assertRaises(BTreesConflictError, + _set._p_resolveConflict, s_old, s_com, s_new) 
+ self.assertEqual(e.reason, 5) + + def test__p_resolveConflict_x_on_append_same(self): + from ..Interfaces import BTreesConflictError + _set = self._makeOne() + s_old = (('a',), None) + s_com = (('a', 'b'), None) + s_new = (('a', 'b', 'c'), None) + e = self.assertRaises(BTreesConflictError, + _set._p_resolveConflict, s_old, s_com, s_new) + self.assertEqual(e.reason, 6) + + def test__p_resolveConflict_x_on_new_deletes_all_com_adds(self): + from ..Interfaces import BTreesConflictError + _set = self._makeOne() + s_old = (('a', 'b', 'c'), None) + s_com = (('a', 'd', 'e', 'f'), None) + s_new = (('a',), None) + e = self.assertRaises(BTreesConflictError, + _set._p_resolveConflict, s_old, s_com, s_new) + self.assertEqual(e.reason, 7) + + def test__p_resolveConflict_x_on_com_deletes_all_new_adds(self): + from ..Interfaces import BTreesConflictError + _set = self._makeOne() + s_old = (('a', 'b', 'c'), None) + s_com = (('a',), None) + s_new = (('a', 'd', 'e', 'f'), None) + e = self.assertRaises(BTreesConflictError, + _set._p_resolveConflict, s_old, s_com, s_new) + self.assertEqual(e.reason, 8) + + def test__p_resolveConflict_x_on_com_deletes_all_new_deletes(self): + from ..Interfaces import BTreesConflictError + _set = self._makeOne() + s_old = (('a', 'b', 'c'), None) + s_com = (('a',), None) + s_new = (('a', 'b'), None) + e = self.assertRaises(BTreesConflictError, + _set._p_resolveConflict, s_old, s_com, s_new) + self.assertEqual(e.reason, 9) + + def test__p_resolveConflict_ok_insert_in_new_add_in_com(self): + _set = self._makeOne() + s_old = (('a', 'c'), None) + s_com = (('a', 'c', 'd'), None) + s_new = (('a', 'b', 'c'), None) + result = _set._p_resolveConflict(s_old, s_com, s_new) + # Note that _SetBase uses default __getstate__ + self.assertEqual(result, (('a', 'b', 'c', 'd'),)) + + def test__p_resolveConflict_ok_insert_in_com_add_in_new(self): + _set = self._makeOne() + s_old = (('a', 'c'), None) + s_com = (('a', 'b', 'c'), None) + s_new = (('a', 'c', 'd'), None) + 
result = _set._p_resolveConflict(s_old, s_com, s_new) + self.assertEqual(result, (('a', 'b', 'c', 'd'),)) + + def test__p_resolveConflict_ok_delete_in_new_add_in_com(self): + _set = self._makeOne() + s_old = (('a', 'b', 'c'), None) + s_com = (('a', 'b', 'c', 'd'), None) + s_new = (('a', 'c'), None) + result = _set._p_resolveConflict(s_old, s_com, s_new) + self.assertEqual(result, (('a', 'c', 'd'),)) + + def test__p_resolveConflict_ok_delete_in_com_add_in_new(self): + _set = self._makeOne() + s_old = (('a', 'b', 'c'), None) + s_com = (('a', 'c'), None) + s_new = (('a', 'b', 'c', 'd'), None) + result = _set._p_resolveConflict(s_old, s_com, s_new) + self.assertEqual(result, (('a', 'c', 'd'),)) + + def test__p_resolveConflict_ok_add_new_lt_add_com(self): + _set = self._makeOne() + s_old = (('a',), None) + s_com = (('a', 'd'), None) + s_new = (('a', 'b', 'c'), None) + result = _set._p_resolveConflict(s_old, s_com, s_new) + self.assertEqual(result, (('a', 'b', 'c', 'd'),)) + + def test__p_resolveConflict_ok_add_com_lt_add_new(self): + _set = self._makeOne() + s_old = (('a',), None) + s_com = (('a', 'b', 'c'), None) + s_new = (('a', 'd'), None) + result = _set._p_resolveConflict(s_old, s_com, s_new) + self.assertEqual(result, (('a', 'b', 'c', 'd'),)) + + def test__p_resolveConflict_ok_ins_in_com_del_add_in_new(self): + _set = self._makeOne() + s_old = (('a', 'c'), None) + s_com = (('a', 'b', 'c'), None) + s_new = (('a', 'd', 'e'), None) + result = _set._p_resolveConflict(s_old, s_com, s_new) + self.assertEqual(result, (('a', 'b', 'd', 'e'),)) + + def test__p_resolveConflict_ok_ins_in_new_del_add_in_com(self): + _set = self._makeOne() + s_old = (('a', 'c'), None) + s_com = (('a', 'd', 'e'), None) + s_new = (('a', 'b', 'c'), None) + result = _set._p_resolveConflict(s_old, s_com, s_new) + self.assertEqual(result, (('a', 'b', 'd', 'e'),)) + + def test__p_resolveConflict_ok_ins_both_new_lt_com(self): + _set = self._makeOne() + s_old = (('a', 'e'), None) + s_com = (('a', 'c', 
'd', 'e'), None) + s_new = (('a', 'b', 'e'), None) + result = _set._p_resolveConflict(s_old, s_com, s_new) + self.assertEqual(result, (('a', 'b', 'c', 'd', 'e'),)) + + def test__p_resolveConflict_ok_del_new_add_com(self): + _set = self._makeOne() + s_old = (('a', 'e'), None) + s_com = (('a', 'c', 'd', 'e'), None) + s_new = (('a',), None) + result = _set._p_resolveConflict(s_old, s_com, s_new) + self.assertEqual(result, (('a', 'c', 'd'),)) + + def test__p_resolveConflict_ok_del_com_add_new(self): + _set = self._makeOne() + s_old = (('a', 'e'), None) + s_com = (('a',), None) + s_new = (('a', 'c', 'd', 'e'), None) + result = _set._p_resolveConflict(s_old, s_com, s_new) + self.assertEqual(result, (('a', 'c', 'd'),)) + + def test__p_resolveConflict_add_new_gt_old_com_lt_old(self): + _set = self._makeOne() + s_old = (('a', 'b', 'c'), None) + s_com = (('a', 'b', 'bb', 'c'), None) + s_new = (('a', 'b', 'c', 'd'), None) + result = _set._p_resolveConflict(s_old, s_com, s_new) + self.assertEqual(result, (('a', 'b', 'bb', 'c', 'd'),)) + + +class Test_TreeItem(unittest.TestCase): + + def _getTargetClass(self): + from .._base import _TreeItem + return _TreeItem + + def _makeOne(self, key, child): + return self._getTargetClass()(key, child) + + def test_ctor(self): + child = object() + item = self._makeOne('key', child) + self.assertEqual(item.key, 'key') + self.assertTrue(item.child is child) + + +class Test_Tree(unittest.TestCase): + + assertRaises = _assertRaises + + def _getTargetClass(self): + from .._base import _Tree + return _Tree + + def _makeOne(self, items=None): + from .._base import Bucket + class _Bucket(Bucket): + def _to_key(self, k): + return k + class _Test(self._getTargetClass()): + _to_key = _to_value = lambda self, x: x + _bucket_type = _Bucket + max_leaf_size = 10 + max_internal_size = 15 + return _Test(items) + + def test_setdefault_miss(self): + tree = self._makeOne() + value = object() + self.assertTrue(tree.setdefault('non_extant', value) is value) + 
self.assertTrue('non_extant' in tree) + self.assertTrue(tree._findbucket('non_extant')['non_extant'] is value) + + def test_setdefault_hit(self): + tree = self._makeOne() + value1 = object() + value2 = object() + tree['extant'] = value1 + self.assertTrue(tree.setdefault('extant', value2) is value1) + self.assertTrue('extant' in tree) + self.assertTrue(tree._findbucket('extant')['extant'] is value1) + + def test_pop_miss_no_default(self): + tree = self._makeOne() + self.assertRaises(KeyError, tree.pop, 'nonesuch') + + def test_pop_miss_w_default(self): + default = object() + tree = self._makeOne() + self.assertTrue(tree.pop('nonesuch', default) is default) + + def test_pop_hit(self): + tree = self._makeOne() + value = object() + tree['extant'] = value + self.assertTrue(tree.pop('extant', value) is value) + self.assertFalse('extant' in tree) + + def test_update_value_w_iteritems(self): + tree = self._makeOne() + tree.update({'a': 'b'}) + self.assertEqual(tree._findbucket('a')['a'], 'b') + + def test_update_value_w_items(self): + tree = self._makeOne() + class Foo(object): + def items(self): + return [('a', 'b')] + tree.update(Foo()) + self.assertEqual(tree._findbucket('a')['a'], 'b') + + def test_update_value_w_invalid_items(self): + tree = self._makeOne() + class Foo(object): + def items(self): + return ('a', 'b', 'c') + self.assertRaises(TypeError, tree.update, Foo()) + + def test_update_sequence(self): + tree = self._makeOne() + tree.update([('a', 'b')]) + self.assertEqual(tree._findbucket('a')['a'], 'b') + + def test_update_replacing(self): + tree = self._makeOne() + tree['a'] = 'b' + tree.update([('a', 'c')]) + self.assertEqual(tree._findbucket('a')['a'], 'c') + + def test___setitem___incomparable(self): + tree = self._makeOne() + def _should_error(): + tree[object()] = 'b' + self.assertRaises(TypeError, _should_error) + + def test___delitem___miss(self): + tree = self._makeOne() + def _should_error(): + del tree['a'] + self.assertRaises(KeyError, _should_error) 
+ + def test___delitem___hit(self): + tree = self._makeOne() + tree['a'] = 'b' + del tree['a'] + self.assertFalse('a' in tree) + + def test_clear(self): + tree = self._makeOne() + tree['a'] = 'b' + tree.clear() + self.assertFalse('a' in tree) + self.assertEqual(tree._firstbucket, None) + + def test___nonzero___empty(self): + tree = self._makeOne() + self.assertFalse(tree) + + def test___nonzero___nonempty(self): + tree = self._makeOne() + tree['a'] = 'b' + self.assertTrue(tree) + + def test___len__empty(self): + tree = self._makeOne() + self.assertEqual(len(tree), 0) + + def test___len__nonempty(self): + tree = self._makeOne() + tree['a'] = 'b' + self.assertEqual(len(tree), 1) + + def test___len__nonempty_multiple_buckets(self): + tree = self._makeOne() + for i in range(100): + tree[str(i)] = i + self.assertEqual(len(tree), 100) + b_count = 0 + bucket = tree._firstbucket + + def test_size_empty(self): + tree = self._makeOne() + self.assertEqual(tree.size, 0) + + def test_size_nonempty(self): + tree = self._makeOne() + tree['a'] = 'b' + self.assertEqual(tree.size, 1) + + def test_size_nonempty_multiple_buckets(self): + tree = self._makeOne() + for i in range(100): + tree[str(i)] = i + b_count = 0 + bucket = tree._firstbucket + while bucket is not None: + b_count += 1 + bucket = bucket._next + self.assertEqual(tree.size, b_count) + + def test__search_empty(self): + tree = self._makeOne() + self.assertEqual(tree._search('nonesuch'), -1) + + def test__search_miss_high(self): + tree = self._makeOne() + for i in range(100): + tree[float(i)] = i + b_count = 0 + bucket = tree._firstbucket + while bucket is not None: + b_count += 1 + bucket = bucket._next + self.assertEqual(tree.size, b_count) + self.assertEqual(tree._search(99.5), b_count - 1) + + def test__search_miss_low(self): + tree = self._makeOne() + for i in range(100): + tree[float(i)] = i + self.assertEqual(tree._search(0.1), 0) + + def test__search_miss_between(self): + tree = self._makeOne() + for i in 
range(100): + tree[float(i)] = i + self.assertEqual(tree._search(1.5), 0) + + def test__search_hit(self): + tree = self._makeOne() + for i in range(100): + tree[float(i)] = i + key = tree._data[1].key + self.assertEqual(tree._search(key), 1) + + def test__find_bucket_low(self): + tree = self._makeOne() + for i in range(1000): + tree[float(i)] = i + self.assertTrue(tree._findbucket(0.1) is tree._firstbucket) + + def test__find_bucket_high(self): + tree = self._makeOne() + for i in range(1000): + tree[float(i)] = i + bucket = tree._firstbucket + while bucket._next is not None: + bucket = bucket._next + self.assertTrue(tree._findbucket(999.5) is bucket) + + def test___contains___empty(self): + tree = self._makeOne() + self.assertFalse('nonesuch' in tree) + + def test___contains___miss(self): + tree = self._makeOne() + for i in range(1000): + tree[float(i)] = i + self.assertFalse(1000.0 in tree) + + def test___contains___hit(self): + tree = self._makeOne() + keys = [] + for i in range(1000): + key = float(i) + tree[key] = i + keys.append(key) + for key in keys: + self.assertTrue(key in tree) + + def test_has_key_empty(self): + tree = self._makeOne() + self.assertFalse(tree.has_key('nonesuch')) + + def test_has_key_miss(self): + tree = self._makeOne() + for i in range(1000): + tree[float(i)] = i + self.assertFalse(tree.has_key(1000.0)) + + def test_has_key_hit(self): + tree = self._makeOne() + KEYS = [] + for i in range(1000): + key = float(i) + tree[key] = i + KEYS.append(key) + for key in KEYS: + # XXX should we be testing for the 'depth' value? 
+ self.assertTrue(tree.has_key(key)) + + def test_keys_defaults_empty(self): + tree = self._makeOne() + self.assertEqual(list(tree.keys()), []) + + def test_keys_defaults_filled(self): + tree = self._makeOne() + KEYS = ['alpha', 'bravo', 'charlie', 'delta', 'echo'] + for key in KEYS: + tree[key] = key.upper() + self.assertEqual(list(tree.keys()), KEYS[:]) + + def test_keys_defaults_exclude_min(self): + tree = self._makeOne() + KEYS = ['alpha', 'bravo', 'charlie', 'delta', 'echo'] + for key in KEYS: + tree[key] = key.upper() + self.assertEqual(list(tree.keys(excludemin=True)), KEYS[1: 5]) + + def test_keys_defaults_exclude_max(self): + tree = self._makeOne() + KEYS = ['alpha', 'bravo', 'charlie', 'delta', 'echo'] + for key in KEYS: + tree[key] = key.upper() + self.assertEqual(list(tree.keys(excludemax=True)), KEYS[0: 4]) + + def test_keys_w_min_hit(self): + tree = self._makeOne() + KEYS = ['alpha', 'bravo', 'charlie', 'delta', 'echo'] + for key in KEYS: + tree[key] = key.upper() + self.assertEqual(list(tree.keys(min='bravo')), KEYS[1: 5]) + + def test_keys_w_min_miss(self): + tree = self._makeOne() + KEYS = ['alpha', 'bravo', 'charlie', 'delta', 'echo'] + for key in KEYS: + tree[key] = key.upper() + self.assertEqual(list(tree.keys(min='candy')), KEYS[2: 5]) + + def test_keys_w_min_hit_w_exclude_min(self): + tree = self._makeOne() + KEYS = ['alpha', 'bravo', 'charlie', 'delta', 'echo'] + for key in KEYS: + tree[key] = key.upper() + self.assertEqual(list(tree.keys(min='bravo', excludemin=True)), + KEYS[2: 5]) + + def test_keys_w_min_miss_w_exclude_min(self): + tree = self._makeOne() + KEYS = ['alpha', 'bravo', 'charlie', 'delta', 'echo'] + for key in KEYS: + tree[key] = key.upper() + # 'excludemin' doesn't fire on miss + self.assertEqual(list(tree.keys(min='candy', excludemin=True)), + KEYS[2: 5]) + + def test_keys_w_max_hit(self): + tree = self._makeOne() + KEYS = ['alpha', 'bravo', 'charlie', 'delta', 'echo'] + for key in KEYS: + tree[key] = key.upper() + 
self.assertEqual(list(tree.keys(max='delta')), KEYS[0: 4]) + + def test_keys_w_max_miss(self): + tree = self._makeOne() + KEYS = ['alpha', 'bravo', 'charlie', 'delta', 'echo'] + for key in KEYS: + tree[key] = key.upper() + self.assertEqual(list(tree.keys(max='dandy')), KEYS[0: 3]) + + def test_keys_w_max_hit_w_exclude_max(self): + tree = self._makeOne() + KEYS = ['alpha', 'bravo', 'charlie', 'delta', 'echo'] + for key in KEYS: + tree[key] = key.upper() + self.assertEqual(list(tree.keys(max='delta', excludemax=True)), + KEYS[0: 3]) + + def test_keys_w_max_miss_w_exclude_max(self): + tree = self._makeOne() + KEYS = ['alpha', 'bravo', 'charlie', 'delta', 'echo'] + for key in KEYS: + tree[key] = key.upper() + # 'excludemax' doesn't fire on miss + self.assertEqual(list(tree.keys(max='dandy', excludemax=True)), + KEYS[0: 3]) + + def test_iterkeys(self): + tree = self._makeOne() + KEYS = ['alpha', 'bravo', 'charlie', 'delta', 'echo'] + for key in KEYS: + tree[key] = key.upper() + self.assertEqual(list(tree.iterkeys()), KEYS) + + def test___iter__(self): + tree = self._makeOne() + KEYS = ['alpha', 'bravo', 'charlie', 'delta', 'echo'] + for key in KEYS: + tree[key] = key.upper() + self.assertEqual(list(tree), KEYS) + + def test_minKey_empty(self): + tree = self._makeOne() + self.assertRaises(ValueError, tree.minKey) + + def test_minKey_filled_default(self): + tree = self._makeOne() + KEYS = ['alpha', 'bravo', 'charlie', 'delta', 'echo'] + for key in KEYS: + tree[key] = key.upper() + self.assertEqual(tree.minKey(), KEYS[0]) + + def test_minKey_filled_explicit_hit(self): + tree = self._makeOne() + KEYS = ['alpha', 'bravo', 'charlie', 'delta', 'echo'] + for key in KEYS: + tree[key] = key.upper() + self.assertEqual(tree.minKey(min='bravo'), 'bravo') + + def test_minKey_filled_explicit_miss(self): + tree = self._makeOne() + KEYS = ['alpha', 'bravo', 'charlie', 'delta', 'echo'] + for key in KEYS: + tree[key] = key.upper() + self.assertEqual(tree.minKey(min='basso'), 'bravo') + + 
def test_maxKey_empty(self): + tree = self._makeOne() + self.assertRaises(ValueError, tree.maxKey) + + def test_maxKey_filled_default(self): + tree = self._makeOne() + KEYS = ['alpha', 'bravo', 'charlie', 'delta', 'echo'] + for key in KEYS: + tree[key] = key.upper() + self.assertEqual(tree.maxKey(), 'echo') + + def test_maxKey_filled_explicit_hit(self): + tree = self._makeOne() + KEYS = ['alpha', 'bravo', 'charlie', 'delta', 'echo'] + for key in KEYS: + tree[key] = key.upper() + self.assertEqual(tree.maxKey('bravo'), 'bravo') + + def test_maxKey_filled_explicit_miss(self): + tree = self._makeOne() + KEYS = ['alpha', 'bravo', 'charlie', 'delta', 'echo'] + for key in KEYS: + tree[key] = key.upper() + self.assertEqual(tree.maxKey('candy'), 'bravo') + + def test__set_calls_readCurrent_on_jar(self): + tree = self._makeOne() + tree._p_oid = b'OID' + tree._p_serial = b'01234567' + tree._p_jar = jar = _Jar() + tree._set('a', 'b') + self.assertTrue(tree in jar._current) + + def test__split_empty(self): + tree = self._makeOne() + self.assertRaises(IndexError, tree._split) + + def test__split_filled_empties_original(self): + tree = self._makeOne() + next_t = tree._next = self._makeOne() + for i, c in enumerate('abcdef'): + tree[c] = i + fb = tree._firstbucket + new_t = tree._split() + self.assertEqual(list(tree), []) + self.assertTrue(tree._firstbucket is None) + self.assertEqual(list(new_t), ['a', 'b', 'c', 'd', 'e', 'f']) + self.assertTrue(new_t._firstbucket is fb) + + def test__split_filled_divides_original(self): + tree = self._makeOne() + next_t = tree._next = self._makeOne() + LETTERS = 'abcdefghijklmnopqrstuvwxyz' + for i, c in enumerate(LETTERS): + tree[c] = i + fb = tree._firstbucket + new_t = tree._split() + # Note that original tree still links to split buckets + self.assertEqual(''.join(list(tree)), LETTERS) + self.assertTrue(tree._firstbucket is fb) + self.assertEqual(''.join(list(new_t)), LETTERS[10:]) + self.assertFalse(new_t._firstbucket is fb) + + def 
test__split_filled_divides_deeper(self): + tree = self._makeOne() + next_t = tree._next = self._makeOne() + KEYS = [] + FMT = '%05d' + for i in range(1000): + key = FMT % i + tree[key] = i + KEYS.append(key) + fb = tree._firstbucket + new_t = tree._split(tree.max_internal_size - 2) + # Note that original tree still links to split buckets + self.assertEqual(list(tree), KEYS) + self.assertTrue(tree._firstbucket is fb) + new_min = new_t.minKey() + self.assertEqual(list(new_t), KEYS[int(new_min):]) + self.assertFalse(new_t._firstbucket is fb) + + def test__del_calls_readCurrent_on_jar(self): + tree = self._makeOne({'a': 'b'}) + tree._p_oid = b'OID' + tree._p_serial = b'01234567' + tree._p_jar = jar = _Jar() + tree._del('a') + self.assertTrue(tree in jar._current) + + def test__del_miss(self): + tree = self._makeOne({'a': 'b'}) + self.assertRaises(KeyError, tree._del, 'nonesuch') + + def test__del_fixes_up_node_key(self): + SOURCE = dict([('%05d' % i, i) for i in range(1000)]) + tree = self._makeOne(SOURCE) + before = tree._data[1].key + del tree[before] + after = tree._data[1].key + self.assertTrue(after > before) + + def test__del_empties_first_bucket_not_zeroth_item(self): + SOURCE = dict([('%05d' % i, i) for i in range(1000)]) + tree = self._makeOne(SOURCE) + bucket = tree._data[1].child._firstbucket + next_b = bucket._next + for key in list(bucket): # don't del while iterting + del tree[key] + self.assertTrue(tree._data[1].child._firstbucket is next_b) + + def test__del_empties_first_bucket_zeroth_item(self): + SOURCE = dict([('%05d' % i, i) for i in range(1000)]) + tree = self._makeOne(SOURCE) + bucket = tree._data[0].child._firstbucket + next_b = bucket._next + for key in list(bucket): # don't del while iterting + del tree[key] + self.assertTrue(tree._data[0].child._firstbucket is next_b) + self.assertTrue(tree._firstbucket is next_b) + + def test__del_empties_other_bucket_not_zeroth_item(self): + SOURCE = dict([('%05d' % i, i) for i in range(1000)]) + tree = 
self._makeOne(SOURCE) + bucket = tree._data[1].child._firstbucket._next + next_b = bucket._next + for key in list(bucket): # don't del while iterting + del tree[key] + self.assertTrue(tree._data[1].child._firstbucket._next is next_b) + + def test___getstate___empty(self): + tree = self._makeOne() + self.assertEqual(tree.__getstate__(), None) + + def test___getstate___single_bucket_wo_oid(self): + tree = self._makeOne({'a': 'b'}) + self.assertEqual(tree.__getstate__(), (((('a', 'b'),),),)) + + def test___getstate___single_bucket_w_oid(self): + tree = self._makeOne({'a': 'b'}) + bucket = tree._firstbucket + jar = _Jar() + bucket._p_jar = jar + bucket._p_oid = b'OID' + self.assertEqual(tree.__getstate__(), ((bucket,), bucket)) + + def test___getstate___multiple_buckets(self): + tree = self._makeOne() + FMT = '%05d' + for i in range(1000): + key = FMT % i + tree[key] = i + bucket = tree._firstbucket + EXPECTED = (tree._data[0].child,) + for item in tree._data[1:]: + EXPECTED += (item.key, item.child) + self.assertEqual(tree.__getstate__(), (EXPECTED, bucket)) + + def test___setstate___invalid(self): + tree = self._makeOne() + self.assertRaises(TypeError, tree.__setstate__, ('a', 'b')) + + def test___setstate___to_empty(self): + tree = self._makeOne({'a': 'b'}) + tree.__setstate__(None) + self.assertEqual(len(tree), 0) + + def test___setstate___to_single_bucket_wo_oid(self): + tree = self._makeOne() + tree.__setstate__((((('a', 'b'),),),)) + self.assertEqual(list(tree.keys()), ['a']) + self.assertEqual(tree._findbucket('a')['a'], 'b') + self.assertTrue(len(tree._data), 1) + self.assertTrue(tree._data[0].child is tree._firstbucket) + self.assertTrue(tree._firstbucket._p_oid is None) + + def test___setstate___to_multiple_buckets(self): + from .._base import Bucket + class _Bucket(Bucket): + def _to_key(self, x): + return x + tree = self._makeOne() + b1 = _Bucket({'a': 0, 'b': 1}) + b2 = _Bucket({'c': 2, 'd': 3}) + b1._next = b2 + tree.__setstate__(((b1, 'c', b2), b1)) + 
self.assertEqual(list(tree.keys()), ['a', 'b', 'c', 'd']) + self.assertTrue(len(tree._data), 2) + self.assertEqual(tree._data[0].key, None) + self.assertEqual(tree._data[0].child, b1) + self.assertEqual(tree._data[1].key, 'c') + self.assertEqual(tree._data[1].child, b2) + self.assertTrue(tree._firstbucket is b1) + + def test__check_empty_wo_firstbucket(self): + tree = self._makeOne() + tree._check() # no raise + + def test__check_empty_w_firstbucket(self): + tree = self._makeOne() + tree._firstbucket = object() + e = self.assertRaises(AssertionError, tree._check) + self.assertEqual(str(e), "Empty BTree has non-NULL firstbucket") + + def test__check_nonempty_wo_firstbucket(self): + tree = self._makeOne({'a': 'b'}) + tree._firstbucket = None + e = self.assertRaises(AssertionError, tree._check) + self.assertEqual(str(e), "Non-empty BTree has NULL firstbucket") + + def test__check_nonempty_w_null_child(self): + tree = self._makeOne({'a': 'b'}) + tree._data.append(tree._data[0].__class__('c', None)) + e = self.assertRaises(AssertionError, tree._check) + self.assertEqual(str(e), "BTree has NULL child") + + def test__check_nonempty_w_heterogenous_child(self): + class Other(object): + pass + tree = self._makeOne({'a': 'b'}) + tree._data.append(tree._data[0].__class__('c', Other())) + e = self.assertRaises(AssertionError, tree._check) + self.assertEqual(str(e), "BTree children have different types") + + def test__check_nonempty_w_empty_child(self): + tree = self._makeOne({'a': 'b'}) + first = tree._data[0] + tree._data.append(first.__class__('c', first.child.__class__())) + e = self.assertRaises(AssertionError, tree._check) + self.assertEqual(str(e), "Bucket length < 1") + + def test__check_branch_w_mismatched_firstbucket(self): + tree = self._makeOne() + c_tree = tree.__class__({'a': 'b'}) + c_first = c_tree._data[0] + tree._data.append(c_first.__class__('a', c_tree)) + tree._firstbucket = object() + e = self.assertRaises(AssertionError, tree._check) + 
self.assertEqual(str(e), "BTree has firstbucket different than " + "its first child's firstbucket") + + def test__check_nonempty_w_invalid_child(self): + class Invalid(object): + size = 2 + tree = self._makeOne({'a': 'b'}) + tree._data[0].child = Invalid() + e = self.assertRaises(AssertionError, tree._check) + self.assertEqual(str(e), "Incorrect child type") + + def test__check_branch_traverse_bucket_pointers(self): + tree = self._makeOne() + t_first = tree.__class__({'a': 'b'}) + c_first = t_first._data[0] + b_first = c_first.child + t_second = tree.__class__({'c': 'd'}) + b_first._next = t_second._firstbucket + tree._data.append(c_first.__class__('a', t_first)) + tree._data.append(c_first.__class__('c', t_second)) + tree._firstbucket = t_first._firstbucket + tree._check() #no raise + + def test__check_nonempty_leaf_traverse_bucket_pointers(self): + tree = self._makeOne({'a': 'b'}) + first = tree._data[0] + first.child._next = b2 = first.child.__class__({'c': 'd'}) + tree._data.append(first.__class__('c', b2)) + tree._check() #no raise + + def test__p_resolveConflict_invalid_state_non_tuple(self): + tree = self._makeOne() + INVALID = [] + EMPTY = None + DEGEN = (((('a', 'b'),),),) + self.assertRaises(TypeError, tree._p_resolveConflict, + INVALID, EMPTY, DEGEN) + self.assertRaises(TypeError, tree._p_resolveConflict, + EMPTY, INVALID, DEGEN) + self.assertRaises(TypeError, tree._p_resolveConflict, + EMPTY, DEGEN, INVALID) + + def test__p_resolveConflict_non_degenerate_state(self): + from ..Interfaces import BTreesConflictError + tree = self._makeOne() + FIRST = object() + NON_DEGEN = ((FIRST, 'a', object(), 'b', object()), FIRST) + EMPTY = None + DEGEN = (((('a', 'b'),),),) + e = self.assertRaises(BTreesConflictError, tree._p_resolveConflict, + NON_DEGEN, EMPTY, DEGEN) + self.assertEqual(e.reason, 11) + e = self.assertRaises(BTreesConflictError, tree._p_resolveConflict, + EMPTY, NON_DEGEN, DEGEN) + self.assertEqual(e.reason, 11) + e = 
self.assertRaises(BTreesConflictError, tree._p_resolveConflict, + EMPTY, DEGEN, NON_DEGEN) + self.assertEqual(e.reason, 11) + + def test__p_resolveConflict_invalid_state_non_1_tuple(self): + tree = self._makeOne() + INVALID = ('a', 'b', 'c') + EMPTY = None + DEGEN = (((('a', 'b'),),),) + self.assertRaises(TypeError, tree._p_resolveConflict, + INVALID, EMPTY, DEGEN) + self.assertRaises(TypeError, tree._p_resolveConflict, + EMPTY, INVALID, DEGEN) + self.assertRaises(TypeError, tree._p_resolveConflict, + EMPTY, DEGEN, INVALID) + + def test__p_resolveConflict_invalid_state_nested_non_tuple(self): + tree = self._makeOne() + INVALID = ([],) + EMPTY = None + DEGEN = (((('a', 'b'),),),) + self.assertRaises(TypeError, tree._p_resolveConflict, + INVALID, EMPTY, DEGEN) + self.assertRaises(TypeError, tree._p_resolveConflict, + EMPTY, INVALID, DEGEN) + self.assertRaises(TypeError, tree._p_resolveConflict, + EMPTY, DEGEN, INVALID) + + def test__p_resolveConflict_invalid_state_nested_non_1_tuple(self): + tree = self._makeOne() + INVALID = (('a', 'b', 'c'),) + EMPTY = None + DEGEN = (((('a', 'b'),),),) + self.assertRaises(TypeError, tree._p_resolveConflict, + INVALID, EMPTY, DEGEN) + self.assertRaises(TypeError, tree._p_resolveConflict, + EMPTY, INVALID, DEGEN) + self.assertRaises(TypeError, tree._p_resolveConflict, + EMPTY, DEGEN, INVALID) + + def test__p_resolveConflict_invalid_state_nested2_non_tuple(self): + tree = self._makeOne() + INVALID = (([],),) + EMPTY = None + DEGEN = (((('a', 'b'),),),) + self.assertRaises(TypeError, tree._p_resolveConflict, + INVALID, EMPTY, DEGEN) + self.assertRaises(TypeError, tree._p_resolveConflict, + EMPTY, INVALID, DEGEN) + self.assertRaises(TypeError, tree._p_resolveConflict, + EMPTY, DEGEN, INVALID) + + def test__p_resolveConflict_invalid_state_nested2_non_1_tuple(self): + tree = self._makeOne() + INVALID = ((('a', 'b', 'c'),)) + EMPTY = None + DEGEN = (((('a', 'b'),),),) + self.assertRaises(TypeError, tree._p_resolveConflict, + INVALID, 
EMPTY, DEGEN) + self.assertRaises(TypeError, tree._p_resolveConflict, + EMPTY, INVALID, DEGEN) + self.assertRaises(TypeError, tree._p_resolveConflict, + EMPTY, DEGEN, INVALID) + + def test__p_resolveConflict_w_degenerate_state(self): + tree = self._makeOne() + OLD = (((('a', 'b', 'c', 'd'),),),) + COM = (((('a', 'b', 'c', 'd', 'e', 'f'),),),) + NEW = (((('a', 'b'),),),) + resolved = tree._p_resolveConflict(OLD, COM, NEW) + self.assertEqual(resolved, (((('a', 'b', 'e', 'f'),),),)) + + +class Test_TreeItems(unittest.TestCase): + + assertRaises = _assertRaises + + def _getTargetClass(self): + from .._base import _TreeItems + return _TreeItems + + def _makeOne(self, firstbucket, itertype, iterargs): + return self._getTargetClass()(firstbucket, itertype, iterargs) + + def _makeBucket(self, items=None): + from .._base import Bucket + class _Bucket(Bucket): + def _to_key(self, k): + return k + return _Bucket(items) + + def test___getitem___w_slice(self): + ITEMS = [(y, x) for x, y in enumerate('abcdefghijklmnopqrstuvwxyz')] + bucket = self._makeBucket(ITEMS) + ti = self._makeOne(bucket, 'iterkeys', ()) + self.assertEqual(list(ti[0:3]), ['a', 'b', 'c']) + + def test___getitem___w_negative_index_le_minus_length(self): + ITEMS = [(y, x) for x, y in enumerate('abcdefghijklmnopqrstuvwxyz')] + bucket = self._makeBucket(ITEMS) + ti = self._makeOne(bucket, 'iterkeys', ()) + def _should_error(): + return ti[-27] + self.assertRaises(IndexError, _should_error) + + def test___getitem___w_index_gt_length(self): + ITEMS = [(y, x) for x, y in enumerate('abcdefghijklmnopqrstuvwxyz')] + bucket = self._makeBucket(ITEMS) + ti = self._makeOne(bucket, 'iterkeys', ()) + def _should_error(): + return ti[27] + self.assertRaises(IndexError, _should_error) + + def test___getitem___w_index_smaller_than_cursor(self): + ITEMS = [(y, x) for x, y in enumerate('abcdefghijklmnopqrstuvwxyz')] + bucket = self._makeBucket(ITEMS) + ti = self._makeOne(bucket, 'iterkeys', ()) + ti[12] + self.assertEqual(ti[1], 
'b') + + def test___len__(self): + ITEMS = [(y, x) for x, y in enumerate('abcdefghijklmnopqrstuvwxyz')] + bucket = self._makeBucket(ITEMS) + ti = self._makeOne(bucket, 'iterkeys', ()) + self.assertEqual(len(ti), 26) + # short-circuit on second pass + self.assertEqual(len(ti), 26) + + def test___iter___w_iterkeys(self): + ITEMS = [(y, x) for x, y in enumerate('abcdefghijklmnopqrstuvwxyz')] + bucket = self._makeBucket(ITEMS) + ti = self._makeOne(bucket, 'iterkeys', ()) + self.assertEqual(list(ti), [x[0] for x in ITEMS]) + + def test___iter___w_iteritems(self): + ITEMS = [(y, x) for x, y in enumerate('abcdefghijklmnopqrstuvwxyz')] + bucket = self._makeBucket(ITEMS) + ti = self._makeOne(bucket, 'iteritems', ()) + self.assertEqual(list(ti), ITEMS) + + def test___iter___w_itervalues(self): + ITEMS = [(y, x) for x, y in enumerate('abcdefghijklmnopqrstuvwxyz')] + bucket = self._makeBucket(ITEMS) + ti = self._makeOne(bucket, 'itervalues', ()) + self.assertEqual(list(ti), [x[1] for x in ITEMS]) + + def test___iter___w_empty_last_bucket(self): + ITEMS = [(y, x) for x, y in enumerate('abcdefghijklmnopqrstuvwxyz')] + bucket1 = self._makeBucket(ITEMS) + bucket2 = bucket1._next = self._makeBucket() + ti = self._makeOne(bucket1, 'iterkeys', ()) + self.assertEqual(list(ti), [x[0] for x in ITEMS]) + + +class TreeTests(unittest.TestCase): + + assertRaises = _assertRaises + + def _getTargetClass(self): + from .._base import Tree + return Tree + + def _makeOne(self, items=None): + from .._base import Bucket + class _Bucket(Bucket): + def _to_key(self, k): + return k + class _Test(self._getTargetClass()): + _to_key = _to_value = lambda self, x: x + _bucket_type = _Bucket + max_leaf_size = 10 + max_internal_size = 15 + return _Test(items) + + def test_get_empty_miss(self): + tree = self._makeOne() + self.assertEqual(tree.get('nonesuch'), None) + + def test_get_empty_miss_w_default(self): + DEFAULT = object() + tree = self._makeOne() + self.assertTrue(tree.get('nonesuch', DEFAULT) is 
DEFAULT) + + def test_get_filled_miss(self): + ITEMS = [(y, x) for x, y in enumerate('abcdefghijklmnopqrstuvwxyz')] + tree = self._makeOne(ITEMS) + self.assertEqual(tree.get('nonesuch'), None) + + def test_get_filled_miss_w_default(self): + DEFAULT = object() + ITEMS = [(y, x) for x, y in enumerate('abcdefghijklmnopqrstuvwxyz')] + tree = self._makeOne(ITEMS) + self.assertTrue(tree.get('nonesuch', DEFAULT) is DEFAULT) + + def test_get_filled_hit(self): + ITEMS = [(y, x) for x, y in enumerate('abcdefghijklmnopqrstuvwxyz')] + tree = self._makeOne(ITEMS) + self.assertEqual(tree.get('a'), 0) + + def test___getitem___empty_miss(self): + tree = self._makeOne() + def _should_error(): + return tree['nonesuch'] + self.assertRaises(KeyError, _should_error) + + def test___getitem___filled_miss(self): + ITEMS = [(y, x) for x, y in enumerate('abcdefghijklmnopqrstuvwxyz')] + tree = self._makeOne(ITEMS) + def _should_error(): + return tree['nonesuch'] + self.assertRaises(KeyError, _should_error) + + def test___getitem___filled_hit(self): + ITEMS = [(y, x) for x, y in enumerate('abcdefghijklmnopqrstuvwxyz')] + tree = self._makeOne(ITEMS) + self.assertEqual(tree['a'], 0) + + def test_values_empty_no_args(self): + tree = self._makeOne() + self.assertEqual(list(tree.values()), []) + + def test_values_filled_no_args(self): + ITEMS = [(y, x) for x, y in enumerate('abcdefghijklmnopqrstuvwxyz')] + tree = self._makeOne(ITEMS) + self.assertEqual(list(tree.values()), list(range(26))) + + def test_values_filled_w_args(self): + ITEMS = [(y, x) for x, y in enumerate('abcdefghijklmnopqrstuvwxyz')] + tree = self._makeOne(ITEMS) + self.assertEqual(list(tree.values(min='b', excludemin=True, + max='f', excludemax=True)), + [2, 3, 4]) + + def test_itervalues_empty_no_args(self): + tree = self._makeOne() + self.assertEqual(list(tree.itervalues()), []) + + def test_itervalues_filled_no_args(self): + ITEMS = [(y, x) for x, y in enumerate('abcdefghijklmnopqrstuvwxyz')] + tree = self._makeOne(ITEMS) + 
self.assertEqual(list(tree.itervalues()), list(range(26))) + + def test_itervalues_filled_w_args(self): + ITEMS = [(y, x) for x, y in enumerate('abcdefghijklmnopqrstuvwxyz')] + tree = self._makeOne(ITEMS) + self.assertEqual(list(tree.itervalues(min='b', excludemin=True, + max='f', excludemax=True)), + [2, 3, 4]) + + def test_items_empty_no_args(self): + tree = self._makeOne() + self.assertEqual(list(tree.items()), []) + + def test_items_filled_no_args(self): + ITEMS = [(y, x) for x, y in enumerate('abcdefghijklmnopqrstuvwxyz')] + tree = self._makeOne(ITEMS) + self.assertEqual(list(tree.items()), ITEMS) + + def test_items_filled_w_args(self): + ITEMS = [(y, x) for x, y in enumerate('abcdefghijklmnopqrstuvwxyz')] + tree = self._makeOne(ITEMS) + self.assertEqual(list(tree.items(min='b', excludemin=True, + max='f', excludemax=True)), + ITEMS[2:5]) + + def test_iteritems_empty_no_args(self): + tree = self._makeOne() + self.assertEqual(list(tree.iteritems()), []) + + def test_iteritems_filled_no_args(self): + ITEMS = [(y, x) for x, y in enumerate('abcdefghijklmnopqrstuvwxyz')] + tree = self._makeOne(ITEMS) + self.assertEqual(list(tree.iteritems()), ITEMS) + + def test_iteritems_filled_w_args(self): + ITEMS = [(y, x) for x, y in enumerate('abcdefghijklmnopqrstuvwxyz')] + tree = self._makeOne(ITEMS) + self.assertEqual(list(tree.iteritems(min='b', excludemin=True, + max='f', excludemax=True)), + ITEMS[2:5]) + + def test_byValue(self): + ITEMS = [(y, x) for x, y in enumerate('abcdefghijklmnopqrstuvwxyz')] + tree = self._makeOne(ITEMS) + self.assertEqual(list(tree.byValue(min=22)), + [(y, x) for x, y in reversed(ITEMS[22:])]) + + def test_insert_new_key(self): + tree = self._makeOne() + self.assertTrue(tree.insert('a', 0)) + self.assertEqual(tree['a'], 0) + + def test_insert_would_change_key(self): + ITEMS = [(y, x) for x, y in enumerate('abcdefghijklmnopqrstuvwxyz')] + tree = self._makeOne(ITEMS) + self.assertFalse(tree.insert('a', 1)) + self.assertEqual(tree['a'], 0) + + 
+class TreeSetTests(unittest.TestCase): + + assertRaises = _assertRaises + + def _getTargetClass(self): + from .._base import TreeSet + return TreeSet + + def _makeOne(self, items=None): + from .._base import Bucket + class _Bucket(Bucket): + def _to_key(self, k): + return k + class _Test(self._getTargetClass()): + _to_key = _to_value = lambda self, x: x + _bucket_type = _Bucket + max_leaf_size = 10 + max_internal_size = 15 + return _Test(items) + + def test_add_new_key(self): + _set = self._makeOne() + self.assertTrue(_set.add('a')) + self.assertTrue('a' in _set) + + def test_add_existing_key(self): + _set = self._makeOne() + _set.add('a') + self.assertFalse(_set.add('a')) + + def test_remove_miss(self): + _set = self._makeOne() + self.assertRaises(KeyError, _set.remove, 'a') + + def test_remove_hit(self): + _set = self._makeOne() + _set.add('a') + self.assertEqual(_set.remove('a'), None) + self.assertFalse('a' in _set) + + def test_update_empty_sequence(self): + _set = self._makeOne() + _set.update(()) + self.assertEqual(len(_set), 0) + + def test_update_simple_sequence(self): + _set = self._makeOne() + LETTERS = 'abcdefghijklmnopqrstuvwxyz' + _set.update(LETTERS) + self.assertEqual(len(_set), len(LETTERS)) + for letter in LETTERS: + self.assertTrue(letter in _set) + + def test_update_mppaing(self): + _set = self._makeOne() + LETTERS = 'abcdefghijklmnopqrstuvwxyz' + a_dict = dict([(y, x) for x, y in enumerate(LETTERS)]) + _set.update(a_dict) + self.assertEqual(len(_set), len(LETTERS)) + for letter in LETTERS: + self.assertTrue(letter in _set) + + +class Test_set_operation(unittest.TestCase): + + assertRaises = _assertRaises + + def _getTargetClass(self): + from .._base import set_operation + return set_operation + + def _makeOne(self, func, set_type): + return self._getTargetClass()(func, set_type) + + def test_it(self): + class _SetType(object): + pass + _called_with = [] + def _func(*args, **kw): + _called_with.append((args, kw)) + set_op = self._makeOne(_func, 
_SetType) + set_op('a', b=1) + self.assertEqual(_called_with, [((_SetType, 'a',), {'b': 1})]) + + +class _SetObBase(object): + + def _makeSet(self, *args): + return _Set(*args) + + def _makeMapping(self, *args, **kw): + return _Mapping(*args, **kw) + + +class Test_difference(unittest.TestCase, _SetObBase): + + def _callFUT(self, *args, **kw): + from .._base import difference + return difference(*args, **kw) + + def test_lhs_none(self): + rhs = self._makeSet('a', 'b', 'c') + self.assertEqual(self._callFUT(rhs.__class__, None, rhs), None) + + def test_rhs_none(self): + lhs = self._makeSet('a', 'b', 'c') + self.assertEqual(self._callFUT(lhs.__class__, lhs, None), lhs) + + def test_both_sets_rhs_empty(self): + lhs = self._makeSet('a', 'b', 'c') + rhs = self._makeSet() + result = self._callFUT(lhs.__class__, lhs, rhs) + self.assertEqual(list(result), list(lhs)) + + def test_both_sets_lhs_empty(self): + lhs = self._makeSet() + rhs = self._makeSet('a', 'b', 'c') + result = self._callFUT(lhs.__class__, lhs, rhs) + self.assertEqual(list(result), list(lhs)) + + def test_lhs_set_rhs_mapping(self): + lhs = self._makeSet('a', 'b', 'c') + rhs = self._makeMapping({'a': 13, 'b': 12}) + result = self._callFUT(lhs.__class__, lhs, rhs) + self.assertEqual(list(result), ['c']) + + def test_lhs_mapping_rhs_set(self): + lhs = self._makeMapping({'a': 13, 'b': 12, 'c': 11}) + rhs = self._makeSet('a', 'b') + result = self._callFUT(lhs.__class__, lhs, rhs) + self.assertEqual(list(result), ['c']) + self.assertEqual(result['c'], 11) + + def test_both_mappings_rhs_empty(self): + lhs = self._makeMapping({'a': 13, 'b': 12, 'c': 11}) + rhs = self._makeMapping({}) + result = self._callFUT(lhs.__class__, lhs, rhs) + self.assertEqual(list(result), ['a', 'b', 'c']) + self.assertEqual(result['a'], 13) + self.assertEqual(result['b'], 12) + self.assertEqual(result['c'], 11) + + def test_both_mappings_rhs_non_empty(self): + lhs = self._makeMapping({'a': 13, 'b': 12, 'c': 11, 'f': 10}) + rhs = 
self._makeMapping({'b': 22, 'e': 37}) + result = self._callFUT(lhs.__class__, lhs, rhs) + self.assertEqual(list(result), ['a', 'c', 'f']) + self.assertEqual(result['a'], 13) + self.assertEqual(result['c'], 11) + self.assertEqual(result['f'], 10) + + +class Test_union(unittest.TestCase, _SetObBase): + + def _callFUT(self, *args, **kw): + from .._base import union + return union(*args, **kw) + + def test_lhs_none(self): + rhs = self._makeSet('a', 'b', 'c') + self.assertEqual(self._callFUT(rhs.__class__, None, rhs), rhs) + + def test_rhs_none(self): + lhs = self._makeSet('a', 'b', 'c') + self.assertEqual(self._callFUT(lhs.__class__, lhs, None), lhs) + + def test_both_sets_rhs_empty(self): + lhs = self._makeSet('a', 'b', 'c') + rhs = self._makeSet() + result = self._callFUT(lhs.__class__, lhs, rhs) + self.assertEqual(list(result), list(lhs)) + + def test_both_sets_lhs_empty(self): + lhs = self._makeSet() + rhs = self._makeSet('a', 'b', 'c') + result = self._callFUT(lhs.__class__, lhs, rhs) + self.assertEqual(list(result), list(rhs)) + + def test_lhs_set_rhs_mapping(self): + lhs = self._makeSet('a', 'b', 'c') + rhs = self._makeMapping({'a': 13, 'd': 12}) + result = self._callFUT(lhs.__class__, lhs, rhs) + self.assertEqual(list(result), ['a', 'b', 'c', 'd']) + + def test_lhs_mapping_rhs_set(self): + lhs = self._makeMapping({'a': 13, 'b': 12, 'c': 11}) + rhs = self._makeSet('a', 'd') + result = self._callFUT(lhs.__class__, lhs, rhs) + self.assertTrue(isinstance(result, _Set)) + self.assertEqual(list(result), ['a', 'b', 'c', 'd']) + + def test_both_mappings_rhs_empty(self): + lhs = self._makeMapping({'a': 13, 'b': 12, 'c': 11}) + rhs = self._makeMapping({}) + result = self._callFUT(lhs.__class__, lhs, rhs) + self.assertEqual(list(result), ['a', 'b', 'c']) + + def test_both_mappings_rhs_non_empty(self): + lhs = self._makeMapping({'a': 13, 'c': 12, 'e': 11}) + rhs = self._makeMapping({'b': 22, 'd': 33}) + result = self._callFUT(lhs.__class__, lhs, rhs) + 
self.assertEqual(list(result), ['a', 'b', 'c', 'd', 'e']) + + +class Test_intersection(unittest.TestCase, _SetObBase): + + def _callFUT(self, *args, **kw): + from .._base import intersection + return intersection(*args, **kw) + + def test_lhs_none(self): + rhs = self._makeSet(('a', 'b', 'c')) + self.assertEqual(self._callFUT(rhs.__class__, None, rhs), rhs) + + def test_rhs_none(self): + lhs = self._makeSet(('a', 'b', 'c')) + self.assertEqual(self._callFUT(lhs.__class__, lhs, None), lhs) + + def test_both_sets_rhs_empty(self): + lhs = self._makeSet('a', 'b', 'c') + rhs = self._makeSet() + result = self._callFUT(lhs.__class__, lhs, rhs) + self.assertEqual(list(result), []) + + def test_both_sets_lhs_empty(self): + lhs = self._makeSet() + rhs = self._makeSet('a', 'b', 'c') + result = self._callFUT(lhs.__class__, lhs, rhs) + self.assertEqual(list(result), []) + + def test_lhs_set_rhs_mapping(self): + lhs = self._makeSet('a', 'b', 'c') + rhs = self._makeMapping({'a': 13, 'd': 12}) + result = self._callFUT(lhs.__class__, lhs, rhs) + self.assertEqual(list(result), ['a']) + + def test_lhs_mapping_rhs_set(self): + lhs = self._makeMapping({'a': 13, 'b': 12, 'c': 11}) + rhs = self._makeSet('a', 'd') + result = self._callFUT(lhs.__class__, lhs, rhs) + self.assertTrue(isinstance(result, _Set)) + self.assertEqual(list(result), ['a']) + + def test_both_mappings_rhs_empty(self): + lhs = self._makeMapping({'a': 13, 'b': 12, 'c': 11}) + rhs = self._makeMapping({}) + result = self._callFUT(lhs.__class__, lhs, rhs) + self.assertEqual(list(result), []) + + def test_both_mappings_rhs_non_empty(self): + lhs = self._makeMapping({'a': 13, 'c': 12, 'e': 11}) + rhs = self._makeMapping({'b': 22, 'c': 44, 'd': 33}) + result = self._callFUT(lhs.__class__, lhs, rhs) + self.assertEqual(list(result), ['c']) + + +class Test_weightedUnion(unittest.TestCase, _SetObBase): + + def _callFUT(self, *args, **kw): + from .._base import weightedUnion + return weightedUnion(*args, **kw) + + def 
test_both_none(self): + self.assertEqual(self._callFUT(_Mapping, None, None), (0, None)) + + def test_lhs_none(self): + rhs = self._makeMapping({'a': 13, 'c': 12, 'e': 11}) + self.assertEqual(self._callFUT(rhs.__class__, None, rhs), (1, rhs)) + + def test_rhs_none(self): + lhs = self._makeMapping({'a': 13, 'c': 12, 'e': 11}) + self.assertEqual(self._callFUT(lhs.__class__, lhs, None), (1, lhs)) + + def test_both_mappings_but_no_merge(self): + lhs = {'a': 13, 'b': 12, 'c': 11} + rhs = {'b': 22, 'd': 14} + self.assertRaises(TypeError, self._callFUT, lhs.__class__, lhs, rhs) + + def test_lhs_set_wo_MERGE_DEFAULT_rhs_set(self): + lhs = self._makeSet('a', 'd') + lhs.MERGE = lambda v1, w1, v2, w2: (v1 * w1) + (v2 * w2) + lhs.MERGE_WEIGHT = lambda v, w: v + lhs._mapping_type = _Mapping + rhs = self._makeSet('a', 'b', 'c') + self.assertRaises(TypeError, self._callFUT, lhs.__class__, lhs, rhs) + + def test_lhs_mapping_wo_MERGE_DEFAULT_rhs_set(self): + class _MappingWoDefault(dict): + def MERGE(self, v1, w1, v2, w2): + return (v1 * w1) + (v2 * w2) + def MERGE_WEIGHT(self, v, w): + return v + lhs = _MappingWoDefault({'a': 13, 'b': 12, 'c': 11}) + lhs._mapping_type = _MappingWoDefault + rhs = self._makeSet('a', 'b', 'c') + self.assertRaises(TypeError, self._callFUT, lhs.__class__, lhs, rhs) + + def test_lhs_mapping_wo_MERGE_rhs_mapping(self): + class _MappingWoMerge(dict): + def MERGE_DEFAULT(self): + return 1 + def MERGE_WEIGHT(self, v, w): + return v + lhs = _MappingWoMerge({'a': 13, 'b': 12, 'c': 11}) + lhs._mapping_type = _MappingWoMerge + rhs = self._makeMapping({'a': 1, 'b': 2, 'c': 3}) + self.assertRaises(TypeError, self._callFUT, lhs.__class__, lhs, rhs) + + def test_lhs_set_wo_MERGE_DEFAULT_rhs_mapping(self): + lhs = self._makeSet('a', 'd') + lhs.MERGE = lambda v1, w1, v2, w2: (v1 * w1) + (v2 * w2) + lhs.MERGE_WEIGHT = lambda v, w: v + lhs._mapping_type = _Mapping + rhs = self._makeMapping({'a': 13, 'b': 12, 'c': 11}) + self.assertRaises(TypeError, self._callFUT, 
lhs.__class__, lhs, rhs) + + def test_lhs_mergeable_set_rhs_mapping(self): + lhs = self._makeSet('a', 'd') + lhs.MERGE = lambda v1, w1, v2, w2: (v1 * w1) + (v2 * w2) + lhs.MERGE_WEIGHT = lambda v, w: v + lhs.MERGE_DEFAULT = 1 + lhs._mapping_type = _Mapping + rhs = self._makeMapping({'a': 13, 'b': 12, 'c': 11}) + weight, result = self._callFUT(lhs.__class__, lhs, rhs) + self.assertEqual(weight, 1) + self.assertTrue(isinstance(result, _Mapping)) + self.assertEqual(list(result), ['a', 'b', 'c', 'd']) + self.assertEqual(result['a'], 14) + self.assertEqual(result['b'], 12) + self.assertEqual(result['c'], 11) + self.assertEqual(result['d'], 1) + + def test_lhs_mapping_rhs_set(self): + lhs = self._makeMapping({'a': 13, 'b': 12, 'c': 11}) + rhs = self._makeSet('a', 'd') + weight, result = self._callFUT(lhs.__class__, lhs, rhs) + self.assertEqual(weight, 1) + self.assertTrue(isinstance(result, _Mapping)) + self.assertEqual(list(result), ['a', 'b', 'c', 'd']) + self.assertEqual(result['a'], 55) + self.assertEqual(result['b'], 12) + self.assertEqual(result['c'], 11) + self.assertEqual(result['d'], 42) + + def test_both_mappings_rhs_empty(self): + lhs = self._makeMapping({'a': 13, 'b': 12, 'c': 11}) + rhs = self._makeMapping({}) + weight, result = self._callFUT(lhs.__class__, lhs, rhs) + self.assertEqual(weight, 1) + self.assertEqual(list(result), ['a', 'b', 'c']) + self.assertEqual(result['a'], 13) + self.assertEqual(result['b'], 12) + self.assertEqual(result['c'], 11) + + def test_both_mappings_rhs_non_empty(self): + lhs = self._makeMapping({'a': 13, 'c': 12, 'e': 11}) + rhs = self._makeMapping({'a': 10, 'b': 22, 'd': 33}) + weight, result = self._callFUT(lhs.__class__, lhs, rhs) + self.assertEqual(weight, 1) + self.assertEqual(list(result), ['a', 'b', 'c', 'd', 'e']) + self.assertEqual(result['a'], 23) + self.assertEqual(result['b'], 22) + self.assertEqual(result['c'], 12) + self.assertEqual(result['d'], 33) + self.assertEqual(result['e'], 11) + + def 
test_w_lhs_Set_rhs_Set(self): + from BTrees.IIBTree import IISetPy + lhs = IISetPy([1, 2, 3]) + rhs = IISetPy([1, 4]) + weight, result = self._callFUT(lhs.__class__, lhs, rhs) + self.assertEqual(weight, 1) + self.assertEqual(list(result), [1, 2, 3, 4]) + + #TODO: test non-default weights + + +class Test_weightedIntersection(unittest.TestCase, _SetObBase): + + def _callFUT(self, *args, **kw): + from .._base import weightedIntersection + return weightedIntersection(*args, **kw) + + def test_both_none(self): + self.assertEqual(self._callFUT(_Mapping, None, None), (0, None)) + + def test_lhs_none(self): + rhs = self._makeMapping({'a': 13, 'c': 12, 'e': 11}) + self.assertEqual(self._callFUT(rhs.__class__, None, rhs), (1, rhs)) + + def test_rhs_none(self): + lhs = self._makeMapping({'a': 13, 'c': 12, 'e': 11}) + self.assertEqual(self._callFUT(lhs.__class__, lhs, None), (1, lhs)) + + def test_both_mappings_but_no_merge(self): + lhs = {'a': 13, 'b': 12, 'c': 11} + rhs = {'b': 22, 'd': 14} + self.assertRaises(TypeError, self._callFUT, lhs.__class__, lhs, rhs) + + def test_lhs_mapping_wo_MERGE_rhs_mapping(self): + class _MappingWoMerge(dict): + def MERGE_DEFAULT(self): + return 1 + def MERGE_WEIGHT(self, v, w): + return v + lhs = _MappingWoMerge({'a': 13, 'b': 12, 'c': 11}) + lhs._mapping_type = _MappingWoMerge + rhs = self._makeMapping({'a': 1, 'b': 2, 'c': 3}) + self.assertRaises(TypeError, self._callFUT, lhs.__class__, lhs, rhs) + + def test_lhs_set_wo_MERGE_DEFAULT_rhs_set(self): + lhs = self._makeSet('a', 'd') + lhs.MERGE = lambda v1, w1, v2, w2: (v1 * w1) + (v2 * w2) + lhs.MERGE_WEIGHT = lambda v, w: v + lhs._mapping_type = _Mapping + rhs = self._makeSet('a', 'b', 'c') + self.assertRaises(TypeError, self._callFUT, lhs.__class__, lhs, rhs) + + def test_lhs_set_wo_MERGE_DEFAULT_rhs_mapping(self): + lhs = self._makeSet('a', 'd') + lhs.MERGE = lambda v1, w1, v2, w2: (v1 * w1) + (v2 * w2) + lhs.MERGE_WEIGHT = lambda v, w: v + lhs._mapping_type = _Mapping + rhs = 
self._makeMapping({'a': 13, 'b': 12, 'c': 11}) + self.assertRaises(TypeError, self._callFUT, lhs.__class__, lhs, rhs) + + def test_lhs_mapping_rhs_set(self): + lhs = self._makeMapping({'a': 13, 'b': 12, 'c': 11}) + rhs = self._makeSet('a', 'd') + weight, result = self._callFUT(lhs.__class__, lhs, rhs) + self.assertEqual(weight, 1) + self.assertTrue(isinstance(result, _Mapping)) + self.assertEqual(list(result), ['a']) + self.assertEqual(result['a'], 55) + + def test_both_mappings_rhs_empty(self): + lhs = self._makeMapping({'a': 13, 'b': 12, 'c': 11}) + rhs = self._makeMapping({}) + weight, result = self._callFUT(lhs.__class__, lhs, rhs) + self.assertEqual(weight, 1) + self.assertEqual(list(result), []) + + def test_both_mappings_rhs_non_empty(self): + lhs = self._makeMapping({'a': 13, 'c': 12, 'e': 11}) + rhs = self._makeMapping({'a': 10, 'b': 22, 'd': 33}) + weight, result = self._callFUT(lhs.__class__, lhs, rhs) + self.assertEqual(weight, 1) + self.assertEqual(list(result), ['a']) + self.assertEqual(result['a'], 23) + + def test_w_lhs_Set_rhs_Set(self): + from BTrees.IIBTree import IISetPy + lhs = IISetPy([1, 2, 3]) + rhs = IISetPy([1, 4]) + weight, result = self._callFUT(lhs.__class__, lhs, rhs) + self.assertEqual(weight, 2) + self.assertEqual(list(result), [1]) + + #TODO: test non-default weights + + +class Test_multiunion(unittest.TestCase, _SetObBase): + + def _callFUT(self, *args, **kw): + from .._base import multiunion + return multiunion(*args, **kw) + + def test_no_seqs(self): + result = self._callFUT(_Set, ()) + self.assertEqual(list(result), []) + + def test_w_non_iterable_seq(self): + result = self._callFUT(_Set, (1, 2)) + self.assertEqual(list(result), [1, 2]) + + def test_w_iterable_seqs(self): + result = self._callFUT(_Set, [(1,), (2,)]) + self.assertEqual(list(result), [1, 2]) + + def test_w_mix(self): + result = self._callFUT(_Set, [1, (2,)]) + self.assertEqual(list(result), [1, 2]) + + +class Test_helpers(unittest.TestCase): + + def 
test_to_ob(self): + from BTrees._base import to_ob + faux_self = object() + for thing in "abc", 0, 1.3, (), frozenset((1, 2)), object(): + self.assertTrue(to_ob(faux_self, thing) is thing) + + def test_to_int_w_int(self): + from BTrees._base import to_int + faux_self = object() + self.assertEqual(to_int(faux_self, 3), 3) + + def test_to_int_w_long_in_range(self): + from BTrees._base import to_int + faux_self = object() + try: + self.assertEqual(to_int(faux_self, long(3)), 3) + except NameError: #Python3 + pass + + def test_to_int_w_overflow(self): + from BTrees._base import to_int + faux_self = object() + self.assertRaises(TypeError, to_int, faux_self, 2**64) + + def test_to_int_w_invalid(self): + from BTrees._base import to_int + faux_self = object() + self.assertRaises(TypeError, to_int, faux_self, ()) + + def test_to_float_w_float(self): + from BTrees._base import to_float + faux_self = object() + self.assertEqual(to_float(faux_self, 3.14159), 3.14159) + + def test_to_float_w_int(self): + from BTrees._base import to_float + faux_self = object() + self.assertEqual(to_float(faux_self, 3), 3.0) + + def test_to_float_w_invalid(self): + from BTrees._base import to_float + faux_self = object() + self.assertRaises(TypeError, to_float, faux_self, ()) + + def test_to_long_w_int(self): + from BTrees._base import to_long + faux_self = object() + self.assertEqual(to_long(faux_self, 3), 3) + + def test_to_long_w_long_in_range(self): + from BTrees._base import to_long + faux_self = object() + try: + self.assertEqual(to_long(faux_self, long(3)), 3) + except NameError: #Python3 + pass + + def test_to_long_w_overflow(self): + from BTrees._base import to_long + faux_self = object() + self.assertRaises(ValueError, to_long, faux_self, 2**64) + + def test_to_long_w_invalid(self): + from BTrees._base import to_long + faux_self = object() + self.assertRaises(TypeError, to_long, faux_self, ()) + + def test_to_bytes_w_ok(self): + from BTrees._base import to_bytes + faux_self = object() 
+ conv = to_bytes(3) + self.assertEqual(conv(faux_self, b'abc'), b'abc') + + def test_to_bytes_w_invalid_length(self): + from BTrees._base import to_bytes + faux_self = object() + conv = to_bytes(3) + self.assertRaises(TypeError, conv, faux_self, b'ab') + self.assertRaises(TypeError, conv, faux_self, b'abcd') + + def test_MERGE(self): + from BTrees._base import MERGE + faux_self = object() + self.assertEqual(MERGE(faux_self, 1, 1, 1, 1), 2) + self.assertEqual(MERGE(faux_self, 1, 2, 1, 3), 5) + + def test_MERGE_WEIGHT_default(self): + from BTrees._base import MERGE_WEIGHT_default + faux_self = object() + self.assertEqual(MERGE_WEIGHT_default(faux_self, 1, 17), 1) + self.assertEqual(MERGE_WEIGHT_default(faux_self, 7, 1), 7) + + def test_MERGE_WEIGHT_numeric(self): + from BTrees._base import MERGE_WEIGHT_numeric + faux_self = object() + self.assertEqual(MERGE_WEIGHT_numeric(faux_self, 1, 17), 17) + self.assertEqual(MERGE_WEIGHT_numeric(faux_self, 7, 1), 7) + + +class _Cache(object): + def __init__(self): + self._mru = [] + def mru(self, oid): + self._mru.append(oid) + + +class _Jar(object): + def __init__(self): + self._current = set() + self._cache = _Cache() + def readCurrent(self, obj): + self._current.add(obj) + def register(self, obj): + pass + + +class _Set(object): + def __init__(self, *args, **kw): + if len(args) == 1 and isinstance(args[0], tuple): + keys = args[0] + else: + keys = set(args) + self._keys = sorted(keys) + def keys(self): + return self._keys + def __iter__(self): + return iter(self._keys) + def update(self, items): + self._keys = sorted(self._keys + list(items)) +_Set._set_type = _Set + + +class _Mapping(dict): + def __init__(self, source=None): + if source is None: + source = {} + self._keys = [] + self._values = [] + for k, v in sorted(source.items()): + self._keys.append(k) + self._values.append(v) + MERGE_DEFAULT = 42 + def MERGE_WEIGHT(self, v, w): + return v + def MERGE(self, v1, w1, v2, w2): + return v1 * w1 + v2 * w2 + def 
iteritems(self): + for k, v in zip(self._keys, self._values): + yield k,v + def __iter__(self): + return iter(self._keys) + def __getitem__(self, key): + search = dict(zip(self._keys, self._values)) + return search[key] + def __repr__(self): + return repr(dict(zip(self._keys, self._values))) +_Mapping._set_type = _Set +_Mapping._mapping_type = _Mapping + + +def test_suite(): + return unittest.TestSuite(( + unittest.makeSuite(Test_Base), + unittest.makeSuite(Test_BucketBase), + unittest.makeSuite(Test_SetIteration), + unittest.makeSuite(BucketTests), + unittest.makeSuite(SetTests), + unittest.makeSuite(Test_TreeItem), + unittest.makeSuite(Test_Tree), + unittest.makeSuite(Test_TreeItems), + unittest.makeSuite(TreeTests), + unittest.makeSuite(TreeSetTests), + unittest.makeSuite(Test_set_operation), + unittest.makeSuite(Test_difference), + unittest.makeSuite(Test_union), + unittest.makeSuite(Test_intersection), + unittest.makeSuite(Test_weightedUnion), + unittest.makeSuite(Test_weightedIntersection), + unittest.makeSuite(Test_multiunion), + unittest.makeSuite(Test_helpers), + )) diff --git a/thesisenv/lib/python3.6/site-packages/BTrees/tests/test_btreesubclass.py b/thesisenv/lib/python3.6/site-packages/BTrees/tests/test_btreesubclass.py new file mode 100644 index 0000000..b7bff74 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/BTrees/tests/test_btreesubclass.py @@ -0,0 +1,54 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. 
+# +############################################################################## +from BTrees.OOBTree import OOBTree, OOBucket + +class B(OOBucket): + pass + +class T(OOBTree): + _bucket_type = B + max_leaf_size = 2 + max_internal_size = 3 + +class S(T): + pass + +import unittest + +class SubclassTest(unittest.TestCase): + + def testSubclass(self): + # test that a subclass that defines _bucket_type gets buckets + # of that type + t = T() + t[0] = 0 + self.assertTrue(t._firstbucket.__class__ is B) + + def testCustomNodeSizes(self): + # We override btree and bucket split sizes in BTree subclasses. + t = S() + for i in range(8): + t[i] = i + state = t.__getstate__()[0] + self.assertEqual(len(state), 5) + sub = state[0] + self.assertEqual(sub.__class__, S) + sub = sub.__getstate__()[0] + self.assertEqual(len(sub), 5) + sub = sub[0] + self.assertEqual(sub.__class__, B) + self.assertEqual(len(sub), 1) + +def test_suite(): + return unittest.makeSuite(SubclassTest) diff --git a/thesisenv/lib/python3.6/site-packages/BTrees/tests/test_check.py b/thesisenv/lib/python3.6/site-packages/BTrees/tests/test_check.py new file mode 100644 index 0000000..b99bf7c --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/BTrees/tests/test_check.py @@ -0,0 +1,425 @@ +############################################################################## +# +# Copyright (c) 2003 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. 
+# +############################################################################## +import unittest + + +def _assertRaises(self, e_type, checked, *args, **kw): + try: + checked(*args, **kw) + except e_type as e: + return e + self.fail("Didn't raise: %s" % e_type.__name__) + + +class Test_classify(unittest.TestCase): + + def _callFUT(self, obj): + from BTrees.check import classify + return classify(obj) + + def test_classify_w_unknown(self): + class NotClassified(object): + pass + self.assertRaises(KeyError, self._callFUT, NotClassified()) + + def test_classify_w_bucket(self): + from BTrees.OOBTree import OOBucketPy + from BTrees.check import TYPE_BUCKET + kind, is_mapping = self._callFUT(OOBucketPy()) + self.assertEqual(kind, TYPE_BUCKET) + self.assertTrue(is_mapping) + + def test_classify_w_set(self): + from BTrees.OOBTree import OOSetPy + from BTrees.check import TYPE_BUCKET + kind, is_mapping = self._callFUT(OOSetPy()) + self.assertEqual(kind, TYPE_BUCKET) + self.assertFalse(is_mapping) + + def test_classify_w_tree(self): + from BTrees.OOBTree import OOBTreePy + from BTrees.check import TYPE_BTREE + kind, is_mapping = self._callFUT(OOBTreePy()) + self.assertEqual(kind, TYPE_BTREE) + self.assertTrue(is_mapping) + + def test_classify_w_treeset(self): + from BTrees.OOBTree import OOTreeSetPy + from BTrees.check import TYPE_BTREE + kind, is_mapping = self._callFUT(OOTreeSetPy()) + self.assertEqual(kind, TYPE_BTREE) + self.assertFalse(is_mapping) + + +class Test_crack_btree(unittest.TestCase): + + def _callFUT(self, obj, is_mapping): + from BTrees.check import crack_btree + return crack_btree(obj, is_mapping) + + def test_w_empty_tree(self): + from BTrees.check import BTREE_EMPTY + class Empty(object): + def __getstate__(self): + return None + kind, keys, kids = self._callFUT(Empty(), True) + self.assertEqual(kind, BTREE_EMPTY) + self.assertEqual(keys, []) + self.assertEqual(kids, []) + + def test_w_degenerate_tree(self): + from BTrees.check import BTREE_ONE + class 
Degenerate(object): + def __getstate__(self): + return ((('a', 1, 'b', 2),),) + kind, keys, kids = self._callFUT(Degenerate(), True) + self.assertEqual(kind, BTREE_ONE) + self.assertEqual(keys, ('a', 1, 'b', 2)) + self.assertEqual(kids, None) + + def test_w_normal_tree(self): + from BTrees.check import BTREE_NORMAL + first_bucket = [object()] * 8 + second_bucket = [object()] * 8 + class Normal(object): + def __getstate__(self): + return ((first_bucket, 'b', second_bucket), first_bucket) + kind, keys, kids = self._callFUT(Normal(), True) + self.assertEqual(kind, BTREE_NORMAL) + self.assertEqual(keys, ['b']) + self.assertEqual(kids, [first_bucket, second_bucket]) + + +class Test_crack_bucket(unittest.TestCase): + + def _callFUT(self, obj, is_mapping): + from BTrees.check import crack_bucket + return crack_bucket(obj, is_mapping) + + def test_w_empty_set(self): + class EmptySet(object): + def __getstate__(self): + return ([],) + keys, values = self._callFUT(EmptySet(), False) + self.assertEqual(keys, []) + self.assertEqual(values, []) + + def test_w_non_empty_set(self): + class NonEmptySet(object): + def __getstate__(self): + return (['a', 'b', 'c'],) + keys, values = self._callFUT(NonEmptySet(), False) + self.assertEqual(keys, ['a', 'b', 'c']) + self.assertEqual(values, []) + + def test_w_empty_mapping(self): + class EmptyMapping(object): + def __getstate__(self): + return ([], object()) + keys, values = self._callFUT(EmptyMapping(), True) + self.assertEqual(keys, []) + self.assertEqual(values, []) + + def test_w_non_empty_mapping(self): + class NonEmptyMapping(object): + def __getstate__(self): + return (['a', 1, 'b', 2, 'c', 3], object()) + keys, values = self._callFUT(NonEmptyMapping(), True) + self.assertEqual(keys, ['a', 'b', 'c']) + self.assertEqual(values, [1, 2, 3]) + + +class Test_type_and_adr(unittest.TestCase): + + def _callFUT(self, obj): + from BTrees.check import type_and_adr + return type_and_adr(obj) + + def test_type_and_adr_w_oid(self): + from 
BTrees.utils import oid_repr + class WithOid(object): + _p_oid = b'DEADBEEF' + t_and_a = self._callFUT(WithOid()) + self.assertTrue(t_and_a.startswith('WithOid (0x')) + self.assertTrue(t_and_a.endswith('oid=%s)' % oid_repr(b'DEADBEEF'))) + + def test_type_and_adr_wo_oid(self): + class WithoutOid(object): + pass + t_and_a = self._callFUT(WithoutOid()) + self.assertTrue(t_and_a.startswith('WithoutOid (0x')) + self.assertTrue(t_and_a.endswith('oid=None)')) + + +class WalkerTests(unittest.TestCase): + + def _getTargetClass(self): + from BTrees.check import Walker + return Walker + + def _makeOne(self, obj): + return self._getTargetClass()(obj) + + def test_visit_btree_abstract(self): + walker = self._makeOne(object()) + obj = object() + path = '/' + parent = object() + is_mapping = True + keys = [] + kids = [] + lo = 0 + hi = None + self.assertRaises(NotImplementedError, walker.visit_btree, + obj, path, parent, is_mapping, keys, kids, lo, hi) + + def test_visit_bucket_abstract(self): + walker = self._makeOne(object()) + obj = object() + path = '/' + parent = object() + is_mapping = True + keys = [] + kids = [] + lo = 0 + hi = None + self.assertRaises(NotImplementedError, walker.visit_bucket, + obj, path, parent, is_mapping, keys, kids, lo, hi) + + def test_walk_w_empty_bucket(self): + from BTrees.OOBTree import OOBucket + obj = OOBucket() + walker = self._makeOne(obj) + path = '/' + parent = object() + is_mapping = True + keys = [] + kids = [] + lo = 0 + hi = None + self.assertRaises(NotImplementedError, walker.walk) + + def test_walk_w_empty_btree(self): + from BTrees.OOBTree import OOBTree + obj = OOBTree() + walker = self._makeOne(obj) + path = '/' + parent = object() + is_mapping = True + keys = [] + kids = [] + lo = 0 + hi = None + self.assertRaises(NotImplementedError, walker.walk) + + def test_walk_w_degenerate_btree(self): + from BTrees.OOBTree import OOBTree + obj = OOBTree() + obj['a'] = 1 + walker = self._makeOne(obj) + path = '/' + parent = object() + 
is_mapping = True + keys = [] + kids = [] + lo = 0 + hi = None + self.assertRaises(NotImplementedError, walker.walk) + + def test_walk_w_normal_btree(self): + from BTrees.IIBTree import IIBTree + obj = IIBTree() + for i in range(1000): + obj[i] = i + walker = self._makeOne(obj) + path = '/' + parent = object() + is_mapping = True + keys = [] + kids = [] + lo = 0 + hi = None + self.assertRaises(NotImplementedError, walker.walk) + + +class CheckerTests(unittest.TestCase): + + assertRaises = _assertRaises + + def _getTargetClass(self): + from BTrees.check import Checker + return Checker + + def _makeOne(self, obj): + return self._getTargetClass()(obj) + + def test_walk_w_empty_bucket(self): + from BTrees.OOBTree import OOBucket + obj = OOBucket() + checker = self._makeOne(obj) + path = '/' + parent = object() + is_mapping = True + keys = [] + kids = [] + lo = 0 + hi = None + checker.check() #noraise + + def test_walk_w_empty_btree(self): + obj = _makeTree(False) + checker = self._makeOne(obj) + path = '/' + parent = object() + is_mapping = True + keys = [] + kids = [] + lo = 0 + hi = None + checker.check() #noraise + + def test_walk_w_degenerate_btree(self): + obj = _makeTree(False) + obj['a'] = 1 + checker = self._makeOne(obj) + path = '/' + parent = object() + is_mapping = True + keys = [] + kids = [] + lo = 0 + hi = None + checker.check() #noraise + + def test_walk_w_normal_btree(self): + obj = _makeTree(False) + checker = self._makeOne(obj) + path = '/' + parent = object() + is_mapping = True + keys = [] + kids = [] + lo = 0 + hi = None + checker.check() #noraise + + def test_walk_w_key_too_large(self): + obj = _makeTree(True) + state = obj.__getstate__() + # Damage an invariant by dropping the BTree key to 14. 
+ new_state = (state[0][0], 14, state[0][2]), state[1] + obj.__setstate__(new_state) + checker = self._makeOne(obj) + path = '/' + parent = object() + is_mapping = True + keys = [] + kids = [] + lo = 0 + hi = None + e = self.assertRaises(AssertionError, checker.check) + self.assertTrue(">= upper bound" in str(e)) + + def test_walk_w_key_too_small(self): + obj = _makeTree(True) + state = obj.__getstate__() + # Damage an invariant by bumping the BTree key to 16. + new_state = (state[0][0], 16, state[0][2]), state[1] + obj.__setstate__(new_state) + checker = self._makeOne(obj) + path = '/' + parent = object() + is_mapping = True + keys = [] + kids = [] + lo = 0 + hi = None + e = self.assertRaises(AssertionError, checker.check) + self.assertTrue("< lower bound" in str(e)) + + def test_walk_w_keys_swapped(self): + obj = _makeTree(True) + state = obj.__getstate__() + # Damage an invariant by bumping the BTree key to 16. + (b0, num, b1), firstbucket = state + self.assertEqual(b0[4], 8) + self.assertEqual(b0[5], 10) + b0state = b0.__getstate__() + self.assertEqual(len(b0state), 2) + # b0state looks like + # ((k0, v0, k1, v1, ...), nextbucket) + pairs, nextbucket = b0state + self.assertEqual(pairs[8], 4) + self.assertEqual(pairs[9], 8) + self.assertEqual(pairs[10], 5) + self.assertEqual(pairs[11], 10) + newpairs = pairs[:8] + (5, 10, 4, 8) + pairs[12:] + b0.__setstate__((newpairs, nextbucket)) + checker = self._makeOne(obj) + path = '/' + parent = object() + is_mapping = True + keys = [] + kids = [] + lo = 0 + hi = None + e = self.assertRaises(AssertionError, checker.check) + self.assertTrue("key 5 at index 4 >= key 4 at index 5" in str(e)) + + +class Test_check(unittest.TestCase): + + def _callFUT(self, tree): + from BTrees.check import check + return check(tree) + + def _makeOne(self): + from BTrees.OOBTree import OOBTree + tree = OOBTree() + for i in range(31): + tree[i] = 2*i + return tree + + def test_normal(self): + from BTrees.OOBTree import OOBTree + tree = 
OOBTree() + for i in range(31): + tree[i] = 2*i + state = tree.__getstate__() + self.assertEqual(len(state), 2) + self.assertEqual(len(state[0]), 3) + self.assertEqual(state[0][1], 15) + self._callFUT(tree) #noraise + + +def _makeTree(fill): + from BTrees.OOBTree import OOBTree + from BTrees.OOBTree import _BUCKET_SIZE + tree = OOBTree() + if fill: + for i in range(_BUCKET_SIZE + 1): + tree[i] = 2*i + return tree + + +def test_suite(): + return unittest.TestSuite(( + unittest.makeSuite(Test_classify), + unittest.makeSuite(Test_crack_btree), + unittest.makeSuite(Test_crack_bucket), + unittest.makeSuite(Test_type_and_adr), + unittest.makeSuite(WalkerTests), + unittest.makeSuite(CheckerTests), + unittest.makeSuite(Test_check), + )) diff --git a/thesisenv/lib/python3.6/site-packages/BTrees/tests/test_fsBTree.py b/thesisenv/lib/python3.6/site-packages/BTrees/tests/test_fsBTree.py new file mode 100644 index 0000000..0ed79f5 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/BTrees/tests/test_fsBTree.py @@ -0,0 +1,64 @@ +############################################################################## +# +# Copyright (c) 2010 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. 
+# +############################################################################## +import unittest + + +class fsBucketBase(object): + + def _makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + def _makeBytesItems(self): + from .._compat import _ascii + return[(_ascii(c*2), _ascii(c*6)) for c in 'abcdef'] + + def test_toString(self): + bucket = self._makeOne(self._makeBytesItems()) + self.assertEqual(bucket.toString(), + b'aabbccddeeffaaaaaabbbbbbccccccddddddeeeeeeffffff') + + def test_fromString(self): + before = self._makeOne(self._makeBytesItems()) + after = before.fromString(before.toString()) + self.assertEqual(before.__getstate__(), after.__getstate__()) + + def test_fromString_empty(self): + before = self._makeOne(self._makeBytesItems()) + after = before.fromString(b'') + self.assertEqual(after.__getstate__(), ((),)) + + def test_fromString_invalid_length(self): + bucket = self._makeOne(self._makeBytesItems()) + self.assertRaises(ValueError, bucket.fromString, b'xxx') + + +class fsBucketTests(unittest.TestCase, fsBucketBase): + + def _getTargetClass(self): + from BTrees.fsBTree import fsBucket + return fsBucket + + +class fsBucketPyTests(unittest.TestCase, fsBucketBase): + + def _getTargetClass(self): + from BTrees.fsBTree import fsBucketPy + return fsBucketPy + + +def test_suite(): + return unittest.TestSuite(( + unittest.makeSuite(fsBucketTests), + unittest.makeSuite(fsBucketPyTests), + )) diff --git a/thesisenv/lib/python3.6/site-packages/BTrees/tests/test_utils.py b/thesisenv/lib/python3.6/site-packages/BTrees/tests/test_utils.py new file mode 100644 index 0000000..8c1657c --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/BTrees/tests/test_utils.py @@ -0,0 +1,83 @@ +############################################################################## +# +# Copyright (c) 2001-2012 Zope Foundation and Contributors. +# All Rights Reserved. 
+# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## +import unittest + + +class Test_non_negative(unittest.TestCase): + + def _callFUT(self, int_val): + from BTrees.utils import non_negative + return non_negative(int_val) + + def test_w_big_negative(self): + self.assertEqual(self._callFUT(-(2**63 - 1)), 1) + + def test_w_negative(self): + self.assertEqual(self._callFUT(-1), 2**63 - 1) + + def test_w_zero(self): + self.assertEqual(self._callFUT(0), 0) + + def test_w_positive(self): + self.assertEqual(self._callFUT(1), 1) + + def test_w_big_positive(self): + import sys + try: + self.assertEqual(self._callFUT(sys.maxint), sys.maxint) + except AttributeError: #pragma NO COVER Py3k + pass + + +class Test_oid_repr(unittest.TestCase): + + def _callFUT(self, oid): + from BTrees.utils import oid_repr + return oid_repr(oid) + + def test_w_non_strings(self): + self.assertEqual(self._callFUT(None), repr(None)) + self.assertEqual(self._callFUT(()), repr(())) + self.assertEqual(self._callFUT([]), repr([])) + self.assertEqual(self._callFUT({}), repr({})) + self.assertEqual(self._callFUT(0), repr(0)) + + def test_w_short_strings(self): + for length in range(8): + faux = 'x' * length + self.assertEqual(self._callFUT(faux), repr(faux)) + + def test_w_long_strings(self): + for length in range(9, 1024): + faux = 'x' * length + self.assertEqual(self._callFUT(faux), repr(faux)) + + def test_w_zero(self): + self.assertEqual(self._callFUT(b'\0\0\0\0\0\0\0\0'), b'0x00') + + def test_w_one(self): + self.assertEqual(self._callFUT(b'\0\0\0\0\0\0\0\1'), b'0x01') + + def 
test_w_even_length(self): + self.assertEqual(self._callFUT(b'\0\0\0\0\0\0\xAB\xC4'), b'0xabc4') + + def test_w_odd_length(self): + self.assertEqual(self._callFUT(b'\0\0\0\0\0\0\x0D\xEF'), b'0x0def') + + +def test_suite(): + return unittest.TestSuite(( + unittest.makeSuite(Test_non_negative), + unittest.makeSuite(Test_oid_repr), + )) diff --git a/thesisenv/lib/python3.6/site-packages/BTrees/utils.py b/thesisenv/lib/python3.6/site-packages/BTrees/utils.py new file mode 100644 index 0000000..5f0491c --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/BTrees/utils.py @@ -0,0 +1,45 @@ +############################################################################## +# +# Copyright (c) 2001-2012 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## +# Copied from ZODB/utils.py + +from binascii import hexlify + +from ._compat import _bytes + +def non_negative(int_val): + if int_val < 0: + # Coerce to non-negative. + int_val &= 0x7FFFFFFFFFFFFFFF + return int_val + + +def positive_id(obj): #pragma NO COVER + """Return id(obj) as a non-negative integer.""" + return non_negative(id(obj)) + + +def oid_repr(oid): + if isinstance(oid, _bytes) and len(oid) == 8: + # Convert to hex and strip leading zeroes. + as_hex = hexlify(oid).lstrip(b'0') + # Ensure two characters per input byte. 
+ chunks = [b'0x'] + if len(as_hex) & 1: + chunks.append(b'0') + elif as_hex == b'': + as_hex = b'00' + chunks.append(as_hex) + return b''.join(chunks) + else: + return repr(oid) diff --git a/thesisenv/lib/python3.6/site-packages/ComputedAttribute/_ComputedAttribute.c b/thesisenv/lib/python3.6/site-packages/ComputedAttribute/_ComputedAttribute.c new file mode 100644 index 0000000..ff9a8bc --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ComputedAttribute/_ComputedAttribute.c @@ -0,0 +1,159 @@ +/***************************************************************************** + + Copyright (c) 1996-2003 Zope Foundation and Contributors. + All Rights Reserved. + + This software is subject to the provisions of the Zope Public License, + Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. + THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED + WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED + WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS + FOR A PARTICULAR PURPOSE + + ****************************************************************************/ +#include "ExtensionClass/ExtensionClass.h" +#include "ExtensionClass/_compat.h" + +#define UNLESS(E) if(!(E)) +#define OBJECT(O) ((PyObject*)(O)) + +typedef struct { + PyObject_HEAD + PyObject *callable; + int level; +} CA; + +static PyObject * +CA__init__(CA *self, PyObject *args) +{ + PyObject *callable; + int level=0; + + UNLESS(PyArg_ParseTuple(args,"O|i",&callable, &level)) return NULL; + + if (level > 0) + { + callable=PyObject_CallFunction(OBJECT(Py_TYPE(self)), "Oi", + callable, level-1); + UNLESS (callable) return NULL; + self->level=level; + } + else + { + Py_INCREF(callable); + self->level=0; + } + + self->callable=callable; + + Py_INCREF(Py_None); + return Py_None; +} + +static void +CA_dealloc(CA *self) +{ + Py_DECREF(self->callable); + Py_DECREF(Py_TYPE(self)); + Py_TYPE(self)->tp_free(OBJECT(self)); +} + +static PyObject * 
+CA_of(CA *self, PyObject *args) +{ + if (self->level > 0) + { + Py_INCREF(self->callable); + return self->callable; + } + + if (NATIVE_CHECK(self->callable)) + { + /* Special case string as simple alias. */ + PyObject *o; + + UNLESS (PyArg_ParseTuple(args,"O", &o)) return NULL; + return PyObject_GetAttr(o, self->callable); + } + + return PyObject_CallObject(self->callable, args); +} + +static struct PyMethodDef CA_methods[] = { + {"__init__",(PyCFunction)CA__init__, METH_VARARGS, ""}, + {"__of__", (PyCFunction)CA_of, METH_VARARGS, ""}, + {NULL, NULL} /* sentinel */ +}; + +static PyExtensionClass ComputedAttributeType = { + PyVarObject_HEAD_INIT(NULL, 0) + "ComputedAttribute", sizeof(CA), + 0, + (destructor)CA_dealloc, + 0,0,0,0,0, 0,0,0, 0,0,0,0,0, 0,0, + "ComputedAttribute(callable) -- Create a computed attribute", + METHOD_CHAIN(CA_methods), + (void*)(EXTENSIONCLASS_BINDABLE_FLAG) +}; + +static struct PyMethodDef methods[] = { + {NULL, NULL} +}; + +#ifdef PY3K +static struct PyModuleDef moduledef = +{ + PyModuleDef_HEAD_INIT, + "_ComputedAttribute", /* m_name */ + "Provide ComputedAttribute\n\n", /* m_doc */ + -1, /* m_size */ + methods, /* m_methods */ + NULL, /* m_reload */ + NULL, /* m_traverse */ + NULL, /* m_clear */ + NULL, /* m_free */ +}; +#endif + + +static PyObject* +module_init(void) +{ + PyObject *m, *d; + + UNLESS(ExtensionClassImported) return NULL; + +#ifdef PY3K + m = PyModule_Create(&moduledef); +#else + m = Py_InitModule3( + "_ComputedAttribute", + methods, + "Provide Computed Attributes\n\n"); +#endif + + if (m == NULL) { + return NULL; + } + + d = PyModule_GetDict(m); + if (d == NULL) { + return NULL; + } + + PyExtensionClass_Export(d, "ComputedAttribute", ComputedAttributeType); + + return m; +} + +#ifdef PY3K +PyMODINIT_FUNC PyInit__ComputedAttribute(void) +{ + return module_init(); +} +#else +PyMODINIT_FUNC init_ComputedAttribute(void) +{ + module_init(); +} +#endif diff --git 
a/thesisenv/lib/python3.6/site-packages/ComputedAttribute/_ComputedAttribute.cpython-36m-darwin.so b/thesisenv/lib/python3.6/site-packages/ComputedAttribute/_ComputedAttribute.cpython-36m-darwin.so new file mode 100755 index 0000000..2eebe62 Binary files /dev/null and b/thesisenv/lib/python3.6/site-packages/ComputedAttribute/_ComputedAttribute.cpython-36m-darwin.so differ diff --git a/thesisenv/lib/python3.6/site-packages/ComputedAttribute/__init__.py b/thesisenv/lib/python3.6/site-packages/ComputedAttribute/__init__.py new file mode 100644 index 0000000..f47bbb7 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ComputedAttribute/__init__.py @@ -0,0 +1,22 @@ +from ExtensionClass import Base +from ExtensionClass import C_EXTENSION + + +class ComputedAttribute(Base): + """ComputedAttribute(callable) -- Create a computed attribute""" + + def __init__(self, func, level=0): + if level > 0: + func = ComputedAttribute(func, level - 1) + self.callable = func + self.level = level + + def __of__(self, inst): + func = self.__dict__['callable'] + if self.level: + return func + return func(inst) + + +if C_EXTENSION: # pragma no cover + from ._ComputedAttribute import * # NOQA diff --git a/thesisenv/lib/python3.6/site-packages/ComputedAttribute/tests.py b/thesisenv/lib/python3.6/site-packages/ComputedAttribute/tests.py new file mode 100644 index 0000000..25e705a --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ComputedAttribute/tests.py @@ -0,0 +1,108 @@ +############################################################################## +# +# Copyright (c) 2003 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Computed Attributes + +Computed attributes work much like properties: + +>>> import math +>>> from ComputedAttribute import ComputedAttribute +>>> from ExtensionClass import Base +>>> class Point(Base): +... def __init__(self, x, y): +... self.x, self.y = x, y +... length = ComputedAttribute(lambda self: math.sqrt(self.x**2+self.y**2)) + +>>> p = Point(3, 4) +>>> "%.1f" % p.length +'5.0' + +Except that you can also use computed attributes with instances: + +>>> p.angle = ComputedAttribute(lambda self: math.atan(self.y*1.0/self.x)) +>>> "%.2f" % p.angle +'0.93' +""" + +from doctest import DocTestSuite +import unittest + +from ComputedAttribute import ComputedAttribute +from ExtensionClass import Base + + +def test_wrapper_support(): + """Wrapper support + + To support acquisition wrappers, computed attributes have a level. + The computation is only done when the level is zero. Retrieving a + computed attribute with a level > 0 returns a computed attribute + with a decremented level. + + >>> from ExtensionClass import Base + >>> class X(Base): + ... 
pass + + >>> x = X() + >>> x.n = 1 + + >>> from ComputedAttribute import ComputedAttribute + >>> x.n2 = ComputedAttribute(lambda self: self.n*2) + >>> x.n2 + 2 + >>> x.n2.__class__.__name__ + 'int' + + >>> x.n2 = ComputedAttribute(lambda self: self.n*2, 2) + >>> x.n2.__class__.__name__ + 'ComputedAttribute' + >>> x.n2 = x.n2 + >>> x.n2.__class__.__name__ + 'ComputedAttribute' + >>> x.n2 = x.n2 + >>> x.n2.__class__.__name__ + 'int' + """ + + +class TestComputedAttribute(unittest.TestCase): + def _construct_class(self, level): + class X(Base): + def _get_a(self): + return 1 + + a = ComputedAttribute(_get_a, level) + + return X + + def test_computed_attribute_on_class_level0(self): + x = self._construct_class(0)() + self.assertEqual(x.a, 1) + + def test_computed_attribute_on_class_level1(self): + x = self._construct_class(1)() + self.assertIsInstance(x.a, ComputedAttribute) + + def test_compilation(self): + from ExtensionClass import _IS_PYPY + try: + from ComputedAttribute import _ComputedAttribute + except ImportError: # pragma: no cover + self.assertTrue(_IS_PYPY) + else: + self.assertTrue(hasattr(_ComputedAttribute, 'ComputedAttribute')) + +def test_suite(): + suite = unittest.defaultTestLoader.loadTestsFromName(__name__) + suite.addTest(DocTestSuite()) + return suite diff --git a/thesisenv/lib/python3.6/site-packages/DateTime-4.3.dist-info/DESCRIPTION.rst b/thesisenv/lib/python3.6/site-packages/DateTime-4.3.dist-info/DESCRIPTION.rst new file mode 100644 index 0000000..75cbed7 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/DateTime-4.3.dist-info/DESCRIPTION.rst @@ -0,0 +1,1029 @@ +The DateTime package +==================== + +Encapsulation of date/time values. 
+ + +Function Timezones() +-------------------- + +Returns the list of recognized timezone names: + + >>> from DateTime import Timezones + >>> zones = set(Timezones()) + +Almost all of the standard pytz timezones are included, with the exception +of some commonly-used but ambiguous abbreviations, where historical Zope +usage conflicts with the name used by pytz: + + >>> import pytz + >>> [x for x in pytz.all_timezones if x not in zones] + ['CET', 'EET', 'EST', 'MET', 'MST', 'WET'] + +Class DateTime +-------------- + +DateTime objects represent instants in time and provide interfaces for +controlling its representation without affecting the absolute value of +the object. + +DateTime objects may be created from a wide variety of string or +numeric data, or may be computed from other DateTime objects. +DateTimes support the ability to convert their representations to many +major timezones, as well as the ablility to create a DateTime object +in the context of a given timezone. + +DateTime objects provide partial numerical behavior: + +* Two date-time objects can be subtracted to obtain a time, in days + between the two. + +* A date-time object and a positive or negative number may be added to + obtain a new date-time object that is the given number of days later + than the input date-time object. + +* A positive or negative number and a date-time object may be added to + obtain a new date-time object that is the given number of days later + than the input date-time object. + +* A positive or negative number may be subtracted from a date-time + object to obtain a new date-time object that is the given number of + days earlier than the input date-time object. + +DateTime objects may be converted to integer, long, or float numbers +of days since January 1, 1901, using the standard int, long, and float +functions (Compatibility Note: int, long and float return the number +of days since 1901 in GMT rather than local machine timezone). 
+DateTime objects also provide access to their value in a float format +usable with the python time module, provided that the value of the +object falls in the range of the epoch-based time module. + +A DateTime object should be considered immutable; all conversion and numeric +operations return a new DateTime object rather than modify the current object. + +A DateTime object always maintains its value as an absolute UTC time, +and is represented in the context of some timezone based on the +arguments used to create the object. A DateTime object's methods +return values based on the timezone context. + +Note that in all cases the local machine timezone is used for +representation if no timezone is specified. + +Constructor for DateTime +------------------------ + +DateTime() returns a new date-time object. DateTimes may be created +with from zero to seven arguments: + +* If the function is called with no arguments, then the current date/ + time is returned, represented in the timezone of the local machine. + +* If the function is invoked with a single string argument which is a + recognized timezone name, an object representing the current time is + returned, represented in the specified timezone. + +* If the function is invoked with a single string argument + representing a valid date/time, an object representing that date/ + time will be returned. + + As a general rule, any date-time representation that is recognized + and unambigous to a resident of North America is acceptable. (The + reason for this qualification is that in North America, a date like: + 2/1/1994 is interpreted as February 1, 1994, while in some parts of + the world, it is interpreted as January 2, 1994.) A date/ time + string consists of two components, a date component and an optional + time component, separated by one or more spaces. If the time + component is omited, 12:00am is assumed. 
+ + Any recognized timezone name specified as the final element of the + date/time string will be used for computing the date/time value. + (If you create a DateTime with the string, + "Mar 9, 1997 1:45pm US/Pacific", the value will essentially be the + same as if you had captured time.time() at the specified date and + time on a machine in that timezone). If no timezone is passed, then + the timezone configured on the local machine will be used, **except** + that if the date format matches ISO 8601 ('YYYY-MM-DD'), the instance + will use UTC / CMT+0 as the timezone. + + o Returns current date/time, represented in US/Eastern: + + >>> from DateTime import DateTime + >>> e = DateTime('US/Eastern') + >>> e.timezone() + 'US/Eastern' + + o Returns specified time, represented in local machine zone: + + >>> x = DateTime('1997/3/9 1:45pm') + >>> x.parts() # doctest: +ELLIPSIS + (1997, 3, 9, 13, 45, ...) + + o Specified time in local machine zone, verbose format: + + >>> y = DateTime('Mar 9, 1997 13:45:00') + >>> y.parts() # doctest: +ELLIPSIS + (1997, 3, 9, 13, 45, ...) + >>> y == x + True + + o Specified time in UTC via ISO 8601 rule: + + >>> z = DateTime('2014-03-24') + >>> z.parts() # doctest: +ELLIPSIS + (2014, 3, 24, 0, 0, ...) + >>> z.timezone() + 'GMT+0' + + The date component consists of year, month, and day values. The + year value must be a one-, two-, or four-digit integer. If a one- + or two-digit year is used, the year is assumed to be in the + twentieth century. The month may an integer, from 1 to 12, a month + name, or a month abreviation, where a period may optionally follow + the abreviation. The day must be an integer from 1 to the number of + days in the month. The year, month, and day values may be separated + by periods, hyphens, forward, shashes, or spaces. Extra spaces are + permitted around the delimiters. Year, month, and day values may be + given in any order as long as it is possible to distinguish the + components. 
If all three components are numbers that are less than + 13, then a a month-day-year ordering is assumed. + + The time component consists of hour, minute, and second values + separated by colons. The hour value must be an integer between 0 + and 23 inclusively. The minute value must be an integer between 0 + and 59 inclusively. The second value may be an integer value + between 0 and 59.999 inclusively. The second value or both the + minute and second values may be ommitted. The time may be followed + by am or pm in upper or lower case, in which case a 12-hour clock is + assumed. + +* If the DateTime function is invoked with a single Numeric argument, + the number is assumed to be either a floating point value such as + that returned by time.time() , or a number of days after January 1, + 1901 00:00:00 UTC. + + A DateTime object is returned that represents either the gmt value + of the time.time() float represented in the local machine's + timezone, or that number of days after January 1, 1901. Note that + the number of days after 1901 need to be expressed from the + viewpoint of the local machine's timezone. A negative argument will + yield a date-time value before 1901. + +* If the function is invoked with two numeric arguments, then the + first is taken to be an integer year and the second argument is + taken to be an offset in days from the beginning of the year, in the + context of the local machine timezone. The date-time value returned + is the given offset number of days from the beginning of the given + year, represented in the timezone of the local machine. The offset + may be positive or negative. Two-digit years are assumed to be in + the twentieth century. 
+ +* If the function is invoked with two arguments, the first a float + representing a number of seconds past the epoch in gmt (such as + those returned by time.time()) and the second a string naming a + recognized timezone, a DateTime with a value of that gmt time will + be returned, represented in the given timezone. + + >>> import time + >>> t = time.time() + + Time t represented as US/Eastern: + + >>> now_east = DateTime(t, 'US/Eastern') + + Time t represented as US/Pacific: + + >>> now_west = DateTime(t, 'US/Pacific') + + Only their representations are different: + + >>> now_east.equalTo(now_west) + True + +* If the function is invoked with three or more numeric arguments, + then the first is taken to be an integer year, the second is taken + to be an integer month, and the third is taken to be an integer day. + If the combination of values is not valid, then a DateTimeError is + raised. One- or two-digit years up to 69 are assumed to be in the + 21st century, whereas values 70-99 are assumed to be 20th century. + The fourth, fifth, and sixth arguments are floating point, positive + or negative offsets in units of hours, minutes, and days, and + default to zero if not given. An optional string may be given as + the final argument to indicate timezone (the effect of this is as if + you had taken the value of time.time() at that time on a machine in + the specified timezone). + +If a string argument passed to the DateTime constructor cannot be +parsed, it will raise SyntaxError. Invalid date, time, or +timezone components will raise a DateTimeError. + +The module function Timezones() will return a list of the timezones +recognized by the DateTime module. Recognition of timezone names is +case-insensitive. 
+ +Instance Methods for DateTime (IDateTime interface) +--------------------------------------------------- + +Conversion and comparison methods +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +* ``timeTime()`` returns the date/time as a floating-point number in + UTC, in the format used by the python time module. Note that it is + possible to create date /time values with DateTime that have no + meaningful value to the time module, and in such cases a + DateTimeError is raised. A DateTime object's value must generally + be between Jan 1, 1970 (or your local machine epoch) and Jan 2038 to + produce a valid time.time() style value. + + >>> dt = DateTime('Mar 9, 1997 13:45:00 US/Eastern') + >>> dt.timeTime() + 857933100.0 + + >>> DateTime('2040/01/01 UTC').timeTime() + 2208988800.0 + + >>> DateTime('1900/01/01 UTC').timeTime() + -2208988800.0 + +* ``toZone(z)`` returns a DateTime with the value as the current + object, represented in the indicated timezone: + + >>> dt.toZone('UTC') + DateTime('1997/03/09 18:45:00 UTC') + + >>> dt.toZone('UTC').equalTo(dt) + True + +* ``isFuture()`` returns true if this object represents a date/time + later than the time of the call: + + >>> dt.isFuture() + False + >>> DateTime('Jan 1 3000').isFuture() # not time-machine safe! + True + +* ``isPast()`` returns true if this object represents a date/time + earlier than the time of the call: + + >>> dt.isPast() + True + >>> DateTime('Jan 1 3000').isPast() # not time-machine safe! 
+ False + +* ``isCurrentYear()`` returns true if this object represents a + date/time that falls within the current year, in the context of this + object's timezone representation: + + >>> dt.isCurrentYear() + False + >>> DateTime().isCurrentYear() + True + +* ``isCurrentMonth()`` returns true if this object represents a + date/time that falls within the current month, in the context of + this object's timezone representation: + + >>> dt.isCurrentMonth() + False + >>> DateTime().isCurrentMonth() + True + +* ``isCurrentDay()`` returns true if this object represents a + date/time that falls within the current day, in the context of this + object's timezone representation: + + >>> dt.isCurrentDay() + False + >>> DateTime().isCurrentDay() + True + +* ``isCurrentHour()`` returns true if this object represents a + date/time that falls within the current hour, in the context of this + object's timezone representation: + + >>> dt.isCurrentHour() + False + + >>> DateTime().isCurrentHour() + True + +* ``isCurrentMinute()`` returns true if this object represents a + date/time that falls within the current minute, in the context of + this object's timezone representation: + + >>> dt.isCurrentMinute() + False + >>> DateTime().isCurrentMinute() + True + +* ``isLeapYear()`` returns true if the current year (in the context of + the object's timezone) is a leap year: + + >>> dt.isLeapYear() + False + >>> DateTime('Mar 8 2004').isLeapYear() + True + +* ``earliestTime()`` returns a new DateTime object that represents the + earliest possible time (in whole seconds) that still falls within + the current object's day, in the object's timezone context: + + >>> dt.earliestTime() + DateTime('1997/03/09 00:00:00 US/Eastern') + +* ``latestTime()`` return a new DateTime object that represents the + latest possible time (in whole seconds) that still falls within the + current object's day, in the object's timezone context + + >>> dt.latestTime() + DateTime('1997/03/09 23:59:59 US/Eastern') + 
+Component access +~~~~~~~~~~~~~~~~ + +* ``parts()`` returns a tuple containing the calendar year, month, + day, hour, minute second and timezone of the object + + >>> dt.parts() # doctest: +ELLIPSIS + (1997, 3, 9, 13, 45, ... 'US/Eastern') + +* ``timezone()`` returns the timezone in which the object is represented: + + >>> dt.timezone() in Timezones() + True + +* ``tzoffset()`` returns the timezone offset for the objects timezone: + + >>> dt.tzoffset() + -18000 + +* ``year()`` returns the calendar year of the object: + + >>> dt.year() + 1997 + +* ``month()`` retursn the month of the object as an integer: + + >>> dt.month() + 3 + +* ``Month()`` returns the full month name: + + >>> dt.Month() + 'March' + +* ``aMonth()`` returns the abreviated month name: + + >>> dt.aMonth() + 'Mar' + +* ``pMonth()`` returns the abreviated (with period) month name: + + >>> dt.pMonth() + 'Mar.' + +* ``day()`` returns the integer day: + + >>> dt.day() + 9 + +* ``Day()`` returns the full name of the day of the week: + + >>> dt.Day() + 'Sunday' + +* ``dayOfYear()`` returns the day of the year, in context of the + timezone representation of the object: + + >>> dt.dayOfYear() + 68 + +* ``aDay()`` returns the abreviated name of the day of the week: + + >>> dt.aDay() + 'Sun' + +* ``pDay()`` returns the abreviated (with period) name of the day of + the week: + + >>> dt.pDay() + 'Sun.' 
+ +* ``dow()`` returns the integer day of the week, where Sunday is 0: + + >>> dt.dow() + 0 + +* ``dow_1()`` returns the integer day of the week, where sunday is 1: + + >>> dt.dow_1() + 1 + +* ``h_12()`` returns the 12-hour clock representation of the hour: + + >>> dt.h_12() + 1 + +* ``h_24()`` returns the 24-hour clock representation of the hour: + + >>> dt.h_24() + 13 + +* ``ampm()`` returns the appropriate time modifier (am or pm): + + >>> dt.ampm() + 'pm' + +* ``hour()`` returns the 24-hour clock representation of the hour: + + >>> dt.hour() + 13 + +* ``minute()`` returns the minute: + + >>> dt.minute() + 45 + +* ``second()`` returns the second: + + >>> dt.second() == 0 + True + +* ``millis()`` returns the milliseconds since the epoch in GMT. + + >>> dt.millis() == 857933100000 + True + +strftime() +~~~~~~~~~~ + +See ``tests/test_datetime.py``. + +General formats from previous DateTime +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +* ``Date()`` return the date string for the object: + + >>> dt.Date() + '1997/03/09' + +* ``Time()`` returns the time string for an object to the nearest + second: + + >>> dt.Time() + '13:45:00' + +* ``TimeMinutes()`` returns the time string for an object not showing + seconds: + + >>> dt.TimeMinutes() + '13:45' + +* ``AMPM()`` returns the time string for an object to the nearest second: + + >>> dt.AMPM() + '01:45:00 pm' + +* ``AMPMMinutes()`` returns the time string for an object not showing + seconds: + + >>> dt.AMPMMinutes() + '01:45 pm' + +* ``PreciseTime()`` returns the time string for the object: + + >>> dt.PreciseTime() + '13:45:00.000' + +* ``PreciseAMPM()`` returns the time string for the object: + + >>> dt.PreciseAMPM() + '01:45:00.000 pm' + +* ``yy()`` returns the calendar year as a 2 digit string + + >>> dt.yy() + '97' + +* ``mm()`` returns the month as a 2 digit string + + >>> dt.mm() + '03' + +* ``dd()`` returns the day as a 2 digit string: + + >>> dt.dd() + '09' + +* ``rfc822()`` returns the date in RFC 822 format: + + >>> 
dt.rfc822() + 'Sun, 09 Mar 1997 13:45:00 -0500' + +New formats +~~~~~~~~~~~ + +* ``fCommon()`` returns a string representing the object's value in + the format: March 9, 1997 1:45 pm: + + >>> dt.fCommon() + 'March 9, 1997 1:45 pm' + +* ``fCommonZ()`` returns a string representing the object's value in + the format: March 9, 1997 1:45 pm US/Eastern: + + >>> dt.fCommonZ() + 'March 9, 1997 1:45 pm US/Eastern' + +* ``aCommon()`` returns a string representing the object's value in + the format: Mar 9, 1997 1:45 pm: + + >>> dt.aCommon() + 'Mar 9, 1997 1:45 pm' + +* ``aCommonZ()`` return a string representing the object's value in + the format: Mar 9, 1997 1:45 pm US/Eastern: + + >>> dt.aCommonZ() + 'Mar 9, 1997 1:45 pm US/Eastern' + +* ``pCommon()`` returns a string representing the object's value in + the format Mar. 9, 1997 1:45 pm: + + >>> dt.pCommon() + 'Mar. 9, 1997 1:45 pm' + +* ``pCommonZ()`` returns a string representing the object's value in + the format: Mar. 9, 1997 1:45 pm US/Eastern: + + >>> dt.pCommonZ() + 'Mar. 9, 1997 1:45 pm US/Eastern' + +* ``ISO()`` returns a string with the date/time in ISO format. Note: + this is not ISO 8601-format! See the ISO8601 and HTML4 methods below + for ISO 8601-compliant output. Dates are output as: YYYY-MM-DD HH:MM:SS + + >>> dt.ISO() + '1997-03-09 13:45:00' + +* ``ISO8601()`` returns the object in ISO 8601-compatible format + containing the date, time with seconds-precision and the time zone + identifier - see http://www.w3.org/TR/NOTE-datetime. Dates are + output as: YYYY-MM-DDTHH:MM:SSTZD (T is a literal character, TZD is + Time Zone Designator, format +HH:MM or -HH:MM). + + The ``HTML4()`` method below offers the same formatting, but + converts to UTC before returning the value and sets the TZD"Z" + + >>> dt.ISO8601() + '1997-03-09T13:45:00-05:00' + + +* ``HTML4()`` returns the object in the format used in the HTML4.0 + specification, one of the standard forms in ISO8601. See + http://www.w3.org/TR/NOTE-datetime. 
Dates are output as: + YYYY-MM-DDTHH:MM:SSZ (T, Z are literal characters, the time is in + UTC.): + + >>> dt.HTML4() + '1997-03-09T18:45:00Z' + +* ``JulianDay()`` returns the Julian day according to + http://www.tondering.dk/claus/cal/node3.html#sec-calcjd + + >>> dt.JulianDay() + 2450517 + +* ``week()`` returns the week number according to ISO + see http://www.tondering.dk/claus/cal/node6.html#SECTION00670000000000000000 + + >>> dt.week() + 10 + +Deprecated API +~~~~~~~~~~~~~~ + +* DayOfWeek(): see Day() + +* Day_(): see pDay() + +* Mon(): see aMonth() + +* Mon_(): see pMonth + +General Services Provided by DateTime +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +DateTimes can be repr()'ed; the result will be a string indicating how +to make a DateTime object like this: + + >>> repr(dt) + "DateTime('1997/03/09 13:45:00 US/Eastern')" + +When we convert them into a string, we get a nicer string that could +actually be shown to a user: + + >>> str(dt) + '1997/03/09 13:45:00 US/Eastern' + +The hash value of a DateTime is based on the date and time and is +equal for different representations of the DateTime: + + >>> hash(dt) + 3618678 + >>> hash(dt.toZone('UTC')) + 3618678 + +DateTime objects can be compared to other DateTime objects OR floating +point numbers such as the ones which are returned by the python time +module by using the equalTo method. 
Using this API, True is returned if the +object represents a date/time equal to the specified DateTime or time module +style time: + + >>> dt.equalTo(dt) + True + >>> dt.equalTo(dt.toZone('UTC')) + True + >>> dt.equalTo(dt.timeTime()) + True + >>> dt.equalTo(DateTime()) + False + +Same goes for inequalities: + + >>> dt.notEqualTo(dt) + False + >>> dt.notEqualTo(dt.toZone('UTC')) + False + >>> dt.notEqualTo(dt.timeTime()) + False + >>> dt.notEqualTo(DateTime()) + True + +Normal equality operations only work with datetime objects and take the +timezone setting into account: + + >>> dt == dt + True + >>> dt == dt.toZone('UTC') + False + >>> dt == DateTime() + False + + >>> dt != dt + False + >>> dt != dt.toZone('UTC') + True + >>> dt != DateTime() + True + +But the other comparison operations compare the referenced moment in time and +not the representation itself: + + >>> dt > dt + False + >>> DateTime() > dt + True + >>> dt > DateTime().timeTime() + False + >>> DateTime().timeTime() > dt + True + + >>> dt.greaterThan(dt) + False + >>> DateTime().greaterThan(dt) + True + >>> dt.greaterThan(DateTime().timeTime()) + False + + >>> dt >= dt + True + >>> DateTime() >= dt + True + >>> dt >= DateTime().timeTime() + False + >>> DateTime().timeTime() >= dt + True + + >>> dt.greaterThanEqualTo(dt) + True + >>> DateTime().greaterThanEqualTo(dt) + True + >>> dt.greaterThanEqualTo(DateTime().timeTime()) + False + + >>> dt < dt + False + >>> DateTime() < dt + False + >>> dt < DateTime().timeTime() + True + >>> DateTime().timeTime() < dt + False + + >>> dt.lessThan(dt) + False + >>> DateTime().lessThan(dt) + False + >>> dt.lessThan(DateTime().timeTime()) + True + + >>> dt <= dt + True + >>> DateTime() <= dt + False + >>> dt <= DateTime().timeTime() + True + >>> DateTime().timeTime() <= dt + False + + >>> dt.lessThanEqualTo(dt) + True + >>> DateTime().lessThanEqualTo(dt) + False + >>> dt.lessThanEqualTo(DateTime().timeTime()) + True + +Numeric Services Provided by DateTime 
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +A DateTime may be added to a number and a number may be added to a +DateTime: + + >>> dt + 5 + DateTime('1997/03/14 13:45:00 US/Eastern') + >>> 5 + dt + DateTime('1997/03/14 13:45:00 US/Eastern') + +Two DateTimes cannot be added: + + >>> from DateTime.interfaces import DateTimeError + >>> try: + ... dt + dt + ... print('fail') + ... except DateTimeError: + ... print('ok') + ok + +Either a DateTime or a number may be subtracted from a DateTime, +however, a DateTime may not be subtracted from a number: + + >>> DateTime('1997/03/10 13:45 US/Eastern') - dt + 1.0 + >>> dt - 1 + DateTime('1997/03/08 13:45:00 US/Eastern') + >>> 1 - dt + Traceback (most recent call last): + ... + TypeError: unsupported operand type(s) for -: 'int' and 'DateTime' + +DateTimes can also be converted to integers (number of seconds since +the epoch) and floats: + + >>> int(dt) + 857933100 + >>> float(dt) + 857933100.0 + + +Changelog +========= + +4.3 (2018-10-05) +---------------- + +- Add support for Python 3.7. + +4.2 (2017-04-26) +---------------- + +- Add support for Python 3.6, drop support for Python 3.3. + +4.1.1 (2016-04-30) +------------------ + +- Support unpickling instances having a numeric timezone like `+0430`. + +4.1 (2016-04-03) +---------------- + +- Add support for Python 3.4 and 3.5. + +- Drop support for Python 2.6 and 3.2. + +4.0.1 (2013-10-15) +------------------ + +- Provide more backward compatible timezones. + [vangheem] + +4.0 (2013-02-23) +---------------- + +- Added support for Python 3.2 and 3.3 in addition to 2.6 and 2.7. + +- Removed unused legacy pytz tests and the DateTimeZone module and renamed + some test internals. + +3.0.3 (2013-01-22) +------------------ + +- Allow timezone argument to be a Unicode string while creating a DateTime + object using two arguments. + +3.0.2 (2012-10-21) +------------------ + +- LP #1045233: Respect date format setting for parsing dates like `11-01-2001`. 
+ +3.0.1 (2012-09-23) +------------------ + +- Add `_dt_reconstructor` function introduced in DateTime 2.12.7 to provide + forward compatibility with pickles that might reference this function. + +3.0 (2011-12-09) +---------------- + +- No changes. + +Backwards compatibility of DateTime 3 +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +DateTime 3 changes its pickle representation. DateTime instances pickled with +former versions of DateTime can be read, but older DateTime versions cannot read +DateTime instances pickled with version 3. + +DateTime 3 changes DateTime to be a new-style class with slots instead of being +an old-style class. + +DateTime 3 tries to preserve microsecond resolution throughout most of its API's +while former versions were often only accurate to millisecond resolution. Due to +the representation of float values in Python versions before Python 2.7 you +shouldn't compare string or float representations of DateTime instances if you +want high accuracy. The same is true for calculated values returned by methods +like `timeTime()`. You get the highest accuracy of comparing DateTime values by +calling its `micros()` methods. DateTime is not particular well suited to be +used in comparing timestamps of file systems - use the time and datetime objects +from the Python standard library instead. + +3.0b3 (2011-10-19) +------------------ + +- Allow comparison of DateTime objects against None. + +3.0b2 (2011-10-19) +------------------ + +- Reverted the single argument `None` special case handling for unpickling and + continue to treat it as meaning `now`. + +3.0b1 (2011-05-07) +------------------ + +- Restored `strftimeFormatter` as a class. + +- Added tests for read-only class attributes and interface. + +3.0a2 (2011-05-07) +------------------ + +- Added back support for reading old DateTime pickles without a `_micros` value. 
+ +- Avoid storing `_t` representing the time as a float in seconds since the + epoch, as we already have `_micros` doing the same as a long. Memory use is + down to about 300 bytes per DateTime instance. + +- Updated exception raising syntax to current style. + +- Avoid storing `_aday`, `_fday`, `_pday`, `_amon`, `_fmon`, `_pmon`, `_pmhour` + and `_pm` in memory for every instance but look them up dynamically based on + `_dayoffset`, `_month` and `_hour`. This saves another 150 bytes of memory + per DateTime instance. + +- Moved various internal parsing related class variables to module constants. + +- No longer provide the `DateError`, `DateTimeError`, `SyntaxError` and + `TimeError` exceptions as class attributes, import them from their canonical + `DateTime.interfaces` location instead. + +- Removed deprecated `_isDST` and `_localzone` class variables. + +- Moved pytz cache from `DateTime._tzinfo` to a module global `_TZINFO`. + +- Make DateTime a new-style class and limit its available attributes via a + slots definition. The pickle size increases to 110 bytes thanks to the + `ccopy_reg\n_reconstructor` stanza. But the memory size drops from 3kb to + 500 bytes for each instance. + +3.0a1 (2011-05-06) +------------------ + +- Reordered some calculations in `_calcIndependentSecondEtc` to preserve more + floating point precision. + +- Optimized the pickled data, by only storing a tuple of `_micros` and time + zone information - this reduces the pickle size from an average of 300 bytes + to just 60 bytes. + +- Optimized un-pickling, by avoiding the creation of an intermediate DateTime + value representing the current time. + +- Removed in-place migration of old DateTime pickles without a `_micros` value. + +- Removed deprecated support for using `DateTime.__cmp__`. + +- Take time zone settings into account when comparing two date times for + (non-) equality. + +- Fixed (possibly unused) _parse_iso8601 function. 
+ +- Removed unused import of legacy DateTimeZone, strftime and re. + Remove trailing whitespace. + +- Removed reference to missing version section from buildout. + +2.12.7 (2012-08-11) +------------------- + +- Added forward compatibility with DateTime 3 pickle format. DateTime + instances constructed under version 3 can be read and unpickled by this + version. The pickled data is converted to the current versions format + (old-style class / no slots). Once converted it will be stored again in the + old format. This should allow for a transparent upgrade/downgrade path + between DateTime 2 and 3. + +2.12.6 (2010-10-17) +------------------- + +- Changed ``testDayOfWeek`` test to be independent of OS locale. + +2.12.5 (2010-07-29) +------------------- + +- Launchpad #143269: Corrected the documentation for year value + behavior when constructing a DateTime object with three numeric + arguments. + +- Launchpad #142521: Removed confusing special case in + DateTime.__str__ where DateTime instances for midnight + (e.g. '2010-07-27 00:00:00 US/Eastern') values would + render only their date and nothing else. + +2.12.4 (2010-07-12) +------------------- + +- Fixed mapping of EDT (was -> 'GMT-0400', now 'GMT-4'). + +2.12.3 (2010-07-09) +------------------- + +- Added EDT timezone support. Addresses bug #599856. + [vangheem] + +2.12.2 (2010-05-05) +------------------- + +- Launchpad #572715: Relaxed pin on pytz, after applying a patch from + Marius Gedminus which fixes the apparent API breakage. + +2.12.1 (2010-04-30) +------------------- + +- Removed an undeclared testing dependency on zope.testing.doctest in favor of + the standard libraries doctest module. + +- Added a maximum version requirement on pytz <= 2010b. Later versions produce + test failures related to timezone changes. + +2.12.0 (2009-03-04) +------------------- + +- Launchpad #290254: Forward-ported fix for '_micros'-less pickles from + the Zope 2.11 branch version. 
+ +2.11.2 (2009-02-02) +------------------- + +- Include *all* pytz zone names, not just "common" ones. + +- Fix one fragile doctest, band-aid another. + +- Fix for launchpad #267545: DateTime(DateTime()) should preserve the + correct hour. + +2.11.1 (2008-08-05) +------------------- + +- DateTime conversion of datetime objects with non-pytz tzinfo. Timezones() + returns a copy of the timezone list (allows tests to run). + +- Merged the slinkp-datetime-200007 branch: fix the DateTime(anotherDateTime) + constructor to preserve timezones. + +2.11.0b1 (2008-01-06) +--------------------- + +- Split off from the Zope2 main source code tree. + + + diff --git a/thesisenv/lib/python3.6/site-packages/DateTime-4.3.dist-info/INSTALLER b/thesisenv/lib/python3.6/site-packages/DateTime-4.3.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/DateTime-4.3.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/thesisenv/lib/python3.6/site-packages/DateTime-4.3.dist-info/METADATA b/thesisenv/lib/python3.6/site-packages/DateTime-4.3.dist-info/METADATA new file mode 100644 index 0000000..7ab36b4 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/DateTime-4.3.dist-info/METADATA @@ -0,0 +1,1056 @@ +Metadata-Version: 2.0 +Name: DateTime +Version: 4.3 +Summary: This package provides a DateTime data type, as known from Zope. Unless you need to communicate with Zope APIs, you're probably better off using Python's built-in datetime module. 
+Home-page: https://github.com/zopefoundation/DateTime +Author: Zope Foundation and Contributors +Author-email: zope-dev@zope.org +License: ZPL 2.1 +Platform: UNKNOWN +Classifier: Development Status :: 6 - Mature +Classifier: Environment :: Web Environment +Classifier: Framework :: Zope :: 4 +Classifier: License :: OSI Approved :: Zope Public License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Requires-Dist: pytz +Requires-Dist: zope.interface + +The DateTime package +==================== + +Encapsulation of date/time values. + + +Function Timezones() +-------------------- + +Returns the list of recognized timezone names: + + >>> from DateTime import Timezones + >>> zones = set(Timezones()) + +Almost all of the standard pytz timezones are included, with the exception +of some commonly-used but ambiguous abbreviations, where historical Zope +usage conflicts with the name used by pytz: + + >>> import pytz + >>> [x for x in pytz.all_timezones if x not in zones] + ['CET', 'EET', 'EST', 'MET', 'MST', 'WET'] + +Class DateTime +-------------- + +DateTime objects represent instants in time and provide interfaces for +controlling its representation without affecting the absolute value of +the object. + +DateTime objects may be created from a wide variety of string or +numeric data, or may be computed from other DateTime objects. 
+DateTimes support the ability to convert their representations to many
+major timezones, as well as the ability to create a DateTime object
+in the context of a given timezone.
+
+DateTime objects provide partial numerical behavior:
+
+* Two date-time objects can be subtracted to obtain a time, in days
+  between the two.
+
+* A date-time object and a positive or negative number may be added to
+  obtain a new date-time object that is the given number of days later
+  than the input date-time object.
+
+* A positive or negative number and a date-time object may be added to
+  obtain a new date-time object that is the given number of days later
+  than the input date-time object.
+
+* A positive or negative number may be subtracted from a date-time
+  object to obtain a new date-time object that is the given number of
+  days earlier than the input date-time object.
+
+DateTime objects may be converted to integer, long, or float numbers
+of days since January 1, 1901, using the standard int, long, and float
+functions (Compatibility Note: int, long and float return the number
+of days since 1901 in GMT rather than local machine timezone).
+DateTime objects also provide access to their value in a float format
+usable with the python time module, provided that the value of the
+object falls in the range of the epoch-based time module.
+
+A DateTime object should be considered immutable; all conversion and numeric
+operations return a new DateTime object rather than modify the current object.
+
+A DateTime object always maintains its value as an absolute UTC time,
+and is represented in the context of some timezone based on the
+arguments used to create the object. A DateTime object's methods
+return values based on the timezone context.
+
+Note that in all cases the local machine timezone is used for
+representation if no timezone is specified.
+
+Constructor for DateTime
+------------------------
+
+DateTime() returns a new date-time object. 
DateTimes may be created
+with from zero to seven arguments:
+
+* If the function is called with no arguments, then the current date/
+  time is returned, represented in the timezone of the local machine.
+
+* If the function is invoked with a single string argument which is a
+  recognized timezone name, an object representing the current time is
+  returned, represented in the specified timezone.
+
+* If the function is invoked with a single string argument
+  representing a valid date/time, an object representing that date/
+  time will be returned.
+
+  As a general rule, any date-time representation that is recognized
+  and unambiguous to a resident of North America is acceptable. (The
+  reason for this qualification is that in North America, a date like:
+  2/1/1994 is interpreted as February 1, 1994, while in some parts of
+  the world, it is interpreted as January 2, 1994.) A date/ time
+  string consists of two components, a date component and an optional
+  time component, separated by one or more spaces. If the time
+  component is omitted, 12:00am is assumed.
+
+  Any recognized timezone name specified as the final element of the
+  date/time string will be used for computing the date/time value.
+  (If you create a DateTime with the string,
+  "Mar 9, 1997 1:45pm US/Pacific", the value will essentially be the
+  same as if you had captured time.time() at the specified date and
+  time on a machine in that timezone). If no timezone is passed, then
+  the timezone configured on the local machine will be used, **except**
+  that if the date format matches ISO 8601 ('YYYY-MM-DD'), the instance
+  will use UTC / GMT+0 as the timezone.
+
+  o Returns current date/time, represented in US/Eastern:
+
+    >>> from DateTime import DateTime
+    >>> e = DateTime('US/Eastern')
+    >>> e.timezone()
+    'US/Eastern'
+
+  o Returns specified time, represented in local machine zone:
+
+    >>> x = DateTime('1997/3/9 1:45pm')
+    >>> x.parts() # doctest: +ELLIPSIS
+    (1997, 3, 9, 13, 45, ...)
+
+  o Specified time in local machine zone, verbose format:
+
+    >>> y = DateTime('Mar 9, 1997 13:45:00')
+    >>> y.parts() # doctest: +ELLIPSIS
+    (1997, 3, 9, 13, 45, ...)
+    >>> y == x
+    True
+
+  o Specified time in UTC via ISO 8601 rule:
+
+    >>> z = DateTime('2014-03-24')
+    >>> z.parts() # doctest: +ELLIPSIS
+    (2014, 3, 24, 0, 0, ...)
+    >>> z.timezone()
+    'GMT+0'
+
+  The date component consists of year, month, and day values. The
+  year value must be a one-, two-, or four-digit integer. If a one-
+  or two-digit year is used, the year is assumed to be in the
+  twentieth century. The month may be an integer, from 1 to 12, a month
+  name, or a month abbreviation, where a period may optionally follow
+  the abbreviation. The day must be an integer from 1 to the number of
+  days in the month. The year, month, and day values may be separated
+  by periods, hyphens, forward slashes, or spaces. Extra spaces are
+  permitted around the delimiters. Year, month, and day values may be
+  given in any order as long as it is possible to distinguish the
+  components. If all three components are numbers that are less than
+  13, then a month-day-year ordering is assumed.
+
+  The time component consists of hour, minute, and second values
+  separated by colons. The hour value must be an integer between 0
+  and 23 inclusively. The minute value must be an integer between 0
+  and 59 inclusively. The second value may be an integer value
+  between 0 and 59.999 inclusively. The second value or both the
+  minute and second values may be omitted. The time may be followed
+  by am or pm in upper or lower case, in which case a 12-hour clock is
+  assumed.
+
+* If the DateTime function is invoked with a single Numeric argument,
+  the number is assumed to be either a floating point value such as
+  that returned by time.time(), or a number of days after January 1,
+  1901 00:00:00 UTC. 
+ + A DateTime object is returned that represents either the gmt value + of the time.time() float represented in the local machine's + timezone, or that number of days after January 1, 1901. Note that + the number of days after 1901 need to be expressed from the + viewpoint of the local machine's timezone. A negative argument will + yield a date-time value before 1901. + +* If the function is invoked with two numeric arguments, then the + first is taken to be an integer year and the second argument is + taken to be an offset in days from the beginning of the year, in the + context of the local machine timezone. The date-time value returned + is the given offset number of days from the beginning of the given + year, represented in the timezone of the local machine. The offset + may be positive or negative. Two-digit years are assumed to be in + the twentieth century. + +* If the function is invoked with two arguments, the first a float + representing a number of seconds past the epoch in gmt (such as + those returned by time.time()) and the second a string naming a + recognized timezone, a DateTime with a value of that gmt time will + be returned, represented in the given timezone. + + >>> import time + >>> t = time.time() + + Time t represented as US/Eastern: + + >>> now_east = DateTime(t, 'US/Eastern') + + Time t represented as US/Pacific: + + >>> now_west = DateTime(t, 'US/Pacific') + + Only their representations are different: + + >>> now_east.equalTo(now_west) + True + +* If the function is invoked with three or more numeric arguments, + then the first is taken to be an integer year, the second is taken + to be an integer month, and the third is taken to be an integer day. + If the combination of values is not valid, then a DateTimeError is + raised. One- or two-digit years up to 69 are assumed to be in the + 21st century, whereas values 70-99 are assumed to be 20th century. 
+ The fourth, fifth, and sixth arguments are floating point, positive + or negative offsets in units of hours, minutes, and days, and + default to zero if not given. An optional string may be given as + the final argument to indicate timezone (the effect of this is as if + you had taken the value of time.time() at that time on a machine in + the specified timezone). + +If a string argument passed to the DateTime constructor cannot be +parsed, it will raise SyntaxError. Invalid date, time, or +timezone components will raise a DateTimeError. + +The module function Timezones() will return a list of the timezones +recognized by the DateTime module. Recognition of timezone names is +case-insensitive. + +Instance Methods for DateTime (IDateTime interface) +--------------------------------------------------- + +Conversion and comparison methods +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +* ``timeTime()`` returns the date/time as a floating-point number in + UTC, in the format used by the python time module. Note that it is + possible to create date /time values with DateTime that have no + meaningful value to the time module, and in such cases a + DateTimeError is raised. A DateTime object's value must generally + be between Jan 1, 1970 (or your local machine epoch) and Jan 2038 to + produce a valid time.time() style value. + + >>> dt = DateTime('Mar 9, 1997 13:45:00 US/Eastern') + >>> dt.timeTime() + 857933100.0 + + >>> DateTime('2040/01/01 UTC').timeTime() + 2208988800.0 + + >>> DateTime('1900/01/01 UTC').timeTime() + -2208988800.0 + +* ``toZone(z)`` returns a DateTime with the value as the current + object, represented in the indicated timezone: + + >>> dt.toZone('UTC') + DateTime('1997/03/09 18:45:00 UTC') + + >>> dt.toZone('UTC').equalTo(dt) + True + +* ``isFuture()`` returns true if this object represents a date/time + later than the time of the call: + + >>> dt.isFuture() + False + >>> DateTime('Jan 1 3000').isFuture() # not time-machine safe! 
+ True + +* ``isPast()`` returns true if this object represents a date/time + earlier than the time of the call: + + >>> dt.isPast() + True + >>> DateTime('Jan 1 3000').isPast() # not time-machine safe! + False + +* ``isCurrentYear()`` returns true if this object represents a + date/time that falls within the current year, in the context of this + object's timezone representation: + + >>> dt.isCurrentYear() + False + >>> DateTime().isCurrentYear() + True + +* ``isCurrentMonth()`` returns true if this object represents a + date/time that falls within the current month, in the context of + this object's timezone representation: + + >>> dt.isCurrentMonth() + False + >>> DateTime().isCurrentMonth() + True + +* ``isCurrentDay()`` returns true if this object represents a + date/time that falls within the current day, in the context of this + object's timezone representation: + + >>> dt.isCurrentDay() + False + >>> DateTime().isCurrentDay() + True + +* ``isCurrentHour()`` returns true if this object represents a + date/time that falls within the current hour, in the context of this + object's timezone representation: + + >>> dt.isCurrentHour() + False + + >>> DateTime().isCurrentHour() + True + +* ``isCurrentMinute()`` returns true if this object represents a + date/time that falls within the current minute, in the context of + this object's timezone representation: + + >>> dt.isCurrentMinute() + False + >>> DateTime().isCurrentMinute() + True + +* ``isLeapYear()`` returns true if the current year (in the context of + the object's timezone) is a leap year: + + >>> dt.isLeapYear() + False + >>> DateTime('Mar 8 2004').isLeapYear() + True + +* ``earliestTime()`` returns a new DateTime object that represents the + earliest possible time (in whole seconds) that still falls within + the current object's day, in the object's timezone context: + + >>> dt.earliestTime() + DateTime('1997/03/09 00:00:00 US/Eastern') + +* ``latestTime()`` return a new DateTime object that represents 
the + latest possible time (in whole seconds) that still falls within the + current object's day, in the object's timezone context + + >>> dt.latestTime() + DateTime('1997/03/09 23:59:59 US/Eastern') + +Component access +~~~~~~~~~~~~~~~~ + +* ``parts()`` returns a tuple containing the calendar year, month, + day, hour, minute second and timezone of the object + + >>> dt.parts() # doctest: +ELLIPSIS + (1997, 3, 9, 13, 45, ... 'US/Eastern') + +* ``timezone()`` returns the timezone in which the object is represented: + + >>> dt.timezone() in Timezones() + True + +* ``tzoffset()`` returns the timezone offset for the objects timezone: + + >>> dt.tzoffset() + -18000 + +* ``year()`` returns the calendar year of the object: + + >>> dt.year() + 1997 + +* ``month()`` retursn the month of the object as an integer: + + >>> dt.month() + 3 + +* ``Month()`` returns the full month name: + + >>> dt.Month() + 'March' + +* ``aMonth()`` returns the abreviated month name: + + >>> dt.aMonth() + 'Mar' + +* ``pMonth()`` returns the abreviated (with period) month name: + + >>> dt.pMonth() + 'Mar.' + +* ``day()`` returns the integer day: + + >>> dt.day() + 9 + +* ``Day()`` returns the full name of the day of the week: + + >>> dt.Day() + 'Sunday' + +* ``dayOfYear()`` returns the day of the year, in context of the + timezone representation of the object: + + >>> dt.dayOfYear() + 68 + +* ``aDay()`` returns the abreviated name of the day of the week: + + >>> dt.aDay() + 'Sun' + +* ``pDay()`` returns the abreviated (with period) name of the day of + the week: + + >>> dt.pDay() + 'Sun.' 
+ +* ``dow()`` returns the integer day of the week, where Sunday is 0: + + >>> dt.dow() + 0 + +* ``dow_1()`` returns the integer day of the week, where sunday is 1: + + >>> dt.dow_1() + 1 + +* ``h_12()`` returns the 12-hour clock representation of the hour: + + >>> dt.h_12() + 1 + +* ``h_24()`` returns the 24-hour clock representation of the hour: + + >>> dt.h_24() + 13 + +* ``ampm()`` returns the appropriate time modifier (am or pm): + + >>> dt.ampm() + 'pm' + +* ``hour()`` returns the 24-hour clock representation of the hour: + + >>> dt.hour() + 13 + +* ``minute()`` returns the minute: + + >>> dt.minute() + 45 + +* ``second()`` returns the second: + + >>> dt.second() == 0 + True + +* ``millis()`` returns the milliseconds since the epoch in GMT. + + >>> dt.millis() == 857933100000 + True + +strftime() +~~~~~~~~~~ + +See ``tests/test_datetime.py``. + +General formats from previous DateTime +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +* ``Date()`` return the date string for the object: + + >>> dt.Date() + '1997/03/09' + +* ``Time()`` returns the time string for an object to the nearest + second: + + >>> dt.Time() + '13:45:00' + +* ``TimeMinutes()`` returns the time string for an object not showing + seconds: + + >>> dt.TimeMinutes() + '13:45' + +* ``AMPM()`` returns the time string for an object to the nearest second: + + >>> dt.AMPM() + '01:45:00 pm' + +* ``AMPMMinutes()`` returns the time string for an object not showing + seconds: + + >>> dt.AMPMMinutes() + '01:45 pm' + +* ``PreciseTime()`` returns the time string for the object: + + >>> dt.PreciseTime() + '13:45:00.000' + +* ``PreciseAMPM()`` returns the time string for the object: + + >>> dt.PreciseAMPM() + '01:45:00.000 pm' + +* ``yy()`` returns the calendar year as a 2 digit string + + >>> dt.yy() + '97' + +* ``mm()`` returns the month as a 2 digit string + + >>> dt.mm() + '03' + +* ``dd()`` returns the day as a 2 digit string: + + >>> dt.dd() + '09' + +* ``rfc822()`` returns the date in RFC 822 format: + + >>> 
dt.rfc822() + 'Sun, 09 Mar 1997 13:45:00 -0500' + +New formats +~~~~~~~~~~~ + +* ``fCommon()`` returns a string representing the object's value in + the format: March 9, 1997 1:45 pm: + + >>> dt.fCommon() + 'March 9, 1997 1:45 pm' + +* ``fCommonZ()`` returns a string representing the object's value in + the format: March 9, 1997 1:45 pm US/Eastern: + + >>> dt.fCommonZ() + 'March 9, 1997 1:45 pm US/Eastern' + +* ``aCommon()`` returns a string representing the object's value in + the format: Mar 9, 1997 1:45 pm: + + >>> dt.aCommon() + 'Mar 9, 1997 1:45 pm' + +* ``aCommonZ()`` return a string representing the object's value in + the format: Mar 9, 1997 1:45 pm US/Eastern: + + >>> dt.aCommonZ() + 'Mar 9, 1997 1:45 pm US/Eastern' + +* ``pCommon()`` returns a string representing the object's value in + the format Mar. 9, 1997 1:45 pm: + + >>> dt.pCommon() + 'Mar. 9, 1997 1:45 pm' + +* ``pCommonZ()`` returns a string representing the object's value in + the format: Mar. 9, 1997 1:45 pm US/Eastern: + + >>> dt.pCommonZ() + 'Mar. 9, 1997 1:45 pm US/Eastern' + +* ``ISO()`` returns a string with the date/time in ISO format. Note: + this is not ISO 8601-format! See the ISO8601 and HTML4 methods below + for ISO 8601-compliant output. Dates are output as: YYYY-MM-DD HH:MM:SS + + >>> dt.ISO() + '1997-03-09 13:45:00' + +* ``ISO8601()`` returns the object in ISO 8601-compatible format + containing the date, time with seconds-precision and the time zone + identifier - see http://www.w3.org/TR/NOTE-datetime. Dates are + output as: YYYY-MM-DDTHH:MM:SSTZD (T is a literal character, TZD is + Time Zone Designator, format +HH:MM or -HH:MM). + + The ``HTML4()`` method below offers the same formatting, but + converts to UTC before returning the value and sets the TZD"Z" + + >>> dt.ISO8601() + '1997-03-09T13:45:00-05:00' + + +* ``HTML4()`` returns the object in the format used in the HTML4.0 + specification, one of the standard forms in ISO8601. See + http://www.w3.org/TR/NOTE-datetime. 
Dates are output as: + YYYY-MM-DDTHH:MM:SSZ (T, Z are literal characters, the time is in + UTC.): + + >>> dt.HTML4() + '1997-03-09T18:45:00Z' + +* ``JulianDay()`` returns the Julian day according to + http://www.tondering.dk/claus/cal/node3.html#sec-calcjd + + >>> dt.JulianDay() + 2450517 + +* ``week()`` returns the week number according to ISO + see http://www.tondering.dk/claus/cal/node6.html#SECTION00670000000000000000 + + >>> dt.week() + 10 + +Deprecated API +~~~~~~~~~~~~~~ + +* DayOfWeek(): see Day() + +* Day_(): see pDay() + +* Mon(): see aMonth() + +* Mon_(): see pMonth + +General Services Provided by DateTime +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +DateTimes can be repr()'ed; the result will be a string indicating how +to make a DateTime object like this: + + >>> repr(dt) + "DateTime('1997/03/09 13:45:00 US/Eastern')" + +When we convert them into a string, we get a nicer string that could +actually be shown to a user: + + >>> str(dt) + '1997/03/09 13:45:00 US/Eastern' + +The hash value of a DateTime is based on the date and time and is +equal for different representations of the DateTime: + + >>> hash(dt) + 3618678 + >>> hash(dt.toZone('UTC')) + 3618678 + +DateTime objects can be compared to other DateTime objects OR floating +point numbers such as the ones which are returned by the python time +module by using the equalTo method. 
Using this API, True is returned if the +object represents a date/time equal to the specified DateTime or time module +style time: + + >>> dt.equalTo(dt) + True + >>> dt.equalTo(dt.toZone('UTC')) + True + >>> dt.equalTo(dt.timeTime()) + True + >>> dt.equalTo(DateTime()) + False + +Same goes for inequalities: + + >>> dt.notEqualTo(dt) + False + >>> dt.notEqualTo(dt.toZone('UTC')) + False + >>> dt.notEqualTo(dt.timeTime()) + False + >>> dt.notEqualTo(DateTime()) + True + +Normal equality operations only work with datetime objects and take the +timezone setting into account: + + >>> dt == dt + True + >>> dt == dt.toZone('UTC') + False + >>> dt == DateTime() + False + + >>> dt != dt + False + >>> dt != dt.toZone('UTC') + True + >>> dt != DateTime() + True + +But the other comparison operations compare the referenced moment in time and +not the representation itself: + + >>> dt > dt + False + >>> DateTime() > dt + True + >>> dt > DateTime().timeTime() + False + >>> DateTime().timeTime() > dt + True + + >>> dt.greaterThan(dt) + False + >>> DateTime().greaterThan(dt) + True + >>> dt.greaterThan(DateTime().timeTime()) + False + + >>> dt >= dt + True + >>> DateTime() >= dt + True + >>> dt >= DateTime().timeTime() + False + >>> DateTime().timeTime() >= dt + True + + >>> dt.greaterThanEqualTo(dt) + True + >>> DateTime().greaterThanEqualTo(dt) + True + >>> dt.greaterThanEqualTo(DateTime().timeTime()) + False + + >>> dt < dt + False + >>> DateTime() < dt + False + >>> dt < DateTime().timeTime() + True + >>> DateTime().timeTime() < dt + False + + >>> dt.lessThan(dt) + False + >>> DateTime().lessThan(dt) + False + >>> dt.lessThan(DateTime().timeTime()) + True + + >>> dt <= dt + True + >>> DateTime() <= dt + False + >>> dt <= DateTime().timeTime() + True + >>> DateTime().timeTime() <= dt + False + + >>> dt.lessThanEqualTo(dt) + True + >>> DateTime().lessThanEqualTo(dt) + False + >>> dt.lessThanEqualTo(DateTime().timeTime()) + True + +Numeric Services Provided by DateTime 
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +A DateTime may be added to a number and a number may be added to a +DateTime: + + >>> dt + 5 + DateTime('1997/03/14 13:45:00 US/Eastern') + >>> 5 + dt + DateTime('1997/03/14 13:45:00 US/Eastern') + +Two DateTimes cannot be added: + + >>> from DateTime.interfaces import DateTimeError + >>> try: + ... dt + dt + ... print('fail') + ... except DateTimeError: + ... print('ok') + ok + +Either a DateTime or a number may be subtracted from a DateTime, +however, a DateTime may not be subtracted from a number: + + >>> DateTime('1997/03/10 13:45 US/Eastern') - dt + 1.0 + >>> dt - 1 + DateTime('1997/03/08 13:45:00 US/Eastern') + >>> 1 - dt + Traceback (most recent call last): + ... + TypeError: unsupported operand type(s) for -: 'int' and 'DateTime' + +DateTimes can also be converted to integers (number of seconds since +the epoch) and floats: + + >>> int(dt) + 857933100 + >>> float(dt) + 857933100.0 + + +Changelog +========= + +4.3 (2018-10-05) +---------------- + +- Add support for Python 3.7. + +4.2 (2017-04-26) +---------------- + +- Add support for Python 3.6, drop support for Python 3.3. + +4.1.1 (2016-04-30) +------------------ + +- Support unpickling instances having a numeric timezone like `+0430`. + +4.1 (2016-04-03) +---------------- + +- Add support for Python 3.4 and 3.5. + +- Drop support for Python 2.6 and 3.2. + +4.0.1 (2013-10-15) +------------------ + +- Provide more backward compatible timezones. + [vangheem] + +4.0 (2013-02-23) +---------------- + +- Added support for Python 3.2 and 3.3 in addition to 2.6 and 2.7. + +- Removed unused legacy pytz tests and the DateTimeZone module and renamed + some test internals. + +3.0.3 (2013-01-22) +------------------ + +- Allow timezone argument to be a Unicode string while creating a DateTime + object using two arguments. + +3.0.2 (2012-10-21) +------------------ + +- LP #1045233: Respect date format setting for parsing dates like `11-01-2001`. 
+ +3.0.1 (2012-09-23) +------------------ + +- Add `_dt_reconstructor` function introduced in DateTime 2.12.7 to provide + forward compatibility with pickles that might reference this function. + +3.0 (2011-12-09) +---------------- + +- No changes. + +Backwards compatibility of DateTime 3 +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +DateTime 3 changes its pickle representation. DateTime instances pickled with +former versions of DateTime can be read, but older DateTime versions cannot read +DateTime instances pickled with version 3. + +DateTime 3 changes DateTime to be a new-style class with slots instead of being +an old-style class. + +DateTime 3 tries to preserve microsecond resolution throughout most of its API's +while former versions were often only accurate to millisecond resolution. Due to +the representation of float values in Python versions before Python 2.7 you +shouldn't compare string or float representations of DateTime instances if you +want high accuracy. The same is true for calculated values returned by methods +like `timeTime()`. You get the highest accuracy of comparing DateTime values by +calling its `micros()` methods. DateTime is not particular well suited to be +used in comparing timestamps of file systems - use the time and datetime objects +from the Python standard library instead. + +3.0b3 (2011-10-19) +------------------ + +- Allow comparison of DateTime objects against None. + +3.0b2 (2011-10-19) +------------------ + +- Reverted the single argument `None` special case handling for unpickling and + continue to treat it as meaning `now`. + +3.0b1 (2011-05-07) +------------------ + +- Restored `strftimeFormatter` as a class. + +- Added tests for read-only class attributes and interface. + +3.0a2 (2011-05-07) +------------------ + +- Added back support for reading old DateTime pickles without a `_micros` value. 
+ +- Avoid storing `_t` representing the time as a float in seconds since the + epoch, as we already have `_micros` doing the same as a long. Memory use is + down to about 300 bytes per DateTime instance. + +- Updated exception raising syntax to current style. + +- Avoid storing `_aday`, `_fday`, `_pday`, `_amon`, `_fmon`, `_pmon`, `_pmhour` + and `_pm` in memory for every instance but look them up dynamically based on + `_dayoffset`, `_month` and `_hour`. This saves another 150 bytes of memory + per DateTime instance. + +- Moved various internal parsing related class variables to module constants. + +- No longer provide the `DateError`, `DateTimeError`, `SyntaxError` and + `TimeError` exceptions as class attributes, import them from their canonical + `DateTime.interfaces` location instead. + +- Removed deprecated `_isDST` and `_localzone` class variables. + +- Moved pytz cache from `DateTime._tzinfo` to a module global `_TZINFO`. + +- Make DateTime a new-style class and limit its available attributes via a + slots definition. The pickle size increases to 110 bytes thanks to the + `ccopy_reg\n_reconstructor` stanza. But the memory size drops from 3kb to + 500 bytes for each instance. + +3.0a1 (2011-05-06) +------------------ + +- Reordered some calculations in `_calcIndependentSecondEtc` to preserve more + floating point precision. + +- Optimized the pickled data, by only storing a tuple of `_micros` and time + zone information - this reduces the pickle size from an average of 300 bytes + to just 60 bytes. + +- Optimized un-pickling, by avoiding the creation of an intermediate DateTime + value representing the current time. + +- Removed in-place migration of old DateTime pickles without a `_micros` value. + +- Removed deprecated support for using `DateTime.__cmp__`. + +- Take time zone settings into account when comparing two date times for + (non-) equality. + +- Fixed (possibly unused) _parse_iso8601 function. 
+ +- Removed unused import of legacy DateTimeZone, strftime and re. + Remove trailing whitespace. + +- Removed reference to missing version section from buildout. + +2.12.7 (2012-08-11) +------------------- + +- Added forward compatibility with DateTime 3 pickle format. DateTime + instances constructed under version 3 can be read and unpickled by this + version. The pickled data is converted to the current versions format + (old-style class / no slots). Once converted it will be stored again in the + old format. This should allow for a transparent upgrade/downgrade path + between DateTime 2 and 3. + +2.12.6 (2010-10-17) +------------------- + +- Changed ``testDayOfWeek`` test to be independent of OS locale. + +2.12.5 (2010-07-29) +------------------- + +- Launchpad #143269: Corrected the documentation for year value + behavior when constructing a DateTime object with three numeric + arguments. + +- Launchpad #142521: Removed confusing special case in + DateTime.__str__ where DateTime instances for midnight + (e.g. '2010-07-27 00:00:00 US/Eastern') values would + render only their date and nothing else. + +2.12.4 (2010-07-12) +------------------- + +- Fixed mapping of EDT (was -> 'GMT-0400', now 'GMT-4'). + +2.12.3 (2010-07-09) +------------------- + +- Added EDT timezone support. Addresses bug #599856. + [vangheem] + +2.12.2 (2010-05-05) +------------------- + +- Launchpad #572715: Relaxed pin on pytz, after applying a patch from + Marius Gedminus which fixes the apparent API breakage. + +2.12.1 (2010-04-30) +------------------- + +- Removed an undeclared testing dependency on zope.testing.doctest in favor of + the standard libraries doctest module. + +- Added a maximum version requirement on pytz <= 2010b. Later versions produce + test failures related to timezone changes. + +2.12.0 (2009-03-04) +------------------- + +- Launchpad #290254: Forward-ported fix for '_micros'-less pickles from + the Zope 2.11 branch version. 
+ +2.11.2 (2009-02-02) +------------------- + +- Include *all* pytz zone names, not just "common" ones. + +- Fix one fragile doctest, band-aid another. + +- Fix for launchpad #267545: DateTime(DateTime()) should preserve the + correct hour. + +2.11.1 (2008-08-05) +------------------- + +- DateTime conversion of datetime objects with non-pytz tzinfo. Timezones() + returns a copy of the timezone list (allows tests to run). + +- Merged the slinkp-datetime-200007 branch: fix the DateTime(anotherDateTime) + constructor to preserve timezones. + +2.11.0b1 (2008-01-06) +--------------------- + +- Split off from the Zope2 main source code tree. + + + diff --git a/thesisenv/lib/python3.6/site-packages/DateTime-4.3.dist-info/RECORD b/thesisenv/lib/python3.6/site-packages/DateTime-4.3.dist-info/RECORD new file mode 100644 index 0000000..c43e68a --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/DateTime-4.3.dist-info/RECORD @@ -0,0 +1,22 @@ +DateTime-4.3.dist-info/DESCRIPTION.rst,sha256=6bh46hbq62st_fsJ2q23PSoFCV5CJN_q4ysJyUaz-pE,29714 +DateTime-4.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +DateTime-4.3.dist-info/METADATA,sha256=K-cYd2_t0F3DEwV_bne8npbYOOxsc1BAwzR_fsWNZJA,30924 +DateTime-4.3.dist-info/RECORD,, +DateTime-4.3.dist-info/WHEEL,sha256=o2k-Qa-RMNIJmUdIc7KU6VWR_ErNRbWNlxDIpl7lm34,110 +DateTime-4.3.dist-info/metadata.json,sha256=NFIEd1jJz3oUtxS1vHItQWcb0GS8mUpYCHzK64DVy9k,1302 +DateTime-4.3.dist-info/top_level.txt,sha256=iVdUvuV_RIkkMzsnPGNfwojRWvuonInryaK3hA5Hh0o,9 +DateTime/DateTime.py,sha256=7OxshJpe23XInm7Jl6qw7iNQVBZRH8SUfrB0CvjLuc4,70958 +DateTime/DateTime.txt,sha256=QMckeQ2ZbnR7doTJ4lMA1JHgCdgYzAT1kmrGTeEdW0Y,22485 +DateTime/__init__.py,sha256=Pc16ScgL4_I1gHkmpU5ln2PdEuiDdJ815VWlMSqiAJo,713 +DateTime/__pycache__/DateTime.cpython-36.pyc,, +DateTime/__pycache__/__init__.cpython-36.pyc,, +DateTime/__pycache__/interfaces.cpython-36.pyc,, +DateTime/__pycache__/pytz_support.cpython-36.pyc,, 
+DateTime/interfaces.py,sha256=DJai5XEqsYFJfn8vdqQifma046gKXduzQ24H4BV9rq4,12275 +DateTime/pytz.txt,sha256=6KZ_Fj0fGZ6qCVrTGU3hyOhAcR_yVnlCw8cJHWcOr5I,5618 +DateTime/pytz_support.py,sha256=zXLKySu9lygwDUWI2ARaJf7J0KVrYHONLsCsdx2jP-8,11953 +DateTime/tests/__init__.py,sha256=H7Ixo1xp-8BlJ65u14hk5i_TKEmETyi2FmLMD6H-mpo,683 +DateTime/tests/__pycache__/__init__.cpython-36.pyc,, +DateTime/tests/__pycache__/test_datetime.cpython-36.pyc,, +DateTime/tests/julian_testdata.txt,sha256=qxvLvabVB9ayhh5UHBvPhuqW5mRL_lizzbUh6lc3d4I,1397 +DateTime/tests/test_datetime.py,sha256=fjzc1dQ7qIqwTEzYgXw4ZkU57An300swVW_7jV5qcs4,26572 diff --git a/thesisenv/lib/python3.6/site-packages/DateTime-4.3.dist-info/WHEEL b/thesisenv/lib/python3.6/site-packages/DateTime-4.3.dist-info/WHEEL new file mode 100644 index 0000000..8b6dd1b --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/DateTime-4.3.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.29.0) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/thesisenv/lib/python3.6/site-packages/DateTime-4.3.dist-info/metadata.json b/thesisenv/lib/python3.6/site-packages/DateTime-4.3.dist-info/metadata.json new file mode 100644 index 0000000..f07fb8d --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/DateTime-4.3.dist-info/metadata.json @@ -0,0 +1 @@ +{"classifiers": ["Development Status :: 6 - Mature", "Environment :: Web Environment", "Framework :: Zope :: 4", "License :: OSI Approved :: Zope Public License", "Operating System :: OS Independent", "Programming Language :: Python", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", "Programming Language :: Python :: Implementation :: CPython", "Programming Language :: Python :: Implementation :: PyPy"], 
"extensions": {"python.details": {"contacts": [{"email": "zope-dev@zope.org", "name": "Zope Foundation and Contributors", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst"}, "project_urls": {"Home": "https://github.com/zopefoundation/DateTime"}}}, "extras": [], "generator": "bdist_wheel (0.29.0)", "license": "ZPL 2.1", "metadata_version": "2.0", "name": "DateTime", "run_requires": [{"requires": ["pytz", "zope.interface"]}], "summary": "This package provides a DateTime data type, as known from Zope. Unless you need to communicate with Zope APIs, you're probably better off using Python's built-in datetime module.", "version": "4.3"} \ No newline at end of file diff --git a/thesisenv/lib/python3.6/site-packages/DateTime-4.3.dist-info/top_level.txt b/thesisenv/lib/python3.6/site-packages/DateTime-4.3.dist-info/top_level.txt new file mode 100644 index 0000000..1b8c206 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/DateTime-4.3.dist-info/top_level.txt @@ -0,0 +1 @@ +DateTime diff --git a/thesisenv/lib/python3.6/site-packages/DateTime/DateTime.py b/thesisenv/lib/python3.6/site-packages/DateTime/DateTime.py new file mode 100644 index 0000000..cc6ca78 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/DateTime/DateTime.py @@ -0,0 +1,1940 @@ +############################################################################## +# +# Copyright (c) 2002 Zope Foundation and Contributors. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## + +import math +import re +import sys +from time import altzone +from time import daylight +from time import gmtime +from time import localtime +from time import time +from time import timezone +from time import tzname +from datetime import datetime + +from zope.interface import implementer + +from .interfaces import IDateTime +from .interfaces import DateTimeError +from .interfaces import SyntaxError +from .interfaces import DateError +from .interfaces import TimeError +from .pytz_support import PytzCache + +if sys.version_info > (3, ): + import copyreg as copy_reg + basestring = str + long = int + explicit_unicode_type = type(None) +else: + import copy_reg + explicit_unicode_type = unicode + +default_datefmt = None + + +def getDefaultDateFormat(): + global default_datefmt + if default_datefmt is None: + try: + from App.config import getConfiguration + default_datefmt = getConfiguration().datetime_format + return default_datefmt + except Exception: + return 'us' + else: + return default_datefmt + +# To control rounding errors, we round system time to the nearest +# microsecond. Then delicate calculations can rely on that the +# maximum precision that needs to be preserved is known. 
+_system_time = time + + +def time(): + return round(_system_time(), 6) + +# Determine machine epoch +tm = ((0, 0, 31, 59, 90, 120, 151, 181, 212, 243, 273, 304, 334), + (0, 0, 31, 60, 91, 121, 152, 182, 213, 244, 274, 305, 335)) +yr, mo, dy, hr, mn, sc = gmtime(0)[:6] +i = int(yr - 1) +to_year = int(i * 365 + i // 4 - i // 100 + i // 400 - 693960.0) +to_month = tm[yr % 4 == 0 and (yr % 100 != 0 or yr % 400 == 0)][mo] +EPOCH = ((to_year + to_month + dy + + (hr / 24.0 + mn / 1440.0 + sc / 86400.0)) * 86400) +jd1901 = 2415385 + +_TZINFO = PytzCache() + +INT_PATTERN = re.compile(r'([0-9]+)') +FLT_PATTERN = re.compile(r':([0-9]+\.[0-9]+)') +NAME_PATTERN = re.compile(r'([a-zA-Z]+)', re.I) +SPACE_CHARS = ' \t\n' +DELIMITERS = '-/.:,+' + +_MONTH_LEN = ((0, 31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31), + (0, 31, 29, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31)) +_MONTHS = ('', 'January', 'February', 'March', 'April', 'May', 'June', + 'July', 'August', 'September', 'October', 'November', 'December') +_MONTHS_A = ('', 'Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', + 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec') +_MONTHS_P = ('', 'Jan.', 'Feb.', 'Mar.', 'Apr.', 'May', 'June', + 'July', 'Aug.', 'Sep.', 'Oct.', 'Nov.', 'Dec.') +_MONTHMAP = {'january': 1, 'jan': 1, + 'february': 2, 'feb': 2, + 'march': 3, 'mar': 3, + 'april': 4, 'apr': 4, + 'may': 5, + 'june': 6, 'jun': 6, + 'july': 7, 'jul': 7, + 'august': 8, 'aug': 8, + 'september': 9, 'sep': 9, 'sept': 9, + 'october': 10, 'oct': 10, + 'november': 11, 'nov': 11, + 'december': 12, 'dec': 12} +_DAYS = ('Sunday', 'Monday', 'Tuesday', 'Wednesday', + 'Thursday', 'Friday', 'Saturday') +_DAYS_A = ('Sun', 'Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat') +_DAYS_P = ('Sun.', 'Mon.', 'Tue.', 'Wed.', 'Thu.', 'Fri.', 'Sat.') +_DAYMAP = {'sunday': 1, 'sun': 1, + 'monday': 2, 'mon': 2, + 'tuesday': 3, 'tues': 3, 'tue': 3, + 'wednesday': 4, 'wed': 4, + 'thursday': 5, 'thurs': 5, 'thur': 5, 'thu': 5, + 'friday': 6, 'fri': 6, + 'saturday': 7, 'sat': 7} + 
+numericTimeZoneMatch = re.compile(r'[+-][0-9][0-9][0-9][0-9]').match +iso8601Match = re.compile(r''' + (?P\d\d\d\d) # four digits year + (?:-? # one optional dash + (?: # followed by: + (?P\d\d\d # three digits year day + (?!\d)) # when there is no fourth digit + | # or: + W # one W + (?P\d\d) # two digits week + (?:-? # one optional dash + (?P\d) # one digit week day + )? # week day is optional + | # or: + (?P\d\d)? # two digits month + (?:-? # one optional dash + (?P\d\d)? # two digits day + )? # after day is optional + ) # + )? # after year is optional + (?:[T ] # one T or one whitespace + (?P\d\d) # two digits hour + (?::? # one optional colon + (?P\d\d)? # two digits minute + (?::? # one optional colon + (?P\d\d)? # two digits second + (?:[.,] # one dot or one comma + (?P\d+) # n digits fraction + )? # after second is optional + )? # after minute is optional + )? # after hour is optional + (?: # timezone: + (?PZ) # one Z + | # or: + (?P[-+]) # one plus or one minus as signal + (?P\d # one digit for hour offset... + (?:\d(?!\d$) # ...or two, if not the last two digits + )?) # second hour offset digit is optional + (?::? # one optional colon + (?P\d\d) # two digits minute offset + )? # after hour offset is optional + )? # timezone is optional + )? # time is optional + (?P.*) # store the extra garbage +''', re.VERBOSE).match + + +def _findLocalTimeZoneName(isDST): + if not daylight: + # Daylight savings does not occur in this time zone. + isDST = 0 + try: + # Get the name of the current time zone depending + # on DST. + _localzone = PytzCache._zmap[tzname[isDST].lower()] + except: + try: + # Generate a GMT-offset zone name. 
+ if isDST: + localzone = altzone + else: + localzone = timezone + offset = (-localzone / 3600.0) + majorOffset = int(offset) + if majorOffset != 0: + minorOffset = abs(int((offset % majorOffset) * 60.0)) + else: + minorOffset = 0 + m = majorOffset >= 0 and '+' or '' + lz = '%s%0.02d%0.02d' % (m, majorOffset, minorOffset) + _localzone = PytzCache._zmap[('GMT%s' % lz).lower()] + except: + _localzone = '' + return _localzone + +_localzone0 = _findLocalTimeZoneName(0) +_localzone1 = _findLocalTimeZoneName(1) +_multipleZones = (_localzone0 != _localzone1) + +# Some utility functions for calculating dates: + + +def _calcSD(t): + # Returns timezone-independent days since epoch and the fractional + # part of the days. + dd = t + EPOCH - 86400.0 + d = dd / 86400.0 + s = d - math.floor(d) + return s, d + + +def _calcDependentSecond(tz, t): + # Calculates the timezone-dependent second (integer part only) + # from the timezone-independent second. + fset = _tzoffset(tz, t) + return fset + long(math.floor(t)) + long(EPOCH) - 86400 + + +def _calcDependentSecond2(yr, mo, dy, hr, mn, sc): + # Calculates the timezone-dependent second (integer part only) + # from the date given. + ss = int(hr) * 3600 + int(mn) * 60 + int(sc) + x = long(_julianday(yr, mo, dy) - jd1901) * 86400 + ss + return x + + +def _calcIndependentSecondEtc(tz, x, ms): + # Derive the timezone-independent second from the timezone + # dependent second. + fsetAtEpoch = _tzoffset(tz, 0.0) + nearTime = x - fsetAtEpoch - long(EPOCH) + 86400 + ms + # nearTime is now within an hour of being correct. + # Recalculate t according to DST. + fset = long(_tzoffset(tz, nearTime)) + d = (x - fset) / 86400.0 + (ms / 86400.0) + t = x - fset - long(EPOCH) + 86400 + ms + micros = (x + 86400 - fset) * 1000000 + \ + long(round(ms * 1000000.0)) - long(EPOCH * 1000000.0) + s = d - math.floor(d) + return (s, d, t, micros) + + +def _calcHMS(x, ms): + # hours, minutes, seconds from integer and float. 
+ hr = x // 3600 + x = x - hr * 3600 + mn = x // 60 + sc = x - mn * 60 + ms + return (hr, mn, sc) + + +def _calcYMDHMS(x, ms): + # x is a timezone-dependent integer of seconds. + # Produces yr,mo,dy,hr,mn,sc. + yr, mo, dy = _calendarday(x // 86400 + jd1901) + x = int(x - (x // 86400) * 86400) + hr = x // 3600 + x = x - hr * 3600 + mn = x // 60 + sc = x - mn * 60 + ms + return (yr, mo, dy, hr, mn, sc) + + +def _julianday(yr, mo, dy): + y, m, d = long(yr), long(mo), long(dy) + if m > 12: + y = y + m // 12 + m = m % 12 + elif m < 1: + m = -m + y = y - m // 12 - 1 + m = 12 - m % 12 + if y > 0: + yr_correct = 0 + else: + yr_correct = 3 + if m < 3: + y, m = y - 1, m + 12 + if y * 10000 + m * 100 + d > 15821014: + b = 2 - y // 100 + y // 400 + else: + b = 0 + return ((1461 * y - yr_correct) // 4 + + 306001 * (m + 1) // 10000 + d + 1720994 + b) + + +def _calendarday(j): + j = long(j) + if (j < 2299160): + b = j + 1525 + else: + a = (4 * j - 7468861) // 146097 + b = j + 1526 + a - a // 4 + c = (20 * b - 2442) // 7305 + d = 1461 * c // 4 + e = 10000 * (b - d) // 306001 + dy = int(b - d - 306001 * e // 10000) + mo = (e < 14) and int(e - 1) or int(e - 13) + yr = (mo > 2) and (c - 4716) or (c - 4715) + return (int(yr), int(mo), int(dy)) + + +def _tzoffset(tz, t): + """Returns the offset in seconds to GMT from a specific timezone (tz) at + a specific time (t). NB! The _tzoffset result is the same same sign as + the time zone, i.e. GMT+2 has a 7200 second offset. This is the opposite + sign of time.timezone which (confusingly) is -7200 for GMT+2.""" + try: + return _TZINFO[tz].info(t)[0] + except Exception: + if numericTimeZoneMatch(tz) is not None: + return int(tz[0:3]) * 3600 + int(tz[0] + tz[3:5]) * 60 + else: + return 0 # ?? + + +def _correctYear(year): + # Y2K patch. + if year >= 0 and year < 100: + # 00-69 means 2000-2069, 70-99 means 1970-1999. 
+ if year < 70: + year = 2000 + year + else: + year = 1900 + year + return year + + +def safegmtime(t): + '''gmtime with a safety zone.''' + try: + return gmtime(t) + except (ValueError, OverflowError): + raise TimeError('The time %f is beyond the range of this Python ' + 'implementation.' % float(t)) + + +def safelocaltime(t): + '''localtime with a safety zone.''' + try: + return localtime(t) + except (ValueError, OverflowError): + raise TimeError('The time %f is beyond the range of this Python ' + 'implementation.' % float(t)) + + +def _tzoffset2rfc822zone(seconds): + """Takes an offset, such as from _tzoffset(), and returns an rfc822 + compliant zone specification. Please note that the result of + _tzoffset() is the negative of what time.localzone and time.altzone is. + """ + return "%+03d%02d" % divmod((seconds // 60), 60) + + +def _tzoffset2iso8601zone(seconds): + """Takes an offset, such as from _tzoffset(), and returns an ISO 8601 + compliant zone specification. Please note that the result of + _tzoffset() is the negative of what time.localzone and time.altzone is. + """ + return "%+03d:%02d" % divmod((seconds // 60), 60) + + +def Timezones(): + """Return the list of recognized timezone names""" + return sorted(list(PytzCache._zmap.values())) + + +class strftimeFormatter(object): + + def __init__(self, dt, format): + self.dt = dt + self.format = format + + def __call__(self): + return self.dt.strftime(self.format) + + +@implementer(IDateTime) +class DateTime(object): + """DateTime objects represent instants in time and provide + interfaces for controlling its representation without + affecting the absolute value of the object. + + DateTime objects may be created from a wide variety of string + or numeric data, or may be computed from other DateTime objects. + DateTimes support the ability to convert their representations + to many major timezones, as well as the ablility to create a + DateTime object in the context of a given timezone. 
+ + DateTime objects provide partial numerical behavior: + + - Two date-time objects can be subtracted to obtain a time, + in days between the two. + + - A date-time object and a positive or negative number may + be added to obtain a new date-time object that is the given + number of days later than the input date-time object. + + - A positive or negative number and a date-time object may + be added to obtain a new date-time object that is the given + number of days later than the input date-time object. + + - A positive or negative number may be subtracted from a + date-time object to obtain a new date-time object that is + the given number of days earlier than the input date-time + object. + + DateTime objects may be converted to integer, long, or float + numbers of days since January 1, 1901, using the standard int, + long, and float functions (Compatibility Note: int, long and + float return the number of days since 1901 in GMT rather than + local machine timezone). DateTime objects also provide access + to their value in a float format usable with the python time + module, provided that the value of the object falls in the + range of the epoch-based time module, and as a datetime.datetime + object. 
+ + A DateTime object should be considered immutable; all conversion + and numeric operations return a new DateTime object rather than + modify the current object.""" + + # For security machinery: + __roles__ = None + __allow_access_to_unprotected_subobjects__ = 1 + + # Limit the amount of instance attributes + __slots__ = ( + '_timezone_naive', + '_tz', + '_dayoffset', + '_year', + '_month', + '_day', + '_hour', + '_minute', + '_second', + '_nearsec', + '_d', + '_micros', + 'time', + ) + + def __init__(self, *args, **kw): + """Return a new date-time object""" + try: + return self._parse_args(*args, **kw) + except (DateError, TimeError, DateTimeError): + raise + except Exception: + raise SyntaxError('Unable to parse %s, %s' % (args, kw)) + + def __getstate__(self): + # We store a float of _micros, instead of the _micros long, as we most + # often don't have any sub-second resolution and can save those bytes + return (self._micros / 1000000.0, + getattr(self, '_timezone_naive', False), + self._tz) + + def __setstate__(self, value): + if isinstance(value, tuple): + self._parse_args(value[0], value[2]) + self._micros = long(value[0] * 1000000) + self._timezone_naive = value[1] + else: + for k, v in value.items(): + if k in self.__slots__: + setattr(self, k, v) + # BBB: support for very old DateTime pickles + if '_micros' not in value: + self._micros = long(value['_t'] * 1000000) + if '_timezone_naive' not in value: + self._timezone_naive = False + + def _parse_args(self, *args, **kw): + """Return a new date-time object. + + A DateTime object always maintains its value as an absolute + UTC time, and is represented in the context of some timezone + based on the arguments used to create the object. A DateTime + object's methods return values based on the timezone context. + + Note that in all cases the local machine timezone is used for + representation if no timezone is specified. + + DateTimes may be created with from zero to seven arguments. 
+ + - If the function is called with no arguments or with None, + then the current date/time is returned, represented in the + timezone of the local machine. + + - If the function is invoked with a single string argument + which is a recognized timezone name, an object representing + the current time is returned, represented in the specified + timezone. + + - If the function is invoked with a single string argument + representing a valid date/time, an object representing + that date/time will be returned. + + As a general rule, any date-time representation that is + recognized and unambigous to a resident of North America + is acceptable. The reason for this qualification is that + in North America, a date like: 2/1/1994 is interpreted + as February 1, 1994, while in some parts of the world, + it is interpreted as January 2, 1994. + + A date/time string consists of two components, a date + component and an optional time component, separated by one + or more spaces. If the time component is omited, 12:00am is + assumed. Any recognized timezone name specified as the final + element of the date/time string will be used for computing + the date/time value. If you create a DateTime with the + string 'Mar 9, 1997 1:45pm US/Pacific', the value will + essentially be the same as if you had captured time.time() + at the specified date and time on a machine in that timezone: + +
+            e=DateTime('US/Eastern')
+            # returns current date/time, represented in US/Eastern.
+
+            x=DateTime('1997/3/9 1:45pm')
+            # returns specified time, represented in local machine zone.
+
+            y=DateTime('Mar 9, 1997 13:45:00')
+            # y is equal to x
+            
+ + The date component consists of year, month, and day + values. The year value must be a one-, two-, or + four-digit integer. If a one- or two-digit year is + used, the year is assumed to be in the twentieth + century. The month may be an integer, from 1 to 12, a + month name, or a month abreviation, where a period may + optionally follow the abreviation. The day must be an + integer from 1 to the number of days in the month. The + year, month, and day values may be separated by + periods, hyphens, forward, shashes, or spaces. Extra + spaces are permitted around the delimiters. Year, + month, and day values may be given in any order as long + as it is possible to distinguish the components. If all + three components are numbers that are less than 13, + then a a month-day-year ordering is assumed. + + The time component consists of hour, minute, and second + values separated by colons. The hour value must be an + integer between 0 and 23 inclusively. The minute value + must be an integer between 0 and 59 inclusively. The + second value may be an integer value between 0 and + 59.999 inclusively. The second value or both the minute + and second values may be ommitted. The time may be + followed by am or pm in upper or lower case, in which + case a 12-hour clock is assumed. + + New in Zope 2.4: + The DateTime constructor automatically detects and handles + ISO8601 compliant dates (YYYY-MM-DDThh:ss:mmTZD). + + New in Zope 2.9.6: + The existing ISO8601 parser was extended to support almost + the whole ISO8601 specification. New formats includes: + +
+            y=DateTime('1993-045')
+            # returns the 45th day from 1993, which is 14th February
+
+            w=DateTime('1993-W06-7')
+            # returns the 7th day from the 6th week from 1993, which
+            # is also 14th February
+            
+ + See http://en.wikipedia.org/wiki/ISO_8601 for full specs. + + Note that the Zope DateTime parser assumes timezone naive ISO + strings to be in UTC rather than local time as specified. + + - If the DateTime function is invoked with a single Numeric + argument, the number is assumed to be a floating point value + such as that returned by time.time(). + + A DateTime object is returned that represents the GMT value + of the time.time() float represented in the local machine's + timezone. + + - If the DateTime function is invoked with a single argument + that is a DateTime instane, a copy of the passed object will + be created. + + - New in 2.11: + The DateTime function may now be invoked with a single argument + that is a datetime.datetime instance. DateTimes may be converted + back to datetime.datetime objects with asdatetime(). + DateTime instances may be converted to a timezone naive + datetime.datetime in UTC with utcdatetime(). + + - If the function is invoked with two numeric arguments, then + the first is taken to be an integer year and the second + argument is taken to be an offset in days from the beginning + of the year, in the context of the local machine timezone. + + The date-time value returned is the given offset number of + days from the beginning of the given year, represented in + the timezone of the local machine. The offset may be positive + or negative. + + Two-digit years are assumed to be in the twentieth + century. + + - If the function is invoked with two arguments, the first + a float representing a number of seconds past the epoch + in gmt (such as those returned by time.time()) and the + second a string naming a recognized timezone, a DateTime + with a value of that gmt time will be returned, represented + in the given timezone. + +
+            import time
+            t=time.time()
+
+            now_east=DateTime(t,'US/Eastern')
+            # Time t represented as US/Eastern
+
+            now_west=DateTime(t,'US/Pacific')
+            # Time t represented as US/Pacific
+
+            # now_east == now_west
+            # only their representations are different
+            
+ + - If the function is invoked with three or more numeric + arguments, then the first is taken to be an integer + year, the second is taken to be an integer month, and + the third is taken to be an integer day. If the + combination of values is not valid, then a + DateError is raised. Two-digit years are assumed + to be in the twentieth century. The fourth, fifth, and + sixth arguments specify a time in hours, minutes, and + seconds; hours and minutes should be positive integers + and seconds is a positive floating point value, all of + these default to zero if not given. An optional string may + be given as the final argument to indicate timezone (the + effect of this is as if you had taken the value of time.time() + at that time on a machine in the specified timezone). + + New in Zope 2.7: + A new keyword parameter "datefmt" can be passed to the + constructor. If set to "international", the constructor + is forced to treat ambigious dates as "days before month + before year". This useful if you need to parse non-US + dates in a reliable way + + In any case that a floating point number of seconds is given + or derived, it's rounded to the nearest millisecond. + + If a string argument passed to the DateTime constructor cannot be + parsed, it will raise DateTime.SyntaxError. Invalid date components + will raise a DateError, while invalid time or timezone components + will raise a DateTimeError. + + The module function Timezones() will return a list of the (common) + timezones recognized by the DateTime module. Recognition of + timezone names is case-insensitive. 
+ """ + + datefmt = kw.get('datefmt', getDefaultDateFormat()) + d = t = s = None + ac = len(args) + microsecs = None + + if ac == 10: + # Internal format called only by DateTime + yr, mo, dy, hr, mn, sc, tz, t, d, s = args + elif ac == 11: + # Internal format that includes milliseconds (from the epoch) + yr, mo, dy, hr, mn, sc, tz, t, d, s, millisecs = args + microsecs = millisecs * 1000 + + elif ac == 12: + # Internal format that includes microseconds (from the epoch) and a + # flag indicating whether this was constructed in a timezone naive + # manner + yr, mo, dy, hr, mn, sc, tz, t, d, s, microsecs, tznaive = args + if tznaive is not None: # preserve this information + self._timezone_naive = tznaive + + elif not args or (ac and args[0] is None): + # Current time, to be displayed in local timezone + t = time() + lt = safelocaltime(t) + tz = self.localZone(lt) + ms = (t - math.floor(t)) + s, d = _calcSD(t) + yr, mo, dy, hr, mn, sc = lt[:6] + sc = sc + ms + self._timezone_naive = False + + elif ac == 1: + arg = args[0] + + if arg == '': + raise SyntaxError(arg) + + if isinstance(arg, DateTime): + """Construct a new DateTime instance from a given + DateTime instance. 
+ """ + t = arg.timeTime() + s, d = _calcSD(t) + yr, mo, dy, hr, mn, sc, tz = arg.parts() + + elif isinstance(arg, datetime): + yr, mo, dy, hr, mn, sc, numerictz, tznaive = \ + self._parse_iso8601_preserving_tznaive(arg.isoformat()) + if arg.tzinfo is None: + self._timezone_naive = True + tz = None + else: + self._timezone_naive = False + # if we have a pytz tzinfo, use the `zone` attribute + # as a key + tz = getattr(arg.tzinfo, 'zone', numerictz) + ms = sc - math.floor(sc) + x = _calcDependentSecond2(yr, mo, dy, hr, mn, sc) + + if tz: + try: + zone = _TZINFO[tz] + except DateTimeError: + try: + zone = _TZINFO[numerictz] + except DateTimeError: + raise DateTimeError( + 'Unknown time zone in date: %s' % arg) + tz = zone.tzinfo.zone + else: + tz = self._calcTimezoneName(x, ms) + s, d, t, microsecs = _calcIndependentSecondEtc(tz, x, ms) + + elif (isinstance(arg, basestring) and + arg.lower() in _TZINFO._zidx): + # Current time, to be displayed in specified timezone + t, tz = time(), _TZINFO._zmap[arg.lower()] + ms = (t - math.floor(t)) + # Use integer arithmetic as much as possible. 
+ s, d = _calcSD(t) + x = _calcDependentSecond(tz, t) + yr, mo, dy, hr, mn, sc = _calcYMDHMS(x, ms) + + elif isinstance(arg, basestring): + # Date/time string + iso8601 = iso8601Match(arg.strip()) + fields_iso8601 = iso8601 and iso8601.groupdict() or {} + if fields_iso8601 and not fields_iso8601.get('garbage'): + yr, mo, dy, hr, mn, sc, tz, tznaive = \ + self._parse_iso8601_preserving_tznaive(arg) + self._timezone_naive = tznaive + else: + yr, mo, dy, hr, mn, sc, tz = self._parse(arg, datefmt) + + if not self._validDate(yr, mo, dy): + raise DateError('Invalid date: %s' % arg) + if not self._validTime(hr, mn, int(sc)): + raise TimeError('Invalid time: %s' % arg) + ms = sc - math.floor(sc) + x = _calcDependentSecond2(yr, mo, dy, hr, mn, sc) + + if tz: + try: + tz = _TZINFO._zmap[tz.lower()] + except KeyError: + if numericTimeZoneMatch(tz) is None: + raise DateTimeError( + 'Unknown time zone in date: %s' % arg) + else: + tz = self._calcTimezoneName(x, ms) + s, d, t, microsecs = _calcIndependentSecondEtc(tz, x, ms) + + else: + # Seconds from epoch, gmt + t = arg + lt = safelocaltime(t) + tz = self.localZone(lt) + ms = (t - math.floor(t)) + s, d = _calcSD(t) + yr, mo, dy, hr, mn, sc = lt[:6] + sc = sc + ms + + elif ac == 2: + if isinstance(args[1], basestring): + # Seconds from epoch (gmt) and timezone + t, tz = args + ms = (t - math.floor(t)) + try: + tz = _TZINFO._zmap[tz.lower()] + except KeyError: + if numericTimeZoneMatch(tz) is None: + raise DateTimeError('Unknown time zone: %s' % tz) + # Use integer arithmetic as much as possible. 
+ s, d = _calcSD(t) + x = _calcDependentSecond(tz, t) + yr, mo, dy, hr, mn, sc = _calcYMDHMS(x, ms) + else: + # Year, julian expressed in local zone + t = time() + lt = safelocaltime(t) + tz = self.localZone(lt) + yr, jul = args + yr = _correctYear(yr) + d = (_julianday(yr, 1, 0) - jd1901) + jul + x_float = d * 86400.0 + x_floor = math.floor(x_float) + ms = x_float - x_floor + x = long(x_floor) + yr, mo, dy, hr, mn, sc = _calcYMDHMS(x, ms) + s, d, t, microsecs = _calcIndependentSecondEtc(tz, x, ms) + else: + # Explicit format + yr, mo, dy = args[:3] + hr, mn, sc, tz = 0, 0, 0, 0 + yr = _correctYear(yr) + if not self._validDate(yr, mo, dy): + raise DateError('Invalid date: %s' % (args, )) + args = args[3:] + if args: + hr, args = args[0], args[1:] + if args: + mn, args = args[0], args[1:] + if args: + sc, args = args[0], args[1:] + if args: + tz, args = args[0], args[1:] + if args: + raise DateTimeError('Too many arguments') + if not self._validTime(hr, mn, sc): + raise TimeError('Invalid time: %s' % repr(args)) + + x = _calcDependentSecond2(yr, mo, dy, hr, mn, sc) + ms = sc - math.floor(sc) + if tz: + try: + tz = _TZINFO._zmap[tz.lower()] + except KeyError: + if numericTimeZoneMatch(tz) is None: + raise DateTimeError('Unknown time zone: %s' % tz) + else: + # Get local time zone name + tz = self._calcTimezoneName(x, ms) + s, d, t, microsecs = _calcIndependentSecondEtc(tz, x, ms) + + self._dayoffset = int((_julianday(yr, mo, dy) + 2) % 7) + # Round to nearest microsecond in platform-independent way. You + # cannot rely on C sprintf (Python '%') formatting to round + # consistently; doing it ourselves ensures that all but truly + # horrid C sprintf implementations will yield the same result + # x-platform, provided the format asks for exactly 6 digits after + # the decimal point. 
+ sc = round(sc, 6) + if sc >= 60.0: # can happen if, e.g., orig sc was 59.9999999 + sc = 59.999999 + self._nearsec = math.floor(sc) + self._year, self._month, self._day = yr, mo, dy + self._hour, self._minute, self._second = hr, mn, sc + self.time, self._d, self._tz = s, d, tz + # self._micros is the time since the epoch + # in long integer microseconds. + if microsecs is None: + microsecs = long(math.floor(t * 1000000.0)) + self._micros = microsecs + + def localZone(self, ltm=None): + '''Returns the time zone on the given date. The time zone + can change according to daylight savings.''' + if not _multipleZones: + return _localzone0 + if ltm is None: + ltm = localtime(time()) + isDST = ltm[8] + lz = isDST and _localzone1 or _localzone0 + return lz + + def _calcTimezoneName(self, x, ms): + # Derive the name of the local time zone at the given + # timezone-dependent second. + if not _multipleZones: + return _localzone0 + fsetAtEpoch = _tzoffset(_localzone0, 0.0) + nearTime = x - fsetAtEpoch - long(EPOCH) + 86400 + ms + # nearTime is within an hour of being correct. + try: + ltm = safelocaltime(nearTime) + except: + # We are beyond the range of Python's date support. + # Hopefully we can assume that daylight savings schedules + # repeat every 28 years. Calculate the name of the + # time zone using a supported range of years. + yr, mo, dy, hr, mn, sc = _calcYMDHMS(x, 0) + yr = ((yr - 1970) % 28) + 1970 + x = _calcDependentSecond2(yr, mo, dy, hr, mn, sc) + nearTime = x - fsetAtEpoch - long(EPOCH) + 86400 + ms + + # nearTime might still be negative if we are east of Greenwich. + # But we can asume on 1969/12/31 were no timezone changes. 
+ nearTime = max(0, nearTime) + + ltm = safelocaltime(nearTime) + tz = self.localZone(ltm) + return tz + + def _parse(self, st, datefmt=getDefaultDateFormat()): + # Parse date-time components from a string + month = year = tz = tm = None + ValidZones = _TZINFO._zidx + TimeModifiers = ['am', 'pm'] + + # Find timezone first, since it should always be the last + # element, and may contain a slash, confusing the parser. + st = st.strip() + sp = st.split() + tz = sp[-1] + if tz and (tz.lower() in ValidZones): + self._timezone_naive = False + st = ' '.join(sp[:-1]) + else: + self._timezone_naive = True + tz = None # Decide later, since the default time zone + # could depend on the date. + + ints = [] + i = 0 + l = len(st) + while i < l: + while i < l and st[i] in SPACE_CHARS: + i += 1 + if i < l and st[i] in DELIMITERS: + d = st[i] + i += 1 + else: + d = '' + while i < l and st[i] in SPACE_CHARS: + i += 1 + + # The float pattern needs to look back 1 character, because it + # actually looks for a preceding colon like ':33.33'. This is + # needed to avoid accidentally matching the date part of a + # dot-separated date string such as '1999.12.31'. 
+ if i > 0: + b = i - 1 + else: + b = i + + ts_results = FLT_PATTERN.match(st, b) + if ts_results: + s = ts_results.group(1) + i = i + len(s) + ints.append(float(s)) + continue + + #AJ + ts_results = INT_PATTERN.match(st, i) + if ts_results: + s = ts_results.group(0) + + ls = len(s) + i = i + ls + if (ls == 4 and d and d in '+-' and + (len(ints) + (not not month) >= 3)): + tz = '%s%s' % (d, s) + else: + v = int(s) + ints.append(v) + continue + + ts_results = NAME_PATTERN.match(st, i) + if ts_results: + s = ts_results.group(0).lower() + i = i + len(s) + if i < l and st[i] == '.': + i += 1 + # Check for month name: + _v = _MONTHMAP.get(s) + if _v is not None: + if month is None: + month = _v + else: + raise SyntaxError(st) + continue + # Check for time modifier: + if s in TimeModifiers: + if tm is None: + tm = s + else: + raise SyntaxError(st) + continue + # Check for and skip day of week: + if s in _DAYMAP: + continue + + raise SyntaxError(st) + + day = None + if ints[-1] > 60 and d not in ('.', ':', '/') and len(ints) > 2: + year = ints[-1] + del ints[-1] + if month: + day = ints[0] + del ints[:1] + else: + if datefmt == "us": + month = ints[0] + day = ints[1] + else: + month = ints[1] + day = ints[0] + del ints[:2] + elif month: + if len(ints) > 1: + if ints[0] > 31: + year = ints[0] + day = ints[1] + else: + year = ints[1] + day = ints[0] + del ints[:2] + elif len(ints) > 2: + if ints[0] > 31: + year = ints[0] + if ints[1] > 12: + day = ints[1] + month = ints[2] + else: + day = ints[2] + month = ints[1] + if ints[1] > 31: + year = ints[1] + if ints[0] > 12 and ints[2] <= 12: + day = ints[0] + month = ints[2] + elif ints[2] > 12 and ints[0] <= 12: + day = ints[2] + month = ints[0] + elif ints[2] > 31: + year = ints[2] + if ints[0] > 12: + day = ints[0] + month = ints[1] + else: + if datefmt == "us": + day = ints[1] + month = ints[0] + else: + day = ints[0] + month = ints[1] + + elif ints[0] <= 12: + month = ints[0] + day = ints[1] + year = ints[2] + del ints[:3] + 
+ if day is None: + # Use today's date. + year, month, day = localtime(time())[:3] + + year = _correctYear(year) + if year < 1000: + raise SyntaxError(st) + + leap = year % 4 == 0 and (year % 100 != 0 or year % 400 == 0) + try: + if not day or day > _MONTH_LEN[leap][month]: + raise DateError(st) + except IndexError: + raise DateError(st) + + tod = 0 + if ints: + i = ints[0] + # Modify hour to reflect am/pm + if tm and (tm == 'pm') and i < 12: + i += 12 + if tm and (tm == 'am') and i == 12: + i = 0 + if i > 24: + raise TimeError(st) + tod = tod + int(i) * 3600 + del ints[0] + if ints: + i = ints[0] + if i > 60: + raise TimeError(st) + tod = tod + int(i) * 60 + del ints[0] + if ints: + i = ints[0] + if i > 60: + raise TimeError(st) + tod = tod + i + del ints[0] + if ints: + raise SyntaxError(st) + + tod_int = int(math.floor(tod)) + ms = tod - tod_int + hr, mn, sc = _calcHMS(tod_int, ms) + if not tz: + # Figure out what time zone it is in the local area + # on the given date. + x = _calcDependentSecond2(year, month, day, hr, mn, sc) + tz = self._calcTimezoneName(x, ms) + + return year, month, day, hr, mn, sc, tz + + # Internal methods + def _validDate(self, y, m, d): + if m < 1 or m > 12 or y < 0 or d < 1 or d > 31: + return 0 + return d <= _MONTH_LEN[ + (y % 4 == 0 and (y % 100 != 0 or y % 400 == 0))][m] + + def _validTime(self, h, m, s): + return h >= 0 and h <= 23 and m >= 0 and m <= 59 and s >= 0 and s < 60 + + def __getattr__(self, name): + if '%' in name: + return strftimeFormatter(self, name) + raise AttributeError(name) + + # Conversion and comparison methods + + def timeTime(self): + """Return the date/time as a floating-point number in UTC, + in the format used by the python time module. + + Note that it is possible to create date/time values with + DateTime that have no meaningful value to the time module. 
+ """ + return self._micros / 1000000.0 + + def toZone(self, z): + """Return a DateTime with the value as the current + object, represented in the indicated timezone. + """ + t, tz = self._t, _TZINFO._zmap[z.lower()] + micros = self.micros() + tznaive = False # you're performing a timzone change, can't be naive + + try: + # Try to use time module for speed. + yr, mo, dy, hr, mn, sc = safegmtime(t + _tzoffset(tz, t))[:6] + sc = self._second + return self.__class__(yr, mo, dy, hr, mn, sc, tz, t, + self._d, self.time, micros, tznaive) + except Exception: + # gmtime can't perform the calculation in the given range. + # Calculate the difference between the two time zones. + tzdiff = _tzoffset(tz, t) - _tzoffset(self._tz, t) + if tzdiff == 0: + return self + sc = self._second + ms = sc - math.floor(sc) + x = _calcDependentSecond2(self._year, self._month, self._day, + self._hour, self._minute, sc) + x_new = x + tzdiff + yr, mo, dy, hr, mn, sc = _calcYMDHMS(x_new, ms) + return self.__class__(yr, mo, dy, hr, mn, sc, tz, t, + self._d, self.time, micros, tznaive) + + def isFuture(self): + """Return true if this object represents a date/time + later than the time of the call. + """ + return (self._t > time()) + + def isPast(self): + """Return true if this object represents a date/time + earlier than the time of the call. + """ + return (self._t < time()) + + def isCurrentYear(self): + """Return true if this object represents a date/time + that falls within the current year, in the context + of this object\'s timezone representation. + """ + t = time() + return safegmtime(t + _tzoffset(self._tz, t))[0] == self._year + + def isCurrentMonth(self): + """Return true if this object represents a date/time + that falls within the current month, in the context + of this object\'s timezone representation. 
+ """ + t = time() + gmt = safegmtime(t + _tzoffset(self._tz, t)) + return gmt[0] == self._year and gmt[1] == self._month + + def isCurrentDay(self): + """Return true if this object represents a date/time + that falls within the current day, in the context + of this object\'s timezone representation. + """ + t = time() + gmt = safegmtime(t + _tzoffset(self._tz, t)) + return (gmt[0] == self._year and gmt[1] == self._month and + gmt[2] == self._day) + + def isCurrentHour(self): + """Return true if this object represents a date/time + that falls within the current hour, in the context + of this object\'s timezone representation. + """ + t = time() + gmt = safegmtime(t + _tzoffset(self._tz, t)) + return (gmt[0] == self._year and gmt[1] == self._month and + gmt[2] == self._day and gmt[3] == self._hour) + + def isCurrentMinute(self): + """Return true if this object represents a date/time + that falls within the current minute, in the context + of this object\'s timezone representation. + """ + t = time() + gmt = safegmtime(t + _tzoffset(self._tz, t)) + return (gmt[0] == self._year and gmt[1] == self._month and + gmt[2] == self._day and gmt[3] == self._hour and + gmt[4] == self._minute) + + def earliestTime(self): + """Return a new DateTime object that represents the earliest + possible time (in whole seconds) that still falls within + the current object\'s day, in the object\'s timezone context. + """ + return self.__class__( + self._year, self._month, self._day, 0, 0, 0, self._tz) + + def latestTime(self): + """Return a new DateTime object that represents the latest + possible time (in whole seconds) that still falls within + the current object\'s day, in the object\'s timezone context. + """ + return self.__class__( + self._year, self._month, self._day, 23, 59, 59, self._tz) + + def greaterThan(self, t): + """Compare this DateTime object to another DateTime object + OR a floating point number such as that which is returned + by the python time module. 
+ + Returns true if the object represents a date/time greater + than the specified DateTime or time module style time. + + Revised to give more correct results through comparison of + long integer microseconds. + """ + if t is None: + t = 0 + if isinstance(t, float): + return self._micros > long(t * 1000000) + try: + return self._micros > t._micros + except AttributeError: + return self._micros > t + + __gt__ = greaterThan + + def greaterThanEqualTo(self, t): + """Compare this DateTime object to another DateTime object + OR a floating point number such as that which is returned + by the python time module. + + Returns true if the object represents a date/time greater + than or equal to the specified DateTime or time module style + time. + + Revised to give more correct results through comparison of + long integer microseconds. + """ + if t is None: + t = 0 + if isinstance(t, float): + return self._micros >= long(t * 1000000) + try: + return self._micros >= t._micros + except AttributeError: + return self._micros >= t + + __ge__ = greaterThanEqualTo + + def equalTo(self, t): + """Compare this DateTime object to another DateTime object + OR a floating point number such as that which is returned + by the python time module. + + Returns true if the object represents a date/time equal to + the specified DateTime or time module style time. + + Revised to give more correct results through comparison of + long integer microseconds. + """ + if t is None: + t = 0 + if isinstance(t, float): + return self._micros == long(t * 1000000) + try: + return self._micros == t._micros + except AttributeError: + return self._micros == t + + def notEqualTo(self, t): + """Compare this DateTime object to another DateTime object + OR a floating point number such as that which is returned + by the python time module. + + Returns true if the object represents a date/time not equal + to the specified DateTime or time module style time. 
+ + Revised to give more correct results through comparison of + long integer microseconds. + """ + return not self.equalTo(t) + + def __eq__(self, t): + """Compare this DateTime object to another DateTime object. + Return True if their internal state is the same. Two objects + representing the same time in different timezones are regared as + unequal. Use the equalTo method if you are only interested in them + refering to the same moment in time. + """ + if not isinstance(t, DateTime): + return False + return (self._micros, self._tz) == (t._micros, t._tz) + + def __ne__(self, t): + return not self.__eq__(t) + + def lessThan(self, t): + """Compare this DateTime object to another DateTime object + OR a floating point number such as that which is returned + by the python time module. + + Returns true if the object represents a date/time less than + the specified DateTime or time module style time. + + Revised to give more correct results through comparison of + long integer microseconds. + """ + if t is None: + t = 0 + if isinstance(t, float): + return self._micros < long(t * 1000000) + try: + return self._micros < t._micros + except AttributeError: + return self._micros < t + + __lt__ = lessThan + + def lessThanEqualTo(self, t): + """Compare this DateTime object to another DateTime object + OR a floating point number such as that which is returned + by the python time module. + + Returns true if the object represents a date/time less than + or equal to the specified DateTime or time module style time. + + Revised to give more correct results through comparison of + long integer microseconds. + """ + if t is None: + t = 0 + if isinstance(t, float): + return self._micros <= long(t * 1000000) + try: + return self._micros <= t._micros + except AttributeError: + return self._micros <= t + + __le__ = lessThanEqualTo + + def isLeapYear(self): + """Return true if the current year (in the context of the + object\'s timezone) is a leap year. 
+ """ + return (self._year % 4 == 0 and + (self._year % 100 != 0 or self._year % 400 == 0)) + + def dayOfYear(self): + """Return the day of the year, in context of the timezone + representation of the object. + """ + d = int(self._d + (_tzoffset(self._tz, self._t) / 86400.0)) + return int((d + jd1901) - _julianday(self._year, 1, 0)) + + # Component access + def parts(self): + """Return a tuple containing the calendar year, month, + day, hour, minute second and timezone of the object. + """ + return (self._year, self._month, self._day, self._hour, + self._minute, self._second, self._tz) + + def timezone(self): + """Return the timezone in which the object is represented.""" + return self._tz + + def tzoffset(self): + """Return the timezone offset for the objects timezone.""" + return _tzoffset(self._tz, self._t) + + def year(self): + """Return the calendar year of the object.""" + return self._year + + def month(self): + """Return the month of the object as an integer.""" + return self._month + + @property + def _fmon(self): + return _MONTHS[self._month] + + def Month(self): + """Return the full month name.""" + return self._fmon + + @property + def _amon(self): + return _MONTHS_A[self._month] + + def aMonth(self): + """Return the abreviated month name.""" + return self._amon + + def Mon(self): + """Compatibility: see aMonth.""" + return self._amon + + @property + def _pmon(self): + return _MONTHS_P[self._month] + + def pMonth(self): + """Return the abreviated (with period) month name.""" + return self._pmon + + def Mon_(self): + """Compatibility: see pMonth.""" + return self._pmon + + def day(self): + """Return the integer day.""" + return self._day + + @property + def _fday(self): + return _DAYS[self._dayoffset] + + def Day(self): + """Return the full name of the day of the week.""" + return self._fday + + def DayOfWeek(self): + """Compatibility: see Day.""" + return self._fday + + @property + def _aday(self): + return _DAYS_A[self._dayoffset] + + def aDay(self): + 
"""Return the abreviated name of the day of the week.""" + return self._aday + + @property + def _pday(self): + return _DAYS_P[self._dayoffset] + + def pDay(self): + """Return the abreviated (with period) name of the day of the week.""" + return self._pday + + def Day_(self): + """Compatibility: see pDay.""" + return self._pday + + def dow(self): + """Return the integer day of the week, where sunday is 0.""" + return self._dayoffset + + def dow_1(self): + """Return the integer day of the week, where sunday is 1.""" + return self._dayoffset + 1 + + @property + def _pmhour(self): + hr = self._hour + if hr > 12: + return hr - 12 + return hr or 12 + + def h_12(self): + """Return the 12-hour clock representation of the hour.""" + return self._pmhour + + def h_24(self): + """Return the 24-hour clock representation of the hour.""" + return self._hour + + @property + def _pm(self): + hr = self._hour + if hr >= 12: + return 'pm' + return 'am' + + def ampm(self): + """Return the appropriate time modifier (am or pm).""" + return self._pm + + def hour(self): + """Return the 24-hour clock representation of the hour.""" + return self._hour + + def minute(self): + """Return the minute.""" + return self._minute + + def second(self): + """Return the second.""" + return self._second + + def millis(self): + """Return the millisecond since the epoch in GMT.""" + return self._micros // 1000 + + def micros(self): + """Return the microsecond since the epoch in GMT.""" + return self._micros + + def timezoneNaive(self): + """The python datetime module introduces the idea of distinguishing + between timezone aware and timezone naive datetime values. For lossless + conversion to and from datetime.datetime record if we record this + information using True / False. DateTime makes no distinction, when we + don't have any information we return None here. 
+ """ + try: + return self._timezone_naive + except AttributeError: + return None + + def strftime(self, format): + """Format the date/time using the *current timezone representation*.""" + x = _calcDependentSecond2(self._year, self._month, self._day, + self._hour, self._minute, self._second) + ltz = self._calcTimezoneName(x, 0) + tzdiff = _tzoffset(ltz, self._t) - _tzoffset(self._tz, self._t) + zself = self + tzdiff / 86400.0 + microseconds = int((zself._second - zself._nearsec) * 1000000) + unicode_format = False + if isinstance(format, explicit_unicode_type): + format = format.encode('utf-8') + unicode_format = True + ds = datetime(zself._year, zself._month, zself._day, zself._hour, + zself._minute, int(zself._nearsec), + microseconds).strftime(format) + if unicode_format: + return ds.decode('utf-8') + return ds + + # General formats from previous DateTime + def Date(self): + """Return the date string for the object.""" + return "%s/%2.2d/%2.2d" % (self._year, self._month, self._day) + + def Time(self): + """Return the time string for an object to the nearest second.""" + return '%2.2d:%2.2d:%2.2d' % (self._hour, self._minute, self._nearsec) + + def TimeMinutes(self): + """Return the time string for an object not showing seconds.""" + return '%2.2d:%2.2d' % (self._hour, self._minute) + + def AMPM(self): + """Return the time string for an object to the nearest second.""" + return '%2.2d:%2.2d:%2.2d %s' % ( + self._pmhour, self._minute, self._nearsec, self._pm) + + def AMPMMinutes(self): + """Return the time string for an object not showing seconds.""" + return '%2.2d:%2.2d %s' % (self._pmhour, self._minute, self._pm) + + def PreciseTime(self): + """Return the time string for the object.""" + return '%2.2d:%2.2d:%06.3f' % (self._hour, self._minute, self._second) + + def PreciseAMPM(self): + """Return the time string for the object.""" + return '%2.2d:%2.2d:%06.3f %s' % ( + self._pmhour, self._minute, self._second, self._pm) + + def yy(self): + """Return calendar 
year as a 2 digit string.""" + return str(self._year)[-2:] + + def mm(self): + """Return month as a 2 digit string.""" + return '%02d' % self._month + + def dd(self): + """Return day as a 2 digit string.""" + return '%02d' % self._day + + def rfc822(self): + """Return the date in RFC 822 format.""" + tzoffset = _tzoffset2rfc822zone(_tzoffset(self._tz, self._t)) + return '%s, %2.2d %s %d %2.2d:%2.2d:%2.2d %s' % ( + self._aday, self._day, self._amon, self._year, + self._hour, self._minute, self._nearsec, tzoffset) + + # New formats + def fCommon(self): + """Return a string representing the object\'s value + in the format: March 1, 1997 1:45 pm. + """ + return '%s %s, %4.4d %s:%2.2d %s' % ( + self._fmon, self._day, self._year, self._pmhour, + self._minute, self._pm) + + def fCommonZ(self): + """Return a string representing the object\'s value + in the format: March 1, 1997 1:45 pm US/Eastern. + """ + return '%s %s, %4.4d %d:%2.2d %s %s' % ( + self._fmon, self._day, self._year, self._pmhour, + self._minute, self._pm, self._tz) + + def aCommon(self): + """Return a string representing the object\'s value + in the format: Mar 1, 1997 1:45 pm. + """ + return '%s %s, %4.4d %s:%2.2d %s' % ( + self._amon, self._day, self._year, self._pmhour, + self._minute, self._pm) + + def aCommonZ(self): + """Return a string representing the object\'s value + in the format: Mar 1, 1997 1:45 pm US/Eastern. + """ + return '%s %s, %4.4d %d:%2.2d %s %s' % ( + self._amon, self._day, self._year, self._pmhour, + self._minute, self._pm, self._tz) + + def pCommon(self): + """Return a string representing the object\'s value + in the format: Mar. 1, 1997 1:45 pm. + """ + return '%s %s, %4.4d %s:%2.2d %s' % ( + self._pmon, self._day, self._year, self._pmhour, + self._minute, self._pm) + + def pCommonZ(self): + """Return a string representing the object\'s value + in the format: Mar. 1, 1997 1:45 pm US/Eastern. 
+ """ + return '%s %s, %4.4d %d:%2.2d %s %s' % ( + self._pmon, self._day, self._year, self._pmhour, + self._minute, self._pm, self._tz) + + def ISO(self): + """Return the object in ISO standard format. + + Note: this is *not* ISO 8601-format! See the ISO8601 and + HTML4 methods below for ISO 8601-compliant output. + + Dates are output as: YYYY-MM-DD HH:MM:SS + """ + return "%.4d-%.2d-%.2d %.2d:%.2d:%.2d" % ( + self._year, self._month, self._day, + self._hour, self._minute, self._second) + + def ISO8601(self): + """Return the object in ISO 8601-compatible format containing the + date, time with seconds-precision and the time zone identifier. + + See: http://www.w3.org/TR/NOTE-datetime + + Dates are output as: YYYY-MM-DDTHH:MM:SSTZD + T is a literal character. + TZD is Time Zone Designator, format +HH:MM or -HH:MM + + If the instance is timezone naive (it was not specified with a timezone + when it was constructed) then the timezone is ommitted. + + The HTML4 method below offers the same formatting, but converts + to UTC before returning the value and sets the TZD "Z". + """ + if self.timezoneNaive(): + return "%0.4d-%0.2d-%0.2dT%0.2d:%0.2d:%0.2d" % ( + self._year, self._month, self._day, + self._hour, self._minute, self._second) + tzoffset = _tzoffset2iso8601zone(_tzoffset(self._tz, self._t)) + return "%0.4d-%0.2d-%0.2dT%0.2d:%0.2d:%0.2d%s" % ( + self._year, self._month, self._day, + self._hour, self._minute, self._second, tzoffset) + + def HTML4(self): + """Return the object in the format used in the HTML4.0 specification, + one of the standard forms in ISO8601. + + See: http://www.w3.org/TR/NOTE-datetime + + Dates are output as: YYYY-MM-DDTHH:MM:SSZ + T, Z are literal characters. + The time is in UTC. 
+ """ + newdate = self.toZone('UTC') + return "%0.4d-%0.2d-%0.2dT%0.2d:%0.2d:%0.2dZ" % ( + newdate._year, newdate._month, newdate._day, + newdate._hour, newdate._minute, newdate._second) + + def asdatetime(self): + """Return a standard libary datetime.datetime + """ + tznaive = self.timezoneNaive() + if tznaive: + tzinfo = None + else: + tzinfo = _TZINFO[self._tz].tzinfo + second = int(self._second) + microsec = self.micros() % 1000000 + dt = datetime(self._year, self._month, self._day, self._hour, + self._minute, second, microsec, tzinfo) + return dt + + def utcdatetime(self): + """Convert the time to UTC then return a timezone naive datetime object + """ + utc = self.toZone('UTC') + second = int(utc._second) + microsec = utc.micros() % 1000000 + dt = datetime(utc._year, utc._month, utc._day, utc._hour, + utc._minute, second, microsec) + return dt + + def __add__(self, other): + """A DateTime may be added to a number and a number may be + added to a DateTime; two DateTimes cannot be added. + """ + if hasattr(other, '_t'): + raise DateTimeError('Cannot add two DateTimes') + o = float(other) + tz = self._tz + omicros = round(o * 86400000000) + tmicros = self.micros() + omicros + t = tmicros / 1000000.0 + d = (tmicros + long(EPOCH * 1000000)) / 86400000000.0 + s = d - math.floor(d) + ms = t - math.floor(t) + x = _calcDependentSecond(tz, t) + yr, mo, dy, hr, mn, sc = _calcYMDHMS(x, ms) + return self.__class__(yr, mo, dy, hr, mn, sc, self._tz, + t, d, s, None, self.timezoneNaive()) + + __radd__ = __add__ + + def __sub__(self, other): + """Either a DateTime or a number may be subtracted from a + DateTime, however, a DateTime may not be subtracted from + a number. + """ + if hasattr(other, '_d'): + return (self.micros() - other.micros()) / 86400000000.0 + else: + return self.__add__(-(other)) + + def __repr__(self): + """Convert a DateTime to a string that looks like a Python + expression. 
+ """ + return '%s(\'%s\')' % (self.__class__.__name__, str(self)) + + def __str__(self): + """Convert a DateTime to a string.""" + y, m, d = self._year, self._month, self._day + h, mn, s, t = self._hour, self._minute, self._second, self._tz + if s == int(s): + # A whole number of seconds -- suppress milliseconds. + return '%4.4d/%2.2d/%2.2d %2.2d:%2.2d:%2.2d %s' % ( + y, m, d, h, mn, s, t) + else: + # s is already rounded to the nearest microsecond, and + # it's not a whole number of seconds. Be sure to print + # 2 digits before the decimal point. + return '%4.4d/%2.2d/%2.2d %2.2d:%2.2d:%06.6f %s' % ( + y, m, d, h, mn, s, t) + + def __hash__(self): + """Compute a hash value for a DateTime.""" + return int(((self._year % 100 * 12 + self._month) * 31 + + self._day + self.time) * 100) + + def __int__(self): + """Convert to an integer number of seconds since the epoch (gmt).""" + return int(self.micros() // 1000000) + + def __long__(self): + """Convert to a long-int number of seconds since the epoch (gmt).""" + return long(self.micros() // 1000000) + + def __float__(self): + """Convert to floating-point number of seconds since the epoch (gmt). + """ + return self.micros() / 1000000.0 + + @property + def _t(self): + return self._micros / 1000000.0 + + def _parse_iso8601(self, s): + # preserve the previously implied contract + # who know where this could be used... + return self._parse_iso8601_preserving_tznaive(s)[:7] + + def _parse_iso8601_preserving_tznaive(self, s): + try: + return self.__parse_iso8601(s) + except IndexError: + raise SyntaxError( + 'Not an ISO 8601 compliant date string: "%s"' % s) + + def __parse_iso8601(self, s): + """Parse an ISO 8601 compliant date. 
+ + See: http://en.wikipedia.org/wiki/ISO_8601 + """ + month = day = week_day = 1 + year = hour = minute = seconds = hour_off = min_off = 0 + tznaive = True + + iso8601 = iso8601Match(s.strip()) + fields = iso8601 and iso8601.groupdict() or {} + if not iso8601 or fields.get('garbage'): + raise IndexError + + if fields['year']: + year = int(fields['year']) + if fields['month']: + month = int(fields['month']) + if fields['day']: + day = int(fields['day']) + + if fields['year_day']: + d = DateTime('%s-01-01' % year) + int(fields['year_day']) - 1 + month = d.month() + day = d.day() + + if fields['week']: + week = int(fields['week']) + if fields['week_day']: + week_day = int(fields['week_day']) + d = DateTime('%s-01-04' % year) + d = d - (d.dow() + 6) % 7 + week * 7 + week_day - 8 + month = d.month() + day = d.day() + + if fields['hour']: + hour = int(fields['hour']) + + if fields['minute']: + minute = int(fields['minute']) + elif fields['fraction']: + minute = 60.0 * float('0.%s' % fields['fraction']) + seconds, minute = math.modf(minute) + minute = int(minute) + seconds = 60.0 * seconds + # Avoid reprocess when handling seconds, bellow + fields['fraction'] = None + + if fields['second']: + seconds = int(fields['second']) + if fields['fraction']: + seconds = seconds + float('0.%s' % fields['fraction']) + elif fields['fraction']: + seconds = 60.0 * float('0.%s' % fields['fraction']) + + if fields['hour_off']: + hour_off = int(fields['hour_off']) + if fields['signal'] == '-': + hour_off *= -1 + + if fields['min_off']: + min_off = int(fields['min_off']) + + if fields['signal'] or fields['Z']: + tznaive = False + else: + tznaive = True + + # Differ from the specification here. To preserve backwards + # compatibility assume a default timezone == UTC. + tz = 'GMT%+03d%02d' % (hour_off, min_off) + + return year, month, day, hour, minute, seconds, tz, tznaive + + def JulianDay(self): + """Return the Julian day. 
+ + See: http://www.tondering.dk/claus/cal/node3.html#sec-calcjd + """ + a = (14 - self._month) // 12 + y = self._year + 4800 - a + m = self._month + (12 * a) - 3 + return (self._day + (153 * m + 2) // 5 + 365 * y + + y // 4 - y // 100 + y // 400 - 32045) + + def week(self): + """Return the week number according to ISO. + + See: http://www.tondering.dk/claus/cal/node6.html + """ + J = self.JulianDay() + d4 = (J + 31741 - (J % 7)) % 146097 % 36524 % 1461 + L = d4 // 1460 + d1 = ((d4 - L) % 365) + L + return d1 // 7 + 1 + + def encode(self, out): + """Encode value for XML-RPC.""" + out.write('') + out.write(self.ISO8601()) + out.write('\n') + + +# Provide the _dt_reconstructor function here, in case something +# accidentally creates a reference to this function + +orig_reconstructor = copy_reg._reconstructor + + +def _dt_reconstructor(cls, base, state): + if cls is DateTime: + return cls(state) + return orig_reconstructor(cls, base, state) diff --git a/thesisenv/lib/python3.6/site-packages/DateTime/DateTime.txt b/thesisenv/lib/python3.6/site-packages/DateTime/DateTime.txt new file mode 100644 index 0000000..5467047 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/DateTime/DateTime.txt @@ -0,0 +1,785 @@ +The DateTime package +==================== + +Encapsulation of date/time values. + + +Function Timezones() +-------------------- + +Returns the list of recognized timezone names: + + >>> from DateTime import Timezones + >>> zones = set(Timezones()) + +Almost all of the standard pytz timezones are included, with the exception +of some commonly-used but ambiguous abbreviations, where historical Zope +usage conflicts with the name used by pytz: + + >>> import pytz + >>> [x for x in pytz.all_timezones if x not in zones] + ['CET', 'EET', 'EST', 'MET', 'MST', 'WET'] + +Class DateTime +-------------- + +DateTime objects represent instants in time and provide interfaces for +controlling its representation without affecting the absolute value of +the object. 
+
+DateTime objects may be created from a wide variety of string or
+numeric data, or may be computed from other DateTime objects.
+DateTimes support the ability to convert their representations to many
+major timezones, as well as the ability to create a DateTime object
+in the context of a given timezone.
+
+DateTime objects provide partial numerical behavior:
+
+* Two date-time objects can be subtracted to obtain a time, in days
+  between the two.
+
+* A date-time object and a positive or negative number may be added to
+  obtain a new date-time object that is the given number of days later
+  than the input date-time object.
+
+* A positive or negative number and a date-time object may be added to
+  obtain a new date-time object that is the given number of days later
+  than the input date-time object.
+
+* A positive or negative number may be subtracted from a date-time
+  object to obtain a new date-time object that is the given number of
+  days earlier than the input date-time object.
+
+DateTime objects may be converted to integer, long, or float numbers
+of days since January 1, 1901, using the standard int, long, and float
+functions (Compatibility Note: int, long and float return the number
+of days since 1901 in GMT rather than local machine timezone).
+DateTime objects also provide access to their value in a float format
+usable with the python time module, provided that the value of the
+object falls in the range of the epoch-based time module.
+
+A DateTime object should be considered immutable; all conversion and numeric
+operations return a new DateTime object rather than modify the current object.
+
+A DateTime object always maintains its value as an absolute UTC time,
+and is represented in the context of some timezone based on the
+arguments used to create the object. A DateTime object's methods
+return values based on the timezone context.
+
+Note that in all cases the local machine timezone is used for
+representation if no timezone is specified.
+
+Constructor for DateTime
+------------------------
+
+DateTime() returns a new date-time object. DateTimes may be created
+with from zero to seven arguments:
+
+* If the function is called with no arguments, then the current date/
+  time is returned, represented in the timezone of the local machine.
+
+* If the function is invoked with a single string argument which is a
+  recognized timezone name, an object representing the current time is
+  returned, represented in the specified timezone.
+
+* If the function is invoked with a single string argument
+  representing a valid date/time, an object representing that date/
+  time will be returned.
+
+  As a general rule, any date-time representation that is recognized
+  and unambiguous to a resident of North America is acceptable. (The
+  reason for this qualification is that in North America, a date like:
+  2/1/1994 is interpreted as February 1, 1994, while in some parts of
+  the world, it is interpreted as January 2, 1994.) A date/ time
+  string consists of two components, a date component and an optional
+  time component, separated by one or more spaces. If the time
+  component is omitted, 12:00am is assumed.
+
+  Any recognized timezone name specified as the final element of the
+  date/time string will be used for computing the date/time value.
+  (If you create a DateTime with the string,
+  "Mar 9, 1997 1:45pm US/Pacific", the value will essentially be the
+  same as if you had captured time.time() at the specified date and
+  time on a machine in that timezone). If no timezone is passed, then
+  the timezone configured on the local machine will be used, **except**
+  that if the date format matches ISO 8601 ('YYYY-MM-DD'), the instance
+  will use UTC / GMT+0 as the timezone.
+
+  o Returns current date/time, represented in US/Eastern:
+
+    >>> from DateTime import DateTime
+    >>> e = DateTime('US/Eastern')
+    >>> e.timezone()
+    'US/Eastern'
+
+  o Returns specified time, represented in local machine zone:
+
+    >>> x = DateTime('1997/3/9 1:45pm')
+    >>> x.parts() # doctest: +ELLIPSIS
+    (1997, 3, 9, 13, 45, ...)
+
+  o Specified time in local machine zone, verbose format:
+
+    >>> y = DateTime('Mar 9, 1997 13:45:00')
+    >>> y.parts() # doctest: +ELLIPSIS
+    (1997, 3, 9, 13, 45, ...)
+    >>> y == x
+    True
+
+  o Specified time in UTC via ISO 8601 rule:
+
+    >>> z = DateTime('2014-03-24')
+    >>> z.parts() # doctest: +ELLIPSIS
+    (2014, 3, 24, 0, 0, ...)
+    >>> z.timezone()
+    'GMT+0'
+
+  The date component consists of year, month, and day values. The
+  year value must be a one-, two-, or four-digit integer. If a one-
+  or two-digit year is used, the year is assumed to be in the
+  twentieth century. The month may be an integer, from 1 to 12, a month
+  name, or a month abbreviation, where a period may optionally follow
+  the abbreviation. The day must be an integer from 1 to the number of
+  days in the month. The year, month, and day values may be separated
+  by periods, hyphens, forward slashes, or spaces. Extra spaces are
+  permitted around the delimiters. Year, month, and day values may be
+  given in any order as long as it is possible to distinguish the
+  components. If all three components are numbers that are less than
+  13, then a month-day-year ordering is assumed.
+
+  The time component consists of hour, minute, and second values
+  separated by colons. The hour value must be an integer between 0
+  and 23 inclusively. The minute value must be an integer between 0
+  and 59 inclusively. The second value may be an integer value
+  between 0 and 59.999 inclusively. The second value or both the
+  minute and second values may be omitted. The time may be followed
+  by am or pm in upper or lower case, in which case a 12-hour clock is
+  assumed.
+ +* If the DateTime function is invoked with a single Numeric argument, + the number is assumed to be either a floating point value such as + that returned by time.time() , or a number of days after January 1, + 1901 00:00:00 UTC. + + A DateTime object is returned that represents either the gmt value + of the time.time() float represented in the local machine's + timezone, or that number of days after January 1, 1901. Note that + the number of days after 1901 need to be expressed from the + viewpoint of the local machine's timezone. A negative argument will + yield a date-time value before 1901. + +* If the function is invoked with two numeric arguments, then the + first is taken to be an integer year and the second argument is + taken to be an offset in days from the beginning of the year, in the + context of the local machine timezone. The date-time value returned + is the given offset number of days from the beginning of the given + year, represented in the timezone of the local machine. The offset + may be positive or negative. Two-digit years are assumed to be in + the twentieth century. + +* If the function is invoked with two arguments, the first a float + representing a number of seconds past the epoch in gmt (such as + those returned by time.time()) and the second a string naming a + recognized timezone, a DateTime with a value of that gmt time will + be returned, represented in the given timezone. + + >>> import time + >>> t = time.time() + + Time t represented as US/Eastern: + + >>> now_east = DateTime(t, 'US/Eastern') + + Time t represented as US/Pacific: + + >>> now_west = DateTime(t, 'US/Pacific') + + Only their representations are different: + + >>> now_east.equalTo(now_west) + True + +* If the function is invoked with three or more numeric arguments, + then the first is taken to be an integer year, the second is taken + to be an integer month, and the third is taken to be an integer day. 
+ If the combination of values is not valid, then a DateTimeError is + raised. One- or two-digit years up to 69 are assumed to be in the + 21st century, whereas values 70-99 are assumed to be 20th century. + The fourth, fifth, and sixth arguments are floating point, positive + or negative offsets in units of hours, minutes, and days, and + default to zero if not given. An optional string may be given as + the final argument to indicate timezone (the effect of this is as if + you had taken the value of time.time() at that time on a machine in + the specified timezone). + +If a string argument passed to the DateTime constructor cannot be +parsed, it will raise SyntaxError. Invalid date, time, or +timezone components will raise a DateTimeError. + +The module function Timezones() will return a list of the timezones +recognized by the DateTime module. Recognition of timezone names is +case-insensitive. + +Instance Methods for DateTime (IDateTime interface) +--------------------------------------------------- + +Conversion and comparison methods +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +* ``timeTime()`` returns the date/time as a floating-point number in + UTC, in the format used by the python time module. Note that it is + possible to create date /time values with DateTime that have no + meaningful value to the time module, and in such cases a + DateTimeError is raised. A DateTime object's value must generally + be between Jan 1, 1970 (or your local machine epoch) and Jan 2038 to + produce a valid time.time() style value. 
+ + >>> dt = DateTime('Mar 9, 1997 13:45:00 US/Eastern') + >>> dt.timeTime() + 857933100.0 + + >>> DateTime('2040/01/01 UTC').timeTime() + 2208988800.0 + + >>> DateTime('1900/01/01 UTC').timeTime() + -2208988800.0 + +* ``toZone(z)`` returns a DateTime with the value as the current + object, represented in the indicated timezone: + + >>> dt.toZone('UTC') + DateTime('1997/03/09 18:45:00 UTC') + + >>> dt.toZone('UTC').equalTo(dt) + True + +* ``isFuture()`` returns true if this object represents a date/time + later than the time of the call: + + >>> dt.isFuture() + False + >>> DateTime('Jan 1 3000').isFuture() # not time-machine safe! + True + +* ``isPast()`` returns true if this object represents a date/time + earlier than the time of the call: + + >>> dt.isPast() + True + >>> DateTime('Jan 1 3000').isPast() # not time-machine safe! + False + +* ``isCurrentYear()`` returns true if this object represents a + date/time that falls within the current year, in the context of this + object's timezone representation: + + >>> dt.isCurrentYear() + False + >>> DateTime().isCurrentYear() + True + +* ``isCurrentMonth()`` returns true if this object represents a + date/time that falls within the current month, in the context of + this object's timezone representation: + + >>> dt.isCurrentMonth() + False + >>> DateTime().isCurrentMonth() + True + +* ``isCurrentDay()`` returns true if this object represents a + date/time that falls within the current day, in the context of this + object's timezone representation: + + >>> dt.isCurrentDay() + False + >>> DateTime().isCurrentDay() + True + +* ``isCurrentHour()`` returns true if this object represents a + date/time that falls within the current hour, in the context of this + object's timezone representation: + + >>> dt.isCurrentHour() + False + + >>> DateTime().isCurrentHour() + True + +* ``isCurrentMinute()`` returns true if this object represents a + date/time that falls within the current minute, in the context of + this object's 
timezone representation:
+
+  >>> dt.isCurrentMinute()
+  False
+  >>> DateTime().isCurrentMinute()
+  True
+
+* ``isLeapYear()`` returns true if the current year (in the context of
+  the object's timezone) is a leap year:
+
+  >>> dt.isLeapYear()
+  False
+  >>> DateTime('Mar 8 2004').isLeapYear()
+  True
+
+* ``earliestTime()`` returns a new DateTime object that represents the
+  earliest possible time (in whole seconds) that still falls within
+  the current object's day, in the object's timezone context:
+
+  >>> dt.earliestTime()
+  DateTime('1997/03/09 00:00:00 US/Eastern')
+
+* ``latestTime()`` returns a new DateTime object that represents the
+  latest possible time (in whole seconds) that still falls within the
+  current object's day, in the object's timezone context
+
+  >>> dt.latestTime()
+  DateTime('1997/03/09 23:59:59 US/Eastern')
+
+Component access
+~~~~~~~~~~~~~~~~
+
+* ``parts()`` returns a tuple containing the calendar year, month,
+  day, hour, minute second and timezone of the object
+
+  >>> dt.parts() # doctest: +ELLIPSIS
+  (1997, 3, 9, 13, 45, ... 'US/Eastern')
+
+* ``timezone()`` returns the timezone in which the object is represented:
+
+  >>> dt.timezone() in Timezones()
+  True
+
+* ``tzoffset()`` returns the timezone offset for the object's timezone:
+
+  >>> dt.tzoffset()
+  -18000
+
+* ``year()`` returns the calendar year of the object:
+
+  >>> dt.year()
+  1997
+
+* ``month()`` returns the month of the object as an integer:
+
+  >>> dt.month()
+  3
+
+* ``Month()`` returns the full month name:
+
+  >>> dt.Month()
+  'March'
+
+* ``aMonth()`` returns the abbreviated month name:
+
+  >>> dt.aMonth()
+  'Mar'
+
+* ``pMonth()`` returns the abbreviated (with period) month name:
+
+  >>> dt.pMonth()
+  'Mar.'
+ +* ``day()`` returns the integer day: + + >>> dt.day() + 9 + +* ``Day()`` returns the full name of the day of the week: + + >>> dt.Day() + 'Sunday' + +* ``dayOfYear()`` returns the day of the year, in context of the + timezone representation of the object: + + >>> dt.dayOfYear() + 68 + +* ``aDay()`` returns the abreviated name of the day of the week: + + >>> dt.aDay() + 'Sun' + +* ``pDay()`` returns the abreviated (with period) name of the day of + the week: + + >>> dt.pDay() + 'Sun.' + +* ``dow()`` returns the integer day of the week, where Sunday is 0: + + >>> dt.dow() + 0 + +* ``dow_1()`` returns the integer day of the week, where sunday is 1: + + >>> dt.dow_1() + 1 + +* ``h_12()`` returns the 12-hour clock representation of the hour: + + >>> dt.h_12() + 1 + +* ``h_24()`` returns the 24-hour clock representation of the hour: + + >>> dt.h_24() + 13 + +* ``ampm()`` returns the appropriate time modifier (am or pm): + + >>> dt.ampm() + 'pm' + +* ``hour()`` returns the 24-hour clock representation of the hour: + + >>> dt.hour() + 13 + +* ``minute()`` returns the minute: + + >>> dt.minute() + 45 + +* ``second()`` returns the second: + + >>> dt.second() == 0 + True + +* ``millis()`` returns the milliseconds since the epoch in GMT. + + >>> dt.millis() == 857933100000 + True + +strftime() +~~~~~~~~~~ + +See ``tests/test_datetime.py``. 
+ +General formats from previous DateTime +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +* ``Date()`` return the date string for the object: + + >>> dt.Date() + '1997/03/09' + +* ``Time()`` returns the time string for an object to the nearest + second: + + >>> dt.Time() + '13:45:00' + +* ``TimeMinutes()`` returns the time string for an object not showing + seconds: + + >>> dt.TimeMinutes() + '13:45' + +* ``AMPM()`` returns the time string for an object to the nearest second: + + >>> dt.AMPM() + '01:45:00 pm' + +* ``AMPMMinutes()`` returns the time string for an object not showing + seconds: + + >>> dt.AMPMMinutes() + '01:45 pm' + +* ``PreciseTime()`` returns the time string for the object: + + >>> dt.PreciseTime() + '13:45:00.000' + +* ``PreciseAMPM()`` returns the time string for the object: + + >>> dt.PreciseAMPM() + '01:45:00.000 pm' + +* ``yy()`` returns the calendar year as a 2 digit string + + >>> dt.yy() + '97' + +* ``mm()`` returns the month as a 2 digit string + + >>> dt.mm() + '03' + +* ``dd()`` returns the day as a 2 digit string: + + >>> dt.dd() + '09' + +* ``rfc822()`` returns the date in RFC 822 format: + + >>> dt.rfc822() + 'Sun, 09 Mar 1997 13:45:00 -0500' + +New formats +~~~~~~~~~~~ + +* ``fCommon()`` returns a string representing the object's value in + the format: March 9, 1997 1:45 pm: + + >>> dt.fCommon() + 'March 9, 1997 1:45 pm' + +* ``fCommonZ()`` returns a string representing the object's value in + the format: March 9, 1997 1:45 pm US/Eastern: + + >>> dt.fCommonZ() + 'March 9, 1997 1:45 pm US/Eastern' + +* ``aCommon()`` returns a string representing the object's value in + the format: Mar 9, 1997 1:45 pm: + + >>> dt.aCommon() + 'Mar 9, 1997 1:45 pm' + +* ``aCommonZ()`` return a string representing the object's value in + the format: Mar 9, 1997 1:45 pm US/Eastern: + + >>> dt.aCommonZ() + 'Mar 9, 1997 1:45 pm US/Eastern' + +* ``pCommon()`` returns a string representing the object's value in + the format Mar. 
9, 1997 1:45 pm:
+
+  >>> dt.pCommon()
+  'Mar. 9, 1997 1:45 pm'
+
+* ``pCommonZ()`` returns a string representing the object's value in
+  the format: Mar. 9, 1997 1:45 pm US/Eastern:
+
+  >>> dt.pCommonZ()
+  'Mar. 9, 1997 1:45 pm US/Eastern'
+
+* ``ISO()`` returns a string with the date/time in ISO format. Note:
+  this is not ISO 8601-format! See the ISO8601 and HTML4 methods below
+  for ISO 8601-compliant output. Dates are output as: YYYY-MM-DD HH:MM:SS
+
+  >>> dt.ISO()
+  '1997-03-09 13:45:00'
+
+* ``ISO8601()`` returns the object in ISO 8601-compatible format
+  containing the date, time with seconds-precision and the time zone
+  identifier - see http://www.w3.org/TR/NOTE-datetime. Dates are
+  output as: YYYY-MM-DDTHH:MM:SSTZD (T is a literal character, TZD is
+  Time Zone Designator, format +HH:MM or -HH:MM).
+
+  The ``HTML4()`` method below offers the same formatting, but
+  converts to UTC before returning the value and sets the TZD to "Z"
+
+  >>> dt.ISO8601()
+  '1997-03-09T13:45:00-05:00'
+
+
+* ``HTML4()`` returns the object in the format used in the HTML4.0
+  specification, one of the standard forms in ISO8601. See
+  http://www.w3.org/TR/NOTE-datetime.
Dates are output as: + YYYY-MM-DDTHH:MM:SSZ (T, Z are literal characters, the time is in + UTC.): + + >>> dt.HTML4() + '1997-03-09T18:45:00Z' + +* ``JulianDay()`` returns the Julian day according to + http://www.tondering.dk/claus/cal/node3.html#sec-calcjd + + >>> dt.JulianDay() + 2450517 + +* ``week()`` returns the week number according to ISO + see http://www.tondering.dk/claus/cal/node6.html#SECTION00670000000000000000 + + >>> dt.week() + 10 + +Deprecated API +~~~~~~~~~~~~~~ + +* DayOfWeek(): see Day() + +* Day_(): see pDay() + +* Mon(): see aMonth() + +* Mon_(): see pMonth + +General Services Provided by DateTime +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +DateTimes can be repr()'ed; the result will be a string indicating how +to make a DateTime object like this: + + >>> repr(dt) + "DateTime('1997/03/09 13:45:00 US/Eastern')" + +When we convert them into a string, we get a nicer string that could +actually be shown to a user: + + >>> str(dt) + '1997/03/09 13:45:00 US/Eastern' + +The hash value of a DateTime is based on the date and time and is +equal for different representations of the DateTime: + + >>> hash(dt) + 3618678 + >>> hash(dt.toZone('UTC')) + 3618678 + +DateTime objects can be compared to other DateTime objects OR floating +point numbers such as the ones which are returned by the python time +module by using the equalTo method. 
Using this API, True is returned if the +object represents a date/time equal to the specified DateTime or time module +style time: + + >>> dt.equalTo(dt) + True + >>> dt.equalTo(dt.toZone('UTC')) + True + >>> dt.equalTo(dt.timeTime()) + True + >>> dt.equalTo(DateTime()) + False + +Same goes for inequalities: + + >>> dt.notEqualTo(dt) + False + >>> dt.notEqualTo(dt.toZone('UTC')) + False + >>> dt.notEqualTo(dt.timeTime()) + False + >>> dt.notEqualTo(DateTime()) + True + +Normal equality operations only work with datetime objects and take the +timezone setting into account: + + >>> dt == dt + True + >>> dt == dt.toZone('UTC') + False + >>> dt == DateTime() + False + + >>> dt != dt + False + >>> dt != dt.toZone('UTC') + True + >>> dt != DateTime() + True + +But the other comparison operations compare the referenced moment in time and +not the representation itself: + + >>> dt > dt + False + >>> DateTime() > dt + True + >>> dt > DateTime().timeTime() + False + >>> DateTime().timeTime() > dt + True + + >>> dt.greaterThan(dt) + False + >>> DateTime().greaterThan(dt) + True + >>> dt.greaterThan(DateTime().timeTime()) + False + + >>> dt >= dt + True + >>> DateTime() >= dt + True + >>> dt >= DateTime().timeTime() + False + >>> DateTime().timeTime() >= dt + True + + >>> dt.greaterThanEqualTo(dt) + True + >>> DateTime().greaterThanEqualTo(dt) + True + >>> dt.greaterThanEqualTo(DateTime().timeTime()) + False + + >>> dt < dt + False + >>> DateTime() < dt + False + >>> dt < DateTime().timeTime() + True + >>> DateTime().timeTime() < dt + False + + >>> dt.lessThan(dt) + False + >>> DateTime().lessThan(dt) + False + >>> dt.lessThan(DateTime().timeTime()) + True + + >>> dt <= dt + True + >>> DateTime() <= dt + False + >>> dt <= DateTime().timeTime() + True + >>> DateTime().timeTime() <= dt + False + + >>> dt.lessThanEqualTo(dt) + True + >>> DateTime().lessThanEqualTo(dt) + False + >>> dt.lessThanEqualTo(DateTime().timeTime()) + True + +Numeric Services Provided by DateTime 
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +A DateTime may be added to a number and a number may be added to a +DateTime: + + >>> dt + 5 + DateTime('1997/03/14 13:45:00 US/Eastern') + >>> 5 + dt + DateTime('1997/03/14 13:45:00 US/Eastern') + +Two DateTimes cannot be added: + + >>> from DateTime.interfaces import DateTimeError + >>> try: + ... dt + dt + ... print('fail') + ... except DateTimeError: + ... print('ok') + ok + +Either a DateTime or a number may be subtracted from a DateTime, +however, a DateTime may not be subtracted from a number: + + >>> DateTime('1997/03/10 13:45 US/Eastern') - dt + 1.0 + >>> dt - 1 + DateTime('1997/03/08 13:45:00 US/Eastern') + >>> 1 - dt + Traceback (most recent call last): + ... + TypeError: unsupported operand type(s) for -: 'int' and 'DateTime' + +DateTimes can also be converted to integers (number of seconds since +the epoch) and floats: + + >>> int(dt) + 857933100 + >>> float(dt) + 857933100.0 diff --git a/thesisenv/lib/python3.6/site-packages/DateTime/__init__.py b/thesisenv/lib/python3.6/site-packages/DateTime/__init__.py new file mode 100644 index 0000000..b4181ad --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/DateTime/__init__.py @@ -0,0 +1,17 @@ +############################################################################## +# +# Copyright (c) 2002 Zope Foundation and Contributors. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## + +from .DateTime import DateTime +from .DateTime import Timezones + +__all__ = ('DateTime', 'Timezones') diff --git a/thesisenv/lib/python3.6/site-packages/DateTime/interfaces.py b/thesisenv/lib/python3.6/site-packages/DateTime/interfaces.py new file mode 100644 index 0000000..5f29cff --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/DateTime/interfaces.py @@ -0,0 +1,375 @@ +############################################################################## +# +# Copyright (c) 2005 Zope Foundation and Contributors. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## +from zope.interface import Interface + + +class DateTimeError(Exception): + pass + + +class SyntaxError(DateTimeError): + pass + + +class DateError(DateTimeError): + pass + + +class TimeError(DateTimeError): + pass + + +class IDateTime(Interface): + # Conversion and comparison methods + + #TODO determine whether this method really is part of the public API + def localZone(ltm=None): + '''Returns the time zone on the given date. The time zone + can change according to daylight savings.''' + + def timeTime(): + """Return the date/time as a floating-point number in UTC, in + the format used by the python time module. 
Note that it is + possible to create date/time values with DateTime that have no + meaningful value to the time module.""" + + def toZone(z): + """Return a DateTime with the value as the current object, + represented in the indicated timezone.""" + + def isFuture(): + """Return true if this object represents a date/time later + than the time of the call""" + + def isPast(): + """Return true if this object represents a date/time earlier + than the time of the call""" + + def isCurrentYear(): + """Return true if this object represents a date/time that + falls within the current year, in the context of this + object's timezone representation""" + + def isCurrentMonth(): + """Return true if this object represents a date/time that + falls within the current month, in the context of this + object's timezone representation""" + + def isCurrentDay(): + """Return true if this object represents a date/time that + falls within the current day, in the context of this object's + timezone representation""" + + def isCurrentHour(): + """Return true if this object represents a date/time that + falls within the current hour, in the context of this object's + timezone representation""" + + def isCurrentMinute(): + """Return true if this object represents a date/time that + falls within the current minute, in the context of this + object's timezone representation""" + + def isLeapYear(): + """Return true if the current year (in the context of the + object's timezone) is a leap year""" + + def earliestTime(): + """Return a new DateTime object that represents the earliest + possible time (in whole seconds) that still falls within the + current object's day, in the object's timezone context""" + + def latestTime(): + """Return a new DateTime object that represents the latest + possible time (in whole seconds) that still falls within the + current object's day, in the object's timezone context""" + + def greaterThan(t): + """Compare this DateTime object to another DateTime object OR + a 
floating point number such as that which is returned by the + python time module. Returns true if the object represents a + date/time greater than the specified DateTime or time module + style time. Revised to give more correct results through + comparison of long integer milliseconds.""" + + __gt__ = greaterThan + + def greaterThanEqualTo(t): + """Compare this DateTime object to another DateTime object OR + a floating point number such as that which is returned by the + python time module. Returns true if the object represents a + date/time greater than or equal to the specified DateTime or + time module style time. Revised to give more correct results + through comparison of long integer milliseconds.""" + + __ge__ = greaterThanEqualTo + + def equalTo(t): + """Compare this DateTime object to another DateTime object OR + a floating point number such as that which is returned by the + python time module. Returns true if the object represents a + date/time equal to the specified DateTime or time module style + time. Revised to give more correct results through comparison + of long integer milliseconds.""" + + __eq__ = equalTo + + def notEqualTo(t): + """Compare this DateTime object to another DateTime object OR + a floating point number such as that which is returned by the + python time module. Returns true if the object represents a + date/time not equal to the specified DateTime or time module + style time. Revised to give more correct results through + comparison of long integer milliseconds.""" + + __ne__ = notEqualTo + + def lessThan(t): + """Compare this DateTime object to another DateTime object OR + a floating point number such as that which is returned by the + python time module. Returns true if the object represents a + date/time less than the specified DateTime or time module + style time. 
Revised to give more correct results through + comparison of long integer milliseconds.""" + + __lt__ = lessThan + + def lessThanEqualTo(t): + """Compare this DateTime object to another DateTime object OR + a floating point number such as that which is returned by the + python time module. Returns true if the object represents a + date/time less than or equal to the specified DateTime or time + module style time. Revised to give more correct results + through comparison of long integer milliseconds.""" + + __le__ = lessThanEqualTo + + # Component access + + def parts(): + """Return a tuple containing the calendar year, month, day, + hour, minute second and timezone of the object""" + + def timezone(): + """Return the timezone in which the object is represented.""" + + def tzoffset(): + """Return the timezone offset for the objects timezone.""" + + def year(): + """Return the calendar year of the object""" + + def month(): + """Return the month of the object as an integer""" + + def Month(): + """Return the full month name""" + + def aMonth(): + """Return the abreviated month name.""" + + def Mon(): + """Compatibility: see aMonth""" + + def pMonth(): + """Return the abreviated (with period) month name.""" + + def Mon_(): + """Compatibility: see pMonth""" + + def day(): + """Return the integer day""" + + def Day(): + """Return the full name of the day of the week""" + + def DayOfWeek(): + """Compatibility: see Day""" + + def dayOfYear(): + """Return the day of the year, in context of the timezone + representation of the object""" + + def aDay(): + """Return the abreviated name of the day of the week""" + + def pDay(): + """Return the abreviated (with period) name of the day of the + week""" + + def Day_(): + """Compatibility: see pDay""" + + def dow(): + """Return the integer day of the week, where sunday is 0""" + + def dow_1(): + """Return the integer day of the week, where sunday is 1""" + + def h_12(): + """Return the 12-hour clock representation of the hour""" + 
+ def h_24(): + """Return the 24-hour clock representation of the hour""" + + def ampm(): + """Return the appropriate time modifier (am or pm)""" + + def hour(): + """Return the 24-hour clock representation of the hour""" + + def minute(): + """Return the minute""" + + def second(): + """Return the second""" + + def millis(): + """Return the millisecond since the epoch in GMT.""" + + def strftime(format): + """Format the date/time using the *current timezone representation*.""" + + # General formats from previous DateTime + + def Date(): + """Return the date string for the object.""" + + def Time(): + """Return the time string for an object to the nearest second.""" + + def TimeMinutes(): + """Return the time string for an object not showing seconds.""" + + def AMPM(): + """Return the time string for an object to the nearest second.""" + + def AMPMMinutes(): + """Return the time string for an object not showing seconds.""" + + def PreciseTime(): + """Return the time string for the object.""" + + def PreciseAMPM(): + """Return the time string for the object.""" + + def yy(): + """Return calendar year as a 2 digit string""" + + def mm(): + """Return month as a 2 digit string""" + + def dd(): + """Return day as a 2 digit string""" + + def rfc822(): + """Return the date in RFC 822 format""" + + # New formats + + def fCommon(): + """Return a string representing the object's value in the + format: March 1, 1997 1:45 pm""" + + def fCommonZ(): + """Return a string representing the object's value in the + format: March 1, 1997 1:45 pm US/Eastern""" + + def aCommon(): + """Return a string representing the object's value in the + format: Mar 1, 1997 1:45 pm""" + + def aCommonZ(): + """Return a string representing the object's value in the + format: Mar 1, 1997 1:45 pm US/Eastern""" + + def pCommon(): + """Return a string representing the object's value in the + format: Mar. 
1, 1997 1:45 pm""" + + def pCommonZ(): + """Return a string representing the object's value + in the format: Mar. 1, 1997 1:45 pm US/Eastern""" + + def ISO(): + """Return the object in ISO standard format. Note: this is + *not* ISO 8601-format! See the ISO8601 and HTML4 methods below + for ISO 8601-compliant output + + Dates are output as: YYYY-MM-DD HH:MM:SS + """ + + def ISO8601(): + """Return the object in ISO 8601-compatible format containing + the date, time with seconds-precision and the time zone + identifier - see http://www.w3.org/TR/NOTE-datetime + + Dates are output as: YYYY-MM-DDTHH:MM:SSTZD + T is a literal character. + TZD is Time Zone Designator, format +HH:MM or -HH:MM + + The HTML4 method below offers the same formatting, but + converts to UTC before returning the value and sets the TZD"Z" + """ + + def HTML4(): + """Return the object in the format used in the HTML4.0 + specification, one of the standard forms in ISO8601. See + http://www.w3.org/TR/NOTE-datetime + + Dates are output as: YYYY-MM-DDTHH:MM:SSZ + T, Z are literal characters. + The time is in UTC. 
+ """ + + def JulianDay(): + """Return the Julian day according to + http://www.tondering.dk/claus/cal/node3.html#sec-calcjd + """ + + def week(): + """Return the week number according to ISO + see http://www.tondering.dk/claus/cal/node6.html#SECTION00670000000000000000 + """ + + # Python operator and conversion API + + def __add__(other): + """A DateTime may be added to a number and a number may be + added to a DateTime; two DateTimes cannot be added.""" + + __radd__ = __add__ + + def __sub__(other): + """Either a DateTime or a number may be subtracted from a + DateTime, however, a DateTime may not be subtracted from a + number.""" + + def __repr__(): + """Convert a DateTime to a string that looks like a Python + expression.""" + + def __str__(): + """Convert a DateTime to a string.""" + + def __hash__(): + """Compute a hash value for a DateTime""" + + def __int__(): + """Convert to an integer number of seconds since the epoch (gmt)""" + + def __long__(): + """Convert to a long-int number of seconds since the epoch (gmt)""" + + def __float__(): + """Convert to floating-point number of seconds since the epoch (gmt)""" diff --git a/thesisenv/lib/python3.6/site-packages/DateTime/pytz.txt b/thesisenv/lib/python3.6/site-packages/DateTime/pytz.txt new file mode 100644 index 0000000..33de811 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/DateTime/pytz.txt @@ -0,0 +1,192 @@ +Pytz Support +============ + +Allows the pytz package to be used for time zone information. The +advantage of using pytz is that it has a more complete and up to date +time zone and daylight savings time database. + +Usage +----- +You don't have to do anything special to make it work. + + >>> from DateTime import DateTime, Timezones + >>> d = DateTime('March 11, 2007 US/Eastern') + +Daylight Savings +---------------- +In 2007 daylight savings time in the US was changed. 
The Energy Policy +Act of 2005 mandates that DST will start on the second Sunday in March +and end on the first Sunday in November. + +In 2007, the start and stop dates are March 11 and November 4, +respectively. These dates are different from previous DST start and +stop dates. In 2006, the dates were the first Sunday in April (April +2, 2006) and the last Sunday in October (October 29, 2006). + +Let's make sure that DateTime can deal with this, since the primary +motivation to use pytz for time zone information is the fact that it +is kept up to date with daylight savings changes. + + >>> DateTime('March 11, 2007 US/Eastern').tzoffset() + -18000 + >>> DateTime('March 12, 2007 US/Eastern').tzoffset() + -14400 + >>> DateTime('November 4, 2007 US/Eastern').tzoffset() + -14400 + >>> DateTime('November 5, 2007 US/Eastern').tzoffset() + -18000 + +Let's compare this to 2006. + + >>> DateTime('April 2, 2006 US/Eastern').tzoffset() + -18000 + >>> DateTime('April 3, 2006 US/Eastern').tzoffset() + -14400 + >>> DateTime('October 29, 2006 US/Eastern').tzoffset() + -14400 + >>> DateTime('October 30, 2006 US/Eastern').tzoffset() + -18000 + +Time Zones +--------- +DateTime can use pytz's large database of time zones. Here are some +examples: + + >>> d = DateTime('Pacific/Kwajalein') + >>> d = DateTime('America/Shiprock') + >>> d = DateTime('Africa/Ouagadougou') + +Of course pytz doesn't know about everything. + + >>> from DateTime.interfaces import SyntaxError + >>> try: + ... d = DateTime('July 21, 1969 Moon/Eastern') + ... print('fail') + ... except SyntaxError: + ... print('ok') + ok + +You can still use zone names that DateTime defines that aren't part of +the pytz database. + + >>> d = DateTime('eet') + >>> d = DateTime('iceland') + +These time zones use DateTimes database. So it's preferable to use the +official time zone name. + +One trickiness is that DateTime supports some zone name +abbreviations. 
Some of these map to pytz names, so these abbreviations +will give you time zone date from pytz. Notable among abbreviations +that work this way are 'est', 'cst', 'mst', and 'pst'. + +Let's verify that 'est' picks up the 2007 daylight savings time changes. + + >>> DateTime('March 11, 2007 est').tzoffset() + -18000 + >>> DateTime('March 12, 2007 est').tzoffset() + -14400 + >>> DateTime('November 4, 2007 est').tzoffset() + -14400 + >>> DateTime('November 5, 2007 est').tzoffset() + -18000 + +You can get a list of time zones supported by calling the Timezones() function. + + >>> Timezones() #doctest: +ELLIPSIS + ['Africa/Abidjan', 'Africa/Accra', 'Africa/Addis_Ababa', ...] + +Note that you can mess with this list without hurting things. + + >>> t = Timezones() + >>> t.remove('US/Eastern') + >>> d = DateTime('US/Eastern') + + +Internal Components +------------------- + +The following are tests of internal components. + +Cache +~~~~~ + +The DateTime class uses a new time zone cache. + + >>> from DateTime.DateTime import _TZINFO + >>> _TZINFO #doctest: +ELLIPSIS + + +The cache maps time zone names to time zone instances. + + >>> cache = _TZINFO + >>> tz = cache['GMT+730'] + >>> tz = cache['US/Mountain'] + +The cache also must provide a few attributes for use by the DateTime +class. + +The _zlst attribute is a list of supported time zone names. + + >>> cache._zlst #doctest: +ELLIPSIS + ['Africa/Abidjan'... 'Africa/Accra'... 'IDLE'... 'NZST'... 'NZT'...] + +The _zidx attribute is a list of lower-case and possibly abbreviated +time zone names that can be mapped to offical zone names. + + >>> 'australia/yancowinna' in cache._zidx + True + >>> 'europe/isle_of_man' in cache._zidx + True + >>> 'gmt+0500' in cache._zidx + True + +Note that there are more items in _zidx than in _zlst since there are +multiple names for some time zones. + + >>> len(cache._zidx) > len(cache._zlst) + True + +Each entry in _zlst should also be present in _zidx in lower case form. 
+ + >>> for name in cache._zlst: + ... if not name.lower() in cache._zidx: + ... print("Error %s not in _zidx" % name.lower()) + +The _zmap attribute maps the names in _zidx to official names in _zlst. + + >>> cache._zmap['africa/abidjan'] + 'Africa/Abidjan' + >>> cache._zmap['gmt+1'] + 'GMT+1' + >>> cache._zmap['gmt+0100'] + 'GMT+1' + >>> cache._zmap['utc'] + 'UTC' + +Let's make sure that _zmap and _zidx agree. + + >>> idx = set(cache._zidx) + >>> keys = set(cache._zmap.keys()) + >>> idx == keys + True + +Timezone objects +~~~~~~~~~~~~~~~~ +The timezone instances have only one public method info(). It returns +a tuple of (offset, is_dst, name). The method takes a timestamp, which +is used to determine dst information. + + >>> t1 = DateTime('November 4, 00:00 2007 US/Mountain').timeTime() + >>> t2 = DateTime('November 4, 02:00 2007 US/Mountain').timeTime() + >>> tz.info(t1) + (-21600, 1, 'MDT') + >>> tz.info(t2) + (-25200, 0, 'MST') + +If you don't pass any arguments to info it provides daylight savings +time information as of today. + + >>> tz.info() in ((-21600, 1, 'MDT'), (-25200, 0, 'MST')) + True + diff --git a/thesisenv/lib/python3.6/site-packages/DateTime/pytz_support.py b/thesisenv/lib/python3.6/site-packages/DateTime/pytz_support.py new file mode 100644 index 0000000..8cfbfc5 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/DateTime/pytz_support.py @@ -0,0 +1,259 @@ +############################################################################## +# +# Copyright (c) 2007 Zope Foundation and Contributors. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## + +from datetime import datetime, timedelta + +import pytz +import pytz.reference +from pytz.tzinfo import StaticTzInfo, memorized_timedelta + +from .interfaces import DateTimeError + +EPOCH = datetime.utcfromtimestamp(0).replace(tzinfo=pytz.utc) + +_numeric_timezone_data = { + 'GMT': ('GMT', 0, 1, [], '', [(0, 0, 0)], 'GMT\000'), + 'GMT+0': ('GMT+0', 0, 1, [], '', [(0, 0, 0)], 'GMT+0000\000'), + 'GMT+1': ('GMT+1', 0, 1, [], '', [(3600, 0, 0)], 'GMT+0100\000'), + 'GMT+2': ('GMT+2', 0, 1, [], '', [(7200, 0, 0)], 'GMT+0200\000'), + 'GMT+3': ('GMT+3', 0, 1, [], '', [(10800, 0, 0)], 'GMT+0300\000'), + 'GMT+4': ('GMT+4', 0, 1, [], '', [(14400, 0, 0)], 'GMT+0400\000'), + 'GMT+5': ('GMT+5', 0, 1, [], '', [(18000, 0, 0)], 'GMT+0500\000'), + 'GMT+6': ('GMT+6', 0, 1, [], '', [(21600, 0, 0)], 'GMT+0600\000'), + 'GMT+7': ('GMT+7', 0, 1, [], '', [(25200, 0, 0)], 'GMT+0700\000'), + 'GMT+8': ('GMT+8', 0, 1, [], '', [(28800, 0, 0)], 'GMT+0800\000'), + 'GMT+9': ('GMT+9', 0, 1, [], '', [(32400, 0, 0)], 'GMT+0900\000'), + 'GMT+10': ('GMT+10', 0, 1, [], '', [(36000, 0, 0)], 'GMT+1000\000'), + 'GMT+11': ('GMT+11', 0, 1, [], '', [(39600, 0, 0)], 'GMT+1100\000'), + 'GMT+12': ('GMT+12', 0, 1, [], '', [(43200, 0, 0)], 'GMT+1200\000'), + 'GMT+13': ('GMT+13', 0, 1, [], '', [(46800, 0, 0)], 'GMT+1300\000'), + + 'GMT-1': ('GMT-1', 0, 1, [], '', [(-3600, 0, 0)], 'GMT-0100\000'), + 'GMT-2': ('GMT-2', 0, 1, [], '', [(-7200, 0, 0)], 'GMT-0200\000'), + 'GMT-3': ('GMT-3', 0, 1, [], '', [(-10800, 0, 0)], 'GMT-0300\000'), + 'GMT-4': ('GMT-4', 0, 1, [], '', [(-14400, 0, 0)], 'GMT-0400\000'), + 'GMT-5': ('GMT-5', 0, 1, [], '', [(-18000, 0, 0)], 'GMT-0500\000'), + 'GMT-6': 
('GMT-6', 0, 1, [], '', [(-21600, 0, 0)], 'GMT-0600\000'), + 'GMT-7': ('GMT-7', 0, 1, [], '', [(-25200, 0, 0)], 'GMT-0700\000'), + 'GMT-8': ('GMT-8', 0, 1, [], '', [(-28800, 0, 0)], 'GMT-0800\000'), + 'GMT-9': ('GMT-9', 0, 1, [], '', [(-32400, 0, 0)], 'GMT-0900\000'), + 'GMT-10': ('GMT-10', 0, 1, [], '', [(-36000, 0, 0)], 'GMT-1000\000'), + 'GMT-11': ('GMT-11', 0, 1, [], '', [(-39600, 0, 0)], 'GMT-1100\000'), + 'GMT-12': ('GMT-12', 0, 1, [], '', [(-43200, 0, 0)], 'GMT-1200\000'), + + 'GMT+0130': ('GMT+0130', 0, 1, [], '', [(5400, 0, 0)], 'GMT+0130\000'), + 'GMT+0230': ('GMT+0230', 0, 1, [], '', [(9000, 0, 0)], 'GMT+0230\000'), + 'GMT+0330': ('GMT+0330', 0, 1, [], '', [(12600, 0, 0)], 'GMT+0330\000'), + 'GMT+0430': ('GMT+0430', 0, 1, [], '', [(16200, 0, 0)], 'GMT+0430\000'), + 'GMT+0530': ('GMT+0530', 0, 1, [], '', [(19800, 0, 0)], 'GMT+0530\000'), + 'GMT+0630': ('GMT+0630', 0, 1, [], '', [(23400, 0, 0)], 'GMT+0630\000'), + 'GMT+0730': ('GMT+0730', 0, 1, [], '', [(27000, 0, 0)], 'GMT+0730\000'), + 'GMT+0830': ('GMT+0830', 0, 1, [], '', [(30600, 0, 0)], 'GMT+0830\000'), + 'GMT+0930': ('GMT+0930', 0, 1, [], '', [(34200, 0, 0)], 'GMT+0930\000'), + 'GMT+1030': ('GMT+1030', 0, 1, [], '', [(37800, 0, 0)], 'GMT+1030\000'), + 'GMT+1130': ('GMT+1130', 0, 1, [], '', [(41400, 0, 0)], 'GMT+1130\000'), + 'GMT+1230': ('GMT+1230', 0, 1, [], '', [(45000, 0, 0)], 'GMT+1230\000'), + + 'GMT-0130': ('GMT-0130', 0, 1, [], '', [(-5400, 0, 0)], 'GMT-0130\000'), + 'GMT-0230': ('GMT-0230', 0, 1, [], '', [(-9000, 0, 0)], 'GMT-0230\000'), + 'GMT-0330': ('GMT-0330', 0, 1, [], '', [(-12600, 0, 0)], 'GMT-0330\000'), + 'GMT-0430': ('GMT-0430', 0, 1, [], '', [(-16200, 0, 0)], 'GMT-0430\000'), + 'GMT-0530': ('GMT-0530', 0, 1, [], '', [(-19800, 0, 0)], 'GMT-0530\000'), + 'GMT-0630': ('GMT-0630', 0, 1, [], '', [(-23400, 0, 0)], 'GMT-0630\000'), + 'GMT-0730': ('GMT-0730', 0, 1, [], '', [(-27000, 0, 0)], 'GMT-0730\000'), + 'GMT-0830': ('GMT-0830', 0, 1, [], '', [(-30600, 0, 0)], 'GMT-0830\000'), + 
'GMT-0930': ('GMT-0930', 0, 1, [], '', [(-34200, 0, 0)], 'GMT-0930\000'), + 'GMT-1030': ('GMT-1030', 0, 1, [], '', [(-37800, 0, 0)], 'GMT-1030\000'), + 'GMT-1130': ('GMT-1130', 0, 1, [], '', [(-41400, 0, 0)], 'GMT-1130\000'), + 'GMT-1230': ('GMT-1230', 0, 1, [], '', [(-45000, 0, 0)], 'GMT-1230\000'), +} + +# These are the timezones not in pytz.common_timezones +_old_zlst = [ + 'AST', 'AT', 'BST', 'BT', 'CCT', + 'CET', 'CST', 'Cuba', 'EADT', 'EAST', + 'EEST', 'EET', 'EST', 'Egypt', 'FST', + 'FWT', 'GB-Eire', 'GMT+0100', 'GMT+0130', 'GMT+0200', + 'GMT+0230', 'GMT+0300', 'GMT+0330', 'GMT+0400', 'GMT+0430', + 'GMT+0500', 'GMT+0530', 'GMT+0600', 'GMT+0630', 'GMT+0700', + 'GMT+0730', 'GMT+0800', 'GMT+0830', 'GMT+0900', 'GMT+0930', + 'GMT+1', 'GMT+1000', 'GMT+1030', 'GMT+1100', 'GMT+1130', + 'GMT+1200', 'GMT+1230', 'GMT+1300', 'GMT-0100', 'GMT-0130', + 'GMT-0200', 'GMT-0300', 'GMT-0400', 'GMT-0500', 'GMT-0600', + 'GMT-0630', 'GMT-0700', 'GMT-0730', 'GMT-0800', 'GMT-0830', + 'GMT-0900', 'GMT-0930', 'GMT-1000', 'GMT-1030', 'GMT-1100', + 'GMT-1130', 'GMT-1200', 'GMT-1230', 'GST', 'Greenwich', + 'Hongkong', 'IDLE', 'IDLW', 'Iceland', 'Iran', + 'Israel', 'JST', 'Jamaica', 'Japan', 'MEST', + 'MET', 'MEWT', 'MST', 'NT', 'NZDT', + 'NZST', 'NZT', 'PST', 'Poland', 'SST', + 'SWT', 'Singapore', 'Turkey', 'UCT', 'UT', + 'Universal', 'WADT', 'WAST', 'WAT', 'WET', + 'ZP4', 'ZP5', 'ZP6', +] + +_old_zmap = { + 'aest': 'GMT+10', 'aedt': 'GMT+11', + 'aus eastern standard time': 'GMT+10', + 'sydney standard time': 'GMT+10', + 'tasmania standard time': 'GMT+10', + 'e. australia standard time': 'GMT+10', + 'aus central standard time': 'GMT+0930', + 'cen. australia standard time': 'GMT+0930', + 'w. 
australia standard time': 'GMT+8', + + 'central europe standard time': 'GMT+1', + 'eastern standard time': 'US/Eastern', + 'us eastern standard time': 'US/Eastern', + 'central standard time': 'US/Central', + 'mountain standard time': 'US/Mountain', + 'pacific standard time': 'US/Pacific', + 'mst': 'US/Mountain', 'pst': 'US/Pacific', + 'cst': 'US/Central', 'est': 'US/Eastern', + + 'gmt+0000': 'GMT+0', 'gmt+0': 'GMT+0', + + 'gmt+0100': 'GMT+1', 'gmt+0200': 'GMT+2', 'gmt+0300': 'GMT+3', + 'gmt+0400': 'GMT+4', 'gmt+0500': 'GMT+5', 'gmt+0600': 'GMT+6', + 'gmt+0700': 'GMT+7', 'gmt+0800': 'GMT+8', 'gmt+0900': 'GMT+9', + 'gmt+1000': 'GMT+10', 'gmt+1100': 'GMT+11', 'gmt+1200': 'GMT+12', + 'gmt+1300': 'GMT+13', + 'gmt-0100': 'GMT-1', 'gmt-0200': 'GMT-2', 'gmt-0300': 'GMT-3', + 'gmt-0400': 'GMT-4', 'gmt-0500': 'GMT-5', 'gmt-0600': 'GMT-6', + 'gmt-0700': 'GMT-7', 'gmt-0800': 'GMT-8', 'gmt-0900': 'GMT-9', + 'gmt-1000': 'GMT-10', 'gmt-1100': 'GMT-11', 'gmt-1200': 'GMT-12', + + 'gmt+1': 'GMT+1', 'gmt+2': 'GMT+2', 'gmt+3': 'GMT+3', + 'gmt+4': 'GMT+4', 'gmt+5': 'GMT+5', 'gmt+6': 'GMT+6', + 'gmt+7': 'GMT+7', 'gmt+8': 'GMT+8', 'gmt+9': 'GMT+9', + 'gmt+10': 'GMT+10', 'gmt+11': 'GMT+11', 'gmt+12': 'GMT+12', + 'gmt+13': 'GMT+13', + 'gmt-1': 'GMT-1', 'gmt-2': 'GMT-2', 'gmt-3': 'GMT-3', + 'gmt-4': 'GMT-4', 'gmt-5': 'GMT-5', 'gmt-6': 'GMT-6', + 'gmt-7': 'GMT-7', 'gmt-8': 'GMT-8', 'gmt-9': 'GMT-9', + 'gmt-10': 'GMT-10', 'gmt-11': 'GMT-11', 'gmt-12': 'GMT-12', + + 'gmt+130': 'GMT+0130', 'gmt+0130': 'GMT+0130', + 'gmt+230': 'GMT+0230', 'gmt+0230': 'GMT+0230', + 'gmt+330': 'GMT+0330', 'gmt+0330': 'GMT+0330', + 'gmt+430': 'GMT+0430', 'gmt+0430': 'GMT+0430', + 'gmt+530': 'GMT+0530', 'gmt+0530': 'GMT+0530', + 'gmt+630': 'GMT+0630', 'gmt+0630': 'GMT+0630', + 'gmt+730': 'GMT+0730', 'gmt+0730': 'GMT+0730', + 'gmt+830': 'GMT+0830', 'gmt+0830': 'GMT+0830', + 'gmt+930': 'GMT+0930', 'gmt+0930': 'GMT+0930', + 'gmt+1030': 'GMT+1030', + 'gmt+1130': 'GMT+1130', + 'gmt+1230': 'GMT+1230', + + 'gmt-130': 
'GMT-0130', 'gmt-0130': 'GMT-0130', + 'gmt-230': 'GMT-0230', 'gmt-0230': 'GMT-0230', + 'gmt-330': 'GMT-0330', 'gmt-0330': 'GMT-0330', + 'gmt-430': 'GMT-0430', 'gmt-0430': 'GMT-0430', + 'gmt-530': 'GMT-0530', 'gmt-0530': 'GMT-0530', + 'gmt-630': 'GMT-0630', 'gmt-0630': 'GMT-0630', + 'gmt-730': 'GMT-0730', 'gmt-0730': 'GMT-0730', + 'gmt-830': 'GMT-0830', 'gmt-0830': 'GMT-0830', + 'gmt-930': 'GMT-0930', 'gmt-0930': 'GMT-0930', + 'gmt-1030': 'GMT-1030', + 'gmt-1130': 'GMT-1130', + 'gmt-1230': 'GMT-1230', + + 'ut': 'Universal', + 'bst': 'GMT+1', 'mest': 'GMT+2', 'sst': 'GMT+2', + 'fst': 'GMT+2', 'wadt': 'GMT+8', 'eadt': 'GMT+11', 'nzdt': 'GMT+13', + 'wet': 'GMT', 'wat': 'GMT-1', 'at': 'GMT-2', 'ast': 'GMT-4', + 'nt': 'GMT-11', 'idlw': 'GMT-12', 'cet': 'GMT+1', 'cest': 'GMT+2', + 'met': 'GMT+1', + 'mewt': 'GMT+1', 'swt': 'GMT+1', 'fwt': 'GMT+1', 'eet': 'GMT+2', + 'eest': 'GMT+3', + 'bt': 'GMT+3', 'zp4': 'GMT+4', 'zp5': 'GMT+5', 'zp6': 'GMT+6', + 'wast': 'GMT+7', 'cct': 'GMT+8', 'jst': 'GMT+9', 'east': 'GMT+10', + 'gst': 'GMT+10', 'nzt': 'GMT+12', 'nzst': 'GMT+12', 'idle': 'GMT+12', + 'ret': 'GMT+4', 'ist': 'GMT+0530', 'edt': 'GMT-4', + +} + + +# some timezone definitions of the "-0400" are not working +# when upgrading +for hour in range(0, 13): + hour = hour + fhour = str(hour) + if len(fhour) == 1: + fhour = '0' + fhour + _old_zmap['-%s00' % fhour] = 'GMT-%i' % hour + _old_zmap['+%s00' % fhour] = 'GMT+%i' % hour + + +def _static_timezone_factory(data): + zone = data[0] + cls = type(zone, (StaticTzInfo,), dict( + zone=zone, + _utcoffset=memorized_timedelta(data[5][0][0]), + _tzname=data[6][:-1])) # strip the trailing null + return cls() + +_numeric_timezones = dict((key, _static_timezone_factory(data)) + for key, data in _numeric_timezone_data.items()) + + +class Timezone: + """ + Timezone information returned by PytzCache.__getitem__ + Adapts datetime.tzinfo object to DateTime._timezone interface + """ + def __init__(self, tzinfo): + self.tzinfo = tzinfo + + def 
info(self, t=None): + if t is None: + dt = datetime.utcnow().replace(tzinfo=pytz.utc) + else: + # can't use utcfromtimestamp past 2038 + dt = EPOCH + timedelta(0, t) + + # need to normalize tzinfo for the datetime to deal with + # daylight savings time. + normalized_dt = self.tzinfo.normalize(dt.astimezone(self.tzinfo)) + normalized_tzinfo = normalized_dt.tzinfo + + offset = normalized_tzinfo.utcoffset(normalized_dt) + secs = offset.days * 24 * 60 * 60 + offset.seconds + dst = normalized_tzinfo.dst(normalized_dt) + if dst == timedelta(0): + is_dst = 0 + else: + is_dst = 1 + return secs, is_dst, normalized_tzinfo.tzname(normalized_dt) + + +class PytzCache: + """ + Reimplementation of the DateTime._cache class that uses for timezone info + """ + + _zlst = pytz.common_timezones + _old_zlst # used by DateTime.TimeZones + _zmap = dict((name.lower(), name) for name in pytz.all_timezones) + _zmap.update(_old_zmap) # These must take priority + _zidx = _zmap.keys() + + def __getitem__(self, key): + name = self._zmap.get(key.lower(), key) # fallback to key + try: + return Timezone(pytz.timezone(name)) + except pytz.UnknownTimeZoneError: + try: + return Timezone(_numeric_timezones[name]) + except KeyError: + raise DateTimeError('Unrecognized timezone: %s' % key) diff --git a/thesisenv/lib/python3.6/site-packages/DateTime/tests/__init__.py b/thesisenv/lib/python3.6/site-packages/DateTime/tests/__init__.py new file mode 100644 index 0000000..e67bcb6 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/DateTime/tests/__init__.py @@ -0,0 +1,15 @@ +############################################################################## +# +# Copyright (c) 2003 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## + +# This file is needed to make this a package. diff --git a/thesisenv/lib/python3.6/site-packages/DateTime/tests/julian_testdata.txt b/thesisenv/lib/python3.6/site-packages/DateTime/tests/julian_testdata.txt new file mode 100644 index 0000000..386c3da --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/DateTime/tests/julian_testdata.txt @@ -0,0 +1,57 @@ +1970-01-01 (1970, 1, 4) +1970-01-02 (1970, 1, 5) +1970-01-30 (1970, 5, 5) +1970-01-31 (1970, 5, 6) +1970-02-01 (1970, 5, 7) +1970-02-02 (1970, 6, 1) +1970-02-28 (1970, 9, 6) +1970-03-01 (1970, 9, 7) +1970-03-30 (1970, 14, 1) +1970-03-31 (1970, 14, 2) +1970-04-01 (1970, 14, 3) +1970-09-30 (1970, 40, 3) +1970-10-01 (1970, 40, 4) +1970-10-02 (1970, 40, 5) +1970-10-03 (1970, 40, 6) +1970-10-04 (1970, 40, 7) +1970-10-05 (1970, 41, 1) +1971-01-02 (1970, 53, 6) +1971-01-03 (1970, 53, 7) +1971-01-04 (1971, 1, 1) +1971-01-05 (1971, 1, 2) +1971-12-31 (1971, 52, 5) +1972-01-01 (1971, 52, 6) +1972-01-02 (1971, 52, 7) +1972-01-03 (1972, 1, 1) +1972-01-04 (1972, 1, 2) +1972-12-30 (1972, 52, 6) +1972-12-31 (1972, 52, 7) +1973-01-01 (1973, 1, 1) +1973-01-02 (1973, 1, 2) +1973-12-29 (1973, 52, 6) +1973-12-30 (1973, 52, 7) +1973-12-31 (1974, 1, 1) +1974-01-01 (1974, 1, 2) +1998-12-30 (1998, 53, 3) +1998-12-31 (1998, 53, 4) +1999-01-01 (1998, 53, 5) +1999-01-02 (1998, 53, 6) +1999-01-03 (1998, 53, 7) +1999-01-04 (1999, 1, 1) +1999-01-05 (1999, 1, 2) +1999-12-30 (1999, 52, 4) +1999-12-31 (1999, 52, 5) +2000-01-01 (1999, 52, 6) +2000-01-02 (1999, 52, 7) +2000-01-03 (2000, 1, 1) +2000-01-04 (2000, 1, 2) +2000-01-05 (2000, 1, 3) +2000-01-06 (2000, 1, 4) +2000-01-07 (2000, 1, 5) +2000-01-08 (2000, 1, 6) 
+2000-01-09 (2000, 1, 7) +2000-01-10 (2000, 2, 1) +2019-12-28 (2019, 52, 6) +2019-12-29 (2019, 52, 7) +2019-12-30 (2020, 1, 1) +2019-12-31 (2020, 1, 2) diff --git a/thesisenv/lib/python3.6/site-packages/DateTime/tests/test_datetime.py b/thesisenv/lib/python3.6/site-packages/DateTime/tests/test_datetime.py new file mode 100644 index 0000000..7172adb --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/DateTime/tests/test_datetime.py @@ -0,0 +1,686 @@ +############################################################################## +# +# Copyright (c) 2003 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. 
+# +############################################################################## + +from datetime import date, datetime, tzinfo, timedelta +import math +import platform +import os +import sys +import time +import unittest + +import pytz + +from DateTime.DateTime import _findLocalTimeZoneName +from DateTime import DateTime + +if sys.version_info > (3, ): + import pickle + unicode = str + PY3K = True +else: + import cPickle as pickle + PY3K = False + +try: + __file__ +except NameError: + f = sys.argv[0] +else: + f = __file__ + +IS_PYPY = getattr(platform, 'python_implementation', lambda: None)() == 'PyPy' + +DATADIR = os.path.dirname(os.path.abspath(f)) +del f + +ZERO = timedelta(0) + + +class FixedOffset(tzinfo): + """Fixed offset in minutes east from UTC.""" + + def __init__(self, offset, name): + self.__offset = timedelta(minutes=offset) + self.__name = name + + def utcoffset(self, dt): + return self.__offset + + def tzname(self, dt): + return self.__name + + def dst(self, dt): + return ZERO + + +class DateTimeTests(unittest.TestCase): + + def _compare(self, dt1, dt2): + '''Compares the internal representation of dt1 with + the representation in dt2. Allows sub-millisecond variations. 
+ Primarily for testing.''' + self.assertEqual(round(dt1._t, 3), round(dt2._t, 3)) + self.assertEqual(round(dt1._d, 9), round(dt2._d, 9)) + self.assertEqual(round(dt1.time, 9), round(dt2.time, 9)) + self.assertEqual(dt1.millis(), dt2.millis()) + self.assertEqual(dt1._micros, dt2._micros) + + def testBug1203(self): + # 01:59:60 occurred in old DateTime + dt = DateTime(7200, 'GMT') + self.assertTrue(str(dt).find('60') < 0, dt) + + def testDSTInEffect(self): + # Checks GMT offset for a DST date in the US/Eastern time zone + dt = DateTime(2000, 5, 9, 15, 0, 0, 'US/Eastern') + self.assertEqual(dt.toZone('GMT').hour(), 19, + (dt, dt.toZone('GMT'))) + + def testDSTNotInEffect(self): + # Checks GMT offset for a non-DST date in the US/Eastern time zone + dt = DateTime(2000, 11, 9, 15, 0, 0, 'US/Eastern') + self.assertEqual(dt.toZone('GMT').hour(), 20, + (dt, dt.toZone('GMT'))) + + def testAddPrecision(self): + # Precision of serial additions + dt = DateTime() + self.assertEqual(str(dt + 0.10 + 3.14 + 6.76 - 10), str(dt), + dt) + + def testConstructor3(self): + # Constructor from date/time string + dt = DateTime() + dt1s = '%d/%d/%d %d:%d:%f %s' % ( + dt.year(), + dt.month(), + dt.day(), + dt.hour(), + dt.minute(), + dt.second(), + dt.timezone()) + dt1 = DateTime(dt1s) + # Compare representations as it's the + # only way to compare the dates to the same accuracy + self.assertEqual(repr(dt), repr(dt1)) + + def testConstructor4(self): + # Constructor from time float + dt = DateTime() + dt1 = DateTime(float(dt)) + self._compare(dt, dt1) + + def testConstructor5(self): + # Constructor from time float and timezone + dt = DateTime() + dt1 = DateTime(float(dt), dt.timezone()) + self.assertEqual(str(dt), str(dt1), (dt, dt1)) + dt1 = DateTime(float(dt), unicode(dt.timezone())) + self.assertEqual(str(dt), str(dt1), (dt, dt1)) + + def testConstructor6(self): + # Constructor from year and julian date + # This test must normalize the time zone, or it *will* break when + # DST changes! 
+ dt1 = DateTime(2000, 5.500000578705) + dt = DateTime('2000/1/5 12:00:00.050 pm %s' % dt1.localZone()) + self._compare(dt, dt1) + + def testConstructor7(self): + # Constructor from parts + dt = DateTime() + dt1 = DateTime( + dt.year(), + dt.month(), + dt.day(), + dt.hour(), + dt.minute(), + dt.second(), + dt.timezone()) + # Compare representations as it's the + # only way to compare the dates to the same accuracy + self.assertEqual(repr(dt), repr(dt1)) + + def testDayOfWeek(self): + # Compare to the datetime.date value to make it locale independent + expected = date(2000, 6, 16).strftime('%A') + # strftime() used to always be passed a day of week of 0 + dt = DateTime('2000/6/16') + s = dt.strftime('%A') + self.assertEqual(s, expected, (dt, s)) + + def testOldDate(self): + # Fails when an 1800 date is displayed with negative signs + dt = DateTime('1830/5/6 12:31:46.213 pm') + dt1 = dt.toZone('GMT+6') + self.assertTrue(str(dt1).find('-') < 0, (dt, dt1)) + + def testSubtraction(self): + # Reconstruction of a DateTime from its parts, with subtraction + # this also tests the accuracy of addition and reconstruction + dt = DateTime() + dt1 = dt - 3.141592653 + dt2 = DateTime( + dt.year(), + dt.month(), + dt.day(), + dt.hour(), + dt.minute(), + dt.second()) + dt3 = dt2 - 3.141592653 + self.assertEqual(dt1, dt3, (dt, dt1, dt2, dt3)) + + def testTZ1add(self): + # Time zone manipulation: add to a date + dt = DateTime('1997/3/8 1:45am GMT-4') + dt1 = DateTime('1997/3/9 1:45pm GMT+8') + self.assertTrue((dt + 1.0).equalTo(dt1)) + + def testTZ1sub(self): + # Time zone manipulation: subtract from a date + dt = DateTime('1997/3/8 1:45am GMT-4') + dt1 = DateTime('1997/3/9 1:45pm GMT+8') + self.assertTrue((dt1 - 1.0).equalTo(dt)) + + def testTZ1diff(self): + # Time zone manipulation: diff two dates + dt = DateTime('1997/3/8 1:45am GMT-4') + dt1 = DateTime('1997/3/9 1:45pm GMT+8') + self.assertEqual(dt1 - dt, 1.0, (dt, dt1)) + + def test_compare_methods(self): + # Compare two dates 
using several methods + dt = DateTime('1997/1/1') + dt1 = DateTime('1997/2/2') + self.assertTrue(dt1.greaterThan(dt)) + self.assertTrue(dt1.greaterThanEqualTo(dt)) + self.assertTrue(dt.lessThan(dt1)) + self.assertTrue(dt.lessThanEqualTo(dt1)) + self.assertTrue(dt.notEqualTo(dt1)) + self.assertFalse(dt.equalTo(dt1)) + + def test_compare_methods_none(self): + # Compare a date to None + dt = DateTime('1997/1/1') + self.assertTrue(dt.greaterThan(None)) + self.assertTrue(dt.greaterThanEqualTo(None)) + self.assertFalse(dt.lessThan(None)) + self.assertFalse(dt.lessThanEqualTo(None)) + self.assertTrue(dt.notEqualTo(None)) + self.assertFalse(dt.equalTo(None)) + + def test_pickle(self): + dt = DateTime() + data = pickle.dumps(dt, 1) + new = pickle.loads(data) + for key in DateTime.__slots__: + self.assertEqual(getattr(dt, key), getattr(new, key)) + + def test_pickle_with_tz(self): + dt = DateTime('2002/5/2 8:00am GMT+8') + data = pickle.dumps(dt, 1) + new = pickle.loads(data) + for key in DateTime.__slots__: + self.assertEqual(getattr(dt, key), getattr(new, key)) + + def test_pickle_with_numerical_tz(self): + for dt_str in ('2007/01/02 12:34:56.789 +0300', + '2007/01/02 12:34:56.789 +0430', + '2007/01/02 12:34:56.789 -1234'): + dt = DateTime(dt_str) + data = pickle.dumps(dt, 1) + new = pickle.loads(data) + for key in DateTime.__slots__: + self.assertEqual(getattr(dt, key), getattr(new, key)) + + def test_pickle_with_micros(self): + dt = DateTime('2002/5/2 8:00:14.123 GMT+8') + data = pickle.dumps(dt, 1) + new = pickle.loads(data) + for key in DateTime.__slots__: + self.assertEqual(getattr(dt, key), getattr(new, key)) + + def test_pickle_old(self): + dt = DateTime('2002/5/2 8:00am GMT+0') + data = ('(cDateTime.DateTime\nDateTime\nq\x01Noq\x02}q\x03(U\x05' + '_amonq\x04U\x03Mayq\x05U\x05_adayq\x06U\x03Thuq\x07U\x05_pmonq' + '\x08h\x05U\x05_hourq\tK\x08U\x05_fmonq\nh\x05U\x05_pdayq\x0bU' + '\x04Thu.q\x0cU\x05_fdayq\rU\x08Thursdayq\x0eU\x03_pmq\x0fU\x02amq' + 
'\x10U\x02_tq\x11GA\xcehy\x00\x00\x00\x00U\x07_minuteq\x12K\x00U' + '\x07_microsq\x13L1020326400000000L\nU\x02_dq\x14G@\xe2\x12j\xaa' + '\xaa\xaa\xabU\x07_secondq\x15G\x00\x00\x00\x00\x00\x00\x00\x00U' + '\x03_tzq\x16U\x05GMT+0q\x17U\x06_monthq\x18K\x05U' + '\x0f_timezone_naiveq\x19I00\nU\x04_dayq\x1aK\x02U\x05_yearq' + '\x1bM\xd2\x07U\x08_nearsecq\x1cG\x00\x00\x00\x00\x00\x00\x00' + '\x00U\x07_pmhourq\x1dK\x08U\n_dayoffsetq\x1eK\x04U\x04timeq' + '\x1fG?\xd5UUUV\x00\x00ub.') + if PY3K: + data = data.encode('latin-1') + new = pickle.loads(data) + for key in DateTime.__slots__: + self.assertEqual(getattr(dt, key), getattr(new, key)) + + def test_pickle_old_without_micros(self): + dt = DateTime('2002/5/2 8:00am GMT+0') + data = ('(cDateTime.DateTime\nDateTime\nq\x01Noq\x02}q\x03(U\x05' + '_amonq\x04U\x03Mayq\x05U\x05_adayq\x06U\x03Thuq\x07U\x05_pmonq' + '\x08h\x05U\x05_hourq\tK\x08U\x05_fmonq\nh\x05U\x05_pdayq\x0bU' + '\x04Thu.q\x0cU\x05_fdayq\rU\x08Thursdayq\x0eU\x03_pmq\x0fU' + '\x02amq\x10U\x02_tq\x11GA\xcehy\x00\x00\x00\x00U\x07_minuteq' + '\x12K\x00U\x02_dq\x13G@\xe2\x12j\xaa\xaa\xaa\xabU\x07_secondq' + '\x14G\x00\x00\x00\x00\x00\x00\x00\x00U\x03_tzq\x15U\x05GMT+0q' + '\x16U\x06_monthq\x17K\x05U\x0f_timezone_naiveq\x18I00\nU' + '\x04_dayq\x19K\x02U\x05_yearq\x1aM\xd2\x07U\x08_nearsecq' + '\x1bG\x00\x00\x00\x00\x00\x00\x00\x00U\x07_pmhourq\x1cK\x08U' + '\n_dayoffsetq\x1dK\x04U\x04timeq\x1eG?\xd5UUUV\x00\x00ub.') + if PY3K: + data = data.encode('latin-1') + new = pickle.loads(data) + for key in DateTime.__slots__: + self.assertEqual(getattr(dt, key), getattr(new, key)) + + def testTZ2(self): + # Time zone manipulation test 2 + dt = DateTime() + dt1 = dt.toZone('GMT') + s = dt.second() + s1 = dt1.second() + self.assertEqual(s, s1, (dt, dt1, s, s1)) + + def testTZDiffDaylight(self): + # Diff dates across daylight savings dates + dt = DateTime('2000/6/8 1:45am US/Eastern') + dt1 = DateTime('2000/12/8 12:45am US/Eastern') + self.assertEqual(dt1 - dt, 183, (dt, dt1, dt1 
- dt)) + + def testY10KDate(self): + # Comparison of a Y10K date and a Y2K date + dt = DateTime('10213/09/21') + dt1 = DateTime(2000, 1, 1) + + dsec = (dt.millis() - dt1.millis()) / 1000.0 + ddays = math.floor((dsec / 86400.0) + 0.5) + + self.assertEqual(ddays, 3000000, ddays) + + def test_tzoffset(self): + # Test time-zone given as an offset + + # GMT + dt = DateTime('Tue, 10 Sep 2001 09:41:03 GMT') + self.assertEqual(dt.tzoffset(), 0) + + # Timezone by name, a timezone that hasn't got daylightsaving. + dt = DateTime('Tue, 2 Mar 2001 09:41:03 GMT+3') + self.assertEqual(dt.tzoffset(), 10800) + + # Timezone by name, has daylightsaving but is not in effect. + dt = DateTime('Tue, 21 Jan 2001 09:41:03 PST') + self.assertEqual(dt.tzoffset(), -28800) + + # Timezone by name, with daylightsaving in effect + dt = DateTime('Tue, 24 Aug 2001 09:41:03 PST') + self.assertEqual(dt.tzoffset(), -25200) + + # A negative numerical timezone + dt = DateTime('Tue, 24 Jul 2001 09:41:03 -0400') + self.assertEqual(dt.tzoffset(), -14400) + + # A positive numerical timzone + dt = DateTime('Tue, 6 Dec 1966 01:41:03 +0200') + self.assertEqual(dt.tzoffset(), 7200) + + # A negative numerical timezone with minutes. + dt = DateTime('Tue, 24 Jul 2001 09:41:03 -0637') + self.assertEqual(dt.tzoffset(), -23820) + + # A positive numerical timezone with minutes. + dt = DateTime('Tue, 24 Jul 2001 09:41:03 +0425') + self.assertEqual(dt.tzoffset(), 15900) + + def testISO8601(self): + # ISO8601 reference dates + ref0 = DateTime('2002/5/2 8:00am GMT') + ref1 = DateTime('2002/5/2 8:00am US/Eastern') + ref2 = DateTime('2006/11/6 10:30 GMT') + ref3 = DateTime('2004/06/14 14:30:15 GMT-3') + ref4 = DateTime('2006/01/01 GMT') + + # Basic tests + # Though this is timezone naive and according to specification should + # be interpreted in the local timezone, to preserve backwards + # compatibility with previously expected behaviour. 
+ isoDt = DateTime('2002-05-02T08:00:00') + self.assertTrue(ref0.equalTo(isoDt)) + isoDt = DateTime('2002-05-02T08:00:00Z') + self.assertTrue(ref0.equalTo(isoDt)) + isoDt = DateTime('2002-05-02T08:00:00+00:00') + self.assertTrue(ref0.equalTo(isoDt)) + isoDt = DateTime('2002-05-02T08:00:00-04:00') + self.assertTrue(ref1.equalTo(isoDt)) + isoDt = DateTime('2002-05-02 08:00:00-04:00') + self.assertTrue(ref1.equalTo(isoDt)) + + # Bug 1386: the colon in the timezone offset is optional + isoDt = DateTime('2002-05-02T08:00:00-0400') + self.assertTrue(ref1.equalTo(isoDt)) + + # Bug 2191: date reduced formats + isoDt = DateTime('2006-01-01') + self.assertTrue(ref4.equalTo(isoDt)) + isoDt = DateTime('200601-01') + self.assertTrue(ref4.equalTo(isoDt)) + isoDt = DateTime('20060101') + self.assertTrue(ref4.equalTo(isoDt)) + isoDt = DateTime('2006-01') + self.assertTrue(ref4.equalTo(isoDt)) + isoDt = DateTime('200601') + self.assertTrue(ref4.equalTo(isoDt)) + isoDt = DateTime('2006') + self.assertTrue(ref4.equalTo(isoDt)) + + # Bug 2191: date/time separators are also optional + isoDt = DateTime('20020502T08:00:00') + self.assertTrue(ref0.equalTo(isoDt)) + isoDt = DateTime('2002-05-02T080000') + self.assertTrue(ref0.equalTo(isoDt)) + isoDt = DateTime('20020502T080000') + self.assertTrue(ref0.equalTo(isoDt)) + + # Bug 2191: timezones with only one digit for hour + isoDt = DateTime('20020502T080000+0') + self.assertTrue(ref0.equalTo(isoDt)) + isoDt = DateTime('20020502 080000-4') + self.assertTrue(ref1.equalTo(isoDt)) + isoDt = DateTime('20020502T080000-400') + self.assertTrue(ref1.equalTo(isoDt)) + isoDt = DateTime('20020502T080000-4:00') + self.assertTrue(ref1.equalTo(isoDt)) + + # Bug 2191: optional seconds/minutes + isoDt = DateTime('2002-05-02T0800') + self.assertTrue(ref0.equalTo(isoDt)) + isoDt = DateTime('2002-05-02T08') + self.assertTrue(ref0.equalTo(isoDt)) + + # Bug 2191: week format + isoDt = DateTime('2002-W18-4T0800') + self.assertTrue(ref0.equalTo(isoDt)) + isoDt = 
DateTime('2002-W184T0800') + self.assertTrue(ref0.equalTo(isoDt)) + isoDt = DateTime('2002W18-4T0800') + self.assertTrue(ref0.equalTo(isoDt)) + isoDt = DateTime('2002W184T08') + self.assertTrue(ref0.equalTo(isoDt)) + isoDt = DateTime('2004-W25-1T14:30:15-03:00') + self.assertTrue(ref3.equalTo(isoDt)) + isoDt = DateTime('2004-W25T14:30:15-03:00') + self.assertTrue(ref3.equalTo(isoDt)) + + # Bug 2191: day of year format + isoDt = DateTime('2002-122T0800') + self.assertTrue(ref0.equalTo(isoDt)) + isoDt = DateTime('2002122T0800') + self.assertTrue(ref0.equalTo(isoDt)) + + # Bug 2191: hours/minutes fractions + isoDt = DateTime('2006-11-06T10.5') + self.assertTrue(ref2.equalTo(isoDt)) + isoDt = DateTime('2006-11-06T10,5') + self.assertTrue(ref2.equalTo(isoDt)) + isoDt = DateTime('20040614T1430.25-3') + self.assertTrue(ref3.equalTo(isoDt)) + isoDt = DateTime('2004-06-14T1430,25-3') + self.assertTrue(ref3.equalTo(isoDt)) + isoDt = DateTime('2004-06-14T14:30.25-3') + self.assertTrue(ref3.equalTo(isoDt)) + isoDt = DateTime('20040614T14:30,25-3') + self.assertTrue(ref3.equalTo(isoDt)) + + # ISO8601 standard format + iso8601_string = '2002-05-02T08:00:00-04:00' + iso8601DT = DateTime(iso8601_string) + self.assertEqual(iso8601_string, iso8601DT.ISO8601()) + + # ISO format with no timezone + isoDt = DateTime('2006-01-01 00:00:00') + self.assertTrue(ref4.equalTo(isoDt)) + + def testJulianWeek(self): + # Check JulianDayWeek function + fn = os.path.join(DATADIR, 'julian_testdata.txt') + with open(fn, 'r') as fd: + lines = fd.readlines() + for line in lines: + d = DateTime(line[:10]) + result_from_mx = tuple(map(int, line[12:-2].split(','))) + self.assertEqual(result_from_mx[1], d.week()) + + def testCopyConstructor(self): + d = DateTime('2004/04/04') + self.assertEqual(DateTime(d), d) + self.assertEqual(str(DateTime(d)), str(d)) + d2 = DateTime('1999/04/12 01:00:00') + self.assertEqual(DateTime(d2), d2) + self.assertEqual(str(DateTime(d2)), str(d2)) + + def 
testCopyConstructorPreservesTimezone(self): + # test for https://bugs.launchpad.net/zope2/+bug/200007 + # This always worked in the local timezone, so we need at least + # two tests with different zones to be sure at least one of them + # is not local. + d = DateTime('2004/04/04') + self.assertEqual(DateTime(d).timezone(), d.timezone()) + d2 = DateTime('2008/04/25 12:00:00 EST') + self.assertEqual(DateTime(d2).timezone(), d2.timezone()) + self.assertEqual(str(DateTime(d2)), str(d2)) + d3 = DateTime('2008/04/25 12:00:00 PST') + self.assertEqual(DateTime(d3).timezone(), d3.timezone()) + self.assertEqual(str(DateTime(d3)), str(d3)) + + def testRFC822(self): + # rfc822 conversion + dt = DateTime('2002-05-02T08:00:00+00:00') + self.assertEqual(dt.rfc822(), 'Thu, 02 May 2002 08:00:00 +0000') + + dt = DateTime('2002-05-02T08:00:00+02:00') + self.assertEqual(dt.rfc822(), 'Thu, 02 May 2002 08:00:00 +0200') + + dt = DateTime('2002-05-02T08:00:00-02:00') + self.assertEqual(dt.rfc822(), 'Thu, 02 May 2002 08:00:00 -0200') + + # Checking that conversion from local time is working. 
+ dt = DateTime() + dts = dt.rfc822().split(' ') + times = dts[4].split(':') + _isDST = time.localtime(time.time())[8] + if _isDST: + offset = time.altzone + else: + offset = time.timezone + self.assertEqual(dts[0], dt.aDay() + ',') + self.assertEqual(int(dts[1]), dt.day()) + self.assertEqual(dts[2], dt.aMonth()) + self.assertEqual(int(dts[3]), dt.year()) + self.assertEqual(int(times[0]), dt.h_24()) + self.assertEqual(int(times[1]), dt.minute()) + self.assertEqual(int(times[2]), int(dt.second())) + self.assertEqual(dts[5], "%+03d%02d" % divmod((-offset / 60), 60)) + + def testInternationalDateformat(self): + for year in (1990, 2001, 2020): + for month in (1, 12): + for day in (1, 12, 28, 31): + try: + d_us = DateTime("%d/%d/%d" % (year, month, day)) + except Exception: + continue + + d_int = DateTime("%d.%d.%d" % (day, month, year), + datefmt="international") + self.assertEqual(d_us, d_int) + + d_int = DateTime("%d/%d/%d" % (day, month, year), + datefmt="international") + self.assertEqual(d_us, d_int) + + def test_intl_format_hyphen(self): + d_jan = DateTime('2011-01-11 GMT') + d_nov = DateTime('2011-11-01 GMT') + d_us = DateTime('11-01-2011 GMT') + d_int = DateTime('11-01-2011 GMT', datefmt="international") + self.assertNotEqual(d_us, d_int) + self.assertEqual(d_us, d_nov) + self.assertEqual(d_int, d_jan) + + def test_calcTimezoneName(self): + from DateTime.interfaces import TimeError + timezone_dependent_epoch = 2177452800 + try: + DateTime()._calcTimezoneName(timezone_dependent_epoch, 0) + except TimeError: + self.fail('Zope Collector issue #484 (negative time bug): ' + 'TimeError raised') + + def testStrftimeTZhandling(self): + # strftime timezone testing + # This is a test for collector issue #1127 + format = '%Y-%m-%d %H:%M %Z' + dt = DateTime('Wed, 19 Nov 2003 18:32:07 -0215') + dt_string = dt.strftime(format) + dt_local = dt.toZone(_findLocalTimeZoneName(0)) + dt_localstring = dt_local.strftime(format) + self.assertEqual(dt_string, dt_localstring) + + def 
testStrftimeFarDates(self): + # Checks strftime in dates <= 1900 or >= 2038 + dt = DateTime('1900/01/30') + self.assertEqual(dt.strftime('%d/%m/%Y'), '30/01/1900') + dt = DateTime('2040/01/30') + self.assertEqual(dt.strftime('%d/%m/%Y'), '30/01/2040') + + def testZoneInFarDates(self): + # Checks time zone in dates <= 1900 or >= 2038 + dt1 = DateTime('2040/01/30 14:33 GMT+1') + dt2 = DateTime('2040/01/30 11:33 GMT-2') + self.assertEqual(dt1.strftime('%d/%m/%Y %H:%M'), + dt2.strftime('%d/%m/%Y %H:%M')) + + def testStrftimeUnicode(self): + if IS_PYPY: + # Using Non-Ascii characters for strftime doesn't work in PyPy + # https://bitbucket.org/pypy/pypy/issues/2161/pypy3-strftime-does-not-accept-unicode + return + dt = DateTime('2002-05-02T08:00:00+00:00') + uchar = b'\xc3\xa0'.decode('utf-8') + ok = dt.strftime('Le %d/%m/%Y a %Hh%M').replace('a', uchar) + ustr = b'Le %d/%m/%Y \xc3\xa0 %Hh%M'.decode('utf-8') + self.assertEqual(dt.strftime(ustr), ok) + + def testTimezoneNaiveHandling(self): + # checks that we assign timezone naivity correctly + dt = DateTime('2007-10-04T08:00:00+00:00') + self.assertFalse(dt.timezoneNaive(), + 'error with naivity handling in __parse_iso8601') + dt = DateTime('2007-10-04T08:00:00Z') + self.assertFalse(dt.timezoneNaive(), + 'error with naivity handling in __parse_iso8601') + dt = DateTime('2007-10-04T08:00:00') + self.assertTrue(dt.timezoneNaive(), + 'error with naivity handling in __parse_iso8601') + dt = DateTime('2007/10/04 15:12:33.487618 GMT+1') + self.assertFalse(dt.timezoneNaive(), + 'error with naivity handling in _parse') + dt = DateTime('2007/10/04 15:12:33.487618') + self.assertTrue(dt.timezoneNaive(), + 'error with naivity handling in _parse') + dt = DateTime() + self.assertFalse(dt.timezoneNaive(), + 'error with naivity for current time') + s = '2007-10-04T08:00:00' + dt = DateTime(s) + self.assertEqual(s, dt.ISO8601()) + s = '2007-10-04T08:00:00+00:00' + dt = DateTime(s) + self.assertEqual(s, dt.ISO8601()) + + def 
testConversions(self): + sdt0 = datetime.now() # this is a timezone naive datetime + dt0 = DateTime(sdt0) + self.assertTrue(dt0.timezoneNaive(), (sdt0, dt0)) + sdt1 = datetime(2007, 10, 4, 18, 14, 42, 580, pytz.utc) + dt1 = DateTime(sdt1) + self.assertFalse(dt1.timezoneNaive(), (sdt1, dt1)) + + # convert back + sdt2 = dt0.asdatetime() + self.assertEqual(sdt0, sdt2) + sdt3 = dt1.utcdatetime() # this returns a timezone naive datetime + self.assertEqual(sdt1.hour, sdt3.hour) + + dt4 = DateTime('2007-10-04T10:00:00+05:00') + sdt4 = datetime(2007, 10, 4, 5, 0) + self.assertEqual(dt4.utcdatetime(), sdt4) + self.assertEqual(dt4.asdatetime(), sdt4.replace(tzinfo=pytz.utc)) + + dt5 = DateTime('2007-10-23 10:00:00 US/Eastern') + tz = pytz.timezone('US/Eastern') + sdt5 = datetime(2007, 10, 23, 10, 0, tzinfo=tz) + dt6 = DateTime(sdt5) + self.assertEqual(dt5.asdatetime(), sdt5) + self.assertEqual(dt6.asdatetime(), sdt5) + self.assertEqual(dt5, dt6) + self.assertEqual(dt5.asdatetime().tzinfo, tz) + self.assertEqual(dt6.asdatetime().tzinfo, tz) + + def testBasicTZ(self): + # psycopg2 supplies it's own tzinfo instances, with no `zone` attribute + tz = FixedOffset(60, 'GMT+1') + dt1 = datetime(2008, 8, 5, 12, 0, tzinfo=tz) + DT = DateTime(dt1) + dt2 = DT.asdatetime() + offset1 = dt1.tzinfo.utcoffset(dt1) + offset2 = dt2.tzinfo.utcoffset(dt2) + self.assertEqual(offset1, offset2) + + def testEDTTimezone(self): + # should be able to parse EDT timezones: see lp:599856. 
+ dt = DateTime("Mon, 28 Jun 2010 10:12:25 EDT") + self.assertEqual(dt.Day(), 'Monday') + self.assertEqual(dt.day(), 28) + self.assertEqual(dt.Month(), 'June') + self.assertEqual(dt.timezone(), 'GMT-4') + + def testParseISO8601(self): + parsed = DateTime()._parse_iso8601('2010-10-10') + self.assertEqual(parsed, (2010, 10, 10, 0, 0, 0, 'GMT+0000')) + + def test_interface(self): + from DateTime.interfaces import IDateTime + self.assertTrue(IDateTime.providedBy(DateTime())) + + def test_security(self): + dt = DateTime() + self.assertEqual(dt.__roles__, None) + self.assertEqual(dt.__allow_access_to_unprotected_subobjects__, 1) + + +def test_suite(): + import doctest + return unittest.TestSuite([ + unittest.makeSuite(DateTimeTests), + doctest.DocFileSuite('DateTime.txt', package='DateTime'), + doctest.DocFileSuite('pytz.txt', package='DateTime'), + ]) diff --git a/thesisenv/lib/python3.6/site-packages/ExtensionClass-4.4-py3.6.egg-info/PKG-INFO b/thesisenv/lib/python3.6/site-packages/ExtensionClass-4.4-py3.6.egg-info/PKG-INFO new file mode 100644 index 0000000..51683fe --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ExtensionClass-4.4-py3.6.egg-info/PKG-INFO @@ -0,0 +1,180 @@ +Metadata-Version: 2.1 +Name: ExtensionClass +Version: 4.4 +Summary: Metaclass for subclassable extension types +Home-page: https://github.com/zopefoundation/ExtensionClass +Author: Zope Foundation and Contributors +Author-email: zope-dev@zope.org +License: ZPL 2.1 +Description: ExtensionClass and ExtensionClass-related packages + ================================================== + + ExtensionClass + -------------- + + This package provides a metaclass that allows classes implemented in + extension modules to be subclassed in Python. Unless you need + ExtensionClasses for legacy applications (e.g. Zope 2), you probably + want to use Python's new-style classes (available since Python 2.2). 
+ + ComputedAttribute + ----------------- + + This package provides a way to attach attributes to an + ``ExtensionClass`` or instance that are computed by calling a + callable. This works very much like ``property`` known from new-style + classes, except that a ``ComputedAttribute`` can also be attached to + an instance and that it honours ExtensionClass semantics (which is + useful for retaining Acquisition wrappers, for example). + + MethodObject + ------------ + + This package lets you attach additional "methods" to ExtensionClasses. + These "methods" are actually implemented by subclassing the + ``MethodObject.Method`` class and implementing the ``__call__`` method + there. Instances of those classes will be bound to the instances + they're attached to and will receive that instance object as a first + parameter (after ``self``). + + + Changelog + ========= + + 4.4 (2018-10-05) + ---------------- + + - Fail if C extensions couldn't be compiled on compatible platforms. + + - Add Appveyor configuration to automate building Windows eggs + + - Add support for Python 3.7. + + - Fix getting attributes that are data descriptors in the Python + implementation. + + - Reach and automatically maintain 100% test coverage. + + 4.3.0 (2017-02-22) + ------------------ + + - Drop support for Python 3.3. + + - Remove unused C macro from `ExtensionClass.h`. + + - Fix C compilation under Windows. + + 4.2.1 (2017-02-02) + ------------------ + + - Fix problems with computed attribute and property wrapping. + + 4.2.0 (2017-01-18) + ------------------ + + - Port the C extension to Python 3. + + - Add support for Python 3.5 and 3.6. + + - Drop support for Python 2.6, 3.2. + + 4.1.2 (2015-04-03) + ------------------ + + - Fix calling of `__class_init__` hook by Python implementation. + + 4.1.1 (2015-03-20) + ------------------ + + - Avoid wrapping ``__parent__`` in pure-Python version. Matches + change made to C version in afb8488. See issue #3. 
+ + 4.1 (2014-12-18) + ------------------ + + - Housekeeping changes only. + + 4.1b1 (2014-11-12) + ------------------ + + - Added compatibility with Python 3.4. + + 4.1a1 (2013-05-04) + ------------------ + + - Added compatibility with Python 3.2 and 3.3 using the Python reference + implementation. + + - Add Python reference implementation. Used by default on PyPy. + + 4.0 (2013-02-24) + ---------------- + + - Added trove classifiers to project metadata. + + 4.0a1 (2011-12-13) + ------------------ + + - Don't create wrappers when retrieving parent pointers. + + 2.13.2 (2010-06-16) + ------------------- + + - LP #587760: Handle tp_basicsize correctly. + + 2.13.1 (2010-04-03) + ------------------- + + - Removed undeclared testing dependency on zope.testing. + + - Removed cruft in ``pickle/pickle.c`` related to removed ``__getnewargs__``. + + 2.13.0 (2010-02-22) + ------------------- + + - Avoid defining ``__getnewargs__`` as not to defeat the ZODB persistent + reference optimization. Refs https://bugs.launchpad.net/zope2/+bug/143657. + In order to take advantage of this optimization, you need to re-save your + objects. + + 2.12.0 (2010-02-14) + ------------------- + + - Removed old build artifacts and some metadata cleanup. + + - Added support for method cache in ExtensionClass. Patch contributed by + Yoshinori K. Okuji. See https://bugs.launchpad.net/zope2/+bug/486182. + + 2.11.3 (2009-08-02) + ------------------- + + - Further 64-bit fixes (Python 2.4 compatibility). + + 2.11.2 (2009-08-02) + ------------------- + + - Fixed 64-bit compatibility issues for Python 2.5.x / 2.6.x. See + http://www.python.org/dev/peps/pep-0353/ for details. + + 2.11.1 (2009-02-19) + ------------------- + + - Initial egg release. 
+ +Platform: UNKNOWN +Classifier: Development Status :: 6 - Mature +Classifier: Environment :: Web Environment +Classifier: Framework :: Zope2 +Classifier: License :: OSI Approved :: Zope Public License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Provides-Extra: test diff --git a/thesisenv/lib/python3.6/site-packages/ExtensionClass-4.4-py3.6.egg-info/SOURCES.txt b/thesisenv/lib/python3.6/site-packages/ExtensionClass-4.4-py3.6.egg-info/SOURCES.txt new file mode 100644 index 0000000..4020994 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ExtensionClass-4.4-py3.6.egg-info/SOURCES.txt @@ -0,0 +1,30 @@ +.coveragerc +.gitignore +CHANGES.rst +COPYRIGHT.txt +LICENSE.txt +MANIFEST.in +README.rst +buildout.cfg +pip-delete-this-directory.txt +setup.cfg +setup.py +tox.ini +src/ComputedAttribute/_ComputedAttribute.c +src/ComputedAttribute/__init__.py +src/ComputedAttribute/tests.py +src/ExtensionClass/ExtensionClass.h +src/ExtensionClass/_ExtensionClass.c +src/ExtensionClass/__init__.py +src/ExtensionClass/_compat.h +src/ExtensionClass/tests.py +src/ExtensionClass.egg-info/PKG-INFO +src/ExtensionClass.egg-info/SOURCES.txt +src/ExtensionClass.egg-info/dependency_links.txt +src/ExtensionClass.egg-info/not-zip-safe +src/ExtensionClass.egg-info/requires.txt +src/ExtensionClass.egg-info/top_level.txt +src/ExtensionClass/pickle/pickle.c +src/MethodObject/_MethodObject.c +src/MethodObject/__init__.py +src/MethodObject/tests.py \ No newline 
at end of file diff --git a/thesisenv/lib/python3.6/site-packages/ExtensionClass-4.4-py3.6.egg-info/dependency_links.txt b/thesisenv/lib/python3.6/site-packages/ExtensionClass-4.4-py3.6.egg-info/dependency_links.txt new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ExtensionClass-4.4-py3.6.egg-info/dependency_links.txt @@ -0,0 +1 @@ + diff --git a/thesisenv/lib/python3.6/site-packages/ExtensionClass-4.4-py3.6.egg-info/installed-files.txt b/thesisenv/lib/python3.6/site-packages/ExtensionClass-4.4-py3.6.egg-info/installed-files.txt new file mode 100644 index 0000000..ad4e977 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ExtensionClass-4.4-py3.6.egg-info/installed-files.txt @@ -0,0 +1,27 @@ +../ComputedAttribute/_ComputedAttribute.c +../ComputedAttribute/_ComputedAttribute.cpython-36m-darwin.so +../ComputedAttribute/__init__.py +../ComputedAttribute/__pycache__/__init__.cpython-36.pyc +../ComputedAttribute/__pycache__/tests.cpython-36.pyc +../ComputedAttribute/tests.py +../ExtensionClass/ExtensionClass.h +../ExtensionClass/_ExtensionClass.c +../ExtensionClass/_ExtensionClass.cpython-36m-darwin.so +../ExtensionClass/__init__.py +../ExtensionClass/__pycache__/__init__.cpython-36.pyc +../ExtensionClass/__pycache__/tests.cpython-36.pyc +../ExtensionClass/_compat.h +../ExtensionClass/pickle/pickle.c +../ExtensionClass/tests.py +../MethodObject/_MethodObject.c +../MethodObject/_MethodObject.cpython-36m-darwin.so +../MethodObject/__init__.py +../MethodObject/__pycache__/__init__.cpython-36.pyc +../MethodObject/__pycache__/tests.cpython-36.pyc +../MethodObject/tests.py +PKG-INFO +SOURCES.txt +dependency_links.txt +not-zip-safe +requires.txt +top_level.txt diff --git a/thesisenv/lib/python3.6/site-packages/ExtensionClass-4.4-py3.6.egg-info/not-zip-safe b/thesisenv/lib/python3.6/site-packages/ExtensionClass-4.4-py3.6.egg-info/not-zip-safe new file mode 100644 index 0000000..8b13789 --- /dev/null +++ 
b/thesisenv/lib/python3.6/site-packages/ExtensionClass-4.4-py3.6.egg-info/not-zip-safe @@ -0,0 +1 @@ + diff --git a/thesisenv/lib/python3.6/site-packages/ExtensionClass-4.4-py3.6.egg-info/requires.txt b/thesisenv/lib/python3.6/site-packages/ExtensionClass-4.4-py3.6.egg-info/requires.txt new file mode 100644 index 0000000..c7db00f --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ExtensionClass-4.4-py3.6.egg-info/requires.txt @@ -0,0 +1,3 @@ + +[test] +zope.testrunner diff --git a/thesisenv/lib/python3.6/site-packages/ExtensionClass-4.4-py3.6.egg-info/top_level.txt b/thesisenv/lib/python3.6/site-packages/ExtensionClass-4.4-py3.6.egg-info/top_level.txt new file mode 100644 index 0000000..8464ce5 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ExtensionClass-4.4-py3.6.egg-info/top_level.txt @@ -0,0 +1,3 @@ +ComputedAttribute +ExtensionClass +MethodObject diff --git a/thesisenv/lib/python3.6/site-packages/ExtensionClass/ExtensionClass.h b/thesisenv/lib/python3.6/site-packages/ExtensionClass/ExtensionClass.h new file mode 100644 index 0000000..4b7e84d --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ExtensionClass/ExtensionClass.h @@ -0,0 +1,247 @@ +/***************************************************************************** + + Copyright (c) 1996-2002 Zope Foundation and Contributors. + All Rights Reserved. + + This software is subject to the provisions of the Zope Public License, + Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+ THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED + WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED + WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS + FOR A PARTICULAR PURPOSE + + ****************************************************************************/ + +/* + + $Id$ + + Extension Class Definitions + + Implementing base extension classes + + A base extension class is implemented in much the same way that an + extension type is implemented, except: + + - The include file, 'ExtensionClass.h', must be included. + + - The type structure is declared to be of type + 'PyExtensionClass', rather than of type 'PyTypeObject'. + + - The type structure has an additional member that must be defined + after the documentation string. This extra member is a method chain + ('PyMethodChain') containing a linked list of method definition + ('PyMethodDef') lists. Method chains can be used to implement + method inheritance in C. Most extensions don't use method chains, + but simply define method lists, which are null-terminated arrays + of method definitions. A macro, 'METHOD_CHAIN' is defined in + 'ExtensionClass.h' that converts a method list to a method chain. + (See the example below.) + + - Module functions that create new instances must be replaced by an + '__init__' method that initializes, but does not create storage for + instances. + + - The extension class must be initialized and exported to the module + with:: + + PyExtensionClass_Export(d,"name",type); + + where 'name' is the module name and 'type' is the extension class + type object. + + Attribute lookup + + Attribute lookup is performed by calling the base extension class + 'getattr' operation for the base extension class that includes C + data, or for the first base extension class, if none of the base + extension classes include C data. 
'ExtensionClass.h' defines a + macro 'Py_FindAttrString' that can be used to find an object's + attributes that are stored in the object's instance dictionary or + in the object's class or base classes:: + + v = Py_FindAttrString(self,name); + + In addition, a macro is provided that replaces 'Py_FindMethod' + calls with logic to perform the same sort of lookup that is + provided by 'Py_FindAttrString'. + + Linking + + The extension class mechanism was designed to be useful with + dynamically linked extension modules. Modules that implement + extension classes do not have to be linked against an extension + class library. The macro 'PyExtensionClass_Export' imports the + 'ExtensionClass' module and uses objects imported from this module + to initialize an extension class with necessary behavior. + +*/ + +#ifndef EXTENSIONCLASS_H +#define EXTENSIONCLASS_H + +#include "Python.h" +#include "import.h" + +/* Declarations for objects of type ExtensionClass */ + +#define EC PyTypeObject +#define PyExtensionClass PyTypeObject + +#define EXTENSIONCLASS_BINDABLE_FLAG 1 << 2 +#define EXTENSIONCLASS_NOINSTDICT_FLAG 1 << 5 + +typedef struct { + PyObject_HEAD +} _emptyobject; + +static struct ExtensionClassCAPIstruct { + +/***************************************************************************** + + WARNING: THIS STRUCT IS PRIVATE TO THE EXTENSION CLASS INTERFACE + IMPLEMENTATION AND IS SUBJECT TO CHANGE !!! 
+ + *****************************************************************************/ + + + PyObject *(*EC_findiattrs_)(PyObject *self, char *cname); + int (*PyExtensionClass_Export_)(PyObject *dict, char *name, + PyTypeObject *typ); + PyObject *(*PyECMethod_New_)(PyObject *callable, PyObject *inst); + PyExtensionClass *ECBaseType_; + PyExtensionClass *ECExtensionClassType_; +} *PyExtensionClassCAPI = NULL; + +#define ECBaseType (PyExtensionClassCAPI->ECBaseType_) +#define ECExtensionClassType (PyExtensionClassCAPI->ECExtensionClassType_) + +/* Following are macros that are needed or useful for defining extension + classes: + */ + +/* This macro redefines Py_FindMethod to do attribute for an attribute + name given by a C string lookup using extension class meta-data. + This is used by older getattr implementations. + + This macro is used in base class implementations of tp_getattr to + lookup methods or attributes that are not managed by the base type + directly. The macro is generally used to search for attributes + after other attribute searches have failed. + + Note that in Python 1.4, a getattr operation may be provided that + uses an object argument. Classes that support this new operation + should use Py_FindAttr. + */ + +#define EC_findiattrs (PyExtensionClassCAPI->EC_findiattrs_) + +#define Py_FindMethod(M,SELF,NAME) (EC_findiattrs((SELF),(NAME))) + +/* Do method or attribute lookup for an attribute name given by a C + string using extension class meta-data. + + This macro is used in base class implementations of tp_getattro to + lookup methods or attributes that are not managed by the base type + directly. The macro is generally used to search for attributes + after other attribute searches have failed. + + Note that in Python 1.4, a getattr operation may be provided that + uses an object argument. Classes that support this new operation + should use Py_FindAttr. 
+ */ +#define Py_FindAttrString(SELF,NAME) (EC_findiattrs((SELF),(NAME))) + +/* Do method or attribute lookup using extension class meta-data. + + This macro is used in base class implementations of tp_getattr to + lookup methods or attributes that are not managed by the base type + directly. The macro is generally used to search for attributes + after other attribute searches have failed. */ +#define Py_FindAttr (ECBaseType->tp_getattro) + +/* Do attribute assignment for an attribute. + + This macro is used in base class implementations of tp_setattro to + set attributes that are not managed by the base type directly. The + macro is generally used to assign attributes after other attribute + attempts to assign attributes have failed. + */ +#define PyEC_SetAttr(SELF,NAME,V) (ECBaseType->tp_setattro(SELF, NAME, V)) + + +/* Convert a method list to a method chain. */ +#define METHOD_CHAIN(DEF) (traverseproc)(DEF) + +/* The following macro checks whether a type is an extension class: */ +#define PyExtensionClass_Check(TYPE) \ + PyObject_TypeCheck((PyObject*)(TYPE), ECExtensionClassType) + +/* The following macro checks whether an instance is an extension instance: */ +#define PyExtensionInstance_Check(INST) \ + PyObject_TypeCheck(Py_TYPE(INST), ECExtensionClassType) + +#define CHECK_FOR_ERRORS(MESS) + +/* The following macro can be used to define an extension base class + that only provides method and that is used as a pure mix-in class. */ +#define PURE_MIXIN_CLASS(NAME,DOC,METHODS) \ +static PyExtensionClass NAME ## Type = { PyVarObject_HEAD_INIT(NULL, 0) # NAME, \ + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \ + 0 , DOC, (traverseproc)METHODS, } + +/* The following macros provide limited access to extension-class + method facilities. */ + +/* Test for an ExtensionClass method: */ +#define PyECMethod_Check(O) PyMethod_Check((O)) + +/* Create a method object that wraps a callable object and an + instance. 
Note that if the callable object is an extension class + method, then the new method will wrap the callable object that is + wrapped by the extension class method. Also note that if the + callable object is an extension class method with a reference + count of 1, then the callable object will be rebound to the + instance and returned with an incremented reference count. + */ +#define PyECMethod_New(CALLABLE, INST) \ + PyExtensionClassCAPI->PyECMethod_New_((CALLABLE),(INST)) + +/* Return the instance that is bound by an extension class method. */ +#define PyECMethod_Self(M) (PyMethod_Check((M)) ? PyMethod_GET_SELF(M) : NULL) + +/* Check whether an object has an __of__ method for returning itself + in the context of it's container. */ +#define has__of__(O) (PyExtensionInstance_Check(O) && Py_TYPE(O)->tp_descr_get != NULL) + +/* The following macros are used to check whether an instance + or a class' instanses have instance dictionaries: */ +#define HasInstDict(O) (_PyObject_GetDictPtr(O) != NULL) + +#define ClassHasInstDict(C) ((C)->tp_dictoffset > 0) + +/* Get an object's instance dictionary. Use with caution */ +#define INSTANCE_DICT(inst) (_PyObject_GetDictPtr(O)) + +/* Test whether an ExtensionClass, S, is a subclass of ExtensionClass C. */ +#define ExtensionClassSubclass_Check(S,C) PyType_IsSubtype((S), (C)) + +/* Test whether an ExtensionClass instance , I, is a subclass of + ExtensionClass C. */ +#define ExtensionClassSubclassInstance_Check(I,C) PyObject_TypeCheck((I), (C)) + + +/* Export an Extension Base class in a given module dictionary with a + given name and ExtensionClass structure. + */ + +#define PyExtensionClass_Export(D,N,T) \ + if (! 
ExtensionClassImported || \ + PyExtensionClassCAPI->PyExtensionClass_Export_((D),(N),&(T)) < 0) return NULL; + + +#define ExtensionClassImported \ + ((PyExtensionClassCAPI != NULL) || \ + (PyExtensionClassCAPI = PyCapsule_Import("ExtensionClass.CAPI2", 0))) + +#endif /* EXTENSIONCLASS_H */ diff --git a/thesisenv/lib/python3.6/site-packages/ExtensionClass/_ExtensionClass.c b/thesisenv/lib/python3.6/site-packages/ExtensionClass/_ExtensionClass.c new file mode 100644 index 0000000..0750b2e --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ExtensionClass/_ExtensionClass.c @@ -0,0 +1,1054 @@ +/* + + Copyright (c) 2003 Zope Foundation and Contributors. + All Rights Reserved. + + This software is subject to the provisions of the Zope Public License, + Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. + THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED + WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED + WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS + FOR A PARTICULAR PURPOSE. 
+ +*/ +static char _extensionclass_module_documentation[] = +"ExtensionClass\n" +"\n" +"$Id$\n" +; + +#include "ExtensionClass/ExtensionClass.h" +#include "_compat.h" +#define EC PyTypeObject + +static PyObject *str__of__, *str__get__, *str__class_init__, *str__init__; +static PyObject *str__bases__, *str__mro__, *str__new__, *str__parent__; + +#define OBJECT(O) ((PyObject *)(O)) +#define TYPE(O) ((PyTypeObject *)(O)) + +static PyTypeObject ExtensionClassType; +static PyTypeObject BaseType; + +static PyObject * +of_get(PyObject *self, PyObject *inst, PyObject *cls) +{ + /* Descriptor slot function that calls __of__ */ + if (inst && PyExtensionInstance_Check(inst)) + return PyObject_CallMethodObjArgs(self, str__of__, inst, NULL); + + Py_INCREF(self); + return self; +} + +PyObject * +Base_getattro(PyObject *obj, PyObject *name) +{ + PyTypeObject *tp = Py_TYPE(obj); + PyObject *descr = NULL; + PyObject *res = NULL; + descrgetfunc f; + PyObject **dictptr; + + if (!NATIVE_CHECK(name)) { +#ifndef PY3K +#ifdef Py_USING_UNICODE + /* The Unicode to string conversion is done here because the + existing tp_setattro slots expect a string object as name + and we wouldn't want to break those. 
*/ + if (PyUnicode_Check(name)) { + name = PyUnicode_AsEncodedString(name, NULL, NULL); + if (name == NULL) + return NULL; + } + else +#endif +#endif + { + PyErr_Format(PyExc_TypeError, + "attribute name must be string, not '%.200s'", + Py_TYPE(name)->tp_name); + return NULL; + } + } + else + Py_INCREF(name); + + if (tp->tp_dict == NULL) { + if (PyType_Ready(tp) < 0) + goto done; + } + + descr = _PyType_Lookup(tp, name); + Py_XINCREF(descr); + + f = NULL; + if (descr != NULL && HAS_TP_DESCR_GET(descr)) { + f = descr->ob_type->tp_descr_get; + if (f != NULL && PyDescr_IsData(descr)) { + res = f(descr, obj, (PyObject *)obj->ob_type); + Py_DECREF(descr); + goto done; + } + } + + dictptr = _PyObject_GetDictPtr(obj); + + if (dictptr && *dictptr) { + Py_INCREF(*dictptr); + res = PyDict_GetItem(*dictptr, name); + if (res != NULL) { + Py_INCREF(res); + Py_XDECREF(descr); + Py_DECREF(*dictptr); + + /* CHANGED! If the tp_descr_get of res is of_get, then call it. */ + if (PyObject_TypeCheck(Py_TYPE(res), &ExtensionClassType)) { + if (Py_TYPE(res)->tp_descr_get) { + int name_is_parent = PyObject_RichCompareBool(name, str__parent__, Py_EQ); + + if (name_is_parent == 0) { + PyObject *tres = Py_TYPE(res)->tp_descr_get(res, obj, (PyObject*)Py_TYPE(obj)); + Py_DECREF(res); + res = tres; + } + else if (name_is_parent == -1) { + PyErr_Clear(); + } + } + } + /* End of change. 
*/ + + goto done; + } + Py_DECREF(*dictptr); + } + + if (f != NULL) { + res = f(descr, obj, (PyObject *)Py_TYPE(obj)); + Py_DECREF(descr); + goto done; + } + + if (descr != NULL) { + res = descr; + /* descr was already increfed above */ + goto done; + } + +#ifdef PY3K + PyErr_Format(PyExc_AttributeError, + "'%.50s' object has no attribute '%U'", + tp->tp_name, name); +#else + PyErr_Format(PyExc_AttributeError, + "'%.50s' object has no attribute '%.400s'", + tp->tp_name, PyString_AS_STRING(name)); +#endif + + done: + Py_DECREF(name); + return res; + +} + +#include "pickle/pickle.c" + +static struct PyMethodDef Base_methods[] = { + PICKLE_METHODS + {NULL, (PyCFunction)NULL, 0, NULL} /* sentinel */ + }; + +static PyTypeObject BaseType = { + PyVarObject_HEAD_INIT(NULL, 0) + "ExtensionClass.Base", /* tp_name */ + 0, /* tp_basicsize */ + 0, /* tp_itemsize */ + 0, /* tp_dealloc */ + 0, /* tp_print */ + 0, /* tp_getattr */ + 0, /* tp_setattr */ + 0, /* tp_compare */ + 0, /* tp_repr */ + 0, /* tp_as_number */ + 0, /* tp_as_sequence */ + 0, /* tp_as_mapping */ + 0, /* tp_hash */ + 0, /* tp_call */ + 0, /* tp_str*/ + (getattrofunc)Base_getattro, /* tp_getattro */ + 0, /* tp_setattro */ + 0, /* tp_as_buffer */ + Py_TPFLAGS_DEFAULT | + Py_TPFLAGS_BASETYPE | + Py_TPFLAGS_HAVE_VERSION_TAG, /* tp_flags */ + "Standard ExtensionClass base type", /* tp_doc*/ + 0, /* tp_traverse */ + 0, /* tp_clear */ + 0, /* tp_richcompare */ + 0, /* tp_weaklistoffset */ + 0, /* tp_iter */ + 0, /* tp_iternext */ + Base_methods, /* tp_methods */ + 0, /* tp_members */ + 0, /* tp_getset */ + 0, /* tp_base */ + 0, /* tp_dict */ + 0, /* tp_descr_get */ + 0, /* tp_descr_set */ + 0, /* tp_dictoffset */ + 0, /* tp_init */ + 0, /* tp_alloc */ + 0, /* tp_new */ + 0, /* tp_free */ + 0, /* tp_is_gc */ +}; + +static PyTypeObject NoInstanceDictionaryBaseType = { + PyVarObject_HEAD_INIT(NULL, 0) + "ExtensionClass.NoInstanceDictionaryBase", /* tp_name */ + 0, /* tp_basicsize */ + 0, /* tp_itemsize */ + 0, /* 
tp_dealloc */ + 0, /* tp_print */ + 0, /* tp_getattr */ + 0, /* tp_setattr */ + 0, /* tp_compare */ + 0, /* tp_repr */ + 0, /* tp_as_number */ + 0, /* tp_as_sequence */ + 0, /* tp_as_mapping */ + 0, /* tp_hash */ + 0, /* tp_call */ + 0, /* tp_str*/ + 0, /* tp_getattro */ + 0, /* tp_setattro */ + 0, /* tp_as_buffer */ + Py_TPFLAGS_DEFAULT | + Py_TPFLAGS_BASETYPE | + Py_TPFLAGS_HAVE_VERSION_TAG, /* tp_flags */ + "Base types for subclasses without instance dictionaries", /* tp_doc*/ + 0, /* tp_traverse */ + 0, /* tp_clear */ + 0, /* tp_richcompare */ + 0, /* tp_weaklistoffset */ + 0, /* tp_iter */ + 0, /* tp_iternext */ + 0, /* tp_methods */ + 0, /* tp_members */ + 0, /* tp_getset */ + 0, /* tp_base */ + 0, /* tp_dict */ + 0, /* tp_descr_get */ + 0, /* tp_descr_set */ + 0, /* tp_dictoffset */ + 0, /* tp_init */ + 0, /* tp_alloc */ + 0, /* tp_new */ + 0, /* tp_free */ + 0, /* tp_is_gc */ +}; + +static PyObject * +EC_new(PyTypeObject *self, PyObject *args, PyObject *kw) +{ + PyObject *name, *bases=NULL, *dict=NULL; + PyObject *new_bases=NULL, *new_args, *result; + int have_base = 0, i; + + if (kw && PyObject_IsTrue(kw)) + { + PyErr_SetString(PyExc_TypeError, + "Keyword arguments are not supported"); + return NULL; + } + + if (!PyArg_ParseTuple(args, "O|O!O!", &name, + &PyTuple_Type, &bases, &PyDict_Type, &dict)) + return NULL; + + /* Make sure Base is in bases */ + if (bases) + { + for (i = 0; i < PyTuple_GET_SIZE(bases); i++) + { + if (PyObject_TypeCheck(PyTuple_GET_ITEM(bases, i), + &ExtensionClassType)) + { + have_base = 1; + break; + } + } + if (! 
have_base) + { + new_bases = PyTuple_New(PyTuple_GET_SIZE(bases) + 1); + if (new_bases == NULL) + return NULL; + for (i = 0; i < PyTuple_GET_SIZE(bases); i++) + { + Py_XINCREF(PyTuple_GET_ITEM(bases, i)); + PyTuple_SET_ITEM(new_bases, i, PyTuple_GET_ITEM(bases, i)); + } + Py_INCREF(OBJECT(&BaseType)); + PyTuple_SET_ITEM(new_bases, PyTuple_GET_SIZE(bases), + OBJECT(&BaseType)); + } + } + else + { + new_bases = Py_BuildValue("(O)", &BaseType); + if (new_bases == NULL) + return NULL; + } + + + + if (new_bases) + { + if (dict) + new_args = Py_BuildValue("OOO", name, new_bases, dict); + else + new_args = Py_BuildValue("OO", name, new_bases); + + Py_DECREF(new_bases); + + if (new_args == NULL) + return NULL; + + result = PyType_Type.tp_new(self, new_args, kw); + Py_DECREF(new_args); + } + else + { + result = PyType_Type.tp_new(self, args, kw); + + /* We didn't have to add Base, so maybe NoInstanceDictionaryBase + is in the bases. We need to check if it was. If it was, we + need to suppress instance dictionary support. 
*/ + for (i = 0; i < PyTuple_GET_SIZE(bases); i++) + { + if ( + PyObject_TypeCheck(PyTuple_GET_ITEM(bases, i), + &ExtensionClassType) + && + PyType_IsSubtype(TYPE(PyTuple_GET_ITEM(bases, i)), + &NoInstanceDictionaryBaseType) + ) + { + TYPE(result)->tp_dictoffset = 0; + break; + } + } + + } + + return result; +} + +/* set up __get__, if necessary */ +static int +EC_init_of(PyTypeObject *self) +{ + PyObject *__of__; + + __of__ = PyObject_GetAttr(OBJECT(self), str__of__); + if (__of__) + { + Py_DECREF(__of__); + if (self->tp_descr_get) + { + if (self->tp_descr_get != of_get) + { + PyErr_SetString(PyExc_TypeError, + "Can't mix __of__ and descriptors"); + return -1; + } + } + else + self->tp_descr_get = of_get; + } + else + { + PyErr_Clear(); + if (self->tp_descr_get == of_get) + self->tp_descr_get = NULL; + } + + return 0; +} + +static int +EC_init(PyTypeObject *self, PyObject *args, PyObject *kw) +{ + PyObject *__class_init__, *r; + PyObject* func = NULL; + + if (PyType_Type.tp_init(OBJECT(self), args, kw) < 0) + return -1; + + if (self->tp_dict != NULL) + { + r = PyDict_GetItemString(self->tp_dict, "__doc__"); + if ((r == Py_None) && + (PyDict_DelItemString(self->tp_dict, "__doc__") < 0) + ) + return -1; + } + + if (EC_init_of(self) < 0) + return -1; + + /* Call __class_init__ */ + __class_init__ = PyObject_GetAttr(OBJECT(self), str__class_init__); + if (__class_init__ == NULL) + { + PyErr_Clear(); + return 0; + } + + if (PyFunction_Check(__class_init__)) { + func = __class_init__; + } + else if (PyMethod_Check(__class_init__)) { + func = PyMethod_GET_FUNCTION(__class_init__); + } +#ifdef PY3K + else if (PyInstanceMethod_Check(__class_init__)) { + func = PyInstanceMethod_GET_FUNCTION(__class_init__); + } +#endif + + if (func == NULL) { + Py_DECREF(__class_init__); + PyErr_SetString(PyExc_TypeError, "Invalid type for __class_init__"); + return -1; + } + + r = PyObject_CallFunctionObjArgs(func, OBJECT(self), NULL); + Py_DECREF(__class_init__); + if (! 
r) + return -1; + Py_DECREF(r); + + return 0; +} + +static int +_is_bad_setattr_name(PyTypeObject* type, PyObject* as_bytes) +{ + char *cname = PyBytes_AS_STRING(as_bytes); + int l = PyBytes_GET_SIZE(as_bytes); + + if (l < 4) { + return 0; + } + + if (cname[0] == '_' && cname[1] == '_' && cname[l-1] == '_' && cname[l-2] == '_') { + char *c; + c = strchr(cname+2, '_'); + if (c != NULL && (c - cname) >= (l-2)) { + PyErr_Format ( + PyExc_TypeError, + "can't set attributes of built-in/extension type '%s' if the " + "attribute name begins and ends with __ and contains only " + "4 _ characters", + type->tp_name + ); + return 1; + } + } + return 0; +} + +static int +EC_setattro(PyTypeObject *type, PyObject *name, PyObject *value) +{ + /* We want to allow setting attributes of builti-in types, because + EC did in the past and there's code that relies on it. + + We can't really set slots though, but I don't think we need to. + There's no good way to spot slots. We could use a lame rule like + names that begin and end with __s and have just 4 _s smell too + much like slots. + + + */ + + if (! 
(type->tp_flags & Py_TPFLAGS_HEAPTYPE)) { + PyObject* as_bytes = convert_name(name); + if (as_bytes == NULL) { + return -1; + } + + if (_is_bad_setattr_name(type, as_bytes)) { + Py_DECREF(as_bytes); + return -1; + } + + if (PyObject_GenericSetAttr(OBJECT(type), name, value) < 0) { + Py_DECREF(as_bytes); + return -1; + } + } + else if (PyType_Type.tp_setattro(OBJECT(type), name, value) < 0) { + return -1; + } + + PyType_Modified(type); + return 0; +} + + +static PyObject * +inheritedAttribute(PyTypeObject *self, PyObject *name) +{ + PyObject* cls = NULL; + PyObject* res = NULL; + + cls = PyObject_CallFunction((PyObject*)&PySuper_Type, "OO", self, self); + if (cls == NULL) { + return NULL; + } + + res = PyObject_GetAttr(cls, name); + Py_DECREF(cls); + return res; +} + +static PyObject * +__basicnew__(PyObject *self) +{ + return PyObject_CallMethodObjArgs(self, str__new__, self, NULL); +} + +static int +append_new(PyObject *result, PyObject *v) +{ + int contains; + + if (v == OBJECT(&BaseType) || v == OBJECT(&PyBaseObject_Type)) + return 0; /* Don't add these until end */ + contains = PySequence_Contains(result, v); + if (contains != 0) + return contains; + return PyList_Append(result, v); +} + +static int +copy_mro(PyObject *mro, PyObject *result) +{ + PyObject *base; + int i, l; + + l = PyTuple_Size(mro); + if (l < 0) + return -1; + + for (i=0; i < l; i++) + { + base = PyTuple_GET_ITEM(mro, i); + if (append_new(result, base) < 0) + return -1; + } + return 0; +} + +static int +copy_classic(PyObject *base, PyObject *result) +{ + PyObject *bases, *basebase; + int i, l, err=-1; + + if (append_new(result, base) < 0) + return -1; + + bases = PyObject_GetAttr(base, str__bases__); + if (bases == NULL) + return -1; + + l = PyTuple_Size(bases); + if (l < 0) + goto end; + + for (i=0; i < l; i++) + { + basebase = PyTuple_GET_ITEM(bases, i); + if (copy_classic(basebase, result) < 0) + goto end; + } + + err = 0; + + end: + Py_DECREF(bases); + return err; +} + +static PyObject * 
+mro(PyTypeObject *self) +{ + /* Compute an MRO for a class */ + PyObject *result, *base, *basemro, *mro=NULL; + int i, l, err; + + result = PyList_New(0); + if (result == NULL) + return NULL; + if (PyList_Append(result, OBJECT(self)) < 0) + goto end; + l = PyTuple_Size(self->tp_bases); + if (l < 0) + goto end; + for (i=0; i < l; i++) + { + base = PyTuple_GET_ITEM(self->tp_bases, i); + if (base == NULL) + continue; + basemro = PyObject_GetAttr(base, str__mro__); + if (basemro != NULL) + { + /* Type */ + err = copy_mro(basemro, result); + Py_DECREF(basemro); + if (err < 0) + goto end; + } + else + { + PyErr_Clear(); + if (copy_classic(base, result) < 0) + goto end; + } + } + + if (self != &BaseType && PyList_Append(result, OBJECT(&BaseType)) < 0) + goto end; + + if (PyList_Append(result, OBJECT(&PyBaseObject_Type)) < 0) + goto end; + + l = PyList_GET_SIZE(result); + mro = PyTuple_New(l); + if (mro == NULL) + goto end; + + for (i=0; i < l; i++) + { + Py_INCREF(PyList_GET_ITEM(result, i)); + PyTuple_SET_ITEM(mro, i, PyList_GET_ITEM(result, i)); + } + + end: + Py_DECREF(result); + return mro; +} + +static struct PyMethodDef EC_methods[] = { + {"__basicnew__", (PyCFunction)__basicnew__, METH_NOARGS, + "Create a new empty object"}, + {"inheritedAttribute", (PyCFunction)inheritedAttribute, METH_O, + "Look up an inherited attribute"}, + {"mro", (PyCFunction)mro, METH_NOARGS, + "Compute an mro using the 'encalsulated base' scheme"}, + {NULL, (PyCFunction)NULL, 0, NULL} /* sentinel */ + }; + +static PyTypeObject ExtensionClassType = { + PyVarObject_HEAD_INIT(NULL, 0) + "ExtensionClass.ExtensionClass", /* tp_name */ + 0, /* tp_basicsize */ + 0, /* tp_itemsize */ + 0, /* tp_dealloc */ + 0, /* tp_print */ + 0, /* tp_getattr */ + 0, /* tp_setattr */ + 0, /* tp_compare */ + 0, /* tp_repr */ + 0, /* tp_as_number */ + 0, /* tp_as_sequence */ + 0, /* tp_as_mapping */ + 0, /* tp_hash */ + 0, /* tp_call */ + 0, /* tp_str*/ + 0, /* tp_getattro */ + (setattrofunc)EC_setattro, /* 
tp_setattro */ + 0, /* tp_as_buffer */ + Py_TPFLAGS_DEFAULT | + Py_TPFLAGS_HAVE_GC | + Py_TPFLAGS_BASETYPE | + Py_TPFLAGS_HAVE_VERSION_TAG, /* tp_flags */ + "Meta-class for extension classes", /* tp_doc*/ + 0, /* tp_traverse */ + 0, /* tp_clear */ + 0, /* tp_richcompare */ + 0, /* tp_weaklistoffset */ + 0, /* tp_iter */ + 0, /* tp_iternext */ + EC_methods, /* tp_methods */ + 0, /* tp_members */ + 0, /* tp_getset */ + 0, /* tp_base */ + 0, /* tp_dict */ + 0, /* tp_descr_get */ + 0, /* tp_descr_set */ + 0, /* tp_dictoffset */ + (initproc)EC_init, /* tp_init */ + 0, /* tp_alloc */ + (newfunc)EC_new, /* tp_new */ + 0, /* tp_free */ + 0, /* tp_is_gc */ +}; + +static PyObject * +debug(PyObject *self, PyObject *o) +{ + Py_INCREF(Py_None); + return Py_None; +} + +static PyObject * +pmc_init_of(PyObject *self, PyObject *args) +{ + PyObject *o; + + if (! PyArg_ParseTuple(args, "O!", (PyObject *)&ExtensionClassType, &o)) + return NULL; + + if (EC_init_of((PyTypeObject *)o) < 0) + return NULL; + + Py_INCREF(Py_None); + return Py_None; +} + + +/* List of methods defined in the module */ + +static struct PyMethodDef ec_methods[] = { + {"debug", (PyCFunction)debug, METH_O, ""}, + {"pmc_init_of", (PyCFunction)pmc_init_of, METH_VARARGS, + "Initialize __get__ for classes that define __of__"}, + {NULL, (PyCFunction)NULL, 0, NULL} /* sentinel */ + }; + + +static PyObject * +EC_findiattrs_(PyObject *self, char *cname) +{ + PyObject *name, *r; + + name = NATIVE_FROM_STRING(cname); + if (name == NULL) + return NULL; + r = ECBaseType->tp_getattro(self, name); + Py_DECREF(name); + return r; +} + +static PyObject * +ec_new_for_custom_dealloc(PyTypeObject *type, PyObject *args, PyObject *kw) +{ + /* This is for EC's that have deallocs. For these, we need to + incref the type when we create an instance, because the deallocs + will decref the type. 
+ */ + + PyObject *r; + + r = PyType_GenericNew(type, args, kw); + if (r) + { + Py_INCREF(type); + } + return r; +} + +static int +ec_init(PyObject *self, PyObject *args, PyObject *kw) +{ + PyObject *r, *__init__; + + __init__ = PyObject_GetAttr(self, str__init__); + if (__init__ == NULL) + return -1; + + r = PyObject_Call(__init__, args, kw); + Py_DECREF(__init__); + if (r == NULL) + return -1; + + Py_DECREF(r); + return 0; +} + +static int +PyExtensionClass_Export_(PyObject *dict, char *name, PyTypeObject *typ) +{ + long ecflags = 0; + PyMethodDef *pure_methods = NULL, *mdef = NULL; + PyObject *m; + + if (typ->tp_flags == 0) + { + /* Old-style EC */ + + if (typ->tp_traverse) + { + /* ExtensionClasses stick there methods in the tp_traverse slot */ + mdef = (PyMethodDef *)typ->tp_traverse; + + if (typ->tp_basicsize <= sizeof(_emptyobject)) + /* Pure mixin. We want rebindable methods */ + pure_methods = mdef; + else + typ->tp_methods = mdef; + + typ->tp_traverse = NULL; + + /* Look for __init__ method */ + for (; mdef->ml_name; mdef++) + { + if (strcmp(mdef->ml_name, "__init__") == 0) + { + /* we have an old-style __init__, install a special slot */ + typ->tp_init = ec_init; + break; + } + } + } + + if (typ->tp_clear) + { + /* ExtensionClasses stick there flags in the tp_clear slot */ + ecflags = (long)(typ->tp_clear); + + /* Some old-style flags were set */ + + if ((ecflags & EXTENSIONCLASS_BINDABLE_FLAG) + && typ->tp_descr_get == NULL) + /* We have __of__-style binding */ + typ->tp_descr_get = of_get; + } + typ->tp_clear = NULL; + typ->tp_flags = Py_TPFLAGS_DEFAULT + | Py_TPFLAGS_BASETYPE; + + if (typ->tp_dealloc != NULL) + typ->tp_new = ec_new_for_custom_dealloc; + } + + Py_TYPE(typ) = ECExtensionClassType; + + if (ecflags & EXTENSIONCLASS_NOINSTDICT_FLAG) + typ->tp_base = &NoInstanceDictionaryBaseType; + else + typ->tp_base = &BaseType; + typ->tp_basicsize += typ->tp_base->tp_basicsize; + + if (typ->tp_new == NULL) + typ->tp_new = PyType_GenericNew; + + if 
(PyType_Ready(typ) < 0) + return -1; + + if (pure_methods) + { + /* We had pure methods. We want to be able to rebind these, so + we'll make them ordinary method wrappers around method descrs + */ + for (; pure_methods->ml_name; pure_methods++) + { + m = PyDescr_NewMethod(ECBaseType, pure_methods); + if (! m) + return -1; + #ifdef PY3K + m = PyInstanceMethod_New((PyObject*) m); + #else + m = PyMethod_New((PyObject *)m, NULL, (PyObject *)ECBaseType); + #endif + if (! m) + return -1; + if (PyDict_SetItemString(typ->tp_dict, pure_methods->ml_name, m) + < 0) + return -1; + } + PyType_Modified(typ); + } + else if (mdef && mdef->ml_name) + { + /* Blast, we have to stick __init__ in the dict ourselves + because PyType_Ready probably stuck a wrapper for ec_init in + instead. + */ + m = PyDescr_NewMethod(typ, mdef); + if (! m) + return -1; + if (PyDict_SetItemString(typ->tp_dict, mdef->ml_name, m) < 0) + return -1; + PyType_Modified(typ); + } + + if (PyMapping_SetItemString(dict, name, (PyObject*)typ) < 0) + return -1; + + return 0; +} + +PyObject * +PyECMethod_New_(PyObject *callable, PyObject *inst) +{ + if (! 
PyExtensionInstance_Check(inst)) + { + PyErr_SetString(PyExc_TypeError, + "Can't bind non-ExtensionClass instance."); + return NULL; + } + + if (PyMethod_Check(callable)) + { + if (callable->ob_refcnt == 1) + { + Py_XDECREF(((PyMethodObject*)callable)->im_self); + Py_INCREF(inst); + ((PyMethodObject*)callable)->im_self = inst; + Py_INCREF(callable); + return callable; + } + else { + #ifdef PY3K + return PyMethod_New(PyMethod_GET_FUNCTION(callable), inst); + #else + return PyMethod_New( + PyMethod_GET_FUNCTION(callable), + inst, + PyMethod_GET_CLASS(callable)); + #endif + } + } + else { + #ifdef PY3K + return PyMethod_New(callable, inst); + #else + return PyMethod_New(callable, inst, (PyObject*)(ECBaseType)); + #endif + } +} + +static struct ExtensionClassCAPIstruct +TrueExtensionClassCAPI = { + EC_findiattrs_, + PyExtensionClass_Export_, + PyECMethod_New_, + &BaseType, + &ExtensionClassType, +}; + +#ifdef PY3K +static struct PyModuleDef moduledef = +{ + PyModuleDef_HEAD_INIT, + "_ExtensionClass", /* m_name */ + _extensionclass_module_documentation, /* m_doc */ + -1, /* m_size */ + ec_methods, /* m_methods */ + NULL, /* m_reload */ + NULL, /* m_traverse */ + NULL, /* m_clear */ + NULL, /* m_free */ +}; +#endif + +static PyObject* +module_init(void) +{ + PyObject *m, *s; + + if (pickle_setup() < 0) { + return NULL; + } + +#define DEFINE_STRING(S) \ + if(! 
(str ## S = NATIVE_FROM_STRING(# S))) return NULL + + DEFINE_STRING(__of__); + DEFINE_STRING(__get__); + DEFINE_STRING(__class_init__); + DEFINE_STRING(__init__); + DEFINE_STRING(__bases__); + DEFINE_STRING(__mro__); + DEFINE_STRING(__new__); + DEFINE_STRING(__parent__); +#undef DEFINE_STRING + + PyExtensionClassCAPI = &TrueExtensionClassCAPI; + + Py_TYPE(&ExtensionClassType) = &PyType_Type; + ExtensionClassType.tp_base = &PyType_Type; + ExtensionClassType.tp_basicsize = PyType_Type.tp_basicsize; + ExtensionClassType.tp_traverse = PyType_Type.tp_traverse; + ExtensionClassType.tp_clear = PyType_Type.tp_clear; + + /* Initialize types: */ + if (PyType_Ready(&ExtensionClassType) < 0) + return NULL; + + Py_TYPE(&BaseType) = &ExtensionClassType; + BaseType.tp_base = &PyBaseObject_Type; + BaseType.tp_basicsize = PyBaseObject_Type.tp_basicsize; + BaseType.tp_new = PyType_GenericNew; + + if (PyType_Ready(&BaseType) < 0) + return NULL; + + Py_TYPE(&NoInstanceDictionaryBaseType) = &ExtensionClassType; + NoInstanceDictionaryBaseType.tp_base = &BaseType; + NoInstanceDictionaryBaseType.tp_basicsize = BaseType.tp_basicsize; + NoInstanceDictionaryBaseType.tp_new = PyType_GenericNew; + + if (PyType_Ready(&NoInstanceDictionaryBaseType) < 0) + return NULL; + + /* Create the module and add the functions */ +#ifdef PY3K + m = PyModule_Create(&moduledef); +#else + m = Py_InitModule3("_ExtensionClass", ec_methods, + _extensionclass_module_documentation); +#endif + + if (m == NULL) + return NULL; + + s = PyCapsule_New(PyExtensionClassCAPI, "ExtensionClass.CAPI2", NULL); + if (s == NULL) { + return NULL; + } + + if (PyModule_AddObject(m, "CAPI2", s) < 0) + return NULL; + + /* Add types: */ + if (PyModule_AddObject(m, "ExtensionClass", + (PyObject *)&ExtensionClassType) < 0) + return NULL; + if (PyModule_AddObject(m, "Base", (PyObject *)&BaseType) < 0) + return NULL; + + if (PyModule_AddObject(m, "NoInstanceDictionaryBase", + (PyObject *)&NoInstanceDictionaryBaseType) < 0) + return NULL; + 
+ return m; +} + +#ifdef PY3K +PyMODINIT_FUNC PyInit__ExtensionClass(void) +{ + return module_init(); +} +#else +PyMODINIT_FUNC init_ExtensionClass(void) +{ + module_init(); +} +#endif diff --git a/thesisenv/lib/python3.6/site-packages/ExtensionClass/_ExtensionClass.cpython-36m-darwin.so b/thesisenv/lib/python3.6/site-packages/ExtensionClass/_ExtensionClass.cpython-36m-darwin.so new file mode 100755 index 0000000..a6be9b3 Binary files /dev/null and b/thesisenv/lib/python3.6/site-packages/ExtensionClass/_ExtensionClass.cpython-36m-darwin.so differ diff --git a/thesisenv/lib/python3.6/site-packages/ExtensionClass/__init__.py b/thesisenv/lib/python3.6/site-packages/ExtensionClass/__init__.py new file mode 100644 index 0000000..e41922c --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ExtensionClass/__init__.py @@ -0,0 +1,343 @@ +############################################################################## +# +# Copyright (c) 2003 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""ExtensionClass + +Extension Class exists to support types derived from the old ExtensionType +meta-class that preceeded Python 2.2 and new-style classes. + +As a meta-class, ExtensionClass provides the following features: + +- Support for a class initialiser: + + >>> from ExtensionClass import ExtensionClass, Base + + >>> class C(Base): + ... def __class_init__(self): + ... print('class init called') + ... print(self.__name__) + ... def bar(self): + ... 
return 'bar called' + class init called + C + >>> c = C() + >>> int(c.__class__ is C) + 1 + >>> int(c.__class__ is type(c)) + 1 + +- Making sure that every instance of the meta-class has Base as a base class: + + >>> X = ExtensionClass('X', (), {}) + >>> Base in X.__mro__ + 1 + +- Provide an inheritedAttribute method for looking up attributes in + base classes: + + >>> class C2(C): + ... def bar(*a): + ... return C2.inheritedAttribute('bar')(*a), 42 + class init called + C2 + >>> o = C2() + >>> o.bar() + ('bar called', 42) + + This is for compatability with old code. New code should use super + instead. + +The base class, Base, exists mainly to support the __of__ protocol. +The __of__ protocol is similar to __get__ except that __of__ is called +when an implementor is retrieved from an instance as well as from a +class: + +>>> class O(Base): +... def __of__(*a): +... return a + +>>> o1 = O() +>>> o2 = O() +>>> C.o1 = o1 +>>> c.o2 = o2 +>>> c.o1 == (o1, c) +1 +>>> C.o1 == o1 +1 +>>> int(c.o2 == (o2, c)) +1 + +We accomplish this by making a class that implements __of__ a +descriptor and treating all descriptor ExtensionClasses this way. That +is, if an extension class is a descriptor, it's __get__ method will be +called even when it is retrieved from an instance. + +>>> class O(Base): +... def __get__(*a): +... return a +... 
+>>> o1 = O() +>>> o2 = O() +>>> C.o1 = o1 +>>> c.o2 = o2 +>>> int(c.o1 == (o1, c, type(c))) +1 +>>> int(C.o1 == (o1, None, type(c))) +1 +>>> int(c.o2 == (o2, c, type(c))) +1 +""" + +import inspect +import os +import platform +import sys + +if sys.version_info > (3, ): + import copyreg as copy_reg +else: # pragma: no cover + import copy_reg + +_IS_PYPY = platform.python_implementation() == 'PyPy' +_IS_PURE = 'PURE_PYTHON' in os.environ +C_EXTENSION = not (_IS_PYPY or _IS_PURE) + + +def of_get(self, inst, type_=None): + if not issubclass(type(type_), ExtensionClass): + return self + if inst is not None: + return self.__of__(inst) + return self + + +def pmc_init_of(cls): + # set up __get__ if __of__ is implemented + of = getattr(cls, '__of__', None) + if of is not None: + cls.__get__ = of_get + else: + get = getattr(cls, '__get__', None) + if (get is not None and + (get is of_get or getattr(get, '__func__', None) is of_get)): + del cls.__get__ + + +_Base = type('dummy', (), {}) +_NoInstanceDictionaryBase = type('dummy', (), {}) + + +def _add_classic_mro(mro, cls): + if cls not in mro: + mro.append(cls) + for base in cls.__bases__: + if base not in mro: + mro.append(base) + _add_classic_mro(mro, base) + + +class ExtensionClass(type): + + def __new__(cls, name, bases=(), attrs=None): + attrs = {} if attrs is None else attrs + # Make sure we have an ExtensionClass instance as a base + if (name != 'Base' and + not any(isinstance(b, ExtensionClass) for b in bases)): + bases += (_Base,) + if ('__slots__' not in attrs and + any(issubclass(b, _NoInstanceDictionaryBase) for b in bases)): + attrs['__slots__'] = [] + + cls = type.__new__(cls, name, bases, attrs) + + # Inherit docstring + if not cls.__doc__: + cls.__doc__ = super(cls, cls).__doc__ + + # set up __get__ if __of__ is implemented + pmc_init_of(cls) + + # call class init method + if hasattr(cls, '__class_init__'): + class_init = cls.__class_init__ + if hasattr(class_init, '__func__'): + class_init = 
class_init.__func__ + class_init(cls) + return cls + + def __basicnew__(self): + """Create a new empty object""" + return self.__new__(self) + + + def mro(self): + """Compute an mro using the 'encapsulated base' scheme""" + mro = [self] + for base in self.__bases__: + if hasattr(base, '__mro__'): + for c in base.__mro__: + if c in (_Base, _NoInstanceDictionaryBase, object): + continue + if c in mro: + continue + mro.append(c) + else: # pragma: no cover (python 2 only) + _add_classic_mro(mro, base) + + if _NoInstanceDictionaryBase in self.__bases__: + mro.append(_NoInstanceDictionaryBase) + elif self.__name__ != 'Base': + mro.append(_Base) + mro.append(object) + return mro + + def inheritedAttribute(self, name): + """Look up an inherited attribute""" + return getattr(super(self, self), name) + + def __setattr__(self, name, value): + if name not in ('__get__', '__doc__', '__of__'): + if (name.startswith('__') and name.endswith('__') and + name.count('_') == 4): + raise TypeError( + "can't set attributes of built-in/extension type '%s.%s' " + "if the attribute name begins and ends with __ and " + "contains only 4 _ characters" % + (self.__module__, self.__name__)) + return type.__setattr__(self, name, value) + + +# Base and object are always moved to the last two positions +# in a subclasses mro, no matter how they are declared in the +# hierarchy. This means the Base_* methods effectively don't have +# to care or worry about using super(): it's always object. + +def Base_getattro(self, name): + descr = None + + for base in type(self).__mro__: + if name in base.__dict__: + descr = base.__dict__[name] + break + + if descr is not None and inspect.isdatadescriptor(descr): + return descr.__get__(self, type(self)) + + try: + # Don't do self.__dict__ otherwise you get recursion. 
+ inst_dict = object.__getattribute__(self, '__dict__') + except AttributeError: + pass + else: + if name in inst_dict: + descr = inst_dict[name] + # If the tp_descr_get of res is of_get, then call it. + if name == '__parent__' or not isinstance(descr, Base): + return descr + + if descr is not None: + descr_get = getattr(descr, '__get__', None) + if descr_get is None: + return descr + + return descr_get(self, type(self)) + + raise AttributeError( + "'%.50s' object has not attribute '%s'", + type(self).__name__, name) + + +def _slotnames(self): + slotnames = copy_reg._slotnames(type(self)) + return [x for x in slotnames + if not x.startswith('_p_') and + not x.startswith('_v_')] + + +def Base__getstate__(self): + idict = getattr(self, '__dict__', None) + slotnames = _slotnames(self) + if idict is not None: + d = dict([x for x in idict.items() + if not x[0].startswith('_p_') and + not x[0].startswith('_v_')]) + else: + d = None + if slotnames: + s = {} + for slotname in slotnames: + value = getattr(self, slotname, self) + if value is not self: + s[slotname] = value + return d, s + return d + + +def Base__setstate__(self, state): + """ See IPersistent. 
+ """ + try: + inst_dict, slots = state + except: + inst_dict, slots = state, () + idict = getattr(self, '__dict__', None) + if inst_dict is not None: + if idict is None: + raise TypeError('No instance dict') # pragma no cover + idict.clear() + idict.update(inst_dict) + slotnames = _slotnames(self) + if slotnames: + for k, v in slots.items(): + setattr(self, k, v) + + +def Base__reduce__(self): + gna = getattr(self, '__getnewargs__', lambda: ()) + return (copy_reg.__newobj__, + (type(self),) + gna(), self.__getstate__()) + + +def Base__new__(cls, *args, **kw): + return object.__new__(cls) + + +Base = ExtensionClass("Base", (object, ), { + '__slots__': (), + '__getattribute__': Base_getattro, + '__getstate__': Base__getstate__, + '__setstate__': Base__setstate__, + '__reduce__': Base__reduce__, + '__new__': Base__new__, +}) + +_Base = Base + + +class NoInstanceDictionaryBase(Base): + __slots__ = () + + +_NoInstanceDictionaryBase = NoInstanceDictionaryBase + + +if C_EXTENSION: # pragma no cover + from ._ExtensionClass import * # NOQA + +# We always want to get the CAPI2 value (if possible) so that +# MethodObject and anything else using the PyExtensionClass_Export +# macro from ExtensionClass.h doesn't break with an AttributeError +try: + from ._ExtensionClass import CAPI2 +except ImportError: # pragma: no cover + pass diff --git a/thesisenv/lib/python3.6/site-packages/ExtensionClass/_compat.h b/thesisenv/lib/python3.6/site-packages/ExtensionClass/_compat.h new file mode 100644 index 0000000..9249333 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ExtensionClass/_compat.h @@ -0,0 +1,53 @@ +/***************************************************************************** + + Copyright (c) 2012 Zope Foundation and Contributors. + All Rights Reserved. + + This software is subject to the provisions of the Zope Public License, + Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+ THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED + WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED + WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS + FOR A PARTICULAR PURPOSE + + ****************************************************************************/ + +#ifndef PERSISTENT__COMPAT_H +#define PERSISTENT__COMPAT_H + +#include "Python.h" + +#if PY_MAJOR_VERSION >= 3 +#define PY3K +#endif + +#ifdef PY3K +#define INTERN PyUnicode_InternFromString +#define INTERN_INPLACE PyUnicode_InternInPlace +#define NATIVE_CHECK PyUnicode_Check +#define NATIVE_CHECK_EXACT PyUnicode_CheckExact +#define NATIVE_FROM_STRING PyUnicode_FromString +#define NATIVE_FROM_STRING_AND_SIZE PyUnicode_FromStringAndSize + +#define INT_FROM_LONG(x) PyLong_FromLong(x) +#define INT_CHECK(x) PyLong_Check(x) +#define INT_AS_LONG(x) PyLong_AS_LONG(x) + +#define HAS_TP_DESCR_GET(ob) 1 + +#else +#define INTERN PyString_InternFromString +#define INTERN_INPLACE PyString_InternInPlace +#define NATIVE_CHECK PyString_Check +#define NATIVE_CHECK_EXACT PyString_CheckExact +#define NATIVE_FROM_STRING PyString_FromString +#define NATIVE_FROM_STRING_AND_SIZE PyString_FromStringAndSize + +#define INT_FROM_LONG(x) PyInt_FromLong(x) +#define INT_CHECK(x) PyInt_Check(x) +#define INT_AS_LONG(x) PyInt_AS_LONG(x) + +#define HAS_TP_DESCR_GET(ob) PyType_HasFeature(Py_TYPE(ob), Py_TPFLAGS_HAVE_CLASS) +#endif + +#endif diff --git a/thesisenv/lib/python3.6/site-packages/ExtensionClass/pickle/pickle.c b/thesisenv/lib/python3.6/site-packages/ExtensionClass/pickle/pickle.c new file mode 100644 index 0000000..a3e829d --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ExtensionClass/pickle/pickle.c @@ -0,0 +1,432 @@ +/***************************************************************************** + + Copyright (c) 2001, 2002 Zope Foundation and Contributors. + All Rights Reserved. 
+ + This software is subject to the provisions of the Zope Public License, + Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. + THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED + WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED + WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS + FOR A PARTICULAR PURPOSE + +****************************************************************************/ + +/* Strings initialized by init_strings() below. */ +static PyObject *py___slotnames__, *copy_reg_slotnames, *__newobj__; +static PyObject *py___getnewargs__, *py___getstate__; + + +static int +pickle_setup(void) +{ + PyObject* copy_reg; + +#define INIT_STRING(S) if (!(py_ ## S = INTERN(#S))) return -1; + INIT_STRING(__slotnames__); + INIT_STRING(__getnewargs__); + INIT_STRING(__getstate__); +#undef INIT_STRING + +#ifdef PY3K + copy_reg = PyImport_ImportModule("copyreg"); +#else + copy_reg = PyImport_ImportModule("copy_reg"); +#endif + + if (!copy_reg) { + return -1; + } + + copy_reg_slotnames = PyObject_GetAttrString(copy_reg, "_slotnames"); + if (!copy_reg_slotnames) + { + Py_DECREF(copy_reg); + return -1; + } + + __newobj__ = PyObject_GetAttrString(copy_reg, "__newobj__"); + if (!__newobj__) + { + Py_DECREF(copy_reg); + return -1; + } + + return 0; +} + +static PyObject * pickle_slotnames(PyTypeObject *cls); +static PyObject * convert_name(PyObject *name); + + + +/****************************************************************************/ + + +static PyObject * +pickle_slotnames(PyTypeObject *cls) +{ + PyObject *slotnames; + + slotnames = PyDict_GetItem(cls->tp_dict, py___slotnames__); + if (slotnames) + { + int n = PyObject_Not(slotnames); + if (n < 0) + return NULL; + if (n) + slotnames = Py_None; + + Py_INCREF(slotnames); + return slotnames; + } + + slotnames = PyObject_CallFunctionObjArgs(copy_reg_slotnames, + (PyObject*)cls, NULL); + if (slotnames && !(slotnames == Py_None || 
PyList_Check(slotnames))) + { + PyErr_SetString(PyExc_TypeError, + "copy_reg._slotnames didn't return a list or None"); + Py_DECREF(slotnames); + return NULL; + } + + return slotnames; +} + +static PyObject * +pickle_copy_dict(PyObject *state) +{ + PyObject *copy, *key, *value; + char *ckey; + Py_ssize_t pos = 0; + + copy = PyDict_New(); + if (!copy) + return NULL; + + if (!state) + return copy; + + while (PyDict_Next(state, &pos, &key, &value)) + { + int is_special; +#ifdef PY3K + if (key && PyUnicode_Check(key)) + { + PyObject *converted = convert_name(key); + ckey = PyBytes_AS_STRING(converted); +#else + if (key && PyBytes_Check(key)) + { + ckey = PyBytes_AS_STRING(key); +#endif + is_special = (*ckey == '_' && + (ckey[1] == 'v' || ckey[1] == 'p') && + ckey[2] == '_'); +#ifdef PY3K + Py_DECREF(converted); +#endif + if (is_special) /* skip volatile and persistent */ + continue; + } + + if (PyObject_SetItem(copy, key, value) < 0) + goto err; + } + + return copy; +err: + Py_DECREF(copy); + return NULL; +} + + +static char pickle___getstate__doc[] = + "Get the object serialization state\n" + "\n" + "If the object has no assigned slots and has no instance dictionary, then \n" + "None is returned.\n" + "\n" + "If the object has no assigned slots and has an instance dictionary, then \n" + "the a copy of the instance dictionary is returned. The copy has any items \n" + "with names starting with '_v_' or '_p_' ommitted.\n" + "\n" + "If the object has assigned slots, then a two-element tuple is returned. \n" + "The first element is either None or a copy of the instance dictionary, \n" + "as described above. 
The second element is a dictionary with items \n" + "for each of the assigned slots.\n" + ; + +static PyObject * +pickle___getstate__(PyObject *self) +{ + PyObject *slotnames=NULL, *slots=NULL, *state=NULL; + PyObject **dictp; + int n=0; + + slotnames = pickle_slotnames(Py_TYPE(self)); + if (!slotnames) + return NULL; + + dictp = _PyObject_GetDictPtr(self); + if (dictp) + state = pickle_copy_dict(*dictp); + else + { + state = Py_None; + Py_INCREF(state); + } + + if (slotnames != Py_None) + { + int i; + + slots = PyDict_New(); + if (!slots) + goto end; + + for (i = 0; i < PyList_GET_SIZE(slotnames); i++) + { + PyObject *name, *value; + char *cname; + int is_special; + + name = PyList_GET_ITEM(slotnames, i); +#ifdef PY3K + if (PyUnicode_Check(name)) + { + PyObject *converted = convert_name(name); + cname = PyBytes_AS_STRING(converted); +#else + if (PyBytes_Check(name)) + { + cname = PyBytes_AS_STRING(name); +#endif + is_special = (*cname == '_' && + (cname[1] == 'v' || cname[1] == 'p') && + cname[2] == '_'); +#ifdef PY3K + Py_DECREF(converted); +#endif + if (is_special) /* skip volatile and persistent */ + { + continue; + } + } + + /* Unclear: Will this go through our getattr hook? 
*/ + value = PyObject_GetAttr(self, name); + if (value == NULL) + PyErr_Clear(); + else + { + int err = PyDict_SetItem(slots, name, value); + Py_DECREF(value); + if (err < 0) + goto end; + n++; + } + } + } + + if (n) + state = Py_BuildValue("(NO)", state, slots); + +end: + Py_XDECREF(slotnames); + Py_XDECREF(slots); + + return state; +} + +static int +pickle_setattrs_from_dict(PyObject *self, PyObject *dict) +{ + PyObject *key, *value; + Py_ssize_t pos = 0; + + if (!PyDict_Check(dict)) + { + PyErr_SetString(PyExc_TypeError, "Expected dictionary"); + return -1; + } + + while (PyDict_Next(dict, &pos, &key, &value)) + { + if (PyObject_SetAttr(self, key, value) < 0) + return -1; + } + return 0; +} + +static char pickle___setstate__doc[] = + "Set the object serialization state\n\n" + "The state should be in one of 3 forms:\n\n" + "- None\n\n" + " Ignored\n\n" + "- A dictionary\n\n" + " In this case, the object's instance dictionary will be cleared and \n" + " updated with the new state.\n\n" + "- A two-tuple with a string as the first element. 
\n\n" + " In this case, the method named by the string in the first element will\n" + " be called with the second element.\n\n" + " This form supports migration of data formats.\n\n" + "- A two-tuple with None or a Dictionary as the first element and\n" + " with a dictionary as the second element.\n\n" + " If the first element is not None, then the object's instance dictionary \n" + " will be cleared and updated with the value.\n\n" + " The items in the second element will be assigned as attributes.\n" + ; + +static PyObject * +pickle___setstate__(PyObject *self, PyObject *state) +{ + PyObject *slots=NULL; + + if (PyTuple_Check(state)) + { + if (!PyArg_ParseTuple(state, "OO:__setstate__", &state, &slots)) + return NULL; + } + + if (state != Py_None) + { + PyObject **dict; + PyObject *d_key, *d_value; + Py_ssize_t i; + + dict = _PyObject_GetDictPtr(self); + + if (!dict) + { + PyErr_SetString(PyExc_TypeError, + "this object has no instance dictionary"); + return NULL; + } + + if (!*dict) + { + *dict = PyDict_New(); + if (!*dict) + return NULL; + } + + PyDict_Clear(*dict); + + i = 0; + while (PyDict_Next(state, &i, &d_key, &d_value)) { + /* normally the keys for instance attributes are + interned. we should try to do that here. 
*/ + if (NATIVE_CHECK_EXACT(d_key)) { + Py_INCREF(d_key); + INTERN_INPLACE(&d_key); + Py_DECREF(d_key); + } + if (PyObject_SetItem(*dict, d_key, d_value) < 0) + return NULL; + } + } + + if (slots && pickle_setattrs_from_dict(self, slots) < 0) + return NULL; + + Py_INCREF(Py_None); + return Py_None; +} + +static char pickle___reduce__doc[] = + "Reduce an object to contituent parts for serialization\n" + ; + +static PyObject * +pickle___reduce__(PyObject *self) +{ + PyObject *args=NULL, *bargs=NULL, *state=NULL, *getnewargs=NULL; + int l, i; + + getnewargs = PyObject_GetAttr(self, py___getnewargs__); + if (getnewargs) + { + bargs = PyObject_CallFunctionObjArgs(getnewargs, NULL); + Py_DECREF(getnewargs); + if (!bargs) + return NULL; + l = PyTuple_Size(bargs); + if (l < 0) + goto end; + } + else + { + PyErr_Clear(); + l = 0; + } + + args = PyTuple_New(l+1); + if (args == NULL) + goto end; + + Py_INCREF(Py_TYPE(self)); + PyTuple_SET_ITEM(args, 0, (PyObject*)(Py_TYPE(self))); + for (i = 0; i < l; i++) + { + Py_INCREF(PyTuple_GET_ITEM(bargs, i)); + PyTuple_SET_ITEM(args, i+1, PyTuple_GET_ITEM(bargs, i)); + } + + state = PyObject_CallMethodObjArgs(self, py___getstate__, NULL); + if (!state) + goto end; + + state = Py_BuildValue("(OON)", __newobj__, args, state); + +end: + Py_XDECREF(bargs); + Py_XDECREF(args); + + return state; +} + +/* convert_name() returns a new reference to a string name + or sets an exception and returns NULL. +*/ + +static PyObject * +convert_name(PyObject *name) +{ +#ifdef Py_USING_UNICODE + /* The Unicode to string conversion is done here because the + existing tp_setattro slots expect a string object as name + and we wouldn't want to break those. 
*/ + if (PyUnicode_Check(name)) + { + name = PyUnicode_AsEncodedString(name, NULL, NULL); + } + else +#endif + if (!PyBytes_Check(name)) + { + PyErr_SetString(PyExc_TypeError, "attribute name must be a string"); + return NULL; + } + else + Py_INCREF(name); + return name; +} + +#define PICKLE_GETSTATE_DEF \ +{"__getstate__", (PyCFunction)pickle___getstate__, METH_NOARGS, \ + pickle___getstate__doc}, + +#define PICKLE_SETSTATE_DEF \ +{"__setstate__", (PyCFunction)pickle___setstate__, METH_O, \ + pickle___setstate__doc}, + +#define PICKLE_GETNEWARGS_DEF + +#define PICKLE_REDUCE_DEF \ +{"__reduce__", (PyCFunction)pickle___reduce__, METH_NOARGS, \ + pickle___reduce__doc}, + +#define PICKLE_METHODS PICKLE_GETSTATE_DEF PICKLE_SETSTATE_DEF \ + PICKLE_GETNEWARGS_DEF PICKLE_REDUCE_DEF diff --git a/thesisenv/lib/python3.6/site-packages/ExtensionClass/tests.py b/thesisenv/lib/python3.6/site-packages/ExtensionClass/tests.py new file mode 100644 index 0000000..88353e4 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ExtensionClass/tests.py @@ -0,0 +1,1037 @@ +############################################################################## +# +# Copyright (c) 2003 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. 
+# +############################################################################## + + +from doctest import DocTestSuite +import sys +import unittest + +from ExtensionClass import * # NOQA + + +def print_dict(d): + d = d.items() + print('{%s}' % (', '.join( + [('%r: %r' % (k, v)) for (k, v) in sorted(d)] + ))) + + +def test_mixing(): + """Test working with a classic class + + >>> class Classic: + ... def x(self): + ... return 42 + + >>> class O(Base): + ... def __of__(*a): + ... return a + + >>> class O2(Classic, O): + ... def __of__(*a): + ... return (O2.inheritedAttribute('__of__')(*a), + ... O2.inheritedAttribute('x')(a[0])) + + >>> class C(Base): + ... def __class_init__(cls): + ... print('class init called') + ... print(cls.__name__) + ... def bar(self): + ... return 'bar called' + class init called + C + + >>> c = C() + >>> o2 = O2() + >>> c.o2 = o2 + >>> int(c.o2 == ((o2, c), 42)) + 1 + + Test working with a new style + + >>> class Modern(object): + ... def x(self): + ... return 42 + + >>> class O2(Modern, O): + ... def __of__(*a): + ... return (O2.inheritedAttribute('__of__')(*a), + ... O2.inheritedAttribute('x')(a[0])) + + >>> o2 = O2() + >>> c.o2 = o2 + >>> int(c.o2 == ((o2, c), 42)) + 1 + """ + + +def test_class_creation_under_stress(): + """ + >>> numbers = [] + >>> for i in range(100): + ... class B(Base): + ... numbers.append(i) + >>> numbers == list(range(100)) + True + + >>> import gc + >>> x = gc.collect() + """ + + +def old_test_add(): + """test_add.py from old EC + + >>> class foo(Base): + ... def __add__(self,other): + ... print('add called') + + >>> foo()+foo() + add called + """ + + +def proper_error_on_deleattr(): + """ + Florent Guillaume wrote: + + ... + + Excellent. + Will it also fix this particularity of ExtensionClass: + + >>> class A(Base): + ... def foo(self): + ... self.gee + ... def bar(self): + ... del self.gee + + >>> a=A() + >>> a.foo() # doctest: +IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + ... 
+ AttributeError: 'A' object has no attribute 'gee' + + >>> a.bar() # doctest: +IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + ... + AttributeError: 'A' object has no attribute 'gee' + + I.e., the fact that KeyError is raised whereas a normal class would + raise AttributeError. + """ + + +def test_NoInstanceDictionaryBase(): + """ + >>> class B(NoInstanceDictionaryBase): pass + ... + >>> B().__dict__ # doctest: +IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + ... + AttributeError: ... + >>> class B(NoInstanceDictionaryBase): + ... __slots__ = ('a', 'b') + ... + >>> class BB(B): pass + ... + >>> b = BB() + >>> b.__dict__ # doctest: +IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + ... + AttributeError: ... + >>> b.a = 1 + >>> b.b = 2 + >>> b.a + 1 + >>> b.b + 2 + """ + + +def test__basicnew__(): + """ + >>> x = Simple.__basicnew__() + >>> x.__dict__ + {} + """ + + +def eqattrs(self, other, *attrs): + self_data = [getattr(self, a, None) for a in attrs] + other_data = [getattr(other, a, None) for a in attrs] + return self_data == other_data + + +class Simple(Base): + def __init__(self, name, **kw): + self.__name__ = name + self.__dict__.update(kw) + self._v_favorite_color = 'blue' + self._p_foo = 'bar' + + def __eq__(self, other): + return eqattrs(self, other, '__class__', *(self.__dict__.keys())) + + +def test_basic_pickling(): + """ + >>> x = Simple('x', aaa=1, bbb='foo') + + >>> x.__getnewargs__() # doctest: +IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + ... + AttributeError: ... 
+ + >>> print_dict(x.__getstate__()) + {'__name__': 'x', 'aaa': 1, 'bbb': 'foo'} + + >>> f, (c,), state = x.__reduce__() + >>> f.__name__ + '__newobj__' + >>> f.__module__ in ('copyreg', 'copy_reg') + True + >>> c.__name__ + 'Simple' + + >>> print_dict(state) + {'__name__': 'x', 'aaa': 1, 'bbb': 'foo'} + + >>> import pickle + >>> pickle.loads(pickle.dumps(x)) == x + 1 + >>> pickle.loads(pickle.dumps(x, 0)) == x + 1 + >>> pickle.loads(pickle.dumps(x, 1)) == x + 1 + >>> pickle.loads(pickle.dumps(x, 2)) == x + 1 + + >>> x.__setstate__({'z': 1}) + >>> x.__dict__ + {'z': 1} + """ + + +class Custom(Simple): + + def __new__(cls, x, y): + r = Base.__new__(cls) + r.x, r.y = x, y + return r + + def __init__(self, x, y): + self.a = 42 + + def __getnewargs__(self): + return self.x, self.y + + def __getstate__(self): + return self.a + + def __setstate__(self, a): + self.a = a + + +def test_pickling_w_overrides(): + """ + >>> x = Custom('x', 'y') + >>> x.a = 99 + + >>> (f, (c, ax, ay), a) = x.__reduce__() + >>> f.__name__ + '__newobj__' + >>> f.__module__ in ('copy_reg', 'copyreg') + True + >>> c.__name__ + 'Custom' + >>> ax, ay, a + ('x', 'y', 99) + + >>> import pickle + >>> pickle.loads(pickle.dumps(x)) == x + 1 + >>> pickle.loads(pickle.dumps(x, 0)) == x + 1 + >>> pickle.loads(pickle.dumps(x, 1)) == x + 1 + >>> pickle.loads(pickle.dumps(x, 2)) == x + 1 + """ + + +class Slotted(Base): + __slots__ = 's1', 's2', '_p_splat', '_v_eek' + + def __init__(self, s1, s2): + self.s1, self.s2 = s1, s2 + self._v_eek = 1 + self._p_splat = 2 + + +class SubSlotted(Slotted): + __slots__ = 's3', 's4' + + def __init__(self, s1, s2, s3): + Slotted.__init__(self, s1, s2) + self.s3 = s3 + + def __eq__(self, other): + return eqattrs(self, other, '__class__', 's1', 's2', 's3', 's4') + + +def test_pickling_w_slots_only(): + """ + >>> x = SubSlotted('x', 'y', 'z') + + >>> x.__getnewargs__() # doctest: +IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + ... + AttributeError: ... 
+ + >>> d, s = x.__getstate__() + >>> d + >>> print_dict(s) + {'s1': 'x', 's2': 'y', 's3': 'z'} + + >>> import pickle + >>> pickle.loads(pickle.dumps(x)) == x + 1 + >>> pickle.loads(pickle.dumps(x, 0)) == x + 1 + >>> pickle.loads(pickle.dumps(x, 1)) == x + 1 + >>> pickle.loads(pickle.dumps(x, 2)) == x + 1 + + >>> x.s4 = 'spam' + + >>> d, s = x.__getstate__() + >>> d + >>> print_dict(s) + {'s1': 'x', 's2': 'y', 's3': 'z', 's4': 'spam'} + + >>> pickle.loads(pickle.dumps(x)) == x + 1 + >>> pickle.loads(pickle.dumps(x, 0)) == x + 1 + >>> pickle.loads(pickle.dumps(x, 1)) == x + 1 + >>> pickle.loads(pickle.dumps(x, 2)) == x + 1 + """ + + +class SubSubSlotted(SubSlotted): + + def __init__(self, s1, s2, s3, **kw): + SubSlotted.__init__(self, s1, s2, s3) + self.__dict__.update(kw) + self._v_favorite_color = 'blue' + self._p_foo = 'bar' + + def __eq__(self, other): + return eqattrs(self, other, + '__class__', 's1', 's2', 's3', 's4', + *(self.__dict__.keys())) + + +def test_pickling_w_slots(): + """ + >>> x = SubSubSlotted('x', 'y', 'z', aaa=1, bbb='foo') + + >>> x.__getnewargs__() # doctest: +IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + ... + AttributeError: ... 
+ + >>> d, s = x.__getstate__() + >>> print_dict(d) + {'aaa': 1, 'bbb': 'foo'} + >>> print_dict(s) + {'s1': 'x', 's2': 'y', 's3': 'z'} + + >>> import pickle + >>> pickle.loads(pickle.dumps(x)) == x + 1 + >>> pickle.loads(pickle.dumps(x, 0)) == x + 1 + >>> pickle.loads(pickle.dumps(x, 1)) == x + 1 + >>> pickle.loads(pickle.dumps(x, 2)) == x + 1 + + >>> x.s4 = 'spam' + + >>> d, s = x.__getstate__() + >>> print_dict(d) + {'aaa': 1, 'bbb': 'foo'} + >>> print_dict(s) + {'s1': 'x', 's2': 'y', 's3': 'z', 's4': 'spam'} + + >>> pickle.loads(pickle.dumps(x)) == x + 1 + >>> pickle.loads(pickle.dumps(x, 0)) == x + 1 + >>> pickle.loads(pickle.dumps(x, 1)) == x + 1 + >>> pickle.loads(pickle.dumps(x, 2)) == x + 1 + + """ + + +def test_pickling_w_slots_w_empty_dict(): + """ + >>> x = SubSubSlotted('x', 'y', 'z') + + >>> x.__getnewargs__() # doctest: +IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + ... + AttributeError: ... + + >>> d, s = x.__getstate__() + >>> print_dict(d) + {} + >>> print_dict(s) + {'s1': 'x', 's2': 'y', 's3': 'z'} + + >>> import pickle + >>> pickle.loads(pickle.dumps(x)) == x + 1 + >>> pickle.loads(pickle.dumps(x, 0)) == x + 1 + >>> pickle.loads(pickle.dumps(x, 1)) == x + 1 + >>> pickle.loads(pickle.dumps(x, 2)) == x + 1 + + >>> x.s4 = 'spam' + + >>> d, s = x.__getstate__() + >>> print_dict(d) + {} + >>> print_dict(s) + {'s1': 'x', 's2': 'y', 's3': 'z', 's4': 'spam'} + + >>> pickle.loads(pickle.dumps(x)) == x + 1 + >>> pickle.loads(pickle.dumps(x, 0)) == x + 1 + >>> pickle.loads(pickle.dumps(x, 1)) == x + 1 + >>> pickle.loads(pickle.dumps(x, 2)) == x + 1 + """ + + +def test_setattr_on_extension_type(): + """ + >>> for name in 'x', '_x', 'x_', '__x_y__', '___x__', '__x___', '_x_': + ... setattr(Base, name, 1) + ... print(getattr(Base, name)) + ... delattr(Base, name) + ... 
print(getattr(Base, name, 0)) + 1 + 0 + 1 + 0 + 1 + 0 + 1 + 0 + 1 + 0 + 1 + 0 + 1 + 0 + + >>> Base.__foo__ = 1 # doctest: +IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + ... + TypeError: can't set attributes of built-in/extension type """ \ + """'ExtensionClass.Base' if the attribute name begins """ \ + """and ends with __ and contains only 4 _ characters + + >>> Base.__foo__ # doctest: +IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + ... + AttributeError: ... + + >>> try: + ... del Base.__foo__ + ... except (AttributeError, TypeError): # different on pypy + ... print('error') + error + """ + + +def test_mro(): + """ExtensionClass method-resolution order + + The EC MRO is chosen to maximize backward compatibility and + provide a model that is easy to reason about. The basic idea is: + + I'll call this the "encapsulated base" scheme. + + Consider: + + >>> class X(Base): + ... pass + >>> class Y(Base): + ... pass + >>> class Z(Base): + ... pass + + >>> class C(X, Y, Z): + ... def foo(self): + ... return 42 + + When we look up an attribute, we do the following: + + - Look in C's dictionary first. + + - Look up the attribute in X. We don't care how we get the + attribute from X. If X is a new-style-class, we use the new + algorithm. If X is a classic class, we use left-to-right + depth-first. If X is an nsEC, use the "encapsulated base" + algorithm. + + If we don't find the attribute in X, look in Y and then in Z, + using the same approach. + + This algorithm will produce backward compatible results, providing + the equivalent of left-to-right depth-first for nsECs and classic + classes. + + We'll actually do something less abstract. We'll use a simple + algorthm to merge the __mro__ of the base classes, computing an + __mro__ for classic classes using the left-to-right depth-first + algorithm. We'll basically lay the mros end-to-end left-to-right + and remove repeats, keeping the first occurence of each class. 
+ + >>> [c.__name__ for c in C.__mro__] + ['C', 'X', 'Y', 'Z', 'Base', 'object'] + + For backward-compatability's sake, we actually depart from the + above description a bit. We always put Base and object last in the + mro, as shown in the example above. The primary reason for this is + that object provides a do-nothing __init__ method. It is common + practice to mix a C-implemented base class that implements a few + methods with a Python class that implements those methods and + others. The idea is that the C implementation overrides selected + methods in C, so the C subclass is listed first. Unfortunately, + because all extension classes are required to subclass Base, and + thus, object, the C subclass brings along the __init__ object + from objects, which would hide any __init__ method provided by the + Python mix-in. + + Base and object are special in that they are implied by their meta + classes. For example, a new-style class always has object as an + ancestor, even if it isn't listed as a base: + + >>> O = type('O', (), {}) + >>> [c.__name__ for c in O.__bases__] + ['object'] + >>> [c.__name__ for c in O.__mro__] + ['O', 'object'] + + Similarly, Base is always an ancestor of an extension class: + + >>> E = ExtensionClass('E', (), {}) + >>> [c.__name__ for c in E.__bases__] + ['Base'] + >>> [c.__name__ for c in E.__mro__] + ['E', 'Base', 'object'] + + Base and object are generally added soley to get a particular meta + class. They aren't used to provide application functionality and + really shouldn't be considered when reasoning about where + attributes come from. They do provide some useful default + functionality and should be included at the end of the mro. + + Here are more examples: + + >>> from ExtensionClass import Base + + >>> class NA(object): + ... pass + >>> class NB(NA): + ... pass + >>> class NC(NA): + ... pass + >>> class ND(NB, NC): + ... 
pass + >>> [c.__name__ for c in ND.__mro__] + ['ND', 'NB', 'NC', 'NA', 'object'] + + >>> class EA(Base): + ... pass + >>> class EB(EA): + ... pass + >>> class EC(EA): + ... pass + >>> class ED(EB, EC): + ... pass + >>> [c.__name__ for c in ED.__mro__] + ['ED', 'EB', 'EA', 'EC', 'Base', 'object'] + + >>> class EE(ED, ND): + ... pass + >>> [c.__name__ for c in EE.__mro__] + ['EE', 'ED', 'EB', 'EA', 'EC', 'ND', 'NB', 'NC', 'NA', 'Base', 'object'] + + >>> class EF(ND, ED): + ... pass + >>> [c.__name__ for c in EF.__mro__] + ['EF', 'ND', 'NB', 'NC', 'NA', 'ED', 'EB', 'EA', 'EC', 'Base', 'object'] + + >>> class CA(object): + ... pass + >>> class CB(CA): + ... pass + >>> class CC(CA): + ... pass + >>> class CD(CB, CC): + ... pass + + >>> class ECD(Base, CD): + ... pass + >>> [c.__name__ for c in ECD.__mro__] + ['ECD', 'CD', 'CB', 'CC', 'CA', 'Base', 'object'] + + >>> class CDE(CD, Base): + ... pass + >>> [c.__name__ for c in CDE.__mro__] + ['CDE', 'CD', 'CB', 'CC', 'CA', 'Base', 'object'] + + >>> class CEND(CD, ED, ND): + ... pass + >>> [c.__name__ for c in CEND.__mro__] + ['CEND', 'CD', 'CB', 'CC', 'CA', """ \ + """'ED', 'EB', 'EA', 'EC', 'ND', 'NB', 'NC', 'NA', 'Base', 'object'] + """ + + +def test_avoiding___init__decoy_w_inheritedAttribute(): + """ + + >>> class Decoy(Base): + ... pass + + >>> class B(Base): + ... def __init__(self, a, b): + ... print('__init__ %s %s' % (a, b)) + + >>> class C(Decoy, B): + ... def __init__(self): + ... print('C init') + ... C.inheritedAttribute('__init__')(self, 1, 2) + + >>> x = C() + C init + __init__ 1 2 + """ + + +def test_of_not_called_when_not_accessed_through_EC_instance(): + """ + + >>> class Eek(Base): + ... def __of__(self, parent): + ... return self, parent + + If I define an EC instance as an attr of an ordinary class: + + >>> class O(object): + ... eek = Eek() + + >>> class C: + ... 
eek = Eek() + + I get the instance, without calling __of__, when I get it from + either tha class: + + >>> O.eek is O.__dict__['eek'] + True + + >>> C.eek is C.__dict__['eek'] + True + + or an instance of the class: + + >>> O().eek is O.__dict__['eek'] + True + + >>> C().eek is C.__dict__['eek'] + True + + If I define an EC instance as an attr of an extension class: + + >>> class E(Base): + ... eek = Eek() + + + I get the instance, without calling __of__, when I get it from + tha class: + + >>> E.eek is E.__dict__['eek'] + True + + But __of__ is called if I go through the instance: + + >>> e = E() + >>> e.eek == (E.__dict__['eek'], e) + True + """ + + +def test_inheriting___doc__(): + """Old-style ExtensionClass inherited __doc__ from base classes. + + >>> class E(Base): + ... "eek" + + >>> class EE(E): + ... pass + + >>> EE.__doc__ + 'eek' + + >>> EE().__doc__ + 'eek' + """ + + +def test___of___w_metaclass_instance(): + """When looking for extension class instances, need to handle meta classes + + >>> class C(Base): + ... pass + + >>> class O(Base): + ... def __of__(self, parent): + ... print('__of__ called on an O') + + >>> class M(ExtensionClass): + ... pass + + >>> X = M('X', (), {}) + >>> class S(X, O): + ... pass + + >>> c = C() + >>> c.s = S() + >>> c.s + __of__ called on an O + """ + + +def test___of__set_after_creation(): + """We may need to set __of__ after a class is created. + + Normally, in a class's __init__, the initialization code checks for + an __of__ method and, if it isn't already set, sets __get__. + + If a class is persistent and loaded from the database, we want + this to happen in __setstate__. The pmc_init_of function allws us + to do that. + + We'll create an extension class without a __of__. We'll also give + it a special meta class, just to make sure that this works with + funny metaclasses too: + + >>> import ExtensionClass + >>> class M(ExtensionClass.ExtensionClass): + ... "A meta class" + >>> def B__init__(self, name): + ... 
self.name = name + >>> def B__repr__(self): + ... return self.name + + >>> B = M('B', (ExtensionClass.Base, ), { + ... '__init__': B__init__, + ... '__repr__': B__repr__, + ... }) + + >>> B.__class__ is M + True + + >>> x = B('x') + >>> x.y = B('y') + >>> x.y + y + + We define a __of__ method for B after the fact: + + >>> def __of__(self, other): + ... print('__of__(%r, %r)' % (self, other)) + ... return self + + >>> B.__of__ = __of__ + + We see that this has no effect: + + >>> x.y + y + + Until we use pmc_init_of: + + >>> ExtensionClass.pmc_init_of(B) + >>> x.y + __of__(y, x) + y + + Note that there is no harm in calling pmc_init_of multiple times: + + >>> ExtensionClass.pmc_init_of(B) + >>> ExtensionClass.pmc_init_of(B) + >>> ExtensionClass.pmc_init_of(B) + >>> x.y + __of__(y, x) + y + + If we remove __of__, we'll go back to the behavior we had before: + + >>> del B.__of__ + >>> ExtensionClass.pmc_init_of(B) + >>> x.y + y + """ + + +def test_Basic_gc(): + """Test to make sure that EC instances participate in GC + + >>> from ExtensionClass import Base + >>> import gc + >>> class C1(Base): + ... pass + ... + >>> class C2(Base): + ... def __del__(self): + ... print('removed') + ... + >>> a=C1() + >>> a.b = C1() + >>> a.b.a = a + >>> a.b.c = C2() + >>> ignore = gc.collect() + >>> del a + >>> ignored = gc.collect() + removed + """ + + +def test__init__w_arg(): + """ + Traditionally Base's tp_new slot was set to PyType_GenericNew + which doesn't validate its arguments, so we need to support + that. + + >>> Base('foo', bar='baz') # doctest: +ELLIPSIS + + """ + +def test__parent__does_not_get_wrapped(): + """ + The issue at + https://github.com/zopefoundation/ExtensionClass/issues/3 + describes how commit afb8488 made the C implementation of + ExtensionClass.Base not wrap __parent__ objects, but the pure + python version was still doing so. Let's make sure that the behaviour + is consistent. + + >>> import ExtensionClass + >>> class I(ExtensionClass.Base): + ... + ... 
def __init__(self, id): + ... self.id = id + ... + ... def __of__(self,o): + ... return 'wrapped' + ... + ... def __repr__(self): + ... return self.id + ... + >>> x = I('a') + >>> x.__parent__ = I('b') + >>> x.__parent__ + b + """ + +def test_unbound_function_as___class_init___hook(): + """ + Zope patches an unbound function as a `__class_init__` hook onto `Persistent`; + let's make sure that gets called correctly. + + >>> def InitializeClass(cls): + ... print('InitializeClass called') + ... print(cls.__name__) + >>> class A(Base): + ... pass + >>> A.__class_init__ = InitializeClass + >>> class B(A): + ... pass + InitializeClass called + B + """ + + +class TestEffectivelyCooperativeBase(unittest.TestCase): + + def test___getattribute__cooperative(self): + # This is similar to test_mro() but covering a specific + # application. The fact that Base and object are *always* moved + # to the end of a given class's mro means that even though + # the actual implementation of Base.__getattribute__ is non-cooperative + # (i.e., in Python, using object.__getattribute__ directly, not super()), + # it doesn't matter: everything else in the hierarchy has already been called + class YouShallNotPass(Exception): + pass + + class NoAttributes(object): + def __getattribute__(self, name): + raise YouShallNotPass() + + class WithBaseAndNoAttributes(Base, NoAttributes): + pass + + # Even though it's declared this way... + self.assertEqual(WithBaseAndNoAttributes.__bases__, (Base, NoAttributes)) + # ... 
the effective value puts base at the end + self.assertEqual((Base, object), tuple(WithBaseAndNoAttributes.mro()[-2:])) + + # Therefore, we don't get AttributeError, we get our defined exception + self.assertRaises(YouShallNotPass, getattr, WithBaseAndNoAttributes(), 'a') + + +class Test_add_classic_mro(unittest.TestCase): + + def _callFUT(self, mro, cls): + from ExtensionClass import _add_classic_mro as FUT + return FUT(mro, cls) + + def test_w_empty_mro_newstyle_class_no_bases(self): + + class _Class(object): + pass + + mro = [] + self._callFUT(mro, _Class) + self.assertEqual(mro, [_Class, object]) + + def test_w_empty_mro_newstyle_class_w_bases(self): + + class _Base(object): + pass + + class _Derived(_Base): + pass + + mro = [] + self._callFUT(mro, _Derived) + self.assertEqual(mro, [_Derived, _Base, object]) + + def test_w_empty_mro_newstyle_class_w_diamond_inheritance(self): + + class _Base(object): + pass + + class _One(_Base): + pass + + class _Another(_Base): + pass + + class _Derived(_One, _Another): + pass + + mro = [] + self._callFUT(mro, _Derived) + self.assertEqual(mro, [_Derived, _One, _Base, object, _Another]) + + + def test_w_filled_mro_oldstyle_class_w_bases(self): + + class _Base: + pass + + class _Derived(_Base): + pass + + already = object() + mro = [already] + self._callFUT(mro, _Derived) + self.assertEqual( + mro, + [already, _Derived, _Base] + ([object] if sys.version_info[0] > 2 else [])) + + +class TestExtensionClass(unittest.TestCase): + + def test_compilation(self): + from ExtensionClass import _IS_PYPY + try: + from ExtensionClass import _ExtensionClass + except ImportError: # pragma: no cover + self.assertTrue(_IS_PYPY) + else: + self.assertTrue(hasattr(_ExtensionClass, 'CAPI2')) + + + def test_mro_classic_class(self): + + class _Base: + pass + + class _Derived(_Base, ExtensionClass): + pass + + + self.assertEqual(_Derived.__mro__, + (_Derived, _Base, ExtensionClass, type, object)) + + def test_class_init(self): + class 
_Derived(ExtensionClass): + init = 0 + def __class_init__(cls): + cls.init = 1 + Derived = _Derived('Derived', (), {}) + self.assertEqual(0, _Derived.init) + self.assertEqual(1, Derived.init) + +class TestBase(unittest.TestCase): + + def test_data_descriptor(self): + class Descr(object): + def __get__(self, inst, klass): + return (inst, klass) + def __set__(self, value): + "Does nothing, needed to be a data descriptor" + + class O(Base): + attr = Descr() + + o = O() + self.assertEqual(o.attr, (o, O)) + + +def test_suite(): + return unittest.TestSuite(( + DocTestSuite('ExtensionClass'), + DocTestSuite(), + unittest.defaultTestLoader.loadTestsFromName(__name__) + )) diff --git a/thesisenv/lib/python3.6/site-packages/MethodObject/_MethodObject.c b/thesisenv/lib/python3.6/site-packages/MethodObject/_MethodObject.c new file mode 100644 index 0000000..7b08a57 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/MethodObject/_MethodObject.c @@ -0,0 +1,102 @@ +/***************************************************************************** + + Copyright (c) 1996-2002 Zope Foundation and Contributors. + All Rights Reserved. + + This software is subject to the provisions of the Zope Public License, + Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+ THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED + WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED + WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS + FOR A PARTICULAR PURPOSE + + ****************************************************************************/ + +#include "ExtensionClass/ExtensionClass.h" +#include "ExtensionClass/_compat.h" + +static PyObject * +of(PyObject *self, PyObject *args) +{ + PyObject *inst; + + if (!PyArg_ParseTuple(args, "O", &inst)) { + return NULL; + } + + return PyECMethod_New(self, inst); +} + +struct PyMethodDef Method_methods[] = { + {"__of__",(PyCFunction)of,METH_VARARGS,""}, + {NULL, NULL} /* sentinel */ +}; + +static struct PyMethodDef methods[] = {{NULL, NULL}}; + +#ifdef PY3K +static struct PyModuleDef moduledef = +{ + PyModuleDef_HEAD_INIT, + "_MethodObject", /* m_name */ + "Method-object mix-in class module\n\n", /* m_doc */ + -1, /* m_size */ + methods, /* m_methods */ + NULL, /* m_reload */ + NULL, /* m_traverse */ + NULL, /* m_clear */ + NULL, /* m_free */ +}; +#endif + + +static PyObject* +module_init(void) +{ + PyObject *m, *d; + PURE_MIXIN_CLASS(Method, + "Base class for objects that want to be treated as methods\n" + "\n" + "The method class provides a method, __of__, that\n" + "binds an object to an instance. If a method is a subobject\n" + "of an extension-class instance, the the method will be bound\n" + "to the instance and when the resulting object is called, it\n" + "will call the method and pass the instance in addition to\n" + "other arguments. 
It is the responsibility of Method objects\n" + "to implement (or inherit) a __call__ method.\n", + Method_methods); + +#ifdef PY3K + m = PyModule_Create(&moduledef); +#else + m = Py_InitModule3( + "_MethodObject", + methods, + "Method-object mix-in class module\n\n"); +#endif + + if (m == NULL) { + return NULL; + } + + d = PyModule_GetDict(m); + if (d == NULL) { + return NULL; + } + + PyExtensionClass_Export(d, "Method", MethodType); + + return m; +} + +#ifdef PY3K +PyMODINIT_FUNC PyInit__MethodObject(void) +{ + return module_init(); +} +#else +PyMODINIT_FUNC init_MethodObject(void) +{ + module_init(); +} +#endif diff --git a/thesisenv/lib/python3.6/site-packages/MethodObject/_MethodObject.cpython-36m-darwin.so b/thesisenv/lib/python3.6/site-packages/MethodObject/_MethodObject.cpython-36m-darwin.so new file mode 100755 index 0000000..4a3b3a6 Binary files /dev/null and b/thesisenv/lib/python3.6/site-packages/MethodObject/_MethodObject.cpython-36m-darwin.so differ diff --git a/thesisenv/lib/python3.6/site-packages/MethodObject/__init__.py b/thesisenv/lib/python3.6/site-packages/MethodObject/__init__.py new file mode 100644 index 0000000..4d53d02 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/MethodObject/__init__.py @@ -0,0 +1,24 @@ +from types import MethodType + +from ExtensionClass import Base +from ExtensionClass import C_EXTENSION + + +class Method(Base): + """Base class for objects that want to be treated as methods + + The method class provides a method, __of__, that + binds an object to an instance. If a method is a subobject + of an extension-class instance, the the method will be bound + to the instance and when the resulting object is called, it + will call the method and pass the instance in addition to + other arguments. It is the responsibility of Method objects + to implement (or inherit) a __call__ method. 
+ """ + + def __of__(self, inst): + return MethodType(self, inst) + + +if C_EXTENSION: # pragma no cover + from ._MethodObject import * # NOQA diff --git a/thesisenv/lib/python3.6/site-packages/MethodObject/tests.py b/thesisenv/lib/python3.6/site-packages/MethodObject/tests.py new file mode 100644 index 0000000..f46bf96 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/MethodObject/tests.py @@ -0,0 +1,51 @@ +############################################################################## +# +# Copyright (c) 2003 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## + +import unittest + + +class TestMethodObject(unittest.TestCase): + + def test_compilation(self): + from ExtensionClass import _IS_PYPY + try: + from MethodObject import _MethodObject + except ImportError: # pragma: no cover + self.assertTrue(_IS_PYPY) + else: + self.assertTrue(hasattr(_MethodObject, 'Method')) + + def test_methodobject(self): + from ExtensionClass import Base + from MethodObject import Method + + class Callable(Method): + def __call__(self, ob, *args, **kw): + return (repr(ob), args, kw) + + class ExClass(Base): + def __repr__(self): + return "bar()" + + hi = Callable() + + x = ExClass() + hi = x.hi + result = hi(1, 2, 3, name='spam') + + self.assertEqual(result, + ("bar()", (1, 2, 3), {'name': 'spam'})) + +def test_suite(): + return unittest.defaultTestLoader.loadTestsFromName(__name__) diff --git a/thesisenv/lib/python3.6/site-packages/PIL/.dylibs/libfreetype.6.dylib 
b/thesisenv/lib/python3.6/site-packages/PIL/.dylibs/libfreetype.6.dylib new file mode 100644 index 0000000..fa60c0d Binary files /dev/null and b/thesisenv/lib/python3.6/site-packages/PIL/.dylibs/libfreetype.6.dylib differ diff --git a/thesisenv/lib/python3.6/site-packages/PIL/.dylibs/libjpeg.9.dylib b/thesisenv/lib/python3.6/site-packages/PIL/.dylibs/libjpeg.9.dylib new file mode 100644 index 0000000..7feebd1 Binary files /dev/null and b/thesisenv/lib/python3.6/site-packages/PIL/.dylibs/libjpeg.9.dylib differ diff --git a/thesisenv/lib/python3.6/site-packages/PIL/.dylibs/liblcms2.2.dylib b/thesisenv/lib/python3.6/site-packages/PIL/.dylibs/liblcms2.2.dylib new file mode 100644 index 0000000..8f16c5b Binary files /dev/null and b/thesisenv/lib/python3.6/site-packages/PIL/.dylibs/liblcms2.2.dylib differ diff --git a/thesisenv/lib/python3.6/site-packages/PIL/.dylibs/liblzma.5.dylib b/thesisenv/lib/python3.6/site-packages/PIL/.dylibs/liblzma.5.dylib new file mode 100644 index 0000000..e145e5e Binary files /dev/null and b/thesisenv/lib/python3.6/site-packages/PIL/.dylibs/liblzma.5.dylib differ diff --git a/thesisenv/lib/python3.6/site-packages/PIL/.dylibs/libopenjp2.2.1.0.dylib b/thesisenv/lib/python3.6/site-packages/PIL/.dylibs/libopenjp2.2.1.0.dylib new file mode 100644 index 0000000..180ee09 Binary files /dev/null and b/thesisenv/lib/python3.6/site-packages/PIL/.dylibs/libopenjp2.2.1.0.dylib differ diff --git a/thesisenv/lib/python3.6/site-packages/PIL/.dylibs/libpng16.16.dylib b/thesisenv/lib/python3.6/site-packages/PIL/.dylibs/libpng16.16.dylib new file mode 100644 index 0000000..77a3038 Binary files /dev/null and b/thesisenv/lib/python3.6/site-packages/PIL/.dylibs/libpng16.16.dylib differ diff --git a/thesisenv/lib/python3.6/site-packages/PIL/.dylibs/libtiff.5.dylib b/thesisenv/lib/python3.6/site-packages/PIL/.dylibs/libtiff.5.dylib new file mode 100644 index 0000000..6a2be0f Binary files /dev/null and 
b/thesisenv/lib/python3.6/site-packages/PIL/.dylibs/libtiff.5.dylib differ diff --git a/thesisenv/lib/python3.6/site-packages/PIL/.dylibs/libwebp.7.dylib b/thesisenv/lib/python3.6/site-packages/PIL/.dylibs/libwebp.7.dylib new file mode 100644 index 0000000..130dae4 Binary files /dev/null and b/thesisenv/lib/python3.6/site-packages/PIL/.dylibs/libwebp.7.dylib differ diff --git a/thesisenv/lib/python3.6/site-packages/PIL/.dylibs/libwebpdemux.2.dylib b/thesisenv/lib/python3.6/site-packages/PIL/.dylibs/libwebpdemux.2.dylib new file mode 100644 index 0000000..7a3480e Binary files /dev/null and b/thesisenv/lib/python3.6/site-packages/PIL/.dylibs/libwebpdemux.2.dylib differ diff --git a/thesisenv/lib/python3.6/site-packages/PIL/.dylibs/libwebpmux.3.dylib b/thesisenv/lib/python3.6/site-packages/PIL/.dylibs/libwebpmux.3.dylib new file mode 100644 index 0000000..274dce9 Binary files /dev/null and b/thesisenv/lib/python3.6/site-packages/PIL/.dylibs/libwebpmux.3.dylib differ diff --git a/thesisenv/lib/python3.6/site-packages/PIL/.dylibs/libz.1.2.11.dylib b/thesisenv/lib/python3.6/site-packages/PIL/.dylibs/libz.1.2.11.dylib new file mode 100644 index 0000000..2c9e209 Binary files /dev/null and b/thesisenv/lib/python3.6/site-packages/PIL/.dylibs/libz.1.2.11.dylib differ diff --git a/thesisenv/lib/python3.6/site-packages/PIL/BdfFontFile.py b/thesisenv/lib/python3.6/site-packages/PIL/BdfFontFile.py new file mode 100644 index 0000000..eac19bd --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/PIL/BdfFontFile.py @@ -0,0 +1,119 @@ +# +# The Python Imaging Library +# $Id$ +# +# bitmap distribution font (bdf) file parser +# +# history: +# 1996-05-16 fl created (as bdf2pil) +# 1997-08-25 fl converted to FontFile driver +# 2001-05-25 fl removed bogus __init__ call +# 2002-11-20 fl robustification (from Kevin Cazabon, Dmitry Vasiliev) +# 2003-04-22 fl more robustification (from Graham Dumpleton) +# +# Copyright (c) 1997-2003 by Secret Labs AB. 
+# Copyright (c) 1997-2003 by Fredrik Lundh. +# +# See the README file for information on usage and redistribution. +# + +from __future__ import print_function + +from . import Image, FontFile + + +# -------------------------------------------------------------------- +# parse X Bitmap Distribution Format (BDF) +# -------------------------------------------------------------------- + +bdf_slant = { + "R": "Roman", + "I": "Italic", + "O": "Oblique", + "RI": "Reverse Italic", + "RO": "Reverse Oblique", + "OT": "Other" +} + +bdf_spacing = { + "P": "Proportional", + "M": "Monospaced", + "C": "Cell" +} + + +def bdf_char(f): + # skip to STARTCHAR + while True: + s = f.readline() + if not s: + return None + if s[:9] == b"STARTCHAR": + break + id = s[9:].strip().decode('ascii') + + # load symbol properties + props = {} + while True: + s = f.readline() + if not s or s[:6] == b"BITMAP": + break + i = s.find(b" ") + props[s[:i].decode('ascii')] = s[i+1:-1].decode('ascii') + + # load bitmap + bitmap = [] + while True: + s = f.readline() + if not s or s[:7] == b"ENDCHAR": + break + bitmap.append(s[:-1]) + bitmap = b"".join(bitmap) + + [x, y, l, d] = [int(p) for p in props["BBX"].split()] + [dx, dy] = [int(p) for p in props["DWIDTH"].split()] + + bbox = (dx, dy), (l, -d-y, x+l, -d), (0, 0, x, y) + + try: + im = Image.frombytes("1", (x, y), bitmap, "hex", "1") + except ValueError: + # deal with zero-width characters + im = Image.new("1", (x, y)) + + return id, int(props["ENCODING"]), bbox, im + + +## +# Font file plugin for the X11 BDF format. 
+ +class BdfFontFile(FontFile.FontFile): + + def __init__(self, fp): + + FontFile.FontFile.__init__(self) + + s = fp.readline() + if s[:13] != b"STARTFONT 2.1": + raise SyntaxError("not a valid BDF file") + + props = {} + comments = [] + + while True: + s = fp.readline() + if not s or s[:13] == b"ENDPROPERTIES": + break + i = s.find(b" ") + props[s[:i].decode('ascii')] = s[i+1:-1].decode('ascii') + if s[:i] in [b"COMMENT", b"COPYRIGHT"]: + if s.find(b"LogicalFontDescription") < 0: + comments.append(s[i+1:-1].decode('ascii')) + + while True: + c = bdf_char(fp) + if not c: + break + id, ch, (xy, dst, src), im = c + if 0 <= ch < len(self.glyph): + self.glyph[ch] = xy, dst, src, im diff --git a/thesisenv/lib/python3.6/site-packages/PIL/BlpImagePlugin.py b/thesisenv/lib/python3.6/site-packages/PIL/BlpImagePlugin.py new file mode 100644 index 0000000..398e0fa --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/PIL/BlpImagePlugin.py @@ -0,0 +1,435 @@ +""" +Blizzard Mipmap Format (.blp) +Jerome Leclanche + +The contents of this file are hereby released in the public domain (CC0) +Full text of the CC0 license: + https://creativecommons.org/publicdomain/zero/1.0/ + +BLP1 files, used mostly in Warcraft III, are not fully supported. +All types of BLP2 files used in World of Warcraft are supported. + +The BLP file structure consists of a header, up to 16 mipmaps of the +texture + +Texture sizes must be powers of two, though the two dimensions do +not have to be equal; 512x256 is valid, but 512x200 is not. +The first mipmap (mipmap #0) is the full size image; each subsequent +mipmap halves both dimensions. The final mipmap should be 1x1. + +BLP files come in many different flavours: +* JPEG-compressed (type == 0) - only supported for BLP1. +* RAW images (type == 1, encoding == 1). Each mipmap is stored as an + array of 8-bit values, one per pixel, left to right, top to bottom. + Each value is an index to the palette. 
+* DXT-compressed (type == 1, encoding == 2): +- DXT1 compression is used if alpha_encoding == 0. + - An additional alpha bit is used if alpha_depth == 1. + - DXT3 compression is used if alpha_encoding == 1. + - DXT5 compression is used if alpha_encoding == 7. +""" + +import struct +from io import BytesIO + +from . import Image, ImageFile + + +BLP_FORMAT_JPEG = 0 + +BLP_ENCODING_UNCOMPRESSED = 1 +BLP_ENCODING_DXT = 2 +BLP_ENCODING_UNCOMPRESSED_RAW_BGRA = 3 + +BLP_ALPHA_ENCODING_DXT1 = 0 +BLP_ALPHA_ENCODING_DXT3 = 1 +BLP_ALPHA_ENCODING_DXT5 = 7 + + +def unpack_565(i): + return ( + ((i >> 11) & 0x1f) << 3, + ((i >> 5) & 0x3f) << 2, + (i & 0x1f) << 3 + ) + + +def decode_dxt1(data, alpha=False): + """ + input: one "row" of data (i.e. will produce 4*width pixels) + """ + + blocks = len(data) // 8 # number of blocks in row + ret = (bytearray(), bytearray(), bytearray(), bytearray()) + + for block in range(blocks): + # Decode next 8-byte block. + idx = block * 8 + color0, color1, bits = struct.unpack_from("> 2 + + a = 0xFF + if control == 0: + r, g, b = r0, g0, b0 + elif control == 1: + r, g, b = r1, g1, b1 + elif control == 2: + if color0 > color1: + r = (2 * r0 + r1) // 3 + g = (2 * g0 + g1) // 3 + b = (2 * b0 + b1) // 3 + else: + r = (r0 + r1) // 2 + g = (g0 + g1) // 2 + b = (b0 + b1) // 2 + elif control == 3: + if color0 > color1: + r = (2 * r1 + r0) // 3 + g = (2 * g1 + g0) // 3 + b = (2 * b1 + b0) // 3 + else: + r, g, b, a = 0, 0, 0, 0 + + if alpha: + ret[j].extend([r, g, b, a]) + else: + ret[j].extend([r, g, b]) + + return ret + + +def decode_dxt3(data): + """ + input: one "row" of data (i.e. will produce 4*width pixels) + """ + + blocks = len(data) // 16 # number of blocks in row + ret = (bytearray(), bytearray(), bytearray(), bytearray()) + + for block in range(blocks): + idx = block * 16 + block = data[idx:idx + 16] + # Decode next 16-byte block. 
+ bits = struct.unpack_from("<8B", block) + color0, color1 = struct.unpack_from(">= 4 + else: + high = True + a &= 0xf + a *= 17 # We get a value between 0 and 15 + + color_code = (code >> 2 * (4 * j + i)) & 0x03 + + if color_code == 0: + r, g, b = r0, g0, b0 + elif color_code == 1: + r, g, b = r1, g1, b1 + elif color_code == 2: + r = (2 * r0 + r1) // 3 + g = (2 * g0 + g1) // 3 + b = (2 * b0 + b1) // 3 + elif color_code == 3: + r = (2 * r1 + r0) // 3 + g = (2 * g1 + g0) // 3 + b = (2 * b1 + b0) // 3 + + ret[j].extend([r, g, b, a]) + + return ret + + +def decode_dxt5(data): + """ + input: one "row" of data (i.e. will produce 4 * width pixels) + """ + + blocks = len(data) // 16 # number of blocks in row + ret = (bytearray(), bytearray(), bytearray(), bytearray()) + + for block in range(blocks): + idx = block * 16 + block = data[idx:idx + 16] + # Decode next 16-byte block. + a0, a1 = struct.unpack_from("> alphacode_index) & 0x07 + elif alphacode_index == 15: + alphacode = (alphacode2 >> 15) | ((alphacode1 << 1) & 0x06) + else: # alphacode_index >= 18 and alphacode_index <= 45 + alphacode = (alphacode1 >> (alphacode_index - 16)) & 0x07 + + if alphacode == 0: + a = a0 + elif alphacode == 1: + a = a1 + elif a0 > a1: + a = ((8 - alphacode) * a0 + (alphacode - 1) * a1) // 7 + elif alphacode == 6: + a = 0 + elif alphacode == 7: + a = 255 + else: + a = ((6 - alphacode) * a0 + (alphacode - 1) * a1) // 5 + + color_code = (code >> 2 * (4 * j + i)) & 0x03 + + if color_code == 0: + r, g, b = r0, g0, b0 + elif color_code == 1: + r, g, b = r1, g1, b1 + elif color_code == 2: + r = (2 * r0 + r1) // 3 + g = (2 * g0 + g1) // 3 + b = (2 * b0 + b1) // 3 + elif color_code == 3: + r = (2 * r1 + r0) // 3 + g = (2 * g1 + g0) // 3 + b = (2 * b1 + b0) // 3 + + ret[j].extend([r, g, b, a]) + + return ret + + +class BLPFormatError(NotImplementedError): + pass + + +class BlpImageFile(ImageFile.ImageFile): + """ + Blizzard Mipmap Format + """ + format = "BLP" + format_description = "Blizzard Mipmap 
Format" + + def _open(self): + self.magic = self.fp.read(4) + self._read_blp_header() + + if self.magic == b"BLP1": + decoder = "BLP1" + self.mode = "RGB" + elif self.magic == b"BLP2": + decoder = "BLP2" + self.mode = "RGBA" if self._blp_alpha_depth else "RGB" + else: + raise BLPFormatError("Bad BLP magic %r" % (self.magic)) + + self.tile = [ + (decoder, (0, 0) + self.size, 0, (self.mode, 0, 1)) + ] + + def _read_blp_header(self): + self._blp_compression, = struct.unpack(" mode, rawmode + 1: ("P", "P;1"), + 4: ("P", "P;4"), + 8: ("P", "P"), + 16: ("RGB", "BGR;15"), + 24: ("RGB", "BGR"), + 32: ("RGB", "BGRX"), +} + + +def _accept(prefix): + return prefix[:2] == b"BM" + + +# ============================================================================== +# Image plugin for the Windows BMP format. +# ============================================================================== +class BmpImageFile(ImageFile.ImageFile): + """ Image plugin for the Windows Bitmap format (BMP) """ + + # -------------------------------------------------------------- Description + format_description = "Windows Bitmap" + format = "BMP" + # --------------------------------------------------- BMP Compression values + COMPRESSIONS = { + 'RAW': 0, + 'RLE8': 1, + 'RLE4': 2, + 'BITFIELDS': 3, + 'JPEG': 4, + 'PNG': 5 + } + RAW, RLE8, RLE4, BITFIELDS, JPEG, PNG = 0, 1, 2, 3, 4, 5 + + def _bitmap(self, header=0, offset=0): + """ Read relevant info about the BMP """ + read, seek = self.fp.read, self.fp.seek + if header: + seek(header) + file_info = {} + # read bmp header size @offset 14 (this is part of the header size) + file_info['header_size'] = i32(read(4)) + file_info['direction'] = -1 + # --------------------- If requested, read header at a specific position + # read the rest of the bmp header, without its size + header_data = ImageFile._safe_read(self.fp, + file_info['header_size'] - 4) + # --------------------------------------------------- IBM OS/2 Bitmap v1 + # ------ This format has 
different offsets because of width/height types + if file_info['header_size'] == 12: + file_info['width'] = i16(header_data[0:2]) + file_info['height'] = i16(header_data[2:4]) + file_info['planes'] = i16(header_data[4:6]) + file_info['bits'] = i16(header_data[6:8]) + file_info['compression'] = self.RAW + file_info['palette_padding'] = 3 + # ---------------------------------------------- Windows Bitmap v2 to v5 + elif file_info['header_size'] in (40, 64, 108, 124): # v3, OS/2 v2, v4, v5 + if file_info['header_size'] >= 40: # v3 and OS/2 + file_info['y_flip'] = i8(header_data[7]) == 0xff + file_info['direction'] = 1 if file_info['y_flip'] else -1 + file_info['width'] = i32(header_data[0:4]) + file_info['height'] = (i32(header_data[4:8]) + if not file_info['y_flip'] + else 2**32 - i32(header_data[4:8])) + file_info['planes'] = i16(header_data[8:10]) + file_info['bits'] = i16(header_data[10:12]) + file_info['compression'] = i32(header_data[12:16]) + # byte size of pixel data + file_info['data_size'] = i32(header_data[16:20]) + file_info['pixels_per_meter'] = (i32(header_data[20:24]), + i32(header_data[24:28])) + file_info['colors'] = i32(header_data[28:32]) + file_info['palette_padding'] = 4 + self.info["dpi"] = tuple( + map(lambda x: int(math.ceil(x / 39.3701)), + file_info['pixels_per_meter'])) + if file_info['compression'] == self.BITFIELDS: + if len(header_data) >= 52: + for idx, mask in enumerate(['r_mask', + 'g_mask', + 'b_mask', + 'a_mask']): + file_info[mask] = i32(header_data[36+idx*4:40+idx*4]) + else: + # 40 byte headers only have the three components in the + # bitfields masks, + # ref: https://msdn.microsoft.com/en-us/library/windows/desktop/dd183376(v=vs.85).aspx + # See also https://github.com/python-pillow/Pillow/issues/1293 + # There is a 4th component in the RGBQuad, in the alpha + # location, but it is listed as a reserved component, + # and it is not generally an alpha channel + file_info['a_mask'] = 0x0 + for mask in ['r_mask', 'g_mask', 'b_mask']: 
+ file_info[mask] = i32(read(4)) + file_info['rgb_mask'] = (file_info['r_mask'], + file_info['g_mask'], + file_info['b_mask']) + file_info['rgba_mask'] = (file_info['r_mask'], + file_info['g_mask'], + file_info['b_mask'], + file_info['a_mask']) + else: + raise IOError("Unsupported BMP header type (%d)" % + file_info['header_size']) + # ------------------ Special case : header is reported 40, which + # ---------------------- is shorter than real size for bpp >= 16 + self._size = file_info['width'], file_info['height'] + # -------- If color count was not found in the header, compute from bits + file_info['colors'] = file_info['colors'] if file_info.get('colors', 0) else (1 << file_info['bits']) + # -------------------------------- Check abnormal values for DOS attacks + if file_info['width'] * file_info['height'] > 2**31: + raise IOError("Unsupported BMP Size: (%dx%d)" % self.size) + # ----------------------- Check bit depth for unusual unsupported values + self.mode, raw_mode = BIT2MODE.get(file_info['bits'], (None, None)) + if self.mode is None: + raise IOError("Unsupported BMP pixel depth (%d)" + % file_info['bits']) + # ----------------- Process BMP with Bitfields compression (not palette) + if file_info['compression'] == self.BITFIELDS: + SUPPORTED = { + 32: [(0xff0000, 0xff00, 0xff, 0x0), + (0xff0000, 0xff00, 0xff, 0xff000000), + (0x0, 0x0, 0x0, 0x0), + (0xff000000, 0xff0000, 0xff00, 0x0)], + 24: [(0xff0000, 0xff00, 0xff)], + 16: [(0xf800, 0x7e0, 0x1f), (0x7c00, 0x3e0, 0x1f)] + } + MASK_MODES = { + (32, (0xff0000, 0xff00, 0xff, 0x0)): "BGRX", + (32, (0xff000000, 0xff0000, 0xff00, 0x0)): "XBGR", + (32, (0xff0000, 0xff00, 0xff, 0xff000000)): "BGRA", + (32, (0x0, 0x0, 0x0, 0x0)): "BGRA", + (24, (0xff0000, 0xff00, 0xff)): "BGR", + (16, (0xf800, 0x7e0, 0x1f)): "BGR;16", + (16, (0x7c00, 0x3e0, 0x1f)): "BGR;15" + } + if file_info['bits'] in SUPPORTED: + if file_info['bits'] == 32 and \ + file_info['rgba_mask'] in SUPPORTED[file_info['bits']]: + raw_mode = 
MASK_MODES[(file_info['bits'], file_info['rgba_mask'])] + self.mode = "RGBA" if raw_mode in ("BGRA",) else self.mode + elif (file_info['bits'] in (24, 16) and + file_info['rgb_mask'] in SUPPORTED[file_info['bits']]): + raw_mode = MASK_MODES[ + (file_info['bits'], file_info['rgb_mask']) + ] + else: + raise IOError("Unsupported BMP bitfields layout") + else: + raise IOError("Unsupported BMP bitfields layout") + elif file_info['compression'] == self.RAW: + if file_info['bits'] == 32 and header == 22: # 32-bit .cur offset + raw_mode, self.mode = "BGRA", "RGBA" + else: + raise IOError("Unsupported BMP compression (%d)" % + file_info['compression']) + # ---------------- Once the header is processed, process the palette/LUT + if self.mode == "P": # Paletted for 1, 4 and 8 bit images + # ----------------------------------------------------- 1-bit images + if not (0 < file_info['colors'] <= 65536): + raise IOError("Unsupported BMP Palette size (%d)" % + file_info['colors']) + else: + padding = file_info['palette_padding'] + palette = read(padding * file_info['colors']) + greyscale = True + indices = (0, 255) if file_info['colors'] == 2 else \ + list(range(file_info['colors'])) + # ------------------ Check if greyscale and ignore palette if so + for ind, val in enumerate(indices): + rgb = palette[ind*padding:ind*padding + 3] + if rgb != o8(val) * 3: + greyscale = False + # -------- If all colors are grey, white or black, ditch palette + if greyscale: + self.mode = "1" if file_info['colors'] == 2 else "L" + raw_mode = self.mode + else: + self.mode = "P" + self.palette = ImagePalette.raw( + "BGRX" if padding == 4 else "BGR", palette) + + # ----------------------------- Finally set the tile data for the plugin + self.info['compression'] = file_info['compression'] + self.tile = [ + ('raw', + (0, 0, file_info['width'], file_info['height']), + offset or self.fp.tell(), + (raw_mode, + ((file_info['width'] * file_info['bits'] + 31) >> 3) & (~3), + file_info['direction'])) + ] + + 
def _open(self): + """ Open file, check magic number and read header """ + # read 14 bytes: magic number, filesize, reserved, header final offset + head_data = self.fp.read(14) + # choke if the file does not have the required magic bytes + if head_data[0:2] != b"BM": + raise SyntaxError("Not a BMP file") + # read the start position of the BMP image data (u32) + offset = i32(head_data[10:14]) + # load bitmap information (offset=raster info) + self._bitmap(offset=offset) + + +# ============================================================================== +# Image plugin for the DIB format (BMP alias) +# ============================================================================== +class DibImageFile(BmpImageFile): + + format = "DIB" + format_description = "Windows Bitmap" + + def _open(self): + self._bitmap() + +# +# -------------------------------------------------------------------- +# Write BMP file + + +SAVE = { + "1": ("1", 1, 2), + "L": ("L", 8, 256), + "P": ("P", 8, 256), + "RGB": ("BGR", 24, 0), + "RGBA": ("BGRA", 32, 0), +} + + +def _save(im, fp, filename): + try: + rawmode, bits, colors = SAVE[im.mode] + except KeyError: + raise IOError("cannot write mode %s as BMP" % im.mode) + + info = im.encoderinfo + + dpi = info.get("dpi", (96, 96)) + + # 1 meter == 39.3701 inches + ppm = tuple(map(lambda x: int(x * 39.3701), dpi)) + + stride = ((im.size[0]*bits+7)//8+3) & (~3) + header = 40 # or 64 for OS/2 version 2 + offset = 14 + header + colors * 4 + image = stride * im.size[1] + + # bitmap header + fp.write(b"BM" + # file type (magic) + o32(offset+image) + # file size + o32(0) + # reserved + o32(offset)) # image data offset + + # bitmap info header + fp.write(o32(header) + # info header size + o32(im.size[0]) + # width + o32(im.size[1]) + # height + o16(1) + # planes + o16(bits) + # depth + o32(0) + # compression (0=uncompressed) + o32(image) + # size of bitmap + o32(ppm[0]) + o32(ppm[1]) + # resolution + o32(colors) + # colors used + o32(colors)) # colors 
important + + fp.write(b"\0" * (header - 40)) # padding (for OS/2 format) + + if im.mode == "1": + for i in (0, 255): + fp.write(o8(i) * 4) + elif im.mode == "L": + for i in range(256): + fp.write(o8(i) * 4) + elif im.mode == "P": + fp.write(im.im.getpalette("RGB", "BGRX")) + + ImageFile._save(im, fp, [("raw", (0, 0)+im.size, 0, + (rawmode, stride, -1))]) + +# +# -------------------------------------------------------------------- +# Registry + + +Image.register_open(BmpImageFile.format, BmpImageFile, _accept) +Image.register_save(BmpImageFile.format, _save) + +Image.register_extension(BmpImageFile.format, ".bmp") + +Image.register_mime(BmpImageFile.format, "image/bmp") diff --git a/thesisenv/lib/python3.6/site-packages/PIL/BufrStubImagePlugin.py b/thesisenv/lib/python3.6/site-packages/PIL/BufrStubImagePlugin.py new file mode 100644 index 0000000..a1957b3 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/PIL/BufrStubImagePlugin.py @@ -0,0 +1,72 @@ +# +# The Python Imaging Library +# $Id$ +# +# BUFR stub adapter +# +# Copyright (c) 1996-2003 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +from . import Image, ImageFile + +_handler = None + + +def register_handler(handler): + """ + Install application-specific BUFR image handler. + + :param handler: Handler object. 
+ """ + global _handler + _handler = handler + + +# -------------------------------------------------------------------- +# Image adapter + +def _accept(prefix): + return prefix[:4] == b"BUFR" or prefix[:4] == b"ZCZC" + + +class BufrStubImageFile(ImageFile.StubImageFile): + + format = "BUFR" + format_description = "BUFR" + + def _open(self): + + offset = self.fp.tell() + + if not _accept(self.fp.read(4)): + raise SyntaxError("Not a BUFR file") + + self.fp.seek(offset) + + # make something up + self.mode = "F" + self._size = 1, 1 + + loader = self._load() + if loader: + loader.open(self) + + def _load(self): + return _handler + + +def _save(im, fp, filename): + if _handler is None or not hasattr("_handler", "save"): + raise IOError("BUFR save handler not installed") + _handler.save(im, fp, filename) + + +# -------------------------------------------------------------------- +# Registry + +Image.register_open(BufrStubImageFile.format, BufrStubImageFile, _accept) +Image.register_save(BufrStubImageFile.format, _save) + +Image.register_extension(BufrStubImageFile.format, ".bufr") diff --git a/thesisenv/lib/python3.6/site-packages/PIL/ContainerIO.py b/thesisenv/lib/python3.6/site-packages/PIL/ContainerIO.py new file mode 100644 index 0000000..682ad90 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/PIL/ContainerIO.py @@ -0,0 +1,116 @@ +# +# The Python Imaging Library. +# $Id$ +# +# a class to read from a container file +# +# History: +# 1995-06-18 fl Created +# 1995-09-07 fl Added readline(), readlines() +# +# Copyright (c) 1997-2001 by Secret Labs AB +# Copyright (c) 1995 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +## +# A file object that provides read access to a part of an existing +# file (for example a TAR file). + + +class ContainerIO(object): + + def __init__(self, file, offset, length): + """ + Create file object. + + :param file: Existing file. + :param offset: Start of region, in bytes. 
+ :param length: Size of region, in bytes. + """ + self.fh = file + self.pos = 0 + self.offset = offset + self.length = length + self.fh.seek(offset) + + ## + # Always false. + + def isatty(self): + return 0 + + def seek(self, offset, mode=0): + """ + Move file pointer. + + :param offset: Offset in bytes. + :param mode: Starting position. Use 0 for beginning of region, 1 + for current offset, and 2 for end of region. You cannot move + the pointer outside the defined region. + """ + if mode == 1: + self.pos = self.pos + offset + elif mode == 2: + self.pos = self.length + offset + else: + self.pos = offset + # clamp + self.pos = max(0, min(self.pos, self.length)) + self.fh.seek(self.offset + self.pos) + + def tell(self): + """ + Get current file pointer. + + :returns: Offset from start of region, in bytes. + """ + return self.pos + + def read(self, n=0): + """ + Read data. + + :param n: Number of bytes to read. If omitted or zero, + read until end of region. + :returns: An 8-bit string. + """ + if n: + n = min(n, self.length - self.pos) + else: + n = self.length - self.pos + if not n: # EOF + return "" + self.pos = self.pos + n + return self.fh.read(n) + + def readline(self): + """ + Read a line of text. + + :returns: An 8-bit string. + """ + s = "" + while True: + c = self.read(1) + if not c: + break + s = s + c + if c == "\n": + break + return s + + def readlines(self): + """ + Read multiple lines of text. + + :returns: A list of 8-bit strings. + """ + lines = [] + while True: + s = self.readline() + if not s: + break + lines.append(s) + return lines diff --git a/thesisenv/lib/python3.6/site-packages/PIL/CurImagePlugin.py b/thesisenv/lib/python3.6/site-packages/PIL/CurImagePlugin.py new file mode 100644 index 0000000..3e8f321 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/PIL/CurImagePlugin.py @@ -0,0 +1,78 @@ +# +# The Python Imaging Library. +# $Id$ +# +# Windows Cursor support for PIL +# +# notes: +# uses BmpImagePlugin.py to read the bitmap data. 
+# +# history: +# 96-05-27 fl Created +# +# Copyright (c) Secret Labs AB 1997. +# Copyright (c) Fredrik Lundh 1996. +# +# See the README file for information on usage and redistribution. +# + +from __future__ import print_function + +from . import Image, BmpImagePlugin +from ._binary import i8, i16le as i16, i32le as i32 + +__version__ = "0.1" + +# +# -------------------------------------------------------------------- + + +def _accept(prefix): + return prefix[:4] == b"\0\0\2\0" + + +## +# Image plugin for Windows Cursor files. + +class CurImageFile(BmpImagePlugin.BmpImageFile): + + format = "CUR" + format_description = "Windows Cursor" + + def _open(self): + + offset = self.fp.tell() + + # check magic + s = self.fp.read(6) + if not _accept(s): + raise SyntaxError("not a CUR file") + + # pick the largest cursor in the file + m = b"" + for i in range(i16(s[4:])): + s = self.fp.read(16) + if not m: + m = s + elif i8(s[0]) > i8(m[0]) and i8(s[1]) > i8(m[1]): + m = s + if not m: + raise TypeError("No cursors were found") + + # load as bitmap + self._bitmap(i32(m[12:]) + offset) + + # patch up the bitmap height + self._size = self.size[0], self.size[1]//2 + d, e, o, a = self.tile[0] + self.tile[0] = d, (0, 0)+self.size, o, a + + return + + +# +# -------------------------------------------------------------------- + +Image.register_open(CurImageFile.format, CurImageFile, _accept) + +Image.register_extension(CurImageFile.format, ".cur") diff --git a/thesisenv/lib/python3.6/site-packages/PIL/DcxImagePlugin.py b/thesisenv/lib/python3.6/site-packages/PIL/DcxImagePlugin.py new file mode 100644 index 0000000..2045927 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/PIL/DcxImagePlugin.py @@ -0,0 +1,87 @@ +# +# The Python Imaging Library. +# $Id$ +# +# DCX file handling +# +# DCX is a container file format defined by Intel, commonly used +# for fax applications. 
Each DCX file consists of a directory +# (a list of file offsets) followed by a set of (usually 1-bit) +# PCX files. +# +# History: +# 1995-09-09 fl Created +# 1996-03-20 fl Properly derived from PcxImageFile. +# 1998-07-15 fl Renamed offset attribute to avoid name clash +# 2002-07-30 fl Fixed file handling +# +# Copyright (c) 1997-98 by Secret Labs AB. +# Copyright (c) 1995-96 by Fredrik Lundh. +# +# See the README file for information on usage and redistribution. +# + +from . import Image +from ._binary import i32le as i32 +from .PcxImagePlugin import PcxImageFile + +__version__ = "0.2" + +MAGIC = 0x3ADE68B1 # QUIZ: what's this value, then? + + +def _accept(prefix): + return len(prefix) >= 4 and i32(prefix) == MAGIC + + +## +# Image plugin for the Intel DCX format. + +class DcxImageFile(PcxImageFile): + + format = "DCX" + format_description = "Intel DCX" + _close_exclusive_fp_after_loading = False + + def _open(self): + + # Header + s = self.fp.read(4) + if i32(s) != MAGIC: + raise SyntaxError("not a DCX file") + + # Component directory + self._offset = [] + for i in range(1024): + offset = i32(self.fp.read(4)) + if not offset: + break + self._offset.append(offset) + + self.__fp = self.fp + self.frame = None + self.seek(0) + + @property + def n_frames(self): + return len(self._offset) + + @property + def is_animated(self): + return len(self._offset) > 1 + + def seek(self, frame): + if not self._seek_check(frame): + return + self.frame = frame + self.fp = self.__fp + self.fp.seek(self._offset[frame]) + PcxImageFile._open(self) + + def tell(self): + return self.frame + + +Image.register_open(DcxImageFile.format, DcxImageFile, _accept) + +Image.register_extension(DcxImageFile.format, ".dcx") diff --git a/thesisenv/lib/python3.6/site-packages/PIL/DdsImagePlugin.py b/thesisenv/lib/python3.6/site-packages/PIL/DdsImagePlugin.py new file mode 100644 index 0000000..7660827 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/PIL/DdsImagePlugin.py @@ -0,0 +1,173 @@ 
+""" +A Pillow loader for .dds files (S3TC-compressed aka DXTC) +Jerome Leclanche + +Documentation: + https://web.archive.org/web/20170802060935/http://oss.sgi.com/projects/ogl-sample/registry/EXT/texture_compression_s3tc.txt + +The contents of this file are hereby released in the public domain (CC0) +Full text of the CC0 license: + https://creativecommons.org/publicdomain/zero/1.0/ +""" + +import struct +from io import BytesIO +from . import Image, ImageFile + + +# Magic ("DDS ") +DDS_MAGIC = 0x20534444 + +# DDS flags +DDSD_CAPS = 0x1 +DDSD_HEIGHT = 0x2 +DDSD_WIDTH = 0x4 +DDSD_PITCH = 0x8 +DDSD_PIXELFORMAT = 0x1000 +DDSD_MIPMAPCOUNT = 0x20000 +DDSD_LINEARSIZE = 0x80000 +DDSD_DEPTH = 0x800000 + +# DDS caps +DDSCAPS_COMPLEX = 0x8 +DDSCAPS_TEXTURE = 0x1000 +DDSCAPS_MIPMAP = 0x400000 + +DDSCAPS2_CUBEMAP = 0x200 +DDSCAPS2_CUBEMAP_POSITIVEX = 0x400 +DDSCAPS2_CUBEMAP_NEGATIVEX = 0x800 +DDSCAPS2_CUBEMAP_POSITIVEY = 0x1000 +DDSCAPS2_CUBEMAP_NEGATIVEY = 0x2000 +DDSCAPS2_CUBEMAP_POSITIVEZ = 0x4000 +DDSCAPS2_CUBEMAP_NEGATIVEZ = 0x8000 +DDSCAPS2_VOLUME = 0x200000 + +# Pixel Format +DDPF_ALPHAPIXELS = 0x1 +DDPF_ALPHA = 0x2 +DDPF_FOURCC = 0x4 +DDPF_PALETTEINDEXED8 = 0x20 +DDPF_RGB = 0x40 +DDPF_LUMINANCE = 0x20000 + + +# dds.h + +DDS_FOURCC = DDPF_FOURCC +DDS_RGB = DDPF_RGB +DDS_RGBA = DDPF_RGB | DDPF_ALPHAPIXELS +DDS_LUMINANCE = DDPF_LUMINANCE +DDS_LUMINANCEA = DDPF_LUMINANCE | DDPF_ALPHAPIXELS +DDS_ALPHA = DDPF_ALPHA +DDS_PAL8 = DDPF_PALETTEINDEXED8 + +DDS_HEADER_FLAGS_TEXTURE = (DDSD_CAPS | DDSD_HEIGHT | DDSD_WIDTH | + DDSD_PIXELFORMAT) +DDS_HEADER_FLAGS_MIPMAP = DDSD_MIPMAPCOUNT +DDS_HEADER_FLAGS_VOLUME = DDSD_DEPTH +DDS_HEADER_FLAGS_PITCH = DDSD_PITCH +DDS_HEADER_FLAGS_LINEARSIZE = DDSD_LINEARSIZE + +DDS_HEIGHT = DDSD_HEIGHT +DDS_WIDTH = DDSD_WIDTH + +DDS_SURFACE_FLAGS_TEXTURE = DDSCAPS_TEXTURE +DDS_SURFACE_FLAGS_MIPMAP = DDSCAPS_COMPLEX | DDSCAPS_MIPMAP +DDS_SURFACE_FLAGS_CUBEMAP = DDSCAPS_COMPLEX + +DDS_CUBEMAP_POSITIVEX = DDSCAPS2_CUBEMAP | DDSCAPS2_CUBEMAP_POSITIVEX 
+DDS_CUBEMAP_NEGATIVEX = DDSCAPS2_CUBEMAP | DDSCAPS2_CUBEMAP_NEGATIVEX +DDS_CUBEMAP_POSITIVEY = DDSCAPS2_CUBEMAP | DDSCAPS2_CUBEMAP_POSITIVEY +DDS_CUBEMAP_NEGATIVEY = DDSCAPS2_CUBEMAP | DDSCAPS2_CUBEMAP_NEGATIVEY +DDS_CUBEMAP_POSITIVEZ = DDSCAPS2_CUBEMAP | DDSCAPS2_CUBEMAP_POSITIVEZ +DDS_CUBEMAP_NEGATIVEZ = DDSCAPS2_CUBEMAP | DDSCAPS2_CUBEMAP_NEGATIVEZ + + +# DXT1 +DXT1_FOURCC = 0x31545844 + +# DXT3 +DXT3_FOURCC = 0x33545844 + +# DXT5 +DXT5_FOURCC = 0x35545844 + + +# dxgiformat.h + +DXGI_FORMAT_BC7_TYPELESS = 97 +DXGI_FORMAT_BC7_UNORM = 98 +DXGI_FORMAT_BC7_UNORM_SRGB = 99 + + +class DdsImageFile(ImageFile.ImageFile): + format = "DDS" + format_description = "DirectDraw Surface" + + def _open(self): + magic, header_size = struct.unpack(" 0: + s = fp.read(min(lengthfile, 100*1024)) + if not s: + break + lengthfile -= len(s) + f.write(s) + + # Build Ghostscript command + command = ["gs", + "-q", # quiet mode + "-g%dx%d" % size, # set output geometry (pixels) + "-r%fx%f" % res, # set input DPI (dots per inch) + "-dBATCH", # exit after processing + "-dNOPAUSE", # don't pause between pages + "-dSAFER", # safe mode + "-sDEVICE=ppmraw", # ppm driver + "-sOutputFile=%s" % outfile, # output file + "-c", "%d %d translate" % (-bbox[0], -bbox[1]), + # adjust for image origin + "-f", infile, # input file + "-c", "showpage", # showpage (see: https://bugs.ghostscript.com/show_bug.cgi?id=698272) + ] + + if gs_windows_binary is not None: + if not gs_windows_binary: + raise WindowsError('Unable to locate Ghostscript on paths') + command[0] = gs_windows_binary + + # push data through Ghostscript + try: + with open(os.devnull, 'w+b') as devnull: + startupinfo = None + if sys.platform.startswith('win'): + startupinfo = subprocess.STARTUPINFO() + startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW + subprocess.check_call(command, stdin=devnull, stdout=devnull, + startupinfo=startupinfo) + im = Image.open(outfile) + im.load() + finally: + try: + os.unlink(outfile) + if infile_temp: + 
os.unlink(infile_temp) + except OSError: + pass + + return im.im.copy() + + +class PSFile(object): + """ + Wrapper for bytesio object that treats either CR or LF as end of line. + """ + def __init__(self, fp): + self.fp = fp + self.char = None + + def seek(self, offset, whence=0): + self.char = None + self.fp.seek(offset, whence) + + def readline(self): + s = self.char or b"" + self.char = None + + c = self.fp.read(1) + while c not in b"\r\n": + s = s + c + c = self.fp.read(1) + + self.char = self.fp.read(1) + # line endings can be 1 or 2 of \r \n, in either order + if self.char in b"\r\n": + self.char = None + + return s.decode('latin-1') + + +def _accept(prefix): + return prefix[:4] == b"%!PS" or \ + (len(prefix) >= 4 and i32(prefix) == 0xC6D3D0C5) + +## +# Image plugin for Encapsulated Postscript. This plugin supports only +# a few variants of this format. + + +class EpsImageFile(ImageFile.ImageFile): + """EPS File Parser for the Python Imaging Library""" + + format = "EPS" + format_description = "Encapsulated Postscript" + + mode_map = {1: "L", 2: "LAB", 3: "RGB", 4: "CMYK"} + + def _open(self): + (length, offset) = self._find_offset(self.fp) + + # Rewrap the open file pointer in something that will + # convert line endings and decode to latin-1. + fp = PSFile(self.fp) + + # go to offset - start of "%!PS" + fp.seek(offset) + + box = None + + self.mode = "RGB" + self._size = 1, 1 # FIXME: huh? + + # + # Load EPS header + + s_raw = fp.readline() + s = s_raw.strip('\r\n') + + while s_raw: + if s: + if len(s) > 255: + raise SyntaxError("not an EPS file") + + try: + m = split.match(s) + except re.error: + raise SyntaxError("not an EPS file") + + if m: + k, v = m.group(1, 2) + self.info[k] = v + if k == "BoundingBox": + try: + # Note: The DSC spec says that BoundingBox + # fields should be integers, but some drivers + # put floating point values there anyway. 
+ box = [int(float(i)) for i in v.split()] + self._size = box[2] - box[0], box[3] - box[1] + self.tile = [("eps", (0, 0) + self.size, offset, + (length, box))] + except Exception: + pass + + else: + m = field.match(s) + if m: + k = m.group(1) + + if k == "EndComments": + break + if k[:8] == "PS-Adobe": + self.info[k[:8]] = k[9:] + else: + self.info[k] = "" + elif s[0] == '%': + # handle non-DSC Postscript comments that some + # tools mistakenly put in the Comments section + pass + else: + raise IOError("bad EPS header") + + s_raw = fp.readline() + s = s_raw.strip('\r\n') + + if s and s[:1] != "%": + break + + # + # Scan for an "ImageData" descriptor + + while s[:1] == "%": + + if len(s) > 255: + raise SyntaxError("not an EPS file") + + if s[:11] == "%ImageData:": + # Encoded bitmapped image. + x, y, bi, mo = s[11:].split(None, 7)[:4] + + if int(bi) != 8: + break + try: + self.mode = self.mode_map[int(mo)] + except ValueError: + break + + self._size = int(x), int(y) + return + + s = fp.readline().strip('\r\n') + if not s: + break + + if not box: + raise IOError("cannot determine EPS bounding box") + + def _find_offset(self, fp): + + s = fp.read(160) + + if s[:4] == b"%!PS": + # for HEAD without binary preview + fp.seek(0, 2) + length = fp.tell() + offset = 0 + elif i32(s[0:4]) == 0xC6D3D0C5: + # FIX for: Some EPS file not handled correctly / issue #302 + # EPS can contain binary data + # or start directly with latin coding + # more info see: + # https://web.archive.org/web/20160528181353/http://partners.adobe.com/public/developer/en/ps/5002.EPSF_Spec.pdf + offset = i32(s[4:8]) + length = i32(s[8:12]) + else: + raise SyntaxError("not an EPS file") + + return (length, offset) + + def load(self, scale=1): + # Load EPS via Ghostscript + if not self.tile: + return + self.im = Ghostscript(self.tile, self.size, self.fp, scale) + self.mode = self.im.mode + self._size = self.im.size + self.tile = [] + + def load_seek(self, *args, **kwargs): + # we can't incrementally load, 
so force ImageFile.parser to + # use our custom load method by defining this method. + pass + + +# +# -------------------------------------------------------------------- + +def _save(im, fp, filename, eps=1): + """EPS Writer for the Python Imaging Library.""" + + # + # make sure image data is available + im.load() + + # + # determine postscript image mode + if im.mode == "L": + operator = (8, 1, "image") + elif im.mode == "RGB": + operator = (8, 3, "false 3 colorimage") + elif im.mode == "CMYK": + operator = (8, 4, "false 4 colorimage") + else: + raise ValueError("image mode is not supported") + + base_fp = fp + wrapped_fp = False + if fp != sys.stdout: + if sys.version_info.major > 2: + fp = io.TextIOWrapper(fp, encoding='latin-1') + wrapped_fp = True + + try: + if eps: + # + # write EPS header + fp.write("%!PS-Adobe-3.0 EPSF-3.0\n") + fp.write("%%Creator: PIL 0.1 EpsEncode\n") + # fp.write("%%CreationDate: %s"...) + fp.write("%%%%BoundingBox: 0 0 %d %d\n" % im.size) + fp.write("%%Pages: 1\n") + fp.write("%%EndComments\n") + fp.write("%%Page: 1 1\n") + fp.write("%%ImageData: %d %d " % im.size) + fp.write("%d %d 0 1 1 \"%s\"\n" % operator) + + # + # image header + fp.write("gsave\n") + fp.write("10 dict begin\n") + fp.write("/buf %d string def\n" % (im.size[0] * operator[1])) + fp.write("%d %d scale\n" % im.size) + fp.write("%d %d 8\n" % im.size) # <= bits + fp.write("[%d 0 0 -%d 0 %d]\n" % (im.size[0], im.size[1], im.size[1])) + fp.write("{ currentfile buf readhexstring pop } bind\n") + fp.write(operator[2] + "\n") + if hasattr(fp, "flush"): + fp.flush() + + ImageFile._save(im, base_fp, [("eps", (0, 0)+im.size, 0, None)]) + + fp.write("\n%%%%EndBinary\n") + fp.write("grestore end\n") + if hasattr(fp, "flush"): + fp.flush() + finally: + if wrapped_fp: + fp.detach() + +# +# -------------------------------------------------------------------- + + +Image.register_open(EpsImageFile.format, EpsImageFile, _accept) + +Image.register_save(EpsImageFile.format, _save) + 
+Image.register_extensions(EpsImageFile.format, [".ps", ".eps"]) + +Image.register_mime(EpsImageFile.format, "application/postscript") diff --git a/thesisenv/lib/python3.6/site-packages/PIL/ExifTags.py b/thesisenv/lib/python3.6/site-packages/PIL/ExifTags.py new file mode 100644 index 0000000..a8ad26b --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/PIL/ExifTags.py @@ -0,0 +1,315 @@ +# +# The Python Imaging Library. +# $Id$ +# +# EXIF tags +# +# Copyright (c) 2003 by Secret Labs AB +# +# See the README file for information on usage and redistribution. +# + +## +# This module provides constants and clear-text names for various +# well-known EXIF tags. +## + +## +# Maps EXIF tags to tag names. + +TAGS = { + + # possibly incomplete + 0x000b: "ProcessingSoftware", + 0x00fe: "NewSubfileType", + 0x00ff: "SubfileType", + 0x0100: "ImageWidth", + 0x0101: "ImageLength", + 0x0102: "BitsPerSample", + 0x0103: "Compression", + 0x0106: "PhotometricInterpretation", + 0x0107: "Thresholding", + 0x0108: "CellWidth", + 0x0109: "CellLength", + 0x010a: "FillOrder", + 0x010d: "DocumentName", + 0x010e: "ImageDescription", + 0x010f: "Make", + 0x0110: "Model", + 0x0111: "StripOffsets", + 0x0112: "Orientation", + 0x0115: "SamplesPerPixel", + 0x0116: "RowsPerStrip", + 0x0117: "StripByteCounts", + 0x0118: "MinSampleValue", + 0x0119: "MaxSampleValue", + 0x011a: "XResolution", + 0x011b: "YResolution", + 0x011c: "PlanarConfiguration", + 0x011d: "PageName", + 0x0120: "FreeOffsets", + 0x0121: "FreeByteCounts", + 0x0122: "GrayResponseUnit", + 0x0123: "GrayResponseCurve", + 0x0124: "T4Options", + 0x0125: "T6Options", + 0x0128: "ResolutionUnit", + 0x0129: "PageNumber", + 0x012d: "TransferFunction", + 0x0131: "Software", + 0x0132: "DateTime", + 0x013b: "Artist", + 0x013c: "HostComputer", + 0x013d: "Predictor", + 0x013e: "WhitePoint", + 0x013f: "PrimaryChromaticities", + 0x0140: "ColorMap", + 0x0141: "HalftoneHints", + 0x0142: "TileWidth", + 0x0143: "TileLength", + 0x0144: "TileOffsets", + 
0x0145: "TileByteCounts", + 0x014a: "SubIFDs", + 0x014c: "InkSet", + 0x014d: "InkNames", + 0x014e: "NumberOfInks", + 0x0150: "DotRange", + 0x0151: "TargetPrinter", + 0x0152: "ExtraSamples", + 0x0153: "SampleFormat", + 0x0154: "SMinSampleValue", + 0x0155: "SMaxSampleValue", + 0x0156: "TransferRange", + 0x0157: "ClipPath", + 0x0158: "XClipPathUnits", + 0x0159: "YClipPathUnits", + 0x015a: "Indexed", + 0x015b: "JPEGTables", + 0x015f: "OPIProxy", + 0x0200: "JPEGProc", + 0x0201: "JpegIFOffset", + 0x0202: "JpegIFByteCount", + 0x0203: "JpegRestartInterval", + 0x0205: "JpegLosslessPredictors", + 0x0206: "JpegPointTransforms", + 0x0207: "JpegQTables", + 0x0208: "JpegDCTables", + 0x0209: "JpegACTables", + 0x0211: "YCbCrCoefficients", + 0x0212: "YCbCrSubSampling", + 0x0213: "YCbCrPositioning", + 0x0214: "ReferenceBlackWhite", + 0x02bc: "XMLPacket", + 0x1000: "RelatedImageFileFormat", + 0x1001: "RelatedImageWidth", + 0x1002: "RelatedImageLength", + 0x4746: "Rating", + 0x4749: "RatingPercent", + 0x800d: "ImageID", + 0x828d: "CFARepeatPatternDim", + 0x828e: "CFAPattern", + 0x828f: "BatteryLevel", + 0x8298: "Copyright", + 0x829a: "ExposureTime", + 0x829d: "FNumber", + 0x83bb: "IPTCNAA", + 0x8649: "ImageResources", + 0x8769: "ExifOffset", + 0x8773: "InterColorProfile", + 0x8822: "ExposureProgram", + 0x8824: "SpectralSensitivity", + 0x8825: "GPSInfo", + 0x8827: "ISOSpeedRatings", + 0x8828: "OECF", + 0x8829: "Interlace", + 0x882a: "TimeZoneOffset", + 0x882b: "SelfTimerMode", + 0x9000: "ExifVersion", + 0x9003: "DateTimeOriginal", + 0x9004: "DateTimeDigitized", + 0x9101: "ComponentsConfiguration", + 0x9102: "CompressedBitsPerPixel", + 0x9201: "ShutterSpeedValue", + 0x9202: "ApertureValue", + 0x9203: "BrightnessValue", + 0x9204: "ExposureBiasValue", + 0x9205: "MaxApertureValue", + 0x9206: "SubjectDistance", + 0x9207: "MeteringMode", + 0x9208: "LightSource", + 0x9209: "Flash", + 0x920a: "FocalLength", + 0x920b: "FlashEnergy", + 0x920c: "SpatialFrequencyResponse", + 0x920d: "Noise", + 
0x9211: "ImageNumber", + 0x9212: "SecurityClassification", + 0x9213: "ImageHistory", + 0x9214: "SubjectLocation", + 0x9215: "ExposureIndex", + 0x9216: "TIFF/EPStandardID", + 0x927c: "MakerNote", + 0x9286: "UserComment", + 0x9290: "SubsecTime", + 0x9291: "SubsecTimeOriginal", + 0x9292: "SubsecTimeDigitized", + 0x9c9b: "XPTitle", + 0x9c9c: "XPComment", + 0x9c9d: "XPAuthor", + 0x9c9e: "XPKeywords", + 0x9c9f: "XPSubject", + 0xa000: "FlashPixVersion", + 0xa001: "ColorSpace", + 0xa002: "ExifImageWidth", + 0xa003: "ExifImageHeight", + 0xa004: "RelatedSoundFile", + 0xa005: "ExifInteroperabilityOffset", + 0xa20b: "FlashEnergy", + 0xa20c: "SpatialFrequencyResponse", + 0xa20e: "FocalPlaneXResolution", + 0xa20f: "FocalPlaneYResolution", + 0xa210: "FocalPlaneResolutionUnit", + 0xa214: "SubjectLocation", + 0xa215: "ExposureIndex", + 0xa217: "SensingMethod", + 0xa300: "FileSource", + 0xa301: "SceneType", + 0xa302: "CFAPattern", + 0xa401: "CustomRendered", + 0xa402: "ExposureMode", + 0xa403: "WhiteBalance", + 0xa404: "DigitalZoomRatio", + 0xa405: "FocalLengthIn35mmFilm", + 0xa406: "SceneCaptureType", + 0xa407: "GainControl", + 0xa408: "Contrast", + 0xa409: "Saturation", + 0xa40a: "Sharpness", + 0xa40b: "DeviceSettingDescription", + 0xa40c: "SubjectDistanceRange", + 0xa420: "ImageUniqueID", + 0xa430: "CameraOwnerName", + 0xa431: "BodySerialNumber", + 0xa432: "LensSpecification", + 0xa433: "LensMake", + 0xa434: "LensModel", + 0xa435: "LensSerialNumber", + 0xa500: "Gamma", + 0xc4a5: "PrintImageMatching", + 0xc612: "DNGVersion", + 0xc613: "DNGBackwardVersion", + 0xc614: "UniqueCameraModel", + 0xc615: "LocalizedCameraModel", + 0xc616: "CFAPlaneColor", + 0xc617: "CFALayout", + 0xc618: "LinearizationTable", + 0xc619: "BlackLevelRepeatDim", + 0xc61a: "BlackLevel", + 0xc61b: "BlackLevelDeltaH", + 0xc61c: "BlackLevelDeltaV", + 0xc61d: "WhiteLevel", + 0xc61e: "DefaultScale", + 0xc61f: "DefaultCropOrigin", + 0xc620: "DefaultCropSize", + 0xc621: "ColorMatrix1", + 0xc622: "ColorMatrix2", + 
0xc623: "CameraCalibration1", + 0xc624: "CameraCalibration2", + 0xc625: "ReductionMatrix1", + 0xc626: "ReductionMatrix2", + 0xc627: "AnalogBalance", + 0xc628: "AsShotNeutral", + 0xc629: "AsShotWhiteXY", + 0xc62a: "BaselineExposure", + 0xc62b: "BaselineNoise", + 0xc62c: "BaselineSharpness", + 0xc62d: "BayerGreenSplit", + 0xc62e: "LinearResponseLimit", + 0xc62f: "CameraSerialNumber", + 0xc630: "LensInfo", + 0xc631: "ChromaBlurRadius", + 0xc632: "AntiAliasStrength", + 0xc633: "ShadowScale", + 0xc634: "DNGPrivateData", + 0xc635: "MakerNoteSafety", + 0xc65a: "CalibrationIlluminant1", + 0xc65b: "CalibrationIlluminant2", + 0xc65c: "BestQualityScale", + 0xc65d: "RawDataUniqueID", + 0xc68b: "OriginalRawFileName", + 0xc68c: "OriginalRawFileData", + 0xc68d: "ActiveArea", + 0xc68e: "MaskedAreas", + 0xc68f: "AsShotICCProfile", + 0xc690: "AsShotPreProfileMatrix", + 0xc691: "CurrentICCProfile", + 0xc692: "CurrentPreProfileMatrix", + 0xc6bf: "ColorimetricReference", + 0xc6f3: "CameraCalibrationSignature", + 0xc6f4: "ProfileCalibrationSignature", + 0xc6f6: "AsShotProfileName", + 0xc6f7: "NoiseReductionApplied", + 0xc6f8: "ProfileName", + 0xc6f9: "ProfileHueSatMapDims", + 0xc6fa: "ProfileHueSatMapData1", + 0xc6fb: "ProfileHueSatMapData2", + 0xc6fc: "ProfileToneCurve", + 0xc6fd: "ProfileEmbedPolicy", + 0xc6fe: "ProfileCopyright", + 0xc714: "ForwardMatrix1", + 0xc715: "ForwardMatrix2", + 0xc716: "PreviewApplicationName", + 0xc717: "PreviewApplicationVersion", + 0xc718: "PreviewSettingsName", + 0xc719: "PreviewSettingsDigest", + 0xc71a: "PreviewColorSpace", + 0xc71b: "PreviewDateTime", + 0xc71c: "RawImageDigest", + 0xc71d: "OriginalRawFileDigest", + 0xc71e: "SubTileBlockSize", + 0xc71f: "RowInterleaveFactor", + 0xc725: "ProfileLookTableDims", + 0xc726: "ProfileLookTableData", + 0xc740: "OpcodeList1", + 0xc741: "OpcodeList2", + 0xc74e: "OpcodeList3", + 0xc761: "NoiseProfile" +} + +## +# Maps EXIF GPS tags to tag names. 
+ +GPSTAGS = { + 0: "GPSVersionID", + 1: "GPSLatitudeRef", + 2: "GPSLatitude", + 3: "GPSLongitudeRef", + 4: "GPSLongitude", + 5: "GPSAltitudeRef", + 6: "GPSAltitude", + 7: "GPSTimeStamp", + 8: "GPSSatellites", + 9: "GPSStatus", + 10: "GPSMeasureMode", + 11: "GPSDOP", + 12: "GPSSpeedRef", + 13: "GPSSpeed", + 14: "GPSTrackRef", + 15: "GPSTrack", + 16: "GPSImgDirectionRef", + 17: "GPSImgDirection", + 18: "GPSMapDatum", + 19: "GPSDestLatitudeRef", + 20: "GPSDestLatitude", + 21: "GPSDestLongitudeRef", + 22: "GPSDestLongitude", + 23: "GPSDestBearingRef", + 24: "GPSDestBearing", + 25: "GPSDestDistanceRef", + 26: "GPSDestDistance", + 27: "GPSProcessingMethod", + 28: "GPSAreaInformation", + 29: "GPSDateStamp", + 30: "GPSDifferential", + 31: "GPSHPositioningError", +} diff --git a/thesisenv/lib/python3.6/site-packages/PIL/FitsStubImagePlugin.py b/thesisenv/lib/python3.6/site-packages/PIL/FitsStubImagePlugin.py new file mode 100644 index 0000000..63c195c --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/PIL/FitsStubImagePlugin.py @@ -0,0 +1,75 @@ +# +# The Python Imaging Library +# $Id$ +# +# FITS stub adapter +# +# Copyright (c) 1998-2003 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +from . import Image, ImageFile + +_handler = None + + +def register_handler(handler): + """ + Install application-specific FITS image handler. + + :param handler: Handler object. + """ + global _handler + _handler = handler + +# -------------------------------------------------------------------- +# Image adapter + + +def _accept(prefix): + return prefix[:6] == b"SIMPLE" + + +class FITSStubImageFile(ImageFile.StubImageFile): + + format = "FITS" + format_description = "FITS" + + def _open(self): + + offset = self.fp.tell() + + if not _accept(self.fp.read(6)): + raise SyntaxError("Not a FITS file") + + # FIXME: add more sanity checks here; mandatory header items + # include SIMPLE, BITPIX, NAXIS, etc. 
+ + self.fp.seek(offset) + + # make something up + self.mode = "F" + self._size = 1, 1 + + loader = self._load() + if loader: + loader.open(self) + + def _load(self): + return _handler + + +def _save(im, fp, filename): + if _handler is None or not hasattr("_handler", "save"): + raise IOError("FITS save handler not installed") + _handler.save(im, fp, filename) + + +# -------------------------------------------------------------------- +# Registry + +Image.register_open(FITSStubImageFile.format, FITSStubImageFile, _accept) +Image.register_save(FITSStubImageFile.format, _save) + +Image.register_extensions(FITSStubImageFile.format, [".fit", ".fits"]) diff --git a/thesisenv/lib/python3.6/site-packages/PIL/FliImagePlugin.py b/thesisenv/lib/python3.6/site-packages/PIL/FliImagePlugin.py new file mode 100644 index 0000000..c78c8c6 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/PIL/FliImagePlugin.py @@ -0,0 +1,163 @@ +# +# The Python Imaging Library. +# $Id$ +# +# FLI/FLC file handling. +# +# History: +# 95-09-01 fl Created +# 97-01-03 fl Fixed parser, setup decoder tile +# 98-07-15 fl Renamed offset attribute to avoid name clash +# +# Copyright (c) Secret Labs AB 1997-98. +# Copyright (c) Fredrik Lundh 1995-97. +# +# See the README file for information on usage and redistribution. +# + + +from . import Image, ImageFile, ImagePalette +from ._binary import i8, i16le as i16, i32le as i32, o8 + +__version__ = "0.2" + + +# +# decoder + +def _accept(prefix): + return len(prefix) >= 6 and i16(prefix[4:6]) in [0xAF11, 0xAF12] + + +## +# Image plugin for the FLI/FLC animation format. Use the seek +# method to load individual frames. 
+ +class FliImageFile(ImageFile.ImageFile): + + format = "FLI" + format_description = "Autodesk FLI/FLC Animation" + _close_exclusive_fp_after_loading = False + + def _open(self): + + # HEAD + s = self.fp.read(128) + magic = i16(s[4:6]) + if not (magic in [0xAF11, 0xAF12] and + i16(s[14:16]) in [0, 3] and # flags + s[20:22] == b"\x00\x00"): # reserved + raise SyntaxError("not an FLI/FLC file") + + # frames + self.__framecount = i16(s[6:8]) + + # image characteristics + self.mode = "P" + self._size = i16(s[8:10]), i16(s[10:12]) + + # animation speed + duration = i32(s[16:20]) + if magic == 0xAF11: + duration = (duration * 1000) // 70 + self.info["duration"] = duration + + # look for palette + palette = [(a, a, a) for a in range(256)] + + s = self.fp.read(16) + + self.__offset = 128 + + if i16(s[4:6]) == 0xF100: + # prefix chunk; ignore it + self.__offset = self.__offset + i32(s) + s = self.fp.read(16) + + if i16(s[4:6]) == 0xF1FA: + # look for palette chunk + s = self.fp.read(6) + if i16(s[4:6]) == 11: + self._palette(palette, 2) + elif i16(s[4:6]) == 4: + self._palette(palette, 0) + + palette = [o8(r)+o8(g)+o8(b) for (r, g, b) in palette] + self.palette = ImagePalette.raw("RGB", b"".join(palette)) + + # set things up to decode first frame + self.__frame = -1 + self.__fp = self.fp + self.__rewind = self.fp.tell() + self.seek(0) + + def _palette(self, palette, shift): + # load palette + + i = 0 + for e in range(i16(self.fp.read(2))): + s = self.fp.read(2) + i = i + i8(s[0]) + n = i8(s[1]) + if n == 0: + n = 256 + s = self.fp.read(n * 3) + for n in range(0, len(s), 3): + r = i8(s[n]) << shift + g = i8(s[n+1]) << shift + b = i8(s[n+2]) << shift + palette[i] = (r, g, b) + i += 1 + + @property + def n_frames(self): + return self.__framecount + + @property + def is_animated(self): + return self.__framecount > 1 + + def seek(self, frame): + if not self._seek_check(frame): + return + if frame < self.__frame: + self._seek(0) + + for f in range(self.__frame + 1, frame + 1): + 
self._seek(f) + + def _seek(self, frame): + if frame == 0: + self.__frame = -1 + self.__fp.seek(self.__rewind) + self.__offset = 128 + + if frame != self.__frame + 1: + raise ValueError("cannot seek to frame %d" % frame) + self.__frame = frame + + # move to next frame + self.fp = self.__fp + self.fp.seek(self.__offset) + + s = self.fp.read(4) + if not s: + raise EOFError + + framesize = i32(s) + + self.decodermaxblock = framesize + self.tile = [("fli", (0, 0)+self.size, self.__offset, None)] + + self.__offset += framesize + + def tell(self): + return self.__frame + + +# +# registry + +Image.register_open(FliImageFile.format, FliImageFile, _accept) + +Image.register_extensions(FliImageFile.format, [".fli", ".flc"]) diff --git a/thesisenv/lib/python3.6/site-packages/PIL/FontFile.py b/thesisenv/lib/python3.6/site-packages/PIL/FontFile.py new file mode 100644 index 0000000..305e8af --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/PIL/FontFile.py @@ -0,0 +1,113 @@ +# +# The Python Imaging Library +# $Id$ +# +# base class for raster font file parsers +# +# history: +# 1997-06-05 fl created +# 1997-08-19 fl restrict image width +# +# Copyright (c) 1997-1998 by Secret Labs AB +# Copyright (c) 1997-1998 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +from __future__ import print_function + +import os +from . import Image, _binary + +WIDTH = 800 + + +def puti16(fp, values): + # write network order (big-endian) 16-bit sequence + for v in values: + if v < 0: + v += 65536 + fp.write(_binary.o16be(v)) + + +## +# Base class for raster font file handlers. 
+ +class FontFile(object): + + bitmap = None + + def __init__(self): + + self.info = {} + self.glyph = [None] * 256 + + def __getitem__(self, ix): + return self.glyph[ix] + + def compile(self): + "Create metrics and bitmap" + + if self.bitmap: + return + + # create bitmap large enough to hold all data + h = w = maxwidth = 0 + lines = 1 + for glyph in self: + if glyph: + d, dst, src, im = glyph + h = max(h, src[3] - src[1]) + w = w + (src[2] - src[0]) + if w > WIDTH: + lines += 1 + w = (src[2] - src[0]) + maxwidth = max(maxwidth, w) + + xsize = maxwidth + ysize = lines * h + + if xsize == 0 and ysize == 0: + return "" + + self.ysize = h + + # paste glyphs into bitmap + self.bitmap = Image.new("1", (xsize, ysize)) + self.metrics = [None] * 256 + x = y = 0 + for i in range(256): + glyph = self[i] + if glyph: + d, dst, src, im = glyph + xx = src[2] - src[0] + # yy = src[3] - src[1] + x0, y0 = x, y + x = x + xx + if x > WIDTH: + x, y = 0, y + h + x0, y0 = x, y + x = xx + s = src[0] + x0, src[1] + y0, src[2] + x0, src[3] + y0 + self.bitmap.paste(im.crop(src), s) + self.metrics[i] = d, dst, s + + def save(self, filename): + "Save font" + + self.compile() + + # font data + self.bitmap.save(os.path.splitext(filename)[0] + ".pbm", "PNG") + + # font metrics + with open(os.path.splitext(filename)[0] + ".pil", "wb") as fp: + fp.write(b"PILfont\n") + fp.write((";;;;;;%d;\n" % self.ysize).encode('ascii')) # HACK!!! + fp.write(b"DATA\n") + for id in range(256): + m = self.metrics[id] + if not m: + puti16(fp, [0] * 10) + else: + puti16(fp, m[0] + m[1] + m[2]) diff --git a/thesisenv/lib/python3.6/site-packages/PIL/FpxImagePlugin.py b/thesisenv/lib/python3.6/site-packages/PIL/FpxImagePlugin.py new file mode 100644 index 0000000..9f284fd --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/PIL/FpxImagePlugin.py @@ -0,0 +1,225 @@ +# +# THIS IS WORK IN PROGRESS +# +# The Python Imaging Library. 
+# $Id$ +# +# FlashPix support for PIL +# +# History: +# 97-01-25 fl Created (reads uncompressed RGB images only) +# +# Copyright (c) Secret Labs AB 1997. +# Copyright (c) Fredrik Lundh 1997. +# +# See the README file for information on usage and redistribution. +# + +from __future__ import print_function + +from . import Image, ImageFile +from ._binary import i32le as i32, i8 + +import olefile + +__version__ = "0.1" + +# we map from colour field tuples to (mode, rawmode) descriptors +MODES = { + # opacity + (0x00007ffe): ("A", "L"), + # monochrome + (0x00010000,): ("L", "L"), + (0x00018000, 0x00017ffe): ("RGBA", "LA"), + # photo YCC + (0x00020000, 0x00020001, 0x00020002): ("RGB", "YCC;P"), + (0x00028000, 0x00028001, 0x00028002, 0x00027ffe): ("RGBA", "YCCA;P"), + # standard RGB (NIFRGB) + (0x00030000, 0x00030001, 0x00030002): ("RGB", "RGB"), + (0x00038000, 0x00038001, 0x00038002, 0x00037ffe): ("RGBA", "RGBA"), +} + + +# +# -------------------------------------------------------------------- + +def _accept(prefix): + return prefix[:8] == olefile.MAGIC + + +## +# Image plugin for the FlashPix images. + +class FpxImageFile(ImageFile.ImageFile): + + format = "FPX" + format_description = "FlashPix" + + def _open(self): + # + # read the OLE directory and see if this is a likely + # to be a FlashPix file + + try: + self.ole = olefile.OleFileIO(self.fp) + except IOError: + raise SyntaxError("not an FPX file; invalid OLE file") + + if self.ole.root.clsid != "56616700-C154-11CE-8553-00AA00A1F95B": + raise SyntaxError("not an FPX file; bad root CLSID") + + self._open_index(1) + + def _open_index(self, index=1): + # + # get the Image Contents Property Set + + prop = self.ole.getproperties([ + "Data Object Store %06d" % index, + "\005Image Contents" + ]) + + # size (highest resolution) + + self._size = prop[0x1000002], prop[0x1000003] + + size = max(self.size) + i = 1 + while size > 64: + size = size / 2 + i += 1 + self.maxid = i - 1 + + # mode. 
instead of using a single field for this, flashpix + # requires you to specify the mode for each channel in each + # resolution subimage, and leaves it to the decoder to make + # sure that they all match. for now, we'll cheat and assume + # that this is always the case. + + id = self.maxid << 16 + + s = prop[0x2000002 | id] + + colors = [] + for i in range(i32(s, 4)): + # note: for now, we ignore the "uncalibrated" flag + colors.append(i32(s, 8+i*4) & 0x7fffffff) + + self.mode, self.rawmode = MODES[tuple(colors)] + + # load JPEG tables, if any + self.jpeg = {} + for i in range(256): + id = 0x3000001 | (i << 16) + if id in prop: + self.jpeg[i] = prop[id] + + self._open_subimage(1, self.maxid) + + def _open_subimage(self, index=1, subimage=0): + # + # setup tile descriptors for a given subimage + + stream = [ + "Data Object Store %06d" % index, + "Resolution %04d" % subimage, + "Subimage 0000 Header" + ] + + fp = self.ole.openstream(stream) + + # skip prefix + fp.read(28) + + # header stream + s = fp.read(36) + + size = i32(s, 4), i32(s, 8) + # tilecount = i32(s, 12) + tilesize = i32(s, 16), i32(s, 20) + # channels = i32(s, 24) + offset = i32(s, 28) + length = i32(s, 32) + + if size != self.size: + raise IOError("subimage mismatch") + + # get tile descriptors + fp.seek(28 + offset) + s = fp.read(i32(s, 12) * length) + + x = y = 0 + xsize, ysize = size + xtile, ytile = tilesize + self.tile = [] + + for i in range(0, len(s), length): + + compression = i32(s, i+8) + + if compression == 0: + self.tile.append(("raw", (x, y, x+xtile, y+ytile), + i32(s, i) + 28, (self.rawmode))) + + elif compression == 1: + + # FIXME: the fill decoder is not implemented + self.tile.append(("fill", (x, y, x+xtile, y+ytile), + i32(s, i) + 28, (self.rawmode, s[12:16]))) + + elif compression == 2: + + internal_color_conversion = i8(s[14]) + jpeg_tables = i8(s[15]) + rawmode = self.rawmode + + if internal_color_conversion: + # The image is stored as usual (usually YCbCr). 
+ if rawmode == "RGBA": + # For "RGBA", data is stored as YCbCrA based on + # negative RGB. The following trick works around + # this problem : + jpegmode, rawmode = "YCbCrK", "CMYK" + else: + jpegmode = None # let the decoder decide + + else: + # The image is stored as defined by rawmode + jpegmode = rawmode + + self.tile.append(("jpeg", (x, y, x+xtile, y+ytile), + i32(s, i) + 28, (rawmode, jpegmode))) + + # FIXME: jpeg tables are tile dependent; the prefix + # data must be placed in the tile descriptor itself! + + if jpeg_tables: + self.tile_prefix = self.jpeg[jpeg_tables] + + else: + raise IOError("unknown/invalid compression") + + x = x + xtile + if x >= xsize: + x, y = 0, y + ytile + if y >= ysize: + break # isn't really required + + self.stream = stream + self.fp = None + + def load(self): + + if not self.fp: + self.fp = self.ole.openstream(self.stream[:2] + + ["Subimage 0000 Data"]) + + return ImageFile.ImageFile.load(self) + +# +# -------------------------------------------------------------------- + + +Image.register_open(FpxImageFile.format, FpxImageFile, _accept) + +Image.register_extension(FpxImageFile.format, ".fpx") diff --git a/thesisenv/lib/python3.6/site-packages/PIL/FtexImagePlugin.py b/thesisenv/lib/python3.6/site-packages/PIL/FtexImagePlugin.py new file mode 100644 index 0000000..08ce0e0 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/PIL/FtexImagePlugin.py @@ -0,0 +1,100 @@ +""" +A Pillow loader for .ftc and .ftu files (FTEX) +Jerome Leclanche + +The contents of this file are hereby released in the public domain (CC0) +Full text of the CC0 license: + https://creativecommons.org/publicdomain/zero/1.0/ + +Independence War 2: Edge Of Chaos - Texture File Format - 16 October 2001 + +The textures used for 3D objects in Independence War 2: Edge Of Chaos are in a +packed custom format called FTEX. This file format uses file extensions FTC +and FTU. +* FTC files are compressed textures (using standard texture compression). 
+* FTU files are not compressed. +Texture File Format +The FTC and FTU texture files both use the same format. This +has the following structure: +{header} +{format_directory} +{data} +Where: +{header} = { u32:magic, u32:version, u32:width, u32:height, u32:mipmap_count, u32:format_count } + +* The "magic" number is "FTEX". +* "width" and "height" are the dimensions of the texture. +* "mipmap_count" is the number of mipmaps in the texture. +* "format_count" is the number of texture formats (different versions of the +same texture) in this file. + +{format_directory} = format_count * { u32:format, u32:where } + +The format value is 0 for DXT1 compressed textures and 1 for 24-bit RGB +uncompressed textures. +The texture data for a format starts at the position "where" in the file. + +Each set of texture data in the file has the following structure: +{data} = format_count * { u32:mipmap_size, mipmap_size * { u8 } } +* "mipmap_size" is the number of bytes in that mip level. For compressed +textures this is the size of the texture data compressed with DXT1. For 24 bit +uncompressed textures, this is 3 * width * height. Following this are the image +bytes for that mipmap level. + +Note: All data is stored in little-Endian (Intel) byte order. +""" + +import struct +from io import BytesIO +from . import Image, ImageFile + + +MAGIC = b"FTEX" +FORMAT_DXT1 = 0 +FORMAT_UNCOMPRESSED = 1 + + +class FtexImageFile(ImageFile.ImageFile): + format = "FTEX" + format_description = "Texture File Format (IW2:EOC)" + + def _open(self): + magic = struct.unpack("= 8 and \ + i32(prefix[:4]) >= 20 and i32(prefix[4:8]) in (1, 2) + + +## +# Image plugin for the GIMP brush format. 
+ +class GbrImageFile(ImageFile.ImageFile): + + format = "GBR" + format_description = "GIMP brush file" + + def _open(self): + header_size = i32(self.fp.read(4)) + version = i32(self.fp.read(4)) + if header_size < 20: + raise SyntaxError("not a GIMP brush") + if version not in (1, 2): + raise SyntaxError("Unsupported GIMP brush version: %s" % version) + + width = i32(self.fp.read(4)) + height = i32(self.fp.read(4)) + color_depth = i32(self.fp.read(4)) + if width <= 0 or height <= 0: + raise SyntaxError("not a GIMP brush") + if color_depth not in (1, 4): + raise SyntaxError( + "Unsupported GIMP brush color depth: %s" % color_depth) + + if version == 1: + comment_length = header_size-20 + else: + comment_length = header_size-28 + magic_number = self.fp.read(4) + if magic_number != b'GIMP': + raise SyntaxError("not a GIMP brush, bad magic number") + self.info['spacing'] = i32(self.fp.read(4)) + + comment = self.fp.read(comment_length)[:-1] + + if color_depth == 1: + self.mode = "L" + else: + self.mode = 'RGBA' + + self._size = width, height + + self.info["comment"] = comment + + # Image might not be small + Image._decompression_bomb_check(self.size) + + # Data is an uncompressed block of w * h * bytes/pixel + self._data_size = width * height * color_depth + + def load(self): + self.im = Image.core.new(self.mode, self.size) + self.frombytes(self.fp.read(self._data_size)) + +# +# registry + + +Image.register_open(GbrImageFile.format, GbrImageFile, _accept) +Image.register_extension(GbrImageFile.format, ".gbr") diff --git a/thesisenv/lib/python3.6/site-packages/PIL/GdImageFile.py b/thesisenv/lib/python3.6/site-packages/PIL/GdImageFile.py new file mode 100644 index 0000000..1361542 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/PIL/GdImageFile.py @@ -0,0 +1,84 @@ +# +# The Python Imaging Library. +# $Id$ +# +# GD file handling +# +# History: +# 1996-04-12 fl Created +# +# Copyright (c) 1997 by Secret Labs AB. +# Copyright (c) 1996 by Fredrik Lundh. 
+# +# See the README file for information on usage and redistribution. +# + + +# NOTE: This format cannot be automatically recognized, so the +# class is not registered for use with Image.open(). To open a +# gd file, use the GdImageFile.open() function instead. + +# THE GD FORMAT IS NOT DESIGNED FOR DATA INTERCHANGE. This +# implementation is provided for convenience and demonstrational +# purposes only. + + +from . import ImageFile, ImagePalette +from ._binary import i8, i16be as i16, i32be as i32 + +__version__ = "0.1" + + +## +# Image plugin for the GD uncompressed format. Note that this format +# is not supported by the standard Image.open function. To use +# this plugin, you have to import the GdImageFile module and +# use the GdImageFile.open function. + +class GdImageFile(ImageFile.ImageFile): + + format = "GD" + format_description = "GD uncompressed images" + + def _open(self): + + # Header + s = self.fp.read(1037) + + if not i16(s[:2]) in [65534, 65535]: + raise SyntaxError("Not a valid GD 2.x .gd file") + + self.mode = "L" # FIXME: "P" + self._size = i16(s[2:4]), i16(s[4:6]) + + trueColor = i8(s[6]) + trueColorOffset = 2 if trueColor else 0 + + # transparency index + tindex = i32(s[7+trueColorOffset:7+trueColorOffset+4]) + if tindex < 256: + self.info["transparency"] = tindex + + self.palette = ImagePalette.raw("XBGR", s[7+trueColorOffset+4:7+trueColorOffset+4+256*4]) + + self.tile = [("raw", (0, 0)+self.size, 7+trueColorOffset+4+256*4, + ("L", 0, 1))] + + +def open(fp, mode="r"): + """ + Load texture from a GD image file. + + :param filename: GD file name, or an opened file handle. + :param mode: Optional mode. In this version, if the mode argument + is given, it must be "r". + :returns: An image instance. + :raises IOError: If the image could not be read. 
+ """ + if mode != "r": + raise ValueError("bad mode") + + try: + return GdImageFile(fp) + except SyntaxError: + raise IOError("cannot identify this image file") diff --git a/thesisenv/lib/python3.6/site-packages/PIL/GifImagePlugin.py b/thesisenv/lib/python3.6/site-packages/PIL/GifImagePlugin.py new file mode 100644 index 0000000..107c015 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/PIL/GifImagePlugin.py @@ -0,0 +1,825 @@ +# +# The Python Imaging Library. +# $Id$ +# +# GIF file handling +# +# History: +# 1995-09-01 fl Created +# 1996-12-14 fl Added interlace support +# 1996-12-30 fl Added animation support +# 1997-01-05 fl Added write support, fixed local colour map bug +# 1997-02-23 fl Make sure to load raster data in getdata() +# 1997-07-05 fl Support external decoder (0.4) +# 1998-07-09 fl Handle all modes when saving (0.5) +# 1998-07-15 fl Renamed offset attribute to avoid name clash +# 2001-04-16 fl Added rewind support (seek to frame 0) (0.6) +# 2001-04-17 fl Added palette optimization (0.7) +# 2002-06-06 fl Added transparency support for save (0.8) +# 2004-02-24 fl Disable interlacing for small images +# +# Copyright (c) 1997-2004 by Secret Labs AB +# Copyright (c) 1995-2004 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +from . import Image, ImageFile, ImagePalette, ImageChops, ImageSequence +from ._binary import i8, i16le as i16, o8, o16le as o16 + +import itertools + +__version__ = "0.9" + + +# -------------------------------------------------------------------- +# Identify/read GIF files + +def _accept(prefix): + return prefix[:6] in [b"GIF87a", b"GIF89a"] + + +## +# Image plugin for GIF images. This plugin supports both GIF87 and +# GIF89 images. 
+ +class GifImageFile(ImageFile.ImageFile): + + format = "GIF" + format_description = "Compuserve GIF" + _close_exclusive_fp_after_loading = False + + global_palette = None + + def data(self): + s = self.fp.read(1) + if s and i8(s): + return self.fp.read(i8(s)) + return None + + def _open(self): + + # Screen + s = self.fp.read(13) + if s[:6] not in [b"GIF87a", b"GIF89a"]: + raise SyntaxError("not a GIF file") + + self.info["version"] = s[:6] + self._size = i16(s[6:]), i16(s[8:]) + self.tile = [] + flags = i8(s[10]) + bits = (flags & 7) + 1 + + if flags & 128: + # get global palette + self.info["background"] = i8(s[11]) + # check if palette contains colour indices + p = self.fp.read(3 << bits) + for i in range(0, len(p), 3): + if not (i//3 == i8(p[i]) == i8(p[i+1]) == i8(p[i+2])): + p = ImagePalette.raw("RGB", p) + self.global_palette = self.palette = p + break + + self.__fp = self.fp # FIXME: hack + self.__rewind = self.fp.tell() + self._n_frames = None + self._is_animated = None + self._seek(0) # get ready to read first frame + + @property + def n_frames(self): + if self._n_frames is None: + current = self.tell() + try: + while True: + self.seek(self.tell() + 1) + except EOFError: + self._n_frames = self.tell() + 1 + self.seek(current) + return self._n_frames + + @property + def is_animated(self): + if self._is_animated is None: + if self._n_frames is not None: + self._is_animated = self._n_frames != 1 + else: + current = self.tell() + + try: + self.seek(1) + self._is_animated = True + except EOFError: + self._is_animated = False + + self.seek(current) + return self._is_animated + + def seek(self, frame): + if not self._seek_check(frame): + return + if frame < self.__frame: + self._seek(0) + + last_frame = self.__frame + for f in range(self.__frame + 1, frame + 1): + try: + self._seek(f) + except EOFError: + self.seek(last_frame) + raise EOFError("no more images in GIF file") + + def _seek(self, frame): + + if frame == 0: + # rewind + self.__offset = 0 + 
self.dispose = None + self.dispose_extent = [0, 0, 0, 0] # x0, y0, x1, y1 + self.__frame = -1 + self.__fp.seek(self.__rewind) + self._prev_im = None + self.disposal_method = 0 + else: + # ensure that the previous frame was loaded + if not self.im: + self.load() + + if frame != self.__frame + 1: + raise ValueError("cannot seek to frame %d" % frame) + self.__frame = frame + + self.tile = [] + + self.fp = self.__fp + if self.__offset: + # backup to last frame + self.fp.seek(self.__offset) + while self.data(): + pass + self.__offset = 0 + + if self.dispose: + self.im.paste(self.dispose, self.dispose_extent) + + from copy import copy + self.palette = copy(self.global_palette) + + info = {} + while True: + + s = self.fp.read(1) + if not s or s == b";": + break + + elif s == b"!": + # + # extensions + # + s = self.fp.read(1) + block = self.data() + if i8(s) == 249: + # + # graphic control extension + # + flags = i8(block[0]) + if flags & 1: + info["transparency"] = i8(block[3]) + info["duration"] = i16(block[1:3]) * 10 + + # disposal method - find the value of bits 4 - 6 + dispose_bits = 0b00011100 & flags + dispose_bits = dispose_bits >> 2 + if dispose_bits: + # only set the dispose if it is not + # unspecified. 
I'm not sure if this is + # correct, but it seems to prevent the last + # frame from looking odd for some animations + self.disposal_method = dispose_bits + elif i8(s) == 254: + # + # comment extension + # + info["comment"] = block + elif i8(s) == 255: + # + # application extension + # + info["extension"] = block, self.fp.tell() + if block[:11] == b"NETSCAPE2.0": + block = self.data() + if len(block) >= 3 and i8(block[0]) == 1: + info["loop"] = i16(block[1:3]) + while self.data(): + pass + + elif s == b",": + # + # local image + # + s = self.fp.read(9) + + # extent + x0, y0 = i16(s[0:]), i16(s[2:]) + x1, y1 = x0 + i16(s[4:]), y0 + i16(s[6:]) + self.dispose_extent = x0, y0, x1, y1 + flags = i8(s[8]) + + interlace = (flags & 64) != 0 + + if flags & 128: + bits = (flags & 7) + 1 + self.palette =\ + ImagePalette.raw("RGB", self.fp.read(3 << bits)) + + # image data + bits = i8(self.fp.read(1)) + self.__offset = self.fp.tell() + self.tile = [("gif", + (x0, y0, x1, y1), + self.__offset, + (bits, interlace))] + break + + else: + pass + # raise IOError, "illegal GIF tag `%x`" % i8(s) + + try: + if self.disposal_method < 2: + # do not dispose or none specified + self.dispose = None + elif self.disposal_method == 2: + # replace with background colour + self.dispose = Image.core.fill("P", self.size, + self.info["background"]) + else: + # replace with previous contents + if self.im: + self.dispose = self.im.copy() + + # only dispose the extent in this frame + if self.dispose: + self.dispose = self._crop(self.dispose, self.dispose_extent) + except (AttributeError, KeyError): + pass + + if not self.tile: + # self.__fp = None + raise EOFError + + for k in ["transparency", "duration", "comment", "extension", "loop"]: + if k in info: + self.info[k] = info[k] + elif k in self.info: + del self.info[k] + + self.mode = "L" + if self.palette: + self.mode = "P" + + def tell(self): + return self.__frame + + def load_end(self): + ImageFile.ImageFile.load_end(self) + + # if the disposal 
method is 'do not dispose', transparent + # pixels should show the content of the previous frame + if self._prev_im and self.disposal_method == 1: + # we do this by pasting the updated area onto the previous + # frame which we then use as the current image content + updated = self._crop(self.im, self.dispose_extent) + self._prev_im.paste(updated, self.dispose_extent, + updated.convert('RGBA')) + self.im = self._prev_im + self._prev_im = self.im.copy() + +# -------------------------------------------------------------------- +# Write GIF files + + +RAWMODE = { + "1": "L", + "L": "L", + "P": "P" +} + + +def _normalize_mode(im, initial_call=False): + """ + Takes an image (or frame), returns an image in a mode that is appropriate + for saving in a Gif. + + It may return the original image, or it may return an image converted to + palette or 'L' mode. + + UNDONE: What is the point of mucking with the initial call palette, for + an image that shouldn't have a palette, or it would be a mode 'P' and + get returned in the RAWMODE clause. + + :param im: Image object + :param initial_call: Default false, set to true for a single frame. + :returns: Image object + """ + if im.mode in RAWMODE: + im.load() + return im + if Image.getmodebase(im.mode) == "RGB": + if initial_call: + palette_size = 256 + if im.palette: + palette_size = len(im.palette.getdata()[1]) // 3 + return im.convert("P", palette=Image.ADAPTIVE, colors=palette_size) + else: + return im.convert("P") + return im.convert("L") + + +def _normalize_palette(im, palette, info): + """ + Normalizes the palette for image. + - Sets the palette to the incoming palette, if provided. + - Ensures that there's a palette for L mode images + - Optimizes the palette if necessary/desired. + + :param im: Image object + :param palette: bytes object containing the source palette, or .... 
+ :param info: encoderinfo + :returns: Image object + """ + source_palette = None + if palette: + # a bytes palette + if isinstance(palette, (bytes, bytearray, list)): + source_palette = bytearray(palette[:768]) + if isinstance(palette, ImagePalette.ImagePalette): + source_palette = bytearray(itertools.chain.from_iterable( + zip(palette.palette[:256], + palette.palette[256:512], + palette.palette[512:768]))) + + if im.mode == "P": + if not source_palette: + source_palette = im.im.getpalette("RGB")[:768] + else: # L-mode + if not source_palette: + source_palette = bytearray(i//3 for i in range(768)) + im.palette = ImagePalette.ImagePalette("RGB", + palette=source_palette) + + used_palette_colors = _get_optimize(im, info) + if used_palette_colors is not None: + return im.remap_palette(used_palette_colors, source_palette) + + im.palette.palette = source_palette + return im + + +def _write_single_frame(im, fp, palette): + im_out = _normalize_mode(im, True) + im_out = _normalize_palette(im_out, palette, im.encoderinfo) + + for s in _get_global_header(im_out, im.encoderinfo): + fp.write(s) + + # local image header + flags = 0 + if get_interlace(im): + flags = flags | 64 + _write_local_header(fp, im, (0, 0), flags) + + im_out.encoderconfig = (8, get_interlace(im)) + ImageFile._save(im_out, fp, [("gif", (0, 0)+im.size, 0, + RAWMODE[im_out.mode])]) + + fp.write(b"\0") # end of image data + + +def _write_multiple_frames(im, fp, palette): + + duration = im.encoderinfo.get("duration", None) + disposal = im.encoderinfo.get('disposal', None) + + im_frames = [] + frame_count = 0 + for imSequence in itertools.chain([im], + im.encoderinfo.get("append_images", [])): + for im_frame in ImageSequence.Iterator(imSequence): + # a copy is required here since seek can still mutate the image + im_frame = _normalize_mode(im_frame.copy()) + im_frame = _normalize_palette(im_frame, palette, im.encoderinfo) + + encoderinfo = im.encoderinfo.copy() + if isinstance(duration, (list, tuple)): + 
encoderinfo['duration'] = duration[frame_count] + if isinstance(disposal, (list, tuple)): + encoderinfo["disposal"] = disposal[frame_count] + frame_count += 1 + + if im_frames: + # delta frame + previous = im_frames[-1] + if _get_palette_bytes(im_frame) == \ + _get_palette_bytes(previous['im']): + delta = ImageChops.subtract_modulo(im_frame, + previous['im']) + else: + delta = ImageChops.subtract_modulo( + im_frame.convert('RGB'), previous['im'].convert('RGB')) + bbox = delta.getbbox() + if not bbox: + # This frame is identical to the previous frame + if duration: + previous['encoderinfo']['duration'] += \ + encoderinfo['duration'] + continue + else: + bbox = None + im_frames.append({ + 'im': im_frame, + 'bbox': bbox, + 'encoderinfo': encoderinfo + }) + + if len(im_frames) > 1: + for frame_data in im_frames: + im_frame = frame_data['im'] + if not frame_data['bbox']: + # global header + for s in _get_global_header(im_frame, + frame_data['encoderinfo']): + fp.write(s) + offset = (0, 0) + else: + # compress difference + frame_data['encoderinfo']['include_color_table'] = True + + im_frame = im_frame.crop(frame_data['bbox']) + offset = frame_data['bbox'][:2] + _write_frame_data(fp, im_frame, offset, frame_data['encoderinfo']) + return True + + +def _save_all(im, fp, filename): + _save(im, fp, filename, save_all=True) + + +def _save(im, fp, filename, save_all=False): + for k, v in im.info.items(): + im.encoderinfo.setdefault(k, v) + # header + try: + palette = im.encoderinfo["palette"] + except KeyError: + palette = None + im.encoderinfo["optimize"] = im.encoderinfo.get("optimize", True) + + if not save_all or not _write_multiple_frames(im, fp, palette): + _write_single_frame(im, fp, palette) + + fp.write(b";") # end of file + + if hasattr(fp, "flush"): + fp.flush() + + +def get_interlace(im): + interlace = im.encoderinfo.get("interlace", 1) + + # workaround for @PIL153 + if min(im.size) < 16: + interlace = 0 + + return interlace + + +def _write_local_header(fp, im, 
offset, flags): + transparent_color_exists = False + try: + transparency = im.encoderinfo["transparency"] + except KeyError: + pass + else: + transparency = int(transparency) + # optimize the block away if transparent color is not used + transparent_color_exists = True + + used_palette_colors = _get_optimize(im, im.encoderinfo) + if used_palette_colors is not None: + # adjust the transparency index after optimize + try: + transparency = used_palette_colors.index(transparency) + except ValueError: + transparent_color_exists = False + + if "duration" in im.encoderinfo: + duration = int(im.encoderinfo["duration"] / 10) + else: + duration = 0 + + disposal = int(im.encoderinfo.get('disposal', 0)) + + if transparent_color_exists or duration != 0 or disposal: + packed_flag = 1 if transparent_color_exists else 0 + packed_flag |= disposal << 2 + if not transparent_color_exists: + transparency = 0 + + fp.write(b"!" + + o8(249) + # extension intro + o8(4) + # length + o8(packed_flag) + # packed fields + o16(duration) + # duration + o8(transparency) + # transparency index + o8(0)) + + if "comment" in im.encoderinfo and \ + 1 <= len(im.encoderinfo["comment"]) <= 255: + fp.write(b"!" + + o8(254) + # extension intro + o8(len(im.encoderinfo["comment"])) + + im.encoderinfo["comment"] + + o8(0)) + if "loop" in im.encoderinfo: + number_of_loops = im.encoderinfo["loop"] + fp.write(b"!" 
+ + o8(255) + # extension intro + o8(11) + + b"NETSCAPE2.0" + + o8(3) + + o8(1) + + o16(number_of_loops) + # number of loops + o8(0)) + include_color_table = im.encoderinfo.get('include_color_table') + if include_color_table: + palette_bytes = _get_palette_bytes(im) + color_table_size = _get_color_table_size(palette_bytes) + if color_table_size: + flags = flags | 128 # local color table flag + flags = flags | color_table_size + + fp.write(b"," + + o16(offset[0]) + # offset + o16(offset[1]) + + o16(im.size[0]) + # size + o16(im.size[1]) + + o8(flags)) # flags + if include_color_table and color_table_size: + fp.write(_get_header_palette(palette_bytes)) + fp.write(o8(8)) # bits + + +def _save_netpbm(im, fp, filename): + + # Unused by default. + # To use, uncomment the register_save call at the end of the file. + # + # If you need real GIF compression and/or RGB quantization, you + # can use the external NETPBM/PBMPLUS utilities. See comments + # below for information on how to enable this. 
+ + import os + from subprocess import Popen, check_call, PIPE, CalledProcessError + file = im._dump() + + with open(filename, 'wb') as f: + if im.mode != "RGB": + with open(os.devnull, 'wb') as devnull: + check_call(["ppmtogif", file], stdout=f, stderr=devnull) + else: + # Pipe ppmquant output into ppmtogif + # "ppmquant 256 %s | ppmtogif > %s" % (file, filename) + quant_cmd = ["ppmquant", "256", file] + togif_cmd = ["ppmtogif"] + with open(os.devnull, 'wb') as devnull: + quant_proc = Popen(quant_cmd, stdout=PIPE, stderr=devnull) + togif_proc = Popen(togif_cmd, stdin=quant_proc.stdout, + stdout=f, stderr=devnull) + + # Allow ppmquant to receive SIGPIPE if ppmtogif exits + quant_proc.stdout.close() + + retcode = quant_proc.wait() + if retcode: + raise CalledProcessError(retcode, quant_cmd) + + retcode = togif_proc.wait() + if retcode: + raise CalledProcessError(retcode, togif_cmd) + + try: + os.unlink(file) + except OSError: + pass + + +# Force optimization so that we can test performance against +# cases where it took lots of memory and time previously. +_FORCE_OPTIMIZE = False + + +def _get_optimize(im, info): + """ + Palette optimization is a potentially expensive operation. + + This function determines if the palette should be optimized using + some heuristics, then returns the list of palette entries in use. + + :param im: Image object + :param info: encoderinfo + :returns: list of indexes of palette entries in use, or None + """ + if im.mode in ("P", "L") and info and info.get("optimize", 0): + # Potentially expensive operation. + + # The palette saves 3 bytes per color not used, but palette + # lengths are restricted to 3*(2**N) bytes. Max saving would + # be 768 -> 6 bytes if we went all the way down to 2 colors. + # * If we're over 128 colors, we can't save any space. + # * If there aren't any holes, it's not worth collapsing. + # * If we have a 'large' image, the palette is in the noise. 
+ + # create the new palette if not every color is used + optimise = _FORCE_OPTIMIZE or im.mode == 'L' + if optimise or im.width * im.height < 512 * 512: + # check which colors are used + used_palette_colors = [] + for i, count in enumerate(im.histogram()): + if count: + used_palette_colors.append(i) + + if optimise or (len(used_palette_colors) <= 128 and + max(used_palette_colors) > len(used_palette_colors)): + return used_palette_colors + + +def _get_color_table_size(palette_bytes): + # calculate the palette size for the header + import math + color_table_size = int(math.ceil(math.log(len(palette_bytes)//3, 2)))-1 + if color_table_size < 0: + color_table_size = 0 + return color_table_size + + +def _get_header_palette(palette_bytes): + """ + Returns the palette, null padded to the next power of 2 (*3) bytes + suitable for direct inclusion in the GIF header + + :param palette_bytes: Unpadded palette bytes, in RGBRGB form + :returns: Null padded palette + """ + color_table_size = _get_color_table_size(palette_bytes) + + # add the missing amount of bytes + # the palette has to be 2< 0: + palette_bytes += o8(0) * 3 * actual_target_size_diff + return palette_bytes + + +def _get_palette_bytes(im): + """ + Gets the palette for inclusion in the gif header + + :param im: Image object + :returns: Bytes, len<=768 suitable for inclusion in gif header + """ + return im.palette.palette + + +def _get_global_header(im, info): + """Return a list of strings representing a GIF header""" + + # Header Block + # http://www.matthewflickinger.com/lab/whatsinagif/bits_and_bytes.asp + + version = b"87a" + for extensionKey in ["transparency", "duration", "loop", "comment"]: + if info and extensionKey in info: + if ((extensionKey == "duration" and info[extensionKey] == 0) or + (extensionKey == "comment" and + not (1 <= len(info[extensionKey]) <= 255))): + continue + version = b"89a" + break + else: + if im.info.get("version") == b"89a": + version = b"89a" + + palette_bytes = 
_get_palette_bytes(im) + color_table_size = _get_color_table_size(palette_bytes) + + background = info["background"] if "background" in info else 0 + + return [ + b"GIF"+version + # signature + version + o16(im.size[0]) + # canvas width + o16(im.size[1]), # canvas height + + # Logical Screen Descriptor + # size of global color table + global color table flag + o8(color_table_size + 128), # packed fields + # background + reserved/aspect + o8(background) + o8(0), + + # Global Color Table + _get_header_palette(palette_bytes) + ] + + +def _write_frame_data(fp, im_frame, offset, params): + try: + im_frame.encoderinfo = params + + # local image header + _write_local_header(fp, im_frame, offset, 0) + + ImageFile._save(im_frame, fp, [("gif", (0, 0)+im_frame.size, 0, + RAWMODE[im_frame.mode])]) + + fp.write(b"\0") # end of image data + finally: + del im_frame.encoderinfo + +# -------------------------------------------------------------------- +# Legacy GIF utilities + + +def getheader(im, palette=None, info=None): + """ + Legacy Method to get Gif data from image. + + Warning:: May modify image data. + + :param im: Image object + :param palette: bytes object containing the source palette, or .... + :param info: encoderinfo + :returns: tuple of(list of header items, optimized palette) + + """ + used_palette_colors = _get_optimize(im, info) + + if info is None: + info = {} + + if "background" not in info and "background" in im.info: + info["background"] = im.info["background"] + + im_mod = _normalize_palette(im, palette, info) + im.palette = im_mod.palette + im.im = im_mod.im + header = _get_global_header(im, info) + + return header, used_palette_colors + + +# To specify duration, add the time in milliseconds to getdata(), +# e.g. getdata(im_frame, duration=1000) +def getdata(im, offset=(0, 0), **params): + """ + Legacy Method + + Return a list of strings representing this image. + The first string is a local image header, the rest contains + encoded image data. 
+ + :param im: Image object + :param offset: Tuple of (x, y) pixels. Defaults to (0,0) + :param \\**params: E.g. duration or other encoder info parameters + :returns: List of Bytes containing gif encoded frame data + + """ + class Collector(object): + data = [] + + def write(self, data): + self.data.append(data) + + im.load() # make sure raster data is available + + fp = Collector() + + _write_frame_data(fp, im, offset, params) + + return fp.data + + +# -------------------------------------------------------------------- +# Registry + +Image.register_open(GifImageFile.format, GifImageFile, _accept) +Image.register_save(GifImageFile.format, _save) +Image.register_save_all(GifImageFile.format, _save_all) +Image.register_extension(GifImageFile.format, ".gif") +Image.register_mime(GifImageFile.format, "image/gif") + +# +# Uncomment the following line if you wish to use NETPBM/PBMPLUS +# instead of the built-in "uncompressed" GIF encoder + +# Image.register_save(GifImageFile.format, _save_netpbm) diff --git a/thesisenv/lib/python3.6/site-packages/PIL/GimpGradientFile.py b/thesisenv/lib/python3.6/site-packages/PIL/GimpGradientFile.py new file mode 100644 index 0000000..10593da --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/PIL/GimpGradientFile.py @@ -0,0 +1,138 @@ +# +# Python Imaging Library +# $Id$ +# +# stuff to read (and render) GIMP gradient files +# +# History: +# 97-08-23 fl Created +# +# Copyright (c) Secret Labs AB 1997. +# Copyright (c) Fredrik Lundh 1997. +# +# See the README file for information on usage and redistribution. +# + +from math import pi, log, sin, sqrt +from ._binary import o8 + +# -------------------------------------------------------------------- +# Stuff to translate curve segments to palette values (derived from +# the corresponding code in GIMP, written by Federico Mena Quintero. +# See the GIMP distribution for more information.) 
+# + +EPSILON = 1e-10 + + +def linear(middle, pos): + if pos <= middle: + if middle < EPSILON: + return 0.0 + else: + return 0.5 * pos / middle + else: + pos = pos - middle + middle = 1.0 - middle + if middle < EPSILON: + return 1.0 + else: + return 0.5 + 0.5 * pos / middle + + +def curved(middle, pos): + return pos ** (log(0.5) / log(max(middle, EPSILON))) + + +def sine(middle, pos): + return (sin((-pi / 2.0) + pi * linear(middle, pos)) + 1.0) / 2.0 + + +def sphere_increasing(middle, pos): + return sqrt(1.0 - (linear(middle, pos) - 1.0) ** 2) + + +def sphere_decreasing(middle, pos): + return 1.0 - sqrt(1.0 - linear(middle, pos) ** 2) + + +SEGMENTS = [linear, curved, sine, sphere_increasing, sphere_decreasing] + + +class GradientFile(object): + + gradient = None + + def getpalette(self, entries=256): + + palette = [] + + ix = 0 + x0, x1, xm, rgb0, rgb1, segment = self.gradient[ix] + + for i in range(entries): + + x = i / float(entries-1) + + while x1 < x: + ix += 1 + x0, x1, xm, rgb0, rgb1, segment = self.gradient[ix] + + w = x1 - x0 + + if w < EPSILON: + scale = segment(0.5, 0.5) + else: + scale = segment((xm - x0) / w, (x - x0) / w) + + # expand to RGBA + r = o8(int(255 * ((rgb1[0] - rgb0[0]) * scale + rgb0[0]) + 0.5)) + g = o8(int(255 * ((rgb1[1] - rgb0[1]) * scale + rgb0[1]) + 0.5)) + b = o8(int(255 * ((rgb1[2] - rgb0[2]) * scale + rgb0[2]) + 0.5)) + a = o8(int(255 * ((rgb1[3] - rgb0[3]) * scale + rgb0[3]) + 0.5)) + + # add to palette + palette.append(r + g + b + a) + + return b"".join(palette), "RGBA" + + +## +# File handler for GIMP's gradient format. 
+ +class GimpGradientFile(GradientFile): + + def __init__(self, fp): + + if fp.readline()[:13] != b"GIMP Gradient": + raise SyntaxError("not a GIMP gradient file") + + line = fp.readline() + + # GIMP 1.2 gradient files don't contain a name, but GIMP 1.3 files do + if line.startswith(b"Name: "): + line = fp.readline().strip() + + count = int(line) + + gradient = [] + + for i in range(count): + + s = fp.readline().split() + w = [float(x) for x in s[:11]] + + x0, x1 = w[0], w[2] + xm = w[1] + rgb0 = w[3:7] + rgb1 = w[7:11] + + segment = SEGMENTS[int(s[11])] + cspace = int(s[12]) + + if cspace != 0: + raise IOError("cannot handle HSV colour space") + + gradient.append((x0, x1, xm, rgb0, rgb1, segment)) + + self.gradient = gradient diff --git a/thesisenv/lib/python3.6/site-packages/PIL/GimpPaletteFile.py b/thesisenv/lib/python3.6/site-packages/PIL/GimpPaletteFile.py new file mode 100644 index 0000000..6eef6a2 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/PIL/GimpPaletteFile.py @@ -0,0 +1,62 @@ +# +# Python Imaging Library +# $Id$ +# +# stuff to read GIMP palette files +# +# History: +# 1997-08-23 fl Created +# 2004-09-07 fl Support GIMP 2.0 palette files. +# +# Copyright (c) Secret Labs AB 1997-2004. All rights reserved. +# Copyright (c) Fredrik Lundh 1997-2004. +# +# See the README file for information on usage and redistribution. +# + +import re +from ._binary import o8 + + +## +# File handler for GIMP's palette format. 
+ +class GimpPaletteFile(object): + + rawmode = "RGB" + + def __init__(self, fp): + + self.palette = [o8(i)*3 for i in range(256)] + + if fp.readline()[:12] != b"GIMP Palette": + raise SyntaxError("not a GIMP palette file") + + i = 0 + + while i <= 255: + + s = fp.readline() + + if not s: + break + # skip fields and comment lines + if re.match(br"\w+:|#", s): + continue + if len(s) > 100: + raise SyntaxError("bad palette file") + + v = tuple(map(int, s.split()[:3])) + if len(v) != 3: + raise ValueError("bad palette entry") + + if 0 <= i <= 255: + self.palette[i] = o8(v[0]) + o8(v[1]) + o8(v[2]) + + i += 1 + + self.palette = b"".join(self.palette) + + def getpalette(self): + + return self.palette, self.rawmode diff --git a/thesisenv/lib/python3.6/site-packages/PIL/GribStubImagePlugin.py b/thesisenv/lib/python3.6/site-packages/PIL/GribStubImagePlugin.py new file mode 100644 index 0000000..243ea2a --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/PIL/GribStubImagePlugin.py @@ -0,0 +1,73 @@ +# +# The Python Imaging Library +# $Id$ +# +# GRIB stub adapter +# +# Copyright (c) 1996-2003 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +from . import Image, ImageFile +from ._binary import i8 + +_handler = None + + +def register_handler(handler): + """ + Install application-specific GRIB image handler. + + :param handler: Handler object. 
+ """ + global _handler + _handler = handler + + +# -------------------------------------------------------------------- +# Image adapter + +def _accept(prefix): + return prefix[0:4] == b"GRIB" and i8(prefix[7]) == 1 + + +class GribStubImageFile(ImageFile.StubImageFile): + + format = "GRIB" + format_description = "GRIB" + + def _open(self): + + offset = self.fp.tell() + + if not _accept(self.fp.read(8)): + raise SyntaxError("Not a GRIB file") + + self.fp.seek(offset) + + # make something up + self.mode = "F" + self._size = 1, 1 + + loader = self._load() + if loader: + loader.open(self) + + def _load(self): + return _handler + + +def _save(im, fp, filename): + if _handler is None or not hasattr("_handler", "save"): + raise IOError("GRIB save handler not installed") + _handler.save(im, fp, filename) + + +# -------------------------------------------------------------------- +# Registry + +Image.register_open(GribStubImageFile.format, GribStubImageFile, _accept) +Image.register_save(GribStubImageFile.format, _save) + +Image.register_extension(GribStubImageFile.format, ".grib") diff --git a/thesisenv/lib/python3.6/site-packages/PIL/Hdf5StubImagePlugin.py b/thesisenv/lib/python3.6/site-packages/PIL/Hdf5StubImagePlugin.py new file mode 100644 index 0000000..8783f80 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/PIL/Hdf5StubImagePlugin.py @@ -0,0 +1,72 @@ +# +# The Python Imaging Library +# $Id$ +# +# HDF5 stub adapter +# +# Copyright (c) 2000-2003 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +from . import Image, ImageFile + +_handler = None + + +def register_handler(handler): + """ + Install application-specific HDF5 image handler. + + :param handler: Handler object. 
+ """ + global _handler + _handler = handler + + +# -------------------------------------------------------------------- +# Image adapter + +def _accept(prefix): + return prefix[:8] == b"\x89HDF\r\n\x1a\n" + + +class HDF5StubImageFile(ImageFile.StubImageFile): + + format = "HDF5" + format_description = "HDF5" + + def _open(self): + + offset = self.fp.tell() + + if not _accept(self.fp.read(8)): + raise SyntaxError("Not an HDF file") + + self.fp.seek(offset) + + # make something up + self.mode = "F" + self._size = 1, 1 + + loader = self._load() + if loader: + loader.open(self) + + def _load(self): + return _handler + + +def _save(im, fp, filename): + if _handler is None or not hasattr("_handler", "save"): + raise IOError("HDF5 save handler not installed") + _handler.save(im, fp, filename) + + +# -------------------------------------------------------------------- +# Registry + +Image.register_open(HDF5StubImageFile.format, HDF5StubImageFile, _accept) +Image.register_save(HDF5StubImageFile.format, _save) + +Image.register_extensions(HDF5StubImageFile.format, [".h5", ".hdf"]) diff --git a/thesisenv/lib/python3.6/site-packages/PIL/IcnsImagePlugin.py b/thesisenv/lib/python3.6/site-packages/PIL/IcnsImagePlugin.py new file mode 100644 index 0000000..21236d4 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/PIL/IcnsImagePlugin.py @@ -0,0 +1,394 @@ +# +# The Python Imaging Library. +# $Id$ +# +# macOS icns file decoder, based on icns.py by Bob Ippolito. +# +# history: +# 2004-10-09 fl Turned into a PIL plugin; removed 2.3 dependencies. +# +# Copyright (c) 2004 by Bob Ippolito. +# Copyright (c) 2004 by Secret Labs. +# Copyright (c) 2004 by Fredrik Lundh. +# Copyright (c) 2014 by Alastair Houghton. +# +# See the README file for information on usage and redistribution. 
+# + +from PIL import Image, ImageFile, PngImagePlugin +from PIL._binary import i8 +import io +import os +import shutil +import struct +import sys +import tempfile + +enable_jpeg2k = hasattr(Image.core, 'jp2klib_version') +if enable_jpeg2k: + from PIL import Jpeg2KImagePlugin + +HEADERSIZE = 8 + + +def nextheader(fobj): + return struct.unpack('>4sI', fobj.read(HEADERSIZE)) + + +def read_32t(fobj, start_length, size): + # The 128x128 icon seems to have an extra header for some reason. + (start, length) = start_length + fobj.seek(start) + sig = fobj.read(4) + if sig != b'\x00\x00\x00\x00': + raise SyntaxError('Unknown signature, expecting 0x00000000') + return read_32(fobj, (start + 4, length - 4), size) + + +def read_32(fobj, start_length, size): + """ + Read a 32bit RGB icon resource. Seems to be either uncompressed or + an RLE packbits-like scheme. + """ + (start, length) = start_length + fobj.seek(start) + pixel_size = (size[0] * size[2], size[1] * size[2]) + sizesq = pixel_size[0] * pixel_size[1] + if length == sizesq * 3: + # uncompressed ("RGBRGBGB") + indata = fobj.read(length) + im = Image.frombuffer("RGB", pixel_size, indata, "raw", "RGB", 0, 1) + else: + # decode image + im = Image.new("RGB", pixel_size, None) + for band_ix in range(3): + data = [] + bytesleft = sizesq + while bytesleft > 0: + byte = fobj.read(1) + if not byte: + break + byte = i8(byte) + if byte & 0x80: + blocksize = byte - 125 + byte = fobj.read(1) + for i in range(blocksize): + data.append(byte) + else: + blocksize = byte + 1 + data.append(fobj.read(blocksize)) + bytesleft -= blocksize + if bytesleft <= 0: + break + if bytesleft != 0: + raise SyntaxError( + "Error reading channel [%r left]" % bytesleft + ) + band = Image.frombuffer( + "L", pixel_size, b"".join(data), "raw", "L", 0, 1 + ) + im.im.putband(band.im, band_ix) + return {"RGB": im} + + +def read_mk(fobj, start_length, size): + # Alpha masks seem to be uncompressed + start = start_length[0] + fobj.seek(start) + pixel_size = 
(size[0] * size[2], size[1] * size[2]) + sizesq = pixel_size[0] * pixel_size[1] + band = Image.frombuffer( + "L", pixel_size, fobj.read(sizesq), "raw", "L", 0, 1 + ) + return {"A": band} + + +def read_png_or_jpeg2000(fobj, start_length, size): + (start, length) = start_length + fobj.seek(start) + sig = fobj.read(12) + if sig[:8] == b'\x89PNG\x0d\x0a\x1a\x0a': + fobj.seek(start) + im = PngImagePlugin.PngImageFile(fobj) + return {"RGBA": im} + elif sig[:4] == b'\xff\x4f\xff\x51' \ + or sig[:4] == b'\x0d\x0a\x87\x0a' \ + or sig == b'\x00\x00\x00\x0cjP \x0d\x0a\x87\x0a': + if not enable_jpeg2k: + raise ValueError('Unsupported icon subimage format (rebuild PIL ' + 'with JPEG 2000 support to fix this)') + # j2k, jpc or j2c + fobj.seek(start) + jp2kstream = fobj.read(length) + f = io.BytesIO(jp2kstream) + im = Jpeg2KImagePlugin.Jpeg2KImageFile(f) + if im.mode != 'RGBA': + im = im.convert('RGBA') + return {"RGBA": im} + else: + raise ValueError('Unsupported icon subimage format') + + +class IcnsFile(object): + + SIZES = { + (512, 512, 2): [ + (b'ic10', read_png_or_jpeg2000), + ], + (512, 512, 1): [ + (b'ic09', read_png_or_jpeg2000), + ], + (256, 256, 2): [ + (b'ic14', read_png_or_jpeg2000), + ], + (256, 256, 1): [ + (b'ic08', read_png_or_jpeg2000), + ], + (128, 128, 2): [ + (b'ic13', read_png_or_jpeg2000), + ], + (128, 128, 1): [ + (b'ic07', read_png_or_jpeg2000), + (b'it32', read_32t), + (b't8mk', read_mk), + ], + (64, 64, 1): [ + (b'icp6', read_png_or_jpeg2000), + ], + (32, 32, 2): [ + (b'ic12', read_png_or_jpeg2000), + ], + (48, 48, 1): [ + (b'ih32', read_32), + (b'h8mk', read_mk), + ], + (32, 32, 1): [ + (b'icp5', read_png_or_jpeg2000), + (b'il32', read_32), + (b'l8mk', read_mk), + ], + (16, 16, 2): [ + (b'ic11', read_png_or_jpeg2000), + ], + (16, 16, 1): [ + (b'icp4', read_png_or_jpeg2000), + (b'is32', read_32), + (b's8mk', read_mk), + ], + } + + def __init__(self, fobj): + """ + fobj is a file-like object as an icns resource + """ + # signature : (start, length) + 
self.dct = dct = {} + self.fobj = fobj + sig, filesize = nextheader(fobj) + if sig != b'icns': + raise SyntaxError('not an icns file') + i = HEADERSIZE + while i < filesize: + sig, blocksize = nextheader(fobj) + if blocksize <= 0: + raise SyntaxError('invalid block header') + i += HEADERSIZE + blocksize -= HEADERSIZE + dct[sig] = (i, blocksize) + fobj.seek(blocksize, 1) + i += blocksize + + def itersizes(self): + sizes = [] + for size, fmts in self.SIZES.items(): + for (fmt, reader) in fmts: + if fmt in self.dct: + sizes.append(size) + break + return sizes + + def bestsize(self): + sizes = self.itersizes() + if not sizes: + raise SyntaxError("No 32bit icon resources found") + return max(sizes) + + def dataforsize(self, size): + """ + Get an icon resource as {channel: array}. Note that + the arrays are bottom-up like windows bitmaps and will likely + need to be flipped or transposed in some way. + """ + dct = {} + for code, reader in self.SIZES[size]: + desc = self.dct.get(code) + if desc is not None: + dct.update(reader(self.fobj, desc, size)) + return dct + + def getimage(self, size=None): + if size is None: + size = self.bestsize() + if len(size) == 2: + size = (size[0], size[1], 1) + channels = self.dataforsize(size) + + im = channels.get('RGBA', None) + if im: + return im + + im = channels.get("RGB").copy() + try: + im.putalpha(channels["A"]) + except KeyError: + pass + return im + + +## +# Image plugin for Mac OS icons. + +class IcnsImageFile(ImageFile.ImageFile): + """ + PIL image support for Mac OS .icns files. + Chooses the best resolution, but will possibly load + a different size image if you mutate the size attribute + before calling 'load'. + + The info dictionary has a key 'sizes' that is a list + of sizes that the icns file has. 
+ """ + + format = "ICNS" + format_description = "Mac OS icns resource" + + def _open(self): + self.icns = IcnsFile(self.fp) + self.mode = 'RGBA' + self.info['sizes'] = self.icns.itersizes() + self.best_size = self.icns.bestsize() + self.size = (self.best_size[0] * self.best_size[2], + self.best_size[1] * self.best_size[2]) + # Just use this to see if it's loaded or not yet. + self.tile = ('',) + + @property + def size(self): + return self._size + + @size.setter + def size(self, value): + info_size = value + if info_size not in self.info['sizes'] and len(info_size) == 2: + info_size = (info_size[0], info_size[1], 1) + if info_size not in self.info['sizes'] and len(info_size) == 3 and \ + info_size[2] == 1: + simple_sizes = [(size[0] * size[2], size[1] * size[2]) + for size in self.info['sizes']] + if value in simple_sizes: + info_size = self.info['sizes'][simple_sizes.index(value)] + if info_size not in self.info['sizes']: + raise ValueError( + "This is not one of the allowed sizes of this image") + self._size = value + + def load(self): + if len(self.size) == 3: + self.best_size = self.size + self.size = (self.best_size[0] * self.best_size[2], + self.best_size[1] * self.best_size[2]) + + Image.Image.load(self) + if not self.tile: + return + self.load_prepare() + # This is likely NOT the best way to do it, but whatever. + im = self.icns.getimage(self.best_size) + + # If this is a PNG or JPEG 2000, it won't be loaded yet + im.load() + + self.im = im.im + self.mode = im.mode + self.size = im.size + self.fp = None + self.icns = None + self.tile = () + self.load_end() + + +def _save(im, fp, filename): + """ + Saves the image as a series of PNG files, + that are then converted to a .icns file + using the macOS command line utility 'iconutil'. + + macOS only. 
+ """ + if hasattr(fp, "flush"): + fp.flush() + + # create the temporary set of pngs + iconset = tempfile.mkdtemp('.iconset') + provided_images = {im.width: im + for im in im.encoderinfo.get("append_images", [])} + last_w = None + for w in [16, 32, 128, 256, 512]: + prefix = 'icon_{}x{}'.format(w, w) + + first_path = os.path.join(iconset, prefix+'.png') + if last_w == w: + shutil.copyfile(second_path, first_path) + else: + im_w = provided_images.get(w, im.resize((w, w), Image.LANCZOS)) + im_w.save(first_path) + + second_path = os.path.join(iconset, prefix+'@2x.png') + im_w2 = provided_images.get(w*2, im.resize((w*2, w*2), Image.LANCZOS)) + im_w2.save(second_path) + last_w = w*2 + + # iconutil -c icns -o {} {} + from subprocess import Popen, PIPE, CalledProcessError + + convert_cmd = ["iconutil", "-c", "icns", "-o", filename, iconset] + with open(os.devnull, 'wb') as devnull: + convert_proc = Popen(convert_cmd, stdout=PIPE, stderr=devnull) + + convert_proc.stdout.close() + + retcode = convert_proc.wait() + + # remove the temporary files + shutil.rmtree(iconset) + + if retcode: + raise CalledProcessError(retcode, convert_cmd) + + +Image.register_open(IcnsImageFile.format, IcnsImageFile, + lambda x: x[:4] == b'icns') +Image.register_extension(IcnsImageFile.format, '.icns') + +if sys.platform == 'darwin': + Image.register_save(IcnsImageFile.format, _save) + + Image.register_mime(IcnsImageFile.format, "image/icns") + + +if __name__ == '__main__': + + if len(sys.argv) < 2: + print("Syntax: python IcnsImagePlugin.py [file]") + sys.exit() + + imf = IcnsImageFile(open(sys.argv[1], 'rb')) + for size in imf.info['sizes']: + imf.size = size + imf.load() + im = imf.im + im.save('out-%s-%s-%s.png' % size) + im = Image.open(sys.argv[1]) + im.save("out.png") + if sys.platform == 'windows': + os.startfile("out.png") diff --git a/thesisenv/lib/python3.6/site-packages/PIL/IcoImagePlugin.py b/thesisenv/lib/python3.6/site-packages/PIL/IcoImagePlugin.py new file mode 100644 index 
0000000..589ef3c --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/PIL/IcoImagePlugin.py @@ -0,0 +1,295 @@ +# +# The Python Imaging Library. +# $Id$ +# +# Windows Icon support for PIL +# +# History: +# 96-05-27 fl Created +# +# Copyright (c) Secret Labs AB 1997. +# Copyright (c) Fredrik Lundh 1996. +# +# See the README file for information on usage and redistribution. +# + +# This plugin is a refactored version of Win32IconImagePlugin by Bryan Davis +# . +# https://code.google.com/archive/p/casadebender/wikis/Win32IconImagePlugin.wiki +# +# Icon format references: +# * https://en.wikipedia.org/wiki/ICO_(file_format) +# * https://msdn.microsoft.com/en-us/library/ms997538.aspx + + +import struct +from io import BytesIO + +from . import Image, ImageFile, BmpImagePlugin, PngImagePlugin +from ._binary import i8, i16le as i16, i32le as i32 +from math import log, ceil + +__version__ = "0.1" + +# +# -------------------------------------------------------------------- + +_MAGIC = b"\0\0\1\0" + + +def _save(im, fp, filename): + fp.write(_MAGIC) # (2+2) + sizes = im.encoderinfo.get("sizes", + [(16, 16), (24, 24), (32, 32), (48, 48), + (64, 64), (128, 128), (256, 256)]) + width, height = im.size + sizes = filter(lambda x: False if (x[0] > width or x[1] > height or + x[0] > 256 or x[1] > 256) else True, + sizes) + sizes = list(sizes) + fp.write(struct.pack("=8bpp) + 'reserved': i8(s[3]), + 'planes': i16(s[4:]), + 'bpp': i16(s[6:]), + 'size': i32(s[8:]), + 'offset': i32(s[12:]) + } + + # See Wikipedia + for j in ('width', 'height'): + if not icon_header[j]: + icon_header[j] = 256 + + # See Wikipedia notes about color depth. 
+ # We need this just to differ images with equal sizes + icon_header['color_depth'] = (icon_header['bpp'] or + (icon_header['nb_color'] != 0 and + ceil(log(icon_header['nb_color'], + 2))) or 256) + + icon_header['dim'] = (icon_header['width'], icon_header['height']) + icon_header['square'] = (icon_header['width'] * + icon_header['height']) + + self.entry.append(icon_header) + + self.entry = sorted(self.entry, key=lambda x: x['color_depth']) + # ICO images are usually squares + # self.entry = sorted(self.entry, key=lambda x: x['width']) + self.entry = sorted(self.entry, key=lambda x: x['square']) + self.entry.reverse() + + def sizes(self): + """ + Get a list of all available icon sizes and color depths. + """ + return {(h['width'], h['height']) for h in self.entry} + + def getimage(self, size, bpp=False): + """ + Get an image from the icon + """ + for (i, h) in enumerate(self.entry): + if size == h['dim'] and (bpp is False or bpp == h['color_depth']): + return self.frame(i) + return self.frame(0) + + def frame(self, idx): + """ + Get an image from frame idx + """ + + header = self.entry[idx] + + self.buf.seek(header['offset']) + data = self.buf.read(8) + self.buf.seek(header['offset']) + + if data[:8] == PngImagePlugin._MAGIC: + # png frame + im = PngImagePlugin.PngImageFile(self.buf) + else: + # XOR + AND mask bmp frame + im = BmpImagePlugin.DibImageFile(self.buf) + + # change tile dimension to only encompass XOR image + im._size = (im.size[0], int(im.size[1] / 2)) + d, e, o, a = im.tile[0] + im.tile[0] = d, (0, 0) + im.size, o, a + + # figure out where AND mask image starts + mode = a[0] + bpp = 8 + for k, v in BmpImagePlugin.BIT2MODE.items(): + if mode == v[1]: + bpp = k + break + + if 32 == bpp: + # 32-bit color depth icon image allows semitransparent areas + # PIL's DIB format ignores transparency bits, recover them. + # The DIB is packed in BGRX byte order where X is the alpha + # channel. 
+ + # Back up to start of bmp data + self.buf.seek(o) + # extract every 4th byte (eg. 3,7,11,15,...) + alpha_bytes = self.buf.read(im.size[0] * im.size[1] * 4)[3::4] + + # convert to an 8bpp grayscale image + mask = Image.frombuffer( + 'L', # 8bpp + im.size, # (w, h) + alpha_bytes, # source chars + 'raw', # raw decoder + ('L', 0, -1) # 8bpp inverted, unpadded, reversed + ) + else: + # get AND image from end of bitmap + w = im.size[0] + if (w % 32) > 0: + # bitmap row data is aligned to word boundaries + w += 32 - (im.size[0] % 32) + + # the total mask data is + # padded row size * height / bits per char + + and_mask_offset = o + int(im.size[0] * im.size[1] * + (bpp / 8.0)) + total_bytes = int((w * im.size[1]) / 8) + + self.buf.seek(and_mask_offset) + mask_data = self.buf.read(total_bytes) + + # convert raw data to image + mask = Image.frombuffer( + '1', # 1 bpp + im.size, # (w, h) + mask_data, # source chars + 'raw', # raw decoder + ('1;I', int(w/8), -1) # 1bpp inverted, padded, reversed + ) + + # now we have two images, im is XOR image and mask is AND image + + # apply mask image as alpha channel + im = im.convert('RGBA') + im.putalpha(mask) + + return im + + +## +# Image plugin for Windows Icon files. + +class IcoImageFile(ImageFile.ImageFile): + """ + PIL read-only image support for Microsoft Windows .ico files. + + By default the largest resolution image in the file will be loaded. This + can be changed by altering the 'size' attribute before calling 'load'. + + The info dictionary has a key 'sizes' that is a list of the sizes available + in the icon file. + + Handles classic, XP and Vista icon formats. + + This plugin is a refactored version of Win32IconImagePlugin by Bryan Davis + . 
+ https://code.google.com/archive/p/casadebender/wikis/Win32IconImagePlugin.wiki + """ + format = "ICO" + format_description = "Windows Icon" + + def _open(self): + self.ico = IcoFile(self.fp) + self.info['sizes'] = self.ico.sizes() + self.size = self.ico.entry[0]['dim'] + self.load() + + @property + def size(self): + return self._size + + @size.setter + def size(self, value): + if value not in self.info['sizes']: + raise ValueError( + "This is not one of the allowed sizes of this image") + self._size = value + + def load(self): + im = self.ico.getimage(self.size) + # if tile is PNG, it won't really be loaded yet + im.load() + self.im = im.im + self.mode = im.mode + self.size = im.size + + def load_seek(self): + # Flag the ImageFile.Parser so that it + # just does all the decode at the end. + pass +# +# -------------------------------------------------------------------- + + +Image.register_open(IcoImageFile.format, IcoImageFile, _accept) +Image.register_save(IcoImageFile.format, _save) +Image.register_extension(IcoImageFile.format, ".ico") diff --git a/thesisenv/lib/python3.6/site-packages/PIL/ImImagePlugin.py b/thesisenv/lib/python3.6/site-packages/PIL/ImImagePlugin.py new file mode 100644 index 0000000..2896bb4 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/PIL/ImImagePlugin.py @@ -0,0 +1,347 @@ +# +# The Python Imaging Library. +# $Id$ +# +# IFUNC IM file handling for PIL +# +# history: +# 1995-09-01 fl Created. 
+# 1997-01-03 fl Save palette images +# 1997-01-08 fl Added sequence support +# 1997-01-23 fl Added P and RGB save support +# 1997-05-31 fl Read floating point images +# 1997-06-22 fl Save floating point images +# 1997-08-27 fl Read and save 1-bit images +# 1998-06-25 fl Added support for RGB+LUT images +# 1998-07-02 fl Added support for YCC images +# 1998-07-15 fl Renamed offset attribute to avoid name clash +# 1998-12-29 fl Added I;16 support +# 2001-02-17 fl Use 're' instead of 'regex' (Python 2.1) (0.7) +# 2003-09-26 fl Added LA/PA support +# +# Copyright (c) 1997-2003 by Secret Labs AB. +# Copyright (c) 1995-2001 by Fredrik Lundh. +# +# See the README file for information on usage and redistribution. +# + + +import re +from . import Image, ImageFile, ImagePalette +from ._binary import i8 + +__version__ = "0.7" + + +# -------------------------------------------------------------------- +# Standard tags + +COMMENT = "Comment" +DATE = "Date" +EQUIPMENT = "Digitalization equipment" +FRAMES = "File size (no of images)" +LUT = "Lut" +NAME = "Name" +SCALE = "Scale (x,y)" +SIZE = "Image size (x*y)" +MODE = "Image type" + +TAGS = {COMMENT: 0, DATE: 0, EQUIPMENT: 0, FRAMES: 0, LUT: 0, NAME: 0, + SCALE: 0, SIZE: 0, MODE: 0} + +OPEN = { + # ifunc93/p3cfunc formats + "0 1 image": ("1", "1"), + "L 1 image": ("1", "1"), + "Greyscale image": ("L", "L"), + "Grayscale image": ("L", "L"), + "RGB image": ("RGB", "RGB;L"), + "RLB image": ("RGB", "RLB"), + "RYB image": ("RGB", "RLB"), + "B1 image": ("1", "1"), + "B2 image": ("P", "P;2"), + "B4 image": ("P", "P;4"), + "X 24 image": ("RGB", "RGB"), + "L 32 S image": ("I", "I;32"), + "L 32 F image": ("F", "F;32"), + # old p3cfunc formats + "RGB3 image": ("RGB", "RGB;T"), + "RYB3 image": ("RGB", "RYB;T"), + # extensions + "LA image": ("LA", "LA;L"), + "RGBA image": ("RGBA", "RGBA;L"), + "RGBX image": ("RGBX", "RGBX;L"), + "CMYK image": ("CMYK", "CMYK;L"), + "YCC image": ("YCbCr", "YCbCr;L"), +} + +# ifunc95 extensions +for i in ["8", 
"8S", "16", "16S", "32", "32F"]: + OPEN["L %s image" % i] = ("F", "F;%s" % i) + OPEN["L*%s image" % i] = ("F", "F;%s" % i) +for i in ["16", "16L", "16B"]: + OPEN["L %s image" % i] = ("I;%s" % i, "I;%s" % i) + OPEN["L*%s image" % i] = ("I;%s" % i, "I;%s" % i) +for i in ["32S"]: + OPEN["L %s image" % i] = ("I", "I;%s" % i) + OPEN["L*%s image" % i] = ("I", "I;%s" % i) +for i in range(2, 33): + OPEN["L*%s image" % i] = ("F", "F;%s" % i) + + +# -------------------------------------------------------------------- +# Read IM directory + +split = re.compile(br"^([A-Za-z][^:]*):[ \t]*(.*)[ \t]*$") + + +def number(s): + try: + return int(s) + except ValueError: + return float(s) + + +## +# Image plugin for the IFUNC IM file format. + +class ImImageFile(ImageFile.ImageFile): + + format = "IM" + format_description = "IFUNC Image Memory" + _close_exclusive_fp_after_loading = False + + def _open(self): + + # Quick rejection: if there's not an LF among the first + # 100 bytes, this is (probably) not a text header. + + if b"\n" not in self.fp.read(100): + raise SyntaxError("not an IM file") + self.fp.seek(0) + + n = 0 + + # Default values + self.info[MODE] = "L" + self.info[SIZE] = (512, 512) + self.info[FRAMES] = 1 + + self.rawmode = "L" + + while True: + + s = self.fp.read(1) + + # Some versions of IFUNC uses \n\r instead of \r\n... 
+ if s == b"\r": + continue + + if not s or s == b'\0' or s == b'\x1A': + break + + # FIXME: this may read whole file if not a text file + s = s + self.fp.readline() + + if len(s) > 100: + raise SyntaxError("not an IM file") + + if s[-2:] == b'\r\n': + s = s[:-2] + elif s[-1:] == b'\n': + s = s[:-1] + + try: + m = split.match(s) + except re.error as v: + raise SyntaxError("not an IM file") + + if m: + + k, v = m.group(1, 2) + + # Don't know if this is the correct encoding, + # but a decent guess (I guess) + k = k.decode('latin-1', 'replace') + v = v.decode('latin-1', 'replace') + + # Convert value as appropriate + if k in [FRAMES, SCALE, SIZE]: + v = v.replace("*", ",") + v = tuple(map(number, v.split(","))) + if len(v) == 1: + v = v[0] + elif k == MODE and v in OPEN: + v, self.rawmode = OPEN[v] + + # Add to dictionary. Note that COMMENT tags are + # combined into a list of strings. + if k == COMMENT: + if k in self.info: + self.info[k].append(v) + else: + self.info[k] = [v] + else: + self.info[k] = v + + if k in TAGS: + n += 1 + + else: + + raise SyntaxError("Syntax error in IM header: " + + s.decode('ascii', 'replace')) + + if not n: + raise SyntaxError("Not an IM file") + + # Basic attributes + self._size = self.info[SIZE] + self.mode = self.info[MODE] + + # Skip forward to start of image data + while s and s[0:1] != b'\x1A': + s = self.fp.read(1) + if not s: + raise SyntaxError("File truncated") + + if LUT in self.info: + # convert lookup table to palette or lut attribute + palette = self.fp.read(768) + greyscale = 1 # greyscale palette + linear = 1 # linear greyscale palette + for i in range(256): + if palette[i] == palette[i+256] == palette[i+512]: + if i8(palette[i]) != i: + linear = 0 + else: + greyscale = 0 + if self.mode == "L" or self.mode == "LA": + if greyscale: + if not linear: + self.lut = [i8(c) for c in palette[:256]] + else: + if self.mode == "L": + self.mode = self.rawmode = "P" + elif self.mode == "LA": + self.mode = self.rawmode = "PA" + 
self.palette = ImagePalette.raw("RGB;L", palette) + elif self.mode == "RGB": + if not greyscale or not linear: + self.lut = [i8(c) for c in palette] + + self.frame = 0 + + self.__offset = offs = self.fp.tell() + + self.__fp = self.fp # FIXME: hack + + if self.rawmode[:2] == "F;": + + # ifunc95 formats + try: + # use bit decoder (if necessary) + bits = int(self.rawmode[2:]) + if bits not in [8, 16, 32]: + self.tile = [("bit", (0, 0)+self.size, offs, + (bits, 8, 3, 0, -1))] + return + except ValueError: + pass + + if self.rawmode in ["RGB;T", "RYB;T"]: + # Old LabEye/3PC files. Would be very surprised if anyone + # ever stumbled upon such a file ;-) + size = self.size[0] * self.size[1] + self.tile = [("raw", (0, 0)+self.size, offs, ("G", 0, -1)), + ("raw", (0, 0)+self.size, offs+size, ("R", 0, -1)), + ("raw", (0, 0)+self.size, offs+2*size, ("B", 0, -1))] + else: + # LabEye/IFUNC files + self.tile = [("raw", (0, 0)+self.size, offs, + (self.rawmode, 0, -1))] + + @property + def n_frames(self): + return self.info[FRAMES] + + @property + def is_animated(self): + return self.info[FRAMES] > 1 + + def seek(self, frame): + if not self._seek_check(frame): + return + + self.frame = frame + + if self.mode == "1": + bits = 1 + else: + bits = 8 * len(self.mode) + + size = ((self.size[0] * bits + 7) // 8) * self.size[1] + offs = self.__offset + frame * size + + self.fp = self.__fp + + self.tile = [("raw", (0, 0)+self.size, offs, (self.rawmode, 0, -1))] + + def tell(self): + return self.frame + +# +# -------------------------------------------------------------------- +# Save IM files + + +SAVE = { + # mode: (im type, raw mode) + "1": ("0 1", "1"), + "L": ("Greyscale", "L"), + "LA": ("LA", "LA;L"), + "P": ("Greyscale", "P"), + "PA": ("LA", "PA;L"), + "I": ("L 32S", "I;32S"), + "I;16": ("L 16", "I;16"), + "I;16L": ("L 16L", "I;16L"), + "I;16B": ("L 16B", "I;16B"), + "F": ("L 32F", "F;32F"), + "RGB": ("RGB", "RGB;L"), + "RGBA": ("RGBA", "RGBA;L"), + "RGBX": ("RGBX", "RGBX;L"), + 
"CMYK": ("CMYK", "CMYK;L"), + "YCbCr": ("YCC", "YCbCr;L") +} + + +def _save(im, fp, filename): + + try: + image_type, rawmode = SAVE[im.mode] + except KeyError: + raise ValueError("Cannot save %s images as IM" % im.mode) + + frames = im.encoderinfo.get("frames", 1) + + fp.write(("Image type: %s image\r\n" % image_type).encode('ascii')) + if filename: + fp.write(("Name: %s\r\n" % filename).encode('ascii')) + fp.write(("Image size (x*y): %d*%d\r\n" % im.size).encode('ascii')) + fp.write(("File size (no of images): %d\r\n" % frames).encode('ascii')) + if im.mode == "P": + fp.write(b"Lut: 1\r\n") + fp.write(b"\000" * (511-fp.tell()) + b"\032") + if im.mode == "P": + fp.write(im.im.getpalette("RGB", "RGB;L")) # 768 bytes + ImageFile._save(im, fp, [("raw", (0, 0)+im.size, 0, (rawmode, 0, -1))]) + +# +# -------------------------------------------------------------------- +# Registry + + +Image.register_open(ImImageFile.format, ImImageFile) +Image.register_save(ImImageFile.format, _save) + +Image.register_extension(ImImageFile.format, ".im") diff --git a/thesisenv/lib/python3.6/site-packages/PIL/Image.py b/thesisenv/lib/python3.6/site-packages/PIL/Image.py new file mode 100644 index 0000000..edea312 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/PIL/Image.py @@ -0,0 +1,2965 @@ +# +# The Python Imaging Library. +# $Id$ +# +# the Image class wrapper +# +# partial release history: +# 1995-09-09 fl Created +# 1996-03-11 fl PIL release 0.0 (proof of concept) +# 1996-04-30 fl PIL release 0.1b1 +# 1999-07-28 fl PIL release 1.0 final +# 2000-06-07 fl PIL release 1.1 +# 2000-10-20 fl PIL release 1.1.1 +# 2001-05-07 fl PIL release 1.1.2 +# 2002-03-15 fl PIL release 1.1.3 +# 2003-05-10 fl PIL release 1.1.4 +# 2005-03-28 fl PIL release 1.1.5 +# 2006-12-02 fl PIL release 1.1.6 +# 2009-11-15 fl PIL release 1.1.7 +# +# Copyright (c) 1997-2009 by Secret Labs AB. All rights reserved. +# Copyright (c) 1995-2009 by Fredrik Lundh. 
+# +# See the README file for information on usage and redistribution. +# + +# VERSION is deprecated and will be removed in Pillow 6.0.0. +# PILLOW_VERSION is deprecated and will be removed after that. +# Use __version__ instead. +from . import VERSION, PILLOW_VERSION, __version__, _plugins +from ._util import py3 + +import logging +import warnings +import math + +logger = logging.getLogger(__name__) + + +class DecompressionBombWarning(RuntimeWarning): + pass + + +class DecompressionBombError(Exception): + pass + + +class _imaging_not_installed(object): + # module placeholder + def __getattr__(self, id): + raise ImportError("The _imaging C module is not installed") + + +# Limit to around a quarter gigabyte for a 24 bit (3 bpp) image +MAX_IMAGE_PIXELS = int(1024 * 1024 * 1024 // 4 // 3) + + +try: + # If the _imaging C module is not present, Pillow will not load. + # Note that other modules should not refer to _imaging directly; + # import Image and use the Image.core variable instead. + # Also note that Image.core is not a publicly documented interface, + # and should be considered private and subject to change. + from . import _imaging as core + if __version__ != getattr(core, 'PILLOW_VERSION', None): + raise ImportError("The _imaging extension was built for another " + "version of Pillow or PIL:\n" + "Core version: %s\n" + "Pillow version: %s" % + (getattr(core, 'PILLOW_VERSION', None), + __version__)) + +except ImportError as v: + core = _imaging_not_installed() + # Explanations for ways that we know we might have an import error + if str(v).startswith("Module use of python"): + # The _imaging C module is present, but not compiled for + # the right version (windows only). Print a warning, if + # possible. 
+ warnings.warn( + "The _imaging extension was built for another version " + "of Python.", + RuntimeWarning + ) + elif str(v).startswith("The _imaging extension"): + warnings.warn(str(v), RuntimeWarning) + elif "Symbol not found: _PyUnicodeUCS2_" in str(v): + # should match _PyUnicodeUCS2_FromString and + # _PyUnicodeUCS2_AsLatin1String + warnings.warn( + "The _imaging extension was built for Python with UCS2 support; " + "recompile Pillow or build Python --without-wide-unicode. ", + RuntimeWarning + ) + elif "Symbol not found: _PyUnicodeUCS4_" in str(v): + # should match _PyUnicodeUCS4_FromString and + # _PyUnicodeUCS4_AsLatin1String + warnings.warn( + "The _imaging extension was built for Python with UCS4 support; " + "recompile Pillow or build Python --with-wide-unicode. ", + RuntimeWarning + ) + # Fail here anyway. Don't let people run with a mostly broken Pillow. + # see docs/porting.rst + raise + +try: + import builtins +except ImportError: + import __builtin__ + builtins = __builtin__ + +from . import ImageMode +from ._binary import i8 +from ._util import isPath, isStringType, deferred_error + +import os +import sys +import io +import struct +import atexit + +# type stuff +import numbers +try: + # Python 3 + from collections.abc import Callable +except ImportError: + # Python 2.7 + from collections import Callable + + +# works everywhere, win for pypy, not cpython +USE_CFFI_ACCESS = hasattr(sys, 'pypy_version_info') +try: + import cffi + HAS_CFFI = True +except ImportError: + HAS_CFFI = False + +try: + from pathlib import Path + HAS_PATHLIB = True +except ImportError: + try: + from pathlib2 import Path + HAS_PATHLIB = True + except ImportError: + HAS_PATHLIB = False + + +def isImageType(t): + """ + Checks if an object is an image object. + + .. warning:: + + This function is for internal use only. 
+ + :param t: object to check if it's an image + :returns: True if the object is an image + """ + return hasattr(t, "im") + + +# +# Constants (also defined in _imagingmodule.c!) + +NONE = 0 + +# transpose +FLIP_LEFT_RIGHT = 0 +FLIP_TOP_BOTTOM = 1 +ROTATE_90 = 2 +ROTATE_180 = 3 +ROTATE_270 = 4 +TRANSPOSE = 5 +TRANSVERSE = 6 + +# transforms +AFFINE = 0 +EXTENT = 1 +PERSPECTIVE = 2 +QUAD = 3 +MESH = 4 + +# resampling filters +NEAREST = NONE = 0 +BOX = 4 +BILINEAR = LINEAR = 2 +HAMMING = 5 +BICUBIC = CUBIC = 3 +LANCZOS = ANTIALIAS = 1 + +# dithers +NEAREST = NONE = 0 +ORDERED = 1 # Not yet implemented +RASTERIZE = 2 # Not yet implemented +FLOYDSTEINBERG = 3 # default + +# palettes/quantizers +WEB = 0 +ADAPTIVE = 1 + +MEDIANCUT = 0 +MAXCOVERAGE = 1 +FASTOCTREE = 2 +LIBIMAGEQUANT = 3 + +# categories +NORMAL = 0 +SEQUENCE = 1 +CONTAINER = 2 + +if hasattr(core, 'DEFAULT_STRATEGY'): + DEFAULT_STRATEGY = core.DEFAULT_STRATEGY + FILTERED = core.FILTERED + HUFFMAN_ONLY = core.HUFFMAN_ONLY + RLE = core.RLE + FIXED = core.FIXED + + +# -------------------------------------------------------------------- +# Registries + +ID = [] +OPEN = {} +MIME = {} +SAVE = {} +SAVE_ALL = {} +EXTENSION = {} +DECODERS = {} +ENCODERS = {} + +# -------------------------------------------------------------------- +# Modes supported by this version + +_MODEINFO = { + # NOTE: this table will be removed in future versions. use + # getmode* functions or ImageMode descriptors instead. 
+ + # official modes + "1": ("L", "L", ("1",)), + "L": ("L", "L", ("L",)), + "I": ("L", "I", ("I",)), + "F": ("L", "F", ("F",)), + "P": ("RGB", "L", ("P",)), + "RGB": ("RGB", "L", ("R", "G", "B")), + "RGBX": ("RGB", "L", ("R", "G", "B", "X")), + "RGBA": ("RGB", "L", ("R", "G", "B", "A")), + "CMYK": ("RGB", "L", ("C", "M", "Y", "K")), + "YCbCr": ("RGB", "L", ("Y", "Cb", "Cr")), + "LAB": ("RGB", "L", ("L", "A", "B")), + "HSV": ("RGB", "L", ("H", "S", "V")), + + # Experimental modes include I;16, I;16L, I;16B, RGBa, BGR;15, and + # BGR;24. Use these modes only if you know exactly what you're + # doing... + +} + +if sys.byteorder == 'little': + _ENDIAN = '<' +else: + _ENDIAN = '>' + +_MODE_CONV = { + # official modes + "1": ('|b1', None), # Bits need to be extended to bytes + "L": ('|u1', None), + "LA": ('|u1', 2), + "I": (_ENDIAN + 'i4', None), + "F": (_ENDIAN + 'f4', None), + "P": ('|u1', None), + "RGB": ('|u1', 3), + "RGBX": ('|u1', 4), + "RGBA": ('|u1', 4), + "CMYK": ('|u1', 4), + "YCbCr": ('|u1', 3), + "LAB": ('|u1', 3), # UNDONE - unsigned |u1i1i1 + "HSV": ('|u1', 3), + # I;16 == I;16L, and I;32 == I;32L + "I;16": ('u2', None), + "I;16L": ('i2', None), + "I;16LS": ('u4', None), + "I;32L": ('i4', None), + "I;32LS": ('= 1: + return + + try: + from . import BmpImagePlugin + except ImportError: + pass + try: + from . import GifImagePlugin + except ImportError: + pass + try: + from . import JpegImagePlugin + except ImportError: + pass + try: + from . import PpmImagePlugin + except ImportError: + pass + try: + from . import PngImagePlugin + except ImportError: + pass +# try: +# import TiffImagePlugin +# except ImportError: +# pass + + _initialized = 1 + + +def init(): + """ + Explicitly initializes the Python Imaging Library. This function + loads all available file format drivers. 
+ """ + + global _initialized + if _initialized >= 2: + return 0 + + for plugin in _plugins: + try: + logger.debug("Importing %s", plugin) + __import__("PIL.%s" % plugin, globals(), locals(), []) + except ImportError as e: + logger.debug("Image: failed to import %s: %s", plugin, e) + + if OPEN or SAVE: + _initialized = 2 + return 1 + + +# -------------------------------------------------------------------- +# Codec factories (used by tobytes/frombytes and ImageFile.load) + +def _getdecoder(mode, decoder_name, args, extra=()): + + # tweak arguments + if args is None: + args = () + elif not isinstance(args, tuple): + args = (args,) + + try: + decoder = DECODERS[decoder_name] + return decoder(mode, *args + extra) + except KeyError: + pass + try: + # get decoder + decoder = getattr(core, decoder_name + "_decoder") + return decoder(mode, *args + extra) + except AttributeError: + raise IOError("decoder %s not available" % decoder_name) + + +def _getencoder(mode, encoder_name, args, extra=()): + + # tweak arguments + if args is None: + args = () + elif not isinstance(args, tuple): + args = (args,) + + try: + encoder = ENCODERS[encoder_name] + return encoder(mode, *args + extra) + except KeyError: + pass + try: + # get encoder + encoder = getattr(core, encoder_name + "_encoder") + return encoder(mode, *args + extra) + except AttributeError: + raise IOError("encoder %s not available" % encoder_name) + + +# -------------------------------------------------------------------- +# Simple expression analyzer + +def coerce_e(value): + return value if isinstance(value, _E) else _E(value) + + +class _E(object): + def __init__(self, data): + self.data = data + + def __add__(self, other): + return _E((self.data, "__add__", coerce_e(other).data)) + + def __mul__(self, other): + return _E((self.data, "__mul__", coerce_e(other).data)) + + +def _getscaleoffset(expr): + stub = ["stub"] + data = expr(_E(stub)).data + try: + (a, b, c) = data # simplified syntax + if (a is stub and b == 
"__mul__" and isinstance(c, numbers.Number)): + return c, 0.0 + if a is stub and b == "__add__" and isinstance(c, numbers.Number): + return 1.0, c + except TypeError: + pass + try: + ((a, b, c), d, e) = data # full syntax + if (a is stub and b == "__mul__" and isinstance(c, numbers.Number) and + d == "__add__" and isinstance(e, numbers.Number)): + return c, e + except TypeError: + pass + raise ValueError("illegal expression") + + +# -------------------------------------------------------------------- +# Implementation wrapper + +class Image(object): + """ + This class represents an image object. To create + :py:class:`~PIL.Image.Image` objects, use the appropriate factory + functions. There's hardly ever any reason to call the Image constructor + directly. + + * :py:func:`~PIL.Image.open` + * :py:func:`~PIL.Image.new` + * :py:func:`~PIL.Image.frombytes` + """ + format = None + format_description = None + _close_exclusive_fp_after_loading = True + + def __init__(self): + # FIXME: take "new" parameters / other image? + # FIXME: turn mode and size into delegating properties? + self.im = None + self.mode = "" + self._size = (0, 0) + self.palette = None + self.info = {} + self.category = NORMAL + self.readonly = 0 + self.pyaccess = None + + @property + def width(self): + return self.size[0] + + @property + def height(self): + return self.size[1] + + @property + def size(self): + return self._size + + def _new(self, im): + new = Image() + new.im = im + new.mode = im.mode + new._size = im.size + if im.mode in ('P', 'PA'): + if self.palette: + new.palette = self.palette.copy() + else: + from . import ImagePalette + new.palette = ImagePalette.ImagePalette() + new.info = self.info.copy() + return new + + # Context Manager Support + def __enter__(self): + return self + + def __exit__(self, *args): + self.close() + + def close(self): + """ + Closes the file pointer, if possible. + + This operation will destroy the image core and release its memory. 
+ The image data will be unusable afterward. + + This function is only required to close images that have not + had their file read and closed by the + :py:meth:`~PIL.Image.Image.load` method. See + :ref:`file-handling` for more information. + """ + try: + self.fp.close() + self.fp = None + except Exception as msg: + logger.debug("Error closing: %s", msg) + + if getattr(self, 'map', None): + self.map = None + + # Instead of simply setting to None, we're setting up a + # deferred error that will better explain that the core image + # object is gone. + self.im = deferred_error(ValueError("Operation on closed image")) + + if sys.version_info.major >= 3: + def __del__(self): + if (hasattr(self, 'fp') and hasattr(self, '_exclusive_fp') + and self.fp and self._exclusive_fp): + self.fp.close() + self.fp = None + + def _copy(self): + self.load() + self.im = self.im.copy() + self.pyaccess = None + self.readonly = 0 + + def _ensure_mutable(self): + if self.readonly: + self._copy() + else: + self.load() + + def _dump(self, file=None, format=None, **options): + import tempfile + + suffix = '' + if format: + suffix = '.'+format + + if not file: + f, filename = tempfile.mkstemp(suffix) + os.close(f) + else: + filename = file + if not filename.endswith(suffix): + filename = filename + suffix + + self.load() + + if not format or format == "PPM": + self.im.save_ppm(filename) + else: + self.save(filename, format, **options) + + return filename + + def __eq__(self, other): + return (isinstance(other, Image) and + self.__class__.__name__ == other.__class__.__name__ and + self.mode == other.mode and + self.size == other.size and + self.info == other.info and + self.category == other.category and + self.readonly == other.readonly and + self.getpalette() == other.getpalette() and + self.tobytes() == other.tobytes()) + + def __ne__(self, other): + eq = (self == other) + return not eq + + def __repr__(self): + return "<%s.%s image mode=%s size=%dx%d at 0x%X>" % ( + 
self.__class__.__module__, self.__class__.__name__, + self.mode, self.size[0], self.size[1], + id(self) + ) + + def _repr_png_(self): + """ iPython display hook support + + :returns: png version of the image as bytes + """ + from io import BytesIO + b = BytesIO() + self.save(b, 'PNG') + return b.getvalue() + + @property + def __array_interface__(self): + # numpy array interface support + new = {} + shape, typestr = _conv_type_shape(self) + new['shape'] = shape + new['typestr'] = typestr + new['version'] = 3 + if self.mode == '1': + # Binary images need to be extended from bits to bytes + # See: https://github.com/python-pillow/Pillow/issues/350 + new['data'] = self.tobytes('raw', 'L') + else: + new['data'] = self.tobytes() + return new + + def __getstate__(self): + return [ + self.info, + self.mode, + self.size, + self.getpalette(), + self.tobytes()] + + def __setstate__(self, state): + Image.__init__(self) + self.tile = [] + info, mode, size, palette, data = state + self.info = info + self.mode = mode + self._size = size + self.im = core.new(mode, size) + if mode in ("L", "P") and palette: + self.putpalette(palette) + self.frombytes(data) + + def tobytes(self, encoder_name="raw", *args): + """ + Return image as a bytes object. + + .. warning:: + + This method returns the raw image data from the internal + storage. For compressed image data (e.g. PNG, JPEG) use + :meth:`~.save`, with a BytesIO parameter for in-memory + data. + + :param encoder_name: What encoder to use. The default is to + use the standard "raw" encoder. + :param args: Extra arguments to the encoder. + :rtype: A bytes object. 
+ """ + + # may pass tuple instead of argument list + if len(args) == 1 and isinstance(args[0], tuple): + args = args[0] + + if encoder_name == "raw" and args == (): + args = self.mode + + self.load() + + # unpack data + e = _getencoder(self.mode, encoder_name, args) + e.setimage(self.im) + + bufsize = max(65536, self.size[0] * 4) # see RawEncode.c + + data = [] + while True: + l, s, d = e.encode(bufsize) + data.append(d) + if s: + break + if s < 0: + raise RuntimeError("encoder error %d in tobytes" % s) + + return b"".join(data) + + def tostring(self, *args, **kw): + raise NotImplementedError("tostring() has been removed. " + "Please call tobytes() instead.") + + def tobitmap(self, name="image"): + """ + Returns the image converted to an X11 bitmap. + + .. note:: This method only works for mode "1" images. + + :param name: The name prefix to use for the bitmap variables. + :returns: A string containing an X11 bitmap. + :raises ValueError: If the mode is not "1" + """ + + self.load() + if self.mode != "1": + raise ValueError("not a bitmap") + data = self.tobytes("xbm") + return b"".join([ + ("#define %s_width %d\n" % (name, self.size[0])).encode('ascii'), + ("#define %s_height %d\n" % (name, self.size[1])).encode('ascii'), + ("static char %s_bits[] = {\n" % name).encode('ascii'), data, b"};" + ]) + + def frombytes(self, data, decoder_name="raw", *args): + """ + Loads this image with pixel data from a bytes object. + + This method is similar to the :py:func:`~PIL.Image.frombytes` function, + but loads data into this image instead of creating a new image object. 
+ """ + + # may pass tuple instead of argument list + if len(args) == 1 and isinstance(args[0], tuple): + args = args[0] + + # default format + if decoder_name == "raw" and args == (): + args = self.mode + + # unpack data + d = _getdecoder(self.mode, decoder_name, args) + d.setimage(self.im) + s = d.decode(data) + + if s[0] >= 0: + raise ValueError("not enough image data") + if s[1] != 0: + raise ValueError("cannot decode image data") + + def fromstring(self, *args, **kw): + raise NotImplementedError("fromstring() has been removed. " + "Please call frombytes() instead.") + + def load(self): + """ + Allocates storage for the image and loads the pixel data. In + normal cases, you don't need to call this method, since the + Image class automatically loads an opened image when it is + accessed for the first time. + + This method will close the file associated with the image. See + :ref:`file-handling` for more information. + + :returns: An image access object. + :rtype: :ref:`PixelAccess` or :py:class:`PIL.PyAccess` + """ + if self.im and self.palette and self.palette.dirty: + # realize palette + self.im.putpalette(*self.palette.getdata()) + self.palette.dirty = 0 + self.palette.mode = "RGB" + self.palette.rawmode = None + if "transparency" in self.info: + if isinstance(self.info["transparency"], int): + self.im.putpalettealpha(self.info["transparency"], 0) + else: + self.im.putpalettealphas(self.info["transparency"]) + self.palette.mode = "RGBA" + + if self.im: + if HAS_CFFI and USE_CFFI_ACCESS: + if self.pyaccess: + return self.pyaccess + from . import PyAccess + self.pyaccess = PyAccess.new(self, self.readonly) + if self.pyaccess: + return self.pyaccess + return self.im.pixel_access(self.readonly) + + def verify(self): + """ + Verifies the contents of a file. For data read from a file, this + method attempts to determine if the file is broken, without + actually decoding the image data. If this method finds any + problems, it raises suitable exceptions. 
If you need to load + the image after using this method, you must reopen the image + file. + """ + pass + + def convert(self, mode=None, matrix=None, dither=None, + palette=WEB, colors=256): + """ + Returns a converted copy of this image. For the "P" mode, this + method translates pixels through the palette. If mode is + omitted, a mode is chosen so that all information in the image + and the palette can be represented without a palette. + + The current version supports all possible conversions between + "L", "RGB" and "CMYK." The **matrix** argument only supports "L" + and "RGB". + + When translating a color image to black and white (mode "L"), + the library uses the ITU-R 601-2 luma transform:: + + L = R * 299/1000 + G * 587/1000 + B * 114/1000 + + The default method of converting a greyscale ("L") or "RGB" + image into a bilevel (mode "1") image uses Floyd-Steinberg + dither to approximate the original image luminosity levels. If + dither is NONE, all non-zero values are set to 255 (white). To + use other thresholds, use the :py:meth:`~PIL.Image.Image.point` + method. + + When converting from "RGBA" to "P" without a **matrix** argument, + this passes the operation to :py:meth:`~PIL.Image.Image.quantize`, + and **dither** and **palette** are ignored. + + :param mode: The requested mode. See: :ref:`concept-modes`. + :param matrix: An optional conversion matrix. If given, this + should be 4- or 12-tuple containing floating point values. + :param dither: Dithering method, used when converting from + mode "RGB" to "P" or from "RGB" or "L" to "1". + Available methods are NONE or FLOYDSTEINBERG (default). + Note that this is not used when **matrix** is supplied. + :param palette: Palette to use when converting from mode "RGB" + to "P". Available palettes are WEB or ADAPTIVE. + :param colors: Number of colors to use for the ADAPTIVE palette. + Defaults to 256. + :rtype: :py:class:`~PIL.Image.Image` + :returns: An :py:class:`~PIL.Image.Image` object. 
+ """ + + self.load() + + if not mode and self.mode == "P": + # determine default mode + if self.palette: + mode = self.palette.mode + else: + mode = "RGB" + if not mode or (mode == self.mode and not matrix): + return self.copy() + + has_transparency = self.info.get('transparency') is not None + if matrix: + # matrix conversion + if mode not in ("L", "RGB"): + raise ValueError("illegal conversion") + im = self.im.convert_matrix(mode, matrix) + new = self._new(im) + if has_transparency and self.im.bands == 3: + transparency = new.info['transparency'] + + def convert_transparency(m, v): + v = m[0]*v[0] + m[1]*v[1] + m[2]*v[2] + m[3]*0.5 + return max(0, min(255, int(v))) + if mode == "L": + transparency = convert_transparency(matrix, transparency) + elif len(mode) == 3: + transparency = tuple([ + convert_transparency(matrix[i*4:i*4+4], transparency) + for i in range(0, len(transparency)) + ]) + new.info['transparency'] = transparency + return new + + if mode == "P" and self.mode == "RGBA": + return self.quantize(colors) + + trns = None + delete_trns = False + # transparency handling + if has_transparency: + if self.mode in ('L', 'RGB') and mode == 'RGBA': + # Use transparent conversion to promote from transparent + # color to an alpha channel. + new_im = self._new(self.im.convert_transparent( + mode, self.info['transparency'])) + del(new_im.info['transparency']) + return new_im + elif self.mode in ('L', 'RGB', 'P') and mode in ('L', 'RGB', 'P'): + t = self.info['transparency'] + if isinstance(t, bytes): + # Dragons. This can't be represented by a single color + warnings.warn('Palette images with Transparency ' + + ' expressed in bytes should be converted ' + + 'to RGBA images') + delete_trns = True + else: + # get the new transparency color. 
+ # use existing conversions + trns_im = Image()._new(core.new(self.mode, (1, 1))) + if self.mode == 'P': + trns_im.putpalette(self.palette) + if isinstance(t, tuple): + try: + t = trns_im.palette.getcolor(t) + except: + raise ValueError("Couldn't allocate a palette " + "color for transparency") + trns_im.putpixel((0, 0), t) + + if mode in ('L', 'RGB'): + trns_im = trns_im.convert(mode) + else: + # can't just retrieve the palette number, got to do it + # after quantization. + trns_im = trns_im.convert('RGB') + trns = trns_im.getpixel((0, 0)) + + elif self.mode == 'P' and mode == 'RGBA': + t = self.info['transparency'] + delete_trns = True + + if isinstance(t, bytes): + self.im.putpalettealphas(t) + elif isinstance(t, int): + self.im.putpalettealpha(t, 0) + else: + raise ValueError("Transparency for P mode should" + + " be bytes or int") + + if mode == "P" and palette == ADAPTIVE: + im = self.im.quantize(colors) + new = self._new(im) + from . import ImagePalette + new.palette = ImagePalette.raw("RGB", new.im.getpalette("RGB")) + if delete_trns: + # This could possibly happen if we requantize to fewer colors. + # The transparency would be totally off in that case. + del(new.info['transparency']) + if trns is not None: + try: + new.info['transparency'] = new.palette.getcolor(trns) + except: + # if we can't make a transparent color, don't leave the old + # transparency hanging around to mess us up. + del(new.info['transparency']) + warnings.warn("Couldn't allocate palette entry " + + "for transparency") + return new + + # colorspace conversion + if dither is None: + dither = FLOYDSTEINBERG + + try: + im = self.im.convert(mode, dither) + except ValueError: + try: + # normalize source image and try again + im = self.im.convert(getmodebase(self.mode)) + im = im.convert(mode, dither) + except KeyError: + raise ValueError("illegal conversion") + + new_im = self._new(im) + if delete_trns: + # crash fail if we leave a bytes transparency in an rgb/l mode. 
+ del(new_im.info['transparency']) + if trns is not None: + if new_im.mode == 'P': + try: + new_im.info['transparency'] = new_im.palette.getcolor(trns) + except: + del(new_im.info['transparency']) + warnings.warn("Couldn't allocate palette entry " + + "for transparency") + else: + new_im.info['transparency'] = trns + return new_im + + def quantize(self, colors=256, method=None, kmeans=0, palette=None): + """ + Convert the image to 'P' mode with the specified number + of colors. + + :param colors: The desired number of colors, <= 256 + :param method: 0 = median cut + 1 = maximum coverage + 2 = fast octree + 3 = libimagequant + :param kmeans: Integer + :param palette: Quantize to the palette of given :py:class:`PIL.Image.Image`. + :returns: A new image + + """ + + self.load() + + if method is None: + # defaults: + method = 0 + if self.mode == 'RGBA': + method = 2 + + if self.mode == 'RGBA' and method not in (2, 3): + # Caller specified an invalid mode. + raise ValueError( + 'Fast Octree (method == 2) and libimagequant (method == 3) ' + + 'are the only valid methods for quantizing RGBA images') + + if palette: + # use palette from reference image + palette.load() + if palette.mode != "P": + raise ValueError("bad mode for palette image") + if self.mode != "RGB" and self.mode != "L": + raise ValueError( + "only RGB or L mode images can be quantized to a palette" + ) + im = self.im.convert("P", 1, palette.im) + return self._new(im) + + return self._new(self.im.quantize(colors, method, kmeans)) + + def copy(self): + """ + Copies this image. Use this method if you wish to paste things + into an image, but still retain the original. + + :rtype: :py:class:`~PIL.Image.Image` + :returns: An :py:class:`~PIL.Image.Image` object. + """ + self.load() + return self._new(self.im.copy()) + + __copy__ = copy + + def crop(self, box=None): + """ + Returns a rectangular region from this image. The box is a + 4-tuple defining the left, upper, right, and lower pixel + coordinate. 
See :ref:`coordinate-system`. + + Note: Prior to Pillow 3.4.0, this was a lazy operation. + + :param box: The crop rectangle, as a (left, upper, right, lower)-tuple. + :rtype: :py:class:`~PIL.Image.Image` + :returns: An :py:class:`~PIL.Image.Image` object. + """ + + if box is None: + return self.copy() + + self.load() + return self._new(self._crop(self.im, box)) + + def _crop(self, im, box): + """ + Returns a rectangular region from the core image object im. + + This is equivalent to calling im.crop((x0, y0, x1, y1)), but + includes additional sanity checks. + + :param im: a core image object + :param box: The crop rectangle, as a (left, upper, right, lower)-tuple. + :returns: A core image object. + """ + + x0, y0, x1, y1 = map(int, map(round, box)) + + absolute_values = (abs(x1 - x0), abs(y1 - y0)) + + _decompression_bomb_check(absolute_values) + + return im.crop((x0, y0, x1, y1)) + + def draft(self, mode, size): + """ + Configures the image file loader so it returns a version of the + image that as closely as possible matches the given mode and + size. For example, you can use this method to convert a color + JPEG to greyscale while loading it, or to extract a 128x192 + version from a PCD file. + + Note that this method modifies the :py:class:`~PIL.Image.Image` object + in place. If the image has already been loaded, this method has no + effect. + + Note: This method is not implemented for most images. It is + currently implemented only for JPEG and PCD images. + + :param mode: The requested mode. + :param size: The requested size. + """ + pass + + def _expand(self, xmargin, ymargin=None): + if ymargin is None: + ymargin = xmargin + self.load() + return self._new(self.im.expand(xmargin, ymargin, 0)) + + def filter(self, filter): + """ + Filters this image using the given filter. For a list of + available filters, see the :py:mod:`~PIL.ImageFilter` module. + + :param filter: Filter kernel. + :returns: An :py:class:`~PIL.Image.Image` object. """ + + from . 
import ImageFilter + + self.load() + + if isinstance(filter, Callable): + filter = filter() + if not hasattr(filter, "filter"): + raise TypeError("filter argument should be ImageFilter.Filter " + + "instance or class") + + multiband = isinstance(filter, ImageFilter.MultibandFilter) + if self.im.bands == 1 or multiband: + return self._new(filter.filter(self.im)) + + ims = [] + for c in range(self.im.bands): + ims.append(self._new(filter.filter(self.im.getband(c)))) + return merge(self.mode, ims) + + def getbands(self): + """ + Returns a tuple containing the name of each band in this image. + For example, **getbands** on an RGB image returns ("R", "G", "B"). + + :returns: A tuple containing band names. + :rtype: tuple + """ + return ImageMode.getmode(self.mode).bands + + def getbbox(self): + """ + Calculates the bounding box of the non-zero regions in the + image. + + :returns: The bounding box is returned as a 4-tuple defining the + left, upper, right, and lower pixel coordinate. See + :ref:`coordinate-system`. If the image is completely empty, this + method returns None. + + """ + + self.load() + return self.im.getbbox() + + def getcolors(self, maxcolors=256): + """ + Returns a list of colors used in this image. + + :param maxcolors: Maximum number of colors. If this number is + exceeded, this method returns None. The default limit is + 256 colors. + :returns: An unsorted list of (count, pixel) values. + """ + + self.load() + if self.mode in ("1", "L", "P"): + h = self.im.histogram() + out = [] + for i in range(256): + if h[i]: + out.append((h[i], i)) + if len(out) > maxcolors: + return None + return out + return self.im.getcolors(maxcolors) + + def getdata(self, band=None): + """ + Returns the contents of this image as a sequence object + containing pixel values. The sequence object is flattened, so + that values for line one follow directly after the values of + line zero, and so on. 
+ + Note that the sequence object returned by this method is an + internal PIL data type, which only supports certain sequence + operations. To convert it to an ordinary sequence (e.g. for + printing), use **list(im.getdata())**. + + :param band: What band to return. The default is to return + all bands. To return a single band, pass in the index + value (e.g. 0 to get the "R" band from an "RGB" image). + :returns: A sequence-like object. + """ + + self.load() + if band is not None: + return self.im.getband(band) + return self.im # could be abused + + def getextrema(self): + """ + Gets the the minimum and maximum pixel values for each band in + the image. + + :returns: For a single-band image, a 2-tuple containing the + minimum and maximum pixel value. For a multi-band image, + a tuple containing one 2-tuple for each band. + """ + + self.load() + if self.im.bands > 1: + extrema = [] + for i in range(self.im.bands): + extrema.append(self.im.getband(i).getextrema()) + return tuple(extrema) + return self.im.getextrema() + + def getim(self): + """ + Returns a capsule that points to the internal image memory. + + :returns: A capsule object. + """ + + self.load() + return self.im.ptr + + def getpalette(self): + """ + Returns the image palette as a list. + + :returns: A list of color values [r, g, b, ...], or None if the + image has no palette. + """ + + self.load() + try: + if py3: + return list(self.im.getpalette()) + else: + return [i8(c) for c in self.im.getpalette()] + except ValueError: + return None # no palette + + def getpixel(self, xy): + """ + Returns the pixel value at a given position. + + :param xy: The coordinate, given as (x, y). See + :ref:`coordinate-system`. + :returns: The pixel value. If the image is a multi-layer image, + this method returns a tuple. 
+ """ + + self.load() + if self.pyaccess: + return self.pyaccess.getpixel(xy) + return self.im.getpixel(xy) + + def getprojection(self): + """ + Get projection to x and y axes + + :returns: Two sequences, indicating where there are non-zero + pixels along the X-axis and the Y-axis, respectively. + """ + + self.load() + x, y = self.im.getprojection() + return [i8(c) for c in x], [i8(c) for c in y] + + def histogram(self, mask=None, extrema=None): + """ + Returns a histogram for the image. The histogram is returned as + a list of pixel counts, one for each pixel value in the source + image. If the image has more than one band, the histograms for + all bands are concatenated (for example, the histogram for an + "RGB" image contains 768 values). + + A bilevel image (mode "1") is treated as a greyscale ("L") image + by this method. + + If a mask is provided, the method returns a histogram for those + parts of the image where the mask image is non-zero. The mask + image must have the same size as the image, and be either a + bi-level image (mode "1") or a greyscale image ("L"). + + :param mask: An optional mask. + :returns: A list containing pixel counts. + """ + self.load() + if mask: + mask.load() + return self.im.histogram((0, 0), mask.im) + if self.mode in ("I", "F"): + if extrema is None: + extrema = self.getextrema() + return self.im.histogram(extrema) + return self.im.histogram() + + def offset(self, xoffset, yoffset=None): + raise NotImplementedError("offset() has been removed. " + "Please call ImageChops.offset() instead.") + + def paste(self, im, box=None, mask=None): + """ + Pastes another image into this image. The box argument is either + a 2-tuple giving the upper left corner, a 4-tuple defining the + left, upper, right, and lower pixel coordinate, or None (same as + (0, 0)). See :ref:`coordinate-system`. If a 4-tuple is given, the size + of the pasted image must match the size of the region. 
+ + If the modes don't match, the pasted image is converted to the mode of + this image (see the :py:meth:`~PIL.Image.Image.convert` method for + details). + + Instead of an image, the source can be a integer or tuple + containing pixel values. The method then fills the region + with the given color. When creating RGB images, you can + also use color strings as supported by the ImageColor module. + + If a mask is given, this method updates only the regions + indicated by the mask. You can use either "1", "L" or "RGBA" + images (in the latter case, the alpha band is used as mask). + Where the mask is 255, the given image is copied as is. Where + the mask is 0, the current value is preserved. Intermediate + values will mix the two images together, including their alpha + channels if they have them. + + See :py:meth:`~PIL.Image.Image.alpha_composite` if you want to + combine images with respect to their alpha channels. + + :param im: Source image or pixel value (integer or tuple). + :param box: An optional 4-tuple giving the region to paste into. + If a 2-tuple is used instead, it's treated as the upper left + corner. If omitted or None, the source is pasted into the + upper left corner. + + If an image is given as the second argument and there is no + third, the box defaults to (0, 0), and the second argument + is interpreted as a mask image. + :param mask: An optional mask image. + """ + + if isImageType(box) and mask is None: + # abbreviated paste(im, mask) syntax + mask = box + box = None + + if box is None: + box = (0, 0) + + if len(box) == 2: + # upper left corner given; get size from image or mask + if isImageType(im): + size = im.size + elif isImageType(mask): + size = mask.size + else: + # FIXME: use self.size here? + raise ValueError( + "cannot determine region size; use 4-item box" + ) + box += (box[0]+size[0], box[1]+size[1]) + + if isStringType(im): + from . 
import ImageColor + im = ImageColor.getcolor(im, self.mode) + + elif isImageType(im): + im.load() + if self.mode != im.mode: + if self.mode != "RGB" or im.mode not in ("RGBA", "RGBa"): + # should use an adapter for this! + im = im.convert(self.mode) + im = im.im + + self._ensure_mutable() + + if mask: + mask.load() + self.im.paste(im, box, mask.im) + else: + self.im.paste(im, box) + + def alpha_composite(self, im, dest=(0, 0), source=(0, 0)): + """ 'In-place' analog of Image.alpha_composite. Composites an image + onto this image. + + :param im: image to composite over this one + :param dest: Optional 2 tuple (left, top) specifying the upper + left corner in this (destination) image. + :param source: Optional 2 (left, top) tuple for the upper left + corner in the overlay source image, or 4 tuple (left, top, right, + bottom) for the bounds of the source rectangle + + Performance Note: Not currently implemented in-place in the core layer. + """ + + if not isinstance(source, (list, tuple)): + raise ValueError("Source must be a tuple") + if not isinstance(dest, (list, tuple)): + raise ValueError("Destination must be a tuple") + if not len(source) in (2, 4): + raise ValueError("Source must be a 2 or 4-tuple") + if not len(dest) == 2: + raise ValueError("Destination must be a 2-tuple") + if min(source) < 0: + raise ValueError("Source must be non-negative") + if min(dest) < 0: + raise ValueError("Destination must be non-negative") + + if len(source) == 2: + source = source + im.size + + # over image, crop if it's not the whole thing. + if source == (0, 0) + im.size: + overlay = im + else: + overlay = im.crop(source) + + # target for the paste + box = dest + (dest[0] + overlay.width, dest[1] + overlay.height) + + # destination image. don't copy if we're using the whole image. 
+ if box == (0, 0) + self.size: + background = self + else: + background = self.crop(box) + + result = alpha_composite(background, overlay) + self.paste(result, box) + + def point(self, lut, mode=None): + """ + Maps this image through a lookup table or function. + + :param lut: A lookup table, containing 256 (or 65536 if + self.mode=="I" and mode == "L") values per band in the + image. A function can be used instead, it should take a + single argument. The function is called once for each + possible pixel value, and the resulting table is applied to + all bands of the image. + :param mode: Output mode (default is same as input). In the + current version, this can only be used if the source image + has mode "L" or "P", and the output has mode "1" or the + source image mode is "I" and the output mode is "L". + :returns: An :py:class:`~PIL.Image.Image` object. + """ + + self.load() + + if isinstance(lut, ImagePointHandler): + return lut.point(self) + + if callable(lut): + # if it isn't a list, it should be a function + if self.mode in ("I", "I;16", "F"): + # check if the function can be used with point_transform + # UNDONE wiredfool -- I think this prevents us from ever doing + # a gamma function point transform on > 8bit images. + scale, offset = _getscaleoffset(lut) + return self._new(self.im.point_transform(scale, offset)) + # for other modes, convert the function to a table + lut = [lut(i) for i in range(256)] * self.im.bands + + if self.mode == "F": + # FIXME: _imaging returns a confusing error message for this case + raise ValueError("point operation not supported for this mode") + + return self._new(self.im.point(lut, mode)) + + def putalpha(self, alpha): + """ + Adds or replaces the alpha layer in this image. If the image + does not have an alpha layer, it's converted to "LA" or "RGBA". + The new layer must be either "L" or "1". + + :param alpha: The new alpha layer. 
This can either be an "L" or "1" + image having the same size as this image, or an integer or + other color value. + """ + + self._ensure_mutable() + + if self.mode not in ("LA", "RGBA"): + # attempt to promote self to a matching alpha mode + try: + mode = getmodebase(self.mode) + "A" + try: + self.im.setmode(mode) + except (AttributeError, ValueError): + # do things the hard way + im = self.im.convert(mode) + if im.mode not in ("LA", "RGBA"): + raise ValueError # sanity check + self.im = im + self.pyaccess = None + self.mode = self.im.mode + except (KeyError, ValueError): + raise ValueError("illegal image mode") + + if self.mode == "LA": + band = 1 + else: + band = 3 + + if isImageType(alpha): + # alpha layer + if alpha.mode not in ("1", "L"): + raise ValueError("illegal image mode") + alpha.load() + if alpha.mode == "1": + alpha = alpha.convert("L") + else: + # constant alpha + try: + self.im.fillband(band, alpha) + except (AttributeError, ValueError): + # do things the hard way + alpha = new("L", self.size, alpha) + else: + return + + self.im.putband(alpha.im, band) + + def putdata(self, data, scale=1.0, offset=0.0): + """ + Copies pixel data to this image. This method copies data from a + sequence object into the image, starting at the upper left + corner (0, 0), and continuing until either the image or the + sequence ends. The scale and offset values are used to adjust + the sequence values: **pixel = value*scale + offset**. + + :param data: A sequence object. + :param scale: An optional scale value. The default is 1.0. + :param offset: An optional offset value. The default is 0.0. + """ + + self._ensure_mutable() + + self.im.putdata(data, scale, offset) + + def putpalette(self, data, rawmode="RGB"): + """ + Attaches a palette to this image. The image must be a "P" or + "L" image, and the palette sequence must contain 768 integer + values, where each group of three values represent the red, + green, and blue values for the corresponding pixel + index. 
Instead of an integer sequence, you can use an 8-bit + string. + + :param data: A palette sequence (either a list or a string). + :param rawmode: The raw mode of the palette. + """ + from . import ImagePalette + + if self.mode not in ("L", "P"): + raise ValueError("illegal image mode") + self.load() + if isinstance(data, ImagePalette.ImagePalette): + palette = ImagePalette.raw(data.rawmode, data.palette) + else: + if not isinstance(data, bytes): + if py3: + data = bytes(data) + else: + data = "".join(chr(x) for x in data) + palette = ImagePalette.raw(rawmode, data) + self.mode = "P" + self.palette = palette + self.palette.mode = "RGB" + self.load() # install new palette + + def putpixel(self, xy, value): + """ + Modifies the pixel at the given position. The color is given as + a single numerical value for single-band images, and a tuple for + multi-band images. + + Note that this method is relatively slow. For more extensive changes, + use :py:meth:`~PIL.Image.Image.paste` or the :py:mod:`~PIL.ImageDraw` + module instead. + + See: + + * :py:meth:`~PIL.Image.Image.paste` + * :py:meth:`~PIL.Image.Image.putdata` + * :py:mod:`~PIL.ImageDraw` + + :param xy: The pixel coordinate, given as (x, y). See + :ref:`coordinate-system`. + :param value: The pixel value. + """ + + if self.readonly: + self._copy() + self.load() + + if self.pyaccess: + return self.pyaccess.putpixel(xy, value) + return self.im.putpixel(xy, value) + + def remap_palette(self, dest_map, source_palette=None): + """ + Rewrites the image to reorder the palette. + + :param dest_map: A list of indexes into the original palette. + e.g. [1,0] would swap a two item palette, and list(range(255)) + is the identity transform. + :param source_palette: Bytes or None. + :returns: An :py:class:`~PIL.Image.Image` object. + + """ + from . 
import ImagePalette + + if self.mode not in ("L", "P"): + raise ValueError("illegal image mode") + + if source_palette is None: + if self.mode == "P": + real_source_palette = self.im.getpalette("RGB")[:768] + else: # L-mode + real_source_palette = bytearray(i//3 for i in range(768)) + else: + real_source_palette = source_palette + + palette_bytes = b"" + new_positions = [0]*256 + + # pick only the used colors from the palette + for i, oldPosition in enumerate(dest_map): + palette_bytes += real_source_palette[oldPosition*3:oldPosition*3+3] + new_positions[oldPosition] = i + + # replace the palette color id of all pixel with the new id + + # Palette images are [0..255], mapped through a 1 or 3 + # byte/color map. We need to remap the whole image + # from palette 1 to palette 2. New_positions is + # an array of indexes into palette 1. Palette 2 is + # palette 1 with any holes removed. + + # We're going to leverage the convert mechanism to use the + # C code to remap the image from palette 1 to palette 2, + # by forcing the source image into 'L' mode and adding a + # mapping 'L' mode palette, then converting back to 'L' + # sans palette thus converting the image bytes, then + # assigning the optimized RGB palette. + + # perf reference, 9500x4000 gif, w/~135 colors + # 14 sec prepatch, 1 sec postpatch with optimization forced. + + mapping_palette = bytearray(new_positions) + + m_im = self.copy() + m_im.mode = 'P' + + m_im.palette = ImagePalette.ImagePalette("RGB", + palette=mapping_palette*3, + size=768) + # possibly set palette dirty, then + # m_im.putpalette(mapping_palette, 'L') # converts to 'P' + # or just force it. + # UNDONE -- this is part of the general issue with palettes + m_im.im.putpalette(*m_im.palette.getdata()) + + m_im = m_im.convert('L') + + # Internally, we require 768 bytes for a palette. 
+ new_palette_bytes = (palette_bytes + + (768 - len(palette_bytes)) * b'\x00') + m_im.putpalette(new_palette_bytes) + m_im.palette = ImagePalette.ImagePalette("RGB", + palette=palette_bytes, + size=len(palette_bytes)) + + return m_im + + def resize(self, size, resample=NEAREST, box=None): + """ + Returns a resized copy of this image. + + :param size: The requested size in pixels, as a 2-tuple: + (width, height). + :param resample: An optional resampling filter. This can be + one of :py:attr:`PIL.Image.NEAREST`, :py:attr:`PIL.Image.BOX`, + :py:attr:`PIL.Image.BILINEAR`, :py:attr:`PIL.Image.HAMMING`, + :py:attr:`PIL.Image.BICUBIC` or :py:attr:`PIL.Image.LANCZOS`. + If omitted, or if the image has mode "1" or "P", it is + set :py:attr:`PIL.Image.NEAREST`. + See: :ref:`concept-filters`. + :param box: An optional 4-tuple of floats giving the region + of the source image which should be scaled. + The values should be within (0, 0, width, height) rectangle. + If omitted or None, the entire source is used. + :returns: An :py:class:`~PIL.Image.Image` object. + """ + + if resample not in ( + NEAREST, BILINEAR, BICUBIC, LANCZOS, BOX, HAMMING, + ): + raise ValueError("unknown resampling filter") + + size = tuple(size) + + if box is None: + box = (0, 0) + self.size + else: + box = tuple(box) + + if self.size == size and box == (0, 0) + self.size: + return self.copy() + + if self.mode in ("1", "P"): + resample = NEAREST + + if self.mode == 'LA': + return self.convert('La').resize(size, resample, box).convert('LA') + + if self.mode == 'RGBA': + return self.convert('RGBa').resize(size, resample, box).convert('RGBA') + + self.load() + + return self._new(self.im.resize(size, resample, box)) + + def rotate(self, angle, resample=NEAREST, expand=0, center=None, + translate=None, fillcolor=None): + """ + Returns a rotated copy of this image. This method returns a + copy of this image, rotated the given number of degrees counter + clockwise around its centre. 
+ + :param angle: In degrees counter clockwise. + :param resample: An optional resampling filter. This can be + one of :py:attr:`PIL.Image.NEAREST` (use nearest neighbour), + :py:attr:`PIL.Image.BILINEAR` (linear interpolation in a 2x2 + environment), or :py:attr:`PIL.Image.BICUBIC` + (cubic spline interpolation in a 4x4 environment). + If omitted, or if the image has mode "1" or "P", it is + set :py:attr:`PIL.Image.NEAREST`. See :ref:`concept-filters`. + :param expand: Optional expansion flag. If true, expands the output + image to make it large enough to hold the entire rotated image. + If false or omitted, make the output image the same size as the + input image. Note that the expand flag assumes rotation around + the center and no translation. + :param center: Optional center of rotation (a 2-tuple). Origin is + the upper left corner. Default is the center of the image. + :param translate: An optional post-rotate translation (a 2-tuple). + :param fillcolor: An optional color for area outside the rotated image. + :returns: An :py:class:`~PIL.Image.Image` object. + """ + + angle = angle % 360.0 + + # Fast paths regardless of filter, as long as we're not + # translating or changing the center. + if not (center or translate): + if angle == 0: + return self.copy() + if angle == 180: + return self.transpose(ROTATE_180) + if angle == 90 and expand: + return self.transpose(ROTATE_90) + if angle == 270 and expand: + return self.transpose(ROTATE_270) + + # Calculate the affine matrix. Note that this is the reverse + # transformation (from destination image to source) because we + # want to interpolate the (discrete) destination pixel from + # the local area around the (floating) source pixel. 
+ + # The matrix we actually want (note that it operates from the right): + # (1, 0, tx) (1, 0, cx) ( cos a, sin a, 0) (1, 0, -cx) + # (0, 1, ty) * (0, 1, cy) * (-sin a, cos a, 0) * (0, 1, -cy) + # (0, 0, 1) (0, 0, 1) ( 0, 0, 1) (0, 0, 1) + + # The reverse matrix is thus: + # (1, 0, cx) ( cos -a, sin -a, 0) (1, 0, -cx) (1, 0, -tx) + # (0, 1, cy) * (-sin -a, cos -a, 0) * (0, 1, -cy) * (0, 1, -ty) + # (0, 0, 1) ( 0, 0, 1) (0, 0, 1) (0, 0, 1) + + # In any case, the final translation may be updated at the end to + # compensate for the expand flag. + + w, h = self.size + + if translate is None: + post_trans = (0, 0) + else: + post_trans = translate + if center is None: + rotn_center = (w / 2.0, h / 2.0) # FIXME These should be rounded to ints? + else: + rotn_center = center + + angle = - math.radians(angle) + matrix = [ + round(math.cos(angle), 15), round(math.sin(angle), 15), 0.0, + round(-math.sin(angle), 15), round(math.cos(angle), 15), 0.0 + ] + + def transform(x, y, matrix): + (a, b, c, d, e, f) = matrix + return a*x + b*y + c, d*x + e*y + f + + matrix[2], matrix[5] = transform(-rotn_center[0] - post_trans[0], + -rotn_center[1] - post_trans[1], matrix) + matrix[2] += rotn_center[0] + matrix[5] += rotn_center[1] + + if expand: + # calculate output size + xx = [] + yy = [] + for x, y in ((0, 0), (w, 0), (w, h), (0, h)): + x, y = transform(x, y, matrix) + xx.append(x) + yy.append(y) + nw = int(math.ceil(max(xx)) - math.floor(min(xx))) + nh = int(math.ceil(max(yy)) - math.floor(min(yy))) + + # We multiply a translation matrix from the right. Because of its + # special form, this is the same as taking the image of the + # translation vector as new translation vector. + matrix[2], matrix[5] = transform(-(nw - w) / 2.0, + -(nh - h) / 2.0, + matrix) + w, h = nw, nh + + return self.transform((w, h), AFFINE, matrix, resample, fillcolor=fillcolor) + + def save(self, fp, format=None, **params): + """ + Saves this image under the given filename. 
If no format is + specified, the format to use is determined from the filename + extension, if possible. + + Keyword options can be used to provide additional instructions + to the writer. If a writer doesn't recognise an option, it is + silently ignored. The available options are described in the + :doc:`image format documentation + <../handbook/image-file-formats>` for each writer. + + You can use a file object instead of a filename. In this case, + you must always specify the format. The file object must + implement the ``seek``, ``tell``, and ``write`` + methods, and be opened in binary mode. + + :param fp: A filename (string), pathlib.Path object or file object. + :param format: Optional format override. If omitted, the + format to use is determined from the filename extension. + If a file object was used instead of a filename, this + parameter should always be used. + :param params: Extra parameters to the image writer. + :returns: None + :exception ValueError: If the output format could not be determined + from the file name. Use the format option to solve this. + :exception IOError: If the file could not be written. The file + may have been created, and may contain partial data. + """ + + filename = "" + open_fp = False + if isPath(fp): + filename = fp + open_fp = True + elif HAS_PATHLIB and isinstance(fp, Path): + filename = str(fp) + open_fp = True + if not filename and hasattr(fp, "name") and isPath(fp.name): + # only set the name for metadata purposes + filename = fp.name + + # may mutate self! 
+ self.load() + + save_all = params.pop('save_all', False) + self.encoderinfo = params + self.encoderconfig = () + + preinit() + + ext = os.path.splitext(filename)[1].lower() + + if not format: + if ext not in EXTENSION: + init() + try: + format = EXTENSION[ext] + except KeyError: + raise ValueError('unknown file extension: {}'.format(ext)) + + if format.upper() not in SAVE: + init() + if save_all: + save_handler = SAVE_ALL[format.upper()] + else: + save_handler = SAVE[format.upper()] + + if open_fp: + if params.get('append', False): + fp = builtins.open(filename, "r+b") + else: + # Open also for reading ("+"), because TIFF save_all + # writer needs to go back and edit the written data. + fp = builtins.open(filename, "w+b") + + try: + save_handler(self, fp, filename) + finally: + # do what we can to clean up + if open_fp: + fp.close() + + def seek(self, frame): + """ + Seeks to the given frame in this sequence file. If you seek + beyond the end of the sequence, the method raises an + **EOFError** exception. When a sequence file is opened, the + library automatically seeks to frame 0. + + Note that in the current version of the library, most sequence + formats only allows you to seek to the next frame. + + See :py:meth:`~PIL.Image.Image.tell`. + + :param frame: Frame number, starting at 0. + :exception EOFError: If the call attempts to seek beyond the end + of the sequence. + """ + + # overridden by file handlers + if frame != 0: + raise EOFError + + def show(self, title=None, command=None): + """ + Displays this image. This method is mainly intended for + debugging purposes. + + On Unix platforms, this method saves the image to a temporary + PPM file, and calls either the **xv** utility or the **display** + utility, depending on which one can be found. + + On macOS, this method saves the image to a temporary BMP file, and + opens it with the native Preview application. 
+ + On Windows, it saves the image to a temporary BMP file, and uses + the standard BMP display utility to show it (usually Paint). + + :param title: Optional title to use for the image window, + where possible. + :param command: command used to show the image + """ + + _show(self, title=title, command=command) + + def split(self): + """ + Split this image into individual bands. This method returns a + tuple of individual image bands from an image. For example, + splitting an "RGB" image creates three new images each + containing a copy of one of the original bands (red, green, + blue). + + If you need only one band, :py:meth:`~PIL.Image.Image.getchannel` + method can be more convenient and faster. + + :returns: A tuple containing bands. + """ + + self.load() + if self.im.bands == 1: + ims = [self.copy()] + else: + ims = map(self._new, self.im.split()) + return tuple(ims) + + def getchannel(self, channel): + """ + Returns an image containing a single channel of the source image. + + :param channel: What channel to return. Could be index + (0 for "R" channel of "RGB") or channel name + ("A" for alpha channel of "RGBA"). + :returns: An image in "L" mode. + + .. versionadded:: 4.3.0 + """ + self.load() + + if isStringType(channel): + try: + channel = self.getbands().index(channel) + except ValueError: + raise ValueError( + 'The image has no channel "{}"'.format(channel)) + + return self._new(self.im.getband(channel)) + + def tell(self): + """ + Returns the current frame number. See :py:meth:`~PIL.Image.Image.seek`. + + :returns: Frame number, starting with 0. + """ + return 0 + + def thumbnail(self, size, resample=BICUBIC): + """ + Make this image into a thumbnail. This method modifies the + image to contain a thumbnail version of itself, no larger than + the given size. 
This method calculates an appropriate thumbnail + size to preserve the aspect of the image, calls the + :py:meth:`~PIL.Image.Image.draft` method to configure the file reader + (where applicable), and finally resizes the image. + + Note that this function modifies the :py:class:`~PIL.Image.Image` + object in place. If you need to use the full resolution image as well, + apply this method to a :py:meth:`~PIL.Image.Image.copy` of the original + image. + + :param size: Requested size. + :param resample: Optional resampling filter. This can be one + of :py:attr:`PIL.Image.NEAREST`, :py:attr:`PIL.Image.BILINEAR`, + :py:attr:`PIL.Image.BICUBIC`, or :py:attr:`PIL.Image.LANCZOS`. + If omitted, it defaults to :py:attr:`PIL.Image.BICUBIC`. + (was :py:attr:`PIL.Image.NEAREST` prior to version 2.5.0) + :returns: None + """ + + # preserve aspect ratio + x, y = self.size + if x > size[0]: + y = int(max(y * size[0] / x, 1)) + x = int(size[0]) + if y > size[1]: + x = int(max(x * size[1] / y, 1)) + y = int(size[1]) + size = x, y + + if size == self.size: + return + + self.draft(None, size) + + im = self.resize(size, resample) + + self.im = im.im + self.mode = im.mode + self._size = size + + self.readonly = 0 + self.pyaccess = None + + # FIXME: the different transform methods need further explanation + # instead of bloating the method docs, add a separate chapter. + def transform(self, size, method, data=None, resample=NEAREST, + fill=1, fillcolor=None): + """ + Transforms this image. This method creates a new image with the + given size, and the same mode as the original, and copies data + to the new image using the given transform. + + :param size: The output size. + :param method: The transformation method. 
This is one of + :py:attr:`PIL.Image.EXTENT` (cut out a rectangular subregion), + :py:attr:`PIL.Image.AFFINE` (affine transform), + :py:attr:`PIL.Image.PERSPECTIVE` (perspective transform), + :py:attr:`PIL.Image.QUAD` (map a quadrilateral to a rectangle), or + :py:attr:`PIL.Image.MESH` (map a number of source quadrilaterals + in one operation). + + It may also be an :py:class:`~PIL.Image.ImageTransformHandler` + object:: + class Example(Image.ImageTransformHandler): + def transform(size, method, data, resample, fill=1): + # Return result + + It may also be an object with a :py:meth:`~method.getdata` method + that returns a tuple supplying new **method** and **data** values:: + class Example(object): + def getdata(self): + method = Image.EXTENT + data = (0, 0, 100, 100) + return method, data + :param data: Extra data to the transformation method. + :param resample: Optional resampling filter. It can be one of + :py:attr:`PIL.Image.NEAREST` (use nearest neighbour), + :py:attr:`PIL.Image.BILINEAR` (linear interpolation in a 2x2 + environment), or :py:attr:`PIL.Image.BICUBIC` (cubic spline + interpolation in a 4x4 environment). If omitted, or if the image + has mode "1" or "P", it is set to :py:attr:`PIL.Image.NEAREST`. + :param fill: If **method** is an + :py:class:`~PIL.Image.ImageTransformHandler` object, this is one of + the arguments passed to it. Otherwise, it is unused. + :param fillcolor: Optional fill color for the area outside the transform + in the output image. + :returns: An :py:class:`~PIL.Image.Image` object. + """ + + if self.mode == 'LA': + return self.convert('La').transform( + size, method, data, resample, fill, fillcolor).convert('LA') + + if self.mode == 'RGBA': + return self.convert('RGBa').transform( + size, method, data, resample, fill, fillcolor).convert('RGBA') + + if isinstance(method, ImageTransformHandler): + return method.transform(size, self, resample=resample, fill=fill) + + if hasattr(method, "getdata"): + # compatibility w. 
old-style transform objects + method, data = method.getdata() + + if data is None: + raise ValueError("missing method data") + + im = new(self.mode, size, fillcolor) + if method == MESH: + # list of quads + for box, quad in data: + im.__transformer(box, self, QUAD, quad, resample, + fillcolor is None) + else: + im.__transformer((0, 0)+size, self, method, data, + resample, fillcolor is None) + + return im + + def __transformer(self, box, image, method, data, + resample=NEAREST, fill=1): + w = box[2] - box[0] + h = box[3] - box[1] + + if method == AFFINE: + data = data[0:6] + + elif method == EXTENT: + # convert extent to an affine transform + x0, y0, x1, y1 = data + xs = float(x1 - x0) / w + ys = float(y1 - y0) / h + method = AFFINE + data = (xs, 0, x0, 0, ys, y0) + + elif method == PERSPECTIVE: + data = data[0:8] + + elif method == QUAD: + # quadrilateral warp. data specifies the four corners + # given as NW, SW, SE, and NE. + nw = data[0:2] + sw = data[2:4] + se = data[4:6] + ne = data[6:8] + x0, y0 = nw + As = 1.0 / w + At = 1.0 / h + data = (x0, (ne[0]-x0)*As, (sw[0]-x0)*At, + (se[0]-sw[0]-ne[0]+x0)*As*At, + y0, (ne[1]-y0)*As, (sw[1]-y0)*At, + (se[1]-sw[1]-ne[1]+y0)*As*At) + + else: + raise ValueError("unknown transformation method") + + if resample not in (NEAREST, BILINEAR, BICUBIC): + raise ValueError("unknown resampling filter") + + image.load() + + self.load() + + if image.mode in ("1", "P"): + resample = NEAREST + + self.im.transform2(box, image.im, method, data, resample, fill) + + def transpose(self, method): + """ + Transpose image (flip or rotate in 90 degree steps) + + :param method: One of :py:attr:`PIL.Image.FLIP_LEFT_RIGHT`, + :py:attr:`PIL.Image.FLIP_TOP_BOTTOM`, :py:attr:`PIL.Image.ROTATE_90`, + :py:attr:`PIL.Image.ROTATE_180`, :py:attr:`PIL.Image.ROTATE_270`, + :py:attr:`PIL.Image.TRANSPOSE` or :py:attr:`PIL.Image.TRANSVERSE`. + :returns: Returns a flipped or rotated copy of this image. 
+ """ + + self.load() + return self._new(self.im.transpose(method)) + + def effect_spread(self, distance): + """ + Randomly spread pixels in an image. + + :param distance: Distance to spread pixels. + """ + self.load() + return self._new(self.im.effect_spread(distance)) + + def toqimage(self): + """Returns a QImage copy of this image""" + from . import ImageQt + if not ImageQt.qt_is_installed: + raise ImportError("Qt bindings are not installed") + return ImageQt.toqimage(self) + + def toqpixmap(self): + """Returns a QPixmap copy of this image""" + from . import ImageQt + if not ImageQt.qt_is_installed: + raise ImportError("Qt bindings are not installed") + return ImageQt.toqpixmap(self) + + +# -------------------------------------------------------------------- +# Abstract handlers. + +class ImagePointHandler(object): + # used as a mixin by point transforms (for use with im.point) + pass + + +class ImageTransformHandler(object): + # used as a mixin by geometry transforms (for use with im.transform) + pass + + +# -------------------------------------------------------------------- +# Factories + +# +# Debugging + +def _wedge(): + """Create greyscale wedge (for debugging only)""" + + return Image()._new(core.wedge("L")) + + +def _check_size(size): + """ + Common check to enforce type and sanity check on size tuples + + :param size: Should be a 2 tuple of (width, height) + :returns: True, or raises a ValueError + """ + + if not isinstance(size, (list, tuple)): + raise ValueError("Size must be a tuple") + if len(size) != 2: + raise ValueError("Size must be a tuple of length 2") + if size[0] < 0 or size[1] < 0: + raise ValueError("Width and height must be >= 0") + + return True + + +def new(mode, size, color=0): + """ + Creates a new image with the given mode and size. + + :param mode: The mode to use for the new image. See: + :ref:`concept-modes`. + :param size: A 2-tuple, containing (width, height) in pixels. + :param color: What color to use for the image. 
Default is black. + If given, this should be a single integer or floating point value + for single-band modes, and a tuple for multi-band modes (one value + per band). When creating RGB images, you can also use color + strings as supported by the ImageColor module. If the color is + None, the image is not initialised. + :returns: An :py:class:`~PIL.Image.Image` object. + """ + + _check_size(size) + + if color is None: + # don't initialize + return Image()._new(core.new(mode, size)) + + if isStringType(color): + # css3-style specifier + + from . import ImageColor + color = ImageColor.getcolor(color, mode) + + return Image()._new(core.fill(mode, size, color)) + + +def frombytes(mode, size, data, decoder_name="raw", *args): + """ + Creates a copy of an image memory from pixel data in a buffer. + + In its simplest form, this function takes three arguments + (mode, size, and unpacked pixel data). + + You can also use any pixel decoder supported by PIL. For more + information on available decoders, see the section + :ref:`Writing Your Own File Decoder `. + + Note that this function decodes pixel data only, not entire images. + If you have an entire image in a string, wrap it in a + :py:class:`~io.BytesIO` object, and use :py:func:`~PIL.Image.open` to load + it. + + :param mode: The image mode. See: :ref:`concept-modes`. + :param size: The image size. + :param data: A byte buffer containing raw data for the given mode. + :param decoder_name: What decoder to use. + :param args: Additional parameters for the given decoder. + :returns: An :py:class:`~PIL.Image.Image` object. + """ + + _check_size(size) + + # may pass tuple instead of argument list + if len(args) == 1 and isinstance(args[0], tuple): + args = args[0] + + if decoder_name == "raw" and args == (): + args = mode + + im = new(mode, size) + im.frombytes(data, decoder_name, args) + return im + + +def fromstring(*args, **kw): + raise NotImplementedError("fromstring() has been removed. 
" + + "Please call frombytes() instead.") + + +def frombuffer(mode, size, data, decoder_name="raw", *args): + """ + Creates an image memory referencing pixel data in a byte buffer. + + This function is similar to :py:func:`~PIL.Image.frombytes`, but uses data + in the byte buffer, where possible. This means that changes to the + original buffer object are reflected in this image). Not all modes can + share memory; supported modes include "L", "RGBX", "RGBA", and "CMYK". + + Note that this function decodes pixel data only, not entire images. + If you have an entire image file in a string, wrap it in a + **BytesIO** object, and use :py:func:`~PIL.Image.open` to load it. + + In the current version, the default parameters used for the "raw" decoder + differs from that used for :py:func:`~PIL.Image.frombytes`. This is a + bug, and will probably be fixed in a future release. The current release + issues a warning if you do this; to disable the warning, you should provide + the full set of parameters. See below for details. + + :param mode: The image mode. See: :ref:`concept-modes`. + :param size: The image size. + :param data: A bytes or other buffer object containing raw + data for the given mode. + :param decoder_name: What decoder to use. + :param args: Additional parameters for the given decoder. For the + default encoder ("raw"), it's recommended that you provide the + full set of parameters:: + + frombuffer(mode, size, data, "raw", mode, 0, 1) + + :returns: An :py:class:`~PIL.Image.Image` object. + + .. 
versionadded:: 1.1.4 + """ + + _check_size(size) + + # may pass tuple instead of argument list + if len(args) == 1 and isinstance(args[0], tuple): + args = args[0] + + if decoder_name == "raw": + if args == (): + warnings.warn( + "the frombuffer defaults may change in a future release; " + "for portability, change the call to read:\n" + " frombuffer(mode, size, data, 'raw', mode, 0, 1)", + RuntimeWarning, stacklevel=2 + ) + args = mode, 0, -1 # may change to (mode, 0, 1) post-1.1.6 + if args[0] in _MAPMODES: + im = new(mode, (1, 1)) + im = im._new( + core.map_buffer(data, size, decoder_name, None, 0, args) + ) + im.readonly = 1 + return im + + return frombytes(mode, size, data, decoder_name, args) + + +def fromarray(obj, mode=None): + """ + Creates an image memory from an object exporting the array interface + (using the buffer protocol). + + If **obj** is not contiguous, then the tobytes method is called + and :py:func:`~PIL.Image.frombuffer` is used. + + If you have an image in NumPy:: + + from PIL import Image + import numpy as np + im = Image.open('hopper.jpg') + a = np.asarray(im) + + Then this can be used to convert it to a Pillow image:: + + im = Image.fromarray(a) + + :param obj: Object with array interface + :param mode: Mode to use (will be determined from type if None) + See: :ref:`concept-modes`. + :returns: An image object. + + .. versionadded:: 1.1.6 + """ + arr = obj.__array_interface__ + shape = arr['shape'] + ndim = len(shape) + strides = arr.get('strides', None) + if mode is None: + try: + typekey = (1, 1) + shape[2:], arr['typestr'] + mode, rawmode = _fromarray_typemap[typekey] + except KeyError: + raise TypeError("Cannot handle this data type") + else: + rawmode = mode + if mode in ["1", "L", "I", "P", "F"]: + ndmax = 2 + elif mode == "RGB": + ndmax = 3 + else: + ndmax = 4 + if ndim > ndmax: + raise ValueError("Too many dimensions: %d > %d." 
% (ndim, ndmax)) + + size = shape[1], shape[0] + if strides is not None: + if hasattr(obj, 'tobytes'): + obj = obj.tobytes() + else: + obj = obj.tostring() + + return frombuffer(mode, size, obj, "raw", rawmode, 0, 1) + + +def fromqimage(im): + """Creates an image instance from a QImage image""" + from . import ImageQt + if not ImageQt.qt_is_installed: + raise ImportError("Qt bindings are not installed") + return ImageQt.fromqimage(im) + + +def fromqpixmap(im): + """Creates an image instance from a QPixmap image""" + from . import ImageQt + if not ImageQt.qt_is_installed: + raise ImportError("Qt bindings are not installed") + return ImageQt.fromqpixmap(im) + + +_fromarray_typemap = { + # (shape, typestr) => mode, rawmode + # first two members of shape are set to one + ((1, 1), "|b1"): ("1", "1;8"), + ((1, 1), "|u1"): ("L", "L"), + ((1, 1), "|i1"): ("I", "I;8"), + ((1, 1), "u2"): ("I", "I;16B"), + ((1, 1), "i2"): ("I", "I;16BS"), + ((1, 1), "u4"): ("I", "I;32B"), + ((1, 1), "i4"): ("I", "I;32BS"), + ((1, 1), "f4"): ("F", "F;32BF"), + ((1, 1), "f8"): ("F", "F;64BF"), + ((1, 1, 2), "|u1"): ("LA", "LA"), + ((1, 1, 3), "|u1"): ("RGB", "RGB"), + ((1, 1, 4), "|u1"): ("RGBA", "RGBA"), + } + +# shortcuts +_fromarray_typemap[((1, 1), _ENDIAN + "i4")] = ("I", "I") +_fromarray_typemap[((1, 1), _ENDIAN + "f4")] = ("F", "F") + + +def _decompression_bomb_check(size): + if MAX_IMAGE_PIXELS is None: + return + + pixels = size[0] * size[1] + + if pixels > 2 * MAX_IMAGE_PIXELS: + raise DecompressionBombError( + "Image size (%d pixels) exceeds limit of %d pixels, " + "could be decompression bomb DOS attack." % + (pixels, 2 * MAX_IMAGE_PIXELS)) + + if pixels > MAX_IMAGE_PIXELS: + warnings.warn( + "Image size (%d pixels) exceeds limit of %d pixels, " + "could be decompression bomb DOS attack." % + (pixels, MAX_IMAGE_PIXELS), + DecompressionBombWarning) + + +def open(fp, mode="r"): + """ + Opens and identifies the given image file. 
+ + This is a lazy operation; this function identifies the file, but + the file remains open and the actual image data is not read from + the file until you try to process the data (or call the + :py:meth:`~PIL.Image.Image.load` method). See + :py:func:`~PIL.Image.new`. See :ref:`file-handling`. + + :param fp: A filename (string), pathlib.Path object or a file object. + The file object must implement :py:meth:`~file.read`, + :py:meth:`~file.seek`, and :py:meth:`~file.tell` methods, + and be opened in binary mode. + :param mode: The mode. If given, this argument must be "r". + :returns: An :py:class:`~PIL.Image.Image` object. + :exception IOError: If the file cannot be found, or the image cannot be + opened and identified. + """ + + if mode != "r": + raise ValueError("bad mode %r" % mode) + + exclusive_fp = False + filename = "" + if isPath(fp): + filename = fp + elif HAS_PATHLIB and isinstance(fp, Path): + filename = str(fp.resolve()) + + if filename: + fp = builtins.open(filename, "rb") + exclusive_fp = True + + try: + fp.seek(0) + except (AttributeError, io.UnsupportedOperation): + fp = io.BytesIO(fp.read()) + exclusive_fp = True + + prefix = fp.read(16) + + preinit() + + accept_warnings = [] + def _open_core(fp, filename, prefix): + for i in ID: + try: + factory, accept = OPEN[i] + result = not accept or accept(prefix) + if type(result) in [str, bytes]: + accept_warnings.append(result) + elif result: + fp.seek(0) + im = factory(fp, filename) + _decompression_bomb_check(im.size) + return im + except (SyntaxError, IndexError, TypeError, struct.error): + # Leave disabled by default, spams the logs with image + # opening failures that are entirely expected. 
+ # logger.debug("", exc_info=True) + continue + return None + + im = _open_core(fp, filename, prefix) + + if im is None: + if init(): + im = _open_core(fp, filename, prefix) + + if im: + im._exclusive_fp = exclusive_fp + return im + + if exclusive_fp: + fp.close() + for message in accept_warnings: + warnings.warn(message) + raise IOError("cannot identify image file %r" + % (filename if filename else fp)) + +# +# Image processing. + + +def alpha_composite(im1, im2): + """ + Alpha composite im2 over im1. + + :param im1: The first image. Must have mode RGBA. + :param im2: The second image. Must have mode RGBA, and the same size as + the first image. + :returns: An :py:class:`~PIL.Image.Image` object. + """ + + im1.load() + im2.load() + return im1._new(core.alpha_composite(im1.im, im2.im)) + + +def blend(im1, im2, alpha): + """ + Creates a new image by interpolating between two input images, using + a constant alpha.:: + + out = image1 * (1.0 - alpha) + image2 * alpha + + :param im1: The first image. + :param im2: The second image. Must have the same mode and size as + the first image. + :param alpha: The interpolation alpha factor. If alpha is 0.0, a + copy of the first image is returned. If alpha is 1.0, a copy of + the second image is returned. There are no restrictions on the + alpha value. If necessary, the result is clipped to fit into + the allowed output range. + :returns: An :py:class:`~PIL.Image.Image` object. + """ + + im1.load() + im2.load() + return im1._new(core.blend(im1.im, im2.im, alpha)) + + +def composite(image1, image2, mask): + """ + Create composite image by blending images using a transparency mask. + + :param image1: The first image. + :param image2: The second image. Must have the same mode and + size as the first image. + :param mask: A mask image. This image can have mode + "1", "L", or "RGBA", and must have the same size as the + other two images. 
+ """ + + image = image2.copy() + image.paste(image1, None, mask) + return image + + +def eval(image, *args): + """ + Applies the function (which should take one argument) to each pixel + in the given image. If the image has more than one band, the same + function is applied to each band. Note that the function is + evaluated once for each possible pixel value, so you cannot use + random components or other generators. + + :param image: The input image. + :param function: A function object, taking one integer argument. + :returns: An :py:class:`~PIL.Image.Image` object. + """ + + return image.point(args[0]) + + +def merge(mode, bands): + """ + Merge a set of single band images into a new multiband image. + + :param mode: The mode to use for the output image. See: + :ref:`concept-modes`. + :param bands: A sequence containing one single-band image for + each band in the output image. All bands must have the + same size. + :returns: An :py:class:`~PIL.Image.Image` object. + """ + + if getmodebands(mode) != len(bands) or "*" in mode: + raise ValueError("wrong number of bands") + for band in bands[1:]: + if band.mode != getmodetype(mode): + raise ValueError("mode mismatch") + if band.size != bands[0].size: + raise ValueError("size mismatch") + for band in bands: + band.load() + return bands[0]._new(core.merge(mode, *[b.im for b in bands])) + + +# -------------------------------------------------------------------- +# Plugin registry + +def register_open(id, factory, accept=None): + """ + Register an image file plugin. This function should not be used + in application code. + + :param id: An image format identifier. + :param factory: An image file factory method. + :param accept: An optional function that can be used to quickly + reject images having another format. + """ + id = id.upper() + ID.append(id) + OPEN[id] = factory, accept + + +def register_mime(id, mimetype): + """ + Registers an image MIME type. This function should not be used + in application code. 
+ + :param id: An image format identifier. + :param mimetype: The image MIME type for this format. + """ + MIME[id.upper()] = mimetype + + +def register_save(id, driver): + """ + Registers an image save function. This function should not be + used in application code. + + :param id: An image format identifier. + :param driver: A function to save images in this format. + """ + SAVE[id.upper()] = driver + + +def register_save_all(id, driver): + """ + Registers an image function to save all the frames + of a multiframe format. This function should not be + used in application code. + + :param id: An image format identifier. + :param driver: A function to save images in this format. + """ + SAVE_ALL[id.upper()] = driver + + +def register_extension(id, extension): + """ + Registers an image extension. This function should not be + used in application code. + + :param id: An image format identifier. + :param extension: An extension used for this format. + """ + EXTENSION[extension.lower()] = id.upper() + + +def register_extensions(id, extensions): + """ + Registers image extensions. This function should not be + used in application code. + + :param id: An image format identifier. + :param extensions: A list of extensions used for this format. + """ + for extension in extensions: + register_extension(id, extension) + + +def registered_extensions(): + """ + Returns a dictionary containing all file extensions belonging + to registered plugins + """ + if not EXTENSION: + init() + return EXTENSION + + +def register_decoder(name, decoder): + """ + Registers an image decoder. This function should not be + used in application code. + + :param name: The name of the decoder + :param decoder: A callable(mode, args) that returns an + ImageFile.PyDecoder object + + .. versionadded:: 4.1.0 + """ + DECODERS[name] = decoder + + +def register_encoder(name, encoder): + """ + Registers an image encoder. This function should not be + used in application code. 
+ + :param name: The name of the encoder + :param encoder: A callable(mode, args) that returns an + ImageFile.PyEncoder object + + .. versionadded:: 4.1.0 + """ + ENCODERS[name] = encoder + + +# -------------------------------------------------------------------- +# Simple display support. User code may override this. + +def _show(image, **options): + # override me, as necessary + _showxv(image, **options) + + +def _showxv(image, title=None, **options): + from . import ImageShow + ImageShow.show(image, title, **options) + + +# -------------------------------------------------------------------- +# Effects + +def effect_mandelbrot(size, extent, quality): + """ + Generate a Mandelbrot set covering the given extent. + + :param size: The requested size in pixels, as a 2-tuple: + (width, height). + :param extent: The extent to cover, as a 4-tuple: + (x0, y0, x1, y2). + :param quality: Quality. + """ + return Image()._new(core.effect_mandelbrot(size, extent, quality)) + + +def effect_noise(size, sigma): + """ + Generate Gaussian noise centered around 128. + + :param size: The requested size in pixels, as a 2-tuple: + (width, height). + :param sigma: Standard deviation of noise. + """ + return Image()._new(core.effect_noise(size, sigma)) + + +def linear_gradient(mode): + """ + Generate 256x256 linear gradient from black to white, top to bottom. + + :param mode: Input mode. + """ + return Image()._new(core.linear_gradient(mode)) + + +def radial_gradient(mode): + """ + Generate 256x256 radial gradient from black to white, centre to edge. + + :param mode: Input mode. 
+ """ + return Image()._new(core.radial_gradient(mode)) + + +# -------------------------------------------------------------------- +# Resources + +def _apply_env_variables(env=None): + if env is None: + env = os.environ + + for var_name, setter in [ + ('PILLOW_ALIGNMENT', core.set_alignment), + ('PILLOW_BLOCK_SIZE', core.set_block_size), + ('PILLOW_BLOCKS_MAX', core.set_blocks_max), + ]: + if var_name not in env: + continue + + var = env[var_name].lower() + + units = 1 + for postfix, mul in [('k', 1024), ('m', 1024*1024)]: + if var.endswith(postfix): + units = mul + var = var[:-len(postfix)] + + try: + var = int(var) * units + except ValueError: + warnings.warn("{0} is not int".format(var_name)) + continue + + try: + setter(var) + except ValueError as e: + warnings.warn("{0}: {1}".format(var_name, e)) + + +_apply_env_variables() +atexit.register(core.clear_cache) diff --git a/thesisenv/lib/python3.6/site-packages/PIL/ImageChops.py b/thesisenv/lib/python3.6/site-packages/PIL/ImageChops.py new file mode 100644 index 0000000..8901673 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/PIL/ImageChops.py @@ -0,0 +1,283 @@ +# +# The Python Imaging Library. +# $Id$ +# +# standard channel operations +# +# History: +# 1996-03-24 fl Created +# 1996-08-13 fl Added logical operations (for "1" images) +# 2000-10-12 fl Added offset method (from Image.py) +# +# Copyright (c) 1997-2000 by Secret Labs AB +# Copyright (c) 1996-2000 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +from . import Image + + +def constant(image, value): + """Fill a channel with a given grey level. + + :rtype: :py:class:`~PIL.Image.Image` + """ + + return Image.new("L", image.size, value) + + +def duplicate(image): + """Copy a channel. Alias for :py:meth:`PIL.Image.Image.copy`. + + :rtype: :py:class:`~PIL.Image.Image` + """ + + return image.copy() + + +def invert(image): + """ + Invert an image (channel). + + .. 
code-block:: python + + out = MAX - image + + :rtype: :py:class:`~PIL.Image.Image` + """ + + image.load() + return image._new(image.im.chop_invert()) + + +def lighter(image1, image2): + """ + Compares the two images, pixel by pixel, and returns a new image containing + the lighter values. + + .. code-block:: python + + out = max(image1, image2) + + :rtype: :py:class:`~PIL.Image.Image` + """ + + image1.load() + image2.load() + return image1._new(image1.im.chop_lighter(image2.im)) + + +def darker(image1, image2): + """ + Compares the two images, pixel by pixel, and returns a new image + containing the darker values. + + .. code-block:: python + + out = min(image1, image2) + + :rtype: :py:class:`~PIL.Image.Image` + """ + + image1.load() + image2.load() + return image1._new(image1.im.chop_darker(image2.im)) + + +def difference(image1, image2): + """ + Returns the absolute value of the pixel-by-pixel difference between the two + images. + + .. code-block:: python + + out = abs(image1 - image2) + + :rtype: :py:class:`~PIL.Image.Image` + """ + + image1.load() + image2.load() + return image1._new(image1.im.chop_difference(image2.im)) + + +def multiply(image1, image2): + """ + Superimposes two images on top of each other. + + If you multiply an image with a solid black image, the result is black. If + you multiply with a solid white image, the image is unaffected. + + .. code-block:: python + + out = image1 * image2 / MAX + + :rtype: :py:class:`~PIL.Image.Image` + """ + + image1.load() + image2.load() + return image1._new(image1.im.chop_multiply(image2.im)) + + +def screen(image1, image2): + """ + Superimposes two inverted images on top of each other. + + .. 
code-block:: python + + out = MAX - ((MAX - image1) * (MAX - image2) / MAX) + + :rtype: :py:class:`~PIL.Image.Image` + """ + + image1.load() + image2.load() + return image1._new(image1.im.chop_screen(image2.im)) + + +def add(image1, image2, scale=1.0, offset=0): + """ + Adds two images, dividing the result by scale and adding the + offset. If omitted, scale defaults to 1.0, and offset to 0.0. + + .. code-block:: python + + out = ((image1 + image2) / scale + offset) + + :rtype: :py:class:`~PIL.Image.Image` + """ + + image1.load() + image2.load() + return image1._new(image1.im.chop_add(image2.im, scale, offset)) + + +def subtract(image1, image2, scale=1.0, offset=0): + """ + Subtracts two images, dividing the result by scale and adding the + offset. If omitted, scale defaults to 1.0, and offset to 0.0. + + .. code-block:: python + + out = ((image1 - image2) / scale + offset) + + :rtype: :py:class:`~PIL.Image.Image` + """ + + image1.load() + image2.load() + return image1._new(image1.im.chop_subtract(image2.im, scale, offset)) + + +def add_modulo(image1, image2): + """Add two images, without clipping the result. + + .. code-block:: python + + out = ((image1 + image2) % MAX) + + :rtype: :py:class:`~PIL.Image.Image` + """ + + image1.load() + image2.load() + return image1._new(image1.im.chop_add_modulo(image2.im)) + + +def subtract_modulo(image1, image2): + """Subtract two images, without clipping the result. + + .. code-block:: python + + out = ((image1 - image2) % MAX) + + :rtype: :py:class:`~PIL.Image.Image` + """ + + image1.load() + image2.load() + return image1._new(image1.im.chop_subtract_modulo(image2.im)) + + +def logical_and(image1, image2): + """Logical AND between two images. + + .. code-block:: python + + out = ((image1 and image2) % MAX) + + :rtype: :py:class:`~PIL.Image.Image` + """ + + image1.load() + image2.load() + return image1._new(image1.im.chop_and(image2.im)) + + +def logical_or(image1, image2): + """Logical OR between two images. + + .. 
code-block:: python + + out = ((image1 or image2) % MAX) + + :rtype: :py:class:`~PIL.Image.Image` + """ + + image1.load() + image2.load() + return image1._new(image1.im.chop_or(image2.im)) + + +def logical_xor(image1, image2): + """Logical XOR between two images. + + .. code-block:: python + + out = ((bool(image1) != bool(image2)) % MAX) + + :rtype: :py:class:`~PIL.Image.Image` + """ + + image1.load() + image2.load() + return image1._new(image1.im.chop_xor(image2.im)) + + +def blend(image1, image2, alpha): + """Blend images using constant transparency weight. Alias for + :py:meth:`PIL.Image.Image.blend`. + + :rtype: :py:class:`~PIL.Image.Image` + """ + + return Image.blend(image1, image2, alpha) + + +def composite(image1, image2, mask): + """Create composite using transparency mask. Alias for + :py:meth:`PIL.Image.Image.composite`. + + :rtype: :py:class:`~PIL.Image.Image` + """ + + return Image.composite(image1, image2, mask) + + +def offset(image, xoffset, yoffset=None): + """Returns a copy of the image where data has been offset by the given + distances. Data wraps around the edges. If **yoffset** is omitted, it + is assumed to be equal to **xoffset**. + + :param xoffset: The horizontal distance. + :param yoffset: The vertical distance. If omitted, both + distances are set to the same value. + :rtype: :py:class:`~PIL.Image.Image` + """ + + if yoffset is None: + yoffset = xoffset + image.load() + return image._new(image.im.offset(xoffset, yoffset)) diff --git a/thesisenv/lib/python3.6/site-packages/PIL/ImageCms.py b/thesisenv/lib/python3.6/site-packages/PIL/ImageCms.py new file mode 100644 index 0000000..4b6281f --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/PIL/ImageCms.py @@ -0,0 +1,955 @@ +# The Python Imaging Library. +# $Id$ + +# Optional color management support, based on Kevin Cazabon's PyCMS +# library. + +# History: + +# 2009-03-08 fl Added to PIL. 
+ +# Copyright (C) 2002-2003 Kevin Cazabon +# Copyright (c) 2009 by Fredrik Lundh +# Copyright (c) 2013 by Eric Soroos + +# See the README file for information on usage and redistribution. See +# below for the original description. + +from __future__ import print_function +import sys + +from PIL import Image +try: + from PIL import _imagingcms +except ImportError as ex: + # Allow error import for doc purposes, but error out when accessing + # anything in core. + from _util import deferred_error + _imagingcms = deferred_error(ex) +from PIL._util import isStringType + +DESCRIPTION = """ +pyCMS + + a Python / PIL interface to the littleCMS ICC Color Management System + Copyright (C) 2002-2003 Kevin Cazabon + kevin@cazabon.com + http://www.cazabon.com + + pyCMS home page: http://www.cazabon.com/pyCMS + littleCMS home page: http://www.littlecms.com + (littleCMS is Copyright (C) 1998-2001 Marti Maria) + + Originally released under LGPL. Graciously donated to PIL in + March 2009, for distribution under the standard PIL license + + The pyCMS.py module provides a "clean" interface between Python/PIL and + pyCMSdll, taking care of some of the more complex handling of the direct + pyCMSdll functions, as well as error-checking and making sure that all + relevant data is kept together. + + While it is possible to call pyCMSdll functions directly, it's not highly + recommended. + + Version History: + + 1.0.0 pil Oct 2013 Port to LCMS 2. + + 0.1.0 pil mod March 10, 2009 + + Renamed display profile to proof profile. The proof + profile is the profile of the device that is being + simulated, not the profile of the device which is + actually used to display/print the final simulation + (that'd be the output profile) - also see LCMSAPI.txt + input colorspace -> using 'renderingIntent' -> proof + colorspace -> using 'proofRenderingIntent' -> output + colorspace + + Added LCMS FLAGS support. 
+ Added FLAGS["SOFTPROOFING"] as default flag for + buildProofTransform (otherwise the proof profile/intent + would be ignored). + + 0.1.0 pil March 2009 - added to PIL, as PIL.ImageCms + + 0.0.2 alpha Jan 6, 2002 + + Added try/except statements around type() checks of + potential CObjects... Python won't let you use type() + on them, and raises a TypeError (stupid, if you ask + me!) + + Added buildProofTransformFromOpenProfiles() function. + Additional fixes in DLL, see DLL code for details. + + 0.0.1 alpha first public release, Dec. 26, 2002 + + Known to-do list with current version (of Python interface, not pyCMSdll): + + none + +""" + +VERSION = "1.0.0 pil" + +# --------------------------------------------------------------------. + +core = _imagingcms + +# +# intent/direction values + +INTENT_PERCEPTUAL = 0 +INTENT_RELATIVE_COLORIMETRIC = 1 +INTENT_SATURATION = 2 +INTENT_ABSOLUTE_COLORIMETRIC = 3 + +DIRECTION_INPUT = 0 +DIRECTION_OUTPUT = 1 +DIRECTION_PROOF = 2 + +# +# flags + +FLAGS = { + "MATRIXINPUT": 1, + "MATRIXOUTPUT": 2, + "MATRIXONLY": (1 | 2), + "NOWHITEONWHITEFIXUP": 4, # Don't hot fix scum dot + # Don't create prelinearization tables on precalculated transforms + # (internal use): + "NOPRELINEARIZATION": 16, + "GUESSDEVICECLASS": 32, # Guess device class (for transform2devicelink) + "NOTCACHE": 64, # Inhibit 1-pixel cache + "NOTPRECALC": 256, + "NULLTRANSFORM": 512, # Don't transform anyway + "HIGHRESPRECALC": 1024, # Use more memory to give better accuracy + "LOWRESPRECALC": 2048, # Use less memory to minimize resources + "WHITEBLACKCOMPENSATION": 8192, + "BLACKPOINTCOMPENSATION": 8192, + "GAMUTCHECK": 4096, # Out of Gamut alarm + "SOFTPROOFING": 16384, # Do softproofing + "PRESERVEBLACK": 32768, # Black preservation + "NODEFAULTRESOURCEDEF": 16777216, # CRD special + "GRIDPOINTS": lambda n: ((n) & 0xFF) << 16 # Gridpoints +} + +_MAX_FLAG = 0 +for flag in FLAGS.values(): + if isinstance(flag, int): + _MAX_FLAG = _MAX_FLAG | flag + + +# 
--------------------------------------------------------------------. +# Experimental PIL-level API +# --------------------------------------------------------------------. + +## +# Profile. + +class ImageCmsProfile(object): + + def __init__(self, profile): + """ + :param profile: Either a string representing a filename, + a file like object containing a profile or a + low-level profile object + + """ + + if isStringType(profile): + self._set(core.profile_open(profile), profile) + elif hasattr(profile, "read"): + self._set(core.profile_frombytes(profile.read())) + elif isinstance(profile, _imagingcms.CmsProfile): + self._set(profile) + else: + raise TypeError("Invalid type for Profile") + + def _set(self, profile, filename=None): + self.profile = profile + self.filename = filename + if profile: + self.product_name = None # profile.product_name + self.product_info = None # profile.product_info + else: + self.product_name = None + self.product_info = None + + def tobytes(self): + """ + Returns the profile in a format suitable for embedding in + saved images. + + :returns: a bytes object containing the ICC profile. + """ + + return core.profile_tobytes(self.profile) + + +class ImageCmsTransform(Image.ImagePointHandler): + + """ + Transform. This can be used with the procedural API, or with the standard + Image.point() method. + + Will return the output profile in the output.info['icc_profile']. 
+ """ + + def __init__(self, input, output, input_mode, output_mode, + intent=INTENT_PERCEPTUAL, proof=None, + proof_intent=INTENT_ABSOLUTE_COLORIMETRIC, flags=0): + if proof is None: + self.transform = core.buildTransform( + input.profile, output.profile, + input_mode, output_mode, + intent, + flags + ) + else: + self.transform = core.buildProofTransform( + input.profile, output.profile, proof.profile, + input_mode, output_mode, + intent, proof_intent, + flags + ) + # Note: inputMode and outputMode are for pyCMS compatibility only + self.input_mode = self.inputMode = input_mode + self.output_mode = self.outputMode = output_mode + + self.output_profile = output + + def point(self, im): + return self.apply(im) + + def apply(self, im, imOut=None): + im.load() + if imOut is None: + imOut = Image.new(self.output_mode, im.size, None) + self.transform.apply(im.im.id, imOut.im.id) + imOut.info['icc_profile'] = self.output_profile.tobytes() + return imOut + + def apply_in_place(self, im): + im.load() + if im.mode != self.output_mode: + raise ValueError("mode mismatch") # wrong output mode + self.transform.apply(im.im.id, im.im.id) + im.info['icc_profile'] = self.output_profile.tobytes() + return im + + +def get_display_profile(handle=None): + """ (experimental) Fetches the profile for the current display device. + :returns: None if the profile is not known. + """ + + if sys.platform == "win32": + from PIL import ImageWin + if isinstance(handle, ImageWin.HDC): + profile = core.get_display_profile_win32(handle, 1) + else: + profile = core.get_display_profile_win32(handle or 0) + else: + try: + get = _imagingcms.get_display_profile + except AttributeError: + return None + else: + profile = get() + return ImageCmsProfile(profile) + + +# --------------------------------------------------------------------. +# pyCMS compatible layer +# --------------------------------------------------------------------. + +class PyCMSError(Exception): + + """ (pyCMS) Exception class. 
+ This is used for all errors in the pyCMS API. """ + pass + + +def profileToProfile( + im, inputProfile, outputProfile, renderingIntent=INTENT_PERCEPTUAL, + outputMode=None, inPlace=0, flags=0): + """ + (pyCMS) Applies an ICC transformation to a given image, mapping from + inputProfile to outputProfile. + + If the input or output profiles specified are not valid filenames, a + PyCMSError will be raised. If inPlace == TRUE and outputMode != im.mode, + a PyCMSError will be raised. If an error occurs during application of + the profiles, a PyCMSError will be raised. If outputMode is not a mode + supported by the outputProfile (or by pyCMS), a PyCMSError will be + raised. + + This function applies an ICC transformation to im from inputProfile's + color space to outputProfile's color space using the specified rendering + intent to decide how to handle out-of-gamut colors. + + OutputMode can be used to specify that a color mode conversion is to + be done using these profiles, but the specified profiles must be able + to handle that mode. I.e., if converting im from RGB to CMYK using + profiles, the input profile must handle RGB data, and the output + profile must handle CMYK data. + + :param im: An open PIL image object (i.e. Image.new(...) or + Image.open(...), etc.) + :param inputProfile: String, as a valid filename path to the ICC input + profile you wish to use for this image, or a profile object + :param outputProfile: String, as a valid filename path to the ICC output + profile you wish to use for this image, or a profile object + :param renderingIntent: Integer (0-3) specifying the rendering intent you + wish to use for the transform + + ImageCms.INTENT_PERCEPTUAL = 0 (DEFAULT) + ImageCms.INTENT_RELATIVE_COLORIMETRIC = 1 + ImageCms.INTENT_SATURATION = 2 + ImageCms.INTENT_ABSOLUTE_COLORIMETRIC = 3 + + see the pyCMS documentation for details on rendering intents and what + they do. + :param outputMode: A valid PIL mode for the output image (i.e. 
"RGB", + "CMYK", etc.). Note: if rendering the image "inPlace", outputMode + MUST be the same mode as the input, or omitted completely. If + omitted, the outputMode will be the same as the mode of the input + image (im.mode) + :param inPlace: Boolean (1 = True, None or 0 = False). If True, the + original image is modified in-place, and None is returned. If False + (default), a new Image object is returned with the transform applied. + :param flags: Integer (0-...) specifying additional flags + :returns: Either None or a new PIL image object, depending on value of + inPlace + :exception PyCMSError: + """ + + if outputMode is None: + outputMode = im.mode + + if not isinstance(renderingIntent, int) or not (0 <= renderingIntent <= 3): + raise PyCMSError("renderingIntent must be an integer between 0 and 3") + + if not isinstance(flags, int) or not (0 <= flags <= _MAX_FLAG): + raise PyCMSError( + "flags must be an integer between 0 and %s" + _MAX_FLAG) + + try: + if not isinstance(inputProfile, ImageCmsProfile): + inputProfile = ImageCmsProfile(inputProfile) + if not isinstance(outputProfile, ImageCmsProfile): + outputProfile = ImageCmsProfile(outputProfile) + transform = ImageCmsTransform( + inputProfile, outputProfile, im.mode, outputMode, + renderingIntent, flags=flags + ) + if inPlace: + transform.apply_in_place(im) + imOut = None + else: + imOut = transform.apply(im) + except (IOError, TypeError, ValueError) as v: + raise PyCMSError(v) + + return imOut + + +def getOpenProfile(profileFilename): + """ + (pyCMS) Opens an ICC profile file. + + The PyCMSProfile object can be passed back into pyCMS for use in creating + transforms and such (as in ImageCms.buildTransformFromOpenProfiles()). + + If profileFilename is not a valid filename for an ICC profile, a PyCMSError + will be raised. + + :param profileFilename: String, as a valid filename path to the ICC profile + you wish to open, or a file-like object. + :returns: A CmsProfile class object. 
+ :exception PyCMSError: + """ + + try: + return ImageCmsProfile(profileFilename) + except (IOError, TypeError, ValueError) as v: + raise PyCMSError(v) + + +def buildTransform( + inputProfile, outputProfile, inMode, outMode, + renderingIntent=INTENT_PERCEPTUAL, flags=0): + """ + (pyCMS) Builds an ICC transform mapping from the inputProfile to the + outputProfile. Use applyTransform to apply the transform to a given + image. + + If the input or output profiles specified are not valid filenames, a + PyCMSError will be raised. If an error occurs during creation of the + transform, a PyCMSError will be raised. + + If inMode or outMode are not a mode supported by the outputProfile (or + by pyCMS), a PyCMSError will be raised. + + This function builds and returns an ICC transform from the inputProfile + to the outputProfile using the renderingIntent to determine what to do + with out-of-gamut colors. It will ONLY work for converting images that + are in inMode to images that are in outMode color format (PIL mode, + i.e. "RGB", "RGBA", "CMYK", etc.). + + Building the transform is a fair part of the overhead in + ImageCms.profileToProfile(), so if you're planning on converting multiple + images using the same input/output settings, this can save you time. + Once you have a transform object, it can be used with + ImageCms.applyProfile() to convert images without the need to re-compute + the lookup table for the transform. + + The reason pyCMS returns a class object rather than a handle directly + to the transform is that it needs to keep track of the PIL input/output + modes that the transform is meant for. These attributes are stored in + the "inMode" and "outMode" attributes of the object (which can be + manually overridden if you really want to, but I don't know of any + time that would be of use, or would even work). 
+        raise PyCMSError(
+            "flags must be an integer between 0 and %s" % _MAX_FLAG)
the + outputProfile, but tries to simulate the result that would be + obtained on the proofProfile device. + + If the input, output, or proof profiles specified are not valid + filenames, a PyCMSError will be raised. + + If an error occurs during creation of the transform, a PyCMSError will + be raised. + + If inMode or outMode are not a mode supported by the outputProfile + (or by pyCMS), a PyCMSError will be raised. + + This function builds and returns an ICC transform from the inputProfile + to the outputProfile, but tries to simulate the result that would be + obtained on the proofProfile device using renderingIntent and + proofRenderingIntent to determine what to do with out-of-gamut + colors. This is known as "soft-proofing". It will ONLY work for + converting images that are in inMode to images that are in outMode + color format (PIL mode, i.e. "RGB", "RGBA", "CMYK", etc.). + + Usage of the resulting transform object is exactly the same as with + ImageCms.buildTransform(). + + Proof profiling is generally used when using an output device to get a + good idea of what the final printed/displayed image would look like on + the proofProfile device when it's quicker and easier to use the + output device for judging color. Generally, this means that the + output device is a monitor, or a dye-sub printer (etc.), and the simulated + device is something more expensive, complicated, or time consuming + (making it difficult to make a real print for color judgement purposes). + + Soft-proofing basically functions by adjusting the colors on the + output device to match the colors of the device being simulated. However, + when the simulated device has a much wider gamut than the output + device, you may obtain marginal results. 
+ + :param inputProfile: String, as a valid filename path to the ICC input + profile you wish to use for this transform, or a profile object + :param outputProfile: String, as a valid filename path to the ICC output + (monitor, usually) profile you wish to use for this transform, or a + profile object + :param proofProfile: String, as a valid filename path to the ICC proof + profile you wish to use for this transform, or a profile object + :param inMode: String, as a valid PIL mode that the appropriate profile + also supports (i.e. "RGB", "RGBA", "CMYK", etc.) + :param outMode: String, as a valid PIL mode that the appropriate profile + also supports (i.e. "RGB", "RGBA", "CMYK", etc.) + :param renderingIntent: Integer (0-3) specifying the rendering intent you + wish to use for the input->proof (simulated) transform + + ImageCms.INTENT_PERCEPTUAL = 0 (DEFAULT) + ImageCms.INTENT_RELATIVE_COLORIMETRIC = 1 + ImageCms.INTENT_SATURATION = 2 + ImageCms.INTENT_ABSOLUTE_COLORIMETRIC = 3 + + see the pyCMS documentation for details on rendering intents and what + they do. + :param proofRenderingIntent: Integer (0-3) specifying the rendering intent + you wish to use for proof->output transform + + ImageCms.INTENT_PERCEPTUAL = 0 (DEFAULT) + ImageCms.INTENT_RELATIVE_COLORIMETRIC = 1 + ImageCms.INTENT_SATURATION = 2 + ImageCms.INTENT_ABSOLUTE_COLORIMETRIC = 3 + + see the pyCMS documentation for details on rendering intents and what + they do. + :param flags: Integer (0-...) specifying additional flags + :returns: A CmsTransform class object. 
+        raise PyCMSError(
+            "flags must be an integer between 0 and %s" % _MAX_FLAG)
This can only be done if + transform.inMode and transform.outMode are the same, because we can't + change the mode in-place (the buffer sizes for some modes are + different). The default behavior is to return a new Image object of + the same dimensions in mode transform.outMode. + + :param im: A PIL Image object, and im.mode must be the same as the inMode + supported by the transform. + :param transform: A valid CmsTransform class object + :param inPlace: Bool (1 == True, 0 or None == False). If True, im is + modified in place and None is returned, if False, a new Image object + with the transform applied is returned (and im is not changed). The + default is False. + :returns: Either None, or a new PIL Image object, depending on the value of + inPlace. The profile will be returned in the image's + info['icc_profile']. + :exception PyCMSError: + """ + + try: + if inPlace: + transform.apply_in_place(im) + imOut = None + else: + imOut = transform.apply(im) + except (TypeError, ValueError) as v: + raise PyCMSError(v) + + return imOut + + +def createProfile(colorSpace, colorTemp=-1): + """ + (pyCMS) Creates a profile. + + If colorSpace not in ["LAB", "XYZ", "sRGB"], a PyCMSError is raised + + If using LAB and colorTemp != a positive integer, a PyCMSError is raised. + + If an error occurs while creating the profile, a PyCMSError is raised. + + Use this function to create common profiles on-the-fly instead of + having to supply a profile on disk and knowing the path to it. It + returns a normal CmsProfile object that can be passed to + ImageCms.buildTransformFromOpenProfiles() to create a transform to apply + to images. + + :param colorSpace: String, the color space of the profile you wish to + create. + Currently only "LAB", "XYZ", and "sRGB" are supported. + :param colorTemp: Positive integer for the white point for the profile, in + degrees Kelvin (i.e. 5000, 6500, 9600, etc.). The default is for D50 + illuminant if omitted (5000k). 
+        except (TypeError, ValueError):
+ # // name was "%s - %s" (model, manufacturer) || Description , + # // but if the Model and Manufacturer were the same or the model + # // was long, Just the model, in 1.x + model = profile.profile.product_model + manufacturer = profile.profile.product_manufacturer + + if not (model or manufacturer): + return profile.profile.product_description + "\n" + if not manufacturer or len(model) > 30: + return model + "\n" + return "%s - %s\n" % (model, manufacturer) + + except (AttributeError, IOError, TypeError, ValueError) as v: + raise PyCMSError(v) + + +def getProfileInfo(profile): + """ + (pyCMS) Gets the internal product information for the given profile. + + If profile isn't a valid CmsProfile object or filename to a profile, + a PyCMSError is raised. + + If an error occurs while trying to obtain the info tag, a PyCMSError + is raised + + Use this function to obtain the information stored in the profile's + info tag. This often contains details about the profile, and how it + was created, as supplied by the creator. + + :param profile: EITHER a valid CmsProfile object, OR a string of the + filename of an ICC profile. + :returns: A string containing the internal profile information stored in + an ICC tag. + :exception PyCMSError: + """ + + try: + if not isinstance(profile, ImageCmsProfile): + profile = ImageCmsProfile(profile) + # add an extra newline to preserve pyCMS compatibility + # Python, not C. the white point bits weren't working well, + # so skipping. + # // info was description \r\n\r\n copyright \r\n\r\n K007 tag \r\n\r\n whitepoint + description = profile.profile.product_description + cpright = profile.profile.product_copyright + arr = [] + for elt in (description, cpright): + if elt: + arr.append(elt) + return "\r\n\r\n".join(arr) + "\r\n\r\n" + + except (AttributeError, IOError, TypeError, ValueError) as v: + raise PyCMSError(v) + + +def getProfileCopyright(profile): + """ + (pyCMS) Gets the copyright for the given profile. 
+ + If profile isn't a valid CmsProfile object or filename to a profile, + a PyCMSError is raised. + + If an error occurs while trying to obtain the copyright tag, a PyCMSError + is raised + + Use this function to obtain the information stored in the profile's + copyright tag. + + :param profile: EITHER a valid CmsProfile object, OR a string of the + filename of an ICC profile. + :returns: A string containing the internal profile information stored in + an ICC tag. + :exception PyCMSError: + """ + try: + # add an extra newline to preserve pyCMS compatibility + if not isinstance(profile, ImageCmsProfile): + profile = ImageCmsProfile(profile) + return profile.profile.product_copyright + "\n" + except (AttributeError, IOError, TypeError, ValueError) as v: + raise PyCMSError(v) + + +def getProfileManufacturer(profile): + """ + (pyCMS) Gets the manufacturer for the given profile. + + If profile isn't a valid CmsProfile object or filename to a profile, + a PyCMSError is raised. + + If an error occurs while trying to obtain the manufacturer tag, a + PyCMSError is raised + + Use this function to obtain the information stored in the profile's + manufacturer tag. + + :param profile: EITHER a valid CmsProfile object, OR a string of the + filename of an ICC profile. + :returns: A string containing the internal profile information stored in + an ICC tag. + :exception PyCMSError: + """ + try: + # add an extra newline to preserve pyCMS compatibility + if not isinstance(profile, ImageCmsProfile): + profile = ImageCmsProfile(profile) + return profile.profile.product_manufacturer + "\n" + except (AttributeError, IOError, TypeError, ValueError) as v: + raise PyCMSError(v) + + +def getProfileModel(profile): + """ + (pyCMS) Gets the model for the given profile. + + If profile isn't a valid CmsProfile object or filename to a profile, + a PyCMSError is raised. 
+ + If an error occurs while trying to obtain the model tag, a PyCMSError + is raised + + Use this function to obtain the information stored in the profile's + model tag. + + :param profile: EITHER a valid CmsProfile object, OR a string of the + filename of an ICC profile. + :returns: A string containing the internal profile information stored in + an ICC tag. + :exception PyCMSError: + """ + + try: + # add an extra newline to preserve pyCMS compatibility + if not isinstance(profile, ImageCmsProfile): + profile = ImageCmsProfile(profile) + return profile.profile.product_model + "\n" + except (AttributeError, IOError, TypeError, ValueError) as v: + raise PyCMSError(v) + + +def getProfileDescription(profile): + """ + (pyCMS) Gets the description for the given profile. + + If profile isn't a valid CmsProfile object or filename to a profile, + a PyCMSError is raised. + + If an error occurs while trying to obtain the description tag, a PyCMSError + is raised + + Use this function to obtain the information stored in the profile's + description tag. + + :param profile: EITHER a valid CmsProfile object, OR a string of the + filename of an ICC profile. + :returns: A string containing the internal profile information stored in an + ICC tag. + :exception PyCMSError: + """ + + try: + # add an extra newline to preserve pyCMS compatibility + if not isinstance(profile, ImageCmsProfile): + profile = ImageCmsProfile(profile) + return profile.profile.product_description + "\n" + except (AttributeError, IOError, TypeError, ValueError) as v: + raise PyCMSError(v) + + +def getDefaultIntent(profile): + """ + (pyCMS) Gets the default intent name for the given profile. + + If profile isn't a valid CmsProfile object or filename to a profile, + a PyCMSError is raised. + + If an error occurs while trying to obtain the default intent, a + PyCMSError is raised. + + Use this function to determine the default (and usually best optimized) + rendering intent for this profile. 
+    Some profiles are created specifically for one "direction", and cannot
+ :param intent: Integer (0-3) specifying the rendering intent you wish to + use with this profile + + ImageCms.INTENT_PERCEPTUAL = 0 (DEFAULT) + ImageCms.INTENT_RELATIVE_COLORIMETRIC = 1 + ImageCms.INTENT_SATURATION = 2 + ImageCms.INTENT_ABSOLUTE_COLORIMETRIC = 3 + + see the pyCMS documentation for details on rendering intents and what + they do. + :param direction: Integer specifying if the profile is to be used for + input, output, or proof + + INPUT = 0 (or use ImageCms.DIRECTION_INPUT) + OUTPUT = 1 (or use ImageCms.DIRECTION_OUTPUT) + PROOF = 2 (or use ImageCms.DIRECTION_PROOF) + + :returns: 1 if the intent/direction are supported, -1 if they are not. + :exception PyCMSError: + """ + + try: + if not isinstance(profile, ImageCmsProfile): + profile = ImageCmsProfile(profile) + # FIXME: I get different results for the same data w. different + # compilers. Bug in LittleCMS or in the binding? + if profile.profile.is_intent_supported(intent, direction): + return 1 + else: + return -1 + except (AttributeError, IOError, TypeError, ValueError) as v: + raise PyCMSError(v) + + +def versions(): + """ + (pyCMS) Fetches versions. 
+ """ + + return ( + VERSION, core.littlecms_version, + sys.version.split()[0], Image.VERSION + ) diff --git a/thesisenv/lib/python3.6/site-packages/PIL/ImageColor.py b/thesisenv/lib/python3.6/site-packages/PIL/ImageColor.py new file mode 100644 index 0000000..08c00fd --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/PIL/ImageColor.py @@ -0,0 +1,309 @@ +# +# The Python Imaging Library +# $Id$ +# +# map CSS3-style colour description strings to RGB +# +# History: +# 2002-10-24 fl Added support for CSS-style color strings +# 2002-12-15 fl Added RGBA support +# 2004-03-27 fl Fixed remaining int() problems for Python 1.5.2 +# 2004-07-19 fl Fixed gray/grey spelling issues +# 2009-03-05 fl Fixed rounding error in grayscale calculation +# +# Copyright (c) 2002-2004 by Secret Labs AB +# Copyright (c) 2002-2004 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +from . import Image +import re + + +def getrgb(color): + """ + Convert a color string to an RGB tuple. If the string cannot be parsed, + this function raises a :py:exc:`ValueError` exception. + + .. 
versionadded:: 1.1.4 + + :param color: A color string + :return: ``(red, green, blue[, alpha])`` + """ + color = color.lower() + + rgb = colormap.get(color, None) + if rgb: + if isinstance(rgb, tuple): + return rgb + colormap[color] = rgb = getrgb(rgb) + return rgb + + # check for known string formats + if re.match('#[a-f0-9]{3}$', color): + return ( + int(color[1]*2, 16), + int(color[2]*2, 16), + int(color[3]*2, 16), + ) + + if re.match('#[a-f0-9]{4}$', color): + return ( + int(color[1]*2, 16), + int(color[2]*2, 16), + int(color[3]*2, 16), + int(color[4]*2, 16), + ) + + if re.match('#[a-f0-9]{6}$', color): + return ( + int(color[1:3], 16), + int(color[3:5], 16), + int(color[5:7], 16), + ) + + if re.match('#[a-f0-9]{8}$', color): + return ( + int(color[1:3], 16), + int(color[3:5], 16), + int(color[5:7], 16), + int(color[7:9], 16), + ) + + m = re.match(r"rgb\(\s*(\d+)\s*,\s*(\d+)\s*,\s*(\d+)\s*\)$", color) + if m: + return ( + int(m.group(1)), + int(m.group(2)), + int(m.group(3)) + ) + + m = re.match(r"rgb\(\s*(\d+)%\s*,\s*(\d+)%\s*,\s*(\d+)%\s*\)$", color) + if m: + return ( + int((int(m.group(1)) * 255) / 100.0 + 0.5), + int((int(m.group(2)) * 255) / 100.0 + 0.5), + int((int(m.group(3)) * 255) / 100.0 + 0.5) + ) + + m = re.match(r"hsl\(\s*(\d+\.?\d*)\s*,\s*(\d+\.?\d*)%\s*,\s*(\d+\.?\d*)%\s*\)$", color) + if m: + from colorsys import hls_to_rgb + rgb = hls_to_rgb( + float(m.group(1)) / 360.0, + float(m.group(3)) / 100.0, + float(m.group(2)) / 100.0, + ) + return ( + int(rgb[0] * 255 + 0.5), + int(rgb[1] * 255 + 0.5), + int(rgb[2] * 255 + 0.5) + ) + + m = re.match(r"hs[bv]\(\s*(\d+\.?\d*)\s*,\s*(\d+\.?\d*)%\s*,\s*(\d+\.?\d*)%\s*\)$", color) + if m: + from colorsys import hsv_to_rgb + rgb = hsv_to_rgb( + float(m.group(1)) / 360.0, + float(m.group(2)) / 100.0, + float(m.group(3)) / 100.0, + ) + return ( + int(rgb[0] * 255 + 0.5), + int(rgb[1] * 255 + 0.5), + int(rgb[2] * 255 + 0.5) + ) + + m = re.match(r"rgba\(\s*(\d+)\s*,\s*(\d+)\s*,\s*(\d+)\s*,\s*(\d+)\s*\)$", + 
color) + if m: + return ( + int(m.group(1)), + int(m.group(2)), + int(m.group(3)), + int(m.group(4)) + ) + raise ValueError("unknown color specifier: %r" % color) + + +def getcolor(color, mode): + """ + Same as :py:func:`~PIL.ImageColor.getrgb`, but converts the RGB value to a + greyscale value if the mode is not color or a palette image. If the string + cannot be parsed, this function raises a :py:exc:`ValueError` exception. + + .. versionadded:: 1.1.4 + + :param color: A color string + :return: ``(graylevel [, alpha]) or (red, green, blue[, alpha])`` + """ + # same as getrgb, but converts the result to the given mode + color, alpha = getrgb(color), 255 + if len(color) == 4: + color, alpha = color[0:3], color[3] + + if Image.getmodebase(mode) == "L": + r, g, b = color + color = (r*299 + g*587 + b*114)//1000 + if mode[-1] == 'A': + return (color, alpha) + else: + if mode[-1] == 'A': + return color + (alpha,) + return color + + +colormap = { + # X11 colour table from https://drafts.csswg.org/css-color-4/, with + # gray/grey spelling issues fixed. This is a superset of HTML 4.0 + # colour names used in CSS 1. 
+ "aliceblue": "#f0f8ff", + "antiquewhite": "#faebd7", + "aqua": "#00ffff", + "aquamarine": "#7fffd4", + "azure": "#f0ffff", + "beige": "#f5f5dc", + "bisque": "#ffe4c4", + "black": "#000000", + "blanchedalmond": "#ffebcd", + "blue": "#0000ff", + "blueviolet": "#8a2be2", + "brown": "#a52a2a", + "burlywood": "#deb887", + "cadetblue": "#5f9ea0", + "chartreuse": "#7fff00", + "chocolate": "#d2691e", + "coral": "#ff7f50", + "cornflowerblue": "#6495ed", + "cornsilk": "#fff8dc", + "crimson": "#dc143c", + "cyan": "#00ffff", + "darkblue": "#00008b", + "darkcyan": "#008b8b", + "darkgoldenrod": "#b8860b", + "darkgray": "#a9a9a9", + "darkgrey": "#a9a9a9", + "darkgreen": "#006400", + "darkkhaki": "#bdb76b", + "darkmagenta": "#8b008b", + "darkolivegreen": "#556b2f", + "darkorange": "#ff8c00", + "darkorchid": "#9932cc", + "darkred": "#8b0000", + "darksalmon": "#e9967a", + "darkseagreen": "#8fbc8f", + "darkslateblue": "#483d8b", + "darkslategray": "#2f4f4f", + "darkslategrey": "#2f4f4f", + "darkturquoise": "#00ced1", + "darkviolet": "#9400d3", + "deeppink": "#ff1493", + "deepskyblue": "#00bfff", + "dimgray": "#696969", + "dimgrey": "#696969", + "dodgerblue": "#1e90ff", + "firebrick": "#b22222", + "floralwhite": "#fffaf0", + "forestgreen": "#228b22", + "fuchsia": "#ff00ff", + "gainsboro": "#dcdcdc", + "ghostwhite": "#f8f8ff", + "gold": "#ffd700", + "goldenrod": "#daa520", + "gray": "#808080", + "grey": "#808080", + "green": "#008000", + "greenyellow": "#adff2f", + "honeydew": "#f0fff0", + "hotpink": "#ff69b4", + "indianred": "#cd5c5c", + "indigo": "#4b0082", + "ivory": "#fffff0", + "khaki": "#f0e68c", + "lavender": "#e6e6fa", + "lavenderblush": "#fff0f5", + "lawngreen": "#7cfc00", + "lemonchiffon": "#fffacd", + "lightblue": "#add8e6", + "lightcoral": "#f08080", + "lightcyan": "#e0ffff", + "lightgoldenrodyellow": "#fafad2", + "lightgreen": "#90ee90", + "lightgray": "#d3d3d3", + "lightgrey": "#d3d3d3", + "lightpink": "#ffb6c1", + "lightsalmon": "#ffa07a", + "lightseagreen": "#20b2aa", 
+ "lightskyblue": "#87cefa", + "lightslategray": "#778899", + "lightslategrey": "#778899", + "lightsteelblue": "#b0c4de", + "lightyellow": "#ffffe0", + "lime": "#00ff00", + "limegreen": "#32cd32", + "linen": "#faf0e6", + "magenta": "#ff00ff", + "maroon": "#800000", + "mediumaquamarine": "#66cdaa", + "mediumblue": "#0000cd", + "mediumorchid": "#ba55d3", + "mediumpurple": "#9370db", + "mediumseagreen": "#3cb371", + "mediumslateblue": "#7b68ee", + "mediumspringgreen": "#00fa9a", + "mediumturquoise": "#48d1cc", + "mediumvioletred": "#c71585", + "midnightblue": "#191970", + "mintcream": "#f5fffa", + "mistyrose": "#ffe4e1", + "moccasin": "#ffe4b5", + "navajowhite": "#ffdead", + "navy": "#000080", + "oldlace": "#fdf5e6", + "olive": "#808000", + "olivedrab": "#6b8e23", + "orange": "#ffa500", + "orangered": "#ff4500", + "orchid": "#da70d6", + "palegoldenrod": "#eee8aa", + "palegreen": "#98fb98", + "paleturquoise": "#afeeee", + "palevioletred": "#db7093", + "papayawhip": "#ffefd5", + "peachpuff": "#ffdab9", + "peru": "#cd853f", + "pink": "#ffc0cb", + "plum": "#dda0dd", + "powderblue": "#b0e0e6", + "purple": "#800080", + "rebeccapurple": "#663399", + "red": "#ff0000", + "rosybrown": "#bc8f8f", + "royalblue": "#4169e1", + "saddlebrown": "#8b4513", + "salmon": "#fa8072", + "sandybrown": "#f4a460", + "seagreen": "#2e8b57", + "seashell": "#fff5ee", + "sienna": "#a0522d", + "silver": "#c0c0c0", + "skyblue": "#87ceeb", + "slateblue": "#6a5acd", + "slategray": "#708090", + "slategrey": "#708090", + "snow": "#fffafa", + "springgreen": "#00ff7f", + "steelblue": "#4682b4", + "tan": "#d2b48c", + "teal": "#008080", + "thistle": "#d8bfd8", + "tomato": "#ff6347", + "turquoise": "#40e0d0", + "violet": "#ee82ee", + "wheat": "#f5deb3", + "white": "#ffffff", + "whitesmoke": "#f5f5f5", + "yellow": "#ffff00", + "yellowgreen": "#9acd32", +} diff --git a/thesisenv/lib/python3.6/site-packages/PIL/ImageDraw.py b/thesisenv/lib/python3.6/site-packages/PIL/ImageDraw.py new file mode 100644 index 
0000000..6a70def --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/PIL/ImageDraw.py @@ -0,0 +1,440 @@ +# +# The Python Imaging Library +# $Id$ +# +# drawing interface operations +# +# History: +# 1996-04-13 fl Created (experimental) +# 1996-08-07 fl Filled polygons, ellipses. +# 1996-08-13 fl Added text support +# 1998-06-28 fl Handle I and F images +# 1998-12-29 fl Added arc; use arc primitive to draw ellipses +# 1999-01-10 fl Added shape stuff (experimental) +# 1999-02-06 fl Added bitmap support +# 1999-02-11 fl Changed all primitives to take options +# 1999-02-20 fl Fixed backwards compatibility +# 2000-10-12 fl Copy on write, when necessary +# 2001-02-18 fl Use default ink for bitmap/text also in fill mode +# 2002-10-24 fl Added support for CSS-style color strings +# 2002-12-10 fl Added experimental support for RGBA-on-RGB drawing +# 2002-12-11 fl Refactored low-level drawing API (work in progress) +# 2004-08-26 fl Made Draw() a factory function, added getdraw() support +# 2004-09-04 fl Added width support to line primitive +# 2004-09-10 fl Added font mode handling +# 2006-06-19 fl Added font bearing support (getmask2) +# +# Copyright (c) 1997-2006 by Secret Labs AB +# Copyright (c) 1996-2006 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +import math +import numbers + +from . import Image, ImageColor +from ._util import isStringType + +""" +A simple 2D drawing interface for PIL images. +

+Application code should use the Draw factory, instead of +directly. +""" + + +class ImageDraw(object): + + def __init__(self, im, mode=None): + """ + Create a drawing instance. + + :param im: The image to draw in. + :param mode: Optional mode to use for color values. For RGB + images, this argument can be RGB or RGBA (to blend the + drawing into the image). For all other modes, this argument + must be the same as the image mode. If omitted, the mode + defaults to the mode of the image. + """ + im.load() + if im.readonly: + im._copy() # make it writeable + blend = 0 + if mode is None: + mode = im.mode + if mode != im.mode: + if mode == "RGBA" and im.mode == "RGB": + blend = 1 + else: + raise ValueError("mode mismatch") + if mode == "P": + self.palette = im.palette + else: + self.palette = None + self.im = im.im + self.draw = Image.core.draw(self.im, blend) + self.mode = mode + if mode in ("I", "F"): + self.ink = self.draw.draw_ink(1, mode) + else: + self.ink = self.draw.draw_ink(-1, mode) + if mode in ("1", "P", "I", "F"): + # FIXME: fix Fill2 to properly support matte for I+F images + self.fontmode = "1" + else: + self.fontmode = "L" # aliasing is okay for other modes + self.fill = 0 + self.font = None + + def getfont(self): + """ + Get the current default font. + + :returns: An image font.""" + if not self.font: + # FIXME: should add a font repository + from . 
import ImageFont + self.font = ImageFont.load_default() + return self.font + + def _getink(self, ink, fill=None): + if ink is None and fill is None: + if self.fill: + fill = self.ink + else: + ink = self.ink + else: + if ink is not None: + if isStringType(ink): + ink = ImageColor.getcolor(ink, self.mode) + if self.palette and not isinstance(ink, numbers.Number): + ink = self.palette.getcolor(ink) + ink = self.draw.draw_ink(ink, self.mode) + if fill is not None: + if isStringType(fill): + fill = ImageColor.getcolor(fill, self.mode) + if self.palette and not isinstance(fill, numbers.Number): + fill = self.palette.getcolor(fill) + fill = self.draw.draw_ink(fill, self.mode) + return ink, fill + + def arc(self, xy, start, end, fill=None, width=0): + """Draw an arc.""" + ink, fill = self._getink(fill) + if ink is not None: + self.draw.draw_arc(xy, start, end, ink, width) + + def bitmap(self, xy, bitmap, fill=None): + """Draw a bitmap.""" + bitmap.load() + ink, fill = self._getink(fill) + if ink is None: + ink = fill + if ink is not None: + self.draw.draw_bitmap(xy, bitmap.im, ink) + + def chord(self, xy, start, end, fill=None, outline=None, width=0): + """Draw a chord.""" + ink, fill = self._getink(outline, fill) + if fill is not None: + self.draw.draw_chord(xy, start, end, fill, 1) + if ink is not None and ink != fill: + self.draw.draw_chord(xy, start, end, ink, 0, width) + + def ellipse(self, xy, fill=None, outline=None, width=0): + """Draw an ellipse.""" + ink, fill = self._getink(outline, fill) + if fill is not None: + self.draw.draw_ellipse(xy, fill, 1) + if ink is not None and ink != fill: + self.draw.draw_ellipse(xy, ink, 0, width) + + def line(self, xy, fill=None, width=0, joint=None): + """Draw a line, or a connected sequence of line segments.""" + ink = self._getink(fill)[0] + if ink is not None: + self.draw.draw_lines(xy, ink, width) + if joint == "curve" and width > 4: + for i in range(1, len(xy)-1): + point = xy[i] + angles = [ + math.degrees(math.atan2( + 
end[0] - start[0], start[1] - end[1] + )) % 360 + for start, end in ((xy[i-1], point), (point, xy[i+1])) + ] + if angles[0] == angles[1]: + # This is a straight line, so no joint is required + continue + + def coord_at_angle(coord, angle): + x, y = coord + angle -= 90 + distance = width/2 - 1 + return tuple([ + p + + (math.floor(p_d) if p_d > 0 else math.ceil(p_d)) + for p, p_d in + ((x, distance * math.cos(math.radians(angle))), + (y, distance * math.sin(math.radians(angle)))) + ]) + flipped = ((angles[1] > angles[0] and + angles[1] - 180 > angles[0]) or + (angles[1] < angles[0] and + angles[1] + 180 > angles[0])) + coords = [ + (point[0] - width/2 + 1, point[1] - width/2 + 1), + (point[0] + width/2 - 1, point[1] + width/2 - 1) + ] + if flipped: + start, end = (angles[1] + 90, angles[0] + 90) + else: + start, end = (angles[0] - 90, angles[1] - 90) + self.pieslice(coords, start - 90, end - 90, fill) + + if width > 8: + # Cover potential gaps between the line and the joint + if flipped: + gapCoords = [ + coord_at_angle(point, angles[0]+90), + point, + coord_at_angle(point, angles[1]+90) + ] + else: + gapCoords = [ + coord_at_angle(point, angles[0]-90), + point, + coord_at_angle(point, angles[1]-90) + ] + self.line(gapCoords, fill, width=3) + + def shape(self, shape, fill=None, outline=None): + """(Experimental) Draw a shape.""" + shape.close() + ink, fill = self._getink(outline, fill) + if fill is not None: + self.draw.draw_outline(shape, fill, 1) + if ink is not None and ink != fill: + self.draw.draw_outline(shape, ink, 0) + + def pieslice(self, xy, start, end, fill=None, outline=None, width=0): + """Draw a pieslice.""" + ink, fill = self._getink(outline, fill) + if fill is not None: + self.draw.draw_pieslice(xy, start, end, fill, 1) + if ink is not None and ink != fill: + self.draw.draw_pieslice(xy, start, end, ink, 0, width) + + def point(self, xy, fill=None): + """Draw one or more individual pixels.""" + ink, fill = self._getink(fill) + if ink is not None: + 
self.draw.draw_points(xy, ink) + + def polygon(self, xy, fill=None, outline=None): + """Draw a polygon.""" + ink, fill = self._getink(outline, fill) + if fill is not None: + self.draw.draw_polygon(xy, fill, 1) + if ink is not None and ink != fill: + self.draw.draw_polygon(xy, ink, 0) + + def rectangle(self, xy, fill=None, outline=None, width=0): + """Draw a rectangle.""" + ink, fill = self._getink(outline, fill) + if fill is not None: + self.draw.draw_rectangle(xy, fill, 1) + if ink is not None and ink != fill: + self.draw.draw_rectangle(xy, ink, 0, width) + + def _multiline_check(self, text): + """Draw text.""" + split_character = "\n" if isinstance(text, str) else b"\n" + + return split_character in text + + def _multiline_split(self, text): + split_character = "\n" if isinstance(text, str) else b"\n" + + return text.split(split_character) + + def text(self, xy, text, fill=None, font=None, anchor=None, + *args, **kwargs): + if self._multiline_check(text): + return self.multiline_text(xy, text, fill, font, anchor, + *args, **kwargs) + ink, fill = self._getink(fill) + if font is None: + font = self.getfont() + if ink is None: + ink = fill + if ink is not None: + try: + mask, offset = font.getmask2(text, self.fontmode, + *args, **kwargs) + xy = xy[0] + offset[0], xy[1] + offset[1] + except AttributeError: + try: + mask = font.getmask(text, self.fontmode, *args, **kwargs) + except TypeError: + mask = font.getmask(text) + self.draw.draw_bitmap(xy, mask, ink) + + def multiline_text(self, xy, text, fill=None, font=None, anchor=None, + spacing=4, align="left", direction=None, features=None): + widths = [] + max_width = 0 + lines = self._multiline_split(text) + line_spacing = self.textsize('A', font=font)[1] + spacing + for line in lines: + line_width, line_height = self.textsize(line, font) + widths.append(line_width) + max_width = max(max_width, line_width) + left, top = xy + for idx, line in enumerate(lines): + if align == "left": + pass # left = x + elif align == 
"center": + left += (max_width - widths[idx]) / 2.0 + elif align == "right": + left += (max_width - widths[idx]) + else: + raise ValueError('align must be "left", "center" or "right"') + self.text((left, top), line, fill, font, anchor, + direction=direction, features=features) + top += line_spacing + left = xy[0] + + def textsize(self, text, font=None, spacing=4, direction=None, + features=None): + """Get the size of a given string, in pixels.""" + if self._multiline_check(text): + return self.multiline_textsize(text, font, spacing, + direction, features) + + if font is None: + font = self.getfont() + return font.getsize(text, direction, features) + + def multiline_textsize(self, text, font=None, spacing=4, direction=None, + features=None): + max_width = 0 + lines = self._multiline_split(text) + line_spacing = self.textsize('A', font=font)[1] + spacing + for line in lines: + line_width, line_height = self.textsize(line, font, spacing, + direction, features) + max_width = max(max_width, line_width) + return max_width, len(lines)*line_spacing - spacing + + +def Draw(im, mode=None): + """ + A simple 2D drawing interface for PIL images. + + :param im: The image to draw in. + :param mode: Optional mode to use for color values. For RGB + images, this argument can be RGB or RGBA (to blend the + drawing into the image). For all other modes, this argument + must be the same as the image mode. If omitted, the mode + defaults to the mode of the image. + """ + try: + return im.getdraw(mode) + except AttributeError: + return ImageDraw(im, mode) + + +# experimental access to the outline API +try: + Outline = Image.core.outline +except AttributeError: + Outline = None + + +def getdraw(im=None, hints=None): + """ + (Experimental) A more advanced 2D drawing interface for PIL images, + based on the WCK interface. + + :param im: The image to draw in. + :param hints: An optional list of hints. + :returns: A (drawing context, drawing resource factory) tuple. 
+ """ + # FIXME: this needs more work! + # FIXME: come up with a better 'hints' scheme. + handler = None + if not hints or "nicest" in hints: + try: + from . import _imagingagg as handler + except ImportError: + pass + if handler is None: + from . import ImageDraw2 as handler + if im: + im = handler.Draw(im) + return im, handler + + +def floodfill(image, xy, value, border=None, thresh=0): + """ + (experimental) Fills a bounded region with a given color. + + :param image: Target image. + :param xy: Seed position (a 2-item coordinate tuple). See + :ref:`coordinate-system`. + :param value: Fill color. + :param border: Optional border value. If given, the region consists of + pixels with a color different from the border color. If not given, + the region consists of pixels having the same color as the seed + pixel. + :param thresh: Optional threshold value which specifies a maximum + tolerable difference of a pixel value from the 'background' in + order for it to be replaced. Useful for filling regions of non- + homogeneous, but similar, colors. + """ + # based on an implementation by Eric S. 
Raymond + # amended by yo1995 @20180806 + pixel = image.load() + x, y = xy + try: + background = pixel[x, y] + if _color_diff(value, background) <= thresh: + return # seed point already has fill color + pixel[x, y] = value + except (ValueError, IndexError): + return # seed point outside image + edge = {(x, y)} + full_edge = set() # use a set to keep record of current and previous edge pixels to reduce memory consumption + while edge: + new_edge = set() + for (x, y) in edge: # 4 adjacent method + for (s, t) in ((x+1, y), (x-1, y), (x, y+1), (x, y-1)): + if (s, t) in full_edge: + continue # if already processed, skip + try: + p = pixel[s, t] + except (ValueError, IndexError): + pass + else: + full_edge.add((s, t)) + if border is None: + fill = _color_diff(p, background) <= thresh + else: + fill = p != value and p != border + if fill: + pixel[s, t] = value + new_edge.add((s, t)) + full_edge = edge # discard pixels processed + edge = new_edge + + +def _color_diff(color1, color2): + """ + Uses 1-norm distance to calculate difference between two values. + """ + if isinstance(color2, tuple): + return sum([abs(color1[i]-color2[i]) for i in range(0, len(color2))]) + else: + return abs(color1-color2) diff --git a/thesisenv/lib/python3.6/site-packages/PIL/ImageDraw2.py b/thesisenv/lib/python3.6/site-packages/PIL/ImageDraw2.py new file mode 100644 index 0000000..f7902b0 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/PIL/ImageDraw2.py @@ -0,0 +1,108 @@ +# +# The Python Imaging Library +# $Id$ +# +# WCK-style drawing interface operations +# +# History: +# 2003-12-07 fl created +# 2005-05-15 fl updated; added to PIL as ImageDraw2 +# 2005-05-15 fl added text support +# 2005-05-20 fl added arc/chord/pieslice support +# +# Copyright (c) 2003-2005 by Secret Labs AB +# Copyright (c) 2003-2005 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +from . 
import Image, ImageColor, ImageDraw, ImageFont, ImagePath + + +class Pen(object): + def __init__(self, color, width=1, opacity=255): + self.color = ImageColor.getrgb(color) + self.width = width + + +class Brush(object): + def __init__(self, color, opacity=255): + self.color = ImageColor.getrgb(color) + + +class Font(object): + def __init__(self, color, file, size=12): + # FIXME: add support for bitmap fonts + self.color = ImageColor.getrgb(color) + self.font = ImageFont.truetype(file, size) + + +class Draw(object): + + def __init__(self, image, size=None, color=None): + if not hasattr(image, "im"): + image = Image.new(image, size, color) + self.draw = ImageDraw.Draw(image) + self.image = image + self.transform = None + + def flush(self): + return self.image + + def render(self, op, xy, pen, brush=None): + # handle color arguments + outline = fill = None + width = 1 + if isinstance(pen, Pen): + outline = pen.color + width = pen.width + elif isinstance(brush, Pen): + outline = brush.color + width = brush.width + if isinstance(brush, Brush): + fill = brush.color + elif isinstance(pen, Brush): + fill = pen.color + # handle transformation + if self.transform: + xy = ImagePath.Path(xy) + xy.transform(self.transform) + # render the item + if op == "line": + self.draw.line(xy, fill=outline, width=width) + else: + getattr(self.draw, op)(xy, fill=fill, outline=outline) + + def settransform(self, offset): + (xoffset, yoffset) = offset + self.transform = (1, 0, xoffset, 0, 1, yoffset) + + def arc(self, xy, start, end, *options): + self.render("arc", xy, start, end, *options) + + def chord(self, xy, start, end, *options): + self.render("chord", xy, start, end, *options) + + def ellipse(self, xy, *options): + self.render("ellipse", xy, *options) + + def line(self, xy, *options): + self.render("line", xy, *options) + + def pieslice(self, xy, start, end, *options): + self.render("pieslice", xy, start, end, *options) + + def polygon(self, xy, *options): + self.render("polygon", xy, 
*options) + + def rectangle(self, xy, *options): + self.render("rectangle", xy, *options) + + def text(self, xy, text, font): + if self.transform: + xy = ImagePath.Path(xy) + xy.transform(self.transform) + self.draw.text(xy, text, font=font.font, fill=font.color) + + def textsize(self, text, font): + return self.draw.textsize(text, font=font.font) diff --git a/thesisenv/lib/python3.6/site-packages/PIL/ImageEnhance.py b/thesisenv/lib/python3.6/site-packages/PIL/ImageEnhance.py new file mode 100644 index 0000000..1b78bfd --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/PIL/ImageEnhance.py @@ -0,0 +1,101 @@ +# +# The Python Imaging Library. +# $Id$ +# +# image enhancement classes +# +# For a background, see "Image Processing By Interpolation and +# Extrapolation", Paul Haeberli and Douglas Voorhies. Available +# at http://www.graficaobscura.com/interp/index.html +# +# History: +# 1996-03-23 fl Created +# 2009-06-16 fl Fixed mean calculation +# +# Copyright (c) Secret Labs AB 1997. +# Copyright (c) Fredrik Lundh 1996. +# +# See the README file for information on usage and redistribution. +# + +from . import Image, ImageFilter, ImageStat + + +class _Enhance(object): + + def enhance(self, factor): + """ + Returns an enhanced image. + + :param factor: A floating point value controlling the enhancement. + Factor 1.0 always returns a copy of the original image, + lower factors mean less color (brightness, contrast, + etc), and higher values more. There are no restrictions + on this value. + :rtype: :py:class:`~PIL.Image.Image` + """ + return Image.blend(self.degenerate, self.image, factor) + + +class Color(_Enhance): + """Adjust image color balance. + + This class can be used to adjust the colour balance of an image, in + a manner similar to the controls on a colour TV set. An enhancement + factor of 0.0 gives a black and white image. A factor of 1.0 gives + the original image. 
+ """ + def __init__(self, image): + self.image = image + self.intermediate_mode = 'L' + if 'A' in image.getbands(): + self.intermediate_mode = 'LA' + + self.degenerate = image.convert( + self.intermediate_mode).convert(image.mode) + + +class Contrast(_Enhance): + """Adjust image contrast. + + This class can be used to control the contrast of an image, similar + to the contrast control on a TV set. An enhancement factor of 0.0 + gives a solid grey image. A factor of 1.0 gives the original image. + """ + def __init__(self, image): + self.image = image + mean = int(ImageStat.Stat(image.convert("L")).mean[0] + 0.5) + self.degenerate = Image.new("L", image.size, mean).convert(image.mode) + + if 'A' in image.getbands(): + self.degenerate.putalpha(image.getchannel('A')) + + +class Brightness(_Enhance): + """Adjust image brightness. + + This class can be used to control the brightness of an image. An + enhancement factor of 0.0 gives a black image. A factor of 1.0 gives the + original image. + """ + def __init__(self, image): + self.image = image + self.degenerate = Image.new(image.mode, image.size, 0) + + if 'A' in image.getbands(): + self.degenerate.putalpha(image.getchannel('A')) + + +class Sharpness(_Enhance): + """Adjust image sharpness. + + This class can be used to adjust the sharpness of an image. An + enhancement factor of 0.0 gives a blurred image, a factor of 1.0 gives the + original image, and a factor of 2.0 gives a sharpened image. + """ + def __init__(self, image): + self.image = image + self.degenerate = image.filter(ImageFilter.SMOOTH) + + if 'A' in image.getbands(): + self.degenerate.putalpha(image.getchannel('A')) diff --git a/thesisenv/lib/python3.6/site-packages/PIL/ImageFile.py b/thesisenv/lib/python3.6/site-packages/PIL/ImageFile.py new file mode 100644 index 0000000..915557a --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/PIL/ImageFile.py @@ -0,0 +1,673 @@ +# +# The Python Imaging Library. 
+# $Id$ +# +# base class for image file handlers +# +# history: +# 1995-09-09 fl Created +# 1996-03-11 fl Fixed load mechanism. +# 1996-04-15 fl Added pcx/xbm decoders. +# 1996-04-30 fl Added encoders. +# 1996-12-14 fl Added load helpers +# 1997-01-11 fl Use encode_to_file where possible +# 1997-08-27 fl Flush output in _save +# 1998-03-05 fl Use memory mapping for some modes +# 1999-02-04 fl Use memory mapping also for "I;16" and "I;16B" +# 1999-05-31 fl Added image parser +# 2000-10-12 fl Set readonly flag on memory-mapped images +# 2002-03-20 fl Use better messages for common decoder errors +# 2003-04-21 fl Fall back on mmap/map_buffer if map is not available +# 2003-10-30 fl Added StubImageFile class +# 2004-02-25 fl Made incremental parser more robust +# +# Copyright (c) 1997-2004 by Secret Labs AB +# Copyright (c) 1995-2004 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +from . import Image +from ._util import isPath +import io +import sys +import struct + +MAXBLOCK = 65536 + +SAFEBLOCK = 1024*1024 + +LOAD_TRUNCATED_IMAGES = False + +ERRORS = { + -1: "image buffer overrun error", + -2: "decoding error", + -3: "unknown error", + -8: "bad configuration", + -9: "out of memory error" +} + + +def raise_ioerror(error): + try: + message = Image.core.getcodecstatus(error) + except AttributeError: + message = ERRORS.get(error) + if not message: + message = "decoder error %d" % error + raise IOError(message + " when reading image file") + + +# +# -------------------------------------------------------------------- +# Helpers + +def _tilesort(t): + # sort on offset + return t[2] + + +# +# -------------------------------------------------------------------- +# ImageFile base class + +class ImageFile(Image.Image): + "Base class for image file format handlers." 
+ + def __init__(self, fp=None, filename=None): + Image.Image.__init__(self) + + self._min_frame = 0 + + self.tile = None + self.readonly = 1 # until we know better + + self.decoderconfig = () + self.decodermaxblock = MAXBLOCK + + if isPath(fp): + # filename + self.fp = open(fp, "rb") + self.filename = fp + self._exclusive_fp = True + else: + # stream + self.fp = fp + self.filename = filename + # can be overridden + self._exclusive_fp = None + + try: + self._open() + except (IndexError, # end of data + TypeError, # end of data (ord) + KeyError, # unsupported mode + EOFError, # got header but not the first frame + struct.error) as v: + # close the file only if we have opened it this constructor + if self._exclusive_fp: + self.fp.close() + raise SyntaxError(v) + + if not self.mode or self.size[0] <= 0: + raise SyntaxError("not identified by this driver") + + def draft(self, mode, size): + "Set draft mode" + + pass + + def get_format_mimetype(self): + if self.format is None: + return + return Image.MIME.get(self.format.upper()) + + def verify(self): + "Check file integrity" + + # raise exception if something's wrong. must be called + # directly after open, and closes file when finished. + if self._exclusive_fp: + self.fp.close() + self.fp = None + + def load(self): + "Load image data based on tile list" + + pixel = Image.Image.load(self) + + if self.tile is None: + raise IOError("cannot load this image") + if not self.tile: + return pixel + + self.map = None + use_mmap = self.filename and len(self.tile) == 1 + # As of pypy 2.1.0, memory mapping was failing here. 
+ use_mmap = use_mmap and not hasattr(sys, 'pypy_version_info') + + readonly = 0 + + # look for read/seek overrides + try: + read = self.load_read + # don't use mmap if there are custom read/seek functions + use_mmap = False + except AttributeError: + read = self.fp.read + + try: + seek = self.load_seek + use_mmap = False + except AttributeError: + seek = self.fp.seek + + if use_mmap: + # try memory mapping + decoder_name, extents, offset, args = self.tile[0] + if decoder_name == "raw" and len(args) >= 3 and \ + args[0] == self.mode and \ + args[0] in Image._MAPMODES: + try: + if hasattr(Image.core, "map"): + # use built-in mapper WIN32 only + self.map = Image.core.map(self.filename) + self.map.seek(offset) + self.im = self.map.readimage( + self.mode, self.size, args[1], args[2] + ) + else: + # use mmap, if possible + import mmap + with open(self.filename, "r") as fp: + self.map = mmap.mmap(fp.fileno(), 0, + access=mmap.ACCESS_READ) + self.im = Image.core.map_buffer( + self.map, self.size, decoder_name, extents, + offset, args) + readonly = 1 + # After trashing self.im, + # we might need to reload the palette data. + if self.palette: + self.palette.dirty = 1 + except (AttributeError, EnvironmentError, ImportError): + self.map = None + + self.load_prepare() + err_code = -3 # initialize to unknown error + if not self.map: + # sort tiles in file order + self.tile.sort(key=_tilesort) + + try: + # FIXME: This is a hack to handle TIFF's JpegTables tag. 
+ prefix = self.tile_prefix + except AttributeError: + prefix = b"" + + for decoder_name, extents, offset, args in self.tile: + decoder = Image._getdecoder(self.mode, decoder_name, + args, self.decoderconfig) + try: + seek(offset) + decoder.setimage(self.im, extents) + if decoder.pulls_fd: + decoder.setfd(self.fp) + status, err_code = decoder.decode(b"") + else: + b = prefix + while True: + try: + s = read(self.decodermaxblock) + except (IndexError, struct.error): + # truncated png/gif + if LOAD_TRUNCATED_IMAGES: + break + else: + raise IOError("image file is truncated") + + if not s: # truncated jpeg + if LOAD_TRUNCATED_IMAGES: + break + else: + self.tile = [] + raise IOError("image file is truncated " + "(%d bytes not processed)" % + len(b)) + + b = b + s + n, err_code = decoder.decode(b) + if n < 0: + break + b = b[n:] + finally: + # Need to cleanup here to prevent leaks + decoder.cleanup() + + self.tile = [] + self.readonly = readonly + + self.load_end() + + if self._exclusive_fp and self._close_exclusive_fp_after_loading: + self.fp.close() + self.fp = None + + if not self.map and not LOAD_TRUNCATED_IMAGES and err_code < 0: + # still raised if decoder fails to return anything + raise_ioerror(err_code) + + return Image.Image.load(self) + + def load_prepare(self): + # create image memory if necessary + if not self.im or\ + self.im.mode != self.mode or self.im.size != self.size: + self.im = Image.core.new(self.mode, self.size) + # create palette (optional) + if self.mode == "P": + Image.Image.load(self) + + def load_end(self): + # may be overridden + pass + + # may be defined for contained formats + # def load_seek(self, pos): + # pass + + # may be defined for blocked formats (e.g. 
PNG) + # def load_read(self, bytes): + # pass + + def _seek_check(self, frame): + if (frame < self._min_frame or + # Only check upper limit on frames if additional seek operations + # are not required to do so + (not (hasattr(self, "_n_frames") and self._n_frames is None) and + frame >= self.n_frames+self._min_frame)): + raise EOFError("attempt to seek outside sequence") + + return self.tell() != frame + + +class StubImageFile(ImageFile): + """ + Base class for stub image loaders. + + A stub loader is an image loader that can identify files of a + certain format, but relies on external code to load the file. + """ + + def _open(self): + raise NotImplementedError( + "StubImageFile subclass must implement _open" + ) + + def load(self): + loader = self._load() + if loader is None: + raise IOError("cannot find loader for this %s file" % self.format) + image = loader.load(self) + assert image is not None + # become the other object (!) + self.__class__ = image.__class__ + self.__dict__ = image.__dict__ + + def _load(self): + "(Hook) Find actual image loader." + raise NotImplementedError( + "StubImageFile subclass must implement _load" + ) + + +class Parser(object): + """ + Incremental image parser. This class implements the standard + feed/close consumer interface. + """ + incremental = None + image = None + data = None + decoder = None + offset = 0 + finished = 0 + + def reset(self): + """ + (Consumer) Reset the parser. Note that you can only call this + method immediately after you've created a parser; parser + instances cannot be reused. + """ + assert self.data is None, "cannot reuse parsers" + + def feed(self, data): + """ + (Consumer) Feed data to the parser. + + :param data: A string buffer. + :exception IOError: If the parser failed to parse the image file. 
+ """ + # collect data + + if self.finished: + return + + if self.data is None: + self.data = data + else: + self.data = self.data + data + + # parse what we have + if self.decoder: + + if self.offset > 0: + # skip header + skip = min(len(self.data), self.offset) + self.data = self.data[skip:] + self.offset = self.offset - skip + if self.offset > 0 or not self.data: + return + + n, e = self.decoder.decode(self.data) + + if n < 0: + # end of stream + self.data = None + self.finished = 1 + if e < 0: + # decoding error + self.image = None + raise_ioerror(e) + else: + # end of image + return + self.data = self.data[n:] + + elif self.image: + + # if we end up here with no decoder, this file cannot + # be incrementally parsed. wait until we've gotten all + # available data + pass + + else: + + # attempt to open this file + try: + with io.BytesIO(self.data) as fp: + im = Image.open(fp) + except IOError: + # traceback.print_exc() + pass # not enough data + else: + flag = hasattr(im, "load_seek") or hasattr(im, "load_read") + if flag or len(im.tile) != 1: + # custom load code, or multiple tiles + self.decode = None + else: + # initialize decoder + im.load_prepare() + d, e, o, a = im.tile[0] + im.tile = [] + self.decoder = Image._getdecoder( + im.mode, d, a, im.decoderconfig + ) + self.decoder.setimage(im.im, e) + + # calculate decoder offset + self.offset = o + if self.offset <= len(self.data): + self.data = self.data[self.offset:] + self.offset = 0 + + self.image = im + + def __enter__(self): + return self + + def __exit__(self, *args): + self.close() + + def close(self): + """ + (Consumer) Close the stream. + + :returns: An image object. + :exception IOError: If the parser failed to parse the image file either + because it cannot be identified or cannot be + decoded. 
+ """ + # finish decoding + if self.decoder: + # get rid of what's left in the buffers + self.feed(b"") + self.data = self.decoder = None + if not self.finished: + raise IOError("image was incomplete") + if not self.image: + raise IOError("cannot parse this image") + if self.data: + # incremental parsing not possible; reopen the file + # not that we have all data + with io.BytesIO(self.data) as fp: + try: + self.image = Image.open(fp) + finally: + self.image.load() + return self.image + + +# -------------------------------------------------------------------- + +def _save(im, fp, tile, bufsize=0): + """Helper to save image based on tile list + + :param im: Image object. + :param fp: File object. + :param tile: Tile list. + :param bufsize: Optional buffer size + """ + + im.load() + if not hasattr(im, "encoderconfig"): + im.encoderconfig = () + tile.sort(key=_tilesort) + # FIXME: make MAXBLOCK a configuration parameter + # It would be great if we could have the encoder specify what it needs + # But, it would need at least the image size in most cases. RawEncode is + # a tricky case. 
+ bufsize = max(MAXBLOCK, bufsize, im.size[0] * 4) # see RawEncode.c + if fp == sys.stdout: + fp.flush() + return + try: + fh = fp.fileno() + fp.flush() + except (AttributeError, io.UnsupportedOperation): + # compress to Python file-compatible object + for e, b, o, a in tile: + e = Image._getencoder(im.mode, e, a, im.encoderconfig) + if o > 0: + fp.seek(o, 0) + e.setimage(im.im, b) + if e.pushes_fd: + e.setfd(fp) + l, s = e.encode_to_pyfd() + else: + while True: + l, s, d = e.encode(bufsize) + fp.write(d) + if s: + break + if s < 0: + raise IOError("encoder error %d when writing image file" % s) + e.cleanup() + else: + # slight speedup: compress to real file object + for e, b, o, a in tile: + e = Image._getencoder(im.mode, e, a, im.encoderconfig) + if o > 0: + fp.seek(o, 0) + e.setimage(im.im, b) + if e.pushes_fd: + e.setfd(fp) + l, s = e.encode_to_pyfd() + else: + s = e.encode_to_file(fh, bufsize) + if s < 0: + raise IOError("encoder error %d when writing image file" % s) + e.cleanup() + if hasattr(fp, "flush"): + fp.flush() + + +def _safe_read(fp, size): + """ + Reads large blocks in a safe way. Unlike fp.read(n), this function + doesn't trust the user. If the requested size is larger than + SAFEBLOCK, the file is read block by block. + + :param fp: File handle. Must implement a read method. + :param size: Number of bytes to read. + :returns: A string containing up to size bytes of data. + """ + if size <= 0: + return b"" + if size <= SAFEBLOCK: + return fp.read(size) + data = [] + while size > 0: + block = fp.read(min(size, SAFEBLOCK)) + if not block: + break + data.append(block) + size -= len(block) + return b"".join(data) + + +class PyCodecState(object): + def __init__(self): + self.xsize = 0 + self.ysize = 0 + self.xoff = 0 + self.yoff = 0 + + def extents(self): + return (self.xoff, self.yoff, + self.xoff+self.xsize, self.yoff+self.ysize) + + +class PyDecoder(object): + """ + Python implementation of a format decoder. 
Override this class and + add the decoding logic in the `decode` method. + + See :ref:`Writing Your Own File Decoder in Python` + """ + + _pulls_fd = False + + def __init__(self, mode, *args): + self.im = None + self.state = PyCodecState() + self.fd = None + self.mode = mode + self.init(args) + + def init(self, args): + """ + Override to perform decoder specific initialization + + :param args: Array of args items from the tile entry + :returns: None + """ + self.args = args + + @property + def pulls_fd(self): + return self._pulls_fd + + def decode(self, buffer): + """ + Override to perform the decoding process. + + :param buffer: A bytes object with the data to be decoded. + If `handles_eof` is set, then `buffer` will be empty and `self.fd` + will be set. + :returns: A tuple of (bytes consumed, errcode). + If finished with decoding return <0 for the bytes consumed. + Err codes are from `ERRORS` + """ + raise NotImplementedError() + + def cleanup(self): + """ + Override to perform decoder specific cleanup + + :returns: None + """ + pass + + def setfd(self, fd): + """ + Called from ImageFile to set the python file-like object + + :param fd: A python file-like object + :returns: None + """ + self.fd = fd + + def setimage(self, im, extents=None): + """ + Called from ImageFile to set the core output image for the decoder + + :param im: A core image object + :param extents: a 4 tuple of (x0, y0, x1, y1) defining the rectangle + for this tile + :returns: None + """ + + # following c code + self.im = im + + if extents: + (x0, y0, x1, y1) = extents + else: + (x0, y0, x1, y1) = (0, 0, 0, 0) + + if x0 == 0 and x1 == 0: + self.state.xsize, self.state.ysize = self.im.size + else: + self.state.xoff = x0 + self.state.yoff = y0 + self.state.xsize = x1 - x0 + self.state.ysize = y1 - y0 + + if self.state.xsize <= 0 or self.state.ysize <= 0: + raise ValueError("Size cannot be negative") + + if (self.state.xsize + self.state.xoff > self.im.size[0] or + self.state.ysize + 
self.state.yoff > self.im.size[1]): + raise ValueError("Tile cannot extend outside image") + + def set_as_raw(self, data, rawmode=None): + """ + Convenience method to set the internal image from a stream of raw data + + :param data: Bytes to be set + :param rawmode: The rawmode to be used for the decoder. + If not specified, it will default to the mode of the image + :returns: None + """ + + if not rawmode: + rawmode = self.mode + d = Image._getdecoder(self.mode, 'raw', (rawmode)) + d.setimage(self.im, self.state.extents()) + s = d.decode(data) + + if s[0] >= 0: + raise ValueError("not enough image data") + if s[1] != 0: + raise ValueError("cannot decode image data") diff --git a/thesisenv/lib/python3.6/site-packages/PIL/ImageFilter.py b/thesisenv/lib/python3.6/site-packages/PIL/ImageFilter.py new file mode 100644 index 0000000..de99e64 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/PIL/ImageFilter.py @@ -0,0 +1,483 @@ +# +# The Python Imaging Library. +# $Id$ +# +# standard filters +# +# History: +# 1995-11-27 fl Created +# 2002-06-08 fl Added rank and mode filters +# 2003-09-15 fl Fixed rank calculation in rank filter; added expand call +# +# Copyright (c) 1997-2003 by Secret Labs AB. +# Copyright (c) 1995-2002 by Fredrik Lundh. +# +# See the README file for information on usage and redistribution. +# + +from __future__ import division + +import functools + +try: + import numpy +except ImportError: # pragma: no cover + numpy = None + + +class Filter(object): + pass + + +class MultibandFilter(Filter): + pass + + +class BuiltinFilter(MultibandFilter): + def filter(self, image): + if image.mode == "P": + raise ValueError("cannot filter palette images") + return image.filter(*self.filterargs) + + +class Kernel(BuiltinFilter): + """ + Create a convolution kernel. The current version only + supports 3x3 and 5x5 integer and floating point kernels. + + In the current version, kernels can only be applied to + "L" and "RGB" images. 
+ + :param size: Kernel size, given as (width, height). In the current + version, this must be (3,3) or (5,5). + :param kernel: A sequence containing kernel weights. + :param scale: Scale factor. If given, the result for each pixel is + divided by this value. the default is the sum of the + kernel weights. + :param offset: Offset. If given, this value is added to the result, + after it has been divided by the scale factor. + """ + name = "Kernel" + + def __init__(self, size, kernel, scale=None, offset=0): + if scale is None: + # default scale is sum of kernel + scale = functools.reduce(lambda a, b: a+b, kernel) + if size[0] * size[1] != len(kernel): + raise ValueError("not enough coefficients in kernel") + self.filterargs = size, scale, offset, kernel + + +class RankFilter(Filter): + """ + Create a rank filter. The rank filter sorts all pixels in + a window of the given size, and returns the **rank**'th value. + + :param size: The kernel size, in pixels. + :param rank: What pixel value to pick. Use 0 for a min filter, + ``size * size / 2`` for a median filter, ``size * size - 1`` + for a max filter, etc. + """ + name = "Rank" + + def __init__(self, size, rank): + self.size = size + self.rank = rank + + def filter(self, image): + if image.mode == "P": + raise ValueError("cannot filter palette images") + image = image.expand(self.size//2, self.size//2) + return image.rankfilter(self.size, self.rank) + + +class MedianFilter(RankFilter): + """ + Create a median filter. Picks the median pixel value in a window with the + given size. + + :param size: The kernel size, in pixels. + """ + name = "Median" + + def __init__(self, size=3): + self.size = size + self.rank = size*size//2 + + +class MinFilter(RankFilter): + """ + Create a min filter. Picks the lowest pixel value in a window with the + given size. + + :param size: The kernel size, in pixels. 
+ """ + name = "Min" + + def __init__(self, size=3): + self.size = size + self.rank = 0 + + +class MaxFilter(RankFilter): + """ + Create a max filter. Picks the largest pixel value in a window with the + given size. + + :param size: The kernel size, in pixels. + """ + name = "Max" + + def __init__(self, size=3): + self.size = size + self.rank = size*size-1 + + +class ModeFilter(Filter): + """ + Create a mode filter. Picks the most frequent pixel value in a box with the + given size. Pixel values that occur only once or twice are ignored; if no + pixel value occurs more than twice, the original pixel value is preserved. + + :param size: The kernel size, in pixels. + """ + name = "Mode" + + def __init__(self, size=3): + self.size = size + + def filter(self, image): + return image.modefilter(self.size) + + +class GaussianBlur(MultibandFilter): + """Gaussian blur filter. + + :param radius: Blur radius. + """ + name = "GaussianBlur" + + def __init__(self, radius=2): + self.radius = radius + + def filter(self, image): + return image.gaussian_blur(self.radius) + + +class BoxBlur(MultibandFilter): + """Blurs the image by setting each pixel to the average value of the pixels + in a square box extending radius pixels in each direction. + Supports float radius of arbitrary size. Uses an optimized implementation + which runs in linear time relative to the size of the image + for any radius value. + + :param radius: Size of the box in one direction. Radius 0 does not blur, + returns an identical image. Radius 1 takes 1 pixel + in each direction, i.e. 9 pixels in total. + """ + name = "BoxBlur" + + def __init__(self, radius): + self.radius = radius + + def filter(self, image): + return image.box_blur(self.radius) + + +class UnsharpMask(MultibandFilter): + """Unsharp mask filter. + + See Wikipedia's entry on `digital unsharp masking`_ for an explanation of + the parameters. 
+ + :param radius: Blur Radius + :param percent: Unsharp strength, in percent + :param threshold: Threshold controls the minimum brightness change that + will be sharpened + + .. _digital unsharp masking: https://en.wikipedia.org/wiki/Unsharp_masking#Digital_unsharp_masking + + """ + name = "UnsharpMask" + + def __init__(self, radius=2, percent=150, threshold=3): + self.radius = radius + self.percent = percent + self.threshold = threshold + + def filter(self, image): + return image.unsharp_mask(self.radius, self.percent, self.threshold) + + +class BLUR(BuiltinFilter): + name = "Blur" + filterargs = (5, 5), 16, 0, ( + 1, 1, 1, 1, 1, + 1, 0, 0, 0, 1, + 1, 0, 0, 0, 1, + 1, 0, 0, 0, 1, + 1, 1, 1, 1, 1 + ) + + +class CONTOUR(BuiltinFilter): + name = "Contour" + filterargs = (3, 3), 1, 255, ( + -1, -1, -1, + -1, 8, -1, + -1, -1, -1 + ) + + +class DETAIL(BuiltinFilter): + name = "Detail" + filterargs = (3, 3), 6, 0, ( + 0, -1, 0, + -1, 10, -1, + 0, -1, 0 + ) + + +class EDGE_ENHANCE(BuiltinFilter): + name = "Edge-enhance" + filterargs = (3, 3), 2, 0, ( + -1, -1, -1, + -1, 10, -1, + -1, -1, -1 + ) + + +class EDGE_ENHANCE_MORE(BuiltinFilter): + name = "Edge-enhance More" + filterargs = (3, 3), 1, 0, ( + -1, -1, -1, + -1, 9, -1, + -1, -1, -1 + ) + + +class EMBOSS(BuiltinFilter): + name = "Emboss" + filterargs = (3, 3), 1, 128, ( + -1, 0, 0, + 0, 1, 0, + 0, 0, 0 + ) + + +class FIND_EDGES(BuiltinFilter): + name = "Find Edges" + filterargs = (3, 3), 1, 0, ( + -1, -1, -1, + -1, 8, -1, + -1, -1, -1 + ) + + +class SHARPEN(BuiltinFilter): + name = "Sharpen" + filterargs = (3, 3), 16, 0, ( + -2, -2, -2, + -2, 32, -2, + -2, -2, -2 + ) + + +class SMOOTH(BuiltinFilter): + name = "Smooth" + filterargs = (3, 3), 13, 0, ( + 1, 1, 1, + 1, 5, 1, + 1, 1, 1 + ) + + +class SMOOTH_MORE(BuiltinFilter): + name = "Smooth More" + filterargs = (5, 5), 100, 0, ( + 1, 1, 1, 1, 1, + 1, 5, 5, 5, 1, + 1, 5, 44, 5, 1, + 1, 5, 5, 5, 1, + 1, 1, 1, 1, 1 + ) + + +class Color3DLUT(MultibandFilter): + 
"""Three-dimensional color lookup table. + + Transforms 3-channel pixels using the values of the channels as coordinates + in the 3D lookup table and interpolating the nearest elements. + + This method allows you to apply almost any color transformation + in constant time by using pre-calculated decimated tables. + + .. versionadded:: 5.2.0 + + :param size: Size of the table. One int or tuple of (int, int, int). + Minimal size in any dimension is 2, maximum is 65. + :param table: Flat lookup table. A list of ``channels * size**3`` + float elements or a list of ``size**3`` channels-sized + tuples with floats. Channels are changed first, + then first dimension, then second, then third. + Value 0.0 corresponds lowest value of output, 1.0 highest. + :param channels: Number of channels in the table. Could be 3 or 4. + Default is 3. + :param target_mode: A mode for the result image. Should have not less + than ``channels`` channels. Default is ``None``, + which means that mode wouldn't be changed. + """ + name = "Color 3D LUT" + + def __init__(self, size, table, channels=3, target_mode=None, **kwargs): + if channels not in (3, 4): + raise ValueError("Only 3 or 4 output channels are supported") + self.size = size = self._check_size(size) + self.channels = channels + self.mode = target_mode + + # Hidden flag `_copy_table=False` could be used to avoid extra copying + # of the table if the table is specially made for the constructor. 
+ copy_table = kwargs.get('_copy_table', True) + items = size[0] * size[1] * size[2] + wrong_size = False + + if numpy and isinstance(table, numpy.ndarray): + if copy_table: + table = table.copy() + + if table.shape in [(items * channels,), (items, channels), + (size[2], size[1], size[0], channels)]: + table = table.reshape(items * channels) + else: + wrong_size = True + + else: + if copy_table: + table = list(table) + + # Convert to a flat list + if table and isinstance(table[0], (list, tuple)): + table, raw_table = [], table + for pixel in raw_table: + if len(pixel) != channels: + raise ValueError( + "The elements of the table should " + "have a length of {}.".format(channels)) + table.extend(pixel) + + if wrong_size or len(table) != items * channels: + raise ValueError( + "The table should have either channels * size**3 float items " + "or size**3 items of channels-sized tuples with floats. " + "Table should be: {}x{}x{}x{}. Actual length: {}".format( + channels, size[0], size[1], size[2], len(table))) + self.table = table + + @staticmethod + def _check_size(size): + try: + _, _, _ = size + except ValueError: + raise ValueError("Size should be either an integer or " + "a tuple of three integers.") + except TypeError: + size = (size, size, size) + size = [int(x) for x in size] + for size1D in size: + if not 2 <= size1D <= 65: + raise ValueError("Size should be in [2, 65] range.") + return size + + @classmethod + def generate(cls, size, callback, channels=3, target_mode=None): + """Generates new LUT using provided callback. + + :param size: Size of the table. Passed to the constructor. + :param callback: Function with three parameters which correspond + three color channels. Will be called ``size**3`` + times with values from 0.0 to 1.0 and should return + a tuple with ``channels`` elements. + :param channels: The number of channels which should return callback. + :param target_mode: Passed to the constructor of the resulting + lookup table. 
+ """ + size1D, size2D, size3D = cls._check_size(size) + if channels not in (3, 4): + raise ValueError("Only 3 or 4 output channels are supported") + + table = [0] * (size1D * size2D * size3D * channels) + idx_out = 0 + for b in range(size3D): + for g in range(size2D): + for r in range(size1D): + table[idx_out:idx_out + channels] = callback( + r / (size1D-1), g / (size2D-1), b / (size3D-1)) + idx_out += channels + + return cls((size1D, size2D, size3D), table, channels=channels, + target_mode=target_mode, _copy_table=False) + + def transform(self, callback, with_normals=False, channels=None, + target_mode=None): + """Transforms the table values using provided callback and returns + a new LUT with altered values. + + :param callback: A function which takes old lookup table values + and returns a new set of values. The number + of arguments which function should take is + ``self.channels`` or ``3 + self.channels`` + if ``with_normals`` flag is set. + Should return a tuple of ``self.channels`` or + ``channels`` elements if it is set. + :param with_normals: If true, ``callback`` will be called with + coordinates in the color cube as the first + three arguments. Otherwise, ``callback`` + will be called only with actual color values. + :param channels: The number of channels in the resulting lookup table. + :param target_mode: Passed to the constructor of the resulting + lookup table. 
+ """ + if channels not in (None, 3, 4): + raise ValueError("Only 3 or 4 output channels are supported") + ch_in = self.channels + ch_out = channels or ch_in + size1D, size2D, size3D = self.size + + table = [0] * (size1D * size2D * size3D * ch_out) + idx_in = 0 + idx_out = 0 + for b in range(size3D): + for g in range(size2D): + for r in range(size1D): + values = self.table[idx_in:idx_in + ch_in] + if with_normals: + values = callback(r / (size1D-1), g / (size2D-1), + b / (size3D-1), *values) + else: + values = callback(*values) + table[idx_out:idx_out + ch_out] = values + idx_in += ch_in + idx_out += ch_out + + return type(self)(self.size, table, channels=ch_out, + target_mode=target_mode or self.mode, + _copy_table=False) + + def __repr__(self): + r = [ + "{} from {}".format(self.__class__.__name__, + self.table.__class__.__name__), + "size={:d}x{:d}x{:d}".format(*self.size), + "channels={:d}".format(self.channels), + ] + if self.mode: + r.append("target_mode={}".format(self.mode)) + return "<{}>".format(" ".join(r)) + + def filter(self, image): + from . import Image + + return image.color_lut_3d( + self.mode or image.mode, Image.LINEAR, self.channels, + self.size[0], self.size[1], self.size[2], self.table) diff --git a/thesisenv/lib/python3.6/site-packages/PIL/ImageFont.py b/thesisenv/lib/python3.6/site-packages/PIL/ImageFont.py new file mode 100644 index 0000000..5384a72 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/PIL/ImageFont.py @@ -0,0 +1,480 @@ +# +# The Python Imaging Library. 
+# $Id$ +# +# PIL raster font management +# +# History: +# 1996-08-07 fl created (experimental) +# 1997-08-25 fl minor adjustments to handle fonts from pilfont 0.3 +# 1999-02-06 fl rewrote most font management stuff in C +# 1999-03-17 fl take pth files into account in load_path (from Richard Jones) +# 2001-02-17 fl added freetype support +# 2001-05-09 fl added TransposedFont wrapper class +# 2002-03-04 fl make sure we have a "L" or "1" font +# 2002-12-04 fl skip non-directory entries in the system path +# 2003-04-29 fl add embedded default font +# 2003-09-27 fl added support for truetype charmap encodings +# +# Todo: +# Adapt to PILFONT2 format (16-bit fonts, compressed, single file) +# +# Copyright (c) 1997-2003 by Secret Labs AB +# Copyright (c) 1996-2003 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +from . import Image +from ._util import isDirectory, isPath, py3 +import os +import sys + +LAYOUT_BASIC = 0 +LAYOUT_RAQM = 1 + + +class _imagingft_not_installed(object): + # module placeholder + def __getattr__(self, id): + raise ImportError("The _imagingft C module is not installed") + + +try: + from . import _imagingft as core +except ImportError: + core = _imagingft_not_installed() + + +# FIXME: add support for pilfont2 format (see FontFile.py) + +# -------------------------------------------------------------------- +# Font metrics format: +# "PILfont" LF +# fontdescriptor LF +# (optional) key=value... LF +# "DATA" LF +# binary data: 256*10*2 bytes (dx, dy, dstbox, srcbox) +# +# To place a character, cut out srcbox and paste at dstbox, +# relative to the character position. Then move the character +# position according to dx, dy. 
+# -------------------------------------------------------------------- + + +class ImageFont(object): + "PIL font wrapper" + + def _load_pilfont(self, filename): + + with open(filename, "rb") as fp: + for ext in (".png", ".gif", ".pbm"): + try: + fullname = os.path.splitext(filename)[0] + ext + image = Image.open(fullname) + except: + pass + else: + if image and image.mode in ("1", "L"): + break + else: + raise IOError("cannot find glyph data file") + + self.file = fullname + + return self._load_pilfont_data(fp, image) + + def _load_pilfont_data(self, file, image): + + # read PILfont header + if file.readline() != b"PILfont\n": + raise SyntaxError("Not a PILfont file") + file.readline().split(b";") + self.info = [] # FIXME: should be a dictionary + while True: + s = file.readline() + if not s or s == b"DATA\n": + break + self.info.append(s) + + # read PILfont metrics + data = file.read(256*20) + + # check image + if image.mode not in ("1", "L"): + raise TypeError("invalid font image mode") + + image.load() + + self.font = Image.core.font(image.im, data) + + def getsize(self, text, *args, **kwargs): + return self.font.getsize(text) + + def getmask(self, text, mode="", *args, **kwargs): + return self.font.getmask(text, mode) + + +## +# Wrapper for FreeType fonts. Application code should use the +# truetype factory function to create font objects. 
+ +class FreeTypeFont(object): + "FreeType font wrapper (requires _imagingft service)" + + def __init__(self, font=None, size=10, index=0, encoding="", + layout_engine=None): + # FIXME: use service provider instead + + self.path = font + self.size = size + self.index = index + self.encoding = encoding + + if layout_engine not in (LAYOUT_BASIC, LAYOUT_RAQM): + layout_engine = LAYOUT_BASIC + if core.HAVE_RAQM: + layout_engine = LAYOUT_RAQM + if layout_engine == LAYOUT_RAQM and not core.HAVE_RAQM: + layout_engine = LAYOUT_BASIC + + self.layout_engine = layout_engine + + if isPath(font): + self.font = core.getfont(font, size, index, encoding, + layout_engine=layout_engine) + else: + self.font_bytes = font.read() + self.font = core.getfont( + "", size, index, encoding, self.font_bytes, layout_engine) + + def _multiline_split(self, text): + split_character = "\n" if isinstance(text, str) else b"\n" + return text.split(split_character) + + def getname(self): + return self.font.family, self.font.style + + def getmetrics(self): + return self.font.ascent, self.font.descent + + def getsize(self, text, direction=None, features=None): + size, offset = self.font.getsize(text, direction, features) + return (size[0] + offset[0], size[1] + offset[1]) + + def getsize_multiline(self, text, direction=None, + spacing=4, features=None): + max_width = 0 + lines = self._multiline_split(text) + line_spacing = self.getsize('A')[1] + spacing + for line in lines: + line_width, line_height = self.getsize(line, direction, features) + max_width = max(max_width, line_width) + + return max_width, len(lines)*line_spacing - spacing + + def getoffset(self, text): + return self.font.getsize(text)[1] + + def getmask(self, text, mode="", direction=None, features=None): + return self.getmask2(text, mode, direction=direction, + features=features)[0] + + def getmask2(self, text, mode="", fill=Image.core.fill, direction=None, + features=None, *args, **kwargs): + size, offset = self.font.getsize(text, 
direction, features) + im = fill("L", size, 0) + self.font.render(text, im.id, mode == "1", direction, features) + return im, offset + + def font_variant(self, font=None, size=None, index=None, encoding=None, + layout_engine=None): + """ + Create a copy of this FreeTypeFont object, + using any specified arguments to override the settings. + + Parameters are identical to the parameters used to initialize this + object. + + :return: A FreeTypeFont object. + """ + return FreeTypeFont( + font=self.path if font is None else font, + size=self.size if size is None else size, + index=self.index if index is None else index, + encoding=self.encoding if encoding is None else encoding, + layout_engine=self.layout_engine if layout_engine is None else layout_engine + ) + + +class TransposedFont(object): + "Wrapper for writing rotated or mirrored text" + + def __init__(self, font, orientation=None): + """ + Wrapper that creates a transposed font from any existing font + object. + + :param font: A font object. + :param orientation: An optional orientation. If given, this should + be one of Image.FLIP_LEFT_RIGHT, Image.FLIP_TOP_BOTTOM, + Image.ROTATE_90, Image.ROTATE_180, or Image.ROTATE_270. + """ + self.font = font + self.orientation = orientation # any 'transpose' argument, or None + + def getsize(self, text, *args, **kwargs): + w, h = self.font.getsize(text) + if self.orientation in (Image.ROTATE_90, Image.ROTATE_270): + return h, w + return w, h + + def getmask(self, text, mode="", *args, **kwargs): + im = self.font.getmask(text, mode, *args, **kwargs) + if self.orientation is not None: + return im.transpose(self.orientation) + return im + + +def load(filename): + """ + Load a font file. This function loads a font object from the given + bitmap font file, and returns the corresponding font object. + + :param filename: Name of font file. + :return: A font object. + :exception IOError: If the file could not be read. 
+ """ + f = ImageFont() + f._load_pilfont(filename) + return f + + +def truetype(font=None, size=10, index=0, encoding="", + layout_engine=None): + """ + Load a TrueType or OpenType font from a file or file-like object, + and create a font object. + This function loads a font object from the given file or file-like + object, and creates a font object for a font of the given size. + + This function requires the _imagingft service. + + :param font: A filename or file-like object containing a TrueType font. + Under Windows, if the file is not found in this filename, + the loader also looks in Windows :file:`fonts/` directory. + :param size: The requested size, in points. + :param index: Which font face to load (default is first available face). + :param encoding: Which font encoding to use (default is Unicode). Common + encodings are "unic" (Unicode), "symb" (Microsoft + Symbol), "ADOB" (Adobe Standard), "ADBE" (Adobe Expert), + and "armn" (Apple Roman). See the FreeType documentation + for more information. + :param layout_engine: Which layout engine to use, if available: + `ImageFont.LAYOUT_BASIC` or `ImageFont.LAYOUT_RAQM`. + :return: A font object. + :exception IOError: If the file could not be read. 
+ """ + + try: + return FreeTypeFont(font, size, index, encoding, layout_engine) + except IOError: + ttf_filename = os.path.basename(font) + + dirs = [] + if sys.platform == "win32": + # check the windows font repository + # NOTE: must use uppercase WINDIR, to work around bugs in + # 1.5.2's os.environ.get() + windir = os.environ.get("WINDIR") + if windir: + dirs.append(os.path.join(windir, "fonts")) + elif sys.platform in ('linux', 'linux2'): + lindirs = os.environ.get("XDG_DATA_DIRS", "") + if not lindirs: + # According to the freedesktop spec, XDG_DATA_DIRS should + # default to /usr/share + lindirs = '/usr/share' + dirs += [os.path.join(lindir, "fonts") + for lindir in lindirs.split(":")] + elif sys.platform == 'darwin': + dirs += ['/Library/Fonts', '/System/Library/Fonts', + os.path.expanduser('~/Library/Fonts')] + + ext = os.path.splitext(ttf_filename)[1] + first_font_with_a_different_extension = None + for directory in dirs: + for walkroot, walkdir, walkfilenames in os.walk(directory): + for walkfilename in walkfilenames: + if ext and walkfilename == ttf_filename: + fontpath = os.path.join(walkroot, walkfilename) + return FreeTypeFont(fontpath, size, index, + encoding, layout_engine) + elif (not ext and + os.path.splitext(walkfilename)[0] == ttf_filename): + fontpath = os.path.join(walkroot, walkfilename) + if os.path.splitext(fontpath)[1] == '.ttf': + return FreeTypeFont(fontpath, size, index, + encoding, layout_engine) + if not ext \ + and first_font_with_a_different_extension is None: + first_font_with_a_different_extension = fontpath + if first_font_with_a_different_extension: + return FreeTypeFont(first_font_with_a_different_extension, size, + index, encoding, layout_engine) + raise + + +def load_path(filename): + """ + Load font file. Same as :py:func:`~PIL.ImageFont.load`, but searches for a + bitmap font along the Python path. + + :param filename: Name of font file. + :return: A font object. + :exception IOError: If the file could not be read. 
+ """ + for directory in sys.path: + if isDirectory(directory): + if not isinstance(filename, str): + if py3: + filename = filename.decode("utf-8") + else: + filename = filename.encode("utf-8") + try: + return load(os.path.join(directory, filename)) + except IOError: + pass + raise IOError("cannot find font file") + + +def load_default(): + """Load a "better than nothing" default font. + + .. versionadded:: 1.1.4 + + :return: A font object. + """ + from io import BytesIO + import base64 + f = ImageFont() + f._load_pilfont_data( + # courB08 + BytesIO(base64.b64decode(b''' +UElMZm9udAo7Ozs7OzsxMDsKREFUQQoAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAYAAAAA//8AAQAAAAAAAAABAAEA +BgAAAAH/+gADAAAAAQAAAAMABgAGAAAAAf/6AAT//QADAAAABgADAAYAAAAA//kABQABAAYAAAAL +AAgABgAAAAD/+AAFAAEACwAAABAACQAGAAAAAP/5AAUAAAAQAAAAFQAHAAYAAP////oABQAAABUA +AAAbAAYABgAAAAH/+QAE//wAGwAAAB4AAwAGAAAAAf/5AAQAAQAeAAAAIQAIAAYAAAAB//kABAAB +ACEAAAAkAAgABgAAAAD/+QAE//0AJAAAACgABAAGAAAAAP/6AAX//wAoAAAALQAFAAYAAAAB//8A +BAACAC0AAAAwAAMABgAAAAD//AAF//0AMAAAADUAAQAGAAAAAf//AAMAAAA1AAAANwABAAYAAAAB +//kABQABADcAAAA7AAgABgAAAAD/+QAFAAAAOwAAAEAABwAGAAAAAP/5AAYAAABAAAAARgAHAAYA 
+AAAA//kABQAAAEYAAABLAAcABgAAAAD/+QAFAAAASwAAAFAABwAGAAAAAP/5AAYAAABQAAAAVgAH +AAYAAAAA//kABQAAAFYAAABbAAcABgAAAAD/+QAFAAAAWwAAAGAABwAGAAAAAP/5AAUAAABgAAAA +ZQAHAAYAAAAA//kABQAAAGUAAABqAAcABgAAAAD/+QAFAAAAagAAAG8ABwAGAAAAAf/8AAMAAABv +AAAAcQAEAAYAAAAA//wAAwACAHEAAAB0AAYABgAAAAD/+gAE//8AdAAAAHgABQAGAAAAAP/7AAT/ +/gB4AAAAfAADAAYAAAAB//oABf//AHwAAACAAAUABgAAAAD/+gAFAAAAgAAAAIUABgAGAAAAAP/5 +AAYAAQCFAAAAiwAIAAYAAP////oABgAAAIsAAACSAAYABgAA////+gAFAAAAkgAAAJgABgAGAAAA +AP/6AAUAAACYAAAAnQAGAAYAAP////oABQAAAJ0AAACjAAYABgAA////+gAFAAAAowAAAKkABgAG +AAD////6AAUAAACpAAAArwAGAAYAAAAA//oABQAAAK8AAAC0AAYABgAA////+gAGAAAAtAAAALsA +BgAGAAAAAP/6AAQAAAC7AAAAvwAGAAYAAP////oABQAAAL8AAADFAAYABgAA////+gAGAAAAxQAA +AMwABgAGAAD////6AAUAAADMAAAA0gAGAAYAAP////oABQAAANIAAADYAAYABgAA////+gAGAAAA +2AAAAN8ABgAGAAAAAP/6AAUAAADfAAAA5AAGAAYAAP////oABQAAAOQAAADqAAYABgAAAAD/+gAF +AAEA6gAAAO8ABwAGAAD////6AAYAAADvAAAA9gAGAAYAAAAA//oABQAAAPYAAAD7AAYABgAA//// ++gAFAAAA+wAAAQEABgAGAAD////6AAYAAAEBAAABCAAGAAYAAP////oABgAAAQgAAAEPAAYABgAA +////+gAGAAABDwAAARYABgAGAAAAAP/6AAYAAAEWAAABHAAGAAYAAP////oABgAAARwAAAEjAAYA +BgAAAAD/+gAFAAABIwAAASgABgAGAAAAAf/5AAQAAQEoAAABKwAIAAYAAAAA//kABAABASsAAAEv +AAgABgAAAAH/+QAEAAEBLwAAATIACAAGAAAAAP/5AAX//AEyAAABNwADAAYAAAAAAAEABgACATcA +AAE9AAEABgAAAAH/+QAE//wBPQAAAUAAAwAGAAAAAP/7AAYAAAFAAAABRgAFAAYAAP////kABQAA +AUYAAAFMAAcABgAAAAD/+wAFAAABTAAAAVEABQAGAAAAAP/5AAYAAAFRAAABVwAHAAYAAAAA//sA +BQAAAVcAAAFcAAUABgAAAAD/+QAFAAABXAAAAWEABwAGAAAAAP/7AAYAAgFhAAABZwAHAAYAAP// +//kABQAAAWcAAAFtAAcABgAAAAD/+QAGAAABbQAAAXMABwAGAAAAAP/5AAQAAgFzAAABdwAJAAYA +AP////kABgAAAXcAAAF+AAcABgAAAAD/+QAGAAABfgAAAYQABwAGAAD////7AAUAAAGEAAABigAF +AAYAAP////sABQAAAYoAAAGQAAUABgAAAAD/+wAFAAABkAAAAZUABQAGAAD////7AAUAAgGVAAAB +mwAHAAYAAAAA//sABgACAZsAAAGhAAcABgAAAAD/+wAGAAABoQAAAacABQAGAAAAAP/7AAYAAAGn +AAABrQAFAAYAAAAA//kABgAAAa0AAAGzAAcABgAA////+wAGAAABswAAAboABQAGAAD////7AAUA +AAG6AAABwAAFAAYAAP////sABgAAAcAAAAHHAAUABgAAAAD/+wAGAAABxwAAAc0ABQAGAAD////7 
+AAYAAgHNAAAB1AAHAAYAAAAA//sABQAAAdQAAAHZAAUABgAAAAH/+QAFAAEB2QAAAd0ACAAGAAAA +Av/6AAMAAQHdAAAB3gAHAAYAAAAA//kABAABAd4AAAHiAAgABgAAAAD/+wAF//0B4gAAAecAAgAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAYAAAAB +//sAAwACAecAAAHpAAcABgAAAAD/+QAFAAEB6QAAAe4ACAAGAAAAAP/5AAYAAAHuAAAB9AAHAAYA +AAAA//oABf//AfQAAAH5AAUABgAAAAD/+QAGAAAB+QAAAf8ABwAGAAAAAv/5AAMAAgH/AAACAAAJ +AAYAAAAA//kABQABAgAAAAIFAAgABgAAAAH/+gAE//sCBQAAAggAAQAGAAAAAP/5AAYAAAIIAAAC +DgAHAAYAAAAB//kABf/+Ag4AAAISAAUABgAA////+wAGAAACEgAAAhkABQAGAAAAAP/7AAX//gIZ +AAACHgADAAYAAAAA//wABf/9Ah4AAAIjAAEABgAAAAD/+QAHAAACIwAAAioABwAGAAAAAP/6AAT/ ++wIqAAACLgABAAYAAAAA//kABP/8Ai4AAAIyAAMABgAAAAD/+gAFAAACMgAAAjcABgAGAAAAAf/5 +AAT//QI3AAACOgAEAAYAAAAB//kABP/9AjoAAAI9AAQABgAAAAL/+QAE//sCPQAAAj8AAgAGAAD/ +///7AAYAAgI/AAACRgAHAAYAAAAA//kABgABAkYAAAJMAAgABgAAAAH//AAD//0CTAAAAk4AAQAG +AAAAAf//AAQAAgJOAAACUQADAAYAAAAB//kABP/9AlEAAAJUAAQABgAAAAH/+QAF//4CVAAAAlgA +BQAGAAD////7AAYAAAJYAAACXwAFAAYAAP////kABgAAAl8AAAJmAAcABgAA////+QAGAAACZgAA +Am0ABwAGAAD////5AAYAAAJtAAACdAAHAAYAAAAA//sABQACAnQAAAJ5AAcABgAA////9wAGAAAC 
+eQAAAoAACQAGAAD////3AAYAAAKAAAAChwAJAAYAAP////cABgAAAocAAAKOAAkABgAA////9wAG +AAACjgAAApUACQAGAAD////4AAYAAAKVAAACnAAIAAYAAP////cABgAAApwAAAKjAAkABgAA//// ++gAGAAACowAAAqoABgAGAAAAAP/6AAUAAgKqAAACrwAIAAYAAP////cABQAAAq8AAAK1AAkABgAA +////9wAFAAACtQAAArsACQAGAAD////3AAUAAAK7AAACwQAJAAYAAP////gABQAAAsEAAALHAAgA +BgAAAAD/9wAEAAACxwAAAssACQAGAAAAAP/3AAQAAALLAAACzwAJAAYAAAAA//cABAAAAs8AAALT +AAkABgAAAAD/+AAEAAAC0wAAAtcACAAGAAD////6AAUAAALXAAAC3QAGAAYAAP////cABgAAAt0A +AALkAAkABgAAAAD/9wAFAAAC5AAAAukACQAGAAAAAP/3AAUAAALpAAAC7gAJAAYAAAAA//cABQAA +Au4AAALzAAkABgAAAAD/9wAFAAAC8wAAAvgACQAGAAAAAP/4AAUAAAL4AAAC/QAIAAYAAAAA//oA +Bf//Av0AAAMCAAUABgAA////+gAGAAADAgAAAwkABgAGAAD////3AAYAAAMJAAADEAAJAAYAAP// +//cABgAAAxAAAAMXAAkABgAA////9wAGAAADFwAAAx4ACQAGAAD////4AAYAAAAAAAoABwASAAYA +AP////cABgAAAAcACgAOABMABgAA////+gAFAAAADgAKABQAEAAGAAD////6AAYAAAAUAAoAGwAQ +AAYAAAAA//gABgAAABsACgAhABIABgAAAAD/+AAGAAAAIQAKACcAEgAGAAAAAP/4AAYAAAAnAAoA +LQASAAYAAAAA//gABgAAAC0ACgAzABIABgAAAAD/+QAGAAAAMwAKADkAEQAGAAAAAP/3AAYAAAA5 +AAoAPwATAAYAAP////sABQAAAD8ACgBFAA8ABgAAAAD/+wAFAAIARQAKAEoAEQAGAAAAAP/4AAUA +AABKAAoATwASAAYAAAAA//gABQAAAE8ACgBUABIABgAAAAD/+AAFAAAAVAAKAFkAEgAGAAAAAP/5 +AAUAAABZAAoAXgARAAYAAAAA//gABgAAAF4ACgBkABIABgAAAAD/+AAGAAAAZAAKAGoAEgAGAAAA +AP/4AAYAAABqAAoAcAASAAYAAAAA//kABgAAAHAACgB2ABEABgAAAAD/+AAFAAAAdgAKAHsAEgAG +AAD////4AAYAAAB7AAoAggASAAYAAAAA//gABQAAAIIACgCHABIABgAAAAD/+AAFAAAAhwAKAIwA +EgAGAAAAAP/4AAUAAACMAAoAkQASAAYAAAAA//gABQAAAJEACgCWABIABgAAAAD/+QAFAAAAlgAK +AJsAEQAGAAAAAP/6AAX//wCbAAoAoAAPAAYAAAAA//oABQABAKAACgClABEABgAA////+AAGAAAA +pQAKAKwAEgAGAAD////4AAYAAACsAAoAswASAAYAAP////gABgAAALMACgC6ABIABgAA////+QAG +AAAAugAKAMEAEQAGAAD////4AAYAAgDBAAoAyAAUAAYAAP////kABQACAMgACgDOABMABgAA//// ++QAGAAIAzgAKANUAEw== +''')), Image.open(BytesIO(base64.b64decode(b''' +iVBORw0KGgoAAAANSUhEUgAAAx4AAAAUAQAAAAArMtZoAAAEwElEQVR4nABlAJr/AHVE4czCI/4u +Mc4b7vuds/xzjz5/3/7u/n9vMe7vnfH/9++vPn/xyf5zhxzjt8GHw8+2d83u8x27199/nxuQ6Od9 
+M43/5z2I+9n9ZtmDBwMQECDRQw/eQIQohJXxpBCNVE6QCCAAAAD//wBlAJr/AgALyj1t/wINwq0g +LeNZUworuN1cjTPIzrTX6ofHWeo3v336qPzfEwRmBnHTtf95/fglZK5N0PDgfRTslpGBvz7LFc4F +IUXBWQGjQ5MGCx34EDFPwXiY4YbYxavpnhHFrk14CDAAAAD//wBlAJr/AgKqRooH2gAgPeggvUAA +Bu2WfgPoAwzRAABAAAAAAACQgLz/3Uv4Gv+gX7BJgDeeGP6AAAD1NMDzKHD7ANWr3loYbxsAD791 +NAADfcoIDyP44K/jv4Y63/Z+t98Ovt+ub4T48LAAAAD//wBlAJr/AuplMlADJAAAAGuAphWpqhMx +in0A/fRvAYBABPgBwBUgABBQ/sYAyv9g0bCHgOLoGAAAAAAAREAAwI7nr0ArYpow7aX8//9LaP/9 +SjdavWA8ePHeBIKB//81/83ndznOaXx379wAAAD//wBlAJr/AqDxW+D3AABAAbUh/QMnbQag/gAY +AYDAAACgtgD/gOqAAAB5IA/8AAAk+n9w0AAA8AAAmFRJuPo27ciC0cD5oeW4E7KA/wD3ECMAn2tt +y8PgwH8AfAxFzC0JzeAMtratAsC/ffwAAAD//wBlAJr/BGKAyCAA4AAAAvgeYTAwHd1kmQF5chkG +ABoMIHcL5xVpTfQbUqzlAAAErwAQBgAAEOClA5D9il08AEh/tUzdCBsXkbgACED+woQg8Si9VeqY +lODCn7lmF6NhnAEYgAAA/NMIAAAAAAD//2JgjLZgVGBg5Pv/Tvpc8hwGBjYGJADjHDrAwPzAjv/H +/Wf3PzCwtzcwHmBgYGcwbZz8wHaCAQMDOwMDQ8MCBgYOC3W7mp+f0w+wHOYxO3OG+e376hsMZjk3 +AAAAAP//YmCMY2A4wMAIN5e5gQETPD6AZisDAwMDgzSDAAPjByiHcQMDAwMDg1nOze1lByRu5/47 +c4859311AYNZzg0AAAAA//9iYGDBYihOIIMuwIjGL39/fwffA8b//xv/P2BPtzzHwCBjUQAAAAD/ +/yLFBrIBAAAA//9i1HhcwdhizX7u8NZNzyLbvT97bfrMf/QHI8evOwcSqGUJAAAA//9iYBB81iSw +pEE170Qrg5MIYydHqwdDQRMrAwcVrQAAAAD//2J4x7j9AAMDn8Q/BgYLBoaiAwwMjPdvMDBYM1Tv +oJodAAAAAP//Yqo/83+dxePWlxl3npsel9lvLfPcqlE9725C+acfVLMEAAAA//9i+s9gwCoaaGMR +evta/58PTEWzr21hufPjA8N+qlnBwAAAAAD//2JiWLci5v1+HmFXDqcnULE/MxgYGBj+f6CaJQAA +AAD//2Ji2FrkY3iYpYC5qDeGgeEMAwPDvwQBBoYvcTwOVLMEAAAA//9isDBgkP///0EOg9z35v// +Gc/eeW7BwPj5+QGZhANUswMAAAD//2JgqGBgYGBgqEMXlvhMPUsAAAAA//8iYDd1AAAAAP//AwDR +w7IkEbzhVQAAAABJRU5ErkJggg== +''')))) + return f diff --git a/thesisenv/lib/python3.6/site-packages/PIL/ImageGrab.py b/thesisenv/lib/python3.6/site-packages/PIL/ImageGrab.py new file mode 100644 index 0000000..712b02c --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/PIL/ImageGrab.py @@ -0,0 +1,80 @@ +# +# The Python Imaging Library +# $Id$ +# +# screen grabber (macOS and Windows only) +# +# History: +# 2001-04-26 fl created 
+# 2001-09-17 fl use builtin driver, if present +# 2002-11-19 fl added grabclipboard support +# +# Copyright (c) 2001-2002 by Secret Labs AB +# Copyright (c) 2001-2002 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +from . import Image + +import sys +if sys.platform not in ["win32", "darwin"]: + raise ImportError("ImageGrab is macOS and Windows only") + +if sys.platform == "win32": + grabber = Image.core.grabscreen +elif sys.platform == "darwin": + import os + import tempfile + import subprocess + + +def grab(bbox=None): + if sys.platform == "darwin": + fh, filepath = tempfile.mkstemp('.png') + os.close(fh) + subprocess.call(['screencapture', '-x', filepath]) + im = Image.open(filepath) + im.load() + os.unlink(filepath) + else: + size, data = grabber() + im = Image.frombytes( + "RGB", size, data, + # RGB, 32-bit line padding, origin lower left corner + "raw", "BGR", (size[0]*3 + 3) & -4, -1 + ) + if bbox: + im = im.crop(bbox) + return im + + +def grabclipboard(): + if sys.platform == "darwin": + fh, filepath = tempfile.mkstemp('.jpg') + os.close(fh) + commands = [ + "set theFile to (open for access POSIX file \""+filepath+"\" with write permission)", + "try", + "write (the clipboard as JPEG picture) to theFile", + "end try", + "close access theFile" + ] + script = ["osascript"] + for command in commands: + script += ["-e", command] + subprocess.call(script) + + im = None + if os.stat(filepath).st_size != 0: + im = Image.open(filepath) + im.load() + os.unlink(filepath) + return im + else: + data = Image.core.grabclipboard() + if isinstance(data, bytes): + from . 
import BmpImagePlugin + import io + return BmpImagePlugin.DibImageFile(io.BytesIO(data)) + return data diff --git a/thesisenv/lib/python3.6/site-packages/PIL/ImageMath.py b/thesisenv/lib/python3.6/site-packages/PIL/ImageMath.py new file mode 100644 index 0000000..d985877 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/PIL/ImageMath.py @@ -0,0 +1,271 @@ +# +# The Python Imaging Library +# $Id$ +# +# a simple math add-on for the Python Imaging Library +# +# History: +# 1999-02-15 fl Original PIL Plus release +# 2005-05-05 fl Simplified and cleaned up for PIL 1.1.6 +# 2005-09-12 fl Fixed int() and float() for Python 2.4.1 +# +# Copyright (c) 1999-2005 by Secret Labs AB +# Copyright (c) 2005 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +from . import Image, _imagingmath +from ._util import py3 + +try: + import builtins +except ImportError: + import __builtin__ + builtins = __builtin__ + +VERBOSE = 0 + + +def _isconstant(v): + return isinstance(v, int) or isinstance(v, float) + + +class _Operand(object): + """Wraps an image operand, providing standard operators""" + + def __init__(self, im): + self.im = im + + def __fixup(self, im1): + # convert image to suitable mode + if isinstance(im1, _Operand): + # argument was an image. 
+ if im1.im.mode in ("1", "L"): + return im1.im.convert("I") + elif im1.im.mode in ("I", "F"): + return im1.im + else: + raise ValueError("unsupported mode: %s" % im1.im.mode) + else: + # argument was a constant + if _isconstant(im1) and self.im.mode in ("1", "L", "I"): + return Image.new("I", self.im.size, im1) + else: + return Image.new("F", self.im.size, im1) + + def apply(self, op, im1, im2=None, mode=None): + im1 = self.__fixup(im1) + if im2 is None: + # unary operation + out = Image.new(mode or im1.mode, im1.size, None) + im1.load() + try: + op = getattr(_imagingmath, op+"_"+im1.mode) + except AttributeError: + raise TypeError("bad operand type for '%s'" % op) + _imagingmath.unop(op, out.im.id, im1.im.id) + else: + # binary operation + im2 = self.__fixup(im2) + if im1.mode != im2.mode: + # convert both arguments to floating point + if im1.mode != "F": + im1 = im1.convert("F") + if im2.mode != "F": + im2 = im2.convert("F") + if im1.mode != im2.mode: + raise ValueError("mode mismatch") + if im1.size != im2.size: + # crop both arguments to a common size + size = (min(im1.size[0], im2.size[0]), + min(im1.size[1], im2.size[1])) + if im1.size != size: + im1 = im1.crop((0, 0) + size) + if im2.size != size: + im2 = im2.crop((0, 0) + size) + out = Image.new(mode or im1.mode, size, None) + else: + out = Image.new(mode or im1.mode, im1.size, None) + im1.load() + im2.load() + try: + op = getattr(_imagingmath, op+"_"+im1.mode) + except AttributeError: + raise TypeError("bad operand type for '%s'" % op) + _imagingmath.binop(op, out.im.id, im1.im.id, im2.im.id) + return _Operand(out) + + # unary operators + def __bool__(self): + # an image is "true" if it contains at least one non-zero pixel + return self.im.getbbox() is not None + + if not py3: + # Provide __nonzero__ for pre-Py3k + __nonzero__ = __bool__ + del __bool__ + + def __abs__(self): + return self.apply("abs", self) + + def __pos__(self): + return self + + def __neg__(self): + return self.apply("neg", self) + + # 
binary operators + def __add__(self, other): + return self.apply("add", self, other) + + def __radd__(self, other): + return self.apply("add", other, self) + + def __sub__(self, other): + return self.apply("sub", self, other) + + def __rsub__(self, other): + return self.apply("sub", other, self) + + def __mul__(self, other): + return self.apply("mul", self, other) + + def __rmul__(self, other): + return self.apply("mul", other, self) + + def __truediv__(self, other): + return self.apply("div", self, other) + + def __rtruediv__(self, other): + return self.apply("div", other, self) + + def __mod__(self, other): + return self.apply("mod", self, other) + + def __rmod__(self, other): + return self.apply("mod", other, self) + + def __pow__(self, other): + return self.apply("pow", self, other) + + def __rpow__(self, other): + return self.apply("pow", other, self) + + if not py3: + # Provide __div__ and __rdiv__ for pre-Py3k + __div__ = __truediv__ + __rdiv__ = __rtruediv__ + del __truediv__ + del __rtruediv__ + + # bitwise + def __invert__(self): + return self.apply("invert", self) + + def __and__(self, other): + return self.apply("and", self, other) + + def __rand__(self, other): + return self.apply("and", other, self) + + def __or__(self, other): + return self.apply("or", self, other) + + def __ror__(self, other): + return self.apply("or", other, self) + + def __xor__(self, other): + return self.apply("xor", self, other) + + def __rxor__(self, other): + return self.apply("xor", other, self) + + def __lshift__(self, other): + return self.apply("lshift", self, other) + + def __rshift__(self, other): + return self.apply("rshift", self, other) + + # logical + def __eq__(self, other): + return self.apply("eq", self, other) + + def __ne__(self, other): + return self.apply("ne", self, other) + + def __lt__(self, other): + return self.apply("lt", self, other) + + def __le__(self, other): + return self.apply("le", self, other) + + def __gt__(self, other): + return 
self.apply("gt", self, other) + + def __ge__(self, other): + return self.apply("ge", self, other) + + +# conversions +def imagemath_int(self): + return _Operand(self.im.convert("I")) + + +def imagemath_float(self): + return _Operand(self.im.convert("F")) + + +# logical +def imagemath_equal(self, other): + return self.apply("eq", self, other, mode="I") + + +def imagemath_notequal(self, other): + return self.apply("ne", self, other, mode="I") + + +def imagemath_min(self, other): + return self.apply("min", self, other) + + +def imagemath_max(self, other): + return self.apply("max", self, other) + + +def imagemath_convert(self, mode): + return _Operand(self.im.convert(mode)) + + +ops = {} +for k, v in list(globals().items()): + if k[:10] == "imagemath_": + ops[k[10:]] = v + + +def eval(expression, _dict={}, **kw): + """ + Evaluates an image expression. + + :param expression: A string containing a Python-style expression. + :param options: Values to add to the evaluation context. You + can either use a dictionary, or one or more keyword + arguments. + :return: The evaluated expression. This is usually an image object, but can + also be an integer, a floating point value, or a pixel tuple, + depending on the expression. + """ + + # build execution namespace + args = ops.copy() + args.update(_dict) + args.update(kw) + for k, v in list(args.items()): + if hasattr(v, "im"): + args[k] = _Operand(v) + + out = builtins.eval(expression, args) + try: + return out.im + except AttributeError: + return out diff --git a/thesisenv/lib/python3.6/site-packages/PIL/ImageMode.py b/thesisenv/lib/python3.6/site-packages/PIL/ImageMode.py new file mode 100644 index 0000000..2b3377a --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/PIL/ImageMode.py @@ -0,0 +1,56 @@ +# +# The Python Imaging Library. +# $Id$ +# +# standard mode descriptors +# +# History: +# 2006-03-20 fl Added +# +# Copyright (c) 2006 by Secret Labs AB. +# Copyright (c) 2006 by Fredrik Lundh. 
+# +# See the README file for information on usage and redistribution. +# + +# mode descriptor cache +_modes = None + + +class ModeDescriptor(object): + """Wrapper for mode strings.""" + + def __init__(self, mode, bands, basemode, basetype): + self.mode = mode + self.bands = bands + self.basemode = basemode + self.basetype = basetype + + def __str__(self): + return self.mode + + +def getmode(mode): + """Gets a mode descriptor for the given mode.""" + global _modes + if not _modes: + # initialize mode cache + + from . import Image + modes = {} + # core modes + for m, (basemode, basetype, bands) in Image._MODEINFO.items(): + modes[m] = ModeDescriptor(m, bands, basemode, basetype) + # extra experimental modes + modes["RGBa"] = ModeDescriptor("RGBa", + ("R", "G", "B", "a"), "RGB", "L") + modes["LA"] = ModeDescriptor("LA", ("L", "A"), "L", "L") + modes["La"] = ModeDescriptor("La", ("L", "a"), "L", "L") + modes["PA"] = ModeDescriptor("PA", ("P", "A"), "RGB", "L") + # mapping modes + modes["I;16"] = ModeDescriptor("I;16", "I", "L", "L") + modes["I;16L"] = ModeDescriptor("I;16L", "I", "L", "L") + modes["I;16B"] = ModeDescriptor("I;16B", "I", "L", "L") + # set global mode cache atomically + _modes = modes + return _modes[mode] diff --git a/thesisenv/lib/python3.6/site-packages/PIL/ImageMorph.py b/thesisenv/lib/python3.6/site-packages/PIL/ImageMorph.py new file mode 100644 index 0000000..54ceb79 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/PIL/ImageMorph.py @@ -0,0 +1,245 @@ +# A binary morphology add-on for the Python Imaging Library +# +# History: +# 2014-06-04 Initial version. +# +# Copyright (c) 2014 Dov Grobgeld + +from __future__ import print_function + +from . import Image, _imagingmorph +import re + +LUT_SIZE = 1 << 9 + + +class LutBuilder(object): + """A class for building a MorphLut from a descriptive language + + The input patterns is a list of a strings sequences like these:: + + 4:(... + .1. 
+ 111)->1 + + (whitespaces including linebreaks are ignored). The option 4 + describes a series of symmetry operations (in this case a + 4-rotation), the pattern is described by: + + - . or X - Ignore + - 1 - Pixel is on + - 0 - Pixel is off + + The result of the operation is described after "->" string. + + The default is to return the current pixel value, which is + returned if no other match is found. + + Operations: + + - 4 - 4 way rotation + - N - Negate + - 1 - Dummy op for no other operation (an op must always be given) + - M - Mirroring + + Example:: + + lb = LutBuilder(patterns = ["4:(... .1. 111)->1"]) + lut = lb.build_lut() + + """ + def __init__(self, patterns=None, op_name=None): + if patterns is not None: + self.patterns = patterns + else: + self.patterns = [] + self.lut = None + if op_name is not None: + known_patterns = { + 'corner': ['1:(... ... ...)->0', + '4:(00. 01. ...)->1'], + 'dilation4': ['4:(... .0. .1.)->1'], + 'dilation8': ['4:(... .0. .1.)->1', + '4:(... .0. ..1)->1'], + 'erosion4': ['4:(... .1. .0.)->0'], + 'erosion8': ['4:(... .1. .0.)->0', + '4:(... .1. ..0)->0'], + 'edge': ['1:(... ... ...)->0', + '4:(.0. .1. ...)->1', + '4:(01. .1. ...)->1'] + } + if op_name not in known_patterns: + raise Exception('Unknown pattern '+op_name+'!') + + self.patterns = known_patterns[op_name] + + def add_patterns(self, patterns): + self.patterns += patterns + + def build_default_lut(self): + symbols = [0, 1] + m = 1 << 4 # pos of current pixel + self.lut = bytearray(symbols[(i & m) > 0] for i in range(LUT_SIZE)) + + def get_lut(self): + return self.lut + + def _string_permute(self, pattern, permutation): + """string_permute takes a pattern and a permutation and returns the + string permuted according to the permutation list. 
+ """ + assert(len(permutation) == 9) + return ''.join(pattern[p] for p in permutation) + + def _pattern_permute(self, basic_pattern, options, basic_result): + """pattern_permute takes a basic pattern and its result and clones + the pattern according to the modifications described in the $options + parameter. It returns a list of all cloned patterns.""" + patterns = [(basic_pattern, basic_result)] + + # rotations + if '4' in options: + res = patterns[-1][1] + for i in range(4): + patterns.append( + (self._string_permute(patterns[-1][0], [6, 3, 0, + 7, 4, 1, + 8, 5, 2]), res)) + # mirror + if 'M' in options: + n = len(patterns) + for pattern, res in patterns[0:n]: + patterns.append( + (self._string_permute(pattern, [2, 1, 0, + 5, 4, 3, + 8, 7, 6]), res)) + + # negate + if 'N' in options: + n = len(patterns) + for pattern, res in patterns[0:n]: + # Swap 0 and 1 + pattern = (pattern + .replace('0', 'Z') + .replace('1', '0') + .replace('Z', '1')) + res = 1-int(res) + patterns.append((pattern, res)) + + return patterns + + def build_lut(self): + """Compile all patterns into a morphology lut. + + TBD :Build based on (file) morphlut:modify_lut + """ + self.build_default_lut() + patterns = [] + + # Parse and create symmetries of the patterns strings + for p in self.patterns: + m = re.search( + r'(\w*):?\s*\((.+?)\)\s*->\s*(\d)', p.replace('\n', '')) + if not m: + raise Exception('Syntax error in pattern "'+p+'"') + options = m.group(1) + pattern = m.group(2) + result = int(m.group(3)) + + # Get rid of spaces + pattern = pattern.replace(' ', '').replace('\n', '') + + patterns += self._pattern_permute(pattern, options, result) + + # compile the patterns into regular expressions for speed + for i, pattern in enumerate(patterns): + p = pattern[0].replace('.', 'X').replace('X', '[01]') + p = re.compile(p) + patterns[i] = (p, pattern[1]) + + # Step through table and find patterns that match. + # Note that all the patterns are searched. 
The last one + # caught overrides + for i in range(LUT_SIZE): + # Build the bit pattern + bitpattern = bin(i)[2:] + bitpattern = ('0'*(9-len(bitpattern)) + bitpattern)[::-1] + + for p, r in patterns: + if p.match(bitpattern): + self.lut[i] = [0, 1][r] + + return self.lut + + +class MorphOp(object): + """A class for binary morphological operators""" + + def __init__(self, + lut=None, + op_name=None, + patterns=None): + """Create a binary morphological operator""" + self.lut = lut + if op_name is not None: + self.lut = LutBuilder(op_name=op_name).build_lut() + elif patterns is not None: + self.lut = LutBuilder(patterns=patterns).build_lut() + + def apply(self, image): + """Run a single morphological operation on an image + + Returns a tuple of the number of changed pixels and the + morphed image""" + if self.lut is None: + raise Exception('No operator loaded') + + if image.mode != 'L': + raise Exception('Image must be binary, meaning it must use mode L') + outimage = Image.new(image.mode, image.size, None) + count = _imagingmorph.apply( + bytes(self.lut), image.im.id, outimage.im.id) + return count, outimage + + def match(self, image): + """Get a list of coordinates matching the morphological operation on + an image. + + Returns a list of tuples of (x,y) coordinates + of all matching pixels. See :ref:`coordinate-system`.""" + if self.lut is None: + raise Exception('No operator loaded') + + if image.mode != 'L': + raise Exception('Image must be binary, meaning it must use mode L') + return _imagingmorph.match(bytes(self.lut), image.im.id) + + def get_on_pixels(self, image): + """Get a list of all turned on pixels in a binary image + + Returns a list of tuples of (x,y) coordinates + of all matching pixels. 
See :ref:`coordinate-system`.""" + + if image.mode != 'L': + raise Exception('Image must be binary, meaning it must use mode L') + return _imagingmorph.get_on_pixels(image.im.id) + + def load_lut(self, filename): + """Load an operator from an mrl file""" + with open(filename, 'rb') as f: + self.lut = bytearray(f.read()) + + if len(self.lut) != LUT_SIZE: + self.lut = None + raise Exception('Wrong size operator file!') + + def save_lut(self, filename): + """Save an operator to an mrl file""" + if self.lut is None: + raise Exception('No operator loaded') + with open(filename, 'wb') as f: + f.write(self.lut) + + def set_lut(self, lut): + """Set the lut from an external source""" + self.lut = lut diff --git a/thesisenv/lib/python3.6/site-packages/PIL/ImageOps.py b/thesisenv/lib/python3.6/site-packages/PIL/ImageOps.py new file mode 100644 index 0000000..9f516ba --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/PIL/ImageOps.py @@ -0,0 +1,632 @@ +# +# The Python Imaging Library. +# $Id$ +# +# standard image operations +# +# History: +# 2001-10-20 fl Created +# 2001-10-23 fl Added autocontrast operator +# 2001-12-18 fl Added Kevin's fit operator +# 2004-03-14 fl Fixed potential division by zero in equalize +# 2005-05-05 fl Fixed equalize for low number of values +# +# Copyright (c) 2001-2004 by Secret Labs AB +# Copyright (c) 2001-2004 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +from . import Image +from ._util import isStringType +import operator +import functools +import warnings + + +# +# helpers + +def _border(border): + if isinstance(border, tuple): + if len(border) == 2: + left, top = right, bottom = border + elif len(border) == 4: + left, top, right, bottom = border + else: + left = top = right = bottom = border + return left, top, right, bottom + + +def _color(color, mode): + if isStringType(color): + from . 
import ImageColor + color = ImageColor.getcolor(color, mode) + return color + + +def _lut(image, lut): + if image.mode == "P": + # FIXME: apply to lookup table, not image data + raise NotImplementedError("mode P support coming soon") + elif image.mode in ("L", "RGB"): + if image.mode == "RGB" and len(lut) == 256: + lut = lut + lut + lut + return image.point(lut) + else: + raise IOError("not supported for this image mode") + +# +# actions + + +def autocontrast(image, cutoff=0, ignore=None): + """ + Maximize (normalize) image contrast. This function calculates a + histogram of the input image, removes **cutoff** percent of the + lightest and darkest pixels from the histogram, and remaps the image + so that the darkest pixel becomes black (0), and the lightest + becomes white (255). + + :param image: The image to process. + :param cutoff: How many percent to cut off from the histogram. + :param ignore: The background pixel value (use None for no background). + :return: An image. + """ + histogram = image.histogram() + lut = [] + for layer in range(0, len(histogram), 256): + h = histogram[layer:layer+256] + if ignore is not None: + # get rid of outliers + try: + h[ignore] = 0 + except TypeError: + # assume sequence + for ix in ignore: + h[ix] = 0 + if cutoff: + # cut off pixels from both ends of the histogram + # get number of pixels + n = 0 + for ix in range(256): + n = n + h[ix] + # remove cutoff% pixels from the low end + cut = n * cutoff // 100 + for lo in range(256): + if cut > h[lo]: + cut = cut - h[lo] + h[lo] = 0 + else: + h[lo] -= cut + cut = 0 + if cut <= 0: + break + # remove cutoff% samples from the hi end + cut = n * cutoff // 100 + for hi in range(255, -1, -1): + if cut > h[hi]: + cut = cut - h[hi] + h[hi] = 0 + else: + h[hi] -= cut + cut = 0 + if cut <= 0: + break + # find lowest/highest samples after preprocessing + for lo in range(256): + if h[lo]: + break + for hi in range(255, -1, -1): + if h[hi]: + break + if hi <= lo: + # don't bother + 
lut.extend(list(range(256))) + else: + scale = 255.0 / (hi - lo) + offset = -lo * scale + for ix in range(256): + ix = int(ix * scale + offset) + if ix < 0: + ix = 0 + elif ix > 255: + ix = 255 + lut.append(ix) + return _lut(image, lut) + + +def colorize(image, black, white, mid=None, blackpoint=0, + whitepoint=255, midpoint=127): + """ + Colorize grayscale image. + This function calculates a color wedge which maps all black pixels in + the source image to the first color and all white pixels to the + second color. If **mid** is specified, it uses three-color mapping. + The **black** and **white** arguments should be RGB tuples or color names; + optionally you can use three-color mapping by also specifying **mid**. + Mapping positions for any of the colors can be specified + (e.g. **blackpoint**), where these parameters are the integer + value corresponding to where the corresponding color should be mapped. + These parameters must have logical order, such that + **blackpoint** <= **midpoint** <= **whitepoint** (if **mid** is specified). + + :param image: The image to colorize. + :param black: The color to use for black input pixels. + :param white: The color to use for white input pixels. + :param mid: The color to use for midtone input pixels. + :param blackpoint: an int value [0, 255] for the black mapping. + :param whitepoint: an int value [0, 255] for the white mapping. + :param midpoint: an int value [0, 255] for the midtone mapping. + :return: An image. 
+ """ + + # Initial asserts + assert image.mode == "L" + if mid is None: + assert 0 <= blackpoint <= whitepoint <= 255 + else: + assert 0 <= blackpoint <= midpoint <= whitepoint <= 255 + + # Define colors from arguments + black = _color(black, "RGB") + white = _color(white, "RGB") + if mid is not None: + mid = _color(mid, "RGB") + + # Empty lists for the mapping + red = [] + green = [] + blue = [] + + # Create the low-end values + for i in range(0, blackpoint): + red.append(black[0]) + green.append(black[1]) + blue.append(black[2]) + + # Create the mapping (2-color) + if mid is None: + + range_map = range(0, whitepoint - blackpoint) + + for i in range_map: + red.append(black[0] + i * (white[0] - black[0]) // len(range_map)) + green.append(black[1] + i * (white[1] - black[1]) // len(range_map)) + blue.append(black[2] + i * (white[2] - black[2]) // len(range_map)) + + # Create the mapping (3-color) + else: + + range_map1 = range(0, midpoint - blackpoint) + range_map2 = range(0, whitepoint - midpoint) + + for i in range_map1: + red.append(black[0] + i * (mid[0] - black[0]) // len(range_map1)) + green.append(black[1] + i * (mid[1] - black[1]) // len(range_map1)) + blue.append(black[2] + i * (mid[2] - black[2]) // len(range_map1)) + for i in range_map2: + red.append(mid[0] + i * (white[0] - mid[0]) // len(range_map2)) + green.append(mid[1] + i * (white[1] - mid[1]) // len(range_map2)) + blue.append(mid[2] + i * (white[2] - mid[2]) // len(range_map2)) + + # Create the high-end values + for i in range(0, 256 - whitepoint): + red.append(white[0]) + green.append(white[1]) + blue.append(white[2]) + + # Return converted image + image = image.convert("RGB") + return _lut(image, red + green + blue) + + +def pad(image, size, method=Image.NEAREST, color=None, centering=(0.5, 0.5)): + """ + Returns a sized and padded version of the image, expanded to fill the + requested aspect ratio and size. + + :param image: The image to size and crop. 
+ :param size: The requested output size in pixels, given as a + (width, height) tuple. + :param method: What resampling method to use. Default is + :py:attr:`PIL.Image.NEAREST`. + :param color: The background color of the padded image. + :param centering: Control the position of the original image within the + padded version. + (0.5, 0.5) will keep the image centered + (0, 0) will keep the image aligned to the top left + (1, 1) will keep the image aligned to the bottom + right + :return: An image. + """ + + im_ratio = image.width / image.height + dest_ratio = float(size[0]) / size[1] + + if im_ratio == dest_ratio: + out = image.resize(size, resample=method) + else: + out = Image.new(image.mode, size, color) + if im_ratio > dest_ratio: + new_height = int(image.height / image.width * size[0]) + if new_height != size[1]: + image = image.resize((size[0], new_height), resample=method) + + y = int((size[1] - new_height) * max(0, min(centering[1], 1))) + out.paste(image, (0, y)) + else: + new_width = int(image.width / image.height * size[1]) + if new_width != size[0]: + image = image.resize((new_width, size[1]), resample=method) + + x = int((size[0] - new_width) * max(0, min(centering[0], 1))) + out.paste(image, (x, 0)) + return out + + +def crop(image, border=0): + """ + Remove border from image. The same amount of pixels are removed + from all four sides. This function works on all image modes. + + .. seealso:: :py:meth:`~PIL.Image.Image.crop` + + :param image: The image to crop. + :param border: The number of pixels to remove. + :return: An image. + """ + left, top, right, bottom = _border(border) + return image.crop( + (left, top, image.size[0]-right, image.size[1]-bottom) + ) + + +def scale(image, factor, resample=Image.NEAREST): + """ + Returns a rescaled image by a specific factor given in parameter. + A factor greater than 1 expands the image, between 0 and 1 contracts the + image. + + :param image: The image to rescale. 
+ :param factor: The expansion factor, as a float. + :param resample: An optional resampling filter. Same values possible as + in the PIL.Image.resize function. + :returns: An :py:class:`~PIL.Image.Image` object. + """ + if factor == 1: + return image.copy() + elif factor <= 0: + raise ValueError("the factor must be greater than 0") + else: + size = (int(round(factor * image.width)), + int(round(factor * image.height))) + return image.resize(size, resample) + + +def deform(image, deformer, resample=Image.BILINEAR): + """ + Deform the image. + + :param image: The image to deform. + :param deformer: A deformer object. Any object that implements a + **getmesh** method can be used. + :param resample: An optional resampling filter. Same values possible as + in the PIL.Image.transform function. + :return: An image. + """ + return image.transform( + image.size, Image.MESH, deformer.getmesh(image), resample + ) + + +def equalize(image, mask=None): + """ + Equalize the image histogram. This function applies a non-linear + mapping to the input image, in order to create a uniform + distribution of grayscale values in the output image. + + :param image: The image to equalize. + :param mask: An optional mask. If given, only the pixels selected by + the mask are included in the analysis. + :return: An image. + """ + if image.mode == "P": + image = image.convert("RGB") + h = image.histogram(mask) + lut = [] + for b in range(0, len(h), 256): + histo = [_f for _f in h[b:b+256] if _f] + if len(histo) <= 1: + lut.extend(list(range(256))) + else: + step = (functools.reduce(operator.add, histo) - histo[-1]) // 255 + if not step: + lut.extend(list(range(256))) + else: + n = step // 2 + for i in range(256): + lut.append(n // step) + n = n + h[i+b] + return _lut(image, lut) + + +def expand(image, border=0, fill=0): + """ + Add border to the image + + :param image: The image to expand. + :param border: Border width, in pixels. + :param fill: Pixel fill value (a color value). 
Default is 0 (black). + :return: An image. + """ + left, top, right, bottom = _border(border) + width = left + image.size[0] + right + height = top + image.size[1] + bottom + out = Image.new(image.mode, (width, height), _color(fill, image.mode)) + out.paste(image, (left, top)) + return out + + +def fit(image, size, method=Image.NEAREST, bleed=0.0, centering=(0.5, 0.5)): + """ + Returns a sized and cropped version of the image, cropped to the + requested aspect ratio and size. + + This function was contributed by Kevin Cazabon. + + :param image: The image to size and crop. + :param size: The requested output size in pixels, given as a + (width, height) tuple. + :param method: What resampling method to use. Default is + :py:attr:`PIL.Image.NEAREST`. + :param bleed: Remove a border around the outside of the image (from all + four edges. The value is a decimal percentage (use 0.01 for + one percent). The default value is 0 (no border). + :param centering: Control the cropping position. Use (0.5, 0.5) for + center cropping (e.g. if cropping the width, take 50% off + of the left side, and therefore 50% off the right side). + (0.0, 0.0) will crop from the top left corner (i.e. if + cropping the width, take all of the crop off of the right + side, and if cropping the height, take all of it off the + bottom). (1.0, 0.0) will crop from the bottom left + corner, etc. (i.e. if cropping the width, take all of the + crop off the left side, and if cropping the height take + none from the top, and therefore all off the bottom). + :return: An image. 
+ """ + + # by Kevin Cazabon, Feb 17/2000 + # kevin@cazabon.com + # http://www.cazabon.com + + # ensure inputs are valid + if not isinstance(centering, list): + centering = [centering[0], centering[1]] + + if centering[0] > 1.0 or centering[0] < 0.0: + centering[0] = 0.50 + if centering[1] > 1.0 or centering[1] < 0.0: + centering[1] = 0.50 + + if bleed > 0.49999 or bleed < 0.0: + bleed = 0.0 + + # calculate the area to use for resizing and cropping, subtracting + # the 'bleed' around the edges + + # number of pixels to trim off on Top and Bottom, Left and Right + bleedPixels = ( + int((float(bleed) * float(image.size[0])) + 0.5), + int((float(bleed) * float(image.size[1])) + 0.5) + ) + + liveArea = (0, 0, image.size[0], image.size[1]) + if bleed > 0.0: + liveArea = ( + bleedPixels[0], bleedPixels[1], image.size[0] - bleedPixels[0] - 1, + image.size[1] - bleedPixels[1] - 1 + ) + + liveSize = (liveArea[2] - liveArea[0], liveArea[3] - liveArea[1]) + + # calculate the aspect ratio of the liveArea + liveAreaAspectRatio = float(liveSize[0])/float(liveSize[1]) + + # calculate the aspect ratio of the output image + aspectRatio = float(size[0]) / float(size[1]) + + # figure out if the sides or top/bottom will be cropped off + if liveAreaAspectRatio >= aspectRatio: + # liveArea is wider than what's needed, crop the sides + cropWidth = int((aspectRatio * float(liveSize[1])) + 0.5) + cropHeight = liveSize[1] + else: + # liveArea is taller than what's needed, crop the top and bottom + cropWidth = liveSize[0] + cropHeight = int((float(liveSize[0])/aspectRatio) + 0.5) + + # make the crop + leftSide = int(liveArea[0] + (float(liveSize[0]-cropWidth) * centering[0])) + if leftSide < 0: + leftSide = 0 + topSide = int(liveArea[1] + (float(liveSize[1]-cropHeight) * centering[1])) + if topSide < 0: + topSide = 0 + + out = image.crop( + (leftSide, topSide, leftSide + cropWidth, topSide + cropHeight) + ) + + # resize the image and return it + return out.resize(size, method) + + +def 
flip(image): + """ + Flip the image vertically (top to bottom). + + :param image: The image to flip. + :return: An image. + """ + return image.transpose(Image.FLIP_TOP_BOTTOM) + + +def grayscale(image): + """ + Convert the image to grayscale. + + :param image: The image to convert. + :return: An image. + """ + return image.convert("L") + + +def invert(image): + """ + Invert (negate) the image. + + :param image: The image to invert. + :return: An image. + """ + lut = [] + for i in range(256): + lut.append(255-i) + return _lut(image, lut) + + +def mirror(image): + """ + Flip image horizontally (left to right). + + :param image: The image to mirror. + :return: An image. + """ + return image.transpose(Image.FLIP_LEFT_RIGHT) + + +def posterize(image, bits): + """ + Reduce the number of bits for each color channel. + + :param image: The image to posterize. + :param bits: The number of bits to keep for each channel (1-8). + :return: An image. + """ + lut = [] + mask = ~(2**(8-bits)-1) + for i in range(256): + lut.append(i & mask) + return _lut(image, lut) + + +def solarize(image, threshold=128): + """ + Invert all pixel values above a threshold. + + :param image: The image to solarize. + :param threshold: All pixels above this greyscale level are inverted. + :return: An image. + """ + lut = [] + for i in range(256): + if i < threshold: + lut.append(i) + else: + lut.append(255-i) + return _lut(image, lut) + + +# -------------------------------------------------------------------- +# PIL USM components, from Kevin Cazabon. + +def gaussian_blur(im, radius=None): + """ PIL_usm.gblur(im, [radius])""" + + warnings.warn( + 'PIL.ImageOps.gaussian_blur is deprecated. ' + 'Use PIL.ImageFilter.GaussianBlur instead. 
' + 'This function will be removed in a future version.', + DeprecationWarning + ) + + if radius is None: + radius = 5.0 + + im.load() + + return im.im.gaussian_blur(radius) + + +def gblur(im, radius=None): + """ PIL_usm.gblur(im, [radius])""" + + warnings.warn( + 'PIL.ImageOps.gblur is deprecated. ' + 'Use PIL.ImageFilter.GaussianBlur instead. ' + 'This function will be removed in a future version.', + DeprecationWarning + ) + + return gaussian_blur(im, radius) + + +def unsharp_mask(im, radius=None, percent=None, threshold=None): + """ PIL_usm.usm(im, [radius, percent, threshold])""" + + warnings.warn( + 'PIL.ImageOps.unsharp_mask is deprecated. ' + 'Use PIL.ImageFilter.UnsharpMask instead. ' + 'This function will be removed in a future version.', + DeprecationWarning + ) + + if radius is None: + radius = 5.0 + if percent is None: + percent = 150 + if threshold is None: + threshold = 3 + + im.load() + + return im.im.unsharp_mask(radius, percent, threshold) + + +def usm(im, radius=None, percent=None, threshold=None): + """ PIL_usm.usm(im, [radius, percent, threshold])""" + + warnings.warn( + 'PIL.ImageOps.usm is deprecated. ' + 'Use PIL.ImageFilter.UnsharpMask instead. ' + 'This function will be removed in a future version.', + DeprecationWarning + ) + + return unsharp_mask(im, radius, percent, threshold) + + +def box_blur(image, radius): + """ + Blur the image by setting each pixel to the average value of the pixels + in a square box extending radius pixels in each direction. + Supports float radius of arbitrary size. Uses an optimized implementation + which runs in linear time relative to the size of the image + for any radius value. + + :param image: The image to blur. + :param radius: Size of the box in one direction. Radius 0 does not blur, + returns an identical image. Radius 1 takes 1 pixel + in each direction, i.e. 9 pixels in total. + :return: An image. + """ + warnings.warn( + 'PIL.ImageOps.box_blur is deprecated. ' + 'Use PIL.ImageFilter.BoxBlur instead. 
' + 'This function will be removed in a future version.', + DeprecationWarning + ) + + image.load() + + return image._new(image.im.box_blur(radius)) diff --git a/thesisenv/lib/python3.6/site-packages/PIL/ImagePalette.py b/thesisenv/lib/python3.6/site-packages/PIL/ImagePalette.py new file mode 100644 index 0000000..81e99ab --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/PIL/ImagePalette.py @@ -0,0 +1,216 @@ +# +# The Python Imaging Library. +# $Id$ +# +# image palette object +# +# History: +# 1996-03-11 fl Rewritten. +# 1997-01-03 fl Up and running. +# 1997-08-23 fl Added load hack +# 2001-04-16 fl Fixed randint shadow bug in random() +# +# Copyright (c) 1997-2001 by Secret Labs AB +# Copyright (c) 1996-1997 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +import array +from . import ImageColor, GimpPaletteFile, GimpGradientFile, PaletteFile + + +class ImagePalette(object): + """ + Color palette for palette mapped images + + :param mode: The mode to use for the Palette. See: + :ref:`concept-modes`. Defaults to "RGB" + :param palette: An optional palette. If given, it must be a bytearray, + an array or a list of ints between 0-255 and of length ``size`` + times the number of colors in ``mode``. The list must be aligned + by channel (All R values must be contiguous in the list before G + and B values.) Defaults to 0 through 255 per channel. + :param size: An optional palette size. If given, it cannot be equal to + or greater than 256. Defaults to 0. 
+ """ + + def __init__(self, mode="RGB", palette=None, size=0): + self.mode = mode + self.rawmode = None # if set, palette contains raw data + self.palette = palette or bytearray(range(256))*len(self.mode) + self.colors = {} + self.dirty = None + if ((size == 0 and len(self.mode)*256 != len(self.palette)) or + (size != 0 and size != len(self.palette))): + raise ValueError("wrong palette size") + + def copy(self): + new = ImagePalette() + + new.mode = self.mode + new.rawmode = self.rawmode + if self.palette is not None: + new.palette = self.palette[:] + new.colors = self.colors.copy() + new.dirty = self.dirty + + return new + + def getdata(self): + """ + Get palette contents in format suitable for the low-level + ``im.putpalette`` primitive. + + .. warning:: This method is experimental. + """ + if self.rawmode: + return self.rawmode, self.palette + return self.mode + ";L", self.tobytes() + + def tobytes(self): + """Convert palette to bytes. + + .. warning:: This method is experimental. + """ + if self.rawmode: + raise ValueError("palette contains raw palette data") + if isinstance(self.palette, bytes): + return self.palette + arr = array.array("B", self.palette) + if hasattr(arr, 'tobytes'): + return arr.tobytes() + return arr.tostring() + + # Declare tostring as an alias for tobytes + tostring = tobytes + + def getcolor(self, color): + """Given an rgb tuple, allocate palette entry. + + .. warning:: This method is experimental. 
+ """ + if self.rawmode: + raise ValueError("palette contains raw palette data") + if isinstance(color, tuple): + try: + return self.colors[color] + except KeyError: + # allocate new color slot + if isinstance(self.palette, bytes): + self.palette = bytearray(self.palette) + index = len(self.colors) + if index >= 256: + raise ValueError("cannot allocate more than 256 colors") + self.colors[color] = index + self.palette[index] = color[0] + self.palette[index+256] = color[1] + self.palette[index+512] = color[2] + self.dirty = 1 + return index + else: + raise ValueError("unknown color specifier: %r" % color) + + def save(self, fp): + """Save palette to text file. + + .. warning:: This method is experimental. + """ + if self.rawmode: + raise ValueError("palette contains raw palette data") + if isinstance(fp, str): + fp = open(fp, "w") + fp.write("# Palette\n") + fp.write("# Mode: %s\n" % self.mode) + for i in range(256): + fp.write("%d" % i) + for j in range(i*len(self.mode), (i+1)*len(self.mode)): + try: + fp.write(" %d" % self.palette[j]) + except IndexError: + fp.write(" 0") + fp.write("\n") + fp.close() + + +# -------------------------------------------------------------------- +# Internal + +def raw(rawmode, data): + palette = ImagePalette() + palette.rawmode = rawmode + palette.palette = data + palette.dirty = 1 + return palette + + +# -------------------------------------------------------------------- +# Factories + +def make_linear_lut(black, white): + lut = [] + if black == 0: + for i in range(256): + lut.append(white*i//255) + else: + raise NotImplementedError # FIXME + return lut + + +def make_gamma_lut(exp): + lut = [] + for i in range(256): + lut.append(int(((i / 255.0) ** exp) * 255.0 + 0.5)) + return lut + + +def negative(mode="RGB"): + palette = list(range(256)) + palette.reverse() + return ImagePalette(mode, palette * len(mode)) + + +def random(mode="RGB"): + from random import randint + palette = [] + for i in range(256*len(mode)): + 
palette.append(randint(0, 255)) + return ImagePalette(mode, palette) + + +def sepia(white="#fff0c0"): + r, g, b = ImageColor.getrgb(white) + r = make_linear_lut(0, r) + g = make_linear_lut(0, g) + b = make_linear_lut(0, b) + return ImagePalette("RGB", r + g + b) + + +def wedge(mode="RGB"): + return ImagePalette(mode, list(range(256)) * len(mode)) + + +def load(filename): + + # FIXME: supports GIMP gradients only + + with open(filename, "rb") as fp: + + for paletteHandler in [ + GimpPaletteFile.GimpPaletteFile, + GimpGradientFile.GimpGradientFile, + PaletteFile.PaletteFile + ]: + try: + fp.seek(0) + lut = paletteHandler(fp).getpalette() + if lut: + break + except (SyntaxError, ValueError): + # import traceback + # traceback.print_exc() + pass + else: + raise IOError("cannot load palette") + + return lut # data, rawmode diff --git a/thesisenv/lib/python3.6/site-packages/PIL/ImagePath.py b/thesisenv/lib/python3.6/site-packages/PIL/ImagePath.py new file mode 100644 index 0000000..8cbfec0 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/PIL/ImagePath.py @@ -0,0 +1,20 @@ +# +# The Python Imaging Library +# $Id$ +# +# path interface +# +# History: +# 1996-11-04 fl Created +# 2002-04-14 fl Added documentation stub class +# +# Copyright (c) Secret Labs AB 1997. +# Copyright (c) Fredrik Lundh 1996. +# +# See the README file for information on usage and redistribution. +# + +from . import Image + + +Path = Image.core.path diff --git a/thesisenv/lib/python3.6/site-packages/PIL/ImageQt.py b/thesisenv/lib/python3.6/site-packages/PIL/ImageQt.py new file mode 100644 index 0000000..e602613 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/PIL/ImageQt.py @@ -0,0 +1,218 @@ +# +# The Python Imaging Library. +# $Id$ +# +# a simple Qt image interface. 
+# +# history: +# 2006-06-03 fl: created +# 2006-06-04 fl: inherit from QImage instead of wrapping it +# 2006-06-05 fl: removed toimage helper; move string support to ImageQt +# 2013-11-13 fl: add support for Qt5 (aurelien.ballier@cyclonit.com) +# +# Copyright (c) 2006 by Secret Labs AB +# Copyright (c) 2006 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +from . import Image +from ._util import isPath, py3 +from io import BytesIO +import sys + +qt_versions = [ + ['5', 'PyQt5'], + ['side2', 'PySide2'], + ['4', 'PyQt4'], + ['side', 'PySide'] +] +# If a version has already been imported, attempt it first +qt_versions.sort(key=lambda qt_version: qt_version[1] in sys.modules, + reverse=True) +for qt_version, qt_module in qt_versions: + try: + if qt_module == 'PyQt5': + from PyQt5.QtGui import QImage, qRgba, QPixmap + from PyQt5.QtCore import QBuffer, QIODevice + elif qt_module == 'PySide2': + from PySide2.QtGui import QImage, qRgba, QPixmap + from PySide2.QtCore import QBuffer, QIODevice + elif qt_module == 'PyQt4': + from PyQt4.QtGui import QImage, qRgba, QPixmap + from PyQt4.QtCore import QBuffer, QIODevice + elif qt_module == 'PySide': + from PySide.QtGui import QImage, qRgba, QPixmap + from PySide.QtCore import QBuffer, QIODevice + except (ImportError, RuntimeError): + continue + qt_is_installed = True + break +else: + qt_is_installed = False + qt_version = None + + +def rgb(r, g, b, a=255): + """(Internal) Turns an RGB color into a Qt compatible color integer.""" + # use qRgb to pack the colors, and then turn the resulting long + # into a negative integer with the same bitpattern. 
+ return (qRgba(r, g, b, a) & 0xffffffff) + + +def fromqimage(im): + """ + :param im: A PIL Image object, or a file name + (given either as Python string or a PyQt string object) + """ + buffer = QBuffer() + buffer.open(QIODevice.ReadWrite) + # preserve alha channel with png + # otherwise ppm is more friendly with Image.open + if im.hasAlphaChannel(): + im.save(buffer, 'png') + else: + im.save(buffer, 'ppm') + + b = BytesIO() + try: + b.write(buffer.data()) + except TypeError: + # workaround for Python 2 + b.write(str(buffer.data())) + buffer.close() + b.seek(0) + + return Image.open(b) + + +def fromqpixmap(im): + return fromqimage(im) + # buffer = QBuffer() + # buffer.open(QIODevice.ReadWrite) + # # im.save(buffer) + # # What if png doesn't support some image features like animation? + # im.save(buffer, 'ppm') + # bytes_io = BytesIO() + # bytes_io.write(buffer.data()) + # buffer.close() + # bytes_io.seek(0) + # return Image.open(bytes_io) + + +def align8to32(bytes, width, mode): + """ + converts each scanline of data from 8 bit to 32 bit aligned + """ + + bits_per_pixel = { + '1': 1, + 'L': 8, + 'P': 8, + }[mode] + + # calculate bytes per line and the extra padding if needed + bits_per_line = bits_per_pixel * width + full_bytes_per_line, remaining_bits_per_line = divmod(bits_per_line, 8) + bytes_per_line = full_bytes_per_line + (1 if remaining_bits_per_line else 0) + + extra_padding = -bytes_per_line % 4 + + # already 32 bit aligned by luck + if not extra_padding: + return bytes + + new_data = [] + for i in range(len(bytes) // bytes_per_line): + new_data.append(bytes[i*bytes_per_line:(i+1)*bytes_per_line] + + b'\x00' * extra_padding) + + return b''.join(new_data) + + +def _toqclass_helper(im): + data = None + colortable = None + + # handle filename, if given instead of image name + if hasattr(im, "toUtf8"): + # FIXME - is this really the best way to do this? 
+ if py3: + im = str(im.toUtf8(), "utf-8") + else: + im = unicode(im.toUtf8(), "utf-8") + if isPath(im): + im = Image.open(im) + + if im.mode == "1": + format = QImage.Format_Mono + elif im.mode == "L": + format = QImage.Format_Indexed8 + colortable = [] + for i in range(256): + colortable.append(rgb(i, i, i)) + elif im.mode == "P": + format = QImage.Format_Indexed8 + colortable = [] + palette = im.getpalette() + for i in range(0, len(palette), 3): + colortable.append(rgb(*palette[i:i+3])) + elif im.mode == "RGB": + data = im.tobytes("raw", "BGRX") + format = QImage.Format_RGB32 + elif im.mode == "RGBA": + try: + data = im.tobytes("raw", "BGRA") + except SystemError: + # workaround for earlier versions + r, g, b, a = im.split() + im = Image.merge("RGBA", (b, g, r, a)) + format = QImage.Format_ARGB32 + else: + raise ValueError("unsupported image mode %r" % im.mode) + + __data = data or align8to32(im.tobytes(), im.size[0], im.mode) + return { + 'data': __data, 'im': im, 'format': format, 'colortable': colortable + } + + +if qt_is_installed: + class ImageQt(QImage): + + def __init__(self, im): + """ + An PIL image wrapper for Qt. This is a subclass of PyQt's QImage + class. + + :param im: A PIL Image object, or a file name (given either as Python + string or a PyQt string object). + """ + im_data = _toqclass_helper(im) + # must keep a reference, or Qt will crash! + # All QImage constructors that take data operate on an existing + # buffer, so this buffer has to hang on for the life of the image. + # Fixes https://github.com/python-pillow/Pillow/issues/1370 + self.__data = im_data['data'] + QImage.__init__(self, + self.__data, im_data['im'].size[0], + im_data['im'].size[1], im_data['format']) + if im_data['colortable']: + self.setColorTable(im_data['colortable']) + + +def toqimage(im): + return ImageQt(im) + + +def toqpixmap(im): + # # This doesn't work. For now using a dumb approach. 
+ # im_data = _toqclass_helper(im) + # result = QPixmap(im_data['im'].size[0], im_data['im'].size[1]) + # result.loadFromData(im_data['data']) + # Fix some strange bug that causes + if im.mode == 'RGB': + im = im.convert('RGBA') + + qimage = toqimage(im) + return QPixmap.fromImage(qimage) diff --git a/thesisenv/lib/python3.6/site-packages/PIL/ImageSequence.py b/thesisenv/lib/python3.6/site-packages/PIL/ImageSequence.py new file mode 100644 index 0000000..1fc6e5d --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/PIL/ImageSequence.py @@ -0,0 +1,56 @@ +# +# The Python Imaging Library. +# $Id$ +# +# sequence support classes +# +# history: +# 1997-02-20 fl Created +# +# Copyright (c) 1997 by Secret Labs AB. +# Copyright (c) 1997 by Fredrik Lundh. +# +# See the README file for information on usage and redistribution. +# + +## + + +class Iterator(object): + """ + This class implements an iterator object that can be used to loop + over an image sequence. + + You can use the ``[]`` operator to access elements by index. This operator + will raise an :py:exc:`IndexError` if you try to access a nonexistent + frame. + + :param im: An image object. + """ + + def __init__(self, im): + if not hasattr(im, "seek"): + raise AttributeError("im must have seek method") + self.im = im + self.position = 0 + + def __getitem__(self, ix): + try: + self.im.seek(ix) + return self.im + except EOFError: + raise IndexError # end of sequence + + def __iter__(self): + return self + + def __next__(self): + try: + self.im.seek(self.position) + self.position += 1 + return self.im + except EOFError: + raise StopIteration + + def next(self): + return self.__next__() diff --git a/thesisenv/lib/python3.6/site-packages/PIL/ImageShow.py b/thesisenv/lib/python3.6/site-packages/PIL/ImageShow.py new file mode 100644 index 0000000..b50d613 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/PIL/ImageShow.py @@ -0,0 +1,194 @@ +# +# The Python Imaging Library. 
+# $Id$ +# +# im.show() drivers +# +# History: +# 2008-04-06 fl Created +# +# Copyright (c) Secret Labs AB 2008. +# +# See the README file for information on usage and redistribution. +# + +from __future__ import print_function + +from PIL import Image +import os +import sys + +if sys.version_info.major >= 3: + from shlex import quote +else: + from pipes import quote + +_viewers = [] + + +def register(viewer, order=1): + try: + if issubclass(viewer, Viewer): + viewer = viewer() + except TypeError: + pass # raised if viewer wasn't a class + if order > 0: + _viewers.append(viewer) + elif order < 0: + _viewers.insert(0, viewer) + + +def show(image, title=None, **options): + r""" + Display a given image. + + :param image: An image object. + :param title: Optional title. Not all viewers can display the title. + :param \**options: Additional viewer options. + :returns: True if a suitable viewer was found, false otherwise. + """ + for viewer in _viewers: + if viewer.show(image, title=title, **options): + return 1 + return 0 + + +class Viewer(object): + """Base class for viewers.""" + + # main api + + def show(self, image, **options): + + # save temporary image to disk + if image.mode[:4] == "I;16": + # @PIL88 @PIL101 + # "I;16" isn't an 'official' mode, but we still want to + # provide a simple way to show 16-bit images. + base = "L" + # FIXME: auto-contrast if max() > 255? 
+ else: + base = Image.getmodebase(image.mode) + if base != image.mode and image.mode != "1" and image.mode != "RGBA": + image = image.convert(base) + + return self.show_image(image, **options) + + # hook methods + + format = None + options = {} + + def get_format(self, image): + """Return format name, or None to save as PGM/PPM""" + return self.format + + def get_command(self, file, **options): + raise NotImplementedError + + def save_image(self, image): + """Save to temporary file, and return filename""" + return image._dump(format=self.get_format(image), **self.options) + + def show_image(self, image, **options): + """Display given image""" + return self.show_file(self.save_image(image), **options) + + def show_file(self, file, **options): + """Display given file""" + os.system(self.get_command(file, **options)) + return 1 + +# -------------------------------------------------------------------- + + +if sys.platform == "win32": + + class WindowsViewer(Viewer): + format = "BMP" + + def get_command(self, file, **options): + return ('start "Pillow" /WAIT "%s" ' + '&& ping -n 2 127.0.0.1 >NUL ' + '&& del /f "%s"' % (file, file)) + + register(WindowsViewer) + +elif sys.platform == "darwin": + + class MacViewer(Viewer): + format = "PNG" + options = {'compress_level': 1} + + def get_command(self, file, **options): + # on darwin open returns immediately resulting in the temp + # file removal while app is opening + command = "open -a /Applications/Preview.app" + command = "(%s %s; sleep 20; rm -f %s)&" % (command, quote(file), + quote(file)) + return command + + register(MacViewer) + +else: + + # unixoids + + def which(executable): + path = os.environ.get("PATH") + if not path: + return None + for dirname in path.split(os.pathsep): + filename = os.path.join(dirname, executable) + if os.path.isfile(filename) and os.access(filename, os.X_OK): + return filename + return None + + class UnixViewer(Viewer): + format = "PNG" + options = {'compress_level': 1} + + def 
show_file(self, file, **options): + command, executable = self.get_command_ex(file, **options) + command = "(%s %s; rm -f %s)&" % (command, quote(file), + quote(file)) + os.system(command) + return 1 + + # implementations + + class DisplayViewer(UnixViewer): + def get_command_ex(self, file, **options): + command = executable = "display" + return command, executable + + if which("display"): + register(DisplayViewer) + + class EogViewer(UnixViewer): + def get_command_ex(self, file, **options): + command = executable = "eog" + return command, executable + + if which("eog"): + register(EogViewer) + + class XVViewer(UnixViewer): + def get_command_ex(self, file, title=None, **options): + # note: xv is pretty outdated. most modern systems have + # imagemagick's display command instead. + command = executable = "xv" + if title: + command += " -name %s" % quote(title) + return command, executable + + if which("xv"): + register(XVViewer) + +if __name__ == "__main__": + + if len(sys.argv) < 2: + print("Syntax: python ImageShow.py imagefile [title]") + sys.exit() + + print(show(Image.open(sys.argv[1]), *sys.argv[2:])) diff --git a/thesisenv/lib/python3.6/site-packages/PIL/ImageStat.py b/thesisenv/lib/python3.6/site-packages/PIL/ImageStat.py new file mode 100644 index 0000000..d4b38d8 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/PIL/ImageStat.py @@ -0,0 +1,148 @@ +# +# The Python Imaging Library. +# $Id$ +# +# global image statistics +# +# History: +# 1996-04-05 fl Created +# 1997-05-21 fl Added mask; added rms, var, stddev attributes +# 1997-08-05 fl Added median +# 1998-07-05 hk Fixed integer overflow error +# +# Notes: +# This class shows how to implement delayed evaluation of attributes. +# To get a certain value, simply access the corresponding attribute. +# The __getattr__ dispatcher takes care of the rest. +# +# Copyright (c) Secret Labs AB 1997. +# Copyright (c) Fredrik Lundh 1996-97. +# +# See the README file for information on usage and redistribution. 
+# + +import math +import operator +import functools + + +class Stat(object): + + def __init__(self, image_or_list, mask=None): + try: + if mask: + self.h = image_or_list.histogram(mask) + else: + self.h = image_or_list.histogram() + except AttributeError: + self.h = image_or_list # assume it to be a histogram list + if not isinstance(self.h, list): + raise TypeError("first argument must be image or list") + self.bands = list(range(len(self.h) // 256)) + + def __getattr__(self, id): + "Calculate missing attribute" + if id[:4] == "_get": + raise AttributeError(id) + # calculate missing attribute + v = getattr(self, "_get" + id)() + setattr(self, id, v) + return v + + def _getextrema(self): + "Get min/max values for each band in the image" + + def minmax(histogram): + n = 255 + x = 0 + for i in range(256): + if histogram[i]: + n = min(n, i) + x = max(x, i) + return n, x # returns (255, 0) if there's no data in the histogram + + v = [] + for i in range(0, len(self.h), 256): + v.append(minmax(self.h[i:])) + return v + + def _getcount(self): + "Get total number of pixels in each layer" + + v = [] + for i in range(0, len(self.h), 256): + v.append(functools.reduce(operator.add, self.h[i:i+256])) + return v + + def _getsum(self): + "Get sum of all pixels in each layer" + + v = [] + for i in range(0, len(self.h), 256): + layerSum = 0.0 + for j in range(256): + layerSum += j * self.h[i + j] + v.append(layerSum) + return v + + def _getsum2(self): + "Get squared sum of all pixels in each layer" + + v = [] + for i in range(0, len(self.h), 256): + sum2 = 0.0 + for j in range(256): + sum2 += (j ** 2) * float(self.h[i + j]) + v.append(sum2) + return v + + def _getmean(self): + "Get average pixel level for each layer" + + v = [] + for i in self.bands: + v.append(self.sum[i] / self.count[i]) + return v + + def _getmedian(self): + "Get median pixel level for each layer" + + v = [] + for i in self.bands: + s = 0 + half = self.count[i]//2 + b = i * 256 + for j in range(256): + s = s + 
self.h[b+j] + if s > half: + break + v.append(j) + return v + + def _getrms(self): + "Get RMS for each layer" + + v = [] + for i in self.bands: + v.append(math.sqrt(self.sum2[i] / self.count[i])) + return v + + def _getvar(self): + "Get variance for each layer" + + v = [] + for i in self.bands: + n = self.count[i] + v.append((self.sum2[i]-(self.sum[i]**2.0)/n)/n) + return v + + def _getstddev(self): + "Get standard deviation for each layer" + + v = [] + for i in self.bands: + v.append(math.sqrt(self.var[i])) + return v + + +Global = Stat # compatibility diff --git a/thesisenv/lib/python3.6/site-packages/PIL/ImageTk.py b/thesisenv/lib/python3.6/site-packages/PIL/ImageTk.py new file mode 100644 index 0000000..c56f556 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/PIL/ImageTk.py @@ -0,0 +1,301 @@ +# +# The Python Imaging Library. +# $Id$ +# +# a Tk display interface +# +# History: +# 96-04-08 fl Created +# 96-09-06 fl Added getimage method +# 96-11-01 fl Rewritten, removed image attribute and crop method +# 97-05-09 fl Use PyImagingPaste method instead of image type +# 97-05-12 fl Minor tweaks to match the IFUNC95 interface +# 97-05-17 fl Support the "pilbitmap" booster patch +# 97-06-05 fl Added file= and data= argument to image constructors +# 98-03-09 fl Added width and height methods to Image classes +# 98-07-02 fl Use default mode for "P" images without palette attribute +# 98-07-02 fl Explicitly destroy Tkinter image objects +# 99-07-24 fl Support multiple Tk interpreters (from Greg Couch) +# 99-07-26 fl Automatically hook into Tkinter (if possible) +# 99-08-15 fl Hook uses _imagingtk instead of _imaging +# +# Copyright (c) 1997-1999 by Secret Labs AB +# Copyright (c) 1996-1997 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +import sys + +if sys.version_info.major > 2: + import tkinter +else: + import Tkinter as tkinter + +from . 
import Image +from io import BytesIO + + +# -------------------------------------------------------------------- +# Check for Tkinter interface hooks + +_pilbitmap_ok = None + + +def _pilbitmap_check(): + global _pilbitmap_ok + if _pilbitmap_ok is None: + try: + im = Image.new("1", (1, 1)) + tkinter.BitmapImage(data="PIL:%d" % im.im.id) + _pilbitmap_ok = 1 + except tkinter.TclError: + _pilbitmap_ok = 0 + return _pilbitmap_ok + + +def _get_image_from_kw(kw): + source = None + if "file" in kw: + source = kw.pop("file") + elif "data" in kw: + source = BytesIO(kw.pop("data")) + if source: + return Image.open(source) + + +# -------------------------------------------------------------------- +# PhotoImage + +class PhotoImage(object): + """ + A Tkinter-compatible photo image. This can be used + everywhere Tkinter expects an image object. If the image is an RGBA + image, pixels having alpha 0 are treated as transparent. + + The constructor takes either a PIL image, or a mode and a size. + Alternatively, you can use the **file** or **data** options to initialize + the photo image object. + + :param image: Either a PIL image, or a mode string. If a mode string is + used, a size must also be given. + :param size: If the first argument is a mode string, this defines the size + of the image. + :keyword file: A filename to load the image from (using + ``Image.open(file)``). + :keyword data: An 8-bit string containing image data (as loaded from an + image file). 
+ """ + + def __init__(self, image=None, size=None, **kw): + + # Tk compatibility: file or data + if image is None: + image = _get_image_from_kw(kw) + + if hasattr(image, "mode") and hasattr(image, "size"): + # got an image instead of a mode + mode = image.mode + if mode == "P": + # palette mapped data + image.load() + try: + mode = image.palette.mode + except AttributeError: + mode = "RGB" # default + size = image.size + kw["width"], kw["height"] = size + else: + mode = image + image = None + + if mode not in ["1", "L", "RGB", "RGBA"]: + mode = Image.getmodebase(mode) + + self.__mode = mode + self.__size = size + self.__photo = tkinter.PhotoImage(**kw) + self.tk = self.__photo.tk + if image: + self.paste(image) + + def __del__(self): + name = self.__photo.name + self.__photo.name = None + try: + self.__photo.tk.call("image", "delete", name) + except: + pass # ignore internal errors + + def __str__(self): + """ + Get the Tkinter photo image identifier. This method is automatically + called by Tkinter whenever a PhotoImage object is passed to a Tkinter + method. + + :return: A Tkinter photo image identifier (a string). + """ + return str(self.__photo) + + def width(self): + """ + Get the width of the image. + + :return: The width, in pixels. + """ + return self.__size[0] + + def height(self): + """ + Get the height of the image. + + :return: The height, in pixels. + """ + return self.__size[1] + + def paste(self, im, box=None): + """ + Paste a PIL image into the photo image. Note that this can + be very slow if the photo image is displayed. + + :param im: A PIL image. The size must match the target region. If the + mode does not match, the image is converted to the mode of + the bitmap image. + :param box: A 4-tuple defining the left, upper, right, and lower pixel + coordinate. See :ref:`coordinate-system`. If None is given + instead of a tuple, all of the image is assumed. 
+ """ + + # convert to blittable + im.load() + image = im.im + if image.isblock() and im.mode == self.__mode: + block = image + else: + block = image.new_block(self.__mode, im.size) + image.convert2(block, image) # convert directly between buffers + + tk = self.__photo.tk + + try: + tk.call("PyImagingPhoto", self.__photo, block.id) + except tkinter.TclError: + # activate Tkinter hook + try: + from . import _imagingtk + try: + if hasattr(tk, 'interp'): + # Required for PyPy, which always has CFFI installed + from cffi import FFI + ffi = FFI() + + # PyPy is using an FFI CDATA element + # (Pdb) self.tk.interp + # + _imagingtk.tkinit( + int(ffi.cast("uintptr_t", tk.interp)), 1) + else: + _imagingtk.tkinit(tk.interpaddr(), 1) + except AttributeError: + _imagingtk.tkinit(id(tk), 0) + tk.call("PyImagingPhoto", self.__photo, block.id) + except (ImportError, AttributeError, tkinter.TclError): + raise # configuration problem; cannot attach to Tkinter + +# -------------------------------------------------------------------- +# BitmapImage + + +class BitmapImage(object): + """ + A Tkinter-compatible bitmap image. This can be used everywhere Tkinter + expects an image object. + + The given image must have mode "1". Pixels having value 0 are treated as + transparent. Options, if any, are passed on to Tkinter. The most commonly + used option is **foreground**, which is used to specify the color for the + non-transparent parts. See the Tkinter documentation for information on + how to specify colours. + + :param image: A PIL image. 
+ """ + + def __init__(self, image=None, **kw): + + # Tk compatibility: file or data + if image is None: + image = _get_image_from_kw(kw) + + self.__mode = image.mode + self.__size = image.size + + if _pilbitmap_check(): + # fast way (requires the pilbitmap booster patch) + image.load() + kw["data"] = "PIL:%d" % image.im.id + self.__im = image # must keep a reference + else: + # slow but safe way + kw["data"] = image.tobitmap() + self.__photo = tkinter.BitmapImage(**kw) + + def __del__(self): + name = self.__photo.name + self.__photo.name = None + try: + self.__photo.tk.call("image", "delete", name) + except: + pass # ignore internal errors + + def width(self): + """ + Get the width of the image. + + :return: The width, in pixels. + """ + return self.__size[0] + + def height(self): + """ + Get the height of the image. + + :return: The height, in pixels. + """ + return self.__size[1] + + def __str__(self): + """ + Get the Tkinter bitmap image identifier. This method is automatically + called by Tkinter whenever a BitmapImage object is passed to a Tkinter + method. + + :return: A Tkinter bitmap image identifier (a string). 
+ """ + return str(self.__photo) + + +def getimage(photo): + """ This function is unimplemented """ + + """Copies the contents of a PhotoImage to a PIL image memory.""" + photo.tk.call("PyImagingPhotoGet", photo) + + +def _show(image, title): + """Helper for the Image.show method.""" + + class UI(tkinter.Label): + def __init__(self, master, im): + if im.mode == "1": + self.image = BitmapImage(im, foreground="white", master=master) + else: + self.image = PhotoImage(im, master=master) + tkinter.Label.__init__(self, master, image=self.image, + bg="black", bd=0) + + if not tkinter._default_root: + raise IOError("tkinter not initialized") + top = tkinter.Toplevel() + if title: + top.title(title) + UI(top, image).pack() diff --git a/thesisenv/lib/python3.6/site-packages/PIL/ImageTransform.py b/thesisenv/lib/python3.6/site-packages/PIL/ImageTransform.py new file mode 100644 index 0000000..c3f6af8 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/PIL/ImageTransform.py @@ -0,0 +1,98 @@ +# +# The Python Imaging Library. +# $Id$ +# +# transform wrappers +# +# History: +# 2002-04-08 fl Created +# +# Copyright (c) 2002 by Secret Labs AB +# Copyright (c) 2002 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +from . import Image + + +class Transform(Image.ImageTransformHandler): + def __init__(self, data): + self.data = data + + def getdata(self): + return self.method, self.data + + def transform(self, size, image, **options): + # can be overridden + method, data = self.getdata() + return image.transform(size, method, data, **options) + + +class AffineTransform(Transform): + """ + Define an affine image transform. + + This function takes a 6-tuple (a, b, c, d, e, f) which contain the first + two rows from an affine transform matrix. For each pixel (x, y) in the + output image, the new value is taken from a position (a x + b y + c, + d x + e y + f) in the input image, rounded to nearest pixel. 
+ + This function can be used to scale, translate, rotate, and shear the + original image. + + See :py:meth:`~PIL.Image.Image.transform` + + :param matrix: A 6-tuple (a, b, c, d, e, f) containing the first two rows + from an affine transform matrix. + """ + method = Image.AFFINE + + +class ExtentTransform(Transform): + """ + Define a transform to extract a subregion from an image. + + Maps a rectangle (defined by two corners) from the image to a rectangle of + the given size. The resulting image will contain data sampled from between + the corners, such that (x0, y0) in the input image will end up at (0,0) in + the output image, and (x1, y1) at size. + + This method can be used to crop, stretch, shrink, or mirror an arbitrary + rectangle in the current image. It is slightly slower than crop, but about + as fast as a corresponding resize operation. + + See :py:meth:`~PIL.Image.Image.transform` + + :param bbox: A 4-tuple (x0, y0, x1, y1) which specifies two points in the + input image's coordinate system. See :ref:`coordinate-system`. + """ + method = Image.EXTENT + + +class QuadTransform(Transform): + """ + Define a quad image transform. + + Maps a quadrilateral (a region defined by four corners) from the image to a + rectangle of the given size. + + See :py:meth:`~PIL.Image.Image.transform` + + :param xy: An 8-tuple (x0, y0, x1, y1, x2, y2, x3, y3) which contain the + upper left, lower left, lower right, and upper right corner of the + source quadrilateral. + """ + method = Image.QUAD + + +class MeshTransform(Transform): + """ + Define a mesh image transform. A mesh transform consists of one or more + individual quad transforms. + + See :py:meth:`~PIL.Image.Image.transform` + + :param data: A list of (bbox, quad) tuples. 
+ """ + method = Image.MESH diff --git a/thesisenv/lib/python3.6/site-packages/PIL/ImageWin.py b/thesisenv/lib/python3.6/site-packages/PIL/ImageWin.py new file mode 100644 index 0000000..9b86270 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/PIL/ImageWin.py @@ -0,0 +1,228 @@ +# +# The Python Imaging Library. +# $Id$ +# +# a Windows DIB display interface +# +# History: +# 1996-05-20 fl Created +# 1996-09-20 fl Fixed subregion exposure +# 1997-09-21 fl Added draw primitive (for tzPrint) +# 2003-05-21 fl Added experimental Window/ImageWindow classes +# 2003-09-05 fl Added fromstring/tostring methods +# +# Copyright (c) Secret Labs AB 1997-2003. +# Copyright (c) Fredrik Lundh 1996-2003. +# +# See the README file for information on usage and redistribution. +# + +from . import Image + + +class HDC(object): + """ + Wraps an HDC integer. The resulting object can be passed to the + :py:meth:`~PIL.ImageWin.Dib.draw` and :py:meth:`~PIL.ImageWin.Dib.expose` + methods. + """ + def __init__(self, dc): + self.dc = dc + + def __int__(self): + return self.dc + + +class HWND(object): + """ + Wraps an HWND integer. The resulting object can be passed to the + :py:meth:`~PIL.ImageWin.Dib.draw` and :py:meth:`~PIL.ImageWin.Dib.expose` + methods, instead of a DC. + """ + def __init__(self, wnd): + self.wnd = wnd + + def __int__(self): + return self.wnd + + +class Dib(object): + """ + A Windows bitmap with the given mode and size. The mode can be one of "1", + "L", "P", or "RGB". + + If the display requires a palette, this constructor creates a suitable + palette and associates it with the image. For an "L" image, 128 greylevels + are allocated. For an "RGB" image, a 6x6x6 colour cube is used, together + with 20 greylevels. + + To make sure that palettes work properly under Windows, you must call the + **palette** method upon certain events from Windows. + + :param image: Either a PIL image, or a mode string. If a mode string is + used, a size must also be given. 
The mode can be one of "1", + "L", "P", or "RGB". + :param size: If the first argument is a mode string, this + defines the size of the image. + """ + + def __init__(self, image, size=None): + if hasattr(image, "mode") and hasattr(image, "size"): + mode = image.mode + size = image.size + else: + mode = image + image = None + if mode not in ["1", "L", "P", "RGB"]: + mode = Image.getmodebase(mode) + self.image = Image.core.display(mode, size) + self.mode = mode + self.size = size + if image: + self.paste(image) + + def expose(self, handle): + """ + Copy the bitmap contents to a device context. + + :param handle: Device context (HDC), cast to a Python integer, or an + HDC or HWND instance. In PythonWin, you can use the + :py:meth:`CDC.GetHandleAttrib` to get a suitable handle. + """ + if isinstance(handle, HWND): + dc = self.image.getdc(handle) + try: + result = self.image.expose(dc) + finally: + self.image.releasedc(handle, dc) + else: + result = self.image.expose(handle) + return result + + def draw(self, handle, dst, src=None): + """ + Same as expose, but allows you to specify where to draw the image, and + what part of it to draw. + + The destination and source areas are given as 4-tuple rectangles. If + the source is omitted, the entire image is copied. If the source and + the destination have different sizes, the image is resized as + necessary. + """ + if not src: + src = (0, 0) + self.size + if isinstance(handle, HWND): + dc = self.image.getdc(handle) + try: + result = self.image.draw(dc, dst, src) + finally: + self.image.releasedc(handle, dc) + else: + result = self.image.draw(handle, dst, src) + return result + + def query_palette(self, handle): + """ + Installs the palette associated with the image in the given device + context. + + This method should be called upon **QUERYNEWPALETTE** and + **PALETTECHANGED** events from Windows. If this method returns a + non-zero value, one or more display palette entries were changed, and + the image should be redrawn. 
+ + :param handle: Device context (HDC), cast to a Python integer, or an + HDC or HWND instance. + :return: A true value if one or more entries were changed (this + indicates that the image should be redrawn). + """ + if isinstance(handle, HWND): + handle = self.image.getdc(handle) + try: + result = self.image.query_palette(handle) + finally: + self.image.releasedc(handle, handle) + else: + result = self.image.query_palette(handle) + return result + + def paste(self, im, box=None): + """ + Paste a PIL image into the bitmap image. + + :param im: A PIL image. The size must match the target region. + If the mode does not match, the image is converted to the + mode of the bitmap image. + :param box: A 4-tuple defining the left, upper, right, and + lower pixel coordinate. See :ref:`coordinate-system`. If + None is given instead of a tuple, all of the image is + assumed. + """ + im.load() + if self.mode != im.mode: + im = im.convert(self.mode) + if box: + self.image.paste(im.im, box) + else: + self.image.paste(im.im) + + def frombytes(self, buffer): + """ + Load display memory contents from byte data. + + :param buffer: A buffer containing display data (usually + data returned from tobytes) + """ + return self.image.frombytes(buffer) + + def tobytes(self): + """ + Copy display memory contents to bytes object. + + :return: A bytes object containing display data. 
+ """ + return self.image.tobytes() + + +class Window(object): + """Create a Window with the given title size.""" + + def __init__(self, title="PIL", width=None, height=None): + self.hwnd = Image.core.createwindow( + title, self.__dispatcher, width or 0, height or 0 + ) + + def __dispatcher(self, action, *args): + return getattr(self, "ui_handle_" + action)(*args) + + def ui_handle_clear(self, dc, x0, y0, x1, y1): + pass + + def ui_handle_damage(self, x0, y0, x1, y1): + pass + + def ui_handle_destroy(self): + pass + + def ui_handle_repair(self, dc, x0, y0, x1, y1): + pass + + def ui_handle_resize(self, width, height): + pass + + def mainloop(self): + Image.core.eventloop() + + +class ImageWindow(Window): + """Create an image window which displays the given image.""" + + def __init__(self, image, title="PIL"): + if not isinstance(image, Dib): + image = Dib(image) + self.image = image + width, height = image.size + Window.__init__(self, title, width=width, height=height) + + def ui_handle_repair(self, dc, x0, y0, x1, y1): + self.image.draw(dc, (x0, y0, x1, y1)) diff --git a/thesisenv/lib/python3.6/site-packages/PIL/ImtImagePlugin.py b/thesisenv/lib/python3.6/site-packages/PIL/ImtImagePlugin.py new file mode 100644 index 0000000..5a6623c --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/PIL/ImtImagePlugin.py @@ -0,0 +1,95 @@ +# +# The Python Imaging Library. +# $Id$ +# +# IM Tools support for PIL +# +# history: +# 1996-05-27 fl Created (read 8-bit images only) +# 2001-02-17 fl Use 're' instead of 'regex' (Python 2.1) (0.2) +# +# Copyright (c) Secret Labs AB 1997-2001. +# Copyright (c) Fredrik Lundh 1996-2001. +# +# See the README file for information on usage and redistribution. +# + + +import re + +from . import Image, ImageFile + +__version__ = "0.2" + + +# +# -------------------------------------------------------------------- + +field = re.compile(br"([a-z]*) ([^ \r\n]*)") + + +## +# Image plugin for IM Tools images. 
+ +class ImtImageFile(ImageFile.ImageFile): + + format = "IMT" + format_description = "IM Tools" + + def _open(self): + + # Quick rejection: if there's not a LF among the first + # 100 bytes, this is (probably) not a text header. + + if b"\n" not in self.fp.read(100): + raise SyntaxError("not an IM file") + self.fp.seek(0) + + xsize = ysize = 0 + + while True: + + s = self.fp.read(1) + if not s: + break + + if s == b'\x0C': + + # image data begins + self.tile = [("raw", (0, 0)+self.size, + self.fp.tell(), + (self.mode, 0, 1))] + + break + + else: + + # read key/value pair + # FIXME: dangerous, may read whole file + s = s + self.fp.readline() + if len(s) == 1 or len(s) > 100: + break + if s[0] == ord(b"*"): + continue # comment + + m = field.match(s) + if not m: + break + k, v = m.group(1, 2) + if k == "width": + xsize = int(v) + self._size = xsize, ysize + elif k == "height": + ysize = int(v) + self._size = xsize, ysize + elif k == "pixel" and v == "n8": + self.mode = "L" + + +# +# -------------------------------------------------------------------- + +Image.register_open(ImtImageFile.format, ImtImageFile) + +# +# no extension registered (".im" is simply too common) diff --git a/thesisenv/lib/python3.6/site-packages/PIL/IptcImagePlugin.py b/thesisenv/lib/python3.6/site-packages/PIL/IptcImagePlugin.py new file mode 100644 index 0000000..b63e1ab --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/PIL/IptcImagePlugin.py @@ -0,0 +1,255 @@ +# +# The Python Imaging Library. +# $Id$ +# +# IPTC/NAA file handling +# +# history: +# 1995-10-01 fl Created +# 1998-03-09 fl Cleaned up and added to PIL +# 2002-06-18 fl Added getiptcinfo helper +# +# Copyright (c) Secret Labs AB 1997-2002. +# Copyright (c) Fredrik Lundh 1995. +# +# See the README file for information on usage and redistribution. +# + +from __future__ import print_function + +from . 
import Image, ImageFile +from ._binary import i8, i16be as i16, i32be as i32, o8 +import os +import tempfile + +__version__ = "0.3" + +COMPRESSION = { + 1: "raw", + 5: "jpeg" +} + +PAD = o8(0) * 4 + + +# +# Helpers + +def i(c): + return i32((PAD + c)[-4:]) + + +def dump(c): + for i in c: + print("%02x" % i8(i), end=' ') + print() + + +## +# Image plugin for IPTC/NAA datastreams. To read IPTC/NAA fields +# from TIFF and JPEG files, use the getiptcinfo function. + +class IptcImageFile(ImageFile.ImageFile): + + format = "IPTC" + format_description = "IPTC/NAA" + + def getint(self, key): + return i(self.info[key]) + + def field(self): + # + # get a IPTC field header + s = self.fp.read(5) + if not len(s): + return None, 0 + + tag = i8(s[1]), i8(s[2]) + + # syntax + if i8(s[0]) != 0x1C or tag[0] < 1 or tag[0] > 9: + raise SyntaxError("invalid IPTC/NAA file") + + # field size + size = i8(s[3]) + if size > 132: + raise IOError("illegal field length in IPTC/NAA file") + elif size == 128: + size = 0 + elif size > 128: + size = i(self.fp.read(size-128)) + else: + size = i16(s[3:]) + + return tag, size + + def _open(self): + + # load descriptive fields + while True: + offset = self.fp.tell() + tag, size = self.field() + if not tag or tag == (8, 10): + break + if size: + tagdata = self.fp.read(size) + else: + tagdata = None + if tag in self.info: + if isinstance(self.info[tag], list): + self.info[tag].append(tagdata) + else: + self.info[tag] = [self.info[tag], tagdata] + else: + self.info[tag] = tagdata + + # mode + layers = i8(self.info[(3, 60)][0]) + component = i8(self.info[(3, 60)][1]) + if (3, 65) in self.info: + id = i8(self.info[(3, 65)][0])-1 + else: + id = 0 + if layers == 1 and not component: + self.mode = "L" + elif layers == 3 and component: + self.mode = "RGB"[id] + elif layers == 4 and component: + self.mode = "CMYK"[id] + + # size + self._size = self.getint((3, 20)), self.getint((3, 30)) + + # compression + try: + compression = COMPRESSION[self.getint((3, 120))] 
+ except KeyError: + raise IOError("Unknown IPTC image compression") + + # tile + if tag == (8, 10): + self.tile = [("iptc", (compression, offset), + (0, 0, self.size[0], self.size[1]))] + + def load(self): + + if len(self.tile) != 1 or self.tile[0][0] != "iptc": + return ImageFile.ImageFile.load(self) + + type, tile, box = self.tile[0] + + encoding, offset = tile + + self.fp.seek(offset) + + # Copy image data to temporary file + o_fd, outfile = tempfile.mkstemp(text=False) + o = os.fdopen(o_fd) + if encoding == "raw": + # To simplify access to the extracted file, + # prepend a PPM header + o.write("P5\n%d %d\n255\n" % self.size) + while True: + type, size = self.field() + if type != (8, 10): + break + while size > 0: + s = self.fp.read(min(size, 8192)) + if not s: + break + o.write(s) + size -= len(s) + o.close() + + try: + _im = Image.open(outfile) + _im.load() + self.im = _im.im + finally: + try: + os.unlink(outfile) + except OSError: + pass + + +Image.register_open(IptcImageFile.format, IptcImageFile) + +Image.register_extension(IptcImageFile.format, ".iim") + + +def getiptcinfo(im): + """ + Get IPTC information from TIFF, JPEG, or IPTC file. + + :param im: An image containing IPTC data. + :returns: A dictionary containing IPTC information, or None if + no IPTC information block was found. + """ + from . 
import TiffImagePlugin, JpegImagePlugin + import io + + data = None + + if isinstance(im, IptcImageFile): + # return info dictionary right away + return im.info + + elif isinstance(im, JpegImagePlugin.JpegImageFile): + # extract the IPTC/NAA resource + try: + app = im.app["APP13"] + if app[:14] == b"Photoshop 3.0\x00": + app = app[14:] + # parse the image resource block + offset = 0 + while app[offset:offset+4] == b"8BIM": + offset += 4 + # resource code + code = i16(app, offset) + offset += 2 + # resource name (usually empty) + name_len = i8(app[offset]) + # name = app[offset+1:offset+1+name_len] + offset = 1 + offset + name_len + if offset & 1: + offset += 1 + # resource data block + size = i32(app, offset) + offset += 4 + if code == 0x0404: + # 0x0404 contains IPTC/NAA data + data = app[offset:offset+size] + break + offset = offset + size + if offset & 1: + offset += 1 + except (AttributeError, KeyError): + pass + + elif isinstance(im, TiffImagePlugin.TiffImageFile): + # get raw data from the IPTC/NAA tag (PhotoShop tags the data + # as 4-byte integers, so we cannot use the get method...) 
+ try: + data = im.tag.tagdata[TiffImagePlugin.IPTC_NAA_CHUNK] + except (AttributeError, KeyError): + pass + + if data is None: + return None # no properties + + # create an IptcImagePlugin object without initializing it + class FakeImage(object): + pass + im = FakeImage() + im.__class__ = IptcImageFile + + # parse the IPTC information chunk + im.info = {} + im.fp = io.BytesIO(data) + + try: + im._open() + except (IndexError, KeyError): + pass # expected failure + + return im.info diff --git a/thesisenv/lib/python3.6/site-packages/PIL/Jpeg2KImagePlugin.py b/thesisenv/lib/python3.6/site-packages/PIL/Jpeg2KImagePlugin.py new file mode 100644 index 0000000..7659b6b --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/PIL/Jpeg2KImagePlugin.py @@ -0,0 +1,277 @@ +# +# The Python Imaging Library +# $Id$ +# +# JPEG2000 file handling +# +# History: +# 2014-03-12 ajh Created +# +# Copyright (c) 2014 Coriolis Systems Limited +# Copyright (c) 2014 Alastair Houghton +# +# See the README file for information on usage and redistribution. +# +from . 
import Image, ImageFile +import struct +import os +import io + +__version__ = "0.1" + + +def _parse_codestream(fp): + """Parse the JPEG 2000 codestream to extract the size and component + count from the SIZ marker segment, returning a PIL (size, mode) tuple.""" + + hdr = fp.read(2) + lsiz = struct.unpack('>H', hdr)[0] + siz = hdr + fp.read(lsiz - 2) + lsiz, rsiz, xsiz, ysiz, xosiz, yosiz, xtsiz, ytsiz, \ + xtosiz, ytosiz, csiz \ + = struct.unpack_from('>HHIIIIIIIIH', siz) + ssiz = [None]*csiz + xrsiz = [None]*csiz + yrsiz = [None]*csiz + for i in range(csiz): + ssiz[i], xrsiz[i], yrsiz[i] \ + = struct.unpack_from('>BBB', siz, 36 + 3 * i) + + size = (xsiz - xosiz, ysiz - yosiz) + if csiz == 1: + if (yrsiz[0] & 0x7f) > 8: + mode = 'I;16' + else: + mode = 'L' + elif csiz == 2: + mode = 'LA' + elif csiz == 3: + mode = 'RGB' + elif csiz == 4: + mode = 'RGBA' + else: + mode = None + + return (size, mode) + + +def _parse_jp2_header(fp): + """Parse the JP2 header box to extract size, component count and + color space information, returning a PIL (size, mode) tuple.""" + + # Find the JP2 header box + header = None + while True: + lbox, tbox = struct.unpack('>I4s', fp.read(8)) + if lbox == 1: + lbox = struct.unpack('>Q', fp.read(8))[0] + hlen = 16 + else: + hlen = 8 + + if lbox < hlen: + raise SyntaxError('Invalid JP2 header length') + + if tbox == b'jp2h': + header = fp.read(lbox - hlen) + break + else: + fp.seek(lbox - hlen, os.SEEK_CUR) + + if header is None: + raise SyntaxError('could not find JP2 header') + + size = None + mode = None + bpc = None + nc = None + + hio = io.BytesIO(header) + while True: + lbox, tbox = struct.unpack('>I4s', hio.read(8)) + if lbox == 1: + lbox = struct.unpack('>Q', hio.read(8))[0] + hlen = 16 + else: + hlen = 8 + + content = hio.read(lbox - hlen) + + if tbox == b'ihdr': + height, width, nc, bpc, c, unkc, ipr \ + = struct.unpack('>IIHBBBB', content) + size = (width, height) + if unkc: + if nc == 1 and (bpc & 0x7f) > 8: + mode = 'I;16' + elif 
nc == 1: + mode = 'L' + elif nc == 2: + mode = 'LA' + elif nc == 3: + mode = 'RGB' + elif nc == 4: + mode = 'RGBA' + break + elif tbox == b'colr': + meth, prec, approx = struct.unpack_from('>BBB', content) + if meth == 1: + cs = struct.unpack_from('>I', content, 3)[0] + if cs == 16: # sRGB + if nc == 1 and (bpc & 0x7f) > 8: + mode = 'I;16' + elif nc == 1: + mode = 'L' + elif nc == 3: + mode = 'RGB' + elif nc == 4: + mode = 'RGBA' + break + elif cs == 17: # grayscale + if nc == 1 and (bpc & 0x7f) > 8: + mode = 'I;16' + elif nc == 1: + mode = 'L' + elif nc == 2: + mode = 'LA' + break + elif cs == 18: # sYCC + if nc == 3: + mode = 'RGB' + elif nc == 4: + mode = 'RGBA' + break + + if size is None or mode is None: + raise SyntaxError("Malformed jp2 header") + + return (size, mode) + +## +# Image plugin for JPEG2000 images. + + +class Jpeg2KImageFile(ImageFile.ImageFile): + format = "JPEG2000" + format_description = "JPEG 2000 (ISO 15444)" + + def _open(self): + sig = self.fp.read(4) + if sig == b'\xff\x4f\xff\x51': + self.codec = "j2k" + self._size, self.mode = _parse_codestream(self.fp) + else: + sig = sig + self.fp.read(8) + + if sig == b'\x00\x00\x00\x0cjP \x0d\x0a\x87\x0a': + self.codec = "jp2" + self._size, self.mode = _parse_jp2_header(self.fp) + else: + raise SyntaxError('not a JPEG 2000 file') + + if self.size is None or self.mode is None: + raise SyntaxError('unable to determine size/mode') + + self.reduce = 0 + self.layers = 0 + + fd = -1 + length = -1 + + try: + fd = self.fp.fileno() + length = os.fstat(fd).st_size + except: + fd = -1 + try: + pos = self.fp.tell() + self.fp.seek(0, 2) + length = self.fp.tell() + self.fp.seek(pos, 0) + except: + length = -1 + + self.tile = [('jpeg2k', (0, 0) + self.size, 0, + (self.codec, self.reduce, self.layers, fd, length))] + + def load(self): + if self.reduce: + power = 1 << self.reduce + adjust = power >> 1 + self._size = (int((self.size[0] + adjust) / power), + int((self.size[1] + adjust) / power)) + + if self.tile: + # 
Update the reduce and layers settings + t = self.tile[0] + t3 = (t[3][0], self.reduce, self.layers, t[3][3], t[3][4]) + self.tile = [(t[0], (0, 0) + self.size, t[2], t3)] + + return ImageFile.ImageFile.load(self) + + +def _accept(prefix): + return (prefix[:4] == b'\xff\x4f\xff\x51' or + prefix[:12] == b'\x00\x00\x00\x0cjP \x0d\x0a\x87\x0a') + + +# ------------------------------------------------------------ +# Save support + +def _save(im, fp, filename): + if filename.endswith('.j2k'): + kind = 'j2k' + else: + kind = 'jp2' + + # Get the keyword arguments + info = im.encoderinfo + + offset = info.get('offset', None) + tile_offset = info.get('tile_offset', None) + tile_size = info.get('tile_size', None) + quality_mode = info.get('quality_mode', 'rates') + quality_layers = info.get('quality_layers', None) + num_resolutions = info.get('num_resolutions', 0) + cblk_size = info.get('codeblock_size', None) + precinct_size = info.get('precinct_size', None) + irreversible = info.get('irreversible', False) + progression = info.get('progression', 'LRCP') + cinema_mode = info.get('cinema_mode', 'no') + fd = -1 + + if hasattr(fp, "fileno"): + try: + fd = fp.fileno() + except: + fd = -1 + + im.encoderconfig = ( + offset, + tile_offset, + tile_size, + quality_mode, + quality_layers, + num_resolutions, + cblk_size, + precinct_size, + irreversible, + progression, + cinema_mode, + fd + ) + + ImageFile._save(im, fp, [('jpeg2k', (0, 0)+im.size, 0, kind)]) + +# ------------------------------------------------------------ +# Registry stuff + + +Image.register_open(Jpeg2KImageFile.format, Jpeg2KImageFile, _accept) +Image.register_save(Jpeg2KImageFile.format, _save) + +Image.register_extensions(Jpeg2KImageFile.format, + [".jp2", ".j2k", ".jpc", ".jpf", ".jpx", ".j2c"]) + +Image.register_mime(Jpeg2KImageFile.format, 'image/jp2') +Image.register_mime(Jpeg2KImageFile.format, 'image/jpx') diff --git a/thesisenv/lib/python3.6/site-packages/PIL/JpegImagePlugin.py 
b/thesisenv/lib/python3.6/site-packages/PIL/JpegImagePlugin.py new file mode 100644 index 0000000..f206818 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/PIL/JpegImagePlugin.py @@ -0,0 +1,804 @@ +# +# The Python Imaging Library. +# $Id$ +# +# JPEG (JFIF) file handling +# +# See "Digital Compression and Coding of Continuous-Tone Still Images, +# Part 1, Requirements and Guidelines" (CCITT T.81 / ISO 10918-1) +# +# History: +# 1995-09-09 fl Created +# 1995-09-13 fl Added full parser +# 1996-03-25 fl Added hack to use the IJG command line utilities +# 1996-05-05 fl Workaround Photoshop 2.5 CMYK polarity bug +# 1996-05-28 fl Added draft support, JFIF version (0.1) +# 1996-12-30 fl Added encoder options, added progression property (0.2) +# 1997-08-27 fl Save mode 1 images as BW (0.3) +# 1998-07-12 fl Added YCbCr to draft and save methods (0.4) +# 1998-10-19 fl Don't hang on files using 16-bit DQT's (0.4.1) +# 2001-04-16 fl Extract DPI settings from JFIF files (0.4.2) +# 2002-07-01 fl Skip pad bytes before markers; identify Exif files (0.4.3) +# 2003-04-25 fl Added experimental EXIF decoder (0.5) +# 2003-06-06 fl Added experimental EXIF GPSinfo decoder +# 2003-09-13 fl Extract COM markers +# 2009-09-06 fl Added icc_profile support (from Florian Hoech) +# 2009-03-06 fl Changed CMYK handling; always use Adobe polarity (0.6) +# 2009-03-08 fl Added subsampling support (from Justin Huff). +# +# Copyright (c) 1997-2003 by Secret Labs AB. +# Copyright (c) 1995-1996 by Fredrik Lundh. +# +# See the README file for information on usage and redistribution. +# + +from __future__ import print_function + +import array +import struct +import io +import warnings +from . 
import Image, ImageFile, TiffImagePlugin +from ._binary import i8, o8, i16be as i16 +from .JpegPresets import presets +from ._util import isStringType + +__version__ = "0.6" + + +# +# Parser + +def Skip(self, marker): + n = i16(self.fp.read(2))-2 + ImageFile._safe_read(self.fp, n) + + +def APP(self, marker): + # + # Application marker. Store these in the APP dictionary. + # Also look for well-known application markers. + + n = i16(self.fp.read(2))-2 + s = ImageFile._safe_read(self.fp, n) + + app = "APP%d" % (marker & 15) + + self.app[app] = s # compatibility + self.applist.append((app, s)) + + if marker == 0xFFE0 and s[:4] == b"JFIF": + # extract JFIF information + self.info["jfif"] = version = i16(s, 5) # version + self.info["jfif_version"] = divmod(version, 256) + # extract JFIF properties + try: + jfif_unit = i8(s[7]) + jfif_density = i16(s, 8), i16(s, 10) + except: + pass + else: + if jfif_unit == 1: + self.info["dpi"] = jfif_density + self.info["jfif_unit"] = jfif_unit + self.info["jfif_density"] = jfif_density + elif marker == 0xFFE1 and s[:5] == b"Exif\0": + if "exif" not in self.info: + # extract Exif information (incomplete) + self.info["exif"] = s # FIXME: value will change + elif marker == 0xFFE2 and s[:5] == b"FPXR\0": + # extract FlashPix information (incomplete) + self.info["flashpix"] = s # FIXME: value will change + elif marker == 0xFFE2 and s[:12] == b"ICC_PROFILE\0": + # Since an ICC profile can be larger than the maximum size of + # a JPEG marker (64K), we need provisions to split it into + # multiple markers. 
The format defined by the ICC specifies + # one or more APP2 markers containing the following data: + # Identifying string ASCII "ICC_PROFILE\0" (12 bytes) + # Marker sequence number 1, 2, etc (1 byte) + # Number of markers Total of APP2's used (1 byte) + # Profile data (remainder of APP2 data) + # Decoders should use the marker sequence numbers to + # reassemble the profile, rather than assuming that the APP2 + # markers appear in the correct sequence. + self.icclist.append(s) + elif marker == 0xFFEE and s[:5] == b"Adobe": + self.info["adobe"] = i16(s, 5) + # extract Adobe custom properties + try: + adobe_transform = i8(s[1]) + except: + pass + else: + self.info["adobe_transform"] = adobe_transform + elif marker == 0xFFE2 and s[:4] == b"MPF\0": + # extract MPO information + self.info["mp"] = s[4:] + # offset is current location minus buffer size + # plus constant header size + self.info["mpoffset"] = self.fp.tell() - n + 4 + + # If DPI isn't in JPEG header, fetch from EXIF + if "dpi" not in self.info and "exif" in self.info: + try: + exif = self._getexif() + resolution_unit = exif[0x0128] + x_resolution = exif[0x011A] + try: + dpi = x_resolution[0] / x_resolution[1] + except TypeError: + dpi = x_resolution + if resolution_unit == 3: # cm + # 1 dpcm = 2.54 dpi + dpi *= 2.54 + self.info["dpi"] = dpi, dpi + except (KeyError, SyntaxError, ZeroDivisionError): + # SyntaxError for invalid/unreadable exif + # KeyError for dpi not included + # ZeroDivisionError for invalid dpi rational value + self.info["dpi"] = 72, 72 + + +def COM(self, marker): + # + # Comment marker. Store these in the APP dictionary. + n = i16(self.fp.read(2))-2 + s = ImageFile._safe_read(self.fp, n) + + self.app["COM"] = s # compatibility + self.applist.append(("COM", s)) + + +def SOF(self, marker): + # + # Start of frame marker. Defines the size and mode of the + # image. JPEG is colour blind, so we use some simple + # heuristics to map the number of layers to an appropriate + # mode. 
Note that this could be made a bit brighter, by + # looking for JFIF and Adobe APP markers. + + n = i16(self.fp.read(2))-2 + s = ImageFile._safe_read(self.fp, n) + self._size = i16(s[3:]), i16(s[1:]) + + self.bits = i8(s[0]) + if self.bits != 8: + raise SyntaxError("cannot handle %d-bit layers" % self.bits) + + self.layers = i8(s[5]) + if self.layers == 1: + self.mode = "L" + elif self.layers == 3: + self.mode = "RGB" + elif self.layers == 4: + self.mode = "CMYK" + else: + raise SyntaxError("cannot handle %d-layer images" % self.layers) + + if marker in [0xFFC2, 0xFFC6, 0xFFCA, 0xFFCE]: + self.info["progressive"] = self.info["progression"] = 1 + + if self.icclist: + # fixup icc profile + self.icclist.sort() # sort by sequence number + if i8(self.icclist[0][13]) == len(self.icclist): + profile = [] + for p in self.icclist: + profile.append(p[14:]) + icc_profile = b"".join(profile) + else: + icc_profile = None # wrong number of fragments + self.info["icc_profile"] = icc_profile + self.icclist = None + + for i in range(6, len(s), 3): + t = s[i:i+3] + # 4-tuples: id, vsamp, hsamp, qtable + self.layer.append((t[0], i8(t[1])//16, i8(t[1]) & 15, i8(t[2]))) + + +def DQT(self, marker): + # + # Define quantization table. Support baseline 8-bit tables + # only. Note that there might be more than one table in + # each marker. + + # FIXME: The quantization tables can be used to estimate the + # compression quality. + + n = i16(self.fp.read(2))-2 + s = ImageFile._safe_read(self.fp, n) + while len(s): + if len(s) < 65: + raise SyntaxError("bad quantization table marker") + v = i8(s[0]) + if v//16 == 0: + self.quantization[v & 15] = array.array("B", s[1:65]) + s = s[65:] + else: + return # FIXME: add code to read 16-bit tables! 
+ # raise SyntaxError, "bad quantization table element size" + + +# +# JPEG marker table + +MARKER = { + 0xFFC0: ("SOF0", "Baseline DCT", SOF), + 0xFFC1: ("SOF1", "Extended Sequential DCT", SOF), + 0xFFC2: ("SOF2", "Progressive DCT", SOF), + 0xFFC3: ("SOF3", "Spatial lossless", SOF), + 0xFFC4: ("DHT", "Define Huffman table", Skip), + 0xFFC5: ("SOF5", "Differential sequential DCT", SOF), + 0xFFC6: ("SOF6", "Differential progressive DCT", SOF), + 0xFFC7: ("SOF7", "Differential spatial", SOF), + 0xFFC8: ("JPG", "Extension", None), + 0xFFC9: ("SOF9", "Extended sequential DCT (AC)", SOF), + 0xFFCA: ("SOF10", "Progressive DCT (AC)", SOF), + 0xFFCB: ("SOF11", "Spatial lossless DCT (AC)", SOF), + 0xFFCC: ("DAC", "Define arithmetic coding conditioning", Skip), + 0xFFCD: ("SOF13", "Differential sequential DCT (AC)", SOF), + 0xFFCE: ("SOF14", "Differential progressive DCT (AC)", SOF), + 0xFFCF: ("SOF15", "Differential spatial (AC)", SOF), + 0xFFD0: ("RST0", "Restart 0", None), + 0xFFD1: ("RST1", "Restart 1", None), + 0xFFD2: ("RST2", "Restart 2", None), + 0xFFD3: ("RST3", "Restart 3", None), + 0xFFD4: ("RST4", "Restart 4", None), + 0xFFD5: ("RST5", "Restart 5", None), + 0xFFD6: ("RST6", "Restart 6", None), + 0xFFD7: ("RST7", "Restart 7", None), + 0xFFD8: ("SOI", "Start of image", None), + 0xFFD9: ("EOI", "End of image", None), + 0xFFDA: ("SOS", "Start of scan", Skip), + 0xFFDB: ("DQT", "Define quantization table", DQT), + 0xFFDC: ("DNL", "Define number of lines", Skip), + 0xFFDD: ("DRI", "Define restart interval", Skip), + 0xFFDE: ("DHP", "Define hierarchical progression", SOF), + 0xFFDF: ("EXP", "Expand reference component", Skip), + 0xFFE0: ("APP0", "Application segment 0", APP), + 0xFFE1: ("APP1", "Application segment 1", APP), + 0xFFE2: ("APP2", "Application segment 2", APP), + 0xFFE3: ("APP3", "Application segment 3", APP), + 0xFFE4: ("APP4", "Application segment 4", APP), + 0xFFE5: ("APP5", "Application segment 5", APP), + 0xFFE6: ("APP6", "Application segment 6", APP), 
+ 0xFFE7: ("APP7", "Application segment 7", APP), + 0xFFE8: ("APP8", "Application segment 8", APP), + 0xFFE9: ("APP9", "Application segment 9", APP), + 0xFFEA: ("APP10", "Application segment 10", APP), + 0xFFEB: ("APP11", "Application segment 11", APP), + 0xFFEC: ("APP12", "Application segment 12", APP), + 0xFFED: ("APP13", "Application segment 13", APP), + 0xFFEE: ("APP14", "Application segment 14", APP), + 0xFFEF: ("APP15", "Application segment 15", APP), + 0xFFF0: ("JPG0", "Extension 0", None), + 0xFFF1: ("JPG1", "Extension 1", None), + 0xFFF2: ("JPG2", "Extension 2", None), + 0xFFF3: ("JPG3", "Extension 3", None), + 0xFFF4: ("JPG4", "Extension 4", None), + 0xFFF5: ("JPG5", "Extension 5", None), + 0xFFF6: ("JPG6", "Extension 6", None), + 0xFFF7: ("JPG7", "Extension 7", None), + 0xFFF8: ("JPG8", "Extension 8", None), + 0xFFF9: ("JPG9", "Extension 9", None), + 0xFFFA: ("JPG10", "Extension 10", None), + 0xFFFB: ("JPG11", "Extension 11", None), + 0xFFFC: ("JPG12", "Extension 12", None), + 0xFFFD: ("JPG13", "Extension 13", None), + 0xFFFE: ("COM", "Comment", COM) +} + + +def _accept(prefix): + return prefix[0:1] == b"\377" + + +## +# Image plugin for JPEG and JFIF images. 
+ +class JpegImageFile(ImageFile.ImageFile): + + format = "JPEG" + format_description = "JPEG (ISO 10918)" + + def _open(self): + + s = self.fp.read(1) + + if i8(s) != 255: + raise SyntaxError("not a JPEG file") + + # Create attributes + self.bits = self.layers = 0 + + # JPEG specifics (internal) + self.layer = [] + self.huffman_dc = {} + self.huffman_ac = {} + self.quantization = {} + self.app = {} # compatibility + self.applist = [] + self.icclist = [] + + while True: + + i = i8(s) + if i == 0xFF: + s = s + self.fp.read(1) + i = i16(s) + else: + # Skip non-0xFF junk + s = self.fp.read(1) + continue + + if i in MARKER: + name, description, handler = MARKER[i] + if handler is not None: + handler(self, i) + if i == 0xFFDA: # start of scan + rawmode = self.mode + if self.mode == "CMYK": + rawmode = "CMYK;I" # assume adobe conventions + self.tile = [("jpeg", (0, 0) + self.size, 0, + (rawmode, ""))] + # self.__offset = self.fp.tell() + break + s = self.fp.read(1) + elif i == 0 or i == 0xFFFF: + # padded marker or junk; move on + s = b"\xff" + elif i == 0xFF00: # Skip extraneous data (escaped 0xFF) + s = self.fp.read(1) + else: + raise SyntaxError("no marker found") + + def load_read(self, read_bytes): + """ + internal: read more image data + For premature EOF and LOAD_TRUNCATED_IMAGES adds EOI marker + so libjpeg can finish decoding + """ + s = self.fp.read(read_bytes) + + if not s and ImageFile.LOAD_TRUNCATED_IMAGES: + # Premature EOF. 
+ # Pretend file is finished adding EOI marker + return b"\xFF\xD9" + + return s + + def draft(self, mode, size): + + if len(self.tile) != 1: + return + + # Protect from second call + if self.decoderconfig: + return + + d, e, o, a = self.tile[0] + scale = 0 + + if a[0] == "RGB" and mode in ["L", "YCbCr"]: + self.mode = mode + a = mode, "" + + if size: + scale = min(self.size[0] // size[0], self.size[1] // size[1]) + for s in [8, 4, 2, 1]: + if scale >= s: + break + e = e[0], e[1], (e[2]-e[0]+s-1)//s+e[0], (e[3]-e[1]+s-1)//s+e[1] + self._size = ((self.size[0]+s-1)//s, (self.size[1]+s-1)//s) + scale = s + + self.tile = [(d, e, o, a)] + self.decoderconfig = (scale, 0) + + return self + + def load_djpeg(self): + + # ALTERNATIVE: handle JPEGs via the IJG command line utilities + + import subprocess + import tempfile + import os + f, path = tempfile.mkstemp() + os.close(f) + if os.path.exists(self.filename): + subprocess.check_call(["djpeg", "-outfile", path, self.filename]) + else: + raise ValueError("Invalid Filename") + + try: + _im = Image.open(path) + _im.load() + self.im = _im.im + finally: + try: + os.unlink(path) + except OSError: + pass + + self.mode = self.im.mode + self._size = self.im.size + + self.tile = [] + + def _getexif(self): + return _getexif(self) + + def _getmp(self): + return _getmp(self) + + +def _fixup_dict(src_dict): + # Helper function for _getexif() + # returns a dict with any single item tuples/lists as individual values + def _fixup(value): + try: + if len(value) == 1 and not isinstance(value, dict): + return value[0] + except: + pass + return value + + return {k: _fixup(v) for k, v in src_dict.items()} + + +def _getexif(self): + # Extract EXIF information. This method is highly experimental, + # and is likely to be replaced with something better in a future + # version. + + # The EXIF record consists of a TIFF file embedded in a JPEG + # application marker (!). 
+ try: + data = self.info["exif"] + except KeyError: + return None + file = io.BytesIO(data[6:]) + head = file.read(8) + # process dictionary + info = TiffImagePlugin.ImageFileDirectory_v1(head) + info.load(file) + exif = dict(_fixup_dict(info)) + # get exif extension + try: + # exif field 0x8769 is an offset pointer to the location + # of the nested embedded exif ifd. + # It should be a long, but may be corrupted. + file.seek(exif[0x8769]) + except (KeyError, TypeError): + pass + else: + info = TiffImagePlugin.ImageFileDirectory_v1(head) + info.load(file) + exif.update(_fixup_dict(info)) + # get gpsinfo extension + try: + # exif field 0x8825 is an offset pointer to the location + # of the nested embedded gps exif ifd. + # It should be a long, but may be corrupted. + file.seek(exif[0x8825]) + except (KeyError, TypeError): + pass + else: + info = TiffImagePlugin.ImageFileDirectory_v1(head) + info.load(file) + exif[0x8825] = _fixup_dict(info) + + return exif + + +def _getmp(self): + # Extract MP information. This method was inspired by the "highly + # experimental" _getexif version that's been in use for years now, + # itself based on the ImageFileDirectory class in the TIFF plug-in. + + # The MP record essentially consists of a TIFF file embedded in a JPEG + # application marker. 
+ try: + data = self.info["mp"] + except KeyError: + return None + file_contents = io.BytesIO(data) + head = file_contents.read(8) + endianness = '>' if head[:4] == b'\x4d\x4d\x00\x2a' else '<' + # process dictionary + try: + info = TiffImagePlugin.ImageFileDirectory_v2(head) + info.load(file_contents) + mp = dict(info) + except: + raise SyntaxError("malformed MP Index (unreadable directory)") + # it's an error not to have a number of images + try: + quant = mp[0xB001] + except KeyError: + raise SyntaxError("malformed MP Index (no number of images)") + # get MP entries + mpentries = [] + try: + rawmpentries = mp[0xB002] + for entrynum in range(0, quant): + unpackedentry = struct.unpack_from( + '{}LLLHH'.format(endianness), rawmpentries, entrynum * 16) + labels = ('Attribute', 'Size', 'DataOffset', 'EntryNo1', + 'EntryNo2') + mpentry = dict(zip(labels, unpackedentry)) + mpentryattr = { + 'DependentParentImageFlag': bool(mpentry['Attribute'] & + (1 << 31)), + 'DependentChildImageFlag': bool(mpentry['Attribute'] & + (1 << 30)), + 'RepresentativeImageFlag': bool(mpentry['Attribute'] & + (1 << 29)), + 'Reserved': (mpentry['Attribute'] & (3 << 27)) >> 27, + 'ImageDataFormat': (mpentry['Attribute'] & (7 << 24)) >> 24, + 'MPType': mpentry['Attribute'] & 0x00FFFFFF + } + if mpentryattr['ImageDataFormat'] == 0: + mpentryattr['ImageDataFormat'] = 'JPEG' + else: + raise SyntaxError("unsupported picture format in MPO") + mptypemap = { + 0x000000: 'Undefined', + 0x010001: 'Large Thumbnail (VGA Equivalent)', + 0x010002: 'Large Thumbnail (Full HD Equivalent)', + 0x020001: 'Multi-Frame Image (Panorama)', + 0x020002: 'Multi-Frame Image: (Disparity)', + 0x020003: 'Multi-Frame Image: (Multi-Angle)', + 0x030000: 'Baseline MP Primary Image' + } + mpentryattr['MPType'] = mptypemap.get(mpentryattr['MPType'], + 'Unknown') + mpentry['Attribute'] = mpentryattr + mpentries.append(mpentry) + mp[0xB002] = mpentries + except KeyError: + raise SyntaxError("malformed MP Index (bad MP Entry)") + # 
Next we should try and parse the individual image unique ID list; + # we don't because I've never seen this actually used in a real MPO + # file and so can't test it. + return mp + + +# -------------------------------------------------------------------- +# stuff to save JPEG files + +RAWMODE = { + "1": "L", + "L": "L", + "RGB": "RGB", + "RGBX": "RGB", + "CMYK": "CMYK;I", # assume adobe conventions + "YCbCr": "YCbCr", +} + +zigzag_index = (0, 1, 5, 6, 14, 15, 27, 28, + 2, 4, 7, 13, 16, 26, 29, 42, + 3, 8, 12, 17, 25, 30, 41, 43, + 9, 11, 18, 24, 31, 40, 44, 53, + 10, 19, 23, 32, 39, 45, 52, 54, + 20, 22, 33, 38, 46, 51, 55, 60, + 21, 34, 37, 47, 50, 56, 59, 61, + 35, 36, 48, 49, 57, 58, 62, 63) + +samplings = {(1, 1, 1, 1, 1, 1): 0, + (2, 1, 1, 1, 1, 1): 1, + (2, 2, 1, 1, 1, 1): 2, + } + + +def convert_dict_qtables(qtables): + qtables = [qtables[key] for key in range(len(qtables)) if key in qtables] + for idx, table in enumerate(qtables): + qtables[idx] = [table[i] for i in zigzag_index] + return qtables + + +def get_sampling(im): + # There's no subsampling when image have only 1 layer + # (grayscale images) or when they are CMYK (4 layers), + # so set subsampling to default value. + # + # NOTE: currently Pillow can't encode JPEG to YCCK format. + # If YCCK support is added in the future, subsampling code will have + # to be updated (here and in JpegEncode.c) to deal with 4 layers. 
+ if not hasattr(im, 'layers') or im.layers in (1, 4): + return -1 + sampling = im.layer[0][1:3] + im.layer[1][1:3] + im.layer[2][1:3] + return samplings.get(sampling, -1) + + +def _save(im, fp, filename): + + try: + rawmode = RAWMODE[im.mode] + except KeyError: + raise IOError("cannot write mode %s as JPEG" % im.mode) + + info = im.encoderinfo + + dpi = [int(round(x)) for x in info.get("dpi", (0, 0))] + + quality = info.get("quality", 0) + subsampling = info.get("subsampling", -1) + qtables = info.get("qtables") + + if quality == "keep": + quality = 0 + subsampling = "keep" + qtables = "keep" + elif quality in presets: + preset = presets[quality] + quality = 0 + subsampling = preset.get('subsampling', -1) + qtables = preset.get('quantization') + elif not isinstance(quality, int): + raise ValueError("Invalid quality setting") + else: + if subsampling in presets: + subsampling = presets[subsampling].get('subsampling', -1) + if isStringType(qtables) and qtables in presets: + qtables = presets[qtables].get('quantization') + + if subsampling == "4:4:4": + subsampling = 0 + elif subsampling == "4:2:2": + subsampling = 1 + elif subsampling == "4:2:0": + subsampling = 2 + elif subsampling == "4:1:1": + # For compatibility. Before Pillow 4.3, 4:1:1 actually meant 4:2:0. + # Set 4:2:0 if someone is still using that value. 
+ subsampling = 2 + elif subsampling == "keep": + if im.format != "JPEG": + raise ValueError( + "Cannot use 'keep' when original image is not a JPEG") + subsampling = get_sampling(im) + + def validate_qtables(qtables): + if qtables is None: + return qtables + if isStringType(qtables): + try: + lines = [int(num) for line in qtables.splitlines() + for num in line.split('#', 1)[0].split()] + except ValueError: + raise ValueError("Invalid quantization table") + else: + qtables = [lines[s:s+64] for s in range(0, len(lines), 64)] + if isinstance(qtables, (tuple, list, dict)): + if isinstance(qtables, dict): + qtables = convert_dict_qtables(qtables) + elif isinstance(qtables, tuple): + qtables = list(qtables) + if not (0 < len(qtables) < 5): + raise ValueError("None or too many quantization tables") + for idx, table in enumerate(qtables): + try: + if len(table) != 64: + raise TypeError + table = array.array('B', table) + except TypeError: + raise ValueError("Invalid quantization table") + else: + qtables[idx] = list(table) + return qtables + + if qtables == "keep": + if im.format != "JPEG": + raise ValueError( + "Cannot use 'keep' when original image is not a JPEG") + qtables = getattr(im, "quantization", None) + qtables = validate_qtables(qtables) + + extra = b"" + + icc_profile = info.get("icc_profile") + if icc_profile: + ICC_OVERHEAD_LEN = 14 + MAX_BYTES_IN_MARKER = 65533 + MAX_DATA_BYTES_IN_MARKER = MAX_BYTES_IN_MARKER - ICC_OVERHEAD_LEN + markers = [] + while icc_profile: + markers.append(icc_profile[:MAX_DATA_BYTES_IN_MARKER]) + icc_profile = icc_profile[MAX_DATA_BYTES_IN_MARKER:] + i = 1 + for marker in markers: + size = struct.pack(">H", 2 + ICC_OVERHEAD_LEN + len(marker)) + extra += (b"\xFF\xE2" + size + b"ICC_PROFILE\0" + o8(i) + + o8(len(markers)) + marker) + i += 1 + + # "progressive" is the official name, but older documentation + # says "progression" + # FIXME: issue a warning if the wrong form is used (post-1.1.7) + progressive = (info.get("progressive", 
False) or + info.get("progression", False)) + + optimize = info.get("optimize", False) + + # get keyword arguments + im.encoderconfig = ( + quality, + progressive, + info.get("smooth", 0), + optimize, + info.get("streamtype", 0), + dpi[0], dpi[1], + subsampling, + qtables, + extra, + info.get("exif", b"") + ) + + # if we optimize, libjpeg needs a buffer big enough to hold the whole image + # in a shot. Guessing on the size, at im.size bytes. (raw pixel size is + # channels*size, this is a value that's been used in a django patch. + # https://github.com/matthewwithanm/django-imagekit/issues/50 + bufsize = 0 + if optimize or progressive: + # CMYK can be bigger + if im.mode == 'CMYK': + bufsize = 4 * im.size[0] * im.size[1] + # keep sets quality to 0, but the actual value may be high. + elif quality >= 95 or quality == 0: + bufsize = 2 * im.size[0] * im.size[1] + else: + bufsize = im.size[0] * im.size[1] + + # The exif info needs to be written as one block, + APP1, + one spare byte. + # Ensure that our buffer is big enough. Same with the icc_profile block. + bufsize = max(ImageFile.MAXBLOCK, bufsize, len(info.get("exif", b"")) + 5, + len(extra) + 1) + + ImageFile._save(im, fp, [("jpeg", (0, 0)+im.size, 0, rawmode)], bufsize) + + +def _save_cjpeg(im, fp, filename): + # ALTERNATIVE: handle JPEGs via the IJG command line utilities. 
+ import os + import subprocess + tempfile = im._dump() + subprocess.check_call(["cjpeg", "-outfile", filename, tempfile]) + try: + os.unlink(tempfile) + except OSError: + pass + + +## +# Factory for making JPEG and MPO instances +def jpeg_factory(fp=None, filename=None): + im = JpegImageFile(fp, filename) + try: + mpheader = im._getmp() + if mpheader[45057] > 1: + # It's actually an MPO + from .MpoImagePlugin import MpoImageFile + im = MpoImageFile(fp, filename) + except (TypeError, IndexError): + # It is really a JPEG + pass + except SyntaxError: + warnings.warn("Image appears to be a malformed MPO file, it will be " + "interpreted as a base JPEG file") + return im + + +# --------------------------------------------------------------------- +# Registry stuff + +Image.register_open(JpegImageFile.format, jpeg_factory, _accept) +Image.register_save(JpegImageFile.format, _save) + +Image.register_extensions(JpegImageFile.format, + [".jfif", ".jpe", ".jpg", ".jpeg"]) + +Image.register_mime(JpegImageFile.format, "image/jpeg") diff --git a/thesisenv/lib/python3.6/site-packages/PIL/JpegPresets.py b/thesisenv/lib/python3.6/site-packages/PIL/JpegPresets.py new file mode 100644 index 0000000..5f01f0d --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/PIL/JpegPresets.py @@ -0,0 +1,241 @@ +""" +JPEG quality settings equivalent to the Photoshop settings. + +More presets can be added to the presets dict if needed. + +Can be use when saving JPEG file. + +To apply the preset, specify:: + + quality="preset_name" + +To apply only the quantization table:: + + qtables="preset_name" + +To apply only the subsampling setting:: + + subsampling="preset_name" + +Example:: + + im.save("image_name.jpg", quality="web_high") + + +Subsampling +----------- + +Subsampling is the practice of encoding images by implementing less resolution +for chroma information than for luma information. 
+(ref.: https://en.wikipedia.org/wiki/Chroma_subsampling) + +Possible subsampling values are 0, 1 and 2 that correspond to 4:4:4, 4:2:2 and +4:2:0. + +You can get the subsampling of a JPEG with the +`JpegImagePlugin.get_subsampling(im)` function. + + +Quantization tables +------------------- + +They are values use by the DCT (Discrete cosine transform) to remove +*unnecessary* information from the image (the lossy part of the compression). +(ref.: https://en.wikipedia.org/wiki/Quantization_matrix#Quantization_matrices, +https://en.wikipedia.org/wiki/JPEG#Quantization) + +You can get the quantization tables of a JPEG with:: + + im.quantization + +This will return a dict with a number of arrays. You can pass this dict +directly as the qtables argument when saving a JPEG. + +The tables format between im.quantization and quantization in presets differ in +3 ways: + +1. The base container of the preset is a list with sublists instead of dict. + dict[0] -> list[0], dict[1] -> list[1], ... +2. Each table in a preset is a list instead of an array. +3. The zigzag order is remove in the preset (needed by libjpeg >= 6a). + +You can convert the dict format to the preset format with the +`JpegImagePlugin.convert_dict_qtables(dict_qtables)` function. 
+ +Libjpeg ref.: https://web.archive.org/web/20120328125543/http://www.jpegcameras.com/libjpeg/libjpeg-3.html + +""" + +presets = { + 'web_low': {'subsampling': 2, # "4:2:0" + 'quantization': [ + [20, 16, 25, 39, 50, 46, 62, 68, + 16, 18, 23, 38, 38, 53, 65, 68, + 25, 23, 31, 38, 53, 65, 68, 68, + 39, 38, 38, 53, 65, 68, 68, 68, + 50, 38, 53, 65, 68, 68, 68, 68, + 46, 53, 65, 68, 68, 68, 68, 68, + 62, 65, 68, 68, 68, 68, 68, 68, + 68, 68, 68, 68, 68, 68, 68, 68], + [21, 25, 32, 38, 54, 68, 68, 68, + 25, 28, 24, 38, 54, 68, 68, 68, + 32, 24, 32, 43, 66, 68, 68, 68, + 38, 38, 43, 53, 68, 68, 68, 68, + 54, 54, 66, 68, 68, 68, 68, 68, + 68, 68, 68, 68, 68, 68, 68, 68, + 68, 68, 68, 68, 68, 68, 68, 68, + 68, 68, 68, 68, 68, 68, 68, 68] + ]}, + 'web_medium': {'subsampling': 2, # "4:2:0" + 'quantization': [ + [16, 11, 11, 16, 23, 27, 31, 30, + 11, 12, 12, 15, 20, 23, 23, 30, + 11, 12, 13, 16, 23, 26, 35, 47, + 16, 15, 16, 23, 26, 37, 47, 64, + 23, 20, 23, 26, 39, 51, 64, 64, + 27, 23, 26, 37, 51, 64, 64, 64, + 31, 23, 35, 47, 64, 64, 64, 64, + 30, 30, 47, 64, 64, 64, 64, 64], + [17, 15, 17, 21, 20, 26, 38, 48, + 15, 19, 18, 17, 20, 26, 35, 43, + 17, 18, 20, 22, 26, 30, 46, 53, + 21, 17, 22, 28, 30, 39, 53, 64, + 20, 20, 26, 30, 39, 48, 64, 64, + 26, 26, 30, 39, 48, 63, 64, 64, + 38, 35, 46, 53, 64, 64, 64, 64, + 48, 43, 53, 64, 64, 64, 64, 64] + ]}, + 'web_high': {'subsampling': 0, # "4:4:4" + 'quantization': [ + [6, 4, 4, 6, 9, 11, 12, 16, + 4, 5, 5, 6, 8, 10, 12, 12, + 4, 5, 5, 6, 10, 12, 14, 19, + 6, 6, 6, 11, 12, 15, 19, 28, + 9, 8, 10, 12, 16, 20, 27, 31, + 11, 10, 12, 15, 20, 27, 31, 31, + 12, 12, 14, 19, 27, 31, 31, 31, + 16, 12, 19, 28, 31, 31, 31, 31], + [7, 7, 13, 24, 26, 31, 31, 31, + 7, 12, 16, 21, 31, 31, 31, 31, + 13, 16, 17, 31, 31, 31, 31, 31, + 24, 21, 31, 31, 31, 31, 31, 31, + 26, 31, 31, 31, 31, 31, 31, 31, + 31, 31, 31, 31, 31, 31, 31, 31, + 31, 31, 31, 31, 31, 31, 31, 31, + 31, 31, 31, 31, 31, 31, 31, 31] + ]}, + 'web_very_high': {'subsampling': 0, # 
"4:4:4" + 'quantization': [ + [2, 2, 2, 2, 3, 4, 5, 6, + 2, 2, 2, 2, 3, 4, 5, 6, + 2, 2, 2, 2, 4, 5, 7, 9, + 2, 2, 2, 4, 5, 7, 9, 12, + 3, 3, 4, 5, 8, 10, 12, 12, + 4, 4, 5, 7, 10, 12, 12, 12, + 5, 5, 7, 9, 12, 12, 12, 12, + 6, 6, 9, 12, 12, 12, 12, 12], + [3, 3, 5, 9, 13, 15, 15, 15, + 3, 4, 6, 11, 14, 12, 12, 12, + 5, 6, 9, 14, 12, 12, 12, 12, + 9, 11, 14, 12, 12, 12, 12, 12, + 13, 14, 12, 12, 12, 12, 12, 12, + 15, 12, 12, 12, 12, 12, 12, 12, + 15, 12, 12, 12, 12, 12, 12, 12, + 15, 12, 12, 12, 12, 12, 12, 12] + ]}, + 'web_maximum': {'subsampling': 0, # "4:4:4" + 'quantization': [ + [1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 2, + 1, 1, 1, 1, 1, 1, 2, 2, + 1, 1, 1, 1, 1, 2, 2, 3, + 1, 1, 1, 1, 2, 2, 3, 3, + 1, 1, 1, 2, 2, 3, 3, 3, + 1, 1, 2, 2, 3, 3, 3, 3], + [1, 1, 1, 2, 2, 3, 3, 3, + 1, 1, 1, 2, 3, 3, 3, 3, + 1, 1, 1, 3, 3, 3, 3, 3, + 2, 2, 3, 3, 3, 3, 3, 3, + 2, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3] + ]}, + 'low': {'subsampling': 2, # "4:2:0" + 'quantization': [ + [18, 14, 14, 21, 30, 35, 34, 17, + 14, 16, 16, 19, 26, 23, 12, 12, + 14, 16, 17, 21, 23, 12, 12, 12, + 21, 19, 21, 23, 12, 12, 12, 12, + 30, 26, 23, 12, 12, 12, 12, 12, + 35, 23, 12, 12, 12, 12, 12, 12, + 34, 12, 12, 12, 12, 12, 12, 12, + 17, 12, 12, 12, 12, 12, 12, 12], + [20, 19, 22, 27, 20, 20, 17, 17, + 19, 25, 23, 14, 14, 12, 12, 12, + 22, 23, 14, 14, 12, 12, 12, 12, + 27, 14, 14, 12, 12, 12, 12, 12, + 20, 14, 12, 12, 12, 12, 12, 12, + 20, 12, 12, 12, 12, 12, 12, 12, + 17, 12, 12, 12, 12, 12, 12, 12, + 17, 12, 12, 12, 12, 12, 12, 12] + ]}, + 'medium': {'subsampling': 2, # "4:2:0" + 'quantization': [ + [12, 8, 8, 12, 17, 21, 24, 17, + 8, 9, 9, 11, 15, 19, 12, 12, + 8, 9, 10, 12, 19, 12, 12, 12, + 12, 11, 12, 21, 12, 12, 12, 12, + 17, 15, 19, 12, 12, 12, 12, 12, + 21, 19, 12, 12, 12, 12, 12, 12, + 24, 12, 12, 12, 12, 12, 12, 12, + 17, 12, 12, 12, 12, 12, 12, 12], + [13, 11, 13, 16, 20, 20, 17, 17, + 11, 14, 
14, 14, 14, 12, 12, 12, + 13, 14, 14, 14, 12, 12, 12, 12, + 16, 14, 14, 12, 12, 12, 12, 12, + 20, 14, 12, 12, 12, 12, 12, 12, + 20, 12, 12, 12, 12, 12, 12, 12, + 17, 12, 12, 12, 12, 12, 12, 12, + 17, 12, 12, 12, 12, 12, 12, 12] + ]}, + 'high': {'subsampling': 0, # "4:4:4" + 'quantization': [ + [6, 4, 4, 6, 9, 11, 12, 16, + 4, 5, 5, 6, 8, 10, 12, 12, + 4, 5, 5, 6, 10, 12, 12, 12, + 6, 6, 6, 11, 12, 12, 12, 12, + 9, 8, 10, 12, 12, 12, 12, 12, + 11, 10, 12, 12, 12, 12, 12, 12, + 12, 12, 12, 12, 12, 12, 12, 12, + 16, 12, 12, 12, 12, 12, 12, 12], + [7, 7, 13, 24, 20, 20, 17, 17, + 7, 12, 16, 14, 14, 12, 12, 12, + 13, 16, 14, 14, 12, 12, 12, 12, + 24, 14, 14, 12, 12, 12, 12, 12, + 20, 14, 12, 12, 12, 12, 12, 12, + 20, 12, 12, 12, 12, 12, 12, 12, + 17, 12, 12, 12, 12, 12, 12, 12, + 17, 12, 12, 12, 12, 12, 12, 12] + ]}, + 'maximum': {'subsampling': 0, # "4:4:4" + 'quantization': [ + [2, 2, 2, 2, 3, 4, 5, 6, + 2, 2, 2, 2, 3, 4, 5, 6, + 2, 2, 2, 2, 4, 5, 7, 9, + 2, 2, 2, 4, 5, 7, 9, 12, + 3, 3, 4, 5, 8, 10, 12, 12, + 4, 4, 5, 7, 10, 12, 12, 12, + 5, 5, 7, 9, 12, 12, 12, 12, + 6, 6, 9, 12, 12, 12, 12, 12], + [3, 3, 5, 9, 13, 15, 15, 15, + 3, 4, 6, 10, 14, 12, 12, 12, + 5, 6, 9, 14, 12, 12, 12, 12, + 9, 10, 14, 12, 12, 12, 12, 12, + 13, 14, 12, 12, 12, 12, 12, 12, + 15, 12, 12, 12, 12, 12, 12, 12, + 15, 12, 12, 12, 12, 12, 12, 12, + 15, 12, 12, 12, 12, 12, 12, 12] + ]}, +} diff --git a/thesisenv/lib/python3.6/site-packages/PIL/McIdasImagePlugin.py b/thesisenv/lib/python3.6/site-packages/PIL/McIdasImagePlugin.py new file mode 100644 index 0000000..161fb5e --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/PIL/McIdasImagePlugin.py @@ -0,0 +1,75 @@ +# +# The Python Imaging Library. +# $Id$ +# +# Basic McIdas support for PIL +# +# History: +# 1997-05-05 fl Created (8-bit images only) +# 2009-03-08 fl Added 16/32-bit support. +# +# Thanks to Richard Jones and Craig Swank for specs and samples. +# +# Copyright (c) Secret Labs AB 1997. +# Copyright (c) Fredrik Lundh 1997. 
+# +# See the README file for information on usage and redistribution. +# + +import struct +from . import Image, ImageFile + +__version__ = "0.2" + + +def _accept(s): + return s[:8] == b"\x00\x00\x00\x00\x00\x00\x00\x04" + + +## +# Image plugin for McIdas area images. + +class McIdasImageFile(ImageFile.ImageFile): + + format = "MCIDAS" + format_description = "McIdas area file" + + def _open(self): + + # parse area file directory + s = self.fp.read(256) + if not _accept(s) or len(s) != 256: + raise SyntaxError("not an McIdas area file") + + self.area_descriptor_raw = s + self.area_descriptor = w = [0] + list(struct.unpack("!64i", s)) + + # get mode + if w[11] == 1: + mode = rawmode = "L" + elif w[11] == 2: + # FIXME: add memory map support + mode = "I" + rawmode = "I;16B" + elif w[11] == 4: + # FIXME: add memory map support + mode = "I" + rawmode = "I;32B" + else: + raise SyntaxError("unsupported McIdas format") + + self.mode = mode + self._size = w[10], w[9] + + offset = w[34] + w[15] + stride = w[15] + w[10]*w[11]*w[14] + + self.tile = [("raw", (0, 0) + self.size, offset, (rawmode, stride, 1))] + + +# -------------------------------------------------------------------- +# registry + +Image.register_open(McIdasImageFile.format, McIdasImageFile, _accept) + +# no default extension diff --git a/thesisenv/lib/python3.6/site-packages/PIL/MicImagePlugin.py b/thesisenv/lib/python3.6/site-packages/PIL/MicImagePlugin.py new file mode 100644 index 0000000..1dbb6a5 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/PIL/MicImagePlugin.py @@ -0,0 +1,107 @@ +# +# The Python Imaging Library. +# $Id$ +# +# Microsoft Image Composer support for PIL +# +# Notes: +# uses TiffImagePlugin.py to read the actual image streams +# +# History: +# 97-01-20 fl Created +# +# Copyright (c) Secret Labs AB 1997. +# Copyright (c) Fredrik Lundh 1997. +# +# See the README file for information on usage and redistribution. +# + + +from . 
import Image, TiffImagePlugin + +import olefile + +__version__ = "0.1" + + +# +# -------------------------------------------------------------------- + + +def _accept(prefix): + return prefix[:8] == olefile.MAGIC + + +## +# Image plugin for Microsoft's Image Composer file format. + +class MicImageFile(TiffImagePlugin.TiffImageFile): + + format = "MIC" + format_description = "Microsoft Image Composer" + _close_exclusive_fp_after_loading = False + + def _open(self): + + # read the OLE directory and see if this is a likely + # to be a Microsoft Image Composer file + + try: + self.ole = olefile.OleFileIO(self.fp) + except IOError: + raise SyntaxError("not an MIC file; invalid OLE file") + + # find ACI subfiles with Image members (maybe not the + # best way to identify MIC files, but what the... ;-) + + self.images = [] + for path in self.ole.listdir(): + if path[1:] and path[0][-4:] == ".ACI" and path[1] == "Image": + self.images.append(path) + + # if we didn't find any images, this is probably not + # an MIC file. 
+ if not self.images: + raise SyntaxError("not an MIC file; no image entries") + + self.__fp = self.fp + self.frame = None + + if len(self.images) > 1: + self.category = Image.CONTAINER + + self.seek(0) + + @property + def n_frames(self): + return len(self.images) + + @property + def is_animated(self): + return len(self.images) > 1 + + def seek(self, frame): + if not self._seek_check(frame): + return + try: + filename = self.images[frame] + except IndexError: + raise EOFError("no such frame") + + self.fp = self.ole.openstream(filename) + + TiffImagePlugin.TiffImageFile._open(self) + + self.frame = frame + + def tell(self): + + return self.frame + + +# +# -------------------------------------------------------------------- + +Image.register_open(MicImageFile.format, MicImageFile, _accept) + +Image.register_extension(MicImageFile.format, ".mic") diff --git a/thesisenv/lib/python3.6/site-packages/PIL/MpegImagePlugin.py b/thesisenv/lib/python3.6/site-packages/PIL/MpegImagePlugin.py new file mode 100644 index 0000000..15c7afc --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/PIL/MpegImagePlugin.py @@ -0,0 +1,85 @@ +# +# The Python Imaging Library. +# $Id$ +# +# MPEG file handling +# +# History: +# 95-09-09 fl Created +# +# Copyright (c) Secret Labs AB 1997. +# Copyright (c) Fredrik Lundh 1995. +# +# See the README file for information on usage and redistribution. +# + + +from . 
import Image, ImageFile +from ._binary import i8 + +__version__ = "0.1" + + +# +# Bitstream parser + +class BitStream(object): + + def __init__(self, fp): + self.fp = fp + self.bits = 0 + self.bitbuffer = 0 + + def next(self): + return i8(self.fp.read(1)) + + def peek(self, bits): + while self.bits < bits: + c = self.next() + if c < 0: + self.bits = 0 + continue + self.bitbuffer = (self.bitbuffer << 8) + c + self.bits += 8 + return self.bitbuffer >> (self.bits - bits) & (1 << bits) - 1 + + def skip(self, bits): + while self.bits < bits: + self.bitbuffer = (self.bitbuffer << 8) + i8(self.fp.read(1)) + self.bits += 8 + self.bits = self.bits - bits + + def read(self, bits): + v = self.peek(bits) + self.bits = self.bits - bits + return v + + +## +# Image plugin for MPEG streams. This plugin can identify a stream, +# but it cannot read it. + +class MpegImageFile(ImageFile.ImageFile): + + format = "MPEG" + format_description = "MPEG" + + def _open(self): + + s = BitStream(self.fp) + + if s.read(32) != 0x1B3: + raise SyntaxError("not an MPEG file") + + self.mode = "RGB" + self._size = s.read(12), s.read(12) + + +# -------------------------------------------------------------------- +# Registry stuff + +Image.register_open(MpegImageFile.format, MpegImageFile) + +Image.register_extensions(MpegImageFile.format, [".mpg", ".mpeg"]) + +Image.register_mime(MpegImageFile.format, "video/mpeg") diff --git a/thesisenv/lib/python3.6/site-packages/PIL/MpoImagePlugin.py b/thesisenv/lib/python3.6/site-packages/PIL/MpoImagePlugin.py new file mode 100644 index 0000000..a1a8d65 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/PIL/MpoImagePlugin.py @@ -0,0 +1,99 @@ +# +# The Python Imaging Library. +# $Id$ +# +# MPO file handling +# +# See "Multi-Picture Format" (CIPA DC-007-Translation 2009, Standard of the +# Camera & Imaging Products Association) +# +# The multi-picture object combines multiple JPEG images (with a modified EXIF +# data format) into a single file. 
While it can theoretically be used much like +# a GIF animation, it is commonly used to represent 3D photographs and is (as +# of this writing) the most commonly used format by 3D cameras. +# +# History: +# 2014-03-13 Feneric Created +# +# See the README file for information on usage and redistribution. +# + +from . import Image, JpegImagePlugin + +__version__ = "0.1" + + +def _accept(prefix): + return JpegImagePlugin._accept(prefix) + + +def _save(im, fp, filename): + # Note that we can only save the current frame at present + return JpegImagePlugin._save(im, fp, filename) + + +## +# Image plugin for MPO images. + +class MpoImageFile(JpegImagePlugin.JpegImageFile): + + format = "MPO" + format_description = "MPO (CIPA DC-007)" + _close_exclusive_fp_after_loading = False + + def _open(self): + self.fp.seek(0) # prep the fp in order to pass the JPEG test + JpegImagePlugin.JpegImageFile._open(self) + self.mpinfo = self._getmp() + self.__framecount = self.mpinfo[0xB001] + self.__mpoffsets = [mpent['DataOffset'] + self.info['mpoffset'] + for mpent in self.mpinfo[0xB002]] + self.__mpoffsets[0] = 0 + # Note that the following assertion will only be invalid if something + # gets broken within JpegImagePlugin. 
+ assert self.__framecount == len(self.__mpoffsets) + del self.info['mpoffset'] # no longer needed + self.__fp = self.fp # FIXME: hack + self.__fp.seek(self.__mpoffsets[0]) # get ready to read first frame + self.__frame = 0 + self.offset = 0 + # for now we can only handle reading and individual frame extraction + self.readonly = 1 + + def load_seek(self, pos): + self.__fp.seek(pos) + + @property + def n_frames(self): + return self.__framecount + + @property + def is_animated(self): + return self.__framecount > 1 + + def seek(self, frame): + if not self._seek_check(frame): + return + self.fp = self.__fp + self.offset = self.__mpoffsets[frame] + self.tile = [ + ("jpeg", (0, 0) + self.size, self.offset, (self.mode, "")) + ] + self.__frame = frame + + def tell(self): + return self.__frame + + +# --------------------------------------------------------------------- +# Registry stuff + +# Note that since MPO shares a factory with JPEG, we do not need to do a +# separate registration for it here. +# Image.register_open(MpoImageFile.format, +# JpegImagePlugin.jpeg_factory, _accept) +Image.register_save(MpoImageFile.format, _save) + +Image.register_extension(MpoImageFile.format, ".mpo") + +Image.register_mime(MpoImageFile.format, "image/mpo") diff --git a/thesisenv/lib/python3.6/site-packages/PIL/MspImagePlugin.py b/thesisenv/lib/python3.6/site-packages/PIL/MspImagePlugin.py new file mode 100644 index 0000000..74c6817 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/PIL/MspImagePlugin.py @@ -0,0 +1,193 @@ +# +# The Python Imaging Library. +# +# MSP file handling +# +# This is the format used by the Paint program in Windows 1 and 2. +# +# History: +# 95-09-05 fl Created +# 97-01-03 fl Read/write MSP images +# 17-02-21 es Fixed RLE interpretation +# +# Copyright (c) Secret Labs AB 1997. +# Copyright (c) Fredrik Lundh 1995-97. +# Copyright (c) Eric Soroos 2017. +# +# See the README file for information on usage and redistribution. 
+# +# More info on this format: https://archive.org/details/gg243631 +# Page 313: +# Figure 205. Windows Paint Version 1: "DanM" Format +# Figure 206. Windows Paint Version 2: "LinS" Format. Used in Windows V2.03 +# +# See also: http://www.fileformat.info/format/mspaint/egff.htm + +from . import Image, ImageFile +from ._binary import i16le as i16, o16le as o16, i8 +import struct +import io + +__version__ = "0.1" + + +# +# read MSP files + + +def _accept(prefix): + return prefix[:4] in [b"DanM", b"LinS"] + + +## +# Image plugin for Windows MSP images. This plugin supports both +# uncompressed (Windows 1.0). + +class MspImageFile(ImageFile.ImageFile): + + format = "MSP" + format_description = "Windows Paint" + + def _open(self): + + # Header + s = self.fp.read(32) + if s[:4] not in [b"DanM", b"LinS"]: + raise SyntaxError("not an MSP file") + + # Header checksum + checksum = 0 + for i in range(0, 32, 2): + checksum = checksum ^ i16(s[i:i+2]) + if checksum != 0: + raise SyntaxError("bad MSP checksum") + + self.mode = "1" + self._size = i16(s[4:]), i16(s[6:]) + + if s[:4] == b"DanM": + self.tile = [("raw", (0, 0)+self.size, 32, ("1", 0, 1))] + else: + self.tile = [("MSP", (0, 0)+self.size, 32, None)] + + +class MspDecoder(ImageFile.PyDecoder): + # The algo for the MSP decoder is from + # http://www.fileformat.info/format/mspaint/egff.htm + # cc-by-attribution -- That page references is taken from the + # Encyclopedia of Graphics File Formats and is licensed by + # O'Reilly under the Creative Common/Attribution license + # + # For RLE encoded files, the 32byte header is followed by a scan + # line map, encoded as one 16bit word of encoded byte length per + # line. + # + # NOTE: the encoded length of the line can be 0. This was not + # handled in the previous version of this encoder, and there's no + # mention of how to handle it in the documentation. From the few + # examples I've seen, I've assumed that it is a fill of the + # background color, in this case, white. 
+ # + # + # Pseudocode of the decoder: + # Read a BYTE value as the RunType + # If the RunType value is zero + # Read next byte as the RunCount + # Read the next byte as the RunValue + # Write the RunValue byte RunCount times + # If the RunType value is non-zero + # Use this value as the RunCount + # Read and write the next RunCount bytes literally + # + # e.g.: + # 0x00 03 ff 05 00 01 02 03 04 + # would yield the bytes: + # 0xff ff ff 00 01 02 03 04 + # + # which are then interpreted as a bit packed mode '1' image + + _pulls_fd = True + + def decode(self, buffer): + + img = io.BytesIO() + blank_line = bytearray((0xff,)*((self.state.xsize+7)//8)) + try: + self.fd.seek(32) + rowmap = struct.unpack_from("<%dH" % (self.state.ysize), + self.fd.read(self.state.ysize*2)) + except struct.error: + raise IOError("Truncated MSP file in row map") + + for x, rowlen in enumerate(rowmap): + try: + if rowlen == 0: + img.write(blank_line) + continue + row = self.fd.read(rowlen) + if len(row) != rowlen: + raise IOError( + "Truncated MSP file, expected %d bytes on row %s", + (rowlen, x)) + idx = 0 + while idx < rowlen: + runtype = i8(row[idx]) + idx += 1 + if runtype == 0: + (runcount, runval) = struct.unpack_from("Bc", row, idx) + img.write(runval * runcount) + idx += 2 + else: + runcount = runtype + img.write(row[idx:idx+runcount]) + idx += runcount + + except struct.error: + raise IOError("Corrupted MSP file in row %d" % x) + + self.set_as_raw(img.getvalue(), ("1", 0, 1)) + + return 0, 0 + + +Image.register_decoder('MSP', MspDecoder) + + +# +# write MSP files (uncompressed only) + + +def _save(im, fp, filename): + + if im.mode != "1": + raise IOError("cannot write mode %s as MSP" % im.mode) + + # create MSP header + header = [0] * 16 + + header[0], header[1] = i16(b"Da"), i16(b"nM") # version 1 + header[2], header[3] = im.size + header[4], header[5] = 1, 1 + header[6], header[7] = 1, 1 + header[8], header[9] = im.size + + checksum = 0 + for h in header: + checksum = checksum ^ h 
+ header[12] = checksum # FIXME: is this the right field? + + # header + for h in header: + fp.write(o16(h)) + + # image body + ImageFile._save(im, fp, [("raw", (0, 0)+im.size, 32, ("1", 0, 1))]) + + +# +# registry + +Image.register_open(MspImageFile.format, MspImageFile, _accept) +Image.register_save(MspImageFile.format, _save) + +Image.register_extension(MspImageFile.format, ".msp") diff --git a/thesisenv/lib/python3.6/site-packages/PIL/OleFileIO.py b/thesisenv/lib/python3.6/site-packages/PIL/OleFileIO.py new file mode 100644 index 0000000..b3caa10 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/PIL/OleFileIO.py @@ -0,0 +1,4 @@ +raise ImportError( + 'PIL.OleFileIO is deprecated. Use the olefile Python package ' + 'instead. This module will be removed in a future version.' +) diff --git a/thesisenv/lib/python3.6/site-packages/PIL/PSDraw.py b/thesisenv/lib/python3.6/site-packages/PIL/PSDraw.py new file mode 100644 index 0000000..d2ded6f --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/PIL/PSDraw.py @@ -0,0 +1,237 @@ +# +# The Python Imaging Library +# $Id$ +# +# simple postscript graphics interface +# +# History: +# 1996-04-20 fl Created +# 1999-01-10 fl Added gsave/grestore to image method +# 2005-05-04 fl Fixed floating point issue in image (from Eric Etheridge) +# +# Copyright (c) 1997-2005 by Secret Labs AB. All rights reserved. +# Copyright (c) 1996 by Fredrik Lundh. +# +# See the README file for information on usage and redistribution. +# + +from . import EpsImagePlugin +from ._util import py3 +import sys + +## +# Simple Postscript graphics interface. + + +class PSDraw(object): + """ + Sets up printing to the given file. If **fp** is omitted, + :py:attr:`sys.stdout` is assumed. 
+ """ + + def __init__(self, fp=None): + if not fp: + fp = sys.stdout + self.fp = fp + + def _fp_write(self, to_write): + if not py3 or self.fp == sys.stdout: + self.fp.write(to_write) + else: + self.fp.write(bytes(to_write, 'UTF-8')) + + def begin_document(self, id=None): + """Set up printing of a document. (Write Postscript DSC header.)""" + # FIXME: incomplete + self._fp_write("%!PS-Adobe-3.0\n" + "save\n" + "/showpage { } def\n" + "%%EndComments\n" + "%%BeginDocument\n") + # self._fp_write(ERROR_PS) # debugging! + self._fp_write(EDROFF_PS) + self._fp_write(VDI_PS) + self._fp_write("%%EndProlog\n") + self.isofont = {} + + def end_document(self): + """Ends printing. (Write Postscript DSC footer.)""" + self._fp_write("%%EndDocument\n" + "restore showpage\n" + "%%End\n") + if hasattr(self.fp, "flush"): + self.fp.flush() + + def setfont(self, font, size): + """ + Selects which font to use. + + :param font: A Postscript font name + :param size: Size in points. + """ + if font not in self.isofont: + # reencode font + self._fp_write("/PSDraw-%s ISOLatin1Encoding /%s E\n" % + (font, font)) + self.isofont[font] = 1 + # rough + self._fp_write("/F0 %d /PSDraw-%s F\n" % (size, font)) + + def line(self, xy0, xy1): + """ + Draws a line between the two points. Coordinates are given in + Postscript point coordinates (72 points per inch, (0, 0) is the lower + left corner of the page). + """ + xy = xy0 + xy1 + self._fp_write("%d %d %d %d Vl\n" % xy) + + def rectangle(self, box): + """ + Draws a rectangle. + + :param box: A 4-tuple of integers whose order and function is currently + undocumented. + + Hint: the tuple is passed into this format string: + + .. code-block:: python + + %d %d M %d %d 0 Vr\n + """ + self._fp_write("%d %d M %d %d 0 Vr\n" % box) + + def text(self, xy, text): + """ + Draws text at the given position. You must use + :py:meth:`~PIL.PSDraw.PSDraw.setfont` before calling this method. 
+ """ + text = "\\(".join(text.split("(")) + text = "\\)".join(text.split(")")) + xy = xy + (text,) + self._fp_write("%d %d M (%s) S\n" % xy) + + def image(self, box, im, dpi=None): + """Draw a PIL image, centered in the given box.""" + # default resolution depends on mode + if not dpi: + if im.mode == "1": + dpi = 200 # fax + else: + dpi = 100 # greyscale + # image size (on paper) + x = float(im.size[0] * 72) / dpi + y = float(im.size[1] * 72) / dpi + # max allowed size + xmax = float(box[2] - box[0]) + ymax = float(box[3] - box[1]) + if x > xmax: + y = y * xmax / x + x = xmax + if y > ymax: + x = x * ymax / y + y = ymax + dx = (xmax - x) / 2 + box[0] + dy = (ymax - y) / 2 + box[1] + self._fp_write("gsave\n%f %f translate\n" % (dx, dy)) + if (x, y) != im.size: + # EpsImagePlugin._save prints the image at (0,0,xsize,ysize) + sx = x / im.size[0] + sy = y / im.size[1] + self._fp_write("%f %f scale\n" % (sx, sy)) + EpsImagePlugin._save(im, self.fp, None, 0) + self._fp_write("\ngrestore\n") + +# -------------------------------------------------------------------- +# Postscript driver + +# +# EDROFF.PS -- Postscript driver for Edroff 2 +# +# History: +# 94-01-25 fl: created (edroff 2.04) +# +# Copyright (c) Fredrik Lundh 1994. +# + + +EDROFF_PS = """\ +/S { show } bind def +/P { moveto show } bind def +/M { moveto } bind def +/X { 0 rmoveto } bind def +/Y { 0 exch rmoveto } bind def +/E { findfont + dup maxlength dict begin + { + 1 index /FID ne { def } { pop pop } ifelse + } forall + /Encoding exch def + dup /FontName exch def + currentdict end definefont pop +} bind def +/F { findfont exch scalefont dup setfont + [ exch /setfont cvx ] cvx bind def +} bind def +""" + +# +# VDI.PS -- Postscript driver for VDI meta commands +# +# History: +# 94-01-25 fl: created (edroff 2.04) +# +# Copyright (c) Fredrik Lundh 1994. 
+# + +VDI_PS = """\ +/Vm { moveto } bind def +/Va { newpath arcn stroke } bind def +/Vl { moveto lineto stroke } bind def +/Vc { newpath 0 360 arc closepath } bind def +/Vr { exch dup 0 rlineto + exch dup neg 0 exch rlineto + exch neg 0 rlineto + 0 exch rlineto + 100 div setgray fill 0 setgray } bind def +/Tm matrix def +/Ve { Tm currentmatrix pop + translate scale newpath 0 0 .5 0 360 arc closepath + Tm setmatrix +} bind def +/Vf { currentgray exch setgray fill setgray } bind def +""" + +# +# ERROR.PS -- Error handler +# +# History: +# 89-11-21 fl: created (pslist 1.10) +# + +ERROR_PS = """\ +/landscape false def +/errorBUF 200 string def +/errorNL { currentpoint 10 sub exch pop 72 exch moveto } def +errordict begin /handleerror { + initmatrix /Courier findfont 10 scalefont setfont + newpath 72 720 moveto $error begin /newerror false def + (PostScript Error) show errorNL errorNL + (Error: ) show + /errorname load errorBUF cvs show errorNL errorNL + (Command: ) show + /command load dup type /stringtype ne { errorBUF cvs } if show + errorNL errorNL + (VMstatus: ) show + vmstatus errorBUF cvs show ( bytes available, ) show + errorBUF cvs show ( bytes used at level ) show + errorBUF cvs show errorNL errorNL + (Operand stargck: ) show errorNL /ostargck load { + dup type /stringtype ne { errorBUF cvs } if 72 0 rmoveto show errorNL + } forall errorNL + (Execution stargck: ) show errorNL /estargck load { + dup type /stringtype ne { errorBUF cvs } if 72 0 rmoveto show errorNL + } forall + end showpage +} def end +""" diff --git a/thesisenv/lib/python3.6/site-packages/PIL/PaletteFile.py b/thesisenv/lib/python3.6/site-packages/PIL/PaletteFile.py new file mode 100644 index 0000000..9ed69d6 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/PIL/PaletteFile.py @@ -0,0 +1,55 @@ +# +# Python Imaging Library +# $Id$ +# +# stuff to read simple, teragon-style palette files +# +# History: +# 97-08-23 fl Created +# +# Copyright (c) Secret Labs AB 1997. 
+# Copyright (c) Fredrik Lundh 1997. +# +# See the README file for information on usage and redistribution. +# + +from ._binary import o8 + + +## +# File handler for Teragon-style palette files. + +class PaletteFile(object): + + rawmode = "RGB" + + def __init__(self, fp): + + self.palette = [(i, i, i) for i in range(256)] + + while True: + + s = fp.readline() + + if not s: + break + if s[0:1] == b"#": + continue + if len(s) > 100: + raise SyntaxError("bad palette file") + + v = [int(x) for x in s.split()] + try: + [i, r, g, b] = v + except ValueError: + [i, r] = v + g = b = r + + if 0 <= i <= 255: + self.palette[i] = o8(r) + o8(g) + o8(b) + + self.palette = b"".join(self.palette) + + def getpalette(self): + + return self.palette, self.rawmode diff --git a/thesisenv/lib/python3.6/site-packages/PIL/PalmImagePlugin.py b/thesisenv/lib/python3.6/site-packages/PIL/PalmImagePlugin.py new file mode 100644 index 0000000..7d7b165 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/PIL/PalmImagePlugin.py @@ -0,0 +1,236 @@ +# +# The Python Imaging Library. +# $Id$ +# + +## +# Image plugin for Palm pixmap images (output only). +## + +from . 
from . import Image, ImageFile
from ._binary import o8, o16be as o16b

__version__ = "1.0"


def _build_colormap_values():
    """Recreate the standard Palm 8-bit colormap.

    The table is the 6x6x6 web-safe cube (with the trailing black entry
    omitted), a 10-step grey wedge (multiples of 17 not already in the
    cube), the five extra VGA colours, padded with black to 256 entries.
    Generating it keeps the data in one screenful instead of a 256-line
    literal.
    """
    ramp = (255, 204, 153, 102, 51, 0)
    values = []
    # cube ordering in the original table: two halves over the blue ramp,
    # then red (outer), blue, green (inner)
    for blues in ((255, 204, 153), (102, 51, 0)):
        for r in ramp:
            for b in blues:
                for g in ramp:
                    values.append((r, g, b))
    # the Palm table omits the cube's final (0, 0, 0) entry
    del values[-1]
    # grey wedge: 17*k for k = 1..14 excluding multiples of 3
    # (those greys are already cube entries)
    values += [(v, v, v) for v in range(17, 255, 17) if v % 51]
    values += [(192, 192, 192), (128, 0, 0), (128, 0, 128),
               (0, 128, 0), (0, 128, 128)]
    # pad with black up to 256 entries
    values += [(0, 0, 0)] * (256 - len(values))
    return tuple(values)


_Palm8BitColormapValues = _build_colormap_values()


# so build a prototype image to be used for palette resampling
def build_prototype_image():
    """Return an Nx1 "P"-mode image carrying the standard Palm palette."""
    image = Image.new("L", (1, len(_Palm8BitColormapValues)))
    image.putdata(list(range(len(_Palm8BitColormapValues))))
    palettedata = ()
    for colormapValue in _Palm8BitColormapValues:
        palettedata += colormapValue
    palettedata += (0, 0, 0)*(256 - len(_Palm8BitColormapValues))
    image.putpalette(palettedata)
    return image


Palm8BitColormapImage = build_prototype_image()

# OK, we now have in Palm8BitColormapImage,
# a "P"-mode image with the right palette
#
# --------------------------------------------------------------------

_FLAGS = {
    "custom-colormap": 0x4000,
    "is-compressed": 0x8000,
    "has-transparent": 0x2000,
    }

_COMPRESSION_TYPES = {
    "none": 0xFF,
    "rle": 0x01,
    "scanline": 0x00,
    }


#
# --------------------------------------------------------------------

##
# (Internal) Image save plugin for the Palm format.

def _save(im, fp, filename):
    """Write *im* to *fp* as an (uncompressed) Palm pixmap."""

    if im.mode == "P":

        # we assume this is a color Palm image with the standard colormap,
        # unless the "info" dict has a "custom-colormap" field

        rawmode = "P"
        bpp = 8
        version = 1

    elif (im.mode == "L" and
          "bpp" in im.encoderinfo and
          im.encoderinfo["bpp"] in (1, 2, 4)):

        # this is 8-bit grayscale, so we shift it to get the high-order bits,
        # and invert it because
        # Palm does greyscale from white (0) to black (1)
        bpp = im.encoderinfo["bpp"]
        im = im.point(
            lambda x, shift=8-bpp, maxval=(1 << bpp)-1: maxval - (x >> shift))
        # we ignore the palette here
        im.mode = "P"
        rawmode = "P;" + str(bpp)
        version = 1

    elif im.mode == "L" and "bpp" in im.info and im.info["bpp"] in (1, 2, 4):

        # here we assume that even though the inherent mode is 8-bit grayscale,
        # only the lower bpp bits are significant.
        # We invert them to match the Palm.
        bpp = im.info["bpp"]
        im = im.point(lambda x, maxval=(1 << bpp)-1: maxval - (x & maxval))
        # we ignore the palette here
        im.mode = "P"
        rawmode = "P;" + str(bpp)
        version = 1

    elif im.mode == "1":

        # monochrome -- write it inverted, as is the Palm standard
        rawmode = "1;I"
        bpp = 1
        version = 0

    else:

        raise IOError("cannot write mode %s as Palm" % im.mode)

    #
    # make sure image data is available
    im.load()

    # write header

    cols = im.size[0]
    rows = im.size[1]

    # rows are padded to 16-bit boundaries
    rowbytes = int((cols + (16//bpp - 1)) / (16 // bpp)) * 2
    transparent_index = 0
    compression_type = _COMPRESSION_TYPES["none"]

    flags = 0
    if im.mode == "P" and "custom-colormap" in im.info:
        # FIX: this used `flags & _FLAGS[...]`, which with flags == 0
        # always produced 0, so the custom-colormap flag was never set.
        flags = flags | _FLAGS["custom-colormap"]
        colormapsize = 4 * 256 + 2
        colormapmode = im.palette.mode
        colormap = im.getdata().getpalette()
    else:
        colormapsize = 0

    if "offset" in im.info:
        offset = (rowbytes * rows + 16 + 3 + colormapsize) // 4
    else:
        offset = 0

    fp.write(o16b(cols) + o16b(rows) + o16b(rowbytes) + o16b(flags))
    fp.write(o8(bpp))
    fp.write(o8(version))
    fp.write(o16b(offset))
    fp.write(o8(transparent_index))
    fp.write(o8(compression_type))
    fp.write(o16b(0))   # reserved by Palm

    # now write colormap if necessary

    if colormapsize > 0:
        fp.write(o16b(256))
        for i in range(256):
            fp.write(o8(i))
            if colormapmode == 'RGB':
                fp.write(
                    o8(colormap[3 * i]) +
                    o8(colormap[3 * i + 1]) +
                    o8(colormap[3 * i + 2]))
            elif colormapmode == 'RGBA':
                fp.write(
                    o8(colormap[4 * i]) +
                    o8(colormap[4 * i + 1]) +
                    o8(colormap[4 * i + 2]))

    # now convert data to raw form
    ImageFile._save(
        im, fp, [("raw", (0, 0)+im.size, 0, (rawmode, rowbytes, 1))])

    if hasattr(fp, "flush"):
        fp.flush()


#
# --------------------------------------------------------------------

Image.register_save("Palm", _save)

Image.register_extension("Palm", ".palm")

Image.register_mime("Palm", "image/palm")
#
# The Python Imaging Library.
# $Id$
#
# PCD file handling
#
# History:
# 96-05-10 fl Created
# 96-05-27 fl Added draft mode (128x192, 256x384)
#
# Copyright (c) Secret Labs AB 1997.
# Copyright (c) Fredrik Lundh 1996.
#
# See the README file for information on usage and redistribution.
#


from . import Image, ImageFile
from ._binary import i8

__version__ = "0.1"


##
# Image plugin for PhotoCD images. This plugin only reads the 768x512
# image from the file; higher resolutions are encoded in a proprietary
# encoding.

class PcdImageFile(ImageFile.ImageFile):

    format = "PCD"
    format_description = "Kodak PhotoCD"

    def _open(self):

        # the interesting header data lives in the second 2K sector
        self.fp.seek(2048)
        header = self.fp.read(2048)

        if header[:4] != b"PCD_":
            raise SyntaxError("not a PCD file")

        # low two bits of byte 1538 encode the stored rotation
        rotation = i8(header[1538]) & 3
        if rotation == 1:
            self.tile_post_rotate = 90
        elif rotation == 3:
            self.tile_post_rotate = -90
        else:
            self.tile_post_rotate = None

        self.mode = "RGB"
        self._size = 768, 512  # FIXME: not correct for rotated images!
        self.tile = [("pcd", (0, 0) + self.size, 96 * 2048, None)]

    def load_end(self):
        # rotated images are stored unrotated; fix them up after decoding
        if self.tile_post_rotate:
            self.im = self.im.rotate(self.tile_post_rotate)
            self._size = self.im.size


#
# registry

Image.register_open(PcdImageFile.format, PcdImageFile)

Image.register_extension(PcdImageFile.format, ".pcd")


#
# THIS IS WORK IN PROGRESS
#
# The Python Imaging Library
# $Id$
#
# portable compiled font file parser
#
# history:
# 1997-08-19 fl created
# 2003-09-13 fl fixed loading of unicode fonts
#
# Copyright (c) 1997-2003 by Secret Labs AB.
# Copyright (c) 1997-2003 by Fredrik Lundh.
#
# See the README file for information on usage and redistribution.
#

from . import Image, FontFile
from ._binary import i8, i16le as l16, i32le as l32, i16be as b16, i32be as b32

# --------------------------------------------------------------------
# declarations

PCF_MAGIC = 0x70636601  # "\x01fcp"

PCF_PROPERTIES = (1 << 0)
PCF_ACCELERATORS = (1 << 1)
PCF_METRICS = (1 << 2)
PCF_BITMAPS = (1 << 3)
PCF_INK_METRICS = (1 << 4)
PCF_BDF_ENCODINGS = (1 << 5)
PCF_SWIDTHS = (1 << 6)
PCF_GLYPH_NAMES = (1 << 7)
PCF_BDF_ACCELERATORS = (1 << 8)

# row padding helpers, indexed by the low two format bits
BYTES_PER_ROW = [
    lambda bits: ((bits+7) >> 3),
    lambda bits: ((bits+15) >> 3) & ~1,
    lambda bits: ((bits+31) >> 3) & ~3,
    lambda bits: ((bits+63) >> 3) & ~7,
]


def sz(s, o):
    # NUL-terminated string starting at offset o
    return s[o:s.index(b"\0", o)]


##
# Font file plugin for the X11 PCF format.
class PcfFontFile(FontFile.FontFile):
    """Font file plugin for the X11 PCF bitmap-font format.

    Parses the table of contents, properties, metrics, bitmaps and
    encoding tables, and fills ``self.glyph`` for the first 256
    character codes (ISO-8859-1 subset only).
    """

    name = "name"

    def __init__(self, fp):

        # validate the magic number (always little-endian)
        magic = l32(fp.read(4))
        if magic != PCF_MAGIC:
            raise SyntaxError("not a PCF file")

        FontFile.FontFile.__init__(self)

        # table of contents: table type -> (format, size, offset)
        count = l32(fp.read(4))
        self.toc = {}
        for i in range(count):
            # renamed from `type`/`format` locals to avoid shadowing builtins
            table_type = l32(fp.read(4))
            self.toc[table_type] = (
                l32(fp.read(4)), l32(fp.read(4)), l32(fp.read(4)))

        self.fp = fp

        self.info = self._load_properties()

        metrics = self._load_metrics()
        bitmaps = self._load_bitmaps(metrics)
        encoding = self._load_encoding()

        #
        # create glyph structure

        for ch in range(256):
            ix = encoding[ch]
            if ix is not None:
                x, y, l, r, w, a, d, f = metrics[ix]
                glyph = (w, 0), (l, d-y, x+l, d), (0, 0, x, y), bitmaps[ix]
                self.glyph[ch] = glyph

    def _getformat(self, tag):
        """Seek to table *tag*; return (fp, format word, i16, i32).

        i16/i32 are the integer readers matching the table's byte order
        (bit 2 of the format word selects big-endian).
        """

        fmt, size, offset = self.toc[tag]

        fp = self.fp
        fp.seek(offset)

        fmt = l32(fp.read(4))

        if fmt & 4:
            i16, i32 = b16, b32
        else:
            i16, i32 = l16, l32

        return fp, fmt, i16, i32

    def _load_properties(self):
        """Read the font properties table into a dict."""

        properties = {}

        fp, fmt, i16, i32 = self._getformat(PCF_PROPERTIES)

        nprops = i32(fp.read(4))

        # read property description: (name offset, is-string flag, value)
        p = []
        for i in range(nprops):
            p.append((i32(fp.read(4)), i8(fp.read(1)), i32(fp.read(4))))
        if nprops & 3:
            fp.seek(4 - (nprops & 3), 1)  # pad to 32-bit boundary

        data = fp.read(i32(fp.read(4)))

        for k, s, v in p:
            k = sz(data, k)
            if s:  # string value: v is an offset into the string pool
                v = sz(data, v)
            properties[k] = v

        return properties

    def _load_metrics(self):
        """Read per-glyph metrics.

        Returns a list of tuples
        (xsize, ysize, left, right, width, ascent, descent, attributes).
        """

        metrics = []

        fp, fmt, i16, i32 = self._getformat(PCF_METRICS)

        append = metrics.append

        if (fmt & 0xff00) == 0x100:

            # "compressed" metrics: unsigned bytes biased by 128
            for i in range(i16(fp.read(2))):
                left = i8(fp.read(1)) - 128
                right = i8(fp.read(1)) - 128
                width = i8(fp.read(1)) - 128
                ascent = i8(fp.read(1)) - 128
                descent = i8(fp.read(1)) - 128
                xsize = right - left
                ysize = ascent + descent
                append(
                    (xsize, ysize, left, right, width,
                     ascent, descent, 0)
                )

        else:

            # "jumbo" metrics: full 16-bit fields
            for i in range(i32(fp.read(4))):
                left = i16(fp.read(2))
                right = i16(fp.read(2))
                width = i16(fp.read(2))
                ascent = i16(fp.read(2))
                descent = i16(fp.read(2))
                attributes = i16(fp.read(2))
                xsize = right - left
                ysize = ascent + descent
                append(
                    (xsize, ysize, left, right, width,
                     ascent, descent, attributes)
                )

        return metrics

    def _load_bitmaps(self, metrics):
        """Read the bitmap table; return one 1-bit Image per glyph."""

        bitmaps = []

        fp, fmt, i16, i32 = self._getformat(PCF_BITMAPS)

        nbitmaps = i32(fp.read(4))

        if nbitmaps != len(metrics):
            raise IOError("Wrong number of bitmaps")

        offsets = []
        for i in range(nbitmaps):
            offsets.append(i32(fp.read(4)))

        bitmapSizes = []
        for i in range(4):
            bitmapSizes.append(i32(fp.read(4)))

        # byteorder = fmt & 4  # non-zero => MSB
        bitorder = fmt & 8  # non-zero => MSB
        padindex = fmt & 3

        bitmapsize = bitmapSizes[padindex]
        offsets.append(bitmapsize)  # sentinel so offsets[i+1] always works

        data = fp.read(bitmapsize)

        pad = BYTES_PER_ROW[padindex]
        mode = "1;R"  # LSB first
        if bitorder:
            mode = "1"

        for i in range(nbitmaps):
            x, y, l, r, w, a, d, f = metrics[i]
            b, e = offsets[i], offsets[i+1]
            bitmaps.append(
                Image.frombytes("1", (x, y), data[b:e], "raw", mode, pad(x))
            )

        return bitmaps

    def _load_encoding(self):
        """Map character codes 0-255 to bitmap indexes (None = no glyph)."""

        encoding = [None] * 256

        fp, fmt, i16, i32 = self._getformat(PCF_BDF_ENCODINGS)

        firstCol, lastCol = i16(fp.read(2)), i16(fp.read(2))
        firstRow, lastRow = i16(fp.read(2)), i16(fp.read(2))

        default = i16(fp.read(2))

        nencoding = (lastCol - firstCol + 1) * (lastRow - firstRow + 1)

        for i in range(nencoding):
            encodingOffset = i16(fp.read(2))
            if encodingOffset != 0xFFFF:  # 0xFFFF marks "no glyph"
                try:
                    encoding[i+firstCol] = encodingOffset
                except IndexError:
                    break  # only load ISO-8859-1 glyphs

        return encoding
#
# The Python Imaging Library.
# $Id$
#
# PCX file handling
#
# This format was originally used by ZSoft's popular PaintBrush
# program for the IBM PC. It is also supported by many MS-DOS and
# Windows applications, including the Windows PaintBrush program in
# Windows 3.
#
# history:
# 1995-09-01 fl Created
# 1996-05-20 fl Fixed RGB support
# 1997-01-03 fl Fixed 2-bit and 4-bit support
# 1999-02-03 fl Fixed 8-bit support (broken in 1.0b1)
# 1999-02-07 fl Added write support
# 2002-06-09 fl Made 2-bit and 4-bit support a bit more robust
# 2002-07-30 fl Seek from to current position, not beginning of file
# 2003-06-03 fl Extract DPI settings (info["dpi"])
#
# Copyright (c) 1997-2003 by Secret Labs AB.
# Copyright (c) 1995-2003 by Fredrik Lundh.
#
# See the README file for information on usage and redistribution.
#

import logging
from . import Image, ImageFile, ImagePalette
from ._binary import i8, i16le as i16, o8, o16le as o16

logger = logging.getLogger(__name__)

__version__ = "0.6"


def _accept(prefix):
    # PCX magic byte 0x0A followed by a known version number
    return i8(prefix[0]) == 10 and i8(prefix[1]) in [0, 2, 3, 5]


##
# Image plugin for Paintbrush images.

class PcxImageFile(ImageFile.ImageFile):

    format = "PCX"
    format_description = "Paintbrush"

    def _open(self):

        # read and validate the 128-byte header
        header = self.fp.read(128)
        if not _accept(header):
            raise SyntaxError("not a PCX file")

        # bounding box (inclusive coordinates in the file)
        bbox = (i16(header, 4), i16(header, 6),
                i16(header, 8) + 1, i16(header, 10) + 1)
        if bbox[2] <= bbox[0] or bbox[3] <= bbox[1]:
            raise SyntaxError("bad PCX image size")
        logger.debug("BBox: %s %s %s %s", *bbox)

        # pixel format
        version = i8(header[1])
        bits = i8(header[3])
        planes = i8(header[65])
        stride = i16(header, 66)
        logger.debug("PCX version %s, bits %s, planes %s, stride %s",
                     version, bits, planes, stride)

        self.info["dpi"] = i16(header, 12), i16(header, 14)

        if bits == 1 and planes == 1:
            mode = rawmode = "1"

        elif bits == 1 and planes in (2, 4):
            mode = "P"
            rawmode = "P;%dL" % planes
            self.palette = ImagePalette.raw("RGB", header[16:64])

        elif version == 5 and bits == 8 and planes == 1:
            mode = rawmode = "L"
            # FIXME: hey, this doesn't work with the incremental loader !!!
            self.fp.seek(-769, 2)
            data = self.fp.read(769)
            if len(data) == 769 and i8(data[0]) == 12:
                # check if the palette is linear greyscale
                if any(data[i*3+1:i*3+4] != o8(i)*3 for i in range(256)):
                    mode = rawmode = "P"
                if mode == "P":
                    self.palette = ImagePalette.raw("RGB", data[1:])
            self.fp.seek(128)

        elif version == 5 and bits == 8 and planes == 3:
            mode = "RGB"
            rawmode = "RGB;L"

        else:
            raise IOError("unknown PCX mode")

        self.mode = mode
        self._size = bbox[2]-bbox[0], bbox[3]-bbox[1]

        bbox = (0, 0) + self.size
        logger.debug("size: %sx%s", *self.size)

        self.tile = [("pcx", bbox, self.fp.tell(), (rawmode, planes * stride))]

# --------------------------------------------------------------------
# save PCX files


SAVE = {
    # mode: (version, bits, planes, raw mode)
    "1": (2, 1, 1, "1"),
    "L": (5, 8, 1, "L"),
    "P": (5, 8, 1, "P"),
    "RGB": (5, 8, 3, "RGB;L"),
}


def _save(im, fp, filename):

    try:
        version, bits, planes, rawmode = SAVE[im.mode]
    except KeyError:
        raise ValueError("Cannot save %s images as PCX" % im.mode)

    # bytes per plane, rounded up to an even count
    stride = (im.size[0] * bits + 7) // 8
    stride += stride % 2
    # Stride needs to be kept in sync with the PcxEncode.c version.
    # Ideally it should be passed in in the state, but the bytes value
    # gets overwritten.

    logger.debug("PcxImagePlugin._save: xwidth: %d, bits: %d, stride: %d",
                 im.size[0], bits, stride)

    # under windows, we could determine the current screen size with
    # "Image.core.display_mode()[1]", but I think that's overkill...

    screen = im.size

    dpi = 100, 100

    # assemble and emit the 128-byte PCX header
    fp.write(b"".join([
        o8(10), o8(version), o8(1), o8(bits), o16(0),
        o16(0), o16(im.size[0]-1), o16(im.size[1]-1), o16(dpi[0]),
        o16(dpi[1]), b"\0"*24, b"\xFF"*24, b"\0", o8(planes),
        o16(stride), o16(1), o16(screen[0]), o16(screen[1]),
        b"\0"*54,
    ]))

    assert fp.tell() == 128

    ImageFile._save(im, fp, [("pcx", (0, 0)+im.size, 0,
                              (rawmode, bits*planes))])

    if im.mode == "P":
        # colour palette
        fp.write(o8(12))
        fp.write(im.im.getpalette("RGB", "RGB"))  # 768 bytes
    elif im.mode == "L":
        # greyscale palette
        fp.write(o8(12))
        for i in range(256):
            fp.write(o8(i)*3)

# --------------------------------------------------------------------
# registry


Image.register_open(PcxImageFile.format, PcxImageFile, _accept)
Image.register_save(PcxImageFile.format, _save)

Image.register_extension(PcxImageFile.format, ".pcx")
# 2004-02-24 fl Fixes for 1 and P images.
#
# Copyright (c) 1997-2004 by Secret Labs AB.  All rights reserved.
# Copyright (c) 1996-1997 by Fredrik Lundh.
#
# See the README file for information on usage and redistribution.
#

##
# Image plugin for PDF images (output only).
##

from . import Image, ImageFile, ImageSequence, PdfParser
import io
import os
import time

__version__ = "0.5"


#
# --------------------------------------------------------------------

# object ids:
#  1. catalogue
#  2. pages
#  3. image
#  4. page
#  5. page contents


def _save_all(im, fp, filename):
    _save(im, fp, filename, save_all=True)


##
# (Internal) Image save plugin for the PDF format.

def _save(im, fp, filename, save_all=False):
    """Write *im* (and optionally its extra frames) to *fp* as a PDF.

    Honours encoderinfo keys: "append", "resolution", "append_images",
    and the PDF document-info keys (title, author, ...).
    """
    is_appending = im.encoderinfo.get("append", False)
    if is_appending:
        existing_pdf = PdfParser.PdfParser(f=fp, filename=filename, mode="r+b")
    else:
        existing_pdf = PdfParser.PdfParser(f=fp, filename=filename, mode="w+b")

    resolution = im.encoderinfo.get("resolution", 72.0)

    info = {
        "title": None if is_appending else os.path.splitext(
            os.path.basename(filename)
        )[0],
        "author": None,
        "subject": None,
        "keywords": None,
        "creator": None,
        "producer": None,
        "creationDate": None if is_appending else time.gmtime(),
        "modDate": None if is_appending else time.gmtime()
    }
    for k, default in info.items():
        v = im.encoderinfo.get(k) if k in im.encoderinfo else default
        if v:
            existing_pdf.info[k[0].upper() + k[1:]] = v

    #
    # make sure image data is available
    im.load()

    existing_pdf.start_writing()
    existing_pdf.write_header()
    existing_pdf.write_comment("created by PIL PDF driver " + __version__)

    #
    # pages
    ims = [im]
    if save_all:
        append_images = im.encoderinfo.get("append_images", [])
        for append_im in append_images:
            append_im.encoderinfo = im.encoderinfo.copy()
            ims.append(append_im)
    numberOfPages = 0
    image_refs = []
    page_refs = []
    contents_refs = []
    for im in ims:
        im_numberOfPages = 1
        if save_all:
            try:
                im_numberOfPages = im.n_frames
            except AttributeError:
                # Image format does not have n_frames.
                # It is a single frame image
                pass
        numberOfPages += im_numberOfPages
        for i in range(im_numberOfPages):
            image_refs.append(existing_pdf.next_object_id(0))
            page_refs.append(existing_pdf.next_object_id(0))
            contents_refs.append(existing_pdf.next_object_id(0))
            existing_pdf.pages.append(page_refs[-1])

    #
    # catalog and list of pages
    existing_pdf.write_catalog()

    pageNumber = 0
    for imSequence in ims:
        im_pages = ImageSequence.Iterator(imSequence) if save_all else [imSequence]
        for im in im_pages:
            # FIXME: Should replace ASCIIHexDecode with RunLengthDecode
            # (packbits) or LZWDecode (tiff/lzw compression). Note that
            # PDF 1.2 also supports Flatedecode (zip compression).

            bits = 8
            params = None

            # renamed from `filter` to avoid shadowing the builtin
            if im.mode == "1":
                img_filter = "ASCIIHexDecode"
                colorspace = PdfParser.PdfName("DeviceGray")
                procset = "ImageB"  # grayscale
                bits = 1
            elif im.mode == "L":
                img_filter = "DCTDecode"
                # params = "<< /Predictor 15 /Columns %d >>" % (width-2)
                colorspace = PdfParser.PdfName("DeviceGray")
                procset = "ImageB"  # grayscale
            elif im.mode == "P":
                img_filter = "ASCIIHexDecode"
                palette = im.im.getpalette("RGB")
                colorspace = [
                    PdfParser.PdfName("Indexed"),
                    PdfParser.PdfName("DeviceRGB"),
                    255,
                    PdfParser.PdfBinary(palette)
                ]
                procset = "ImageI"  # indexed color
            elif im.mode == "RGB":
                img_filter = "DCTDecode"
                colorspace = PdfParser.PdfName("DeviceRGB")
                procset = "ImageC"  # color images
            elif im.mode == "CMYK":
                img_filter = "DCTDecode"
                colorspace = PdfParser.PdfName("DeviceCMYK")
                procset = "ImageC"  # color images
            else:
                raise ValueError("cannot save mode %s" % im.mode)

            #
            # image

            op = io.BytesIO()

            if img_filter == "ASCIIHexDecode":
                if bits == 1:
                    # FIXME: the hex encoder doesn't support packed 1-bit
                    # images; do things the hard way...
                    data = im.tobytes("raw", "1")
                    im = Image.new("L", (len(data), 1), None)
                    im.putdata(data)
                ImageFile._save(im, op, [("hex", (0, 0)+im.size, 0, im.mode)])
            elif img_filter == "DCTDecode":
                Image.SAVE["JPEG"](im, op, filename)
            elif img_filter == "FlateDecode":
                ImageFile._save(im, op, [("zip", (0, 0)+im.size, 0, im.mode)])
            elif img_filter == "RunLengthDecode":
                ImageFile._save(im, op,
                                [("packbits", (0, 0)+im.size, 0, im.mode)])
            else:
                raise ValueError("unsupported PDF filter (%s)" % img_filter)

            #
            # Get image characteristics

            width, height = im.size

            # FIX: the stream-dictionary key is /DecodeParms per the PDF
            # specification; it was misspelled DecodeParams.
            existing_pdf.write_obj(image_refs[pageNumber],
                                   stream=op.getvalue(),
                                   Type=PdfParser.PdfName("XObject"),
                                   Subtype=PdfParser.PdfName("Image"),
                                   Width=width,  # * 72.0 / resolution,
                                   Height=height,  # * 72.0 / resolution,
                                   Filter=PdfParser.PdfName(img_filter),
                                   BitsPerComponent=bits,
                                   DecodeParms=params,
                                   ColorSpace=colorspace)

            #
            # page

            existing_pdf.write_page(page_refs[pageNumber],
                                    Resources=PdfParser.PdfDict(
                                        ProcSet=[
                                            PdfParser.PdfName("PDF"),
                                            PdfParser.PdfName(procset)
                                        ],
                                        XObject=PdfParser.PdfDict(
                                            image=image_refs[pageNumber]
                                        )
                                    ),
                                    MediaBox=[
                                        0,
                                        0,
                                        int(width * 72.0 / resolution),
                                        int(height * 72.0 / resolution)
                                    ],
                                    Contents=contents_refs[pageNumber])

            #
            # page contents

            page_contents = PdfParser.make_bytes(
                "q %d 0 0 %d 0 0 cm /image Do Q\n" % (
                    int(width * 72.0 / resolution),
                    int(height * 72.0 / resolution)))

            existing_pdf.write_obj(contents_refs[pageNumber],
                                   stream=page_contents)

            pageNumber += 1

    #
    # trailer
    existing_pdf.write_xref_and_trailer()
    if hasattr(fp, "flush"):
        fp.flush()
    existing_pdf.close()

#
# --------------------------------------------------------------------


Image.register_save("PDF", _save)
Image.register_save_all("PDF", _save_all)

Image.register_extension("PDF", ".pdf")

Image.register_mime("PDF", "application/pdf")
b/thesisenv/lib/python3.6/site-packages/PIL/PdfParser.py new file mode 100644 index 0000000..bcd29db --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/PIL/PdfParser.py @@ -0,0 +1,974 @@ +import calendar +import codecs +import collections +import mmap +import os +import re +import time +import zlib +from ._util import py3 + +try: + from UserDict import UserDict # Python 2.x +except ImportError: + UserDict = collections.UserDict # Python 3.x + + +if py3: # Python 3.x + def make_bytes(s): + return s.encode("us-ascii") +else: # Python 2.x + def make_bytes(s): # pragma: no cover + return s # pragma: no cover + + +# see 7.9.2.2 Text String Type on page 86 and D.3 PDFDocEncoding Character Set +# on page 656 +def encode_text(s): + return codecs.BOM_UTF16_BE + s.encode("utf_16_be") + + +PDFDocEncoding = { + 0x16: u"\u0017", + 0x18: u"\u02D8", + 0x19: u"\u02C7", + 0x1A: u"\u02C6", + 0x1B: u"\u02D9", + 0x1C: u"\u02DD", + 0x1D: u"\u02DB", + 0x1E: u"\u02DA", + 0x1F: u"\u02DC", + 0x80: u"\u2022", + 0x81: u"\u2020", + 0x82: u"\u2021", + 0x83: u"\u2026", + 0x84: u"\u2014", + 0x85: u"\u2013", + 0x86: u"\u0192", + 0x87: u"\u2044", + 0x88: u"\u2039", + 0x89: u"\u203A", + 0x8A: u"\u2212", + 0x8B: u"\u2030", + 0x8C: u"\u201E", + 0x8D: u"\u201C", + 0x8E: u"\u201D", + 0x8F: u"\u2018", + 0x90: u"\u2019", + 0x91: u"\u201A", + 0x92: u"\u2122", + 0x93: u"\uFB01", + 0x94: u"\uFB02", + 0x95: u"\u0141", + 0x96: u"\u0152", + 0x97: u"\u0160", + 0x98: u"\u0178", + 0x99: u"\u017D", + 0x9A: u"\u0131", + 0x9B: u"\u0142", + 0x9C: u"\u0153", + 0x9D: u"\u0161", + 0x9E: u"\u017E", + 0xA0: u"\u20AC", +} + + +def decode_text(b): + if b[:len(codecs.BOM_UTF16_BE)] == codecs.BOM_UTF16_BE: + return b[len(codecs.BOM_UTF16_BE):].decode("utf_16_be") + elif py3: # Python 3.x + return "".join(PDFDocEncoding.get(byte, chr(byte)) for byte in b) + else: # Python 2.x + return u"".join(PDFDocEncoding.get(ord(byte), byte) for byte in b) + + +class PdfFormatError(RuntimeError): + """An error that probably 
indicates a syntactic or semantic error in the + PDF file structure""" + pass + + +def check_format_condition(condition, error_message): + if not condition: + raise PdfFormatError(error_message) + + +class IndirectReference(collections.namedtuple("IndirectReferenceTuple", + ["object_id", "generation"])): + def __str__(self): + return "%s %s R" % self + + def __bytes__(self): + return self.__str__().encode("us-ascii") + + def __eq__(self, other): + return other.__class__ is self.__class__ and \ + other.object_id == self.object_id and \ + other.generation == self.generation + + def __ne__(self, other): + return not (self == other) + + def __hash__(self): + return hash((self.object_id, self.generation)) + + +class IndirectObjectDef(IndirectReference): + def __str__(self): + return "%s %s obj" % self + + +class XrefTable: + def __init__(self): + self.existing_entries = {} # object ID => (offset, generation) + self.new_entries = {} # object ID => (offset, generation) + self.deleted_entries = {0: 65536} # object ID => generation + self.reading_finished = False + + def __setitem__(self, key, value): + if self.reading_finished: + self.new_entries[key] = value + else: + self.existing_entries[key] = value + if key in self.deleted_entries: + del self.deleted_entries[key] + + def __getitem__(self, key): + try: + return self.new_entries[key] + except KeyError: + return self.existing_entries[key] + + def __delitem__(self, key): + if key in self.new_entries: + generation = self.new_entries[key][1] + 1 + del self.new_entries[key] + self.deleted_entries[key] = generation + elif key in self.existing_entries: + generation = self.existing_entries[key][1] + 1 + self.deleted_entries[key] = generation + elif key in self.deleted_entries: + generation = self.deleted_entries[key] + else: + raise IndexError("object ID " + str(key) + + " cannot be deleted because it doesn't exist") + + def __contains__(self, key): + return key in self.existing_entries or key in self.new_entries + + def 
__len__(self): + return len(set(self.existing_entries.keys()) | + set(self.new_entries.keys()) | + set(self.deleted_entries.keys())) + + def keys(self): + return ( + set(self.existing_entries.keys()) - + set(self.deleted_entries.keys()) + ) | set(self.new_entries.keys()) + + def write(self, f): + keys = sorted(set(self.new_entries.keys()) | + set(self.deleted_entries.keys())) + deleted_keys = sorted(set(self.deleted_entries.keys())) + startxref = f.tell() + f.write(b"xref\n") + while keys: + # find a contiguous sequence of object IDs + prev = None + for index, key in enumerate(keys): + if prev is None or prev+1 == key: + prev = key + else: + contiguous_keys = keys[:index] + keys = keys[index:] + break + else: + contiguous_keys = keys + keys = None + f.write(make_bytes("%d %d\n" % + (contiguous_keys[0], len(contiguous_keys)))) + for object_id in contiguous_keys: + if object_id in self.new_entries: + f.write(make_bytes("%010d %05d n \n" % + self.new_entries[object_id])) + else: + this_deleted_object_id = deleted_keys.pop(0) + check_format_condition(object_id == this_deleted_object_id, + "expected the next deleted object " + "ID to be %s, instead found %s" % + (object_id, this_deleted_object_id)) + try: + next_in_linked_list = deleted_keys[0] + except IndexError: + next_in_linked_list = 0 + f.write(make_bytes("%010d %05d f \n" % + (next_in_linked_list, + self.deleted_entries[object_id]))) + return startxref + + +class PdfName: + def __init__(self, name): + if isinstance(name, PdfName): + self.name = name.name + elif isinstance(name, bytes): + self.name = name + else: + self.name = name.encode("us-ascii") + + def name_as_str(self): + return self.name.decode("us-ascii") + + def __eq__(self, other): + return (isinstance(other, PdfName) and other.name == self.name) or \ + other == self.name + + def __hash__(self): + return hash(self.name) + + def __repr__(self): + return "PdfName(%s)" % repr(self.name) + + @classmethod + def from_pdf_stream(cls, data): + return 
cls(PdfParser.interpret_name(data)) + + allowed_chars = set(range(33, 127)) - set(ord(c) for c in "#%/()<>[]{}") + + def __bytes__(self): + result = bytearray(b"/") + for b in self.name: + if py3: # Python 3.x + if b in self.allowed_chars: + result.append(b) + else: + result.extend(make_bytes("#%02X" % b)) + else: # Python 2.x + if ord(b) in self.allowed_chars: + result.append(b) + else: + result.extend(b"#%02X" % ord(b)) + return bytes(result) + + __str__ = __bytes__ + + +class PdfArray(list): + def __bytes__(self): + return b"[ " + b" ".join(pdf_repr(x) for x in self) + b" ]" + + __str__ = __bytes__ + + +class PdfDict(UserDict): + def __setattr__(self, key, value): + if key == "data": + if hasattr(UserDict, "__setattr__"): + UserDict.__setattr__(self, key, value) + else: + self.__dict__[key] = value + else: + if isinstance(key, str): + key = key.encode("us-ascii") + self[key] = value + + def __getattr__(self, key): + try: + value = self[key] + except KeyError: + try: + value = self[key.encode("us-ascii")] + except KeyError: + raise AttributeError(key) + if isinstance(value, bytes): + value = decode_text(value) + if key.endswith("Date"): + if value.startswith("D:"): + value = value[2:] + + relationship = 'Z' + if len(value) > 17: + relationship = value[14] + offset = int(value[15:17]) * 60 + if len(value) > 20: + offset += int(value[18:20]) + + format = '%Y%m%d%H%M%S'[:len(value) - 2] + value = time.strptime(value[:len(format)+2], format) + if relationship in ['+', '-']: + offset *= 60 + if relationship == '+': + offset *= -1 + value = time.gmtime(calendar.timegm(value) + offset) + return value + + def __bytes__(self): + out = bytearray(b"<<") + for key, value in self.items(): + if value is None: + continue + value = pdf_repr(value) + out.extend(b"\n") + out.extend(bytes(PdfName(key))) + out.extend(b" ") + out.extend(value) + out.extend(b"\n>>") + return bytes(out) + + if not py3: + __str__ = __bytes__ + + +class PdfBinary: + def __init__(self, data): + self.data 
= data + + if py3: # Python 3.x + def __bytes__(self): + return make_bytes("<%s>" % "".join("%02X" % b for b in self.data)) + else: # Python 2.x + def __str__(self): + return "<%s>" % "".join("%02X" % ord(b) for b in self.data) + + +class PdfStream: + def __init__(self, dictionary, buf): + self.dictionary = dictionary + self.buf = buf + + def decode(self): + try: + filter = self.dictionary.Filter + except AttributeError: + return self.buf + if filter == b"FlateDecode": + try: + expected_length = self.dictionary.DL + except AttributeError: + expected_length = self.dictionary.Length + return zlib.decompress(self.buf, bufsize=int(expected_length)) + else: + raise NotImplementedError( + "stream filter %s unknown/unsupported" % + repr(self.dictionary.Filter)) + + +def pdf_repr(x): + if x is True: + return b"true" + elif x is False: + return b"false" + elif x is None: + return b"null" + elif (isinstance(x, PdfName) or isinstance(x, PdfDict) or + isinstance(x, PdfArray) or isinstance(x, PdfBinary)): + return bytes(x) + elif isinstance(x, int): + return str(x).encode("us-ascii") + elif isinstance(x, time.struct_time): + return b'(D:'+time.strftime('%Y%m%d%H%M%SZ', x).encode("us-ascii")+b')' + elif isinstance(x, dict): + return bytes(PdfDict(x)) + elif isinstance(x, list): + return bytes(PdfArray(x)) + elif ((py3 and isinstance(x, str)) or + (not py3 and isinstance(x, unicode))): + return pdf_repr(encode_text(x)) + elif isinstance(x, bytes): + # XXX escape more chars? 
handle binary garbage + x = x.replace(b"\\", b"\\\\") + x = x.replace(b"(", b"\\(") + x = x.replace(b")", b"\\)") + return b"(" + x + b")" + else: + return bytes(x) + + +class PdfParser: + """Based on https://www.adobe.com/content/dam/acom/en/devnet/acrobat/pdfs/PDF32000_2008.pdf + Supports PDF up to 1.4 + """ + + def __init__(self, filename=None, f=None, + buf=None, start_offset=0, mode="rb"): + # type: (PdfParser, str, file, Union[bytes, bytearray], int, str) + # -> None + if buf and f: + raise RuntimeError( + "specify buf or f or filename, but not both buf and f") + self.filename = filename + self.buf = buf + self.f = f + self.start_offset = start_offset + self.should_close_buf = False + self.should_close_file = False + if filename is not None and f is None: + self.f = f = open(filename, mode) + self.should_close_file = True + if f is not None: + self.buf = buf = self.get_buf_from_file(f) + self.should_close_buf = True + if not filename and hasattr(f, "name"): + self.filename = f.name + self.cached_objects = {} + if buf: + self.read_pdf_info() + else: + self.file_size_total = self.file_size_this = 0 + self.root = PdfDict() + self.root_ref = None + self.info = PdfDict() + self.info_ref = None + self.page_tree_root = {} + self.pages = [] + self.orig_pages = [] + self.pages_ref = None + self.last_xref_section_offset = None + self.trailer_dict = {} + self.xref_table = XrefTable() + self.xref_table.reading_finished = True + if f: + self.seek_end() + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_value, traceback): + self.close() + return False # do not suppress exceptions + + def start_writing(self): + self.close_buf() + self.seek_end() + + def close_buf(self): + try: + self.buf.close() + except AttributeError: + pass + self.buf = None + + def close(self): + if self.should_close_buf: + self.close_buf() + if self.f is not None and self.should_close_file: + self.f.close() + self.f = None + + def seek_end(self): + self.f.seek(0, os.SEEK_END) + 
+ def write_header(self): + self.f.write(b"%PDF-1.4\n") + + def write_comment(self, s): + self.f.write(("%% %s\n" % (s,)).encode("utf-8")) + + def write_catalog(self): + self.del_root() + self.root_ref = self.next_object_id(self.f.tell()) + self.pages_ref = self.next_object_id(0) + self.rewrite_pages() + self.write_obj(self.root_ref, + Type=PdfName(b"Catalog"), + Pages=self.pages_ref) + self.write_obj(self.pages_ref, + Type=PdfName(b"Pages"), + Count=len(self.pages), + Kids=self.pages) + return self.root_ref + + def rewrite_pages(self): + pages_tree_nodes_to_delete = [] + for i, page_ref in enumerate(self.orig_pages): + page_info = self.cached_objects[page_ref] + del self.xref_table[page_ref.object_id] + pages_tree_nodes_to_delete.append(page_info[PdfName(b"Parent")]) + if page_ref not in self.pages: + # the page has been deleted + continue + # make dict keys into strings for passing to write_page + stringified_page_info = {} + for key, value in page_info.items(): + # key should be a PdfName + stringified_page_info[key.name_as_str()] = value + stringified_page_info["Parent"] = self.pages_ref + new_page_ref = self.write_page(None, **stringified_page_info) + for j, cur_page_ref in enumerate(self.pages): + if cur_page_ref == page_ref: + # replace the page reference with the new one + self.pages[j] = new_page_ref + # delete redundant Pages tree nodes from xref table + for pages_tree_node_ref in pages_tree_nodes_to_delete: + while pages_tree_node_ref: + pages_tree_node = self.cached_objects[pages_tree_node_ref] + if pages_tree_node_ref.object_id in self.xref_table: + del self.xref_table[pages_tree_node_ref.object_id] + pages_tree_node_ref = pages_tree_node.get(b"Parent", None) + self.orig_pages = [] + + def write_xref_and_trailer(self, new_root_ref=None): + if new_root_ref: + self.del_root() + self.root_ref = new_root_ref + if self.info: + self.info_ref = self.write_obj(None, self.info) + start_xref = self.xref_table.write(self.f) + num_entries = len(self.xref_table) + 
trailer_dict = {b"Root": self.root_ref, b"Size": num_entries} + if self.last_xref_section_offset is not None: + trailer_dict[b"Prev"] = self.last_xref_section_offset + if self.info: + trailer_dict[b"Info"] = self.info_ref + self.last_xref_section_offset = start_xref + self.f.write(b"trailer\n" + bytes(PdfDict(trailer_dict)) + + make_bytes("\nstartxref\n%d\n%%%%EOF" % start_xref)) + + def write_page(self, ref, *objs, **dict_obj): + if isinstance(ref, int): + ref = self.pages[ref] + if "Type" not in dict_obj: + dict_obj["Type"] = PdfName(b"Page") + if "Parent" not in dict_obj: + dict_obj["Parent"] = self.pages_ref + return self.write_obj(ref, *objs, **dict_obj) + + def write_obj(self, ref, *objs, **dict_obj): + f = self.f + if ref is None: + ref = self.next_object_id(f.tell()) + else: + self.xref_table[ref.object_id] = (f.tell(), ref.generation) + f.write(bytes(IndirectObjectDef(*ref))) + stream = dict_obj.pop("stream", None) + if stream is not None: + dict_obj["Length"] = len(stream) + if dict_obj: + f.write(pdf_repr(dict_obj)) + for obj in objs: + f.write(pdf_repr(obj)) + if stream is not None: + f.write(b"stream\n") + f.write(stream) + f.write(b"\nendstream\n") + f.write(b"endobj\n") + return ref + + def del_root(self): + if self.root_ref is None: + return + del self.xref_table[self.root_ref.object_id] + del self.xref_table[self.root[b"Pages"].object_id] + + @staticmethod + def get_buf_from_file(f): + if hasattr(f, "getbuffer"): + return f.getbuffer() + elif hasattr(f, "getvalue"): + return f.getvalue() + else: + try: + return mmap.mmap(f.fileno(), 0, access=mmap.ACCESS_READ) + except ValueError: # cannot mmap an empty file + return b"" + + def read_pdf_info(self): + self.file_size_total = len(self.buf) + self.file_size_this = self.file_size_total - self.start_offset + self.read_trailer() + self.root_ref = self.trailer_dict[b"Root"] + self.info_ref = self.trailer_dict.get(b"Info", None) + self.root = PdfDict(self.read_indirect(self.root_ref)) + if self.info_ref is 
None: + self.info = PdfDict() + else: + self.info = PdfDict(self.read_indirect(self.info_ref)) + check_format_condition(b"Type" in self.root, "/Type missing in Root") + check_format_condition(self.root[b"Type"] == b"Catalog", + "/Type in Root is not /Catalog") + check_format_condition(b"Pages" in self.root, "/Pages missing in Root") + check_format_condition(isinstance(self.root[b"Pages"], + IndirectReference), + "/Pages in Root is not an indirect reference") + self.pages_ref = self.root[b"Pages"] + self.page_tree_root = self.read_indirect(self.pages_ref) + self.pages = self.linearize_page_tree(self.page_tree_root) + # save the original list of page references + # in case the user modifies, adds or deletes some pages + # and we need to rewrite the pages and their list + self.orig_pages = self.pages[:] + + def next_object_id(self, offset=None): + try: + # TODO: support reuse of deleted objects + reference = IndirectReference(max(self.xref_table.keys()) + 1, 0) + except ValueError: + reference = IndirectReference(1, 0) + if offset is not None: + self.xref_table[reference.object_id] = (offset, 0) + return reference + + delimiter = br"[][()<>{}/%]" + delimiter_or_ws = br"[][()<>{}/%\000\011\012\014\015\040]" + whitespace = br"[\000\011\012\014\015\040]" + whitespace_or_hex = br"[\000\011\012\014\015\0400-9a-fA-F]" + whitespace_optional = whitespace + b"*" + whitespace_mandatory = whitespace + b"+" + newline_only = br"[\r\n]+" + newline = whitespace_optional + newline_only + whitespace_optional + re_trailer_end = re.compile( + whitespace_mandatory + br"trailer" + whitespace_optional + + br"\<\<(.*\>\>)" + newline + br"startxref" + newline + br"([0-9]+)" + + newline + br"%%EOF" + whitespace_optional + br"$", re.DOTALL) + re_trailer_prev = re.compile( + whitespace_optional + br"trailer" + whitespace_optional + + br"\<\<(.*?\>\>)" + newline + br"startxref" + newline + br"([0-9]+)" + + newline + br"%%EOF" + whitespace_optional, re.DOTALL) + + def read_trailer(self): + 
search_start_offset = len(self.buf) - 16384 + if search_start_offset < self.start_offset: + search_start_offset = self.start_offset + m = self.re_trailer_end.search(self.buf, search_start_offset) + check_format_condition(m, "trailer end not found") + # make sure we found the LAST trailer + last_match = m + while m: + last_match = m + m = self.re_trailer_end.search(self.buf, m.start()+16) + if not m: + m = last_match + trailer_data = m.group(1) + self.last_xref_section_offset = int(m.group(2)) + self.trailer_dict = self.interpret_trailer(trailer_data) + self.xref_table = XrefTable() + self.read_xref_table(xref_section_offset=self.last_xref_section_offset) + if b"Prev" in self.trailer_dict: + self.read_prev_trailer(self.trailer_dict[b"Prev"]) + + def read_prev_trailer(self, xref_section_offset): + trailer_offset = self.read_xref_table( + xref_section_offset=xref_section_offset) + m = self.re_trailer_prev.search( + self.buf[trailer_offset:trailer_offset+16384]) + check_format_condition(m, "previous trailer not found") + trailer_data = m.group(1) + check_format_condition(int(m.group(2)) == xref_section_offset, + "xref section offset in previous trailer " + "doesn't match what was expected") + trailer_dict = self.interpret_trailer(trailer_data) + if b"Prev" in trailer_dict: + self.read_prev_trailer(trailer_dict[b"Prev"]) + + re_whitespace_optional = re.compile(whitespace_optional) + re_name = re.compile( + whitespace_optional + br"/([!-$&'*-.0-;=?-Z\\^-z|~]+)(?=" + + delimiter_or_ws + br")") + re_dict_start = re.compile(whitespace_optional + br"\<\<") + re_dict_end = re.compile( + whitespace_optional + br"\>\>" + whitespace_optional) + + @classmethod + def interpret_trailer(cls, trailer_data): + trailer = {} + offset = 0 + while True: + m = cls.re_name.match(trailer_data, offset) + if not m: + m = cls.re_dict_end.match(trailer_data, offset) + check_format_condition( + m and m.end() == len(trailer_data), + "name not found in trailer, remaining data: " + + 
repr(trailer_data[offset:])) + break + key = cls.interpret_name(m.group(1)) + value, offset = cls.get_value(trailer_data, m.end()) + trailer[key] = value + check_format_condition( + b"Size" in trailer and isinstance(trailer[b"Size"], int), + "/Size not in trailer or not an integer") + check_format_condition( + b"Root" in trailer and + isinstance(trailer[b"Root"], IndirectReference), + "/Root not in trailer or not an indirect reference") + return trailer + + re_hashes_in_name = re.compile(br"([^#]*)(#([0-9a-fA-F]{2}))?") + + @classmethod + def interpret_name(cls, raw, as_text=False): + name = b"" + for m in cls.re_hashes_in_name.finditer(raw): + if m.group(3): + name += m.group(1) + \ + bytearray.fromhex(m.group(3).decode("us-ascii")) + else: + name += m.group(1) + if as_text: + return name.decode("utf-8") + else: + return bytes(name) + + re_null = re.compile( + whitespace_optional + br"null(?=" + delimiter_or_ws + br")") + re_true = re.compile( + whitespace_optional + br"true(?=" + delimiter_or_ws + br")") + re_false = re.compile( + whitespace_optional + br"false(?=" + delimiter_or_ws + br")") + re_int = re.compile( + whitespace_optional + br"([-+]?[0-9]+)(?=" + delimiter_or_ws + br")") + re_real = re.compile( + whitespace_optional + br"([-+]?([0-9]+\.[0-9]*|[0-9]*\.[0-9]+))(?=" + + delimiter_or_ws + br")") + re_array_start = re.compile(whitespace_optional + br"\[") + re_array_end = re.compile(whitespace_optional + br"]") + re_string_hex = re.compile( + whitespace_optional + br"\<(" + whitespace_or_hex + br"*)\>") + re_string_lit = re.compile(whitespace_optional + br"\(") + re_indirect_reference = re.compile( + whitespace_optional + br"([-+]?[0-9]+)" + whitespace_mandatory + + br"([-+]?[0-9]+)" + whitespace_mandatory + br"R(?=" + delimiter_or_ws + + br")") + re_indirect_def_start = re.compile( + whitespace_optional + br"([-+]?[0-9]+)" + whitespace_mandatory + + br"([-+]?[0-9]+)" + whitespace_mandatory + br"obj(?=" + + delimiter_or_ws + br")") + re_indirect_def_end 
= re.compile( + whitespace_optional + br"endobj(?=" + delimiter_or_ws + br")") + re_comment = re.compile( + br"(" + whitespace_optional + br"%[^\r\n]*" + newline + br")*") + re_stream_start = re.compile(whitespace_optional + br"stream\r?\n") + re_stream_end = re.compile( + whitespace_optional + br"endstream(?=" + delimiter_or_ws + br")") + + @classmethod + def get_value(cls, data, offset, expect_indirect=None, max_nesting=-1): + if max_nesting == 0: + return None, None + m = cls.re_comment.match(data, offset) + if m: + offset = m.end() + m = cls.re_indirect_def_start.match(data, offset) + if m: + check_format_condition( + int(m.group(1)) > 0, + "indirect object definition: object ID must be greater than 0") + check_format_condition( + int(m.group(2)) >= 0, + "indirect object definition: generation must be non-negative") + check_format_condition( + expect_indirect is None or expect_indirect == + IndirectReference(int(m.group(1)), int(m.group(2))), + "indirect object definition different than expected") + object, offset = cls.get_value( + data, m.end(), max_nesting=max_nesting-1) + if offset is None: + return object, None + m = cls.re_indirect_def_end.match(data, offset) + check_format_condition( + m, "indirect object definition end not found") + return object, m.end() + check_format_condition( + not expect_indirect, "indirect object definition not found") + m = cls.re_indirect_reference.match(data, offset) + if m: + check_format_condition( + int(m.group(1)) > 0, + "indirect object reference: object ID must be greater than 0") + check_format_condition( + int(m.group(2)) >= 0, + "indirect object reference: generation must be non-negative") + return IndirectReference(int(m.group(1)), int(m.group(2))), m.end() + m = cls.re_dict_start.match(data, offset) + if m: + offset = m.end() + result = {} + m = cls.re_dict_end.match(data, offset) + while not m: + key, offset = cls.get_value( + data, offset, max_nesting=max_nesting-1) + if offset is None: + return result, None + 
value, offset = cls.get_value( + data, offset, max_nesting=max_nesting-1) + result[key] = value + if offset is None: + return result, None + m = cls.re_dict_end.match(data, offset) + offset = m.end() + m = cls.re_stream_start.match(data, offset) + if m: + try: + stream_len = int(result[b"Length"]) + except (TypeError, KeyError, ValueError): + raise PdfFormatError( + "bad or missing Length in stream dict (%r)" % + result.get(b"Length", None)) + stream_data = data[m.end():m.end() + stream_len] + m = cls.re_stream_end.match(data, m.end() + stream_len) + check_format_condition(m, "stream end not found") + offset = m.end() + result = PdfStream(PdfDict(result), stream_data) + else: + result = PdfDict(result) + return result, offset + m = cls.re_array_start.match(data, offset) + if m: + offset = m.end() + result = [] + m = cls.re_array_end.match(data, offset) + while not m: + value, offset = cls.get_value( + data, offset, max_nesting=max_nesting-1) + result.append(value) + if offset is None: + return result, None + m = cls.re_array_end.match(data, offset) + return result, m.end() + m = cls.re_null.match(data, offset) + if m: + return None, m.end() + m = cls.re_true.match(data, offset) + if m: + return True, m.end() + m = cls.re_false.match(data, offset) + if m: + return False, m.end() + m = cls.re_name.match(data, offset) + if m: + return PdfName(cls.interpret_name(m.group(1))), m.end() + m = cls.re_int.match(data, offset) + if m: + return int(m.group(1)), m.end() + m = cls.re_real.match(data, offset) + if m: + # XXX Decimal instead of float??? 
+ return float(m.group(1)), m.end() + m = cls.re_string_hex.match(data, offset) + if m: + # filter out whitespace + hex_string = bytearray([ + b for b in m.group(1) + if b in b"0123456789abcdefABCDEF" + ]) + if len(hex_string) % 2 == 1: + # append a 0 if the length is not even - yes, at the end + hex_string.append(ord(b"0")) + return bytearray.fromhex(hex_string.decode("us-ascii")), m.end() + m = cls.re_string_lit.match(data, offset) + if m: + return cls.get_literal_string(data, m.end()) + # return None, offset # fallback (only for debugging) + raise PdfFormatError( + "unrecognized object: " + repr(data[offset:offset+32])) + + re_lit_str_token = re.compile(br"(\\[nrtbf()\\])|(\\[0-9]{1,3})|(\\(\r\n|\r|\n))|(\r\n|\r|\n)|(\()|(\))") + escaped_chars = { + b"n": b"\n", + b"r": b"\r", + b"t": b"\t", + b"b": b"\b", + b"f": b"\f", + b"(": b"(", + b")": b")", + b"\\": b"\\", + ord(b"n"): b"\n", + ord(b"r"): b"\r", + ord(b"t"): b"\t", + ord(b"b"): b"\b", + ord(b"f"): b"\f", + ord(b"("): b"(", + ord(b")"): b")", + ord(b"\\"): b"\\", + } + + @classmethod + def get_literal_string(cls, data, offset): + nesting_depth = 0 + result = bytearray() + for m in cls.re_lit_str_token.finditer(data, offset): + result.extend(data[offset:m.start()]) + if m.group(1): + result.extend(cls.escaped_chars[m.group(1)[1]]) + elif m.group(2): + result.append(int(m.group(2)[1:], 8)) + elif m.group(3): + pass + elif m.group(5): + result.extend(b"\n") + elif m.group(6): + result.extend(b"(") + nesting_depth += 1 + elif m.group(7): + if nesting_depth == 0: + return bytes(result), m.end() + result.extend(b")") + nesting_depth -= 1 + offset = m.end() + raise PdfFormatError("unfinished literal string") + + re_xref_section_start = re.compile( + whitespace_optional + br"xref" + newline) + re_xref_subsection_start = re.compile( + whitespace_optional + br"([0-9]+)" + whitespace_mandatory + + br"([0-9]+)" + whitespace_optional + newline_only) + re_xref_entry = re.compile(br"([0-9]{10}) ([0-9]{5}) ([fn])( \r| 
\n|\r\n)") + + def read_xref_table(self, xref_section_offset): + subsection_found = False + m = self.re_xref_section_start.match( + self.buf, xref_section_offset + self.start_offset) + check_format_condition(m, "xref section start not found") + offset = m.end() + while True: + m = self.re_xref_subsection_start.match(self.buf, offset) + if not m: + check_format_condition( + subsection_found, "xref subsection start not found") + break + subsection_found = True + offset = m.end() + first_object = int(m.group(1)) + num_objects = int(m.group(2)) + for i in range(first_object, first_object+num_objects): + m = self.re_xref_entry.match(self.buf, offset) + check_format_condition(m, "xref entry not found") + offset = m.end() + is_free = m.group(3) == b"f" + generation = int(m.group(2)) + if not is_free: + new_entry = (int(m.group(1)), generation) + check_format_condition( + i not in self.xref_table or + self.xref_table[i] == new_entry, + "xref entry duplicated (and not identical)") + self.xref_table[i] = new_entry + return offset + + def read_indirect(self, ref, max_nesting=-1): + offset, generation = self.xref_table[ref[0]] + check_format_condition( + generation == ref[1], + "expected to find generation %s for object ID %s in xref table, " + "instead found generation %s at offset %s" + % (ref[1], ref[0], generation, offset)) + value = self.get_value(self.buf, offset + self.start_offset, + expect_indirect=IndirectReference(*ref), + max_nesting=max_nesting)[0] + self.cached_objects[ref] = value + return value + + def linearize_page_tree(self, node=None): + if node is None: + node = self.page_tree_root + check_format_condition( + node[b"Type"] == b"Pages", "/Type of page tree node is not /Pages") + pages = [] + for kid in node[b"Kids"]: + kid_object = self.read_indirect(kid) + if kid_object[b"Type"] == b"Page": + pages.append(kid) + else: + pages.extend(self.linearize_page_tree(node=kid_object)) + return pages diff --git 
a/thesisenv/lib/python3.6/site-packages/PIL/PixarImagePlugin.py b/thesisenv/lib/python3.6/site-packages/PIL/PixarImagePlugin.py new file mode 100644 index 0000000..d07b28d --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/PIL/PixarImagePlugin.py @@ -0,0 +1,71 @@ +# +# The Python Imaging Library. +# $Id$ +# +# PIXAR raster support for PIL +# +# history: +# 97-01-29 fl Created +# +# notes: +# This is incomplete; it is based on a few samples created with +# Photoshop 2.5 and 3.0, and a summary description provided by +# Greg Coats . Hopefully, "L" and +# "RGBA" support will be added in future versions. +# +# Copyright (c) Secret Labs AB 1997. +# Copyright (c) Fredrik Lundh 1997. +# +# See the README file for information on usage and redistribution. +# + +from . import Image, ImageFile +from ._binary import i16le as i16 + +__version__ = "0.1" + + +# +# helpers + +def _accept(prefix): + return prefix[:4] == b"\200\350\000\000" + + +## +# Image plugin for PIXAR raster images. + +class PixarImageFile(ImageFile.ImageFile): + + format = "PIXAR" + format_description = "PIXAR raster image" + + def _open(self): + + # assuming a 4-byte magic label + s = self.fp.read(4) + if s != b"\200\350\000\000": + raise SyntaxError("not a PIXAR file") + + # read rest of header + s = s + self.fp.read(508) + + self._size = i16(s[418:420]), i16(s[416:418]) + + # get channel/depth descriptions + mode = i16(s[424:426]), i16(s[426:428]) + + if mode == (14, 2): + self.mode = "RGB" + # FIXME: to be continued... 
+ + # create tile descriptor (assuming "dumped") + self.tile = [("raw", (0, 0)+self.size, 1024, (self.mode, 0, 1))] + + +# +# -------------------------------------------------------------------- + +Image.register_open(PixarImageFile.format, PixarImageFile, _accept) + +Image.register_extension(PixarImageFile.format, ".pxr") diff --git a/thesisenv/lib/python3.6/site-packages/PIL/PngImagePlugin.py b/thesisenv/lib/python3.6/site-packages/PIL/PngImagePlugin.py new file mode 100644 index 0000000..15077fc --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/PIL/PngImagePlugin.py @@ -0,0 +1,871 @@ +# +# The Python Imaging Library. +# $Id$ +# +# PNG support code +# +# See "PNG (Portable Network Graphics) Specification, version 1.0; +# W3C Recommendation", 1996-10-01, Thomas Boutell (ed.). +# +# history: +# 1996-05-06 fl Created (couldn't resist it) +# 1996-12-14 fl Upgraded, added read and verify support (0.2) +# 1996-12-15 fl Separate PNG stream parser +# 1996-12-29 fl Added write support, added getchunks +# 1996-12-30 fl Eliminated circular references in decoder (0.3) +# 1998-07-12 fl Read/write 16-bit images as mode I (0.4) +# 2001-02-08 fl Added transparency support (from Zircon) (0.5) +# 2001-04-16 fl Don't close data source in "open" method (0.6) +# 2004-02-24 fl Don't even pretend to support interlaced files (0.7) +# 2004-08-31 fl Do basic sanity check on chunk identifiers (0.8) +# 2004-09-20 fl Added PngInfo chunk container +# 2004-12-18 fl Added DPI read support (based on code by Niki Spahiev) +# 2008-08-13 fl Added tRNS support for RGB images +# 2009-03-06 fl Support for preserving ICC profiles (by Florian Hoech) +# 2009-03-08 fl Added zTXT support (from Lowell Alleman) +# 2009-03-29 fl Read interlaced PNG files (from Conrado Porto Lopes Gouvua) +# +# Copyright (c) 1997-2009 by Secret Labs AB +# Copyright (c) 1996 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. 
+# + +import logging +import re +import zlib +import struct + +from . import Image, ImageFile, ImagePalette +from ._binary import i8, i16be as i16, i32be as i32, o16be as o16, o32be as o32 +from ._util import py3 + +__version__ = "0.9" + +logger = logging.getLogger(__name__) + +is_cid = re.compile(br"\w\w\w\w").match + + +_MAGIC = b"\211PNG\r\n\032\n" + + +_MODES = { + # supported bits/color combinations, and corresponding modes/rawmodes + (1, 0): ("1", "1"), + (2, 0): ("L", "L;2"), + (4, 0): ("L", "L;4"), + (8, 0): ("L", "L"), + (16, 0): ("I", "I;16B"), + (8, 2): ("RGB", "RGB"), + (16, 2): ("RGB", "RGB;16B"), + (1, 3): ("P", "P;1"), + (2, 3): ("P", "P;2"), + (4, 3): ("P", "P;4"), + (8, 3): ("P", "P"), + (8, 4): ("LA", "LA"), + (16, 4): ("RGBA", "LA;16B"), # LA;16B->LA not yet available + (8, 6): ("RGBA", "RGBA"), + (16, 6): ("RGBA", "RGBA;16B"), +} + + +_simple_palette = re.compile(b'^\xff*\x00\xff*$') + +# Maximum decompressed size for a iTXt or zTXt chunk. +# Eliminates decompression bombs where compressed chunks can expand 1000x +MAX_TEXT_CHUNK = ImageFile.SAFEBLOCK +# Set the maximum total text chunk size. +MAX_TEXT_MEMORY = 64 * MAX_TEXT_CHUNK + + +def _safe_zlib_decompress(s): + dobj = zlib.decompressobj() + plaintext = dobj.decompress(s, MAX_TEXT_CHUNK) + if dobj.unconsumed_tail: + raise ValueError("Decompressed Data Too Large") + return plaintext + + +def _crc32(data, seed=0): + return zlib.crc32(data, seed) & 0xffffffff + + +# -------------------------------------------------------------------- +# Support classes. Suitable for PNG and related formats like MNG etc. + +class ChunkStream(object): + + def __init__(self, fp): + + self.fp = fp + self.queue = [] + + def read(self): + "Fetch a new chunk. Returns header information." 
+ cid = None + + if self.queue: + cid, pos, length = self.queue.pop() + self.fp.seek(pos) + else: + s = self.fp.read(8) + cid = s[4:] + pos = self.fp.tell() + length = i32(s) + + if not is_cid(cid): + if not ImageFile.LOAD_TRUNCATED_IMAGES: + raise SyntaxError("broken PNG file (chunk %s)" % repr(cid)) + + return cid, pos, length + + def __enter__(self): + return self + + def __exit__(self, *args): + self.close() + + def close(self): + self.queue = self.crc = self.fp = None + + def push(self, cid, pos, length): + + self.queue.append((cid, pos, length)) + + def call(self, cid, pos, length): + "Call the appropriate chunk handler" + + logger.debug("STREAM %r %s %s", cid, pos, length) + return getattr(self, "chunk_" + cid.decode('ascii'))(pos, length) + + def crc(self, cid, data): + "Read and verify checksum" + + # Skip CRC checks for ancillary chunks if allowed to load truncated + # images + # 5th byte of first char is 1 [specs, section 5.4] + if ImageFile.LOAD_TRUNCATED_IMAGES and (i8(cid[0]) >> 5 & 1): + self.crc_skip(cid, data) + return + + try: + crc1 = _crc32(data, _crc32(cid)) + crc2 = i32(self.fp.read(4)) + if crc1 != crc2: + raise SyntaxError("broken PNG file (bad header checksum in %r)" + % cid) + except struct.error: + raise SyntaxError("broken PNG file (incomplete checksum in %r)" + % cid) + + def crc_skip(self, cid, data): + "Read checksum. Used if the C module is not present" + + self.fp.read(4) + + def verify(self, endchunk=b"IEND"): + + # Simple approach; just calculate checksum for all remaining + # blocks. Must be called directly after open. 
+ + cids = [] + + while True: + try: + cid, pos, length = self.read() + except struct.error: + raise IOError("truncated PNG file") + + if cid == endchunk: + break + self.crc(cid, ImageFile._safe_read(self.fp, length)) + cids.append(cid) + + return cids + + +class iTXt(str): + """ + Subclass of string to allow iTXt chunks to look like strings while + keeping their extra information + + """ + @staticmethod + def __new__(cls, text, lang, tkey): + """ + :param cls: the class to use when creating the instance + :param text: value for this key + :param lang: language code + :param tkey: UTF-8 version of the key name + """ + + self = str.__new__(cls, text) + self.lang = lang + self.tkey = tkey + return self + + +class PngInfo(object): + """ + PNG chunk container (for use with save(pnginfo=)) + + """ + + def __init__(self): + self.chunks = [] + + def add(self, cid, data): + """Appends an arbitrary chunk. Use with caution. + + :param cid: a byte string, 4 bytes long. + :param data: a byte string of the encoded data + + """ + + self.chunks.append((cid, data)) + + def add_itxt(self, key, value, lang="", tkey="", zip=False): + """Appends an iTXt chunk. + + :param key: latin-1 encodable text key name + :param value: value for this key + :param lang: language code + :param tkey: UTF-8 version of the key name + :param zip: compression flag + + """ + + if not isinstance(key, bytes): + key = key.encode("latin-1", "strict") + if not isinstance(value, bytes): + value = value.encode("utf-8", "strict") + if not isinstance(lang, bytes): + lang = lang.encode("utf-8", "strict") + if not isinstance(tkey, bytes): + tkey = tkey.encode("utf-8", "strict") + + if zip: + self.add(b"iTXt", key + b"\0\x01\0" + lang + b"\0" + tkey + b"\0" + + zlib.compress(value)) + else: + self.add(b"iTXt", key + b"\0\0\0" + lang + b"\0" + tkey + b"\0" + + value) + + def add_text(self, key, value, zip=False): + """Appends a text chunk. 
+ + :param key: latin-1 encodable text key name + :param value: value for this key, text or an + :py:class:`PIL.PngImagePlugin.iTXt` instance + :param zip: compression flag + + """ + if isinstance(value, iTXt): + return self.add_itxt(key, value, value.lang, value.tkey, zip=zip) + + # The tEXt chunk stores latin-1 text + if not isinstance(value, bytes): + try: + value = value.encode('latin-1', 'strict') + except UnicodeError: + return self.add_itxt(key, value, zip=zip) + + if not isinstance(key, bytes): + key = key.encode('latin-1', 'strict') + + if zip: + self.add(b"zTXt", key + b"\0\0" + zlib.compress(value)) + else: + self.add(b"tEXt", key + b"\0" + value) + + +# -------------------------------------------------------------------- +# PNG image stream (IHDR/IEND) + +class PngStream(ChunkStream): + + def __init__(self, fp): + + ChunkStream.__init__(self, fp) + + # local copies of Image attributes + self.im_info = {} + self.im_text = {} + self.im_size = (0, 0) + self.im_mode = None + self.im_tile = None + self.im_palette = None + + self.text_memory = 0 + + def check_text_memory(self, chunklen): + self.text_memory += chunklen + if self.text_memory > MAX_TEXT_MEMORY: + raise ValueError("Too much memory used in text chunks: " + "%s>MAX_TEXT_MEMORY" % self.text_memory) + + def chunk_iCCP(self, pos, length): + + # ICC profile + s = ImageFile._safe_read(self.fp, length) + # according to PNG spec, the iCCP chunk contains: + # Profile name 1-79 bytes (character string) + # Null separator 1 byte (null character) + # Compression method 1 byte (0) + # Compressed profile n bytes (zlib with deflate compression) + i = s.find(b"\0") + logger.debug("iCCP profile name %r", s[:i]) + logger.debug("Compression method %s", i8(s[i])) + comp_method = i8(s[i]) + if comp_method != 0: + raise SyntaxError("Unknown compression method %s in iCCP chunk" % + comp_method) + try: + icc_profile = _safe_zlib_decompress(s[i+2:]) + except ValueError: + if ImageFile.LOAD_TRUNCATED_IMAGES: + icc_profile 
= None + else: + raise + except zlib.error: + icc_profile = None # FIXME + self.im_info["icc_profile"] = icc_profile + return s + + def chunk_IHDR(self, pos, length): + + # image header + s = ImageFile._safe_read(self.fp, length) + self.im_size = i32(s), i32(s[4:]) + try: + self.im_mode, self.im_rawmode = _MODES[(i8(s[8]), i8(s[9]))] + except: + pass + if i8(s[12]): + self.im_info["interlace"] = 1 + if i8(s[11]): + raise SyntaxError("unknown filter category") + return s + + def chunk_IDAT(self, pos, length): + + # image data + self.im_tile = [("zip", (0, 0)+self.im_size, pos, self.im_rawmode)] + self.im_idat = length + raise EOFError + + def chunk_IEND(self, pos, length): + + # end of PNG image + raise EOFError + + def chunk_PLTE(self, pos, length): + + # palette + s = ImageFile._safe_read(self.fp, length) + if self.im_mode == "P": + self.im_palette = "RGB", s + return s + + def chunk_tRNS(self, pos, length): + + # transparency + s = ImageFile._safe_read(self.fp, length) + if self.im_mode == "P": + if _simple_palette.match(s): + # tRNS contains only one full-transparent entry, + # other entries are full opaque + i = s.find(b"\0") + if i >= 0: + self.im_info["transparency"] = i + else: + # otherwise, we have a byte string with one alpha value + # for each palette entry + self.im_info["transparency"] = s + elif self.im_mode == "L": + self.im_info["transparency"] = i16(s) + elif self.im_mode == "RGB": + self.im_info["transparency"] = i16(s), i16(s[2:]), i16(s[4:]) + return s + + def chunk_gAMA(self, pos, length): + # gamma setting + s = ImageFile._safe_read(self.fp, length) + self.im_info["gamma"] = i32(s) / 100000.0 + return s + + def chunk_cHRM(self, pos, length): + # chromaticity, 8 unsigned ints, actual value is scaled by 100,000 + # WP x,y, Red x,y, Green x,y Blue x,y + + s = ImageFile._safe_read(self.fp, length) + raw_vals = struct.unpack('>%dI' % (len(s) // 4), s) + self.im_info['chromaticity'] = tuple(elt/100000.0 for elt in raw_vals) + return s + + def 
chunk_sRGB(self, pos, length): + # srgb rendering intent, 1 byte + # 0 perceptual + # 1 relative colorimetric + # 2 saturation + # 3 absolute colorimetric + + s = ImageFile._safe_read(self.fp, length) + self.im_info['srgb'] = i8(s) + return s + + def chunk_pHYs(self, pos, length): + + # pixels per unit + s = ImageFile._safe_read(self.fp, length) + px, py = i32(s), i32(s[4:]) + unit = i8(s[8]) + if unit == 1: # meter + dpi = int(px * 0.0254 + 0.5), int(py * 0.0254 + 0.5) + self.im_info["dpi"] = dpi + elif unit == 0: + self.im_info["aspect"] = px, py + return s + + def chunk_tEXt(self, pos, length): + + # text + s = ImageFile._safe_read(self.fp, length) + try: + k, v = s.split(b"\0", 1) + except ValueError: + # fallback for broken tEXt tags + k = s + v = b"" + if k: + if py3: + k = k.decode('latin-1', 'strict') + v = v.decode('latin-1', 'replace') + + self.im_info[k] = self.im_text[k] = v + self.check_text_memory(len(v)) + + return s + + def chunk_zTXt(self, pos, length): + + # compressed text + s = ImageFile._safe_read(self.fp, length) + try: + k, v = s.split(b"\0", 1) + except ValueError: + k = s + v = b"" + if v: + comp_method = i8(v[0]) + else: + comp_method = 0 + if comp_method != 0: + raise SyntaxError("Unknown compression method %s in zTXt chunk" % + comp_method) + try: + v = _safe_zlib_decompress(v[1:]) + except ValueError: + if ImageFile.LOAD_TRUNCATED_IMAGES: + v = b"" + else: + raise + except zlib.error: + v = b"" + + if k: + if py3: + k = k.decode('latin-1', 'strict') + v = v.decode('latin-1', 'replace') + + self.im_info[k] = self.im_text[k] = v + self.check_text_memory(len(v)) + + return s + + def chunk_iTXt(self, pos, length): + + # international text + r = s = ImageFile._safe_read(self.fp, length) + try: + k, r = r.split(b"\0", 1) + except ValueError: + return s + if len(r) < 2: + return s + cf, cm, r = i8(r[0]), i8(r[1]), r[2:] + try: + lang, tk, v = r.split(b"\0", 2) + except ValueError: + return s + if cf != 0: + if cm == 0: + try: + v = 
_safe_zlib_decompress(v) + except ValueError: + if ImageFile.LOAD_TRUNCATED_IMAGES: + return s + else: + raise + except zlib.error: + return s + else: + return s + if py3: + try: + k = k.decode("latin-1", "strict") + lang = lang.decode("utf-8", "strict") + tk = tk.decode("utf-8", "strict") + v = v.decode("utf-8", "strict") + except UnicodeError: + return s + + self.im_info[k] = self.im_text[k] = iTXt(v, lang, tk) + self.check_text_memory(len(v)) + + return s + + +# -------------------------------------------------------------------- +# PNG reader + +def _accept(prefix): + return prefix[:8] == _MAGIC + + +## +# Image plugin for PNG images. + +class PngImageFile(ImageFile.ImageFile): + + format = "PNG" + format_description = "Portable network graphics" + + def _open(self): + + if self.fp.read(8) != _MAGIC: + raise SyntaxError("not a PNG file") + + # + # Parse headers up to the first IDAT chunk + + self.png = PngStream(self.fp) + + while True: + + # + # get next chunk + + cid, pos, length = self.png.read() + + try: + s = self.png.call(cid, pos, length) + except EOFError: + break + except AttributeError: + logger.debug("%r %s %s (unknown)", cid, pos, length) + s = ImageFile._safe_read(self.fp, length) + + self.png.crc(cid, s) + + # + # Copy relevant attributes from the PngStream. An alternative + # would be to let the PngStream class modify these attributes + # directly, but that introduces circular references which are + # difficult to break if things go wrong in the decoder... 
+ # (believe me, I've tried ;-) + + self.mode = self.png.im_mode + self._size = self.png.im_size + self.info = self.png.im_info + self.text = self.png.im_text # experimental + self.tile = self.png.im_tile + + if self.png.im_palette: + rawmode, data = self.png.im_palette + self.palette = ImagePalette.raw(rawmode, data) + + self.__idat = length # used by load_read() + + def verify(self): + "Verify PNG file" + + if self.fp is None: + raise RuntimeError("verify must be called directly after open") + + # back up to beginning of IDAT block + self.fp.seek(self.tile[0][2] - 8) + + self.png.verify() + self.png.close() + + self.fp = None + + def load_prepare(self): + "internal: prepare to read PNG file" + + if self.info.get("interlace"): + self.decoderconfig = self.decoderconfig + (1,) + + ImageFile.ImageFile.load_prepare(self) + + def load_read(self, read_bytes): + "internal: read more image data" + + while self.__idat == 0: + # end of chunk, skip forward to next one + + self.fp.read(4) # CRC + + cid, pos, length = self.png.read() + + if cid not in [b"IDAT", b"DDAT"]: + self.png.push(cid, pos, length) + return b"" + + self.__idat = length # empty chunks are allowed + + # read more data from this chunk + if read_bytes <= 0: + read_bytes = self.__idat + else: + read_bytes = min(read_bytes, self.__idat) + + self.__idat = self.__idat - read_bytes + + return self.fp.read(read_bytes) + + def load_end(self): + "internal: finished reading image data" + + self.png.close() + self.png = None + + +# -------------------------------------------------------------------- +# PNG writer + +_OUTMODES = { + # supported PIL modes, and corresponding rawmodes/bits/color combinations + "1": ("1", b'\x01\x00'), + "L;1": ("L;1", b'\x01\x00'), + "L;2": ("L;2", b'\x02\x00'), + "L;4": ("L;4", b'\x04\x00'), + "L": ("L", b'\x08\x00'), + "LA": ("LA", b'\x08\x04'), + "I": ("I;16B", b'\x10\x00'), + "P;1": ("P;1", b'\x01\x03'), + "P;2": ("P;2", b'\x02\x03'), + "P;4": ("P;4", b'\x04\x03'), + "P": ("P", 
b'\x08\x03'), + "RGB": ("RGB", b'\x08\x02'), + "RGBA": ("RGBA", b'\x08\x06'), +} + + +def putchunk(fp, cid, *data): + """Write a PNG chunk (including CRC field)""" + + data = b"".join(data) + + fp.write(o32(len(data)) + cid) + fp.write(data) + crc = _crc32(data, _crc32(cid)) + fp.write(o32(crc)) + + +class _idat(object): + # wrap output from the encoder in IDAT chunks + + def __init__(self, fp, chunk): + self.fp = fp + self.chunk = chunk + + def write(self, data): + self.chunk(self.fp, b"IDAT", data) + + +def _save(im, fp, filename, chunk=putchunk): + # save an image to disk (called by the save method) + + mode = im.mode + + if mode == "P": + + # + # attempt to minimize storage requirements for palette images + if "bits" in im.encoderinfo: + # number of bits specified by user + colors = 1 << im.encoderinfo["bits"] + else: + # check palette contents + if im.palette: + colors = max(min(len(im.palette.getdata()[1])//3, 256), 2) + else: + colors = 256 + + if colors <= 2: + bits = 1 + elif colors <= 4: + bits = 2 + elif colors <= 16: + bits = 4 + else: + bits = 8 + if bits != 8: + mode = "%s;%d" % (mode, bits) + + # encoder options + im.encoderconfig = (im.encoderinfo.get("optimize", False), + im.encoderinfo.get("compress_level", -1), + im.encoderinfo.get("compress_type", -1), + im.encoderinfo.get("dictionary", b"")) + + # get the corresponding PNG mode + try: + rawmode, mode = _OUTMODES[mode] + except KeyError: + raise IOError("cannot write mode %s as PNG" % mode) + + # + # write minimal PNG file + + fp.write(_MAGIC) + + chunk(fp, b"IHDR", + o32(im.size[0]), o32(im.size[1]), # 0: size + mode, # 8: depth/type + b'\0', # 10: compression + b'\0', # 11: filter category + b'\0') # 12: interlace flag + + chunks = [b"cHRM", b"gAMA", b"sBIT", b"sRGB", b"tIME"] + + icc = im.encoderinfo.get("icc_profile", im.info.get("icc_profile")) + if icc: + # ICC profile + # according to PNG spec, the iCCP chunk contains: + # Profile name 1-79 bytes (character string) + # Null separator 1 
byte (null character) + # Compression method 1 byte (0) + # Compressed profile n bytes (zlib with deflate compression) + name = b"ICC Profile" + data = name + b"\0\0" + zlib.compress(icc) + chunk(fp, b"iCCP", data) + + # You must either have sRGB or iCCP. + # Disallow sRGB chunks when an iCCP-chunk has been emitted. + chunks.remove(b"sRGB") + + info = im.encoderinfo.get("pnginfo") + if info: + chunks_multiple_allowed = [b"sPLT", b"iTXt", b"tEXt", b"zTXt"] + for cid, data in info.chunks: + if cid in chunks: + chunks.remove(cid) + chunk(fp, cid, data) + elif cid in chunks_multiple_allowed: + chunk(fp, cid, data) + + if im.mode == "P": + palette_byte_number = (2 ** bits) * 3 + palette_bytes = im.im.getpalette("RGB")[:palette_byte_number] + while len(palette_bytes) < palette_byte_number: + palette_bytes += b'\0' + chunk(fp, b"PLTE", palette_bytes) + + transparency = im.encoderinfo.get('transparency', + im.info.get('transparency', None)) + + if transparency or transparency == 0: + if im.mode == "P": + # limit to actual palette size + alpha_bytes = 2**bits + if isinstance(transparency, bytes): + chunk(fp, b"tRNS", transparency[:alpha_bytes]) + else: + transparency = max(0, min(255, transparency)) + alpha = b'\xFF' * transparency + b'\0' + chunk(fp, b"tRNS", alpha[:alpha_bytes]) + elif im.mode == "L": + transparency = max(0, min(65535, transparency)) + chunk(fp, b"tRNS", o16(transparency)) + elif im.mode == "RGB": + red, green, blue = transparency + chunk(fp, b"tRNS", o16(red) + o16(green) + o16(blue)) + else: + if "transparency" in im.encoderinfo: + # don't bother with transparency if it's an RGBA + # and it's in the info dict. It's probably just stale. 
+ raise IOError("cannot use transparency for this mode") + else: + if im.mode == "P" and im.im.getpalettemode() == "RGBA": + alpha = im.im.getpalette("RGBA", "A") + alpha_bytes = 2**bits + chunk(fp, b"tRNS", alpha[:alpha_bytes]) + + dpi = im.encoderinfo.get("dpi") + if dpi: + chunk(fp, b"pHYs", + o32(int(dpi[0] / 0.0254 + 0.5)), + o32(int(dpi[1] / 0.0254 + 0.5)), + b'\x01') + + info = im.encoderinfo.get("pnginfo") + if info: + chunks = [b"bKGD", b"hIST"] + for cid, data in info.chunks: + if cid in chunks: + chunks.remove(cid) + chunk(fp, cid, data) + + ImageFile._save(im, _idat(fp, chunk), + [("zip", (0, 0)+im.size, 0, rawmode)]) + + chunk(fp, b"IEND", b"") + + if hasattr(fp, "flush"): + fp.flush() + + +# -------------------------------------------------------------------- +# PNG chunk converter + +def getchunks(im, **params): + """Return a list of PNG chunks representing this image.""" + + class collector(object): + data = [] + + def write(self, data): + pass + + def append(self, chunk): + self.data.append(chunk) + + def append(fp, cid, *data): + data = b"".join(data) + crc = o32(_crc32(data, _crc32(cid))) + fp.append((cid, data, crc)) + + fp = collector() + + try: + im.encoderinfo = params + _save(im, fp, None, append) + finally: + del im.encoderinfo + + return fp.data + + +# -------------------------------------------------------------------- +# Registry + +Image.register_open(PngImageFile.format, PngImageFile, _accept) +Image.register_save(PngImageFile.format, _save) + +Image.register_extension(PngImageFile.format, ".png") + +Image.register_mime(PngImageFile.format, "image/png") diff --git a/thesisenv/lib/python3.6/site-packages/PIL/PpmImagePlugin.py b/thesisenv/lib/python3.6/site-packages/PIL/PpmImagePlugin.py new file mode 100644 index 0000000..8002678 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/PIL/PpmImagePlugin.py @@ -0,0 +1,158 @@ +# +# The Python Imaging Library. 
+# $Id$ +# +# PPM support for PIL +# +# History: +# 96-03-24 fl Created +# 98-03-06 fl Write RGBA images (as RGB, that is) +# +# Copyright (c) Secret Labs AB 1997-98. +# Copyright (c) Fredrik Lundh 1996. +# +# See the README file for information on usage and redistribution. +# + + +from . import Image, ImageFile + +__version__ = "0.2" + +# +# -------------------------------------------------------------------- + +b_whitespace = b'\x20\x09\x0a\x0b\x0c\x0d' + +MODES = { + # standard + b"P4": "1", + b"P5": "L", + b"P6": "RGB", + # extensions + b"P0CMYK": "CMYK", + # PIL extensions (for test purposes only) + b"PyP": "P", + b"PyRGBA": "RGBA", + b"PyCMYK": "CMYK" +} + + +def _accept(prefix): + return prefix[0:1] == b"P" and prefix[1] in b"0456y" + + +## +# Image plugin for PBM, PGM, and PPM images. + +class PpmImageFile(ImageFile.ImageFile): + + format = "PPM" + format_description = "Pbmplus image" + + def _token(self, s=b""): + while True: # read until next whitespace + c = self.fp.read(1) + if not c or c in b_whitespace: + break + if c > b'\x79': + raise ValueError("Expected ASCII value, found binary") + s = s + c + if (len(s) > 9): + raise ValueError("Expected int, got > 9 digits") + return s + + def _open(self): + + # check magic + s = self.fp.read(1) + if s != b"P": + raise SyntaxError("not a PPM file") + mode = MODES[self._token(s)] + + if mode == "1": + self.mode = "1" + rawmode = "1;I" + else: + self.mode = rawmode = mode + + for ix in range(3): + while True: + while True: + s = self.fp.read(1) + if s not in b_whitespace: + break + if s == b"": + raise ValueError( + "File does not extend beyond magic number") + if s != b"#": + break + s = self.fp.readline() + s = int(self._token(s)) + if ix == 0: + xsize = s + elif ix == 1: + ysize = s + if mode == "1": + break + elif ix == 2: + # maxgrey + if s > 255: + if not mode == 'L': + raise ValueError("Too many colors for band: %s" % s) + if s < 2**16: + self.mode = 'I' + rawmode = 'I;16B' + else: + self.mode = 'I' + 
rawmode = 'I;32B' + + self._size = xsize, ysize + self.tile = [("raw", + (0, 0, xsize, ysize), + self.fp.tell(), + (rawmode, 0, 1))] + + +# +# -------------------------------------------------------------------- + +def _save(im, fp, filename): + if im.mode == "1": + rawmode, head = "1;I", b"P4" + elif im.mode == "L": + rawmode, head = "L", b"P5" + elif im.mode == "I": + if im.getextrema()[1] < 2**16: + rawmode, head = "I;16B", b"P5" + else: + rawmode, head = "I;32B", b"P5" + elif im.mode == "RGB": + rawmode, head = "RGB", b"P6" + elif im.mode == "RGBA": + rawmode, head = "RGB", b"P6" + else: + raise IOError("cannot write mode %s as PPM" % im.mode) + fp.write(head + ("\n%d %d\n" % im.size).encode('ascii')) + if head == b"P6": + fp.write(b"255\n") + if head == b"P5": + if rawmode == "L": + fp.write(b"255\n") + elif rawmode == "I;16B": + fp.write(b"65535\n") + elif rawmode == "I;32B": + fp.write(b"2147483648\n") + ImageFile._save(im, fp, [("raw", (0, 0)+im.size, 0, (rawmode, 0, 1))]) + + # ALTERNATIVE: save via builtin debug function + # im._dump(filename) + +# +# -------------------------------------------------------------------- + + +Image.register_open(PpmImageFile.format, PpmImageFile, _accept) +Image.register_save(PpmImageFile.format, _save) + +Image.register_extensions(PpmImageFile.format, [".pbm", ".pgm", ".ppm"]) diff --git a/thesisenv/lib/python3.6/site-packages/PIL/PsdImagePlugin.py b/thesisenv/lib/python3.6/site-packages/PIL/PsdImagePlugin.py new file mode 100644 index 0000000..2d64ecd --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/PIL/PsdImagePlugin.py @@ -0,0 +1,307 @@ +# +# The Python Imaging Library +# $Id$ +# +# Adobe PSD 2.5/3.0 file handling +# +# History: +# 1995-09-01 fl Created +# 1997-01-03 fl Read most PSD images +# 1997-01-18 fl Fixed P and CMYK support +# 2001-10-21 fl Added seek/tell support (for layers) +# +# Copyright (c) 1997-2001 by Secret Labs AB. 
+# Copyright (c) 1995-2001 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +__version__ = "0.4" + +from . import Image, ImageFile, ImagePalette +from ._binary import i8, i16be as i16, i32be as i32 + +MODES = { + # (photoshop mode, bits) -> (pil mode, required channels) + (0, 1): ("1", 1), + (0, 8): ("L", 1), + (1, 8): ("L", 1), + (2, 8): ("P", 1), + (3, 8): ("RGB", 3), + (4, 8): ("CMYK", 4), + (7, 8): ("L", 1), # FIXME: multilayer + (8, 8): ("L", 1), # duotone + (9, 8): ("LAB", 3) +} + + +# --------------------------------------------------------------------. +# read PSD images + +def _accept(prefix): + return prefix[:4] == b"8BPS" + + +## +# Image plugin for Photoshop images. + +class PsdImageFile(ImageFile.ImageFile): + + format = "PSD" + format_description = "Adobe Photoshop" + + def _open(self): + + read = self.fp.read + + # + # header + + s = read(26) + if s[:4] != b"8BPS" or i16(s[4:]) != 1: + raise SyntaxError("not a PSD file") + + psd_bits = i16(s[22:]) + psd_channels = i16(s[12:]) + psd_mode = i16(s[24:]) + + mode, channels = MODES[(psd_mode, psd_bits)] + + if channels > psd_channels: + raise IOError("not enough channels") + + self.mode = mode + self._size = i32(s[18:]), i32(s[14:]) + + # + # color mode data + + size = i32(read(4)) + if size: + data = read(size) + if mode == "P" and size == 768: + self.palette = ImagePalette.raw("RGB;L", data) + + # + # image resources + + self.resources = [] + + size = i32(read(4)) + if size: + # load resources + end = self.fp.tell() + size + while self.fp.tell() < end: + signature = read(4) + id = i16(read(2)) + name = read(i8(read(1))) + if not (len(name) & 1): + read(1) # padding + data = read(i32(read(4))) + if (len(data) & 1): + read(1) # padding + self.resources.append((id, name, data)) + if id == 1039: # ICC profile + self.info["icc_profile"] = data + + # + # layer and mask information + + self.layers = [] + + size = i32(read(4)) + if size: + end = self.fp.tell() + size 
+ size = i32(read(4)) + if size: + self.layers = _layerinfo(self.fp) + self.fp.seek(end) + + # + # image descriptor + + self.tile = _maketile(self.fp, mode, (0, 0) + self.size, channels) + + # keep the file open + self._fp = self.fp + self.frame = 1 + self._min_frame = 1 + + @property + def n_frames(self): + return len(self.layers) + + @property + def is_animated(self): + return len(self.layers) > 1 + + def seek(self, layer): + if not self._seek_check(layer): + return + + # seek to given layer (1..max) + try: + name, mode, bbox, tile = self.layers[layer-1] + self.mode = mode + self.tile = tile + self.frame = layer + self.fp = self._fp + return name, bbox + except IndexError: + raise EOFError("no such layer") + + def tell(self): + # return layer number (0=image, 1..max=layers) + return self.frame + + def load_prepare(self): + # create image memory if necessary + if not self.im or\ + self.im.mode != self.mode or self.im.size != self.size: + self.im = Image.core.fill(self.mode, self.size, 0) + # create palette (optional) + if self.mode == "P": + Image.Image.load(self) + + +def _layerinfo(file): + # read layerinfo block + layers = [] + read = file.read + for i in range(abs(i16(read(2)))): + + # bounding box + y0 = i32(read(4)) + x0 = i32(read(4)) + y1 = i32(read(4)) + x1 = i32(read(4)) + + # image info + info = [] + mode = [] + types = list(range(i16(read(2)))) + if len(types) > 4: + continue + + for i in types: + type = i16(read(2)) + + if type == 65535: + m = "A" + else: + m = "RGBA"[type] + + mode.append(m) + size = i32(read(4)) + info.append((m, size)) + + # figure out the image mode + mode.sort() + if mode == ["R"]: + mode = "L" + elif mode == ["B", "G", "R"]: + mode = "RGB" + elif mode == ["A", "B", "G", "R"]: + mode = "RGBA" + else: + mode = None # unknown + + # skip over blend flags and extra information + filler = read(12) + name = "" + size = i32(read(4)) + combined = 0 + if size: + length = i32(read(4)) + if length: + mask_y = i32(read(4)) + mask_x = 
i32(read(4)) + mask_h = i32(read(4)) - mask_y + mask_w = i32(read(4)) - mask_x + file.seek(length - 16, 1) + combined += length + 4 + + length = i32(read(4)) + if length: + file.seek(length, 1) + combined += length + 4 + + length = i8(read(1)) + if length: + # Don't know the proper encoding, + # Latin-1 should be a good guess + name = read(length).decode('latin-1', 'replace') + combined += length + 1 + + file.seek(size - combined, 1) + layers.append((name, mode, (x0, y0, x1, y1))) + + # get tiles + i = 0 + for name, mode, bbox in layers: + tile = [] + for m in mode: + t = _maketile(file, m, bbox, 1) + if t: + tile.extend(t) + layers[i] = name, mode, bbox, tile + i += 1 + + return layers + + +def _maketile(file, mode, bbox, channels): + + tile = None + read = file.read + + compression = i16(read(2)) + + xsize = bbox[2] - bbox[0] + ysize = bbox[3] - bbox[1] + + offset = file.tell() + + if compression == 0: + # + # raw compression + tile = [] + for channel in range(channels): + layer = mode[channel] + if mode == "CMYK": + layer += ";I" + tile.append(("raw", bbox, offset, layer)) + offset = offset + xsize*ysize + + elif compression == 1: + # + # packbits compression + i = 0 + tile = [] + bytecount = read(channels * ysize * 2) + offset = file.tell() + for channel in range(channels): + layer = mode[channel] + if mode == "CMYK": + layer += ";I" + tile.append( + ("packbits", bbox, offset, layer) + ) + for y in range(ysize): + offset = offset + i16(bytecount[i:i+2]) + i += 2 + + file.seek(offset) + + if offset & 1: + read(1) # padding + + return tile + +# -------------------------------------------------------------------- +# registry + + +Image.register_open(PsdImageFile.format, PsdImageFile, _accept) + +Image.register_extension(PsdImageFile.format, ".psd") diff --git a/thesisenv/lib/python3.6/site-packages/PIL/PyAccess.py b/thesisenv/lib/python3.6/site-packages/PIL/PyAccess.py new file mode 100644 index 0000000..cce2de2 --- /dev/null +++ 
b/thesisenv/lib/python3.6/site-packages/PIL/PyAccess.py @@ -0,0 +1,319 @@ +# +# The Python Imaging Library +# Pillow fork +# +# Python implementation of the PixelAccess Object +# +# Copyright (c) 1997-2009 by Secret Labs AB. All rights reserved. +# Copyright (c) 1995-2009 by Fredrik Lundh. +# Copyright (c) 2013 Eric Soroos +# +# See the README file for information on usage and redistribution +# + +# Notes: +# +# * Implements the pixel access object following Access. +# * Does not implement the line functions, as they don't appear to be used +# * Taking only the tuple form, which is used from python. +# * Fill.c uses the integer form, but it's still going to use the old +# Access.c implementation. +# + +import logging +import sys + +from cffi import FFI + + +logger = logging.getLogger(__name__) + + +defs = """ +struct Pixel_RGBA { + unsigned char r,g,b,a; +}; +struct Pixel_I16 { + unsigned char l,r; +}; +""" +ffi = FFI() +ffi.cdef(defs) + + +class PyAccess(object): + + def __init__(self, img, readonly=False): + vals = dict(img.im.unsafe_ptrs) + self.readonly = readonly + self.image8 = ffi.cast('unsigned char **', vals['image8']) + self.image32 = ffi.cast('int **', vals['image32']) + self.image = ffi.cast('unsigned char **', vals['image']) + self.xsize, self.ysize = img.im.size + + # Keep pointer to im object to prevent dereferencing. + self._im = img.im + + # Debugging is polluting test traces, only useful here + # when hacking on PyAccess + # logger.debug("%s", vals) + self._post_init() + + def _post_init(self): + pass + + def __setitem__(self, xy, color): + """ + Modifies the pixel at x,y. The color is given as a single + numerical value for single band images, and a tuple for + multi-band images + + :param xy: The pixel coordinate, given as (x, y). See + :ref:`coordinate-system`. + :param color: The pixel value. 
+ """ + if self.readonly: + raise ValueError('Attempt to putpixel a read only image') + (x, y) = self.check_xy(xy) + return self.set_pixel(x, y, color) + + def __getitem__(self, xy): + """ + Returns the pixel at x,y. The pixel is returned as a single + value for single band images or a tuple for multiple band + images + + :param xy: The pixel coordinate, given as (x, y). See + :ref:`coordinate-system`. + :returns: a pixel value for single band images, a tuple of + pixel values for multiband images. + """ + + (x, y) = self.check_xy(xy) + return self.get_pixel(x, y) + + putpixel = __setitem__ + getpixel = __getitem__ + + def check_xy(self, xy): + (x, y) = xy + if not (0 <= x < self.xsize and 0 <= y < self.ysize): + raise ValueError('pixel location out of range') + return xy + + +class _PyAccess32_2(PyAccess): + """ PA, LA, stored in first and last bytes of a 32 bit word """ + def _post_init(self, *args, **kwargs): + self.pixels = ffi.cast("struct Pixel_RGBA **", self.image32) + + def get_pixel(self, x, y): + pixel = self.pixels[y][x] + return (pixel.r, pixel.a) + + def set_pixel(self, x, y, color): + pixel = self.pixels[y][x] + # tuple + pixel.r = min(color[0], 255) + pixel.a = min(color[1], 255) + + +class _PyAccess32_3(PyAccess): + """ RGB and friends, stored in the first three bytes of a 32 bit word """ + + def _post_init(self, *args, **kwargs): + self.pixels = ffi.cast("struct Pixel_RGBA **", self.image32) + + def get_pixel(self, x, y): + pixel = self.pixels[y][x] + return (pixel.r, pixel.g, pixel.b) + + def set_pixel(self, x, y, color): + pixel = self.pixels[y][x] + # tuple + pixel.r = min(color[0], 255) + pixel.g = min(color[1], 255) + pixel.b = min(color[2], 255) + pixel.a = 255 + + +class _PyAccess32_4(PyAccess): + """ RGBA etc, all 4 bytes of a 32 bit word """ + def _post_init(self, *args, **kwargs): + self.pixels = ffi.cast("struct Pixel_RGBA **", self.image32) + + def get_pixel(self, x, y): + pixel = self.pixels[y][x] + return (pixel.r, pixel.g, pixel.b, 
pixel.a) + + def set_pixel(self, x, y, color): + pixel = self.pixels[y][x] + # tuple + pixel.r = min(color[0], 255) + pixel.g = min(color[1], 255) + pixel.b = min(color[2], 255) + pixel.a = min(color[3], 255) + + +class _PyAccess8(PyAccess): + """ 1, L, P, 8 bit images stored as uint8 """ + def _post_init(self, *args, **kwargs): + self.pixels = self.image8 + + def get_pixel(self, x, y): + return self.pixels[y][x] + + def set_pixel(self, x, y, color): + try: + # integer + self.pixels[y][x] = min(color, 255) + except TypeError: + # tuple + self.pixels[y][x] = min(color[0], 255) + + +class _PyAccessI16_N(PyAccess): + """ I;16 access, native bitendian without conversion """ + def _post_init(self, *args, **kwargs): + self.pixels = ffi.cast('unsigned short **', self.image) + + def get_pixel(self, x, y): + return self.pixels[y][x] + + def set_pixel(self, x, y, color): + try: + # integer + self.pixels[y][x] = min(color, 65535) + except TypeError: + # tuple + self.pixels[y][x] = min(color[0], 65535) + + +class _PyAccessI16_L(PyAccess): + """ I;16L access, with conversion """ + def _post_init(self, *args, **kwargs): + self.pixels = ffi.cast('struct Pixel_I16 **', self.image) + + def get_pixel(self, x, y): + pixel = self.pixels[y][x] + return pixel.l + pixel.r * 256 + + def set_pixel(self, x, y, color): + pixel = self.pixels[y][x] + try: + color = min(color, 65535) + except TypeError: + color = min(color[0], 65535) + + pixel.l = color & 0xFF + pixel.r = color >> 8 + + +class _PyAccessI16_B(PyAccess): + """ I;16B access, with conversion """ + def _post_init(self, *args, **kwargs): + self.pixels = ffi.cast('struct Pixel_I16 **', self.image) + + def get_pixel(self, x, y): + pixel = self.pixels[y][x] + return pixel.l * 256 + pixel.r + + def set_pixel(self, x, y, color): + pixel = self.pixels[y][x] + try: + color = min(color, 65535) + except: + color = min(color[0], 65535) + + pixel.l = color >> 8 + pixel.r = color & 0xFF + + +class _PyAccessI32_N(PyAccess): + """ Signed Int32 
access, native endian """ + def _post_init(self, *args, **kwargs): + self.pixels = self.image32 + + def get_pixel(self, x, y): + return self.pixels[y][x] + + def set_pixel(self, x, y, color): + self.pixels[y][x] = color + + +class _PyAccessI32_Swap(PyAccess): + """ I;32L/B access, with byteswapping conversion """ + def _post_init(self, *args, **kwargs): + self.pixels = self.image32 + + def reverse(self, i): + orig = ffi.new('int *', i) + chars = ffi.cast('unsigned char *', orig) + chars[0], chars[1], chars[2], chars[3] = chars[3], chars[2], \ + chars[1], chars[0] + return ffi.cast('int *', chars)[0] + + def get_pixel(self, x, y): + return self.reverse(self.pixels[y][x]) + + def set_pixel(self, x, y, color): + self.pixels[y][x] = self.reverse(color) + + +class _PyAccessF(PyAccess): + """ 32 bit float access """ + def _post_init(self, *args, **kwargs): + self.pixels = ffi.cast('float **', self.image32) + + def get_pixel(self, x, y): + return self.pixels[y][x] + + def set_pixel(self, x, y, color): + try: + # not a tuple + self.pixels[y][x] = color + except TypeError: + # tuple + self.pixels[y][x] = color[0] + + +mode_map = {'1': _PyAccess8, + 'L': _PyAccess8, + 'P': _PyAccess8, + 'LA': _PyAccess32_2, + 'La': _PyAccess32_2, + 'PA': _PyAccess32_2, + 'RGB': _PyAccess32_3, + 'LAB': _PyAccess32_3, + 'HSV': _PyAccess32_3, + 'YCbCr': _PyAccess32_3, + 'RGBA': _PyAccess32_4, + 'RGBa': _PyAccess32_4, + 'RGBX': _PyAccess32_4, + 'CMYK': _PyAccess32_4, + 'F': _PyAccessF, + 'I': _PyAccessI32_N, + } + +if sys.byteorder == 'little': + mode_map['I;16'] = _PyAccessI16_N + mode_map['I;16L'] = _PyAccessI16_N + mode_map['I;16B'] = _PyAccessI16_B + + mode_map['I;32L'] = _PyAccessI32_N + mode_map['I;32B'] = _PyAccessI32_Swap +else: + mode_map['I;16'] = _PyAccessI16_L + mode_map['I;16L'] = _PyAccessI16_L + mode_map['I;16B'] = _PyAccessI16_N + + mode_map['I;32L'] = _PyAccessI32_Swap + mode_map['I;32B'] = _PyAccessI32_N + + +def new(img, readonly=False): + access_type = mode_map.get(img.mode, 
None) + if not access_type: + logger.debug("PyAccess Not Implemented: %s", img.mode) + return None + return access_type(img, readonly) diff --git a/thesisenv/lib/python3.6/site-packages/PIL/SgiImagePlugin.py b/thesisenv/lib/python3.6/site-packages/PIL/SgiImagePlugin.py new file mode 100644 index 0000000..88df351 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/PIL/SgiImagePlugin.py @@ -0,0 +1,228 @@ +# +# The Python Imaging Library. +# $Id$ +# +# SGI image file handling +# +# See "The SGI Image File Format (Draft version 0.97)", Paul Haeberli. +# +# +# +# History: +# 2017-22-07 mb Add RLE decompression +# 2016-16-10 mb Add save method without compression +# 1995-09-10 fl Created +# +# Copyright (c) 2016 by Mickael Bonfill. +# Copyright (c) 2008 by Karsten Hiddemann. +# Copyright (c) 1997 by Secret Labs AB. +# Copyright (c) 1995 by Fredrik Lundh. +# +# See the README file for information on usage and redistribution. +# + + +from . import Image, ImageFile +from ._binary import i8, o8, i16be as i16 +from ._util import py3 +import struct +import os + + +__version__ = "0.3" + + +def _accept(prefix): + return len(prefix) >= 2 and i16(prefix) == 474 + + +MODES = { + (1, 1, 1): "L", + (1, 2, 1): "L", + (2, 1, 1): "L;16B", + (2, 2, 1): "L;16B", + (1, 3, 3): "RGB", + (2, 3, 3): "RGB;16B", + (1, 3, 4): "RGBA", + (2, 3, 4): "RGBA;16B" +} + + +## +# Image plugin for SGI images. 
class SgiImageFile(ImageFile.ImageFile):
    """ImageFile subclass reading SGI raster files (.bw/.rgb/.rgba/.sgi).

    Supports verbatim (uncompressed) and RLE-compressed images, 8- or
    16-bit per channel, as advertised by the 512-byte SGI header.
    """

    format = "SGI"
    format_description = "SGI Image File Format"

    def _open(self):

        # HEAD: the SGI header is a fixed 512-byte block.
        headlen = 512
        s = self.fp.read(headlen)

        # magic number : 474
        if i16(s) != 474:
            raise ValueError("Not an SGI image file")

        # compression : 0 = verbatim, 1 = RLE
        compression = i8(s[2])

        # bpc : 1 or 2 bytes (8bits or 16bits) per channel
        bpc = i8(s[3])

        # dimension : 1, 2 or 3 (depending on xsize, ysize and zsize)
        dimension = i16(s[4:])

        # xsize : width
        xsize = i16(s[6:])

        # ysize : height
        ysize = i16(s[8:])

        # zsize : channels count
        zsize = i16(s[10:])

        # layout key into the module-level MODES table
        layout = bpc, dimension, zsize

        # determine mode from bits/zsize
        rawmode = ""
        try:
            rawmode = MODES[layout]
        except KeyError:
            pass

        if rawmode == "":
            raise ValueError("Unsupported SGI image mode")

        self._size = xsize, ysize
        self.mode = rawmode.split(";")[0]

        # orientation -1 : scanlines begin at the bottom-left corner
        orientation = -1

        # decoder info
        if compression == 0:
            # Verbatim data is stored planar: one full page per channel.
            pagesize = xsize * ysize * bpc
            if bpc == 2:
                self.tile = [("SGI16", (0, 0) + self.size,
                              headlen, (self.mode, 0, orientation))]
            else:
                self.tile = []
                offset = headlen
                for layer in self.mode:
                    self.tile.append(
                        ("raw", (0, 0) + self.size,
                         offset, (layer, 0, orientation)))
                    offset += pagesize
        elif compression == 1:
            self.tile = [("sgi_rle", (0, 0) + self.size,
                          headlen, (rawmode, orientation, bpc))]


def _save(im, fp, filename):
    """Write *im* to *fp* as an uncompressed SGI file.

    Only "L", "RGB" and "RGBA" images are supported.  ``bpc`` (1 or 2
    bytes per channel) may be passed through ``im.encoderinfo``.
    Raises ValueError on unsupported modes/precision or band mismatch.
    """
    if im.mode not in ("RGB", "RGBA", "L"):
        raise ValueError("Unsupported SGI image mode")

    # Get the keyword arguments
    info = im.encoderinfo

    # Byte-per-pixel precision, 1 = 8bits per pixel
    bpc = info.get("bpc", 1)

    if bpc not in (1, 2):
        raise ValueError("Unsupported number of bytes per pixel")

    # Flip the image, since the origin of SGI file is the bottom-left corner
    orientation = -1
    # Define the file as SGI File Format
    magicNumber = 474
    # Run-Length Encoding Compression - Unsupported at this time
    rle = 0

    # Number of dimensions (x,y,z)
    dim = 3
    # X Dimension = width / Y Dimension = height
    x, y = im.size
    if im.mode == "L" and y == 1:
        dim = 1
    elif im.mode == "L":
        dim = 2
    # Z Dimension: Number of channels
    z = len(im.mode)

    if dim == 1 or dim == 2:
        z = 1

    # assert we've got the right number of bands.
    if len(im.getbands()) != z:
        raise ValueError("incorrect number of bands in SGI write: %s vs %s" %
                         (z, len(im.getbands())))

    # Minimum Byte value
    pinmin = 0
    # Maximum Byte value (255 = 8bits per pixel)
    pinmax = 255
    # Image name (79 characters max, truncated below in write)
    imgName = os.path.splitext(os.path.basename(filename))[0]
    if py3:
        imgName = imgName.encode('ascii', 'ignore')
    # Standard representation of pixel in the file
    colormap = 0
    fp.write(struct.pack('>h', magicNumber))
    fp.write(o8(rle))
    fp.write(o8(bpc))
    fp.write(struct.pack('>H', dim))
    fp.write(struct.pack('>H', x))
    fp.write(struct.pack('>H', y))
    fp.write(struct.pack('>H', z))
    fp.write(struct.pack('>l', pinmin))
    fp.write(struct.pack('>l', pinmax))
    fp.write(struct.pack('4s', b''))        # dummy
    fp.write(struct.pack('79s', imgName))   # truncates to 79 chars
    fp.write(struct.pack('s', b''))         # force null byte after imgname
    fp.write(struct.pack('>l', colormap))
    fp.write(struct.pack('404s', b''))      # dummy

    rawmode = 'L'
    if bpc == 2:
        rawmode = 'L;16B'

    # Write each band as a separate, bottom-up (orientation -1) plane.
    for channel in im.split():
        fp.write(channel.tobytes('raw', rawmode, 0, orientation))

    # BUG FIX: the original called fp.close() here.  Image.save() owns the
    # file object and closes it only if it opened it itself; closing a
    # caller-supplied fp broke saving to file-like objects (this close was
    # removed upstream in Pillow 5.2.0).


class SGI16Decoder(ImageFile.PyDecoder):
    """Decoder for 16-bit uncompressed SGI data (the "SGI16" tile codec).

    Reads one big-endian 16-bit plane per channel starting right after
    the 512-byte header and deposits each plane as a band of the image.
    """

    _pulls_fd = True

    def decode(self, buffer):
        rawmode, stride, orientation = self.args
        pagesize = self.state.xsize * self.state.ysize
        zsize = len(self.mode)
        # Planes start immediately after the fixed-size header.
        self.fd.seek(512)

        for band in range(zsize):
            channel = Image.new('L', (self.state.xsize, self.state.ysize))
            channel.frombytes(self.fd.read(2 * pagesize), 'raw',
                              'L;16B', stride, orientation)
            self.im.putband(channel.im, band)

        # -1, 0: decoding is complete, no error.
        return -1, 0


#
# registry


Image.register_decoder("SGI16", SGI16Decoder)
Image.register_open(SgiImageFile.format, SgiImageFile, _accept)
Image.register_save(SgiImageFile.format, _save)
Image.register_mime(SgiImageFile.format, "image/sgi")
Image.register_mime(SgiImageFile.format, "image/rgb")

Image.register_extensions(SgiImageFile.format,
                          [".bw", ".rgb", ".rgba", ".sgi"])

# End of file

# --- next vendored file in the original dump:
# --- thesisenv/lib/python3.6/site-packages/PIL/SpiderImagePlugin.py ---
#
# The Python Imaging Library.
#
# SPIDER image file handling
#
# History:
# 2004-08-02 Created BB
# 2006-03-02 added save method
# 2006-03-13 added support for stack images
#
# Copyright (c) 2004 by Health Research Inc. (HRI) RENSSELAER, NY 12144.
# Copyright (c) 2004 by William Baxter.
# Copyright (c) 2004 by Secret Labs AB.
# Copyright (c) 2004 by Fredrik Lundh.
#

##
# Image plugin for the Spider image format.  This format is used
# by the SPIDER software, in processing image data from electron
# microscopy and tomography.
##

#
# SpiderImagePlugin.py
#
# The Spider image format is used by SPIDER software, in processing
# image data from electron microscopy and tomography.
+# +# Spider home page: +# https://spider.wadsworth.org/spider_doc/spider/docs/spider.html +# +# Details about the Spider image format: +# https://spider.wadsworth.org/spider_doc/spider/docs/image_doc.html +# + +from __future__ import print_function + +from PIL import Image, ImageFile +import os +import struct +import sys + + +def isInt(f): + try: + i = int(f) + if f-i == 0: + return 1 + else: + return 0 + except (ValueError, OverflowError): + return 0 + + +iforms = [1, 3, -11, -12, -21, -22] + + +# There is no magic number to identify Spider files, so just check a +# series of header locations to see if they have reasonable values. +# Returns no. of bytes in the header, if it is a valid Spider header, +# otherwise returns 0 + +def isSpiderHeader(t): + h = (99,) + t # add 1 value so can use spider header index start=1 + # header values 1,2,5,12,13,22,23 should be integers + for i in [1, 2, 5, 12, 13, 22, 23]: + if not isInt(h[i]): + return 0 + # check iform + iform = int(h[5]) + if iform not in iforms: + return 0 + # check other header values + labrec = int(h[13]) # no. records in file header + labbyt = int(h[22]) # total no. 
of bytes in header + lenbyt = int(h[23]) # record length in bytes + if labbyt != (labrec * lenbyt): + return 0 + # looks like a valid header + return labbyt + + +def isSpiderImage(filename): + with open(filename, 'rb') as fp: + f = fp.read(92) # read 23 * 4 bytes + t = struct.unpack('>23f', f) # try big-endian first + hdrlen = isSpiderHeader(t) + if hdrlen == 0: + t = struct.unpack('<23f', f) # little-endian + hdrlen = isSpiderHeader(t) + return hdrlen + + +class SpiderImageFile(ImageFile.ImageFile): + + format = "SPIDER" + format_description = "Spider 2D image" + _close_exclusive_fp_after_loading = False + + def _open(self): + # check header + n = 27 * 4 # read 27 float values + f = self.fp.read(n) + + try: + self.bigendian = 1 + t = struct.unpack('>27f', f) # try big-endian first + hdrlen = isSpiderHeader(t) + if hdrlen == 0: + self.bigendian = 0 + t = struct.unpack('<27f', f) # little-endian + hdrlen = isSpiderHeader(t) + if hdrlen == 0: + raise SyntaxError("not a valid Spider file") + except struct.error: + raise SyntaxError("not a valid Spider file") + + h = (99,) + t # add 1 value : spider header index starts at 1 + iform = int(h[5]) + if iform != 1: + raise SyntaxError("not a Spider 2D image") + + self._size = int(h[12]), int(h[2]) # size in pixels (width, height) + self.istack = int(h[24]) + self.imgnumber = int(h[27]) + + if self.istack == 0 and self.imgnumber == 0: + # stk=0, img=0: a regular 2D image + offset = hdrlen + self._nimages = 1 + elif self.istack > 0 and self.imgnumber == 0: + # stk>0, img=0: Opening the stack for the first time + self.imgbytes = int(h[12]) * int(h[2]) * 4 + self.hdrlen = hdrlen + self._nimages = int(h[26]) + # Point to the first image in the stack + offset = hdrlen * 2 + self.imgnumber = 1 + elif self.istack == 0 and self.imgnumber > 0: + # stk=0, img>0: an image within the stack + offset = hdrlen + self.stkoffset + self.istack = 2 # So Image knows it's still a stack + else: + raise SyntaxError("inconsistent stack header 
values") + + if self.bigendian: + self.rawmode = "F;32BF" + else: + self.rawmode = "F;32F" + self.mode = "F" + + self.tile = [ + ("raw", (0, 0) + self.size, offset, + (self.rawmode, 0, 1))] + self.__fp = self.fp # FIXME: hack + + @property + def n_frames(self): + return self._nimages + + @property + def is_animated(self): + return self._nimages > 1 + + # 1st image index is zero (although SPIDER imgnumber starts at 1) + def tell(self): + if self.imgnumber < 1: + return 0 + else: + return self.imgnumber - 1 + + def seek(self, frame): + if self.istack == 0: + raise EOFError("attempt to seek in a non-stack file") + if not self._seek_check(frame): + return + self.stkoffset = self.hdrlen + frame * (self.hdrlen + self.imgbytes) + self.fp = self.__fp + self.fp.seek(self.stkoffset) + self._open() + + # returns a byte image after rescaling to 0..255 + def convert2byte(self, depth=255): + (minimum, maximum) = self.getextrema() + m = 1 + if maximum != minimum: + m = depth / (maximum-minimum) + b = -m * minimum + return self.point(lambda i, m=m, b=b: i * m + b).convert("L") + + # returns a ImageTk.PhotoImage object, after rescaling to 0..255 + def tkPhotoImage(self): + from PIL import ImageTk + return ImageTk.PhotoImage(self.convert2byte(), palette=256) + + +# -------------------------------------------------------------------- +# Image series + +# given a list of filenames, return a list of images +def loadImageSeries(filelist=None): + """create a list of Image.images for use in montage""" + if filelist is None or len(filelist) < 1: + return + + imglist = [] + for img in filelist: + if not os.path.exists(img): + print("unable to find %s" % img) + continue + try: + im = Image.open(img).convert2byte() + except: + if not isSpiderImage(img): + print(img + " is not a Spider image file") + continue + im.info['filename'] = img + imglist.append(im) + return imglist + + +# -------------------------------------------------------------------- +# For saving images in Spider format + +def 
makeSpiderHeader(im): + nsam, nrow = im.size + lenbyt = nsam * 4 # There are labrec records in the header + labrec = 1024 / lenbyt + if 1024 % lenbyt != 0: + labrec += 1 + labbyt = labrec * lenbyt + hdr = [] + nvalues = int(labbyt / 4) + for i in range(nvalues): + hdr.append(0.0) + + if len(hdr) < 23: + return [] + + # NB these are Fortran indices + hdr[1] = 1.0 # nslice (=1 for an image) + hdr[2] = float(nrow) # number of rows per slice + hdr[5] = 1.0 # iform for 2D image + hdr[12] = float(nsam) # number of pixels per line + hdr[13] = float(labrec) # number of records in file header + hdr[22] = float(labbyt) # total number of bytes in header + hdr[23] = float(lenbyt) # record length in bytes + + # adjust for Fortran indexing + hdr = hdr[1:] + hdr.append(0.0) + # pack binary data into a string + hdrstr = [] + for v in hdr: + hdrstr.append(struct.pack('f', v)) + return hdrstr + + +def _save(im, fp, filename): + if im.mode[0] != "F": + im = im.convert('F') + + hdr = makeSpiderHeader(im) + if len(hdr) < 256: + raise IOError("Error creating Spider header") + + # write the SPIDER header + fp.writelines(hdr) + + rawmode = "F;32NF" # 32-bit native floating point + ImageFile._save(im, fp, [("raw", (0, 0)+im.size, 0, (rawmode, 0, 1))]) + + +def _save_spider(im, fp, filename): + # get the filename extension and register it with Image + ext = os.path.splitext(filename)[1] + Image.register_extension(SpiderImageFile.format, ext) + _save(im, fp, filename) + +# -------------------------------------------------------------------- + + +Image.register_open(SpiderImageFile.format, SpiderImageFile) +Image.register_save(SpiderImageFile.format, _save_spider) + +if __name__ == "__main__": + + if len(sys.argv) < 2: + print("Syntax: python SpiderImagePlugin.py [infile] [outfile]") + sys.exit() + + filename = sys.argv[1] + if not isSpiderImage(filename): + print("input image must be in Spider format") + sys.exit() + + im = Image.open(filename) + print("image: " + str(im)) + print("format: " 
+ str(im.format)) + print("size: " + str(im.size)) + print("mode: " + str(im.mode)) + print("max, min: ", end=' ') + print(im.getextrema()) + + if len(sys.argv) > 2: + outfile = sys.argv[2] + + # perform some image operation + im = im.transpose(Image.FLIP_LEFT_RIGHT) + print( + "saving a flipped version of %s as %s " % + (os.path.basename(filename), outfile)) + im.save(outfile, SpiderImageFile.format) diff --git a/thesisenv/lib/python3.6/site-packages/PIL/SunImagePlugin.py b/thesisenv/lib/python3.6/site-packages/PIL/SunImagePlugin.py new file mode 100644 index 0000000..898350e --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/PIL/SunImagePlugin.py @@ -0,0 +1,137 @@ +# +# The Python Imaging Library. +# $Id$ +# +# Sun image file handling +# +# History: +# 1995-09-10 fl Created +# 1996-05-28 fl Fixed 32-bit alignment +# 1998-12-29 fl Import ImagePalette module +# 2001-12-18 fl Fixed palette loading (from Jean-Claude Rimbault) +# +# Copyright (c) 1997-2001 by Secret Labs AB +# Copyright (c) 1995-1996 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + + +from . import Image, ImageFile, ImagePalette +from ._binary import i32be as i32 + +__version__ = "0.3" + + +def _accept(prefix): + return len(prefix) >= 4 and i32(prefix) == 0x59a66a95 + + +## +# Image plugin for Sun raster files. 
class SunImageFile(ImageFile.ImageFile):
    """ImageFile subclass for Sun raster (.ras) files."""

    format = "SUN"
    format_description = "Sun Raster File"

    def _open(self):

        # The Sun Raster file header is 32 bytes in length
        # and has the following format:

        # typedef struct _SunRaster
        # {
        #     DWORD MagicNumber;      /* Magic (identification) number */
        #     DWORD Width;            /* Width of image in pixels */
        #     DWORD Height;           /* Height of image in pixels */
        #     DWORD Depth;            /* Number of bits per pixel */
        #     DWORD Length;           /* Size of image data in bytes */
        #     DWORD Type;             /* Type of raster file */
        #     DWORD ColorMapType;     /* Type of color map */
        #     DWORD ColorMapLength;   /* Size of the color map in bytes */
        # } SUNRASTER;

        # HEAD
        s = self.fp.read(32)
        if i32(s) != 0x59a66a95:
            raise SyntaxError("not an SUN raster file")

        # Pixel data follows the header (plus palette, if any).
        offset = 32

        self._size = i32(s[4:8]), i32(s[8:12])

        depth = i32(s[12:16])
        # data_length = i32(s[16:20])   # unreliable, ignore.
        file_type = i32(s[20:24])
        palette_type = i32(s[24:28])    # 0: None, 1: RGB, 2: Raw/arbitrary
        palette_length = i32(s[28:32])

        # Map bit depth (and RGB-vs-BGR flavor) to a PIL mode/rawmode pair.
        if depth == 1:
            self.mode, rawmode = "1", "1;I"
        elif depth == 4:
            self.mode, rawmode = "L", "L;4"
        elif depth == 8:
            self.mode = rawmode = "L"
        elif depth == 24:
            if file_type == 3:
                self.mode, rawmode = "RGB", "RGB"
            else:
                self.mode, rawmode = "RGB", "BGR"
        elif depth == 32:
            if file_type == 3:
                self.mode, rawmode = 'RGB', 'RGBX'
            else:
                self.mode, rawmode = 'RGB', 'BGRX'
        else:
            raise SyntaxError("Unsupported Mode/Bit Depth")

        if palette_length:
            if palette_length > 1024:
                raise SyntaxError("Unsupported Color Palette Length")

            if palette_type != 1:
                raise SyntaxError("Unsupported Palette Type")

            offset = offset + palette_length
            self.palette = ImagePalette.raw("RGB;L",
                                            self.fp.read(palette_length))
            if self.mode == "L":
                # A paletted greyscale file is really mode "P".
                self.mode = "P"
                rawmode = rawmode.replace('L', 'P')

        # 16 bit boundaries on stride
        stride = ((self.size[0] * depth + 15) // 16) * 2

        # file type: Type is the version (or flavor) of the bitmap
        # file. The following values are typically found in the Type
        # field:
        # 0000h Old
        # 0001h Standard
        # 0002h Byte-encoded
        # 0003h RGB format
        # 0004h TIFF format
        # 0005h IFF format
        # FFFFh Experimental

        # Old and standard are the same, except for the length tag.
        # byte-encoded is run-length-encoded
        # RGB looks similar to standard, but RGB byte order
        # TIFF and IFF mean that they were converted from T/IFF
        # Experimental means that it's something else.
        # (https://www.fileformat.info/format/sunraster/egff.htm)

        if file_type in (0, 1, 3, 4, 5):
            self.tile = [("raw", (0, 0)+self.size, offset, (rawmode, stride))]
        elif file_type == 2:
            self.tile = [("sun_rle", (0, 0)+self.size, offset, rawmode)]
        else:
            raise SyntaxError('Unsupported Sun Raster file type')

#
# registry


Image.register_open(SunImageFile.format, SunImageFile, _accept)

Image.register_extension(SunImageFile.format, ".ras")

# --- next vendored file in the original dump:
# --- thesisenv/lib/python3.6/site-packages/PIL/TarIO.py ---
#
# The Python Imaging Library.
# $Id$
#
# read files from within a tar file
#
# History:
# 95-06-18 fl Created
# 96-05-28 fl Open files in binary mode
#
# Copyright (c) Secret Labs AB 1997.
# Copyright (c) Fredrik Lundh 1995-96.
#
# See the README file for information on usage and redistribution.
#

from . import ContainerIO


##
# A file object that provides read access to a given member of a TAR
# file.

class TarIO(ContainerIO.ContainerIO):
    """Read-only file object for one member of an uncompressed TAR file.

    Scans the archive's 512-byte headers until the requested member is
    found, then exposes that byte range through ContainerIO.
    """

    def __init__(self, tarfile, file):
        """
        Create file object.

        :param tarfile: Name of TAR file.
        :param file: Name of member file.
        """
        # NOTE(review): fh is never closed if the member is not found
        # (the IOError paths below leak it); upstream later turned TarIO
        # into a context manager — confirm before long-lived use.
        fh = open(tarfile, "rb")

        while True:

            # Each TAR header occupies one full 512-byte block.
            s = fh.read(512)
            if len(s) != 512:
                raise IOError("unexpected end of tar file")

            name = s[:100].decode('utf-8')
            # A name starting with NUL marks the archive's terminating
            # zero-block, i.e. the member was not found.
            i = name.find('\0')
            if i == 0:
                raise IOError("cannot find subfile")
            if i > 0:
                name = name[:i]

            # Member size is stored as an octal ASCII field.
            size = int(s[124:135], 8)

            if file == name:
                break

            # Skip the member's data, rounded up to a 512-byte boundary.
            fh.seek((size + 511) & (~511), 1)

        # Open region
        ContainerIO.ContainerIO.__init__(self, fh, fh.tell(), size)

# --- next vendored file in the original dump:
# --- thesisenv/lib/python3.6/site-packages/PIL/TgaImagePlugin.py ---
#
# The Python Imaging Library.
# $Id$
#
# TGA file handling
#
# History:
# 95-09-01 fl created (reads 24-bit files only)
# 97-01-04 fl support more TGA versions, including compressed images
# 98-07-04 fl fixed orientation and alpha layer bugs
# 98-09-11 fl fixed orientation for runlength decoder
#
# Copyright (c) Secret Labs AB 1997-98.
# Copyright (c) Fredrik Lundh 1995-97.
#
# See the README file for information on usage and redistribution.
#


from . import Image, ImageFile, ImagePalette
from ._binary import i8, i16le as i16, o8, o16le as o16

import warnings

__version__ = "0.3"


#
# --------------------------------------------------------------------
# Read TGA file  (original comment said "RGA", a typo)


MODES = {
    # map imagetype/depth to rawmode
    (1, 8): "P",
    (3, 1): "1",
    (3, 8): "L",
    (3, 16): "LA",
    (2, 16): "BGR;5",
    (2, 24): "BGR",
    (2, 32): "BGRA",
}


##
# Image plugin for Targa files.
+ +class TgaImageFile(ImageFile.ImageFile): + + format = "TGA" + format_description = "Targa" + + def _open(self): + + # process header + s = self.fp.read(18) + + id_len = i8(s[0]) + + colormaptype = i8(s[1]) + imagetype = i8(s[2]) + + depth = i8(s[16]) + + flags = i8(s[17]) + + self._size = i16(s[12:]), i16(s[14:]) + + # validate header fields + if colormaptype not in (0, 1) or\ + self.size[0] <= 0 or self.size[1] <= 0 or\ + depth not in (1, 8, 16, 24, 32): + raise SyntaxError("not a TGA file") + + # image mode + if imagetype in (3, 11): + self.mode = "L" + if depth == 1: + self.mode = "1" # ??? + elif depth == 16: + self.mode = "LA" + elif imagetype in (1, 9): + self.mode = "P" + elif imagetype in (2, 10): + self.mode = "RGB" + if depth == 32: + self.mode = "RGBA" + else: + raise SyntaxError("unknown TGA mode") + + # orientation + orientation = flags & 0x30 + if orientation == 0x20: + orientation = 1 + elif not orientation: + orientation = -1 + else: + raise SyntaxError("unknown TGA orientation") + + self.info["orientation"] = orientation + + if imagetype & 8: + self.info["compression"] = "tga_rle" + + if id_len: + self.info["id_section"] = self.fp.read(id_len) + + if colormaptype: + # read palette + start, size, mapdepth = i16(s[3:]), i16(s[5:]), i16(s[7:]) + if mapdepth == 16: + self.palette = ImagePalette.raw( + "BGR;16", b"\0"*2*start + self.fp.read(2*size)) + elif mapdepth == 24: + self.palette = ImagePalette.raw( + "BGR", b"\0"*3*start + self.fp.read(3*size)) + elif mapdepth == 32: + self.palette = ImagePalette.raw( + "BGRA", b"\0"*4*start + self.fp.read(4*size)) + + # setup tile descriptor + try: + rawmode = MODES[(imagetype & 7, depth)] + if imagetype & 8: + # compressed + self.tile = [("tga_rle", (0, 0)+self.size, + self.fp.tell(), (rawmode, orientation, depth))] + else: + self.tile = [("raw", (0, 0)+self.size, + self.fp.tell(), (rawmode, 0, orientation))] + except KeyError: + pass # cannot decode + +# +# 
-------------------------------------------------------------------- +# Write TGA file + + +SAVE = { + "1": ("1", 1, 0, 3), + "L": ("L", 8, 0, 3), + "LA": ("LA", 16, 0, 3), + "P": ("P", 8, 1, 1), + "RGB": ("BGR", 24, 0, 2), + "RGBA": ("BGRA", 32, 0, 2), +} + + +def _save(im, fp, filename): + + try: + rawmode, bits, colormaptype, imagetype = SAVE[im.mode] + except KeyError: + raise IOError("cannot write mode %s as TGA" % im.mode) + + if "rle" in im.encoderinfo: + rle = im.encoderinfo["rle"] + else: + compression = im.encoderinfo.get("compression", + im.info.get("compression")) + rle = compression == "tga_rle" + if rle: + imagetype += 8 + + id_section = im.encoderinfo.get("id_section", + im.info.get("id_section", "")) + id_len = len(id_section) + if id_len > 255: + id_len = 255 + id_section = id_section[:255] + warnings.warn("id_section has been trimmed to 255 characters") + + if colormaptype: + colormapfirst, colormaplength, colormapentry = 0, 256, 24 + else: + colormapfirst, colormaplength, colormapentry = 0, 0, 0 + + if im.mode in ("LA", "RGBA"): + flags = 8 + else: + flags = 0 + + orientation = im.encoderinfo.get("orientation", + im.info.get("orientation", -1)) + if orientation > 0: + flags = flags | 0x20 + + fp.write(o8(id_len) + + o8(colormaptype) + + o8(imagetype) + + o16(colormapfirst) + + o16(colormaplength) + + o8(colormapentry) + + o16(0) + + o16(0) + + o16(im.size[0]) + + o16(im.size[1]) + + o8(bits) + + o8(flags)) + + if id_section: + fp.write(id_section) + + if colormaptype: + fp.write(im.im.getpalette("RGB", "BGR")) + + if rle: + ImageFile._save( + im, + fp, + [("tga_rle", (0, 0) + im.size, 0, (rawmode, orientation))]) + else: + ImageFile._save( + im, fp, [("raw", (0, 0) + im.size, 0, (rawmode, 0, orientation))]) + + # write targa version 2 footer + fp.write(b"\000" * 8 + b"TRUEVISION-XFILE." 
+ b"\000") + +# +# -------------------------------------------------------------------- +# Registry + + +Image.register_open(TgaImageFile.format, TgaImageFile) +Image.register_save(TgaImageFile.format, _save) + +Image.register_extension(TgaImageFile.format, ".tga") diff --git a/thesisenv/lib/python3.6/site-packages/PIL/TiffImagePlugin.py b/thesisenv/lib/python3.6/site-packages/PIL/TiffImagePlugin.py new file mode 100644 index 0000000..5059a13 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/PIL/TiffImagePlugin.py @@ -0,0 +1,1832 @@ +# +# The Python Imaging Library. +# $Id$ +# +# TIFF file handling +# +# TIFF is a flexible, if somewhat aged, image file format originally +# defined by Aldus. Although TIFF supports a wide variety of pixel +# layouts and compression methods, the name doesn't really stand for +# "thousands of incompatible file formats," it just feels that way. +# +# To read TIFF data from a stream, the stream must be seekable. For +# progressive decoding, make sure to use TIFF files where the tag +# directory is placed first in the file. +# +# History: +# 1995-09-01 fl Created +# 1996-05-04 fl Handle JPEGTABLES tag +# 1996-05-18 fl Fixed COLORMAP support +# 1997-01-05 fl Fixed PREDICTOR support +# 1997-08-27 fl Added support for rational tags (from Perry Stoll) +# 1998-01-10 fl Fixed seek/tell (from Jan Blom) +# 1998-07-15 fl Use private names for internal variables +# 1999-06-13 fl Rewritten for PIL 1.0 (1.0) +# 2000-10-11 fl Additional fixes for Python 2.0 (1.1) +# 2001-04-17 fl Fixed rewind support (seek to frame 0) (1.2) +# 2001-05-12 fl Added write support for more tags (from Greg Couch) (1.3) +# 2001-12-18 fl Added workaround for broken Matrox library +# 2002-01-18 fl Don't mess up if photometric tag is missing (D. 
Alan Stewart) +# 2003-05-19 fl Check FILLORDER tag +# 2003-09-26 fl Added RGBa support +# 2004-02-24 fl Added DPI support; fixed rational write support +# 2005-02-07 fl Added workaround for broken Corel Draw 10 files +# 2006-01-09 fl Added support for float/double tags (from Russell Nelson) +# +# Copyright (c) 1997-2006 by Secret Labs AB. All rights reserved. +# Copyright (c) 1995-1997 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +from __future__ import division, print_function + +from . import Image, ImageFile, ImagePalette, TiffTags +from ._binary import i8, o8 +from ._util import py3 + +from fractions import Fraction +from numbers import Number, Rational + +import io +import itertools +import os +import struct +import sys +import warnings + +from .TiffTags import TYPES + +try: + # Python 3 + from collections.abc import MutableMapping +except ImportError: + # Python 2.7 + from collections import MutableMapping + + +__version__ = "1.3.5" +DEBUG = False # Needs to be merged with the new logging approach. + +# Set these to true to force use of libtiff for reading or writing. 
+READ_LIBTIFF = False +WRITE_LIBTIFF = False +IFD_LEGACY_API = True + +II = b"II" # little-endian (Intel style) +MM = b"MM" # big-endian (Motorola style) + +# +# -------------------------------------------------------------------- +# Read TIFF files + +# a few tag names, just to make the code below a bit more readable +IMAGEWIDTH = 256 +IMAGELENGTH = 257 +BITSPERSAMPLE = 258 +COMPRESSION = 259 +PHOTOMETRIC_INTERPRETATION = 262 +FILLORDER = 266 +IMAGEDESCRIPTION = 270 +STRIPOFFSETS = 273 +SAMPLESPERPIXEL = 277 +ROWSPERSTRIP = 278 +STRIPBYTECOUNTS = 279 +X_RESOLUTION = 282 +Y_RESOLUTION = 283 +PLANAR_CONFIGURATION = 284 +RESOLUTION_UNIT = 296 +SOFTWARE = 305 +DATE_TIME = 306 +ARTIST = 315 +PREDICTOR = 317 +COLORMAP = 320 +TILEOFFSETS = 324 +EXTRASAMPLES = 338 +SAMPLEFORMAT = 339 +JPEGTABLES = 347 +COPYRIGHT = 33432 +IPTC_NAA_CHUNK = 33723 # newsphoto properties +PHOTOSHOP_CHUNK = 34377 # photoshop properties +ICCPROFILE = 34675 +EXIFIFD = 34665 +XMP = 700 + +# https://github.com/imagej/ImageJA/blob/master/src/main/java/ij/io/TiffDecoder.java +IMAGEJ_META_DATA_BYTE_COUNTS = 50838 +IMAGEJ_META_DATA = 50839 + +COMPRESSION_INFO = { + # Compression => pil compression name + 1: "raw", + 2: "tiff_ccitt", + 3: "group3", + 4: "group4", + 5: "tiff_lzw", + 6: "tiff_jpeg", # obsolete + 7: "jpeg", + 8: "tiff_adobe_deflate", + 32771: "tiff_raw_16", # 16-bit padding + 32773: "packbits", + 32809: "tiff_thunderscan", + 32946: "tiff_deflate", + 34676: "tiff_sgilog", + 34677: "tiff_sgilog24", +} + +COMPRESSION_INFO_REV = {v: k for k, v in COMPRESSION_INFO.items()} + +OPEN_INFO = { + # (ByteOrder, PhotoInterpretation, SampleFormat, FillOrder, BitsPerSample, + # ExtraSamples) => mode, rawmode + (II, 0, (1,), 1, (1,), ()): ("1", "1;I"), + (MM, 0, (1,), 1, (1,), ()): ("1", "1;I"), + (II, 0, (1,), 2, (1,), ()): ("1", "1;IR"), + (MM, 0, (1,), 2, (1,), ()): ("1", "1;IR"), + (II, 1, (1,), 1, (1,), ()): ("1", "1"), + (MM, 1, (1,), 1, (1,), ()): ("1", "1"), + (II, 1, (1,), 2, (1,), ()): ("1", 
"1;R"), + (MM, 1, (1,), 2, (1,), ()): ("1", "1;R"), + + (II, 0, (1,), 1, (2,), ()): ("L", "L;2I"), + (MM, 0, (1,), 1, (2,), ()): ("L", "L;2I"), + (II, 0, (1,), 2, (2,), ()): ("L", "L;2IR"), + (MM, 0, (1,), 2, (2,), ()): ("L", "L;2IR"), + (II, 1, (1,), 1, (2,), ()): ("L", "L;2"), + (MM, 1, (1,), 1, (2,), ()): ("L", "L;2"), + (II, 1, (1,), 2, (2,), ()): ("L", "L;2R"), + (MM, 1, (1,), 2, (2,), ()): ("L", "L;2R"), + + (II, 0, (1,), 1, (4,), ()): ("L", "L;4I"), + (MM, 0, (1,), 1, (4,), ()): ("L", "L;4I"), + (II, 0, (1,), 2, (4,), ()): ("L", "L;4IR"), + (MM, 0, (1,), 2, (4,), ()): ("L", "L;4IR"), + (II, 1, (1,), 1, (4,), ()): ("L", "L;4"), + (MM, 1, (1,), 1, (4,), ()): ("L", "L;4"), + (II, 1, (1,), 2, (4,), ()): ("L", "L;4R"), + (MM, 1, (1,), 2, (4,), ()): ("L", "L;4R"), + + (II, 0, (1,), 1, (8,), ()): ("L", "L;I"), + (MM, 0, (1,), 1, (8,), ()): ("L", "L;I"), + (II, 0, (1,), 2, (8,), ()): ("L", "L;IR"), + (MM, 0, (1,), 2, (8,), ()): ("L", "L;IR"), + (II, 1, (1,), 1, (8,), ()): ("L", "L"), + (MM, 1, (1,), 1, (8,), ()): ("L", "L"), + (II, 1, (1,), 2, (8,), ()): ("L", "L;R"), + (MM, 1, (1,), 2, (8,), ()): ("L", "L;R"), + + (II, 1, (1,), 1, (12,), ()): ("I;16", "I;12"), + + (II, 1, (1,), 1, (16,), ()): ("I;16", "I;16"), + (MM, 1, (1,), 1, (16,), ()): ("I;16B", "I;16B"), + (II, 1, (2,), 1, (16,), ()): ("I", "I;16S"), + (MM, 1, (2,), 1, (16,), ()): ("I", "I;16BS"), + + (II, 0, (3,), 1, (32,), ()): ("F", "F;32F"), + (MM, 0, (3,), 1, (32,), ()): ("F", "F;32BF"), + (II, 1, (1,), 1, (32,), ()): ("I", "I;32N"), + (II, 1, (2,), 1, (32,), ()): ("I", "I;32S"), + (MM, 1, (2,), 1, (32,), ()): ("I", "I;32BS"), + (II, 1, (3,), 1, (32,), ()): ("F", "F;32F"), + (MM, 1, (3,), 1, (32,), ()): ("F", "F;32BF"), + + (II, 1, (1,), 1, (8, 8), (2,)): ("LA", "LA"), + (MM, 1, (1,), 1, (8, 8), (2,)): ("LA", "LA"), + + (II, 2, (1,), 1, (8, 8, 8), ()): ("RGB", "RGB"), + (MM, 2, (1,), 1, (8, 8, 8), ()): ("RGB", "RGB"), + (II, 2, (1,), 2, (8, 8, 8), ()): ("RGB", "RGB;R"), + (MM, 2, (1,), 2, (8, 8, 8), ()): 
("RGB", "RGB;R"), + (II, 2, (1,), 1, (8, 8, 8, 8), ()): ("RGBA", "RGBA"), # missing ExtraSamples + (MM, 2, (1,), 1, (8, 8, 8, 8), ()): ("RGBA", "RGBA"), # missing ExtraSamples + (II, 2, (1,), 1, (8, 8, 8, 8), (0,)): ("RGBX", "RGBX"), + (MM, 2, (1,), 1, (8, 8, 8, 8), (0,)): ("RGBX", "RGBX"), + (II, 2, (1,), 1, (8, 8, 8, 8, 8), (0, 0)): ("RGBX", "RGBXX"), + (MM, 2, (1,), 1, (8, 8, 8, 8, 8), (0, 0)): ("RGBX", "RGBXX"), + (II, 2, (1,), 1, (8, 8, 8, 8, 8, 8), (0, 0, 0)): ("RGBX", "RGBXXX"), + (MM, 2, (1,), 1, (8, 8, 8, 8, 8, 8), (0, 0, 0)): ("RGBX", "RGBXXX"), + (II, 2, (1,), 1, (8, 8, 8, 8), (1,)): ("RGBA", "RGBa"), + (MM, 2, (1,), 1, (8, 8, 8, 8), (1,)): ("RGBA", "RGBa"), + (II, 2, (1,), 1, (8, 8, 8, 8, 8), (1, 0)): ("RGBA", "RGBaX"), + (MM, 2, (1,), 1, (8, 8, 8, 8, 8), (1, 0)): ("RGBA", "RGBaX"), + (II, 2, (1,), 1, (8, 8, 8, 8, 8, 8), (1, 0, 0)): ("RGBA", "RGBaXX"), + (MM, 2, (1,), 1, (8, 8, 8, 8, 8, 8), (1, 0, 0)): ("RGBA", "RGBaXX"), + (II, 2, (1,), 1, (8, 8, 8, 8), (2,)): ("RGBA", "RGBA"), + (MM, 2, (1,), 1, (8, 8, 8, 8), (2,)): ("RGBA", "RGBA"), + (II, 2, (1,), 1, (8, 8, 8, 8, 8), (2, 0)): ("RGBA", "RGBAX"), + (MM, 2, (1,), 1, (8, 8, 8, 8, 8), (2, 0)): ("RGBA", "RGBAX"), + (II, 2, (1,), 1, (8, 8, 8, 8, 8, 8), (2, 0, 0)): ("RGBA", "RGBAXX"), + (MM, 2, (1,), 1, (8, 8, 8, 8, 8, 8), (2, 0, 0)): ("RGBA", "RGBAXX"), + (II, 2, (1,), 1, (8, 8, 8, 8), (999,)): ("RGBA", "RGBA"), # Corel Draw 10 + (MM, 2, (1,), 1, (8, 8, 8, 8), (999,)): ("RGBA", "RGBA"), # Corel Draw 10 + + (II, 2, (1,), 1, (16, 16, 16), ()): ("RGB", "RGB;16L"), + (MM, 2, (1,), 1, (16, 16, 16), ()): ("RGB", "RGB;16B"), + (II, 2, (1,), 1, (16, 16, 16, 16), ()): ("RGBA", "RGBA;16L"), + (MM, 2, (1,), 1, (16, 16, 16, 16), ()): ("RGBA", "RGBA;16B"), + (II, 2, (1,), 1, (16, 16, 16, 16), (0,)): ("RGBX", "RGBX;16L"), + (MM, 2, (1,), 1, (16, 16, 16, 16), (0,)): ("RGBX", "RGBX;16B"), + (II, 2, (1,), 1, (16, 16, 16, 16), (1,)): ("RGBA", "RGBa;16L"), + (MM, 2, (1,), 1, (16, 16, 16, 16), (1,)): ("RGBA", "RGBa;16B"), + 
(II, 2, (1,), 1, (16, 16, 16, 16), (2,)): ("RGBA", "RGBA;16L"), + (MM, 2, (1,), 1, (16, 16, 16, 16), (2,)): ("RGBA", "RGBA;16B"), + + (II, 3, (1,), 1, (1,), ()): ("P", "P;1"), + (MM, 3, (1,), 1, (1,), ()): ("P", "P;1"), + (II, 3, (1,), 2, (1,), ()): ("P", "P;1R"), + (MM, 3, (1,), 2, (1,), ()): ("P", "P;1R"), + (II, 3, (1,), 1, (2,), ()): ("P", "P;2"), + (MM, 3, (1,), 1, (2,), ()): ("P", "P;2"), + (II, 3, (1,), 2, (2,), ()): ("P", "P;2R"), + (MM, 3, (1,), 2, (2,), ()): ("P", "P;2R"), + (II, 3, (1,), 1, (4,), ()): ("P", "P;4"), + (MM, 3, (1,), 1, (4,), ()): ("P", "P;4"), + (II, 3, (1,), 2, (4,), ()): ("P", "P;4R"), + (MM, 3, (1,), 2, (4,), ()): ("P", "P;4R"), + (II, 3, (1,), 1, (8,), ()): ("P", "P"), + (MM, 3, (1,), 1, (8,), ()): ("P", "P"), + (II, 3, (1,), 1, (8, 8), (2,)): ("PA", "PA"), + (MM, 3, (1,), 1, (8, 8), (2,)): ("PA", "PA"), + (II, 3, (1,), 2, (8,), ()): ("P", "P;R"), + (MM, 3, (1,), 2, (8,), ()): ("P", "P;R"), + + (II, 5, (1,), 1, (8, 8, 8, 8), ()): ("CMYK", "CMYK"), + (MM, 5, (1,), 1, (8, 8, 8, 8), ()): ("CMYK", "CMYK"), + (II, 5, (1,), 1, (8, 8, 8, 8, 8), (0,)): ("CMYK", "CMYKX"), + (MM, 5, (1,), 1, (8, 8, 8, 8, 8), (0,)): ("CMYK", "CMYKX"), + (II, 5, (1,), 1, (8, 8, 8, 8, 8, 8), (0, 0)): ("CMYK", "CMYKXX"), + (MM, 5, (1,), 1, (8, 8, 8, 8, 8, 8), (0, 0)): ("CMYK", "CMYKXX"), + + # JPEG compressed images handled by LibTiff and auto-converted to RGB + # Minimal Baseline TIFF requires YCbCr images to have 3 SamplesPerPixel + (II, 6, (1,), 1, (8, 8, 8), ()): ("RGB", "RGB"), + (MM, 6, (1,), 1, (8, 8, 8), ()): ("RGB", "RGB"), + + (II, 8, (1,), 1, (8, 8, 8), ()): ("LAB", "LAB"), + (MM, 8, (1,), 1, (8, 8, 8), ()): ("LAB", "LAB"), +} + +PREFIXES = [ + b"MM\x00\x2A", # Valid TIFF header with big-endian byte order + b"II\x2A\x00", # Valid TIFF header with little-endian byte order + b"MM\x2A\x00", # Invalid TIFF header, assume big-endian + b"II\x00\x2A", # Invalid TIFF header, assume little-endian +] + + +def _accept(prefix): + return prefix[:4] in PREFIXES + + 
+def _limit_rational(val, max_val): + inv = abs(val) > 1 + n_d = IFDRational(1 / val if inv else val).limit_rational(max_val) + return n_d[::-1] if inv else n_d + + +## +# Wrapper for TIFF IFDs. + +_load_dispatch = {} +_write_dispatch = {} + + +class IFDRational(Rational): + """ Implements a rational class where 0/0 is a legal value to match + the in the wild use of exif rationals. + + e.g., DigitalZoomRatio - 0.00/0.00 indicates that no digital zoom was used + """ + + """ If the denominator is 0, store this as a float('nan'), otherwise store + as a fractions.Fraction(). Delegate as appropriate + + """ + + __slots__ = ('_numerator', '_denominator', '_val') + + def __init__(self, value, denominator=1): + """ + :param value: either an integer numerator, a + float/rational/other number, or an IFDRational + :param denominator: Optional integer denominator + """ + self._denominator = denominator + self._numerator = value + self._val = float(1) + + if isinstance(value, Fraction): + self._numerator = value.numerator + self._denominator = value.denominator + self._val = value + + if isinstance(value, IFDRational): + self._denominator = value.denominator + self._numerator = value.numerator + self._val = value._val + return + + if denominator == 0: + self._val = float('nan') + return + + elif denominator == 1: + self._val = Fraction(value) + else: + self._val = Fraction(value, denominator) + + @property + def numerator(a): + return a._numerator + + @property + def denominator(a): + return a._denominator + + def limit_rational(self, max_denominator): + """ + + :param max_denominator: Integer, the maximum denominator value + :returns: Tuple of (numerator, denominator) + """ + + if self.denominator == 0: + return (self.numerator, self.denominator) + + f = self._val.limit_denominator(max_denominator) + return (f.numerator, f.denominator) + + def __repr__(self): + return str(float(self._val)) + + def __hash__(self): + return self._val.__hash__() + + def __eq__(self, other): + 
return self._val == other + + def _delegate(op): + def delegate(self, *args): + return getattr(self._val, op)(*args) + return delegate + + """ a = ['add','radd', 'sub', 'rsub','div', 'rdiv', 'mul', 'rmul', + 'truediv', 'rtruediv', 'floordiv', + 'rfloordiv','mod','rmod', 'pow','rpow', 'pos', 'neg', + 'abs', 'trunc', 'lt', 'gt', 'le', 'ge', 'nonzero', + 'ceil', 'floor', 'round'] + print("\n".join("__%s__ = _delegate('__%s__')" % (s,s) for s in a)) + """ + + __add__ = _delegate('__add__') + __radd__ = _delegate('__radd__') + __sub__ = _delegate('__sub__') + __rsub__ = _delegate('__rsub__') + __div__ = _delegate('__div__') + __rdiv__ = _delegate('__rdiv__') + __mul__ = _delegate('__mul__') + __rmul__ = _delegate('__rmul__') + __truediv__ = _delegate('__truediv__') + __rtruediv__ = _delegate('__rtruediv__') + __floordiv__ = _delegate('__floordiv__') + __rfloordiv__ = _delegate('__rfloordiv__') + __mod__ = _delegate('__mod__') + __rmod__ = _delegate('__rmod__') + __pow__ = _delegate('__pow__') + __rpow__ = _delegate('__rpow__') + __pos__ = _delegate('__pos__') + __neg__ = _delegate('__neg__') + __abs__ = _delegate('__abs__') + __trunc__ = _delegate('__trunc__') + __lt__ = _delegate('__lt__') + __gt__ = _delegate('__gt__') + __le__ = _delegate('__le__') + __ge__ = _delegate('__ge__') + __nonzero__ = _delegate('__nonzero__') + __ceil__ = _delegate('__ceil__') + __floor__ = _delegate('__floor__') + __round__ = _delegate('__round__') + + +class ImageFileDirectory_v2(MutableMapping): + """This class represents a TIFF tag directory. To speed things up, we + don't decode tags unless they're asked for. + + Exposes a dictionary interface of the tags in the directory:: + + ifd = ImageFileDirectory_v2() + ifd[key] = 'Some Data' + ifd.tagtype[key] = 2 + print(ifd[key]) + 'Some Data' + + Individual values are returned as the strings or numbers, sequences are + returned as tuples of the values. 
+ + The tiff metadata type of each item is stored in a dictionary of + tag types in + `~PIL.TiffImagePlugin.ImageFileDirectory_v2.tagtype`. The types + are read from a tiff file, guessed from the type added, or added + manually. + + Data Structures: + + * self.tagtype = {} + + * Key: numerical tiff tag number + * Value: integer corresponding to the data type from + ~PIL.TiffTags.TYPES` + + .. versionadded:: 3.0.0 + """ + """ + Documentation: + + 'internal' data structures: + * self._tags_v2 = {} Key: numerical tiff tag number + Value: decoded data, as tuple for multiple values + * self._tagdata = {} Key: numerical tiff tag number + Value: undecoded byte string from file + * self._tags_v1 = {} Key: numerical tiff tag number + Value: decoded data in the v1 format + + Tags will be found in the private attributes self._tagdata, and in + self._tags_v2 once decoded. + + Self.legacy_api is a value for internal use, and shouldn't be + changed from outside code. In cooperation with the + ImageFileDirectory_v1 class, if legacy_api is true, then decoded + tags will be populated into both _tags_v1 and _tags_v2. _Tags_v2 + will be used if this IFD is used in the TIFF save routine. Tags + should be read from tags_v1 if legacy_api == true. + + """ + + def __init__(self, ifh=b"II\052\0\0\0\0\0", prefix=None): + """Initialize an ImageFileDirectory. + + To construct an ImageFileDirectory from a real file, pass the 8-byte + magic header to the constructor. To only set the endianness, pass it + as the 'prefix' keyword argument. + + :param ifh: One of the accepted magic headers (cf. PREFIXES); also sets + endianness. + :param prefix: Override the endianness of the file. 
+ """ + if ifh[:4] not in PREFIXES: + raise SyntaxError("not a TIFF file (header %r not valid)" % ifh) + self._prefix = prefix if prefix is not None else ifh[:2] + if self._prefix == MM: + self._endian = ">" + elif self._prefix == II: + self._endian = "<" + else: + raise SyntaxError("not a TIFF IFD") + self.reset() + self.next, = self._unpack("L", ifh[4:]) + self._legacy_api = False + + prefix = property(lambda self: self._prefix) + offset = property(lambda self: self._offset) + legacy_api = property(lambda self: self._legacy_api) + + @legacy_api.setter + def legacy_api(self, value): + raise Exception("Not allowing setting of legacy api") + + def reset(self): + self._tags_v1 = {} # will remain empty if legacy_api is false + self._tags_v2 = {} # main tag storage + self._tagdata = {} + self.tagtype = {} # added 2008-06-05 by Florian Hoech + self._next = None + self._offset = None + + def __str__(self): + return str(dict(self)) + + def named(self): + """ + :returns: dict of name|key: value + + Returns the complete tag dictionary, with named tags where possible. 
+ """ + return dict((TiffTags.lookup(code).name, value) + for code, value in self.items()) + + def __len__(self): + return len(set(self._tagdata) | set(self._tags_v2)) + + def __getitem__(self, tag): + if tag not in self._tags_v2: # unpack on the fly + data = self._tagdata[tag] + typ = self.tagtype[tag] + size, handler = self._load_dispatch[typ] + self[tag] = handler(self, data, self.legacy_api) # check type + val = self._tags_v2[tag] + if self.legacy_api and not isinstance(val, (tuple, bytes)): + val = val, + return val + + def __contains__(self, tag): + return tag in self._tags_v2 or tag in self._tagdata + + if not py3: + def has_key(self, tag): + return tag in self + + def __setitem__(self, tag, value): + self._setitem(tag, value, self.legacy_api) + + def _setitem(self, tag, value, legacy_api): + basetypes = (Number, bytes, str) + if not py3: + basetypes += unicode, + + info = TiffTags.lookup(tag) + values = [value] if isinstance(value, basetypes) else value + + if tag not in self.tagtype: + if info.type: + self.tagtype[tag] = info.type + else: + self.tagtype[tag] = 7 + if all(isinstance(v, IFDRational) for v in values): + self.tagtype[tag] = 5 + elif all(isinstance(v, int) for v in values): + if all(v < 2 ** 16 for v in values): + self.tagtype[tag] = 3 + else: + self.tagtype[tag] = 4 + elif all(isinstance(v, float) for v in values): + self.tagtype[tag] = 12 + else: + if py3: + if all(isinstance(v, str) for v in values): + self.tagtype[tag] = 2 + else: + # Never treat data as binary by default on Python 2. 
+ self.tagtype[tag] = 2 + + if self.tagtype[tag] == 7 and py3: + values = [value.encode("ascii", 'replace') if isinstance( + value, str) else value] + elif self.tagtype[tag] == 5: + values = [float(v) if isinstance(v, int) else v + for v in values] + + values = tuple(info.cvt_enum(value) for value in values) + + dest = self._tags_v1 if legacy_api else self._tags_v2 + + # Three branches: + # Spec'd length == 1, Actual length 1, store as element + # Spec'd length == 1, Actual > 1, Warn and truncate. Formerly barfed. + # No Spec, Actual length 1, Formerly (<4.2) returned a 1 element tuple. + # Don't mess with the legacy api, since it's frozen. + if (info.length == 1) or \ + (info.length is None and len(values) == 1 and not legacy_api): + # Don't mess with the legacy api, since it's frozen. + if legacy_api and self.tagtype[tag] in [5, 10]: # rationals + values = values, + try: + dest[tag], = values + except ValueError: + # We've got a builtin tag with 1 expected entry + warnings.warn( + "Metadata Warning, tag %s had too many entries: %s, expected 1" % ( + tag, len(values))) + dest[tag] = values[0] + + else: + # Spec'd length > 1 or undefined + # Unspec'd, and length > 1 + dest[tag] = values + + def __delitem__(self, tag): + self._tags_v2.pop(tag, None) + self._tags_v1.pop(tag, None) + self._tagdata.pop(tag, None) + + def __iter__(self): + return iter(set(self._tagdata) | set(self._tags_v2)) + + def _unpack(self, fmt, data): + return struct.unpack(self._endian + fmt, data) + + def _pack(self, fmt, *values): + return struct.pack(self._endian + fmt, *values) + + def _register_loader(idx, size): + def decorator(func): + from .TiffTags import TYPES + if func.__name__.startswith("load_"): + TYPES[idx] = func.__name__[5:].replace("_", " ") + _load_dispatch[idx] = size, func + return func + return decorator + + def _register_writer(idx): + def decorator(func): + _write_dispatch[idx] = func + return func + return decorator + + def _register_basic(idx_fmt_name): + from .TiffTags 
import TYPES + idx, fmt, name = idx_fmt_name + TYPES[idx] = name + size = struct.calcsize("=" + fmt) + _load_dispatch[idx] = size, lambda self, data, legacy_api=True: ( + self._unpack("{}{}".format(len(data) // size, fmt), data)) + _write_dispatch[idx] = lambda self, *values: ( + b"".join(self._pack(fmt, value) for value in values)) + + list(map(_register_basic, + [(3, "H", "short"), + (4, "L", "long"), + (6, "b", "signed byte"), + (8, "h", "signed short"), + (9, "l", "signed long"), + (11, "f", "float"), + (12, "d", "double")])) + + @_register_loader(1, 1) # Basic type, except for the legacy API. + def load_byte(self, data, legacy_api=True): + return data + + @_register_writer(1) # Basic type, except for the legacy API. + def write_byte(self, data): + return data + + @_register_loader(2, 1) + def load_string(self, data, legacy_api=True): + if data.endswith(b"\0"): + data = data[:-1] + return data.decode("latin-1", "replace") + + @_register_writer(2) + def write_string(self, value): + # remerge of https://github.com/python-pillow/Pillow/pull/1416 + if sys.version_info.major == 2: + value = value.decode('ascii', 'replace') + return b"" + value.encode('ascii', 'replace') + b"\0" + + @_register_loader(5, 8) + def load_rational(self, data, legacy_api=True): + vals = self._unpack("{}L".format(len(data) // 4), data) + + def combine(a, b): return (a, b) if legacy_api else IFDRational(a, b) + return tuple(combine(num, denom) + for num, denom in zip(vals[::2], vals[1::2])) + + @_register_writer(5) + def write_rational(self, *values): + return b"".join(self._pack("2L", *_limit_rational(frac, 2 ** 31)) + for frac in values) + + @_register_loader(7, 1) + def load_undefined(self, data, legacy_api=True): + return data + + @_register_writer(7) + def write_undefined(self, value): + return value + + @_register_loader(10, 8) + def load_signed_rational(self, data, legacy_api=True): + vals = self._unpack("{}l".format(len(data) // 4), data) + + def combine(a, b): return (a, b) if 
legacy_api else IFDRational(a, b) + return tuple(combine(num, denom) + for num, denom in zip(vals[::2], vals[1::2])) + + @_register_writer(10) + def write_signed_rational(self, *values): + return b"".join(self._pack("2L", *_limit_rational(frac, 2 ** 30)) + for frac in values) + + def _ensure_read(self, fp, size): + ret = fp.read(size) + if len(ret) != size: + raise IOError("Corrupt EXIF data. " + + "Expecting to read %d bytes but only got %d. " % + (size, len(ret))) + return ret + + def load(self, fp): + + self.reset() + self._offset = fp.tell() + + try: + for i in range(self._unpack("H", self._ensure_read(fp, 2))[0]): + tag, typ, count, data = self._unpack("HHL4s", + self._ensure_read(fp, 12)) + if DEBUG: + tagname = TiffTags.lookup(tag).name + typname = TYPES.get(typ, "unknown") + print("tag: %s (%d) - type: %s (%d)" % + (tagname, tag, typname, typ), end=" ") + + try: + unit_size, handler = self._load_dispatch[typ] + except KeyError: + if DEBUG: + print("- unsupported type", typ) + continue # ignore unsupported type + size = count * unit_size + if size > 4: + here = fp.tell() + offset, = self._unpack("L", data) + if DEBUG: + print("Tag Location: %s - Data Location: %s" % + (here, offset), end=" ") + fp.seek(offset) + data = ImageFile._safe_read(fp, size) + fp.seek(here) + else: + data = data[:size] + + if len(data) != size: + warnings.warn("Possibly corrupt EXIF data. " + "Expecting to read %d bytes but only got %d." 
+ " Skipping tag %s" % (size, len(data), tag)) + continue + + if not data: + continue + + self._tagdata[tag] = data + self.tagtype[tag] = typ + + if DEBUG: + if size > 32: + print("- value: " % size) + else: + print("- value:", self[tag]) + + self.next, = self._unpack("L", self._ensure_read(fp, 4)) + except IOError as msg: + warnings.warn(str(msg)) + return + + def save(self, fp): + + if fp.tell() == 0: # skip TIFF header on subsequent pages + # tiff header -- PIL always starts the first IFD at offset 8 + fp.write(self._prefix + self._pack("HL", 42, 8)) + + # FIXME What about tagdata? + fp.write(self._pack("H", len(self._tags_v2))) + + entries = [] + offset = fp.tell() + len(self._tags_v2) * 12 + 4 + stripoffsets = None + + # pass 1: convert tags to binary format + # always write tags in ascending order + for tag, value in sorted(self._tags_v2.items()): + if tag == STRIPOFFSETS: + stripoffsets = len(entries) + typ = self.tagtype.get(tag) + if DEBUG: + print("Tag %s, Type: %s, Value: %s" % (tag, typ, value)) + values = value if isinstance(value, tuple) else (value,) + data = self._write_dispatch[typ](self, *values) + if DEBUG: + tagname = TiffTags.lookup(tag).name + typname = TYPES.get(typ, "unknown") + print("save: %s (%d) - type: %s (%d)" % + (tagname, tag, typname, typ), end=" ") + if len(data) >= 16: + print("- value: " % len(data)) + else: + print("- value:", values) + + # count is sum of lengths for string and arbitrary data + count = len(data) if typ in [2, 7] else len(values) + # figure out if data fits into the entry + if len(data) <= 4: + entries.append((tag, typ, count, data.ljust(4, b"\0"), b"")) + else: + entries.append((tag, typ, count, self._pack("L", offset), + data)) + offset += (len(data) + 1) // 2 * 2 # pad to word + + # update strip offset data to point beyond auxiliary data + if stripoffsets is not None: + tag, typ, count, value, data = entries[stripoffsets] + if data: + raise NotImplementedError( + "multistrip support not yet implemented") + 
value = self._pack("L", self._unpack("L", value)[0] + offset) + entries[stripoffsets] = tag, typ, count, value, data + + # pass 2: write entries to file + for tag, typ, count, value, data in entries: + if DEBUG > 1: + print(tag, typ, count, repr(value), repr(data)) + fp.write(self._pack("HHL4s", tag, typ, count, value)) + + # -- overwrite here for multi-page -- + fp.write(b"\0\0\0\0") # end of entries + + # pass 3: write auxiliary data to file + for tag, typ, count, value, data in entries: + fp.write(data) + if len(data) & 1: + fp.write(b"\0") + + return offset + + +ImageFileDirectory_v2._load_dispatch = _load_dispatch +ImageFileDirectory_v2._write_dispatch = _write_dispatch +for idx, name in TYPES.items(): + name = name.replace(" ", "_") + setattr(ImageFileDirectory_v2, "load_" + name, _load_dispatch[idx][1]) + setattr(ImageFileDirectory_v2, "write_" + name, _write_dispatch[idx]) +del _load_dispatch, _write_dispatch, idx, name + + +# Legacy ImageFileDirectory support. +class ImageFileDirectory_v1(ImageFileDirectory_v2): + """This class represents the **legacy** interface to a TIFF tag directory. + + Exposes a dictionary interface of the tags in the directory:: + + ifd = ImageFileDirectory_v1() + ifd[key] = 'Some Data' + ifd.tagtype[key] = 2 + print(ifd[key]) + ('Some Data',) + + Also contains a dictionary of tag types as read from the tiff image file, + `~PIL.TiffImagePlugin.ImageFileDirectory_v1.tagtype`. + + Values are returned as a tuple. + + .. deprecated:: 3.0.0 + """ + def __init__(self, *args, **kwargs): + ImageFileDirectory_v2.__init__(self, *args, **kwargs) + self._legacy_api = True + + tags = property(lambda self: self._tags_v1) + tagdata = property(lambda self: self._tagdata) + + @classmethod + def from_v2(cls, original): + """ Returns an + :py:class:`~PIL.TiffImagePlugin.ImageFileDirectory_v1` + instance with the same data as is contained in the original + :py:class:`~PIL.TiffImagePlugin.ImageFileDirectory_v2` + instance. 
+ + :returns: :py:class:`~PIL.TiffImagePlugin.ImageFileDirectory_v1` + + """ + + ifd = cls(prefix=original.prefix) + ifd._tagdata = original._tagdata + ifd.tagtype = original.tagtype + ifd.next = original.next # an indicator for multipage tiffs + return ifd + + def to_v2(self): + """ Returns an + :py:class:`~PIL.TiffImagePlugin.ImageFileDirectory_v2` + instance with the same data as is contained in the original + :py:class:`~PIL.TiffImagePlugin.ImageFileDirectory_v1` + instance. + + :returns: :py:class:`~PIL.TiffImagePlugin.ImageFileDirectory_v2` + + """ + + ifd = ImageFileDirectory_v2(prefix=self.prefix) + ifd._tagdata = dict(self._tagdata) + ifd.tagtype = dict(self.tagtype) + ifd._tags_v2 = dict(self._tags_v2) + return ifd + + def __contains__(self, tag): + return tag in self._tags_v1 or tag in self._tagdata + + def __len__(self): + return len(set(self._tagdata) | set(self._tags_v1)) + + def __iter__(self): + return iter(set(self._tagdata) | set(self._tags_v1)) + + def __setitem__(self, tag, value): + for legacy_api in (False, True): + self._setitem(tag, value, legacy_api) + + def __getitem__(self, tag): + if tag not in self._tags_v1: # unpack on the fly + data = self._tagdata[tag] + typ = self.tagtype[tag] + size, handler = self._load_dispatch[typ] + for legacy in (False, True): + self._setitem(tag, handler(self, data, legacy), legacy) + val = self._tags_v1[tag] + if not isinstance(val, (tuple, bytes)): + val = val, + return val + + +# undone -- switch this pointer when IFD_LEGACY_API == False +ImageFileDirectory = ImageFileDirectory_v1 + + +## +# Image plugin for TIFF files. 
+ +class TiffImageFile(ImageFile.ImageFile): + + format = "TIFF" + format_description = "Adobe TIFF" + _close_exclusive_fp_after_loading = False + + def _open(self): + "Open the first image in a TIFF file" + + # Header + ifh = self.fp.read(8) + + # image file directory (tag dictionary) + self.tag_v2 = ImageFileDirectory_v2(ifh) + + # legacy tag/ifd entries will be filled in later + self.tag = self.ifd = None + + # setup frame pointers + self.__first = self.__next = self.tag_v2.next + self.__frame = -1 + self.__fp = self.fp + self._frame_pos = [] + self._n_frames = None + self._is_animated = None + + if DEBUG: + print("*** TiffImageFile._open ***") + print("- __first:", self.__first) + print("- ifh: ", ifh) + + # and load the first frame + self._seek(0) + + @property + def n_frames(self): + if self._n_frames is None: + current = self.tell() + try: + while True: + self._seek(self.tell() + 1) + except EOFError: + self._n_frames = self.tell() + 1 + self.seek(current) + return self._n_frames + + @property + def is_animated(self): + if self._is_animated is None: + if self._n_frames is not None: + self._is_animated = self._n_frames != 1 + else: + current = self.tell() + + try: + self.seek(1) + self._is_animated = True + except EOFError: + self._is_animated = False + + self.seek(current) + return self._is_animated + + def seek(self, frame): + "Select a given frame as current image" + if not self._seek_check(frame): + return + self._seek(frame) + # Create a new core image object on second and + # subsequent frames in the image. Image may be + # different size/mode. 
+ Image._decompression_bomb_check(self.size) + self.im = Image.core.new(self.mode, self.size) + + def _seek(self, frame): + self.fp = self.__fp + while len(self._frame_pos) <= frame: + if not self.__next: + raise EOFError("no more images in TIFF file") + if DEBUG: + print("Seeking to frame %s, on frame %s, " + "__next %s, location: %s" % + (frame, self.__frame, self.__next, self.fp.tell())) + # reset python3 buffered io handle in case fp + # was passed to libtiff, invalidating the buffer + self.fp.tell() + self.fp.seek(self.__next) + self._frame_pos.append(self.__next) + if DEBUG: + print("Loading tags, location: %s" % self.fp.tell()) + self.tag_v2.load(self.fp) + self.__next = self.tag_v2.next + self.__frame += 1 + self.fp.seek(self._frame_pos[frame]) + self.tag_v2.load(self.fp) + self.__next = self.tag_v2.next + # fill the legacy tag/ifd entries + self.tag = self.ifd = ImageFileDirectory_v1.from_v2(self.tag_v2) + self.__frame = frame + self._setup() + + def tell(self): + "Return the current frame number" + return self.__frame + + @property + def size(self): + return self._size + + @size.setter + def size(self, value): + warnings.warn( + 'Setting the size of a TIFF image directly is deprecated, and will' + ' be removed in a future version. Use the resize method instead.', + DeprecationWarning + ) + self._size = value + + def load(self): + if self.use_load_libtiff: + return self._load_libtiff() + return super(TiffImageFile, self).load() + + def load_end(self): + # allow closing if we're on the first frame, there's no next + # This is the ImageFile.load path only, libtiff specific below. 
+ if self.__frame == 0 and not self.__next: + self._close_exclusive_fp_after_loading = True + + def _load_libtiff(self): + """ Overload method triggered when we detect a compressed tiff + Calls out to libtiff """ + + pixel = Image.Image.load(self) + + if self.tile is None: + raise IOError("cannot load this image") + if not self.tile: + return pixel + + self.load_prepare() + + if not len(self.tile) == 1: + raise IOError("Not exactly one tile") + + # (self._compression, (extents tuple), + # 0, (rawmode, self._compression, fp)) + extents = self.tile[0][1] + args = list(self.tile[0][3]) + [self.tag_v2.offset] + + # To be nice on memory footprint, if there's a + # file descriptor, use that instead of reading + # into a string in python. + # libtiff closes the file descriptor, so pass in a dup. + try: + fp = hasattr(self.fp, "fileno") and os.dup(self.fp.fileno()) + # flush the file descriptor, prevents error on pypy 2.4+ + # should also eliminate the need for fp.tell for py3 + # in _seek + if hasattr(self.fp, "flush"): + self.fp.flush() + except IOError: + # io.BytesIO have a fileno, but returns an IOError if + # it doesn't use a file descriptor. + fp = False + + if fp: + args[2] = fp + + decoder = Image._getdecoder(self.mode, 'libtiff', tuple(args), + self.decoderconfig) + try: + decoder.setimage(self.im, extents) + except ValueError: + raise IOError("Couldn't set the image") + + if hasattr(self.fp, "getvalue"): + # We've got a stringio like thing passed in. Yay for all in memory. + # The decoder needs the entire file in one shot, so there's not + # a lot we can do here other than give it the entire file. + # unless we could do something like get the address of the + # underlying string for stringio. + # + # Rearranging for supporting byteio items, since they have a fileno + # that returns an IOError if there's no underlying fp. Easier to + # deal with here by reordering. + if DEBUG: + print("have getvalue. 
just sending in a string from getvalue") + n, err = decoder.decode(self.fp.getvalue()) + elif hasattr(self.fp, "fileno"): + # we've got a actual file on disk, pass in the fp. + if DEBUG: + print("have fileno, calling fileno version of the decoder.") + self.fp.seek(0) + # 4 bytes, otherwise the trace might error out + n, err = decoder.decode(b"fpfp") + else: + # we have something else. + if DEBUG: + print("don't have fileno or getvalue. just reading") + # UNDONE -- so much for that buffer size thing. + n, err = decoder.decode(self.fp.read()) + + self.tile = [] + self.readonly = 0 + # libtiff closed the fp in a, we need to close self.fp, if possible + if self._exclusive_fp: + if self.__frame == 0 and not self.__next: + self.fp.close() + self.fp = None # might be shared + + if err < 0: + raise IOError(err) + + return Image.Image.load(self) + + def _setup(self): + "Setup this image object based on current tags" + + if 0xBC01 in self.tag_v2: + raise IOError("Windows Media Photo files not yet supported") + + # extract relevant tags + self._compression = COMPRESSION_INFO[self.tag_v2.get(COMPRESSION, 1)] + self._planar_configuration = self.tag_v2.get(PLANAR_CONFIGURATION, 1) + + # photometric is a required tag, but not everyone is reading + # the specification + photo = self.tag_v2.get(PHOTOMETRIC_INTERPRETATION, 0) + + fillorder = self.tag_v2.get(FILLORDER, 1) + + if DEBUG: + print("*** Summary ***") + print("- compression:", self._compression) + print("- photometric_interpretation:", photo) + print("- planar_configuration:", self._planar_configuration) + print("- fill_order:", fillorder) + print("- YCbCr subsampling:", self.tag.get(530)) + + # size + xsize = self.tag_v2.get(IMAGEWIDTH) + ysize = self.tag_v2.get(IMAGELENGTH) + self._size = xsize, ysize + + if DEBUG: + print("- size:", self.size) + + sampleFormat = self.tag_v2.get(SAMPLEFORMAT, (1,)) + if (len(sampleFormat) > 1 + and max(sampleFormat) == min(sampleFormat) == 1): + # SAMPLEFORMAT is properly per band, so an 
RGB image will + # be (1,1,1). But, we don't support per band pixel types, + # and anything more than one band is a uint8. So, just + # take the first element. Revisit this if adding support + # for more exotic images. + sampleFormat = (1,) + + bps_tuple = self.tag_v2.get(BITSPERSAMPLE, (1,)) + extra_tuple = self.tag_v2.get(EXTRASAMPLES, ()) + if photo in (2, 6, 8): # RGB, YCbCr, LAB + bps_count = 3 + elif photo == 5: # CMYK + bps_count = 4 + else: + bps_count = 1 + bps_count += len(extra_tuple) + # Some files have only one value in bps_tuple, + # while should have more. Fix it + if bps_count > len(bps_tuple) and len(bps_tuple) == 1: + bps_tuple = bps_tuple * bps_count + + # mode: check photometric interpretation and bits per pixel + key = (self.tag_v2.prefix, photo, sampleFormat, fillorder, + bps_tuple, extra_tuple) + if DEBUG: + print("format key:", key) + try: + self.mode, rawmode = OPEN_INFO[key] + except KeyError: + if DEBUG: + print("- unsupported format") + raise SyntaxError("unknown pixel mode") + + if DEBUG: + print("- raw mode:", rawmode) + print("- pil mode:", self.mode) + + self.info["compression"] = self._compression + + xres = self.tag_v2.get(X_RESOLUTION, 1) + yres = self.tag_v2.get(Y_RESOLUTION, 1) + + if xres and yres: + resunit = self.tag_v2.get(RESOLUTION_UNIT) + if resunit == 2: # dots per inch + self.info["dpi"] = xres, yres + elif resunit == 3: # dots per centimeter. convert to dpi + self.info["dpi"] = xres * 2.54, yres * 2.54 + elif resunit is None: # used to default to 1, but now 2) + self.info["dpi"] = xres, yres + # For backward compatibility, + # we also preserve the old behavior + self.info["resolution"] = xres, yres + else: # No absolute unit of measurement + self.info["resolution"] = xres, yres + + # build tile descriptors + x = y = layer = 0 + self.tile = [] + self.use_load_libtiff = READ_LIBTIFF or self._compression != 'raw' + if self.use_load_libtiff: + # Decoder expects entire file as one tile. 
+ # There's a buffer size limit in load (64k) + # so large g4 images will fail if we use that + # function. + # + # Setup the one tile for the whole image, then + # use the _load_libtiff function. + + # libtiff handles the fillmode for us, so 1;IR should + # actually be 1;I. Including the R double reverses the + # bits, so stripes of the image are reversed. See + # https://github.com/python-pillow/Pillow/issues/279 + if fillorder == 2: + # Replace fillorder with fillorder=1 + key = key[:3] + (1,) + key[4:] + if DEBUG: + print("format key:", key) + # this should always work, since all the + # fillorder==2 modes have a corresponding + # fillorder=1 mode + self.mode, rawmode = OPEN_INFO[key] + # libtiff always returns the bytes in native order. + # we're expecting image byte order. So, if the rawmode + # contains I;16, we need to convert from native to image + # byte order. + if rawmode == 'I;16': + rawmode = 'I;16N' + if ';16B' in rawmode: + rawmode = rawmode.replace(';16B', ';16N') + if ';16L' in rawmode: + rawmode = rawmode.replace(';16L', ';16N') + + # Offset in the tile tuple is 0, we go from 0,0 to + # w,h, and we only do this once -- eds + a = (rawmode, self._compression, False) + self.tile.append( + (self._compression, + (0, 0, xsize, ysize), + 0, a)) + + elif STRIPOFFSETS in self.tag_v2 or TILEOFFSETS in self.tag_v2: + # striped image + if STRIPOFFSETS in self.tag_v2: + offsets = self.tag_v2[STRIPOFFSETS] + h = self.tag_v2.get(ROWSPERSTRIP, ysize) + w = self.size[0] + else: + # tiled image + offsets = self.tag_v2[TILEOFFSETS] + w = self.tag_v2.get(322) + h = self.tag_v2.get(323) + + for offset in offsets: + if x + w > xsize: + stride = w * sum(bps_tuple) / 8 # bytes per line + else: + stride = 0 + + tile_rawmode = rawmode + if self._planar_configuration == 2: + # each band on it's own layer + tile_rawmode = rawmode[layer] + # adjust stride width accordingly + stride /= bps_count + + a = (tile_rawmode, int(stride), 1) + self.tile.append( + (self._compression, 
+ (x, y, min(x+w, xsize), min(y+h, ysize)), + offset, a)) + x = x + w + if x >= self.size[0]: + x, y = 0, y + h + if y >= self.size[1]: + x = y = 0 + layer += 1 + else: + if DEBUG: + print("- unsupported data organization") + raise SyntaxError("unknown data organization") + + # Fix up info. + if ICCPROFILE in self.tag_v2: + self.info['icc_profile'] = self.tag_v2[ICCPROFILE] + + # fixup palette descriptor + + if self.mode == "P": + palette = [o8(b // 256) for b in self.tag_v2[COLORMAP]] + self.palette = ImagePalette.raw("RGB;L", b"".join(palette)) + + +# +# -------------------------------------------------------------------- +# Write TIFF files + +# little endian is default except for image modes with +# explicit big endian byte-order + +SAVE_INFO = { + # mode => rawmode, byteorder, photometrics, + # sampleformat, bitspersample, extra + "1": ("1", II, 1, 1, (1,), None), + "L": ("L", II, 1, 1, (8,), None), + "LA": ("LA", II, 1, 1, (8, 8), 2), + "P": ("P", II, 3, 1, (8,), None), + "PA": ("PA", II, 3, 1, (8, 8), 2), + "I": ("I;32S", II, 1, 2, (32,), None), + "I;16": ("I;16", II, 1, 1, (16,), None), + "I;16S": ("I;16S", II, 1, 2, (16,), None), + "F": ("F;32F", II, 1, 3, (32,), None), + "RGB": ("RGB", II, 2, 1, (8, 8, 8), None), + "RGBX": ("RGBX", II, 2, 1, (8, 8, 8, 8), 0), + "RGBA": ("RGBA", II, 2, 1, (8, 8, 8, 8), 2), + "CMYK": ("CMYK", II, 5, 1, (8, 8, 8, 8), None), + "YCbCr": ("YCbCr", II, 6, 1, (8, 8, 8), None), + "LAB": ("LAB", II, 8, 1, (8, 8, 8), None), + + "I;32BS": ("I;32BS", MM, 1, 2, (32,), None), + "I;16B": ("I;16B", MM, 1, 1, (16,), None), + "I;16BS": ("I;16BS", MM, 1, 2, (16,), None), + "F;32BF": ("F;32BF", MM, 1, 3, (32,), None), +} + + +def _save(im, fp, filename): + + try: + rawmode, prefix, photo, format, bits, extra = SAVE_INFO[im.mode] + except KeyError: + raise IOError("cannot write mode %s as TIFF" % im.mode) + + ifd = ImageFileDirectory_v2(prefix=prefix) + + compression = im.encoderinfo.get('compression', im.info.get('compression')) + if 
compression is None: + compression = 'raw' + + libtiff = WRITE_LIBTIFF or compression != 'raw' + + # required for color libtiff images + ifd[PLANAR_CONFIGURATION] = getattr(im, '_planar_configuration', 1) + + ifd[IMAGEWIDTH] = im.size[0] + ifd[IMAGELENGTH] = im.size[1] + + # write any arbitrary tags passed in as an ImageFileDirectory + info = im.encoderinfo.get("tiffinfo", {}) + if DEBUG: + print("Tiffinfo Keys: %s" % list(info)) + if isinstance(info, ImageFileDirectory_v1): + info = info.to_v2() + for key in info: + ifd[key] = info.get(key) + try: + ifd.tagtype[key] = info.tagtype[key] + except: + pass # might not be an IFD, Might not have populated type + + # additions written by Greg Couch, gregc@cgl.ucsf.edu + # inspired by image-sig posting from Kevin Cazabon, kcazabon@home.com + if hasattr(im, 'tag_v2'): + # preserve tags from original TIFF image file + for key in (RESOLUTION_UNIT, X_RESOLUTION, Y_RESOLUTION, + IPTC_NAA_CHUNK, PHOTOSHOP_CHUNK, XMP): + if key in im.tag_v2: + ifd[key] = im.tag_v2[key] + ifd.tagtype[key] = im.tag_v2.tagtype[key] + + # preserve ICC profile (should also work when saving other formats + # which support profiles as TIFF) -- 2008-06-06 Florian Hoech + if "icc_profile" in im.info: + ifd[ICCPROFILE] = im.info["icc_profile"] + + for key, name in [(IMAGEDESCRIPTION, "description"), + (X_RESOLUTION, "resolution"), + (Y_RESOLUTION, "resolution"), + (X_RESOLUTION, "x_resolution"), + (Y_RESOLUTION, "y_resolution"), + (RESOLUTION_UNIT, "resolution_unit"), + (SOFTWARE, "software"), + (DATE_TIME, "date_time"), + (ARTIST, "artist"), + (COPYRIGHT, "copyright")]: + if name in im.encoderinfo: + ifd[key] = im.encoderinfo[name] + + dpi = im.encoderinfo.get("dpi") + if dpi: + ifd[RESOLUTION_UNIT] = 2 + ifd[X_RESOLUTION] = dpi[0] + ifd[Y_RESOLUTION] = dpi[1] + + if bits != (1,): + ifd[BITSPERSAMPLE] = bits + if len(bits) != 1: + ifd[SAMPLESPERPIXEL] = len(bits) + if extra is not None: + ifd[EXTRASAMPLES] = extra + if format != 1: + ifd[SAMPLEFORMAT] = 
format + + ifd[PHOTOMETRIC_INTERPRETATION] = photo + + if im.mode == "P": + lut = im.im.getpalette("RGB", "RGB;L") + ifd[COLORMAP] = tuple(i8(v) * 256 for v in lut) + # data orientation + stride = len(bits) * ((im.size[0]*bits[0]+7)//8) + ifd[ROWSPERSTRIP] = im.size[1] + ifd[STRIPBYTECOUNTS] = stride * im.size[1] + ifd[STRIPOFFSETS] = 0 # this is adjusted by IFD writer + # no compression by default: + ifd[COMPRESSION] = COMPRESSION_INFO_REV.get(compression, 1) + + if libtiff: + if DEBUG: + print("Saving using libtiff encoder") + print("Items: %s" % sorted(ifd.items())) + _fp = 0 + if hasattr(fp, "fileno"): + try: + fp.seek(0) + _fp = os.dup(fp.fileno()) + except io.UnsupportedOperation: + pass + + # STRIPOFFSETS and STRIPBYTECOUNTS are added by the library + # based on the data in the strip. + blocklist = [STRIPOFFSETS, STRIPBYTECOUNTS] + atts = {} + # bits per sample is a single short in the tiff directory, not a list. + atts[BITSPERSAMPLE] = bits[0] + # Merge the ones that we have with (optional) more bits from + # the original file, e.g x,y resolution so that we can + # save(load('')) == original file. + legacy_ifd = {} + if hasattr(im, 'tag'): + legacy_ifd = im.tag.to_v2() + for tag, value in itertools.chain(ifd.items(), + getattr(im, 'tag_v2', {}).items(), + legacy_ifd.items()): + # Libtiff can only process certain core items without adding + # them to the custom dictionary. It will segfault if it attempts + # to add a custom tag without the dictionary entry + # + # UNDONE -- add code for the custom dictionary + if tag not in TiffTags.LIBTIFF_CORE: + continue + if tag not in atts and tag not in blocklist: + if isinstance(value, str if py3 else unicode): + atts[tag] = value.encode('ascii', 'replace') + b"\0" + elif isinstance(value, IFDRational): + atts[tag] = float(value) + else: + atts[tag] = value + + if DEBUG: + print("Converted items: %s" % sorted(atts.items())) + + # libtiff always expects the bytes in native order. + # we're storing image byte order. 
So, if the rawmode + # contains I;16, we need to convert from native to image + # byte order. + if im.mode in ('I;16B', 'I;16'): + rawmode = 'I;16N' + + a = (rawmode, compression, _fp, filename, atts) + e = Image._getencoder(im.mode, 'libtiff', a, im.encoderconfig) + e.setimage(im.im, (0, 0)+im.size) + while True: + # undone, change to self.decodermaxblock: + l, s, d = e.encode(16*1024) + if not _fp: + fp.write(d) + if s: + break + if s < 0: + raise IOError("encoder error %d when writing image file" % s) + + else: + offset = ifd.save(fp) + + ImageFile._save(im, fp, [ + ("raw", (0, 0)+im.size, offset, (rawmode, stride, 1)) + ]) + + # -- helper for multi-page save -- + if "_debug_multipage" in im.encoderinfo: + # just to access o32 and o16 (using correct byte order) + im._debug_multipage = ifd + + +class AppendingTiffWriter: + fieldSizes = [ + 0, # None + 1, # byte + 1, # ascii + 2, # short + 4, # long + 8, # rational + 1, # sbyte + 1, # undefined + 2, # sshort + 4, # slong + 8, # srational + 4, # float + 8, # double + ] + + # StripOffsets = 273 + # FreeOffsets = 288 + # TileOffsets = 324 + # JPEGQTables = 519 + # JPEGDCTables = 520 + # JPEGACTables = 521 + Tags = {273, 288, 324, 519, 520, 521} + + def __init__(self, fn, new=False): + if hasattr(fn, 'read'): + self.f = fn + self.close_fp = False + else: + self.name = fn + self.close_fp = True + try: + self.f = io.open(fn, "w+b" if new else "r+b") + except IOError: + self.f = io.open(fn, "w+b") + self.beginning = self.f.tell() + self.setup() + + def setup(self): + # Reset everything. 
+ self.f.seek(self.beginning, os.SEEK_SET) + + self.whereToWriteNewIFDOffset = None + self.offsetOfNewPage = 0 + + self.IIMM = IIMM = self.f.read(4) + if not IIMM: + # empty file - first page + self.isFirst = True + return + + self.isFirst = False + if IIMM == b"II\x2a\x00": + self.setEndian("<") + elif IIMM == b"MM\x00\x2a": + self.setEndian(">") + else: + raise RuntimeError("Invalid TIFF file header") + + self.skipIFDs() + self.goToEnd() + + def finalize(self): + if self.isFirst: + return + + # fix offsets + self.f.seek(self.offsetOfNewPage) + + IIMM = self.f.read(4) + if not IIMM: + # raise RuntimeError("nothing written into new page") + # Make it easy to finish a frame without committing to a new one. + return + + if IIMM != self.IIMM: + raise RuntimeError("IIMM of new page doesn't match IIMM of " + "first page") + + IFDoffset = self.readLong() + IFDoffset += self.offsetOfNewPage + self.f.seek(self.whereToWriteNewIFDOffset) + self.writeLong(IFDoffset) + self.f.seek(IFDoffset) + self.fixIFD() + + def newFrame(self): + # Call this to finish a frame. 
+ self.finalize() + self.setup() + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_value, traceback): + if self.close_fp: + self.close() + return False + + def tell(self): + return self.f.tell() - self.offsetOfNewPage + + def seek(self, offset, whence): + if whence == os.SEEK_SET: + offset += self.offsetOfNewPage + + self.f.seek(offset, whence) + return self.tell() + + def goToEnd(self): + self.f.seek(0, os.SEEK_END) + pos = self.f.tell() + + # pad to 16 byte boundary + padBytes = 16 - pos % 16 + if 0 < padBytes < 16: + self.f.write(bytes(bytearray(padBytes))) + self.offsetOfNewPage = self.f.tell() + + def setEndian(self, endian): + self.endian = endian + self.longFmt = self.endian + "L" + self.shortFmt = self.endian + "H" + self.tagFormat = self.endian + "HHL" + + def skipIFDs(self): + while True: + IFDoffset = self.readLong() + if IFDoffset == 0: + self.whereToWriteNewIFDOffset = self.f.tell() - 4 + break + + self.f.seek(IFDoffset) + numTags = self.readShort() + self.f.seek(numTags * 12, os.SEEK_CUR) + + def write(self, data): + return self.f.write(data) + + def readShort(self): + value, = struct.unpack(self.shortFmt, self.f.read(2)) + return value + + def readLong(self): + value, = struct.unpack(self.longFmt, self.f.read(4)) + return value + + def rewriteLastShortToLong(self, value): + self.f.seek(-2, os.SEEK_CUR) + bytesWritten = self.f.write(struct.pack(self.longFmt, value)) + if bytesWritten is not None and bytesWritten != 4: + raise RuntimeError("wrote only %u bytes but wanted 4" % + bytesWritten) + + def rewriteLastShort(self, value): + self.f.seek(-2, os.SEEK_CUR) + bytesWritten = self.f.write(struct.pack(self.shortFmt, value)) + if bytesWritten is not None and bytesWritten != 2: + raise RuntimeError("wrote only %u bytes but wanted 2" % + bytesWritten) + + def rewriteLastLong(self, value): + self.f.seek(-4, os.SEEK_CUR) + bytesWritten = self.f.write(struct.pack(self.longFmt, value)) + if bytesWritten is not None and bytesWritten 
!= 4: + raise RuntimeError("wrote only %u bytes but wanted 4" % + bytesWritten) + + def writeShort(self, value): + bytesWritten = self.f.write(struct.pack(self.shortFmt, value)) + if bytesWritten is not None and bytesWritten != 2: + raise RuntimeError("wrote only %u bytes but wanted 2" % + bytesWritten) + + def writeLong(self, value): + bytesWritten = self.f.write(struct.pack(self.longFmt, value)) + if bytesWritten is not None and bytesWritten != 4: + raise RuntimeError("wrote only %u bytes but wanted 4" % + bytesWritten) + + def close(self): + self.finalize() + self.f.close() + + def fixIFD(self): + numTags = self.readShort() + + for i in range(numTags): + tag, fieldType, count = struct.unpack(self.tagFormat, + self.f.read(8)) + + fieldSize = self.fieldSizes[fieldType] + totalSize = fieldSize * count + isLocal = (totalSize <= 4) + if not isLocal: + offset = self.readLong() + offset += self.offsetOfNewPage + self.rewriteLastLong(offset) + + if tag in self.Tags: + curPos = self.f.tell() + + if isLocal: + self.fixOffsets(count, isShort=(fieldSize == 2), + isLong=(fieldSize == 4)) + self.f.seek(curPos + 4) + else: + self.f.seek(offset) + self.fixOffsets(count, isShort=(fieldSize == 2), + isLong=(fieldSize == 4)) + self.f.seek(curPos) + + offset = curPos = None + + elif isLocal: + # skip the locally stored value that is not an offset + self.f.seek(4, os.SEEK_CUR) + + def fixOffsets(self, count, isShort=False, isLong=False): + if not isShort and not isLong: + raise RuntimeError("offset is neither short nor long") + + for i in range(count): + offset = self.readShort() if isShort else self.readLong() + offset += self.offsetOfNewPage + if isShort and offset >= 65536: + # offset is now too large - we must convert shorts to longs + if count != 1: + raise RuntimeError("not implemented") # XXX TODO + + # simple case - the offset is just one and therefore it is + # local (not referenced with another offset) + self.rewriteLastShortToLong(offset) + self.f.seek(-10, os.SEEK_CUR) + 
self.writeShort(4) # rewrite the type to LONG + self.f.seek(8, os.SEEK_CUR) + elif isShort: + self.rewriteLastShort(offset) + else: + self.rewriteLastLong(offset) + + +def _save_all(im, fp, filename): + encoderinfo = im.encoderinfo.copy() + encoderconfig = im.encoderconfig + append_images = list(encoderinfo.get("append_images", [])) + if not hasattr(im, "n_frames") and not append_images: + return _save(im, fp, filename) + + cur_idx = im.tell() + try: + with AppendingTiffWriter(fp) as tf: + for ims in [im]+append_images: + ims.encoderinfo = encoderinfo + ims.encoderconfig = encoderconfig + if not hasattr(ims, "n_frames"): + nfr = 1 + else: + nfr = ims.n_frames + + for idx in range(nfr): + ims.seek(idx) + ims.load() + _save(ims, tf, filename) + tf.newFrame() + finally: + im.seek(cur_idx) + + +# +# -------------------------------------------------------------------- +# Register + +Image.register_open(TiffImageFile.format, TiffImageFile, _accept) +Image.register_save(TiffImageFile.format, _save) +Image.register_save_all(TiffImageFile.format, _save_all) + +Image.register_extensions(TiffImageFile.format, [".tif", ".tiff"]) + +Image.register_mime(TiffImageFile.format, "image/tiff") diff --git a/thesisenv/lib/python3.6/site-packages/PIL/TiffTags.py b/thesisenv/lib/python3.6/site-packages/PIL/TiffTags.py new file mode 100644 index 0000000..c1e14af --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/PIL/TiffTags.py @@ -0,0 +1,449 @@ +# +# The Python Imaging Library. +# $Id$ +# +# TIFF tags +# +# This module provides clear-text names for various well-known +# TIFF tags. the TIFF codec works just fine without it. +# +# Copyright (c) Secret Labs AB 1999. +# +# See the README file for information on usage and redistribution. +# + +## +# This module provides constants and clear-text names for various +# well-known TIFF tags. 
+## + +from collections import namedtuple + + +class TagInfo(namedtuple("_TagInfo", "value name type length enum")): + __slots__ = [] + + def __new__(cls, value=None, name="unknown", + type=None, length=None, enum=None): + return super(TagInfo, cls).__new__( + cls, value, name, type, length, enum or {}) + + def cvt_enum(self, value): + return self.enum.get(value, value) + + +def lookup(tag): + """ + :param tag: Integer tag number + :returns: Taginfo namedtuple, From the TAGS_V2 info if possible, + otherwise just populating the value and name from TAGS. + If the tag is not recognized, "unknown" is returned for the name + + """ + + return TAGS_V2.get(tag, TagInfo(tag, TAGS.get(tag, 'unknown'))) + + +## +# Map tag numbers to tag info. +# +# id: (Name, Type, Length, enum_values) +# +# The length here differs from the length in the tiff spec. For +# numbers, the tiff spec is for the number of fields returned. We +# agree here. For string-like types, the tiff spec uses the length of +# field in bytes. In Pillow, we are using the number of expected +# fields, in general 1 for string-like types. 
+ + +BYTE = 1 +ASCII = 2 +SHORT = 3 +LONG = 4 +RATIONAL = 5 +UNDEFINED = 7 +SIGNED_RATIONAL = 10 +DOUBLE = 12 + +TAGS_V2 = { + + 254: ("NewSubfileType", LONG, 1), + 255: ("SubfileType", SHORT, 1), + 256: ("ImageWidth", LONG, 1), + 257: ("ImageLength", LONG, 1), + 258: ("BitsPerSample", SHORT, 0), + 259: ("Compression", SHORT, 1, + {"Uncompressed": 1, "CCITT 1d": 2, "Group 3 Fax": 3, + "Group 4 Fax": 4, "LZW": 5, "JPEG": 6, "PackBits": 32773}), + + 262: ("PhotometricInterpretation", SHORT, 1, + {"WhiteIsZero": 0, "BlackIsZero": 1, "RGB": 2, "RGB Palette": 3, + "Transparency Mask": 4, "CMYK": 5, "YCbCr": 6, "CieLAB": 8, + "CFA": 32803, # TIFF/EP, Adobe DNG + "LinearRaw": 32892}), # Adobe DNG + 263: ("Threshholding", SHORT, 1), + 264: ("CellWidth", SHORT, 1), + 265: ("CellLength", SHORT, 1), + 266: ("FillOrder", SHORT, 1), + 269: ("DocumentName", ASCII, 1), + + 270: ("ImageDescription", ASCII, 1), + 271: ("Make", ASCII, 1), + 272: ("Model", ASCII, 1), + 273: ("StripOffsets", LONG, 0), + 274: ("Orientation", SHORT, 1), + 277: ("SamplesPerPixel", SHORT, 1), + 278: ("RowsPerStrip", LONG, 1), + 279: ("StripByteCounts", LONG, 0), + + 280: ("MinSampleValue", LONG, 0), + 281: ("MaxSampleValue", SHORT, 0), + 282: ("XResolution", RATIONAL, 1), + 283: ("YResolution", RATIONAL, 1), + 284: ("PlanarConfiguration", SHORT, 1, {"Contiguous": 1, "Separate": 2}), + 285: ("PageName", ASCII, 1), + 286: ("XPosition", RATIONAL, 1), + 287: ("YPosition", RATIONAL, 1), + 288: ("FreeOffsets", LONG, 1), + 289: ("FreeByteCounts", LONG, 1), + + 290: ("GrayResponseUnit", SHORT, 1), + 291: ("GrayResponseCurve", SHORT, 0), + 292: ("T4Options", LONG, 1), + 293: ("T6Options", LONG, 1), + 296: ("ResolutionUnit", SHORT, 1, {"none": 1, "inch": 2, "cm": 3}), + 297: ("PageNumber", SHORT, 2), + + 301: ("TransferFunction", SHORT, 0), + 305: ("Software", ASCII, 1), + 306: ("DateTime", ASCII, 1), + + 315: ("Artist", ASCII, 1), + 316: ("HostComputer", ASCII, 1), + 317: ("Predictor", SHORT, 1, {"none": 1, 
"Horizontal Differencing": 2}), + 318: ("WhitePoint", RATIONAL, 2), + 319: ("PrimaryChromaticities", RATIONAL, 6), + + 320: ("ColorMap", SHORT, 0), + 321: ("HalftoneHints", SHORT, 2), + 322: ("TileWidth", LONG, 1), + 323: ("TileLength", LONG, 1), + 324: ("TileOffsets", LONG, 0), + 325: ("TileByteCounts", LONG, 0), + + 332: ("InkSet", SHORT, 1), + 333: ("InkNames", ASCII, 1), + 334: ("NumberOfInks", SHORT, 1), + 336: ("DotRange", SHORT, 0), + 337: ("TargetPrinter", ASCII, 1), + 338: ("ExtraSamples", SHORT, 0), + 339: ("SampleFormat", SHORT, 0), + + 340: ("SMinSampleValue", DOUBLE, 0), + 341: ("SMaxSampleValue", DOUBLE, 0), + 342: ("TransferRange", SHORT, 6), + + 347: ("JPEGTables", UNDEFINED, 1), + + # obsolete JPEG tags + 512: ("JPEGProc", SHORT, 1), + 513: ("JPEGInterchangeFormat", LONG, 1), + 514: ("JPEGInterchangeFormatLength", LONG, 1), + 515: ("JPEGRestartInterval", SHORT, 1), + 517: ("JPEGLosslessPredictors", SHORT, 0), + 518: ("JPEGPointTransforms", SHORT, 0), + 519: ("JPEGQTables", LONG, 0), + 520: ("JPEGDCTables", LONG, 0), + 521: ("JPEGACTables", LONG, 0), + + 529: ("YCbCrCoefficients", RATIONAL, 3), + 530: ("YCbCrSubSampling", SHORT, 2), + 531: ("YCbCrPositioning", SHORT, 1), + 532: ("ReferenceBlackWhite", RATIONAL, 6), + + 700: ('XMP', BYTE, 1), + + 33432: ("Copyright", ASCII, 1), + 34377: ('PhotoshopInfo', BYTE, 1), + + # FIXME add more tags here + 34665: ("ExifIFD", SHORT, 1), + 34675: ('ICCProfile', UNDEFINED, 1), + 34853: ('GPSInfoIFD', BYTE, 1), + + # MPInfo + 45056: ("MPFVersion", UNDEFINED, 1), + 45057: ("NumberOfImages", LONG, 1), + 45058: ("MPEntry", UNDEFINED, 1), + 45059: ("ImageUIDList", UNDEFINED, 0), # UNDONE, check + 45060: ("TotalFrames", LONG, 1), + 45313: ("MPIndividualNum", LONG, 1), + 45569: ("PanOrientation", LONG, 1), + 45570: ("PanOverlap_H", RATIONAL, 1), + 45571: ("PanOverlap_V", RATIONAL, 1), + 45572: ("BaseViewpointNum", LONG, 1), + 45573: ("ConvergenceAngle", SIGNED_RATIONAL, 1), + 45574: ("BaselineLength", RATIONAL, 1), + 
45575: ("VerticalDivergence", SIGNED_RATIONAL, 1), + 45576: ("AxisDistance_X", SIGNED_RATIONAL, 1), + 45577: ("AxisDistance_Y", SIGNED_RATIONAL, 1), + 45578: ("AxisDistance_Z", SIGNED_RATIONAL, 1), + 45579: ("YawAngle", SIGNED_RATIONAL, 1), + 45580: ("PitchAngle", SIGNED_RATIONAL, 1), + 45581: ("RollAngle", SIGNED_RATIONAL, 1), + + 50741: ("MakerNoteSafety", SHORT, 1, {"Unsafe": 0, "Safe": 1}), + 50780: ("BestQualityScale", RATIONAL, 1), + 50838: ("ImageJMetaDataByteCounts", LONG, 0), # Can be more than one + 50839: ("ImageJMetaData", UNDEFINED, 1) # see Issue #2006 +} + +# Legacy Tags structure +# these tags aren't included above, but were in the previous versions +TAGS = {347: 'JPEGTables', + 700: 'XMP', + + # Additional Exif Info + 32932: 'Wang Annotation', + 33434: 'ExposureTime', + 33437: 'FNumber', + 33445: 'MD FileTag', + 33446: 'MD ScalePixel', + 33447: 'MD ColorTable', + 33448: 'MD LabName', + 33449: 'MD SampleInfo', + 33450: 'MD PrepDate', + 33451: 'MD PrepTime', + 33452: 'MD FileUnits', + 33550: 'ModelPixelScaleTag', + 33723: 'IptcNaaInfo', + 33918: 'INGR Packet Data Tag', + 33919: 'INGR Flag Registers', + 33920: 'IrasB Transformation Matrix', + 33922: 'ModelTiepointTag', + 34264: 'ModelTransformationTag', + 34377: 'PhotoshopInfo', + 34735: 'GeoKeyDirectoryTag', + 34736: 'GeoDoubleParamsTag', + 34737: 'GeoAsciiParamsTag', + 34850: 'ExposureProgram', + 34852: 'SpectralSensitivity', + 34855: 'ISOSpeedRatings', + 34856: 'OECF', + 34864: 'SensitivityType', + 34865: 'StandardOutputSensitivity', + 34866: 'RecommendedExposureIndex', + 34867: 'ISOSpeed', + 34868: 'ISOSpeedLatitudeyyy', + 34869: 'ISOSpeedLatitudezzz', + 34908: 'HylaFAX FaxRecvParams', + 34909: 'HylaFAX FaxSubAddress', + 34910: 'HylaFAX FaxRecvTime', + 36864: 'ExifVersion', + 36867: 'DateTimeOriginal', + 36868: 'DateTImeDigitized', + 37121: 'ComponentsConfiguration', + 37122: 'CompressedBitsPerPixel', + 37724: 'ImageSourceData', + 37377: 'ShutterSpeedValue', + 37378: 'ApertureValue', + 37379: 
'BrightnessValue', + 37380: 'ExposureBiasValue', + 37381: 'MaxApertureValue', + 37382: 'SubjectDistance', + 37383: 'MeteringMode', + 37384: 'LightSource', + 37385: 'Flash', + 37386: 'FocalLength', + 37396: 'SubjectArea', + 37500: 'MakerNote', + 37510: 'UserComment', + 37520: 'SubSec', + 37521: 'SubSecTimeOriginal', + 37522: 'SubsecTimeDigitized', + 40960: 'FlashPixVersion', + 40961: 'ColorSpace', + 40962: 'PixelXDimension', + 40963: 'PixelYDimension', + 40964: 'RelatedSoundFile', + 40965: 'InteroperabilityIFD', + 41483: 'FlashEnergy', + 41484: 'SpatialFrequencyResponse', + 41486: 'FocalPlaneXResolution', + 41487: 'FocalPlaneYResolution', + 41488: 'FocalPlaneResolutionUnit', + 41492: 'SubjectLocation', + 41493: 'ExposureIndex', + 41495: 'SensingMethod', + 41728: 'FileSource', + 41729: 'SceneType', + 41730: 'CFAPattern', + 41985: 'CustomRendered', + 41986: 'ExposureMode', + 41987: 'WhiteBalance', + 41988: 'DigitalZoomRatio', + 41989: 'FocalLengthIn35mmFilm', + 41990: 'SceneCaptureType', + 41991: 'GainControl', + 41992: 'Contrast', + 41993: 'Saturation', + 41994: 'Sharpness', + 41995: 'DeviceSettingDescription', + 41996: 'SubjectDistanceRange', + 42016: 'ImageUniqueID', + 42032: 'CameraOwnerName', + 42033: 'BodySerialNumber', + 42034: 'LensSpecification', + 42035: 'LensMake', + 42036: 'LensModel', + 42037: 'LensSerialNumber', + 42112: 'GDAL_METADATA', + 42113: 'GDAL_NODATA', + 42240: 'Gamma', + 50215: 'Oce Scanjob Description', + 50216: 'Oce Application Selector', + 50217: 'Oce Identification Number', + 50218: 'Oce ImageLogic Characteristics', + + # Adobe DNG + 50706: 'DNGVersion', + 50707: 'DNGBackwardVersion', + 50708: 'UniqueCameraModel', + 50709: 'LocalizedCameraModel', + 50710: 'CFAPlaneColor', + 50711: 'CFALayout', + 50712: 'LinearizationTable', + 50713: 'BlackLevelRepeatDim', + 50714: 'BlackLevel', + 50715: 'BlackLevelDeltaH', + 50716: 'BlackLevelDeltaV', + 50717: 'WhiteLevel', + 50718: 'DefaultScale', + 50719: 'DefaultCropOrigin', + 50720: 'DefaultCropSize', + 
50721: 'ColorMatrix1', + 50722: 'ColorMatrix2', + 50723: 'CameraCalibration1', + 50724: 'CameraCalibration2', + 50725: 'ReductionMatrix1', + 50726: 'ReductionMatrix2', + 50727: 'AnalogBalance', + 50728: 'AsShotNeutral', + 50729: 'AsShotWhiteXY', + 50730: 'BaselineExposure', + 50731: 'BaselineNoise', + 50732: 'BaselineSharpness', + 50733: 'BayerGreenSplit', + 50734: 'LinearResponseLimit', + 50735: 'CameraSerialNumber', + 50736: 'LensInfo', + 50737: 'ChromaBlurRadius', + 50738: 'AntiAliasStrength', + 50740: 'DNGPrivateData', + 50778: 'CalibrationIlluminant1', + 50779: 'CalibrationIlluminant2', + 50784: 'Alias Layer Metadata' + } + + +def _populate(): + for k, v in TAGS_V2.items(): + # Populate legacy structure. + TAGS[k] = v[0] + if len(v) == 4: + for sk, sv in v[3].items(): + TAGS[(k, sv)] = sk + + TAGS_V2[k] = TagInfo(k, *v) + + +_populate() +## +# Map type numbers to type names -- defined in ImageFileDirectory. + +TYPES = {} + +# was: +# TYPES = { +# 1: "byte", +# 2: "ascii", +# 3: "short", +# 4: "long", +# 5: "rational", +# 6: "signed byte", +# 7: "undefined", +# 8: "signed short", +# 9: "signed long", +# 10: "signed rational", +# 11: "float", +# 12: "double", +# } + +# +# These tags are handled by default in libtiff, without +# adding to the custom dictionary. From tif_dir.c, searching for +# case TIFFTAG in the _TIFFVSetField function: +# Line: item. 
+# 148: case TIFFTAG_SUBFILETYPE: +# 151: case TIFFTAG_IMAGEWIDTH: +# 154: case TIFFTAG_IMAGELENGTH: +# 157: case TIFFTAG_BITSPERSAMPLE: +# 181: case TIFFTAG_COMPRESSION: +# 202: case TIFFTAG_PHOTOMETRIC: +# 205: case TIFFTAG_THRESHHOLDING: +# 208: case TIFFTAG_FILLORDER: +# 214: case TIFFTAG_ORIENTATION: +# 221: case TIFFTAG_SAMPLESPERPIXEL: +# 228: case TIFFTAG_ROWSPERSTRIP: +# 238: case TIFFTAG_MINSAMPLEVALUE: +# 241: case TIFFTAG_MAXSAMPLEVALUE: +# 244: case TIFFTAG_SMINSAMPLEVALUE: +# 247: case TIFFTAG_SMAXSAMPLEVALUE: +# 250: case TIFFTAG_XRESOLUTION: +# 256: case TIFFTAG_YRESOLUTION: +# 262: case TIFFTAG_PLANARCONFIG: +# 268: case TIFFTAG_XPOSITION: +# 271: case TIFFTAG_YPOSITION: +# 274: case TIFFTAG_RESOLUTIONUNIT: +# 280: case TIFFTAG_PAGENUMBER: +# 284: case TIFFTAG_HALFTONEHINTS: +# 288: case TIFFTAG_COLORMAP: +# 294: case TIFFTAG_EXTRASAMPLES: +# 298: case TIFFTAG_MATTEING: +# 305: case TIFFTAG_TILEWIDTH: +# 316: case TIFFTAG_TILELENGTH: +# 327: case TIFFTAG_TILEDEPTH: +# 333: case TIFFTAG_DATATYPE: +# 344: case TIFFTAG_SAMPLEFORMAT: +# 361: case TIFFTAG_IMAGEDEPTH: +# 364: case TIFFTAG_SUBIFD: +# 376: case TIFFTAG_YCBCRPOSITIONING: +# 379: case TIFFTAG_YCBCRSUBSAMPLING: +# 383: case TIFFTAG_TRANSFERFUNCTION: +# 389: case TIFFTAG_REFERENCEBLACKWHITE: +# 393: case TIFFTAG_INKNAMES: + +# some of these are not in our TAGS_V2 dict and were included from tiff.h + +LIBTIFF_CORE = {255, 256, 257, 258, 259, 262, 263, 266, 274, 277, + 278, 280, 281, 340, 341, 282, 283, 284, 286, 287, + 296, 297, 321, 320, 338, 32995, 322, 323, 32998, + 32996, 339, 32997, 330, 531, 530, 301, 532, 333, + # as above + 269 # this has been in our tests forever, and works + } + +LIBTIFF_CORE.remove(320) # Array of short, crashes +LIBTIFF_CORE.remove(301) # Array of short, crashes +LIBTIFF_CORE.remove(532) # Array of long, crashes + +LIBTIFF_CORE.remove(255) # We don't have support for subfiletypes +LIBTIFF_CORE.remove(322) # We don't have support for writing tiled images with libtiff 
+LIBTIFF_CORE.remove(323) # Tiled images +LIBTIFF_CORE.remove(333) # Ink Names either + +# Note to advanced users: There may be combinations of these +# parameters and values that when added properly, will work and +# produce valid tiff images that may work in your application. +# It is safe to add and remove tags from this set from Pillow's point +# of view so long as you test against libtiff. diff --git a/thesisenv/lib/python3.6/site-packages/PIL/WalImageFile.py b/thesisenv/lib/python3.6/site-packages/PIL/WalImageFile.py new file mode 100644 index 0000000..6602cc8 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/PIL/WalImageFile.py @@ -0,0 +1,128 @@ +# encoding: utf-8 +# +# The Python Imaging Library. +# $Id$ +# +# WAL file handling +# +# History: +# 2003-04-23 fl created +# +# Copyright (c) 2003 by Fredrik Lundh. +# +# See the README file for information on usage and redistribution. +# + +# NOTE: This format cannot be automatically recognized, so the reader +# is not registered for use with Image.open(). To open a WAL file, use +# the WalImageFile.open() function instead. + +# This reader is based on the specification available from: +# https://www.flipcode.com/archives/Quake_2_BSP_File_Format.shtml +# and has been tested with a few sample files found using google. + +from . import Image +from ._binary import i32le as i32 + +try: + import builtins +except ImportError: + import __builtin__ + builtins = __builtin__ + + +def open(filename): + """ + Load texture from a Quake2 WAL texture file. + + By default, a Quake2 standard palette is attached to the texture. + To override the palette, use the putpalette method. + + :param filename: WAL file name, or an opened file handle. + :returns: An image instance. + """ + # FIXME: modify to return a WalImageFile instance instead of + # plain Image object ? 
+ + def imopen(fp): + # read header fields + header = fp.read(32+24+32+12) + size = i32(header, 32), i32(header, 36) + offset = i32(header, 40) + + # load pixel data + fp.seek(offset) + + Image._decompression_bomb_check(size) + im = Image.frombytes("P", size, fp.read(size[0] * size[1])) + im.putpalette(quake2palette) + + im.format = "WAL" + im.format_description = "Quake2 Texture" + + # strings are null-terminated + im.info["name"] = header[:32].split(b"\0", 1)[0] + next_name = header[56:56+32].split(b"\0", 1)[0] + if next_name: + im.info["next_name"] = next_name + + return im + + if hasattr(filename, "read"): + return imopen(filename) + else: + with builtins.open(filename, "rb") as fp: + return imopen(fp) + + +quake2palette = ( + # default palette taken from piffo 0.93 by Hans Häggström + b"\x01\x01\x01\x0b\x0b\x0b\x12\x12\x12\x17\x17\x17\x1b\x1b\x1b\x1e" + b"\x1e\x1e\x22\x22\x22\x26\x26\x26\x29\x29\x29\x2c\x2c\x2c\x2f\x2f" + b"\x2f\x32\x32\x32\x35\x35\x35\x37\x37\x37\x3a\x3a\x3a\x3c\x3c\x3c" + b"\x24\x1e\x13\x22\x1c\x12\x20\x1b\x12\x1f\x1a\x10\x1d\x19\x10\x1b" + b"\x17\x0f\x1a\x16\x0f\x18\x14\x0d\x17\x13\x0d\x16\x12\x0d\x14\x10" + b"\x0b\x13\x0f\x0b\x10\x0d\x0a\x0f\x0b\x0a\x0d\x0b\x07\x0b\x0a\x07" + b"\x23\x23\x26\x22\x22\x25\x22\x20\x23\x21\x1f\x22\x20\x1e\x20\x1f" + b"\x1d\x1e\x1d\x1b\x1c\x1b\x1a\x1a\x1a\x19\x19\x18\x17\x17\x17\x16" + b"\x16\x14\x14\x14\x13\x13\x13\x10\x10\x10\x0f\x0f\x0f\x0d\x0d\x0d" + b"\x2d\x28\x20\x29\x24\x1c\x27\x22\x1a\x25\x1f\x17\x38\x2e\x1e\x31" + b"\x29\x1a\x2c\x25\x17\x26\x20\x14\x3c\x30\x14\x37\x2c\x13\x33\x28" + b"\x12\x2d\x24\x10\x28\x1f\x0f\x22\x1a\x0b\x1b\x14\x0a\x13\x0f\x07" + b"\x31\x1a\x16\x30\x17\x13\x2e\x16\x10\x2c\x14\x0d\x2a\x12\x0b\x27" + b"\x0f\x0a\x25\x0f\x07\x21\x0d\x01\x1e\x0b\x01\x1c\x0b\x01\x1a\x0b" + b"\x01\x18\x0a\x01\x16\x0a\x01\x13\x0a\x01\x10\x07\x01\x0d\x07\x01" + b"\x29\x23\x1e\x27\x21\x1c\x26\x20\x1b\x25\x1f\x1a\x23\x1d\x19\x21" + b"\x1c\x18\x20\x1b\x17\x1e\x19\x16\x1c\x18\x14\x1b\x17\x13\x19\x14" + 
b"\x10\x17\x13\x0f\x14\x10\x0d\x12\x0f\x0b\x0f\x0b\x0a\x0b\x0a\x07" + b"\x26\x1a\x0f\x23\x19\x0f\x20\x17\x0f\x1c\x16\x0f\x19\x13\x0d\x14" + b"\x10\x0b\x10\x0d\x0a\x0b\x0a\x07\x33\x22\x1f\x35\x29\x26\x37\x2f" + b"\x2d\x39\x35\x34\x37\x39\x3a\x33\x37\x39\x30\x34\x36\x2b\x31\x34" + b"\x27\x2e\x31\x22\x2b\x2f\x1d\x28\x2c\x17\x25\x2a\x0f\x20\x26\x0d" + b"\x1e\x25\x0b\x1c\x22\x0a\x1b\x20\x07\x19\x1e\x07\x17\x1b\x07\x14" + b"\x18\x01\x12\x16\x01\x0f\x12\x01\x0b\x0d\x01\x07\x0a\x01\x01\x01" + b"\x2c\x21\x21\x2a\x1f\x1f\x29\x1d\x1d\x27\x1c\x1c\x26\x1a\x1a\x24" + b"\x18\x18\x22\x17\x17\x21\x16\x16\x1e\x13\x13\x1b\x12\x12\x18\x10" + b"\x10\x16\x0d\x0d\x12\x0b\x0b\x0d\x0a\x0a\x0a\x07\x07\x01\x01\x01" + b"\x2e\x30\x29\x2d\x2e\x27\x2b\x2c\x26\x2a\x2a\x24\x28\x29\x23\x27" + b"\x27\x21\x26\x26\x1f\x24\x24\x1d\x22\x22\x1c\x1f\x1f\x1a\x1c\x1c" + b"\x18\x19\x19\x16\x17\x17\x13\x13\x13\x10\x0f\x0f\x0d\x0b\x0b\x0a" + b"\x30\x1e\x1b\x2d\x1c\x19\x2c\x1a\x17\x2a\x19\x14\x28\x17\x13\x26" + b"\x16\x10\x24\x13\x0f\x21\x12\x0d\x1f\x10\x0b\x1c\x0f\x0a\x19\x0d" + b"\x0a\x16\x0b\x07\x12\x0a\x07\x0f\x07\x01\x0a\x01\x01\x01\x01\x01" + b"\x28\x29\x38\x26\x27\x36\x25\x26\x34\x24\x24\x31\x22\x22\x2f\x20" + b"\x21\x2d\x1e\x1f\x2a\x1d\x1d\x27\x1b\x1b\x25\x19\x19\x21\x17\x17" + b"\x1e\x14\x14\x1b\x13\x12\x17\x10\x0f\x13\x0d\x0b\x0f\x0a\x07\x07" + b"\x2f\x32\x29\x2d\x30\x26\x2b\x2e\x24\x29\x2c\x21\x27\x2a\x1e\x25" + b"\x28\x1c\x23\x26\x1a\x21\x25\x18\x1e\x22\x14\x1b\x1f\x10\x19\x1c" + b"\x0d\x17\x1a\x0a\x13\x17\x07\x10\x13\x01\x0d\x0f\x01\x0a\x0b\x01" + b"\x01\x3f\x01\x13\x3c\x0b\x1b\x39\x10\x20\x35\x14\x23\x31\x17\x23" + b"\x2d\x18\x23\x29\x18\x3f\x3f\x3f\x3f\x3f\x39\x3f\x3f\x31\x3f\x3f" + b"\x2a\x3f\x3f\x20\x3f\x3f\x14\x3f\x3c\x12\x3f\x39\x0f\x3f\x35\x0b" + b"\x3f\x32\x07\x3f\x2d\x01\x3d\x2a\x01\x3b\x26\x01\x39\x21\x01\x37" + b"\x1d\x01\x34\x1a\x01\x32\x16\x01\x2f\x12\x01\x2d\x0f\x01\x2a\x0b" + b"\x01\x27\x07\x01\x23\x01\x01\x1d\x01\x01\x17\x01\x01\x10\x01\x01" + 
b"\x3d\x01\x01\x19\x19\x3f\x3f\x01\x01\x01\x01\x3f\x16\x16\x13\x10" + b"\x10\x0f\x0d\x0d\x0b\x3c\x2e\x2a\x36\x27\x20\x30\x21\x18\x29\x1b" + b"\x10\x3c\x39\x37\x37\x32\x2f\x31\x2c\x28\x2b\x26\x21\x30\x22\x20" +) diff --git a/thesisenv/lib/python3.6/site-packages/PIL/WebPImagePlugin.py b/thesisenv/lib/python3.6/site-packages/PIL/WebPImagePlugin.py new file mode 100644 index 0000000..e6485c2 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/PIL/WebPImagePlugin.py @@ -0,0 +1,337 @@ +from . import Image, ImageFile +try: + from . import _webp + SUPPORTED = True +except ImportError as e: + SUPPORTED = False +from io import BytesIO + + +_VALID_WEBP_MODES = { + "RGBX": True, + "RGBA": True, + "RGB": True, + } + +_VALID_WEBP_LEGACY_MODES = { + "RGB": True, + "RGBA": True, + } + +_VP8_MODES_BY_IDENTIFIER = { + b"VP8 ": "RGB", + b"VP8X": "RGBA", + b"VP8L": "RGBA", # lossless + } + + +def _accept(prefix): + is_riff_file_format = prefix[:4] == b"RIFF" + is_webp_file = prefix[8:12] == b"WEBP" + is_valid_vp8_mode = prefix[12:16] in _VP8_MODES_BY_IDENTIFIER + + if is_riff_file_format and is_webp_file and is_valid_vp8_mode: + if not SUPPORTED: + return "image file could not be identified because WEBP support not installed" + return True + + +class WebPImageFile(ImageFile.ImageFile): + + format = "WEBP" + format_description = "WebP image" + + def _open(self): + if not _webp.HAVE_WEBPANIM: + # Legacy mode + data, width, height, self.mode, icc_profile, exif = \ + _webp.WebPDecode(self.fp.read()) + if icc_profile: + self.info["icc_profile"] = icc_profile + if exif: + self.info["exif"] = exif + self._size = width, height + self.fp = BytesIO(data) + self.tile = [("raw", (0, 0) + self.size, 0, self.mode)] + self._n_frames = 1 + return + + # Use the newer AnimDecoder API to parse the (possibly) animated file, + # and access muxed chunks like ICC/EXIF/XMP. 
+ self._decoder = _webp.WebPAnimDecoder(self.fp.read()) + + # Get info from decoder + width, height, loop_count, bgcolor, frame_count, mode = \ + self._decoder.get_info() + self._size = width, height + self.info["loop"] = loop_count + bg_a, bg_r, bg_g, bg_b = \ + (bgcolor >> 24) & 0xFF, \ + (bgcolor >> 16) & 0xFF, \ + (bgcolor >> 8) & 0xFF, \ + bgcolor & 0xFF + self.info["background"] = (bg_r, bg_g, bg_b, bg_a) + self._n_frames = frame_count + self.mode = 'RGB' if mode == 'RGBX' else mode + self.rawmode = mode + self.tile = [] + + # Attempt to read ICC / EXIF / XMP chunks from file + icc_profile = self._decoder.get_chunk("ICCP") + exif = self._decoder.get_chunk("EXIF") + xmp = self._decoder.get_chunk("XMP ") + if icc_profile: + self.info["icc_profile"] = icc_profile + if exif: + self.info["exif"] = exif + if xmp: + self.info["xmp"] = xmp + + # Initialize seek state + self._reset(reset=False) + self.seek(0) + + def _getexif(self): + from .JpegImagePlugin import _getexif + return _getexif(self) + + @property + def n_frames(self): + return self._n_frames + + @property + def is_animated(self): + return self._n_frames > 1 + + def seek(self, frame): + if not _webp.HAVE_WEBPANIM: + return super(WebPImageFile, self).seek(frame) + + # Perform some simple checks first + if frame >= self._n_frames: + raise EOFError("attempted to seek beyond end of sequence") + if frame < 0: + raise EOFError("negative frame index is not valid") + + # Set logical frame to requested position + self.__logical_frame = frame + + def _reset(self, reset=True): + if reset: + self._decoder.reset() + self.__physical_frame = 0 + self.__loaded = -1 + self.__timestamp = 0 + + def _get_next(self): + # Get next frame + ret = self._decoder.get_next() + self.__physical_frame += 1 + + # Check if an error occurred + if ret is None: + self._reset() # Reset just to be safe + self.seek(0) + raise EOFError("failed to decode next frame in WebP file") + + # Compute duration + data, timestamp = ret + duration = 
timestamp - self.__timestamp + self.__timestamp = timestamp + + # libwebp gives frame end, adjust to start of frame + timestamp -= duration + return data, timestamp, duration + + def _seek(self, frame): + if self.__physical_frame == frame: + return # Nothing to do + if frame < self.__physical_frame: + self._reset() # Rewind to beginning + while self.__physical_frame < frame: + self._get_next() # Advance to the requested frame + + def load(self): + if _webp.HAVE_WEBPANIM: + if self.__loaded != self.__logical_frame: + self._seek(self.__logical_frame) + + # We need to load the image data for this frame + data, timestamp, duration = self._get_next() + self.info["timestamp"] = timestamp + self.info["duration"] = duration + self.__loaded = self.__logical_frame + + # Set tile + if self.fp: + self.fp.close() + self.fp = BytesIO(data) + self.tile = [("raw", (0, 0) + self.size, 0, self.rawmode)] + + return super(WebPImageFile, self).load() + + def tell(self): + if not _webp.HAVE_WEBPANIM: + return super(WebPImageFile, self).tell() + + return self.__logical_frame + + +def _save_all(im, fp, filename): + encoderinfo = im.encoderinfo.copy() + append_images = list(encoderinfo.get("append_images", [])) + + # If total frame count is 1, then save using the legacy API, which + # will preserve non-alpha modes + total = 0 + for ims in [im]+append_images: + total += 1 if not hasattr(ims, "n_frames") else ims.n_frames + if total == 1: + _save(im, fp, filename) + return + + background = encoderinfo.get("background", (0, 0, 0, 0)) + duration = im.encoderinfo.get("duration", 0) + loop = im.encoderinfo.get("loop", 0) + minimize_size = im.encoderinfo.get("minimize_size", False) + kmin = im.encoderinfo.get("kmin", None) + kmax = im.encoderinfo.get("kmax", None) + allow_mixed = im.encoderinfo.get("allow_mixed", False) + verbose = False + lossless = im.encoderinfo.get("lossless", False) + quality = im.encoderinfo.get("quality", 80) + method = im.encoderinfo.get("method", 0) + icc_profile = 
im.encoderinfo.get("icc_profile", "") + exif = im.encoderinfo.get("exif", "") + xmp = im.encoderinfo.get("xmp", "") + if allow_mixed: + lossless = False + + # Sensible keyframe defaults are from gif2webp.c script + if kmin is None: + kmin = 9 if lossless else 3 + if kmax is None: + kmax = 17 if lossless else 5 + + # Validate background color + if (not isinstance(background, (list, tuple)) or len(background) != 4 or + not all(v >= 0 and v < 256 for v in background)): + raise IOError("Background color is not an RGBA tuple clamped " + "to (0-255): %s" % str(background)) + + # Convert to packed uint + bg_r, bg_g, bg_b, bg_a = background + background = (bg_a << 24) | (bg_r << 16) | (bg_g << 8) | (bg_b << 0) + + # Setup the WebP animation encoder + enc = _webp.WebPAnimEncoder( + im.size[0], im.size[1], + background, + loop, + minimize_size, + kmin, kmax, + allow_mixed, + verbose + ) + + # Add each frame + frame_idx = 0 + timestamp = 0 + cur_idx = im.tell() + try: + for ims in [im]+append_images: + # Get # of frames in this image + if not hasattr(ims, "n_frames"): + nfr = 1 + else: + nfr = ims.n_frames + + for idx in range(nfr): + ims.seek(idx) + ims.load() + + # Make sure image mode is supported + frame = ims + rawmode = ims.mode + if ims.mode not in _VALID_WEBP_MODES: + alpha = 'A' in ims.mode or 'a' in ims.mode \ + or (ims.mode == 'P' and 'A' in ims.im.getpalettemode()) + rawmode = 'RGBA' if alpha else 'RGB' + frame = ims.convert(rawmode) + + if rawmode == 'RGB': + # For faster conversion, use RGBX + rawmode = 'RGBX' + + # Append the frame to the animation encoder + enc.add( + frame.tobytes('raw', rawmode), + timestamp, + frame.size[0], frame.size[1], + rawmode, + lossless, + quality, + method + ) + + # Update timestamp and frame index + if isinstance(duration, (list, tuple)): + timestamp += duration[frame_idx] + else: + timestamp += duration + frame_idx += 1 + + finally: + im.seek(cur_idx) + + # Force encoder to flush frames + enc.add( + None, + timestamp, + 0, 0, "", 
lossless, quality, 0 + ) + + # Get the final output from the encoder + data = enc.assemble(icc_profile, exif, xmp) + if data is None: + raise IOError("cannot write file as WebP (encoder returned None)") + + fp.write(data) + + +def _save(im, fp, filename): + lossless = im.encoderinfo.get("lossless", False) + quality = im.encoderinfo.get("quality", 80) + icc_profile = im.encoderinfo.get("icc_profile", "") + exif = im.encoderinfo.get("exif", "") + xmp = im.encoderinfo.get("xmp", "") + + if im.mode not in _VALID_WEBP_LEGACY_MODES: + alpha = 'A' in im.mode or 'a' in im.mode \ + or (im.mode == 'P' and 'A' in im.im.getpalettemode()) + im = im.convert('RGBA' if alpha else 'RGB') + + data = _webp.WebPEncode( + im.tobytes(), + im.size[0], + im.size[1], + lossless, + float(quality), + im.mode, + icc_profile, + exif, + xmp + ) + if data is None: + raise IOError("cannot write file as WebP (encoder returned None)") + + fp.write(data) + + +Image.register_open(WebPImageFile.format, WebPImageFile, _accept) +if SUPPORTED: + Image.register_save(WebPImageFile.format, _save) + if _webp.HAVE_WEBPANIM: + Image.register_save_all(WebPImageFile.format, _save_all) + Image.register_extension(WebPImageFile.format, ".webp") + Image.register_mime(WebPImageFile.format, "image/webp") diff --git a/thesisenv/lib/python3.6/site-packages/PIL/WmfImagePlugin.py b/thesisenv/lib/python3.6/site-packages/PIL/WmfImagePlugin.py new file mode 100644 index 0000000..81699bd --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/PIL/WmfImagePlugin.py @@ -0,0 +1,169 @@ +# +# The Python Imaging Library +# $Id$ +# +# WMF stub codec +# +# history: +# 1996-12-14 fl Created +# 2004-02-22 fl Turned into a stub driver +# 2004-02-23 fl Added EMF support +# +# Copyright (c) Secret Labs AB 1997-2004. All rights reserved. +# Copyright (c) Fredrik Lundh 1996. +# +# See the README file for information on usage and redistribution. 
+# +# WMF/EMF reference documentation: +# https://winprotocoldoc.blob.core.windows.net/productionwindowsarchives/MS-WMF/[MS-WMF].pdf +# http://wvware.sourceforge.net/caolan/index.html +# http://wvware.sourceforge.net/caolan/ora-wmf.html + +from __future__ import print_function + +from . import Image, ImageFile +from ._binary import i16le as word, si16le as short, \ + i32le as dword, si32le as _long +from ._util import py3 + + +__version__ = "0.2" + +_handler = None + +if py3: + long = int + + +def register_handler(handler): + """ + Install application-specific WMF image handler. + + :param handler: Handler object. + """ + global _handler + _handler = handler + + +if hasattr(Image.core, "drawwmf"): + # install default handler (windows only) + + class WmfHandler(object): + + def open(self, im): + im.mode = "RGB" + self.bbox = im.info["wmf_bbox"] + + def load(self, im): + im.fp.seek(0) # rewind + return Image.frombytes( + "RGB", im.size, + Image.core.drawwmf(im.fp.read(), im.size, self.bbox), + "raw", "BGR", (im.size[0]*3 + 3) & -4, -1 + ) + + register_handler(WmfHandler()) + +# +# -------------------------------------------------------------------- +# Read WMF file + + +def _accept(prefix): + return ( + prefix[:6] == b"\xd7\xcd\xc6\x9a\x00\x00" or + prefix[:4] == b"\x01\x00\x00\x00" + ) + + +## +# Image plugin for Windows metafiles. 
+ +class WmfStubImageFile(ImageFile.StubImageFile): + + format = "WMF" + format_description = "Windows Metafile" + + def _open(self): + + # check placable header + s = self.fp.read(80) + + if s[:6] == b"\xd7\xcd\xc6\x9a\x00\x00": + + # placeable windows metafile + + # get units per inch + inch = word(s, 14) + + # get bounding box + x0 = short(s, 6) + y0 = short(s, 8) + x1 = short(s, 10) + y1 = short(s, 12) + + # normalize size to 72 dots per inch + size = (x1 - x0) * 72 // inch, (y1 - y0) * 72 // inch + + self.info["wmf_bbox"] = x0, y0, x1, y1 + + self.info["dpi"] = 72 + + # sanity check (standard metafile header) + if s[22:26] != b"\x01\x00\t\x00": + raise SyntaxError("Unsupported WMF file format") + + elif dword(s) == 1 and s[40:44] == b" EMF": + # enhanced metafile + + # get bounding box + x0 = _long(s, 8) + y0 = _long(s, 12) + x1 = _long(s, 16) + y1 = _long(s, 20) + + # get frame (in 0.01 millimeter units) + frame = _long(s, 24), _long(s, 28), _long(s, 32), _long(s, 36) + + # normalize size to 72 dots per inch + size = x1 - x0, y1 - y0 + + # calculate dots per inch from bbox and frame + xdpi = 2540 * (x1 - y0) // (frame[2] - frame[0]) + ydpi = 2540 * (y1 - y0) // (frame[3] - frame[1]) + + self.info["wmf_bbox"] = x0, y0, x1, y1 + + if xdpi == ydpi: + self.info["dpi"] = xdpi + else: + self.info["dpi"] = xdpi, ydpi + + else: + raise SyntaxError("Unsupported file format") + + self.mode = "RGB" + self._size = size + + loader = self._load() + if loader: + loader.open(self) + + def _load(self): + return _handler + + +def _save(im, fp, filename): + if _handler is None or not hasattr(_handler, "save"): + raise IOError("WMF save handler not installed") + _handler.save(im, fp, filename) + +# +# -------------------------------------------------------------------- +# Registry stuff + + +Image.register_open(WmfStubImageFile.format, WmfStubImageFile, _accept) +Image.register_save(WmfStubImageFile.format, _save) + +Image.register_extensions(WmfStubImageFile.format, [".wmf", 
".emf"]) diff --git a/thesisenv/lib/python3.6/site-packages/PIL/XVThumbImagePlugin.py b/thesisenv/lib/python3.6/site-packages/PIL/XVThumbImagePlugin.py new file mode 100644 index 0000000..8cdd848 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/PIL/XVThumbImagePlugin.py @@ -0,0 +1,80 @@ +# +# The Python Imaging Library. +# $Id$ +# +# XV Thumbnail file handler by Charles E. "Gene" Cash +# (gcash@magicnet.net) +# +# see xvcolor.c and xvbrowse.c in the sources to John Bradley's XV, +# available from ftp://ftp.cis.upenn.edu/pub/xv/ +# +# history: +# 98-08-15 cec created (b/w only) +# 98-12-09 cec added color palette +# 98-12-28 fl added to PIL (with only a few very minor modifications) +# +# To do: +# FIXME: make save work (this requires quantization support) +# + +from . import Image, ImageFile, ImagePalette +from ._binary import i8, o8 + +__version__ = "0.1" + +_MAGIC = b"P7 332" + +# standard color palette for thumbnails (RGB332) +PALETTE = b"" +for r in range(8): + for g in range(8): + for b in range(4): + PALETTE = PALETTE + (o8((r*255)//7)+o8((g*255)//7)+o8((b*255)//3)) + + +def _accept(prefix): + return prefix[:6] == _MAGIC + + +## +# Image plugin for XV thumbnail images. + +class XVThumbImageFile(ImageFile.ImageFile): + + format = "XVThumb" + format_description = "XV thumbnail image" + + def _open(self): + + # check magic + if not _accept(self.fp.read(6)): + raise SyntaxError("not an XV thumbnail file") + + # Skip to beginning of next line + self.fp.readline() + + # skip info comments + while True: + s = self.fp.readline() + if not s: + raise SyntaxError("Unexpected EOF reading XV thumbnail file") + if i8(s[0]) != 35: # ie. 
when not a comment: '#' + break + + # parse header line (already read) + s = s.strip().split() + + self.mode = "P" + self._size = int(s[0]), int(s[1]) + + self.palette = ImagePalette.raw("RGB", PALETTE) + + self.tile = [ + ("raw", (0, 0)+self.size, + self.fp.tell(), (self.mode, 0, 1) + )] + + +# -------------------------------------------------------------------- + +Image.register_open(XVThumbImageFile.format, XVThumbImageFile, _accept) diff --git a/thesisenv/lib/python3.6/site-packages/PIL/XbmImagePlugin.py b/thesisenv/lib/python3.6/site-packages/PIL/XbmImagePlugin.py new file mode 100644 index 0000000..0cccda1 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/PIL/XbmImagePlugin.py @@ -0,0 +1,96 @@ +# +# The Python Imaging Library. +# $Id$ +# +# XBM File handling +# +# History: +# 1995-09-08 fl Created +# 1996-11-01 fl Added save support +# 1997-07-07 fl Made header parser more tolerant +# 1997-07-22 fl Fixed yet another parser bug +# 2001-02-17 fl Use 're' instead of 'regex' (Python 2.1) (0.4) +# 2001-05-13 fl Added hotspot handling (based on code from Bernhard Herzog) +# 2004-02-24 fl Allow some whitespace before first #define +# +# Copyright (c) 1997-2004 by Secret Labs AB +# Copyright (c) 1996-1997 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +import re +from . import Image, ImageFile + +__version__ = "0.6" + +# XBM header +xbm_head = re.compile( + br"\s*#define[ \t]+.*_width[ \t]+(?P[0-9]+)[\r\n]+" + b"#define[ \t]+.*_height[ \t]+(?P[0-9]+)[\r\n]+" + b"(?P" + b"#define[ \t]+[^_]*_x_hot[ \t]+(?P[0-9]+)[\r\n]+" + b"#define[ \t]+[^_]*_y_hot[ \t]+(?P[0-9]+)[\r\n]+" + b")?" + b"[\\000-\\377]*_bits\\[\\]" +) + + +def _accept(prefix): + return prefix.lstrip()[:7] == b"#define" + + +## +# Image plugin for X11 bitmaps. 
+ +class XbmImageFile(ImageFile.ImageFile): + + format = "XBM" + format_description = "X11 Bitmap" + + def _open(self): + + m = xbm_head.match(self.fp.read(512)) + + if m: + + xsize = int(m.group("width")) + ysize = int(m.group("height")) + + if m.group("hotspot"): + self.info["hotspot"] = ( + int(m.group("xhot")), int(m.group("yhot")) + ) + + self.mode = "1" + self._size = xsize, ysize + + self.tile = [("xbm", (0, 0)+self.size, m.end(), None)] + + +def _save(im, fp, filename): + + if im.mode != "1": + raise IOError("cannot write mode %s as XBM" % im.mode) + + fp.write(("#define im_width %d\n" % im.size[0]).encode('ascii')) + fp.write(("#define im_height %d\n" % im.size[1]).encode('ascii')) + + hotspot = im.encoderinfo.get("hotspot") + if hotspot: + fp.write(("#define im_x_hot %d\n" % hotspot[0]).encode('ascii')) + fp.write(("#define im_y_hot %d\n" % hotspot[1]).encode('ascii')) + + fp.write(b"static char im_bits[] = {\n") + + ImageFile._save(im, fp, [("xbm", (0, 0)+im.size, 0, None)]) + + fp.write(b"};\n") + + +Image.register_open(XbmImageFile.format, XbmImageFile, _accept) +Image.register_save(XbmImageFile.format, _save) + +Image.register_extension(XbmImageFile.format, ".xbm") + +Image.register_mime(XbmImageFile.format, "image/xbm") diff --git a/thesisenv/lib/python3.6/site-packages/PIL/XpmImagePlugin.py b/thesisenv/lib/python3.6/site-packages/PIL/XpmImagePlugin.py new file mode 100644 index 0000000..02bc28a --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/PIL/XpmImagePlugin.py @@ -0,0 +1,129 @@ +# +# The Python Imaging Library. +# $Id$ +# +# XPM File handling +# +# History: +# 1996-12-29 fl Created +# 2001-02-17 fl Use 're' instead of 'regex' (Python 2.1) (0.7) +# +# Copyright (c) Secret Labs AB 1997-2001. +# Copyright (c) Fredrik Lundh 1996-2001. +# +# See the README file for information on usage and redistribution. +# + + +import re +from . 
import Image, ImageFile, ImagePalette +from ._binary import i8, o8 + +__version__ = "0.2" + +# XPM header +xpm_head = re.compile(b"\"([0-9]*) ([0-9]*) ([0-9]*) ([0-9]*)") + + +def _accept(prefix): + return prefix[:9] == b"/* XPM */" + + +## +# Image plugin for X11 pixel maps. + +class XpmImageFile(ImageFile.ImageFile): + + format = "XPM" + format_description = "X11 Pixel Map" + + def _open(self): + + if not _accept(self.fp.read(9)): + raise SyntaxError("not an XPM file") + + # skip forward to next string + while True: + s = self.fp.readline() + if not s: + raise SyntaxError("broken XPM file") + m = xpm_head.match(s) + if m: + break + + self._size = int(m.group(1)), int(m.group(2)) + + pal = int(m.group(3)) + bpp = int(m.group(4)) + + if pal > 256 or bpp != 1: + raise ValueError("cannot read this XPM file") + + # + # load palette description + + palette = [b"\0\0\0"] * 256 + + for i in range(pal): + + s = self.fp.readline() + if s[-2:] == b'\r\n': + s = s[:-2] + elif s[-1:] in b'\r\n': + s = s[:-1] + + c = i8(s[1]) + s = s[2:-2].split() + + for i in range(0, len(s), 2): + + if s[i] == b"c": + + # process colour key + rgb = s[i+1] + if rgb == b"None": + self.info["transparency"] = c + elif rgb[0:1] == b"#": + # FIXME: handle colour names (see ImagePalette.py) + rgb = int(rgb[1:], 16) + palette[c] = (o8((rgb >> 16) & 255) + + o8((rgb >> 8) & 255) + + o8(rgb & 255)) + else: + # unknown colour + raise ValueError("cannot read this XPM file") + break + + else: + + # missing colour key + raise ValueError("cannot read this XPM file") + + self.mode = "P" + self.palette = ImagePalette.raw("RGB", b"".join(palette)) + + self.tile = [("raw", (0, 0)+self.size, self.fp.tell(), ("P", 0, 1))] + + def load_read(self, bytes): + + # + # load all image data in one chunk + + xsize, ysize = self.size + + s = [None] * ysize + + for i in range(ysize): + s[i] = self.fp.readline()[1:xsize+1].ljust(xsize) + + return b"".join(s) + +# +# Registry + + +Image.register_open(XpmImageFile.format, 
XpmImageFile, _accept) + +Image.register_extension(XpmImageFile.format, ".xpm") + +Image.register_mime(XpmImageFile.format, "image/xpm") diff --git a/thesisenv/lib/python3.6/site-packages/PIL/__init__.py b/thesisenv/lib/python3.6/site-packages/PIL/__init__.py new file mode 100644 index 0000000..bc8cfed --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/PIL/__init__.py @@ -0,0 +1,72 @@ +"""Pillow (Fork of the Python Imaging Library) + +Pillow is the friendly PIL fork by Alex Clark and Contributors. + https://github.com/python-pillow/Pillow/ + +Pillow is forked from PIL 1.1.7. + +PIL is the Python Imaging Library by Fredrik Lundh and Contributors. +Copyright (c) 1999 by Secret Labs AB. + +Use PIL.__version__ for this Pillow version. +PIL.VERSION is the old PIL version and will be removed in the future. + +;-) +""" + +from . import _version + +# VERSION is deprecated and will be removed in Pillow 6.0.0. +# PILLOW_VERSION is deprecated and will be removed after that. +# Use __version__ instead. 
+VERSION = '1.1.7' # PIL Version +PILLOW_VERSION = __version__ = _version.__version__ + +del _version + + +_plugins = ['BlpImagePlugin', + 'BmpImagePlugin', + 'BufrStubImagePlugin', + 'CurImagePlugin', + 'DcxImagePlugin', + 'DdsImagePlugin', + 'EpsImagePlugin', + 'FitsStubImagePlugin', + 'FliImagePlugin', + 'FpxImagePlugin', + 'FtexImagePlugin', + 'GbrImagePlugin', + 'GifImagePlugin', + 'GribStubImagePlugin', + 'Hdf5StubImagePlugin', + 'IcnsImagePlugin', + 'IcoImagePlugin', + 'ImImagePlugin', + 'ImtImagePlugin', + 'IptcImagePlugin', + 'JpegImagePlugin', + 'Jpeg2KImagePlugin', + 'McIdasImagePlugin', + 'MicImagePlugin', + 'MpegImagePlugin', + 'MpoImagePlugin', + 'MspImagePlugin', + 'PalmImagePlugin', + 'PcdImagePlugin', + 'PcxImagePlugin', + 'PdfImagePlugin', + 'PixarImagePlugin', + 'PngImagePlugin', + 'PpmImagePlugin', + 'PsdImagePlugin', + 'SgiImagePlugin', + 'SpiderImagePlugin', + 'SunImagePlugin', + 'TgaImagePlugin', + 'TiffImagePlugin', + 'WebPImagePlugin', + 'WmfImagePlugin', + 'XbmImagePlugin', + 'XpmImagePlugin', + 'XVThumbImagePlugin'] diff --git a/thesisenv/lib/python3.6/site-packages/PIL/_binary.py b/thesisenv/lib/python3.6/site-packages/PIL/_binary.py new file mode 100644 index 0000000..767c13b --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/PIL/_binary.py @@ -0,0 +1,94 @@ +# +# The Python Imaging Library. +# $Id$ +# +# Binary input/output support routines. +# +# Copyright (c) 1997-2003 by Secret Labs AB +# Copyright (c) 1995-2003 by Fredrik Lundh +# Copyright (c) 2012 by Brian Crowell +# +# See the README file for information on usage and redistribution. +# + +from struct import unpack_from, pack +from ._util import py3 + +if py3: + def i8(c): + return c if c.__class__ is int else c[0] + + def o8(i): + return bytes((i & 255,)) +else: + def i8(c): + return ord(c) + + def o8(i): + return chr(i & 255) + + +# Input, le = little endian, be = big endian +def i16le(c, o=0): + """ + Converts a 2-bytes (16 bits) string to an unsigned integer. 
+ + c: string containing bytes to convert + o: offset of bytes to convert in string + """ + return unpack_from("H", c, o)[0] + + +def i32be(c, o=0): + return unpack_from(">I", c, o)[0] + + +# Output, le = little endian, be = big endian +def o16le(i): + return pack("H", i) + + +def o32be(i): + return pack(">I", i) diff --git a/thesisenv/lib/python3.6/site-packages/PIL/_imaging.cpython-36m-darwin.so b/thesisenv/lib/python3.6/site-packages/PIL/_imaging.cpython-36m-darwin.so new file mode 100644 index 0000000..65a05d2 Binary files /dev/null and b/thesisenv/lib/python3.6/site-packages/PIL/_imaging.cpython-36m-darwin.so differ diff --git a/thesisenv/lib/python3.6/site-packages/PIL/_imagingcms.cpython-36m-darwin.so b/thesisenv/lib/python3.6/site-packages/PIL/_imagingcms.cpython-36m-darwin.so new file mode 100644 index 0000000..dc0c5b5 Binary files /dev/null and b/thesisenv/lib/python3.6/site-packages/PIL/_imagingcms.cpython-36m-darwin.so differ diff --git a/thesisenv/lib/python3.6/site-packages/PIL/_imagingft.cpython-36m-darwin.so b/thesisenv/lib/python3.6/site-packages/PIL/_imagingft.cpython-36m-darwin.so new file mode 100644 index 0000000..375c6ea Binary files /dev/null and b/thesisenv/lib/python3.6/site-packages/PIL/_imagingft.cpython-36m-darwin.so differ diff --git a/thesisenv/lib/python3.6/site-packages/PIL/_imagingmath.cpython-36m-darwin.so b/thesisenv/lib/python3.6/site-packages/PIL/_imagingmath.cpython-36m-darwin.so new file mode 100644 index 0000000..82b618a Binary files /dev/null and b/thesisenv/lib/python3.6/site-packages/PIL/_imagingmath.cpython-36m-darwin.so differ diff --git a/thesisenv/lib/python3.6/site-packages/PIL/_imagingmorph.cpython-36m-darwin.so b/thesisenv/lib/python3.6/site-packages/PIL/_imagingmorph.cpython-36m-darwin.so new file mode 100644 index 0000000..c9293d3 Binary files /dev/null and b/thesisenv/lib/python3.6/site-packages/PIL/_imagingmorph.cpython-36m-darwin.so differ diff --git 
a/thesisenv/lib/python3.6/site-packages/PIL/_imagingtk.cpython-36m-darwin.so b/thesisenv/lib/python3.6/site-packages/PIL/_imagingtk.cpython-36m-darwin.so new file mode 100644 index 0000000..21a686a Binary files /dev/null and b/thesisenv/lib/python3.6/site-packages/PIL/_imagingtk.cpython-36m-darwin.so differ diff --git a/thesisenv/lib/python3.6/site-packages/PIL/_tkinter_finder.py b/thesisenv/lib/python3.6/site-packages/PIL/_tkinter_finder.py new file mode 100644 index 0000000..987d962 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/PIL/_tkinter_finder.py @@ -0,0 +1,20 @@ +""" Find compiled module linking to Tcl / Tk libraries +""" +import sys + +if sys.version_info.major > 2: + from tkinter import _tkinter as tk +else: + from Tkinter import tkinter as tk + +if hasattr(sys, 'pypy_find_executable'): + # Tested with packages at https://bitbucket.org/pypy/pypy/downloads. + # PyPies 1.6, 2.0 do not have tkinter built in. PyPy3-2.3.1 gives an + # OSError trying to import tkinter. Otherwise: + try: # PyPy 5.1, 4.0.0, 2.6.1, 2.6.0 + TKINTER_LIB = tk.tklib_cffi.__file__ + except AttributeError: + # PyPy3 2.4, 2.1-beta1; PyPy 2.5.1, 2.5.0, 2.4.0, 2.3, 2.2, 2.1 + TKINTER_LIB = tk.tkffi.verifier.modulefilename +else: + TKINTER_LIB = tk.__file__ diff --git a/thesisenv/lib/python3.6/site-packages/PIL/_util.py b/thesisenv/lib/python3.6/site-packages/PIL/_util.py new file mode 100644 index 0000000..e6989d6 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/PIL/_util.py @@ -0,0 +1,30 @@ +import os +import sys + +py3 = sys.version_info.major >= 3 + +if py3: + def isStringType(t): + return isinstance(t, str) + + def isPath(f): + return isinstance(f, (bytes, str)) +else: + def isStringType(t): + return isinstance(t, basestring) + + def isPath(f): + return isinstance(f, basestring) + + +# Checks if an object is a string, and that it points to a directory. 
+def isDirectory(f): + return isPath(f) and os.path.isdir(f) + + +class deferred_error(object): + def __init__(self, ex): + self.ex = ex + + def __getattr__(self, elt): + raise self.ex diff --git a/thesisenv/lib/python3.6/site-packages/PIL/_version.py b/thesisenv/lib/python3.6/site-packages/PIL/_version.py new file mode 100644 index 0000000..b5e4f0d --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/PIL/_version.py @@ -0,0 +1,2 @@ +# Master version for Pillow +__version__ = '5.3.0' diff --git a/thesisenv/lib/python3.6/site-packages/PIL/_webp.cpython-36m-darwin.so b/thesisenv/lib/python3.6/site-packages/PIL/_webp.cpython-36m-darwin.so new file mode 100644 index 0000000..0aec5b3 Binary files /dev/null and b/thesisenv/lib/python3.6/site-packages/PIL/_webp.cpython-36m-darwin.so differ diff --git a/thesisenv/lib/python3.6/site-packages/PIL/features.py b/thesisenv/lib/python3.6/site-packages/PIL/features.py new file mode 100644 index 0000000..9926445 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/PIL/features.py @@ -0,0 +1,85 @@ +from . 
import Image + +modules = { + "pil": "PIL._imaging", + "tkinter": "PIL._tkinter_finder", + "freetype2": "PIL._imagingft", + "littlecms2": "PIL._imagingcms", + "webp": "PIL._webp", +} + + +def check_module(feature): + if not (feature in modules): + raise ValueError("Unknown module %s" % feature) + + module = modules[feature] + + try: + __import__(module) + return True + except ImportError: + return False + + +def get_supported_modules(): + return [f for f in modules if check_module(f)] + + +codecs = { + "jpg": "jpeg", + "jpg_2000": "jpeg2k", + "zlib": "zip", + "libtiff": "libtiff" +} + + +def check_codec(feature): + if feature not in codecs: + raise ValueError("Unknown codec %s" % feature) + + codec = codecs[feature] + + return codec + "_encoder" in dir(Image.core) + + +def get_supported_codecs(): + return [f for f in codecs if check_codec(f)] + + +features = { + "webp_anim": ("PIL._webp", 'HAVE_WEBPANIM'), + "webp_mux": ("PIL._webp", 'HAVE_WEBPMUX'), + "transp_webp": ("PIL._webp", "HAVE_TRANSPARENCY"), + "raqm": ("PIL._imagingft", "HAVE_RAQM") +} + + +def check_feature(feature): + if feature not in features: + raise ValueError("Unknown feature %s" % feature) + + module, flag = features[feature] + + try: + imported_module = __import__(module, fromlist=['PIL']) + return getattr(imported_module, flag) + except ImportError: + return None + + +def get_supported_features(): + return [f for f in features if check_feature(f)] + + +def check(feature): + return (feature in modules and check_module(feature) or + feature in codecs and check_codec(feature) or + feature in features and check_feature(feature)) + + +def get_supported(): + ret = get_supported_modules() + ret.extend(get_supported_features()) + ret.extend(get_supported_codecs()) + return ret diff --git a/thesisenv/lib/python3.6/site-packages/Pillow-5.3.0.dist-info/INSTALLER b/thesisenv/lib/python3.6/site-packages/Pillow-5.3.0.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ 
b/thesisenv/lib/python3.6/site-packages/Pillow-5.3.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/thesisenv/lib/python3.6/site-packages/Pillow-5.3.0.dist-info/LICENSE.txt b/thesisenv/lib/python3.6/site-packages/Pillow-5.3.0.dist-info/LICENSE.txt new file mode 100644 index 0000000..80456a7 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/Pillow-5.3.0.dist-info/LICENSE.txt @@ -0,0 +1,16 @@ +The Python Imaging Library (PIL) is + + Copyright © 1997-2011 by Secret Labs AB + Copyright © 1995-2011 by Fredrik Lundh + +Pillow is the friendly PIL fork. It is + + Copyright © 2010-2018 by Alex Clark and contributors + +Like PIL, Pillow is licensed under the open source PIL Software License: + +By obtaining, using, and/or copying this software and/or its associated documentation, you agree that you have read, understood, and will comply with the following terms and conditions: + +Permission to use, copy, modify, and distribute this software and its associated documentation for any purpose and without fee is hereby granted, provided that the above copyright notice appears in all copies, and that both that copyright notice and this permission notice appear in supporting documentation, and that the name of Secret Labs AB or the author not be used in advertising or publicity pertaining to distribution of the software without specific, written prior permission. + +SECRET LABS AB AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL SECRET LABS AB OR THE AUTHOR BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
diff --git a/thesisenv/lib/python3.6/site-packages/Pillow-5.3.0.dist-info/METADATA b/thesisenv/lib/python3.6/site-packages/Pillow-5.3.0.dist-info/METADATA new file mode 100644 index 0000000..660fa41 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/Pillow-5.3.0.dist-info/METADATA @@ -0,0 +1,107 @@ +Metadata-Version: 2.1 +Name: Pillow +Version: 5.3.0 +Summary: Python Imaging Library (Fork) +Home-page: http://python-pillow.org +Author: Alex Clark (Fork Author) +Author-email: aclark@aclark.net +License: Standard PIL License +Keywords: Imaging +Platform: UNKNOWN +Classifier: Development Status :: 6 - Mature +Classifier: Topic :: Multimedia :: Graphics +Classifier: Topic :: Multimedia :: Graphics :: Capture :: Digital Camera +Classifier: Topic :: Multimedia :: Graphics :: Capture :: Screen Capture +Classifier: Topic :: Multimedia :: Graphics :: Graphics Conversion +Classifier: Topic :: Multimedia :: Graphics :: Viewers +Classifier: License :: Other/Proprietary License +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.* + +Pillow +====== + +Python Imaging Library (Fork) +----------------------------- + +Pillow is the friendly PIL fork by `Alex Clark and Contributors `_. PIL is the Python Imaging Library by Fredrik Lundh and Contributors. + +.. start-badges + +.. list-table:: + :stub-columns: 1 + + * - docs + - |docs| + * - tests + - |linux| |macos| |windows| |coverage| + * - package + - |zenodo| |version| + * - social + - |gitter| |twitter| + +.. 
|docs| image:: https://readthedocs.org/projects/pillow/badge/?version=latest + :target: https://pillow.readthedocs.io/?badge=latest + :alt: Documentation Status + +.. |linux| image:: https://img.shields.io/travis/python-pillow/Pillow/master.svg?label=Linux%20build + :target: https://travis-ci.org/python-pillow/Pillow + :alt: Travis CI build status (Linux) + +.. |macos| image:: https://img.shields.io/travis/python-pillow/pillow-wheels/latest.svg?label=macOS%20build + :target: https://travis-ci.org/python-pillow/pillow-wheels + :alt: Travis CI build status (macOS) + +.. |windows| image:: https://img.shields.io/appveyor/ci/python-pillow/Pillow/master.svg?label=Windows%20build + :target: https://ci.appveyor.com/project/python-pillow/Pillow + :alt: AppVeyor CI build status (Windows) + +.. |coverage| image:: https://coveralls.io/repos/python-pillow/Pillow/badge.svg?branch=master&service=github + :target: https://coveralls.io/github/python-pillow/Pillow?branch=master + :alt: Code coverage + +.. |zenodo| image:: https://zenodo.org/badge/17549/python-pillow/Pillow.svg + :target: https://zenodo.org/badge/latestdoi/17549/python-pillow/Pillow + +.. |version| image:: https://img.shields.io/pypi/v/pillow.svg + :target: https://pypi.org/project/Pillow/ + :alt: Latest PyPI version + +.. |gitter| image:: https://badges.gitter.im/python-pillow/Pillow.svg + :target: https://gitter.im/python-pillow/Pillow?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge + :alt: Join the chat at https://gitter.im/python-pillow/Pillow + +.. |twitter| image:: https://img.shields.io/badge/tweet-on%20Twitter-00aced.svg + :target: https://twitter.com/PythonPillow + :alt: Follow on https://twitter.com/PythonPillow + +.. 
end-badges + + + +More Information +---------------- + +- `Documentation `_ + + - `Installation `_ + - `Handbook `_ + +- `Contribute `_ + + - `Issues `_ + - `Pull requests `_ + +- `Changelog `_ + + - `Pre-fork `_ + + diff --git a/thesisenv/lib/python3.6/site-packages/Pillow-5.3.0.dist-info/RECORD b/thesisenv/lib/python3.6/site-packages/Pillow-5.3.0.dist-info/RECORD new file mode 100644 index 0000000..fa3be69 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/Pillow-5.3.0.dist-info/RECORD @@ -0,0 +1,209 @@ +PIL/.dylibs/libfreetype.6.dylib,sha256=m0qsMUKy3L2LJX3BJJs_L2KjRAxpeGhS4Xgevx1o8ww,1976728 +PIL/.dylibs/libjpeg.9.dylib,sha256=n0EEWqznQhoKFfA47FK_4TCFQqyPXXhCyKIrQI9VAdw,717696 +PIL/.dylibs/liblcms2.2.dylib,sha256=uTq3a5PFhkwlQNEAd_24Hzcnc_RSyOpTgxYsDSm3fUo,969452 +PIL/.dylibs/liblzma.5.dylib,sha256=sq2_eFD8IYIiBV_-PdBtFCbmL6RYxv32begwaw-_0Qg,479156 +PIL/.dylibs/libopenjp2.2.1.0.dylib,sha256=F-r6pjRJODyGvYIw1DU1S1hGmFZUOaLcSPpJYjWcu7I,671168 +PIL/.dylibs/libpng16.16.dylib,sha256=Jdp3OQ2M5iYG4bxmmhM7RQeR9i1nh1V3OsFT2VyFCBk,602204 +PIL/.dylibs/libtiff.5.dylib,sha256=TLpcU5Slh9YOzEPzDcxy1YaPSIaNPDhuPOUcEeLYDm8,1290132 +PIL/.dylibs/libwebp.7.dylib,sha256=8f9BNpbhzg-itZ39a3OnQcXJE-Q4KxhMTn74VKVvTZY,1384692 +PIL/.dylibs/libwebpdemux.2.dylib,sha256=ND8dpbn9-VJBPQtMKC9ZhgTR5fjh6NCk8DJMfOs8vBM,61000 +PIL/.dylibs/libwebpmux.3.dylib,sha256=NM8v8WHrT0eqMmgkhJsEqae4C270V00fEWmK26C7wms,124156 +PIL/.dylibs/libz.1.2.11.dylib,sha256=nXDg3A_3j-SJkNZ_rYFbAEEvU-PhOcggyoiMKN1hdC0,257796 +PIL/BdfFontFile.py,sha256=lap8i0-KXX9TGbBfFkyEm3YMONHAKoD3ilhtfwl8xXo,3000 +PIL/BlpImagePlugin.py,sha256=8Qia6DFeu-tZn1VYF591-IGXJJdvgIYoLBpniKD4pSY,14416 +PIL/BmpImagePlugin.py,sha256=ceYTIBt49JbRRD54BwunPLu7TVx5oBY51oOesJ0G_S8,14182 +PIL/BufrStubImagePlugin.py,sha256=gW5inS0a7X7HW2sv5a5r94P2v9nRVUSv2cdEgKo8VWI,1519 +PIL/ContainerIO.py,sha256=NjmpvNBDrF2zC4c60QzJU14YZ_H0ykzfeb_HJ0WH5po,2705 +PIL/CurImagePlugin.py,sha256=IiY3siY1L_BmGmsFd-EyoXDMur-PT15ZYE5achTqjls,1737 
+PIL/DcxImagePlugin.py,sha256=H-pUUjreLV8pcigxXi76XSaGCsfjugZVRJpjt5yV9dE,2025 +PIL/DdsImagePlugin.py,sha256=r6kf4T5ZZ4kcJZyjN-np0ob_O6NYRqI1HLR5yat9M_c,5072 +PIL/EpsImagePlugin.py,sha256=lhPW82E2rLF6cjTnnIZhBoVpF-4YxX7yZhp63xMxXf4,12703 +PIL/ExifTags.py,sha256=-Auwh-hnhpOuBCo6SW5hEmToJYmbjat13TTG4OnF9Q4,8837 +PIL/FitsStubImagePlugin.py,sha256=rbIznhaRU63PAFVjp_czCG0js9kw7Uq6PDJ4TuQY_3E,1623 +PIL/FliImagePlugin.py,sha256=tcSlB2OEEYJzVatMeKXuH09SnCzmo7KYMRzbPIWjDwg,4094 +PIL/FontFile.py,sha256=1PGL-w3Adrfda7ISCKkcgljCyU2D606gaNkG7wViAJM,2800 +PIL/FpxImagePlugin.py,sha256=4EGYtQiaNxPaNztFblW-ByxiB1kh5WaCD60Z1MM9VLk,6282 +PIL/FtexImagePlugin.py,sha256=roR39s4VKquA_eBUiKzKRjbF1dAsRcvjZfi-4JvW5OE,3296 +PIL/GbrImagePlugin.py,sha256=gnf5QYRPqVSPGshEg78Q_Bls0muXYwiw7YZKNaqg_Xg,2754 +PIL/GdImageFile.py,sha256=Jdl6pgAz4FN5VoPiYzGSk5rp3MO6s5g3jvsiNH5TnOw,2289 +PIL/GifImagePlugin.py,sha256=jWrIDUQ2zdDL83Qdlz-QOStzJLgEpRxgoVpWVLs-HLM,26951 +PIL/GimpGradientFile.py,sha256=zs7-vHAdVCInRQRDx2K5fBpofMlRR0GRKxGVmcUGMag,3345 +PIL/GimpPaletteFile.py,sha256=1o3b3WhPCeUHz_9uWforK0iQ_OBeGoFMHnEsn9lfCjM,1339 +PIL/GribStubImagePlugin.py,sha256=Ct8GRHzqlcD1uI093lsVirimGj8zSnJjs5QgBBGeFZA,1542 +PIL/Hdf5StubImagePlugin.py,sha256=7-DvTj34u1bRFGZOMsgtd7QadvhkbYgNzKSc2vx6PkM,1516 +PIL/IcnsImagePlugin.py,sha256=0hCj4hGo0PV_uKUrTypgAdMpcj3YxD19xA-0k13oKXM,11760 +PIL/IcoImagePlugin.py,sha256=8jC_B4NCUbb8cHwROU1G-nw2HFm-iAA7-rREpr5sErY,9609 +PIL/ImImagePlugin.py,sha256=pa2qqJMeh0doO2KtP-8Fm3gCqfAFR2lcEIE00Ww26Pc,10163 +PIL/Image.py,sha256=p8elay6s1tMhPXAr_offKpBnXRtpb2t9K4TH9zLysuI,98243 +PIL/ImageChops.py,sha256=f07JlSm9Ln3-PPIM1-ylbchp29FSnOPunFyciLZyMyc,6182 +PIL/ImageCms.py,sha256=awGhfurjYYz4QjpXItmrNAgmWCMMK9kk7tcThRs_DNw,36004 +PIL/ImageColor.py,sha256=ctBZpa78H8kqxM8kqpT12c0tw0D812YWy-KtRl-mupA,8703 +PIL/ImageDraw.py,sha256=4_8Eu55HHwIWRJtBG4HOq3MW0akl18ZKmpL9soCwFCA,16527 +PIL/ImageDraw2.py,sha256=kpFXgNEmf2Yn16zy9TpU4_z7ekN5sEhlKmmTXwnC3eg,3127 
+PIL/ImageEnhance.py,sha256=wDIaAs_zjcE86416xlCCW6WmxbI1ua6iOjYOwm1-tkE,3208 +PIL/ImageFile.py,sha256=XvlagL2cyYxhIrIh5Q0567aPdMiAgkt8XhXD1_V_pUc,20762 +PIL/ImageFilter.py,sha256=xoYoRkuBg1rfRXvejKrXk6tjmeQKzQHyFX7EoBQoPzM,15352 +PIL/ImageFont.py,sha256=KJH_PYreWNsNvsRVkPRkVEhhVHMVOee7JdMLv5nYgM4,21556 +PIL/ImageGrab.py,sha256=q7wTv2h-t6-2KqKKVIhLg3lNTTAhlo45bErT368pGz8,2150 +PIL/ImageMath.py,sha256=k3KvcxS5Vm07IWVnZqq0Fx8ceVB1MdsBlYHoLYrpDys,7463 +PIL/ImageMode.py,sha256=7FRP65DI8LZejbFWUtefXn4b5ZnQinvlYwAbeP6SPBk,1597 +PIL/ImageMorph.py,sha256=MrDftUNiY7lSGDu5AGdrOpZsne3x9pFeiddg_KtpFyE,8271 +PIL/ImageOps.py,sha256=50pdkq8mF6sO3twh8Gm_qr7TxidhEeVc9ul1wMSRZhw,20228 +PIL/ImagePalette.py,sha256=IDL9FgTwlBSfUdo_8BSpS9nHYGFrcNzgqJYqdA4Kyys,6319 +PIL/ImagePath.py,sha256=IPUmk_1SdD5NjpZWuwDEDMrRIq_izWrhiczq7zFgLl8,337 +PIL/ImageQt.py,sha256=CZtLL_Uh4EUaeclr-w3Iq7y2wIvRqGHkwQDB1oByyts,6544 +PIL/ImageSequence.py,sha256=fp7ziB8L6zhEXz8bTrU5sYCdGnZ7OzADEOsvCd37Vc4,1240 +PIL/ImageShow.py,sha256=tH6tIj8jh__hOeAUpnRDgSiU15kfhaOaoc8cREe5OTU,5262 +PIL/ImageStat.py,sha256=NuQM-hCjP_TlAbj6jr5LC4S35QpdwmMKsate-UEqYNE,3854 +PIL/ImageTk.py,sha256=pVeub1amZXiuZzjUKqRAplbeu2yI0U7kfpITZ9Cz8vw,9420 +PIL/ImageTransform.py,sha256=3tSnRn747qaNC-8BaOC0T1CyeMJoaKUzpLEwCPKyHFs,2839 +PIL/ImageWin.py,sha256=cH6bBrWyk42copvCfPILYhpTkdngxA1d8v1S7R9ol-Y,7217 +PIL/ImtImagePlugin.py,sha256=PHIOn60naWLAV9FyUja2zggiNu7sIivpMntF-IczeII,2242 +PIL/IptcImagePlugin.py,sha256=A3vo8uSxwbUi2kVzaEB-WVRnts8SmpJ0fRd2W_QDUys,6757 +PIL/Jpeg2KImagePlugin.py,sha256=rVcCQ9LS9eabzUKNmrbBs6YGs75-aNKaY88ETvHE9lk,7760 +PIL/JpegImagePlugin.py,sha256=cQNKq_v8K3-P7Dssuehyf56kmGiAkMWgb7WSCc-shpo,27663 +PIL/JpegPresets.py,sha256=t9_TuyTIVp9RkXlIv3BVLEh7T1NZtVZwzzLpIlcJiMQ,12399 +PIL/McIdasImagePlugin.py,sha256=rajcN6-9PrYJ9Ex--MDbgwqXDhSFJdsKhXOCA1pwbx4,1769 +PIL/MicImagePlugin.py,sha256=X0tFPFlbaHeY4tI70AmzViUs781-u-ycrqHM7Hb9oKk,2460 +PIL/MpegImagePlugin.py,sha256=AJjo-gDBT2ok2foejHRaXvO5u416JM9DvCdr9eSaF9k,1832 
+PIL/MpoImagePlugin.py,sha256=gD6xTKauVCGIsK_Sey8Nh-VYyfthO4ysBsMbwmnyo68,2982 +PIL/MspImagePlugin.py,sha256=JpWpCwm6BIPWOtr8_HjSMZ4YKDbXWwGLMS1LixczacU,5534 +PIL/OleFileIO.py,sha256=EJ54RgQCUPoQjO0lDdoe34MeOd9IH_RwYH2GKpKYlPY,152 +PIL/PSDraw.py,sha256=hQuLYYkxbTOL6fw4eomK_Rop0U0JWZIlljBwtpj_jes,6870 +PIL/PaletteFile.py,sha256=xnAUCKhUxSIeqqn--4DFPRaNtVxeqoNvYgdzq_7kidQ,1110 +PIL/PalmImagePlugin.py,sha256=-4otkAWd40ykwzLq06CZ8QWb_bFZO_cLirtZ_ZeP-7s,9150 +PIL/PcdImagePlugin.py,sha256=VR0iEJ0UcdM4s2pVIILruv4R5irGZAlO5SZOT-EF3bw,1521 +PIL/PcfFontFile.py,sha256=BdhMHBLk_DMvnO6IAe_IdddpHwZRjXPEzTeh2oglylQ,6136 +PIL/PcxImagePlugin.py,sha256=1Wr5XjlpE8jipIehuk_kzHa8IUE_DoaI8Qew1MZT5ww,5225 +PIL/PdfImagePlugin.py,sha256=KiMtzTobyiCORJ-3uZafp3nGzjDTvcUXP-PCweD0BiM,8292 +PIL/PdfParser.py,sha256=kTxFoLgJn1BDdbVLc3RZ3KDWjl-bBeVBOVz9s5ImsoU,35812 +PIL/PixarImagePlugin.py,sha256=OVn1aHCti2cVAtTUnU2x3yj8KvP9NFgaNjc80ou3yvE,1675 +PIL/PngImagePlugin.py,sha256=frkMBRVKCvrXc0IftFKa4XbAEzZJ7mueM-St6pKjqwk,25691 +PIL/PpmImagePlugin.py,sha256=a5zogM8UsfC4P1FWiz6843uJwDJi59n3MZLhGfVdKQs,4242 +PIL/PsdImagePlugin.py,sha256=IPKR3UrgDrC09OgkVQDUXCcPFTYOr4D7SRag5o1qGCU,7551 +PIL/PyAccess.py,sha256=rIH60zn4NcjZmlMp5sZowFTWLjzUqvooECNZa87u9WQ,8895 +PIL/SgiImagePlugin.py,sha256=4dK6KwaZ1nQdfCQwtWl3nqE78ZPAnEvJa574CA5YjWU,6152 +PIL/SpiderImagePlugin.py,sha256=RNi5iNkfbWi-FbgkbFRA5ETOnH-kw8WX1LjiWKXYsUw,9264 +PIL/SunImagePlugin.py,sha256=1oYBM86mhI9vpPCy6_yH5nITzjcOieituTjcaZoVwwk,4367 +PIL/TarIO.py,sha256=0f0geSoJ9B0PPcM0jMcCCfa94hwYDS6hth7FRNFwmpM,1239 +PIL/TgaImagePlugin.py,sha256=mAMySZWVZuVxfssW-dSGqtszVl8rbDa-z4Z5GR9TXpE,5992 +PIL/TiffImagePlugin.py,sha256=C0Sc9lPJft-2QUuP4lCd1HsQbbnYnl8_wDAXSAxLJdw,64902 +PIL/TiffTags.py,sha256=B4ygNlSv1p1k9bvqtn4KXqrBoZQK7Jl-M79ONrGcI4g,14471 +PIL/WalImageFile.py,sha256=rKggtH36cF0a7NaiwrxfWS1Lq2zSoI9bJfGvW5LIax8,5558 +PIL/WebPImagePlugin.py,sha256=ri2gzIeXRC8gmdG81pFvnIssR97I6FA0IgnAl7cCpCM,10413 
+PIL/WmfImagePlugin.py,sha256=MjVxOj6cx-qcSSJq-ejIj-x1_vUhp9ien3Dis2-E2ek,4237 +PIL/XVThumbImagePlugin.py,sha256=bnw1cJ_Dn3fpbEIW-N5-jKX4zbm3u7PTl6sEZp7AmTM,1960 +PIL/XbmImagePlugin.py,sha256=nENR_AnYDIvPIYvC4BAQPbK3VmFwz8oUVE6xap39wso,2505 +PIL/XpmImagePlugin.py,sha256=LJmr56_OBjvPuMCs-pqKs9L-CkGdSywrIxMY0_xnmW8,3103 +PIL/__init__.py,sha256=wsCY37LjZNnONsIg0CiKsbNdzV27uVp7b8FTlpd_R4A,2093 +PIL/__pycache__/BdfFontFile.cpython-36.pyc,, +PIL/__pycache__/BlpImagePlugin.cpython-36.pyc,, +PIL/__pycache__/BmpImagePlugin.cpython-36.pyc,, +PIL/__pycache__/BufrStubImagePlugin.cpython-36.pyc,, +PIL/__pycache__/ContainerIO.cpython-36.pyc,, +PIL/__pycache__/CurImagePlugin.cpython-36.pyc,, +PIL/__pycache__/DcxImagePlugin.cpython-36.pyc,, +PIL/__pycache__/DdsImagePlugin.cpython-36.pyc,, +PIL/__pycache__/EpsImagePlugin.cpython-36.pyc,, +PIL/__pycache__/ExifTags.cpython-36.pyc,, +PIL/__pycache__/FitsStubImagePlugin.cpython-36.pyc,, +PIL/__pycache__/FliImagePlugin.cpython-36.pyc,, +PIL/__pycache__/FontFile.cpython-36.pyc,, +PIL/__pycache__/FpxImagePlugin.cpython-36.pyc,, +PIL/__pycache__/FtexImagePlugin.cpython-36.pyc,, +PIL/__pycache__/GbrImagePlugin.cpython-36.pyc,, +PIL/__pycache__/GdImageFile.cpython-36.pyc,, +PIL/__pycache__/GifImagePlugin.cpython-36.pyc,, +PIL/__pycache__/GimpGradientFile.cpython-36.pyc,, +PIL/__pycache__/GimpPaletteFile.cpython-36.pyc,, +PIL/__pycache__/GribStubImagePlugin.cpython-36.pyc,, +PIL/__pycache__/Hdf5StubImagePlugin.cpython-36.pyc,, +PIL/__pycache__/IcnsImagePlugin.cpython-36.pyc,, +PIL/__pycache__/IcoImagePlugin.cpython-36.pyc,, +PIL/__pycache__/ImImagePlugin.cpython-36.pyc,, +PIL/__pycache__/Image.cpython-36.pyc,, +PIL/__pycache__/ImageChops.cpython-36.pyc,, +PIL/__pycache__/ImageCms.cpython-36.pyc,, +PIL/__pycache__/ImageColor.cpython-36.pyc,, +PIL/__pycache__/ImageDraw.cpython-36.pyc,, +PIL/__pycache__/ImageDraw2.cpython-36.pyc,, +PIL/__pycache__/ImageEnhance.cpython-36.pyc,, +PIL/__pycache__/ImageFile.cpython-36.pyc,, 
+PIL/__pycache__/ImageFilter.cpython-36.pyc,, +PIL/__pycache__/ImageFont.cpython-36.pyc,, +PIL/__pycache__/ImageGrab.cpython-36.pyc,, +PIL/__pycache__/ImageMath.cpython-36.pyc,, +PIL/__pycache__/ImageMode.cpython-36.pyc,, +PIL/__pycache__/ImageMorph.cpython-36.pyc,, +PIL/__pycache__/ImageOps.cpython-36.pyc,, +PIL/__pycache__/ImagePalette.cpython-36.pyc,, +PIL/__pycache__/ImagePath.cpython-36.pyc,, +PIL/__pycache__/ImageQt.cpython-36.pyc,, +PIL/__pycache__/ImageSequence.cpython-36.pyc,, +PIL/__pycache__/ImageShow.cpython-36.pyc,, +PIL/__pycache__/ImageStat.cpython-36.pyc,, +PIL/__pycache__/ImageTk.cpython-36.pyc,, +PIL/__pycache__/ImageTransform.cpython-36.pyc,, +PIL/__pycache__/ImageWin.cpython-36.pyc,, +PIL/__pycache__/ImtImagePlugin.cpython-36.pyc,, +PIL/__pycache__/IptcImagePlugin.cpython-36.pyc,, +PIL/__pycache__/Jpeg2KImagePlugin.cpython-36.pyc,, +PIL/__pycache__/JpegImagePlugin.cpython-36.pyc,, +PIL/__pycache__/JpegPresets.cpython-36.pyc,, +PIL/__pycache__/McIdasImagePlugin.cpython-36.pyc,, +PIL/__pycache__/MicImagePlugin.cpython-36.pyc,, +PIL/__pycache__/MpegImagePlugin.cpython-36.pyc,, +PIL/__pycache__/MpoImagePlugin.cpython-36.pyc,, +PIL/__pycache__/MspImagePlugin.cpython-36.pyc,, +PIL/__pycache__/OleFileIO.cpython-36.pyc,, +PIL/__pycache__/PSDraw.cpython-36.pyc,, +PIL/__pycache__/PaletteFile.cpython-36.pyc,, +PIL/__pycache__/PalmImagePlugin.cpython-36.pyc,, +PIL/__pycache__/PcdImagePlugin.cpython-36.pyc,, +PIL/__pycache__/PcfFontFile.cpython-36.pyc,, +PIL/__pycache__/PcxImagePlugin.cpython-36.pyc,, +PIL/__pycache__/PdfImagePlugin.cpython-36.pyc,, +PIL/__pycache__/PdfParser.cpython-36.pyc,, +PIL/__pycache__/PixarImagePlugin.cpython-36.pyc,, +PIL/__pycache__/PngImagePlugin.cpython-36.pyc,, +PIL/__pycache__/PpmImagePlugin.cpython-36.pyc,, +PIL/__pycache__/PsdImagePlugin.cpython-36.pyc,, +PIL/__pycache__/PyAccess.cpython-36.pyc,, +PIL/__pycache__/SgiImagePlugin.cpython-36.pyc,, +PIL/__pycache__/SpiderImagePlugin.cpython-36.pyc,, 
+PIL/__pycache__/SunImagePlugin.cpython-36.pyc,, +PIL/__pycache__/TarIO.cpython-36.pyc,, +PIL/__pycache__/TgaImagePlugin.cpython-36.pyc,, +PIL/__pycache__/TiffImagePlugin.cpython-36.pyc,, +PIL/__pycache__/TiffTags.cpython-36.pyc,, +PIL/__pycache__/WalImageFile.cpython-36.pyc,, +PIL/__pycache__/WebPImagePlugin.cpython-36.pyc,, +PIL/__pycache__/WmfImagePlugin.cpython-36.pyc,, +PIL/__pycache__/XVThumbImagePlugin.cpython-36.pyc,, +PIL/__pycache__/XbmImagePlugin.cpython-36.pyc,, +PIL/__pycache__/XpmImagePlugin.cpython-36.pyc,, +PIL/__pycache__/__init__.cpython-36.pyc,, +PIL/__pycache__/_binary.cpython-36.pyc,, +PIL/__pycache__/_tkinter_finder.cpython-36.pyc,, +PIL/__pycache__/_util.cpython-36.pyc,, +PIL/__pycache__/_version.cpython-36.pyc,, +PIL/__pycache__/features.cpython-36.pyc,, +PIL/_binary.py,sha256=Wy_0dJYiAzbkg7rKBi2e9os-UsPqcgPFMaGUW1x7vNE,1822 +PIL/_imaging.cpython-36m-darwin.so,sha256=dWcw44C3gOmqPYYIwE0qdXh89_-kA7Bw9lHtaBZ7QWw,894472 +PIL/_imagingcms.cpython-36m-darwin.so,sha256=vijVtBHJVo6UEa2NhrKWgNDsvdtpjfXY4X0N1oi9TxQ,85964 +PIL/_imagingft.cpython-36m-darwin.so,sha256=uWBezngf3NkXhYNAdoMlGOL8BTpv7IzUBpThs_avjyk,51400 +PIL/_imagingmath.cpython-36m-darwin.so,sha256=J0hY11oU3ONK37jd9O6pjbev4SNtnQcnq7rVr87h4Ow,67388 +PIL/_imagingmorph.cpython-36m-darwin.so,sha256=udZQaanRAjjha03ofBcYExQvKudSjwBZU2TVo4K9tiY,26452 +PIL/_imagingtk.cpython-36m-darwin.so,sha256=fe9sCbhpofrsIBw8EsFlyPNGtbrEzromNFCW2ckTins,36496 +PIL/_tkinter_finder.py,sha256=OxAeW-nXH-BLvlWO-YjwMtaTG33_UQ5kmR3IbtyMAH8,702 +PIL/_util.py,sha256=zYI94v_4bOptVC8oOkYdl844rW8TfODA6xlZD_rdeq0,590 +PIL/_version.py,sha256=pv_VXLvUR152CKmoMC7gd7j49lg8NIuBjTwHHrFVzus,50 +PIL/_webp.cpython-36m-darwin.so,sha256=C5XvCrwGJQygJOyhdsFS7BTL9LDeqYgQrO1NSv_RvF0,49784 +PIL/features.py,sha256=9D3LoufNcZvNvp8ExVAqdequM0vD1LF_puH_byd3h38,1874 +Pillow-5.3.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +Pillow-5.3.0.dist-info/LICENSE.txt,sha256=SWD8GL7MKGvoOcj_F71wzVIiUz92ESkUIVNENxiiUZk,1452 
+Pillow-5.3.0.dist-info/METADATA,sha256=MC3MnWhnq4PM62HVLgSfN4_s5Xgmzb7QSItmFgNJcTU,4109 +Pillow-5.3.0.dist-info/RECORD,, +Pillow-5.3.0.dist-info/WHEEL,sha256=tiLeek1oNTIC85Ebac-hFGFA7V52LkBitEJ2MuLs8lo,249 +Pillow-5.3.0.dist-info/top_level.txt,sha256=riZqrk-hyZqh5f1Z0Zwii3dKfxEsByhu9cU9IODF-NY,4 +Pillow-5.3.0.dist-info/zip-safe,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1 diff --git a/thesisenv/lib/python3.6/site-packages/Pillow-5.3.0.dist-info/WHEEL b/thesisenv/lib/python3.6/site-packages/Pillow-5.3.0.dist-info/WHEEL new file mode 100644 index 0000000..a38b651 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/Pillow-5.3.0.dist-info/WHEEL @@ -0,0 +1,9 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.31.1) +Root-Is-Purelib: false +Tag: cp36-cp36m-macosx_10_6_intel +Tag: cp36-cp36m-macosx_10_9_intel +Tag: cp36-cp36m-macosx_10_9_x86_64 +Tag: cp36-cp36m-macosx_10_10_intel +Tag: cp36-cp36m-macosx_10_10_x86_64 + diff --git a/thesisenv/lib/python3.6/site-packages/Pillow-5.3.0.dist-info/top_level.txt b/thesisenv/lib/python3.6/site-packages/Pillow-5.3.0.dist-info/top_level.txt new file mode 100644 index 0000000..b338169 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/Pillow-5.3.0.dist-info/top_level.txt @@ -0,0 +1 @@ +PIL diff --git a/thesisenv/lib/python3.6/site-packages/Pillow-5.3.0.dist-info/zip-safe b/thesisenv/lib/python3.6/site-packages/Pillow-5.3.0.dist-info/zip-safe new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/Pillow-5.3.0.dist-info/zip-safe @@ -0,0 +1 @@ + diff --git a/thesisenv/lib/python3.6/site-packages/Products.statusmessages-5.0.4-py2.7-nspkg.pth b/thesisenv/lib/python3.6/site-packages/Products.statusmessages-5.0.4-py2.7-nspkg.pth new file mode 100644 index 0000000..38abb22 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/Products.statusmessages-5.0.4-py2.7-nspkg.pth @@ -0,0 +1 @@ +import sys, types, os;has_mfs = sys.version_info > (3, 5);p = 
os.path.join(sys._getframe(1).f_locals['sitedir'], *('Products',));importlib = has_mfs and __import__('importlib.util');has_mfs and __import__('importlib.machinery');m = has_mfs and sys.modules.setdefault('Products', importlib.util.module_from_spec(importlib.machinery.PathFinder.find_spec('Products', [os.path.dirname(p)])));m = m or sys.modules.setdefault('Products', types.ModuleType('Products'));mp = (m or []) and m.__dict__.setdefault('__path__',[]);(p not in mp) and mp.append(p) diff --git a/thesisenv/lib/python3.6/site-packages/Products.statusmessages-5.0.4.dist-info/INSTALLER b/thesisenv/lib/python3.6/site-packages/Products.statusmessages-5.0.4.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/Products.statusmessages-5.0.4.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/thesisenv/lib/python3.6/site-packages/Products.statusmessages-5.0.4.dist-info/METADATA b/thesisenv/lib/python3.6/site-packages/Products.statusmessages-5.0.4.dist-info/METADATA new file mode 100644 index 0000000..b241413 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/Products.statusmessages-5.0.4.dist-info/METADATA @@ -0,0 +1,298 @@ +Metadata-Version: 2.1 +Name: Products.statusmessages +Version: 5.0.4 +Summary: statusmessages provides an easy way of handling internationalized status messages managed via an BrowserRequest adapter storing status messages in client-side cookies. 
+Home-page: https://pypi.python.org/pypi/Products.statusmessages +Author: Hanno Schlichting +Author-email: plone-developers@lists.sourceforge.net +License: BSD +Keywords: Zope CMF Plone status messages i18n +Platform: UNKNOWN +Classifier: Framework :: Plone +Classifier: Framework :: Plone :: 5.0 +Classifier: Framework :: Plone :: 5.1 +Classifier: Framework :: Zope2 +Classifier: License :: OSI Approved :: BSD License +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Provides-Extra: test +Requires-Dist: setuptools +Requires-Dist: six +Requires-Dist: zope.annotation +Requires-Dist: zope.i18n +Requires-Dist: zope.interface +Provides-Extra: test +Requires-Dist: zope.component; extra == 'test' +Requires-Dist: Zope2; extra == 'test' + +Introduction +============ + +statusmessages provides an easy way of handling internationalized status +messages managed via an BrowserRequest adapter storing status messages in +client-side cookies. + +It is quite common to write status messages which should be shown to the user +after some action. These messages of course should be internationalized. As +these messages normally are defined in Python code, the common way to i18n-ize +these in Zope is to use zope.i18n Messages. Messages are complex objects +consisting of a translation domain and a default unicode text and might have an +additional mapping dict and a distinct id. + +The usual way to provide status messages in CMF/Plone has been to add a +"?portal_status_messages=some%20text" to the URL. While this has some usability +problems it also isn't possible to i18n-ize these in the common way, as the URL +is currently limited to the ASCII charset, but an encoding providing support for +the full unicode range is required. + +The solution provided by this module is to store the status messages inside a +cookie. 
In version 1.x a server side session like storage has been used, but +this turned out not to be caching friendly for the usual web caching strategies. + +Changelog +========= + +5.0.4 (2018-09-23) +------------------ + +Bug fixes: + +- Use Umlaut ä in tests in order to check string/bytes handling. + Refactor test_encoding to unittest. + [jensens] + + +5.0.3 (2018-06-21) +------------------ + +Bug fixes: + +- Python 3 compatibility fixes. + [pbauer] + + +5.0.2 (2018-02-05) +------------------ + +New features: + +- Add Python 2 / 3 compatibility + [davilima6] + + +5.0.1 (2017-11-26) +------------------ + +Bug fixes: + +- Fix issue where encoding and decoding of statusmessages into cookie + raised exception, because of + ``TypeError: Struct() argument 1 must be string, not unicode`` + [datakurre] + + +5.0 (2017-08-04) +---------------- + +Breaking changes: + +- Remove python 2.6 (and thus Plone 4.3.x) compatibility. + [tomgross] + +New features: + +- Python 3 compatibility + [tomgross] + +Bug fixes: + +- Update code to follow Plone styleguide. + [gforcada] + + +4.1.2 (2017-02-12) +------------------ + +Bug fixes: + +- Fix deprecated import in test. + [pbauer] + + +4.1.1 (2016-08-11) +------------------ + +Bug fixes: + +- Use zope.interface decorator. + [gforcada] + + +4.1.0 (2016-05-25) +------------------ + +New features: + +- Convert tests to plone.app.testing. + [do3cc] + + +4.0 - 2010-07-18 +---------------- + +- Use the standard libraries doctest module. + [hannosch] + + +4.0b1 - 2010-03-01 +------------------ + +- Stopped the cookie from being expired if a redirect (301, 302) or not-modified + (304) response is sent. This means that if you set a redirect and then + (superfluously) render a template that would show the status message, you + won't lose the message. + [optilude] + + +4.0a2 - 2009-12-17 +------------------ + +- Changed the default type of a new message from the empty string to info. 
+ [hannosch] + + +4.0a1 - 2009-12-17 +------------------ + +- Simplified the interface to use simpler add/show method names while keeping + backwards compatibility. + [hannosch] + +- More code simplification. Make the code itself independent of Zope2. + [hannosch] + +- Removed a five:implements statement, as the ZPublisher.HTTPRequest is always + an IBrowserRequest in Zope 2.12. + [hannosch] + +- This version depends on Zope 2.12+. + [hannosch] + +- Package metadata cleanup. + [hannosch] + +- Declare package and test dependencies. + [hannosch] + + +3.0.3 - 2007-11-24 +------------------ + +- Use binascii.b2a_base64 instead of base64.encodestring; the former doesn't + inject newlines every 76 characters, which makes it easier to strip just the + last one (slightly faster). This fixes tickets #7323 and #7325. + [mj] + + +3.0.2 - 2007-11-06 +------------------ + +- Fixed encoding format for the cookie value. The former format imposed a + serious security risk. The full security issue is tracked at: + http://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2007-5741. This also fixes + http://dev.plone.org/plone/ticket/6943. + [hannosch, witsch, mj] + + +3.0.1 - 2007-10-07 +------------------ + +- Added the IAttributeAnnotatable interface assignment for the request to this + package as well as the inclusion of the zope.annotation, as we rely on it. + [hannosch] + + +3.0 - 2007-08-09 +---------------- + +- No changes. + [hannosch] + + +3.0rc1 - 2007-07-10 +------------------- + +- Removed useless setup.cfg. + [hannosch] + + +3.0b2 - 2007-03-23 +------------------ + +- Fixed duplicate message bug. Showing identical messages to the end user more + than once, doesn't make any sense. This closes + http://dev.plone.org/plone/ticket/6109. + [hannosch] + +- Added 's support for statusmessages without a redirect. This uses annotations + on the request instead of direct values, so we avoid the possibility of + sneaking those in via query strings. 
+ [tomster, hannosch] + + +3.0b1 - 2007-03-05 +------------------ + +- Converted to a package in the Products namespace. + [hannosch] + +- Added explicit translation of statusmessages before storing them in the + cookie. This makes sure we have a reasonable context to base the + translation on. + [hannosch] + +- Changed license to BSD, to make it possible to include it as a dependency + in Archetypes. + [hannosch] + + +2.1 - 2006-10-25 +---------------- + +- Updated test infrastructure, removed custom testrunner. + [hannosch] + +- Fixed deprecation warning for the zcml content directive. + [hannosch] + + +2.0 - 2006-05-15 +---------------- + +- Total reimplementation using cookies instead of a server-side in-memory + storage to store status messages. The reasoning behind this change is that + the former approach didn't play well with web caching strategies and added an + additional burden in ZEO environments (having to use load-balancers, which + are able to identify users and keep them connected to the same ZEO server). + [hannosch] + + +1.1 - 2006-02-13 +---------------- + +- Added tests for ThreadSafeDict. + [hannosch] + +- Fixed serious memory leak and did some code improvements. 
+ [hannosch, alecm] + + +1.0 - 2006-01-26 +---------------- + +- Initial implementation + [hannosch] + + diff --git a/thesisenv/lib/python3.6/site-packages/Products.statusmessages-5.0.4.dist-info/RECORD b/thesisenv/lib/python3.6/site-packages/Products.statusmessages-5.0.4.dist-info/RECORD new file mode 100644 index 0000000..b13bf8f --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/Products.statusmessages-5.0.4.dist-info/RECORD @@ -0,0 +1,24 @@ +Products.statusmessages-5.0.4-py2.7-nspkg.pth,sha256=nq2GLEDBKV9G0i4LEhkEEQT0gMqQlG1ToxVfs90lBMs,549 +Products.statusmessages-5.0.4.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +Products.statusmessages-5.0.4.dist-info/METADATA,sha256=tutryeANX0AnvgSOgnVM4Po-uT-xzfClf6PnkwTJAkQ,7181 +Products.statusmessages-5.0.4.dist-info/RECORD,, +Products.statusmessages-5.0.4.dist-info/WHEEL,sha256=gduuPyBvFJQSQ0zdyxF7k0zynDXbIbvg5ZBHoXum5uk,110 +Products.statusmessages-5.0.4.dist-info/namespace_packages.txt,sha256=UgMr3LVXWHS9vjnxTk8VOZd7xxgBn5T02HOaMRXjjTM,9 +Products.statusmessages-5.0.4.dist-info/top_level.txt,sha256=UgMr3LVXWHS9vjnxTk8VOZd7xxgBn5T02HOaMRXjjTM,9 +Products/statusmessages/__init__.py,sha256=vPOu4yfjbTHCWz9i9aDaA0i2DmLiGzyRr5Qq32JqQH8,60 +Products/statusmessages/__pycache__/__init__.cpython-36.pyc,, +Products/statusmessages/__pycache__/adapter.cpython-36.pyc,, +Products/statusmessages/__pycache__/interfaces.cpython-36.pyc,, +Products/statusmessages/__pycache__/message.cpython-36.pyc,, +Products/statusmessages/adapter.py,sha256=KZ3cHD7OBWCVHwG3NasYcev6R_YEtXP7XEhwb-S9meU,3647 +Products/statusmessages/configure.zcml,sha256=bTehHH6U6E2oBBqEo6BIK3x4wumwqXyfemGOHnkd9Cg,709 +Products/statusmessages/interfaces.py,sha256=H7HhIaeZEXgglohb3Ijr2PAOowC4-mnf27OYXwZwuVA,760 +Products/statusmessages/message.py,sha256=u9DyFD0Bvo76KG0iqa4_Eg0ZC1aJztKi6x8ZvoNCovA,3021 +Products/statusmessages/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 
+Products/statusmessages/tests/__pycache__/__init__.cpython-36.pyc,, +Products/statusmessages/tests/__pycache__/test_adapter.cpython-36.pyc,, +Products/statusmessages/tests/__pycache__/test_doctests.cpython-36.pyc,, +Products/statusmessages/tests/__pycache__/test_encoding.cpython-36.pyc,, +Products/statusmessages/tests/test_adapter.py,sha256=qIVHc8mfT7aGmLbXwJkVfyJE1OP63Z9cQijqG8MxzwU,8050 +Products/statusmessages/tests/test_doctests.py,sha256=d-HrzO4zR3Rz3d8EbehXonj3XhO61ISYdAHhGGj9pjo,264 +Products/statusmessages/tests/test_encoding.py,sha256=2-DmP_Le8xje5963Wftqdo8DV0TJj5kOSS66JBUlbO0,1983 diff --git a/thesisenv/lib/python3.6/site-packages/Products.statusmessages-5.0.4.dist-info/WHEEL b/thesisenv/lib/python3.6/site-packages/Products.statusmessages-5.0.4.dist-info/WHEEL new file mode 100644 index 0000000..1316c41 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/Products.statusmessages-5.0.4.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.31.1) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/thesisenv/lib/python3.6/site-packages/Products.statusmessages-5.0.4.dist-info/namespace_packages.txt b/thesisenv/lib/python3.6/site-packages/Products.statusmessages-5.0.4.dist-info/namespace_packages.txt new file mode 100644 index 0000000..f0ad2e0 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/Products.statusmessages-5.0.4.dist-info/namespace_packages.txt @@ -0,0 +1 @@ +Products diff --git a/thesisenv/lib/python3.6/site-packages/Products.statusmessages-5.0.4.dist-info/top_level.txt b/thesisenv/lib/python3.6/site-packages/Products.statusmessages-5.0.4.dist-info/top_level.txt new file mode 100644 index 0000000..f0ad2e0 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/Products.statusmessages-5.0.4.dist-info/top_level.txt @@ -0,0 +1 @@ +Products diff --git a/thesisenv/lib/python3.6/site-packages/Products/statusmessages/__init__.py 
b/thesisenv/lib/python3.6/site-packages/Products/statusmessages/__init__.py new file mode 100644 index 0000000..0d98752 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/Products/statusmessages/__init__.py @@ -0,0 +1,2 @@ +# -*- coding: utf-8 -*- +STATUSMESSAGEKEY = 'statusmessages' diff --git a/thesisenv/lib/python3.6/site-packages/Products/statusmessages/adapter.py b/thesisenv/lib/python3.6/site-packages/Products/statusmessages/adapter.py new file mode 100644 index 0000000..6d490ad --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/Products/statusmessages/adapter.py @@ -0,0 +1,113 @@ +# -*- coding: utf-8 -*- +from Products.statusmessages import STATUSMESSAGEKEY +from Products.statusmessages.interfaces import IStatusMessage +from Products.statusmessages.message import decode +from Products.statusmessages.message import Message +from zope.annotation.interfaces import IAnnotations +from zope.i18n import translate +from zope.interface import implementer + +import binascii +import logging + + +logger = logging.getLogger('statusmessages') + + +@implementer(IStatusMessage) +class StatusMessage(object): + """Adapter for the BrowserRequest to handle status messages. + + Let's make sure that this implementation actually fulfills the + 'IStatusMessage' API:: + + >>> from zope.interface.verify import verifyClass + >>> verifyClass(IStatusMessage, StatusMessage) + True + """ + + def __init__(self, context): + self.context = context # the context must be the request + + def add(self, text, type=u'info'): + """Add a status message. + """ + context = self.context + text = translate(text, context=context) + annotations = IAnnotations(context) + + old = annotations.get( + STATUSMESSAGEKEY, + context.cookies.get(STATUSMESSAGEKEY), + ) + value = _encodeCookieValue(text, type, old=old) + context.response.setCookie(STATUSMESSAGEKEY, value, path='/') + annotations[STATUSMESSAGEKEY] = value + + def show(self): + """Removes all status messages and returns them for display. 
+ """ + context = self.context + annotations = IAnnotations(context) + value = annotations.get( + STATUSMESSAGEKEY, + context.cookies.get(STATUSMESSAGEKEY), + ) + if value is None: + return [] + value = _decodeCookieValue(value) + + # clear the existing cookie entries, except on responses that don't + # actually render in the browser (really, these shouldn't render + # anything so we shouldn't get to this message, but some templates + # are sloppy). + if self.context.response.getStatus() not in (301, 302, 304): + context.cookies[STATUSMESSAGEKEY] = None + context.response.expireCookie(STATUSMESSAGEKEY, path='/') + annotations[STATUSMESSAGEKEY] = None + + return value + + # BBB + addStatusMessage = add + showStatusMessages = show + + +def _encodeCookieValue(text, type, old=None): + """Encodes text and type to a list of Messages. If there is already some old + existing list, add the new Message at the end but don't add duplicate + messages. + """ + results = [] + message = Message(text, type=type) + + if old is not None: + results = _decodeCookieValue(old) + if message not in results: + results.append(message) + + messages = b''.join([r.encode() for r in results]) + bin_value = binascii.b2a_base64(messages).rstrip() + # remove the stupid b that will lead to values like "b'AYR...'" + return bin_value.decode('utf-8') + + +def _decodeCookieValue(string): + """Decode a cookie value to a list of Messages. 
+ """ + results = [] + # Return nothing if the cookie is marked as deleted + if string == 'deleted': + return results + # Try to decode the cookie value + try: + value = binascii.a2b_base64(string) + while len(value) > 1: # at least 2 bytes of data + message, value = decode(value) + if message is not None: + results.append(message) + except (binascii.Error, UnicodeEncodeError): + logger.exception('Unexpected value in statusmessages cookie') + return [] + + return results diff --git a/thesisenv/lib/python3.6/site-packages/Products/statusmessages/configure.zcml b/thesisenv/lib/python3.6/site-packages/Products/statusmessages/configure.zcml new file mode 100644 index 0000000..5ba0b66 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/Products/statusmessages/configure.zcml @@ -0,0 +1,26 @@ + + + + + + + + + + + + + + + + + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/Products/statusmessages/interfaces.py b/thesisenv/lib/python3.6/site-packages/Products/statusmessages/interfaces.py new file mode 100644 index 0000000..fa19f88 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/Products/statusmessages/interfaces.py @@ -0,0 +1,28 @@ +# -*- coding: utf-8 -*- +from zope.interface import Attribute +from zope.interface import Interface + + +class IMessage(Interface): + """A single status message.""" + + message = Attribute('The text of this message. Usally a Message object.') + + type = Attribute('The type of this message.') + + +class IStatusMessage(Interface): + """An adapter for the BrowserRequest to handle status messages.""" + + def addStatusMessage(text, type=u'info'): + """Add a status message.""" + + def add(text, type=u'info'): + """Add a status message.""" + + def showStatusMessages(): + """Removes all status messages and returns them for display. + """ + def show(): + """Removes all status messages and returns them for display. 
+ """ diff --git a/thesisenv/lib/python3.6/site-packages/Products/statusmessages/message.py b/thesisenv/lib/python3.6/site-packages/Products/statusmessages/message.py new file mode 100644 index 0000000..3c8b592 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/Products/statusmessages/message.py @@ -0,0 +1,110 @@ +# -*- coding: utf-8 -*- +from __future__ import unicode_literals +from Products.statusmessages.interfaces import IMessage +from zope.interface import implementer + +import six +import struct + + +def _utf8(value): + if isinstance(value, six.text_type): + return value.encode('utf-8') + elif isinstance(value, six.binary_type): + return value + return b'' + + +def _unicode(value): + return six.text_type(value, 'utf-8', 'ignore') + + +@implementer(IMessage) +class Message(object): + """A single status message. + + Let's make sure that this implementation actually fulfills the + 'IMessage' API. + + >>> from zope.interface.verify import verifyClass + >>> verifyClass(IMessage, Message) + True + + >>> status = Message(u'this is a test', type=u'info') + >>> status.message == 'this is a test' + True + + >>> status.type == 'info' + True + + It is quite common to use MessageID's as status messages: + + >>> from zope.i18nmessageid import MessageFactory + >>> from zope.i18nmessageid import Message as I18NMessage + >>> msg_factory = MessageFactory('test') + + >>> msg = msg_factory(u'test_message', default=u'Default text') + + >>> status = Message(msg, type=u'warn') + >>> status.type == 'warn' + True + + >>> type(status.message) is I18NMessage + True + + >>> status.message.default == 'Default text' + True + + >>> status.message.domain == u'test' + True + + """ + + def __init__(self, message, type=''): + self.message = message + self.type = type + + def __eq__(self, other): + if not isinstance(other, Message): + return False + if self.message == other.message and self.type == other.type: + return True + return False + + def encode(self): + """ + Encode to a cookie 
friendly format. + + The format consists of a two bytes length header of 11 bits for the + message length and 5 bits for the type length followed by two values. + """ + + if six.PY3: + fmt_tpl = '!H{0}s{1}s' + else: + fmt_tpl = b'!H{0}s{1}s' + message = _utf8(self.message)[:0x3FF] # we can store 2^11 bytes + type_ = _utf8(self.type)[:0x1F] # we can store 2^5 bytes + size = (len(message) << 5) + (len(type_) & 31) # pack into 16 bits + fmt = fmt_tpl.format(len(message), len(type_)) + return struct.pack(fmt, size, message, type_) + + +def decode(value): + """ + Decode messages from a cookie + + We return the decoded message object, and the remainder of the cookie + value as bytes (it can contain further messages). + + We expect at least 2 bytes (size information). + """ + if len(value) >= 2: + size = struct.unpack(b'!H', value[:2])[0] + msize, tsize = (size >> 5, size & 31) + message = Message( + _unicode(value[2:msize + 2]), + _unicode(value[msize + 2:msize + tsize + 2]), + ) + return message, value[msize + tsize + 2:] + return None, b'' diff --git a/thesisenv/lib/python3.6/site-packages/Products/statusmessages/tests/__init__.py b/thesisenv/lib/python3.6/site-packages/Products/statusmessages/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/thesisenv/lib/python3.6/site-packages/Products/statusmessages/tests/test_adapter.py b/thesisenv/lib/python3.6/site-packages/Products/statusmessages/tests/test_adapter.py new file mode 100644 index 0000000..5420ef8 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/Products/statusmessages/tests/test_adapter.py @@ -0,0 +1,308 @@ +# -*- coding: UTF-8 -*- +""" StatusMessage adapter tests. """ +import unittest + + +class TestAdapter(unittest.TestCase): + + def test_directives(self): + """ + Test status messages + + First some boilerplate. 
+ + >>> from zope.component.testing import setUp + >>> setUp() + + >>> import Products.Five + >>> import Products.statusmessages + + >>> from Zope2.App import zcml + >>> zcml.load_config('meta.zcml', Products.Five) + >>> zcml.load_config('configure.zcml', Products.statusmessages) + + Now lets make sure we can actually adapt the request. + + >>> from Products.statusmessages.interfaces import IStatusMessage + >>> status = IStatusMessage(self.app.REQUEST) + >>> IStatusMessage.providedBy(status) + True + + We also need the request to be annotatable: + + >>> from zope.interface import directlyProvides + >>> from zope.annotation.interfaces import IAttributeAnnotatable + >>> directlyProvides(self.app.REQUEST, IAttributeAnnotatable) + + The dummy request we have is a bit limited, so we need a simple method + to fake a real request/response for the cookie handling. Basically it + puts all entries from response.cookies into REQUEST.cookies but shifts + the real values into the right place as browsers would do it. + + >>> def fakePublish(request): + ... cookies = request.response.cookies.copy() + ... new_cookies = {} + ... for key in cookies.keys(): + ... new_cookies[key] = cookies[key]['value'] + ... request.cookies = new_cookies + ... request.response.cookies = {} + + >>> request = self.app.REQUEST + >>> status = IStatusMessage(request) + + Make sure there's no stored message. 
+ + >>> len(status.show()) + 0 + + Add one message + + >>> status.add(u'täst', type=u'info') + + Now check the results + + >>> messages = status.show() + >>> len(messages) + 1 + + >>> messages[0].message + u'täst' + + >>> messages[0].type + u'info' + + Make sure messages are removed + + >>> len(status.show()) + 0 + + Since we accessed the message prior to publishing the page, we must + ensure that the messages have been removed from the cookies + + >>> fakePublish(request) + >>> len(status.show()) + 0 + + Now we repeat the test, only this time we publish the page prior to + retrieving the messages + + Add one message + + >>> status.add(u'täst', type=u'info') + + Publish the request + + >>> fakePublish(request) + + Now check the results + + >>> messages = status.show() + >>> len(messages) + 1 + + >>> messages[0].message + u'täst' + + >>> messages[0].type + u'info' + + Make sure messages are removed + + >>> len(status.show()) + 0 + + Add two messages (without publishing) + + >>> status.add(u'täst', type=u'info') + >>> status.add(u'täst1', u'warn') + + And check the results again + + >>> messages = status.show() + >>> len(messages) + 2 + + >>> test = messages[1] + + >>> test.message + u'täst1' + + >>> test.type + u'warn' + + Make sure messages are removed again + + >>> len(status.show()) + 0 + + Add two messages (with publishing) + + >>> status.add(u'täst', type=u'info') + >>> fakePublish(request) + >>> status.add(u'test1', u'warn') + + And check the results again + + >>> fakePublish(request) + >>> messages = status.show() + >>> len(messages) + 2 + + >>> test = messages[1] + + >>> test.message + u'test1' + + >>> test.type + u'warn' + + Make sure messages are removed again + + >>> len(status.show()) + 0 + + Add two identical messages + + >>> status.add(u'täst', type=u'info') + >>> status.add(u'täst', type=u'info') + + And check the results again + + >>> fakePublish(request) + >>> messages = status.show() + >>> len(messages) + 1 + + >>> test = messages[0] + + >>> 
test.message + u'täst' + + >>> test.type + u'info' + + Make sure messages are removed again + + >>> len(status.show()) + 0 + + Test incredibly long messages: + + >>> status.add(u'm' * 0x400, type=u't' * 0x20) + + And check the results again + + >>> fakePublish(request) + >>> messages = status.show() + >>> len(messages) + 1 + + >>> test = messages[0] + + >>> test.message == u'm' * 0x3FF + True + + >>> test.type == u't' * 0x1F + True + + Messages are stored as base64-ed cookie values, so we must make sure we + create proper header values; all ascii characters, and no newlines: + + >>> status.add(u'täst' * 40, type=u'info') + >>> cookies = [c['value'] for c in request.response.cookies.values()] + >>> cookies = ''.join(cookies) + >>> cookies == six.text_type(cookies).encode('ASCII') + True + >>> '\\n' in cookies + False + + >>> from zope.component.testing import tearDown + >>> tearDown() + """ + + def test_301(self): + """ + Test status messages for 301/302/304 request + + First some boilerplate. + + >>> from zope.component.testing import setUp + >>> setUp() + + >>> import Products.Five + >>> import Products.statusmessages + + >>> from Zope2.App import zcml + >>> zcml.load_config('meta.zcml', Products.Five) + >>> zcml.load_config('configure.zcml', Products.statusmessages) + + >>> from zope.interface import directlyProvides + >>> from zope.annotation.interfaces import IAttributeAnnotatable + >>> directlyProvides(self.app.REQUEST, IAttributeAnnotatable) + + >>> from Products.statusmessages.interfaces import IStatusMessage + + >>> def fakePublish(request, status=200): + ... cookies = request.response.cookies.copy() + ... new_cookies = {} + ... for key in cookies.keys(): + ... new_cookies[key] = cookies[key]['value'] + ... request.cookies = new_cookies + ... request.response.cookies = {} + ... request.response.setStatus(status) + + >>> request = self.app.REQUEST + >>> status = IStatusMessage(request) + + Make sure there's no stored message. 
+ + >>> len(status.show()) + 0 + + Add one message + + >>> status.add(u'täst', type=u'info') + + Publish a redirect response that also happened to call show(). + This could happen if the redirect (unnecessarily) + rendered a template showing the status message, for example. + + >>> fakePublish(request, 302) + >>> messages = status.show() + >>> len(messages) + 1 + + >>> messages[0].message + u'täst' + + >>> messages[0].type + u'info' + + Make sure messages are not removed - we really want them to show the + next time around, when the redirect has completed. + + >>> len(status.show()) + 1 + + Let's now fake redirection. The message should still be there, + but will then be expired. + + >>> fakePublish(request, 200) + >>> messages = status.show() + >>> len(messages) + 1 + + >>> messages[0].message + u'täst' + + >>> messages[0].type + u'info' + + The message should now be gone. + + >>> len(status.show()) + 0 + + >>> from zope.component.testing import tearDown + >>> tearDown() + """ diff --git a/thesisenv/lib/python3.6/site-packages/Products/statusmessages/tests/test_doctests.py b/thesisenv/lib/python3.6/site-packages/Products/statusmessages/tests/test_doctests.py new file mode 100644 index 0000000..face234 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/Products/statusmessages/tests/test_doctests.py @@ -0,0 +1,13 @@ +# -*- coding: utf-8 -*- +from doctest import DocTestSuite +from unittest import TestSuite + + +test_list = ( + DocTestSuite('Products.statusmessages.adapter'), + DocTestSuite('Products.statusmessages.message'), +) + + +def test_suite(): + return TestSuite(test_list) diff --git a/thesisenv/lib/python3.6/site-packages/Products/statusmessages/tests/test_encoding.py b/thesisenv/lib/python3.6/site-packages/Products/statusmessages/tests/test_encoding.py new file mode 100644 index 0000000..1af3325 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/Products/statusmessages/tests/test_encoding.py @@ -0,0 +1,77 @@ +# -*- coding: UTF-8 -*- +from 
__future__ import unicode_literals + +import unittest + + +class TestEncoding(unittest.TestCase): + + def test_encoding_msg_with_type(self): + """Test message encoding: + """ + from Products.statusmessages.message import Message + from Products.statusmessages.message import decode + m = Message('späm', 'eggs') + self.assertEqual( + m.encode(), + b'\x00\xa4sp\xc3\xa4meggs', + ) + self.assertEqual(decode(m.encode())[0], m) + + def test_encoding_msg_without_type(self): + from Products.statusmessages.message import Message + from Products.statusmessages.message import decode + m = Message('späm') + self.assertEqual( + m, + Message('späm'), + ) + self.assertEqual(m.encode(), b'\x00\xa0sp\xc3\xa4m') + self.assertEqual(decode(m.encode())[0], m) + + def test_decoding(self): + """Test message decoding: + """ + from Products.statusmessages.message import decode + + # Craft a wrong value: + m, rem = decode(b'\x01\x84spameggs') + self.assertEqual( + m.message, + 'spameggs', + ) + self.assertEqual( + m.type, + '', + ) + self.assertEqual(rem, b'') + + # Craft another wrong value: + m, rem = decode(b'\x00\x24spameggs') + self.assertEqual( + m.message, + 's', + ) + self.assertEqual( + m.type, + 'pame', + ) + self.assertEqual(rem, b'ggs') + + # And another wrong value: + m, rem = decode(b'\x00spameggs') + self.assertEqual( + m.message, + 'pam', + ) + self.assertEqual( + m.type, + 'eggs', + ) + self.assertEqual(rem, b'') + + # And yet another wrong value: + m, rem = decode('') + + self.assertIs(m, None) + self.assertEqual(rem, b'') diff --git a/thesisenv/lib/python3.6/site-packages/ZConfig-3.3.0.dist-info/INSTALLER b/thesisenv/lib/python3.6/site-packages/ZConfig-3.3.0.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZConfig-3.3.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/thesisenv/lib/python3.6/site-packages/ZConfig-3.3.0.dist-info/LICENSE.txt 
b/thesisenv/lib/python3.6/site-packages/ZConfig-3.3.0.dist-info/LICENSE.txt new file mode 100644 index 0000000..e1f9ad7 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZConfig-3.3.0.dist-info/LICENSE.txt @@ -0,0 +1,44 @@ +Zope Public License (ZPL) Version 2.1 + +A copyright notice accompanies this license document that identifies the +copyright holders. + +This license has been certified as open source. It has also been designated as +GPL compatible by the Free Software Foundation (FSF). + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +1. Redistributions in source code must retain the accompanying copyright +notice, this list of conditions, and the following disclaimer. + +2. Redistributions in binary form must reproduce the accompanying copyright +notice, this list of conditions, and the following disclaimer in the +documentation and/or other materials provided with the distribution. + +3. Names of the copyright holders must not be used to endorse or promote +products derived from this software without prior written permission from the +copyright holders. + +4. The right to distribute this software or to use it for any purpose does not +give you the right to use Servicemarks (sm) or Trademarks (tm) of the +copyright +holders. Use of them is covered by separate agreement with the copyright +holders. + +5. If any files are modified, you must cause the modified files to carry +prominent notices stating that you changed the files and the date of any +change. + +Disclaimer + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS ``AS IS'' AND ANY EXPRESSED +OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES +OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO +EVENT SHALL THE COPYRIGHT HOLDERS BE LIABLE FOR ANY DIRECT, INDIRECT, +INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, +EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/thesisenv/lib/python3.6/site-packages/ZConfig-3.3.0.dist-info/METADATA b/thesisenv/lib/python3.6/site-packages/ZConfig-3.3.0.dist-info/METADATA new file mode 100644 index 0000000..6a696dd --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZConfig-3.3.0.dist-info/METADATA @@ -0,0 +1,479 @@ +Metadata-Version: 2.1 +Name: ZConfig +Version: 3.3.0 +Summary: Structured Configuration Library +Home-page: https://github.com/zopefoundation/ZConfig/ +Author: Fred L. Drake, Jr. +Author-email: fred@fdrake.net +Maintainer: Zope Foundation and Contributors +License: ZPL 2.1 +Keywords: configuration structured simple flexible typed hierarchy logging +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: Intended Audience :: System Administrators +Classifier: License :: OSI Approved :: Zope Public License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy 
+Classifier: Operating System :: OS Independent +Classifier: Topic :: Software Development +Provides-Extra: docs +Requires-Dist: sphinxcontrib-programoutput; extra == 'docs' +Provides-Extra: test +Requires-Dist: docutils; extra == 'test' +Requires-Dist: manuel; extra == 'test' +Requires-Dist: zope.testrunner; extra == 'test' + +ZConfig: Schema-driven configuration +==================================== + +.. image:: https://img.shields.io/pypi/v/ZConfig.svg + :target: https://pypi.python.org/pypi/ZConfig/ + :alt: Latest release + +.. image:: https://img.shields.io/pypi/pyversions/ZConfig.svg + :target: https://pypi.org/project/ZConfig/ + :alt: Supported Python versions + +.. image:: https://travis-ci.org/zopefoundation/ZConfig.svg?branch=master + :target: https://travis-ci.org/zopefoundation/ZConfig + +.. image:: https://coveralls.io/repos/github/zopefoundation/ZConfig/badge.svg?branch=master + :target: https://coveralls.io/github/zopefoundation/ZConfig?branch=master + +.. image:: https://readthedocs.org/projects/zconfig/badge/?version=latest + :target: http://zconfig.readthedocs.org/en/latest/ + :alt: Documentation Status + +ZConfig is a configuration library intended for general use. It +supports a hierarchical schema-driven configuration model that allows +a schema to specify data conversion routines written in Python. +ZConfig's model is very different from the model supported by the +ConfigParser module found in Python's standard library, and is more +suitable to configuration-intensive applications. + +ZConfig schema are written in an XML-based language and are able to +"import" schema components provided by Python packages. Since +components are able to bind to conversion functions provided by Python +code in the package (or elsewhere), configuration objects can be +arbitrarily complex, with values that have been verified against +arbitrary constraints. 
This makes it easy for applications to +separate configuration support from configuration loading even with +configuration data being defined and consumed by a wide range of +separate packages. + +ZConfig is licensed under the Zope Public License, version 2.1. See +the file LICENSE.txt in the distribution for the full license text. + +Reference documentation is available at https://zconfig.readthedocs.io. + +Information on the latest released version of the ZConfig package is +available at + + https://pypi.python.org/pypi/ZConfig/ + +You may either create an RPM and install this, or install directly from +the source distribution. + +There is a mailing list for discussions and questions about ZConfig; +more information on the list is available at + + http://mail.zope.org/mailman/listinfo/zconfig/ + + +Configuring Logging +------------------- + +One common use of ZConfig is to configure the Python logging +framework. This is extremely simple to do as the following example +demonstrates: + + >>> from ZConfig import configureLoggers + >>> configureLoggers(''' + ... + ... level INFO + ... + ... PATH STDOUT + ... format %(levelname)s %(name)s %(message)s + ... + ... + ... ''') + +The above configures the root logger to output messages logged at INFO +or above to the console, as we can see in the following example: + + >>> from logging import getLogger + >>> logger = getLogger() + >>> logger.info('An info message') + INFO root An info message + >>> logger.debug('A debug message') + +A more common configuration would see STDOUT replaced with a path to +the file into which log entries would be written. + +For more information, see the `the documentation `_. + + +Installing from the source distribution +--------------------------------------- + +For a simple installation:: + + python setup.py install + + +To install to a user's home-dir:: + + python setup.py install --home=

+ + +To install to another prefix (for example, /usr/local):: + + python setup.py install --prefix=/usr/local + + +If you need to force the python interpreter to (for example) python2:: + + python2 setup.py install + + +For more information on installing packages, please refer to the +`Python Packaging User Guide `__. + + +========================== +Change History for ZConfig +========================== + +3.3.0 (2018-10-04) +------------------ + +- Drop support for Python 3.3. + +- Add support for Python 3.7. + +- Drop support for 'python setup.py test'. See `issue 38 + `_. + +- Add support for ``example`` in ``section`` and ``multisection``, and + include those examples in generated documentation. See + https://github.com/zopefoundation/ZConfig/pull/5. + +- Fix configuration loaders to decode byte data using UTF-8 instead of + the default encoding (usually ASCII). See `issue 37 + `_. + +3.2.0 (2017-06-22) +------------------ + +- Drop support for Python 2.6 and 3.2 and add support for Python 3.6. + +- Run tests with pypy and pypy3 as well. + +- Host docs at https://zconfig.readthedocs.io + +- BaseLoader is now an abstract class that cannot be instantiated. + +- Allow ``nan``, ``inf`` and ``-inf`` values for floats in + configurations. See + https://github.com/zopefoundation/ZConfig/issues/16. + +- Scripts ``zconfig`` (for schema validation) and + ``zconfig_schema2html`` are ported to Python 3. + +- A new ``ZConfig.sphinx`` `Sphinx extension + `_ + facilitates automatically documenting ZConfig components using their + description and examples in Sphinx documentation. See + https://github.com/zopefoundation/ZConfig/pull/25. + +- Simplify internal schema processing of max and min occurrence + values. See https://github.com/zopefoundation/ZConfig/issues/15. + +- Almost all uses of ``type`` as a parameter name have been replaced + with ``type_`` to avoid shadowing a builtin. 
These were typically + not public APIs and weren't expected to be called with keyword + arguments so there should not be any user-visible changes. See + https://github.com/zopefoundation/ZConfig/issues/17 + +3.1.0 (2015-10-17) +------------------ + +- Add ability to do variable substitution from environment variables using + $() syntax. + +3.0.4 (2014-03-20) +------------------ + +- Added Python 3.4 support. + + +3.0.3 (2013-03-02) +------------------ + +- Added Python 3.2 support. + + +3.0.2 (2013-02-14) +------------------ + +- Fixed ResourceWarning in BaseLoader.openResource(). + + +3.0.1 (2013-02-13) +------------------ + +- Removed an accidentally left `pdb` statement from the code. + +- Fix a bug in Python 3 with the custom string `repr()` function. + + +3.0.0 (2013-02-13) +------------------ + +- Added Python 3.3 support. + +- Dropped Python 2.4 and 2.5 support. + + +2.9.3 (2012-06-25) +------------------ + +- Fixed: port values of 0 weren't allowed. Port 0 is used to request + an ephemeral port. + + +2.9.2 (2012-02-11) +------------------ + +- Adjust test classes to avoid base classes being considered separate + test cases by (at least) the "nose" test runner. + + +2.9.1 (2012-02-11) +------------------ + +- Make FileHandler.reopen thread safe. + + +2.9.0 (2011-03-22) +------------------ + +- Allow identical redefinition of ``%define`` names. +- Added support for IPv6 addresses. + + +2.8.0 (2010-04-13) +------------------ + +- Fix relative path recognition. + https://bugs.launchpad.net/zconfig/+bug/405687 + +- Added SMTP authentication support for email logger on Python 2.6. + + +2.7.1 (2009-06-13) +------------------ + +- Improved documentation + +- Fixed tests failures on windows. + + +2.7.0 (2009-06-11) +------------------ + +- Added a convenience function, ``ZConfig.configureLoggers(text)`` for + configuring loggers. 
+ +- Relaxed the requirement for a logger name in logger sections, + allowing the logger section to be used for both root and non-root + loggers. + + +2.6.1 (2008-12-05) +------------------ + +- Fixed support for schema descriptions that override descriptions from a base + schema. If multiple base schema provide descriptions but the derived schema + does not, the first base mentioned that provides a description wins. + https://bugs.launchpad.net/zconfig/+bug/259475 + +- Fixed compatibility bug with Python 2.5.0. + +- No longer trigger deprecation warnings under Python 2.6. + + +2.6.0 (2008-09-03) +------------------ + +- Added support for file rotation by time by specifying when and + interval, rather than max-size, for log files. + +- Removed dependency on setuptools from the setup.py. + + +2.5.1 (2007-12-24) +------------------ + +- Made it possible to run unit tests via 'python setup.py test' (requires + setuptools on sys.path). + +- Added better error messages to test failure assertions. + + +2.5 (2007-08-31) +------------------------ + +*A note on the version number:* + +Information discovered in the revision control system suggests that some +past revision has been called "2.4", though it is not clear that any +actual release was made with that version number. We're going to skip +revision 2.4 entirely to avoid potential issues with anyone using +something claiming to be ZConfig 2.4, and go straight to version 2.5. + +- Add support for importing schema components from ZIP archives (including + eggs). + +- Added a 'formatter' configuration option in the logging handler sections + to allow specifying a constructor for the formatter. + +- Documented the package: URL scheme that can be used in extending schema. + +- Added support for reopening all log files opened via configurations using + the ZConfig.components.logger package. For Zope, this is usable via the + ``zc.signalhandler`` package. ``zc.signalhandler`` is not required for + ZConfig. 
+ +- Added support for rotating log files internally by size. + +- Added a minimal implementation of schema-less parsing; this is mostly + intended for applications that want to read several fragments of ZConfig + configuration files and assemble a combined configuration. Used in some + ``zc.buildout`` recipes. + +- Converted to using ``zc.buildout`` and the standard test runner from + ``zope.testing``. + +- Added more tests. + + +2.3.1 (2005-08-21) +------------------ + +- Isolated some of the case-normalization code so it will at least be + easier to override. This remains non-trivial. + + +2.3 (2005-05-18) +---------------- + +- Added "inet-binding-address" and "inet-connection-address" to the + set of standard datatypes. These are similar to the "inet-address" + type, but the default hostname is more sensible. The datatype used + should reflect how the value will be used. + +- Alternate rotating logfile handler for Windows, to avoid platform + limitations on renaming open files. Contributed by Sidnei da Silva. + +- For
and , if the name attribute is omitted, + assume name="*", since this is what is used most often. + + +2.2 (2004-04-21) +---------------- + +- More documentation has been written. + +- Added a timedelta datatype function; the input is the same as for + the time-interval datatype, but the resulting value is a + datetime.timedelta object. + +- Make sure keys specified as attributes of the element are + converted by the appropriate key type, and are re-checked for + derived sections. + +- Refactored the ZConfig.components.logger schema components so that a + schema can import just one of the "eventlog" or "logger" sections if + desired. This can be helpful to avoid naming conflicts. + +- Added a reopen() method to the logger factories. + +- Always use an absolute pathname when opening a FileHandler. + +- A fix to the logger 'format' key to allow the %(process)d expansion variable + that the logging package supports. + +- A new timedelta built-in datatype was added. Similar to time-interval + except that it returns a datetime.timedelta object instead. + + +2.1 (2004-04-12) +---------------- + +- Removed compatibility with Python 2.1 and 2.2. + +- Schema components must really be in Python packages; the directory + search has been modified to perform an import to locate the package + rather than incorrectly implementing the search algorithm. + +- The default objects use for section values now provide a method + getSectionAttributes(); this returns a list of all the attributes of + the section object which store configuration-defined data (including + information derived from the schema). + +- Default information can now be included in a schema for and by using . + +- More documentation has been added to discuss schema extension. + +- Support for a Unicode-free Python has been fixed. + +- Derived section types now inherit the datatype of the base type if + no datatype is identified explicitly. 
+ +- Derived section types can now override the keytype instead of always + inheriting from their base type. + +- makes use of the current prefix if the + package name begins witha dot. + +- Added two standard datatypes: dotted-name and dotted-suffix. + +- Added two standard schema components: ZConfig.components.basic and + ZConfig.components.logger. + + +2.0 (2003-10-27) +---------------- + +- Configurations can import additional schema components using a new + "%import" directive; this can be used to integrate 3rd-party + components into an application. + +- Schemas may be extended using a new "extends" attribute on the + element. + +- Better error messages when elements in a schema definition are + improperly nested. + +- The "zconfig" script can now simply verify that a schema definition + is valid, if that's all that's needed. + + +1.0 (2003-03-25) +---------------- + +- Initial release. + + diff --git a/thesisenv/lib/python3.6/site-packages/ZConfig-3.3.0.dist-info/RECORD b/thesisenv/lib/python3.6/site-packages/ZConfig-3.3.0.dist-info/RECORD new file mode 100644 index 0000000..bdf15e6 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZConfig-3.3.0.dist-info/RECORD @@ -0,0 +1,145 @@ +../../../bin/zconfig,sha256=oOQR38jQNziXKnd_0cfszLLBudWiwCDBucVwcb-JSLc,248 +../../../bin/zconfig_schema2html,sha256=vp7zcnbBclWutkoKaGNaRXVMj4kJdloD2lsBNrqi1Gw,250 +ZConfig-3.3.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +ZConfig-3.3.0.dist-info/LICENSE.txt,sha256=PmcdsR32h1FswdtbPWXkqjg-rKPCDOo_r1Og9zNdCjw,2070 +ZConfig-3.3.0.dist-info/METADATA,sha256=sHNteMVs1EpFVNSvJHDBUu5Im-KuQ-4TDSfyCinWqr0,14745 +ZConfig-3.3.0.dist-info/RECORD,, +ZConfig-3.3.0.dist-info/WHEEL,sha256=8T8fxefr_r-A79qbOJ9d_AaEgkpCGmEPHc-gpCq5BRg,110 +ZConfig-3.3.0.dist-info/entry_points.txt,sha256=LaOi7VnYxJr9q5EWc2dMk_lJCx3oflDdv_FQiWkmBVQ,99 +ZConfig-3.3.0.dist-info/top_level.txt,sha256=qKGSSXGHk0nNcFVduhJ3dUglDybP6xrHK3ufvRWTMPI,8 
+ZConfig/__init__.py,sha256=VB0_3Dmy2ISMA9bE39OL1oLqoXT88gvoCM2EufV4apw,7901 +ZConfig/__pycache__/__init__.cpython-36.pyc,, +ZConfig/__pycache__/_compat.cpython-36.pyc,, +ZConfig/__pycache__/_schema_utils.cpython-36.pyc,, +ZConfig/__pycache__/cfgparser.cpython-36.pyc,, +ZConfig/__pycache__/cmdline.cpython-36.pyc,, +ZConfig/__pycache__/datatypes.cpython-36.pyc,, +ZConfig/__pycache__/info.cpython-36.pyc,, +ZConfig/__pycache__/loader.cpython-36.pyc,, +ZConfig/__pycache__/matcher.cpython-36.pyc,, +ZConfig/__pycache__/schema.cpython-36.pyc,, +ZConfig/__pycache__/schema2html.cpython-36.pyc,, +ZConfig/__pycache__/schemaless.cpython-36.pyc,, +ZConfig/__pycache__/sphinx.cpython-36.pyc,, +ZConfig/__pycache__/substitution.cpython-36.pyc,, +ZConfig/__pycache__/url.cpython-36.pyc,, +ZConfig/__pycache__/validator.cpython-36.pyc,, +ZConfig/_compat.py,sha256=hNYcNFk7ISAMcTvK1QffU81YMKLPtlm4_KWmAvniaW4,2683 +ZConfig/_schema_utils.py,sha256=4DF-UmtgH1XJfEmNUbOL2ds8qk1QkRfwurruRwV65vI,10156 +ZConfig/cfgparser.py,sha256=Hct3pqDFMYb2uMvN_GfAEbCFz04MSSn5yWPcTXzwgYo,6577 +ZConfig/cmdline.py,sha256=0EFZBt0PWXV5zB0O6YAUdZu_xbrCj00H7x_VCMYanTw,7670 +ZConfig/components/__init__.py,sha256=Wxwe1Yvh0PsxQGRzt5WMkXtzaWEZpEPOcWx6ZWFr5U8,28 +ZConfig/components/__pycache__/__init__.cpython-36.pyc,, +ZConfig/components/basic/__init__.py,sha256=Wxwe1Yvh0PsxQGRzt5WMkXtzaWEZpEPOcWx6ZWFr5U8,28 +ZConfig/components/basic/__pycache__/__init__.cpython-36.pyc,, +ZConfig/components/basic/__pycache__/mapping.cpython-36.pyc,, +ZConfig/components/basic/component.xml,sha256=AdT2naEAZ83iLl6IIKD_nVZa6nHhAsWDVDCPEUJuszQ,206 +ZConfig/components/basic/mapping.py,sha256=77gkqVKzw60mv_BmR7I1wyddAArG4RnqceeMlFWIGfQ,764 +ZConfig/components/basic/mapping.xml,sha256=9ISmp62x626Y4VJl-Ry78F_wyGE3y1FDp5mvMBIIGkI,945 +ZConfig/components/basic/tests/__init__.py,sha256=Wxwe1Yvh0PsxQGRzt5WMkXtzaWEZpEPOcWx6ZWFr5U8,28 +ZConfig/components/basic/tests/__pycache__/__init__.cpython-36.pyc,, 
+ZConfig/components/basic/tests/__pycache__/test_mapping.cpython-36.pyc,, +ZConfig/components/basic/tests/test_mapping.py,sha256=DdzFCIiSDP372uNnb-4JTZEokghhnYDvLOX29MN0ZLI,2653 +ZConfig/components/logger/__init__.py,sha256=zC97fFrJ5Drj2hUTdoYATIbqFaapIr3CsvPOpqzun4Y,702 +ZConfig/components/logger/__pycache__/__init__.cpython-36.pyc,, +ZConfig/components/logger/__pycache__/datatypes.cpython-36.pyc,, +ZConfig/components/logger/__pycache__/factory.cpython-36.pyc,, +ZConfig/components/logger/__pycache__/handlers.cpython-36.pyc,, +ZConfig/components/logger/__pycache__/logger.cpython-36.pyc,, +ZConfig/components/logger/__pycache__/loghandler.cpython-36.pyc,, +ZConfig/components/logger/abstract.xml,sha256=1Ou38WA8V5SCImCQr9E3Hdmhtp--zXsVWZvrekXjAiM,151 +ZConfig/components/logger/base-logger.xml,sha256=GMGKocaRWK6HcgDKiYsHmVqZjF2F-biR_7uD91nU35M,1801 +ZConfig/components/logger/component.xml,sha256=s7l90AJ9qGn0utWE035Wjnmz8DGCIGbT88QVQdneh6Y,375 +ZConfig/components/logger/datatypes.py,sha256=abkrHnBCkdOVlLVWQaD76zOyWf8Dwn47u94p5CEv-M8,1133 +ZConfig/components/logger/eventlog.xml,sha256=ZrMgTsZ3WS0p3u3G5d1tNEKif40BHR9qZmAKSDOnc6g,478 +ZConfig/components/logger/factory.py,sha256=_8TR_WE-vgUAfgGlD0Zc7fpgTeyz2F49CCi8gp7x5-c,1400 +ZConfig/components/logger/handlers.py,sha256=MWf24bxgf4awSRtMP8xl0nEnPSk4Xdwd3z7YHzxacEQ,7569 +ZConfig/components/logger/handlers.xml,sha256=w6O6m_T8-GJIFjexGP_1zp6ZhALsO-Y2TyuLnPisZPU,4062 +ZConfig/components/logger/logger.py,sha256=77CX9eCM6SJoQUDb4LJbQzPBcEEI4NQ5mLlZinGZ6HY,3510 +ZConfig/components/logger/logger.xml,sha256=GXPVVwVSbia0IzcrKq4PJPPLrhS8tU_24vgCubvoQSs,1460 +ZConfig/components/logger/loghandler.py,sha256=kyNhXpvzpNJ8jwqxQLggHMcdHAJYZp8aLzNfAN3p9cA,5336 +ZConfig/components/logger/tests/__init__.py,sha256=Wxwe1Yvh0PsxQGRzt5WMkXtzaWEZpEPOcWx6ZWFr5U8,28 +ZConfig/components/logger/tests/__pycache__/__init__.cpython-36.pyc,, +ZConfig/components/logger/tests/__pycache__/test_logger.cpython-36.pyc,, 
+ZConfig/components/logger/tests/test_logger.py,sha256=zildHCKgjKsCi_AwP3nf3mz2hoIxbfpRAUTGY8fJ7XA,31209 +ZConfig/datatypes.py,sha256=yk5BImUwfY4ATXXKIz930ooAXtQsQ_fHkG6QXNbq9D4,17915 +ZConfig/info.py,sha256=fbzDx2oZ_3OrPyqirsV7y7iuPgvzfEybyom-U1CC9YI,17757 +ZConfig/loader.py,sha256=gO5egt4hh3MtOwOlLXXmqN-7rQaPsX9sYvDrnA26BS8,18758 +ZConfig/matcher.py,sha256=sSDGtEvM88PRuuQhOz3-jAhVovAasUoMhHm9p0fAF5w,11175 +ZConfig/schema.py,sha256=QR_RQK4lUumDsr5m1J29o7D8fUAovImerhgKYiGcAKo,22590 +ZConfig/schema2html.py,sha256=57zJSvZXAN0auHaiD7Jw1lR8RnKnqlAbBPtAoMYGHts,4548 +ZConfig/schemaless.py,sha256=uV09k--cBsCFhtXEWsPlxmC3wpyKzbwCD6WBLI6MEIE,3168 +ZConfig/schemaless.txt,sha256=0_WUee4vJkeeAagw8frPS5g4KVQc7vRM5SumByJud_M,6913 +ZConfig/sphinx.py,sha256=RKXcztiD3-0fbYqG8WC_MhZEu6g0mqFFwi-j04ee5js,6049 +ZConfig/substitution.py,sha256=kcygm2aR15a9bqPAP6vDGbfxM-x54MitTh5ZCIAlL5s,3762 +ZConfig/tests/__init__.py,sha256=FEjJCpqATtNIgpxxSyUs2EeA3PzISGkRNLAmOPNm53A,762 +ZConfig/tests/__pycache__/__init__.cpython-36.pyc,, +ZConfig/tests/__pycache__/support.cpython-36.pyc,, +ZConfig/tests/__pycache__/test_cfgimports.cpython-36.pyc,, +ZConfig/tests/__pycache__/test_cmdline.cpython-36.pyc,, +ZConfig/tests/__pycache__/test_config.cpython-36.pyc,, +ZConfig/tests/__pycache__/test_cookbook.cpython-36.pyc,, +ZConfig/tests/__pycache__/test_datatypes.cpython-36.pyc,, +ZConfig/tests/__pycache__/test_info.cpython-36.pyc,, +ZConfig/tests/__pycache__/test_loader.cpython-36.pyc,, +ZConfig/tests/__pycache__/test_matcher.cpython-36.pyc,, +ZConfig/tests/__pycache__/test_readme.cpython-36.pyc,, +ZConfig/tests/__pycache__/test_schema.cpython-36.pyc,, +ZConfig/tests/__pycache__/test_schema2html.cpython-36.pyc,, +ZConfig/tests/__pycache__/test_schemaless.cpython-36.pyc,, +ZConfig/tests/__pycache__/test_subst.cpython-36.pyc,, +ZConfig/tests/__pycache__/test_validator.cpython-36.pyc,, +ZConfig/tests/bad-component.xml,sha256=dOHdskqyCKBDg0Fwkd8fONrPL9ksarEJzmMo-YhNLYk,41 
+ZConfig/tests/bad-component2.xml,sha256=M6pseuhNTeUplvv1FNcubbwJ0Ckrnmy80vDT_4zvWQI,122 +ZConfig/tests/foosample.zip,sha256=W0z1MjDaOPnErJBAdzc49NsBl7ky7DNvoTI1ahO6kQU,1581 +ZConfig/tests/input/base-datatype1.xml,sha256=1MkpejZXG86y6zzp_AlcAIiI9R5OZm_KICv_5DlJ2vY,98 +ZConfig/tests/input/base-datatype2.xml,sha256=ZI2CZBYGXQuQV_NL-sWDOwU26XmARgAcvuavME437Ig,98 +ZConfig/tests/input/base-keytype1.xml,sha256=FJ5Tc8rkdL6m0OBbTcDa-C15-TyRxC-Oa5Pp6JH0gtQ,70 +ZConfig/tests/input/base-keytype2.xml,sha256=a_TQaBAcQoCEbFKf5ExzXXaiUnjSGQFCGMpp7czd-Dc,96 +ZConfig/tests/input/base.xml,sha256=MBi3f1w74aMURTj3-X6_XTT8bV5r7nMmN8pFyUx_63I,161 +ZConfig/tests/input/include.conf,sha256=c6kmp6OHSMkMGnui5Pc0soY2zlR1g8tQn6V2JjNS6Dg,56 +ZConfig/tests/input/inner.conf,sha256=Tq3RZtSB5ATZJRRsF3Nu5LWjhVayjZMQw58nn_S9d-E,42 +ZConfig/tests/input/library.xml,sha256=kUANBd45kCsb30QM6whhTFAk79qNbl2roW6C_7lCwbc,157 +ZConfig/tests/input/logger.xml,sha256=YiKmxH50CAe25TOvOGrZ-pd6SDkpHSmYutoqPAAlCC0,404 +ZConfig/tests/input/non-ascii.txt,sha256=x2yK8edLhMf_FE3xKx1RkaV4Hve5pvASSaa965UGYTc,76 +ZConfig/tests/input/outer.conf,sha256=Kn3aEwWDZnPSZhdAJXHhEo7XCiKJOSqfA3KaCe_2OVA,62 +ZConfig/tests/input/simple.conf,sha256=vv7oqCp2WhPlHnlLaVWBvAMprDctIrWZ4inNdzhqMkc,440 +ZConfig/tests/input/simple.xml,sha256=iWUID0Zjhsyoq2-FioEktegSw-zNroyajHbu22wgpDA,951 +ZConfig/tests/input/simplesections.conf,sha256=iyL26VBFXO9OrnH_QRatNH3TJejgqjisa55rGmMQvjQ,409 +ZConfig/tests/input/simplesections.xml,sha256=MPEL0ZufBk8jUC6oVEVosVAToGxHZjrUEk_lbag4D7U,1884 +ZConfig/tests/library/README.txt,sha256=imy9vWA8MeiqioL9Sqp1_CXQsJo2bdeYOOpFqzWFdIs,88 +ZConfig/tests/library/__init__.py,sha256=nFa4sn7SGMgCxVss13M2_uzaJEsA3KTIM8_j8TsX_Tw,23 +ZConfig/tests/library/__pycache__/__init__.cpython-36.pyc,, +ZConfig/tests/library/thing/__init__.py,sha256=-lFoYNBesgQYyyX0YATe6LY3Mr1YKuGTN7sY8raLr8E,839 +ZConfig/tests/library/thing/__pycache__/__init__.cpython-36.pyc,, 
+ZConfig/tests/library/thing/component.xml,sha256=DgmcBO3b5AOj-25fZweVY5UmjjFaszVJBpfZ95EfWkc,344 +ZConfig/tests/library/thing/extras/extras.xml,sha256=DrugmfPFEN61aNU1G-Agte0Rlq5nTHheibkW0ZFE91o,104 +ZConfig/tests/library/widget/__init__.py,sha256=nFa4sn7SGMgCxVss13M2_uzaJEsA3KTIM8_j8TsX_Tw,23 +ZConfig/tests/library/widget/__pycache__/__init__.cpython-36.pyc,, +ZConfig/tests/library/widget/component.xml,sha256=xKS80wQPXXIzx7teDoNM-ACUXl7vLtt_V24AujVRqRM,256 +ZConfig/tests/library/widget/extra.xml,sha256=4BmV5r18l32yrRipFqMWWhZdODkwS6mYm7YJvMXaGrY,103 +ZConfig/tests/support.py,sha256=0ccli3j33NjWs_mqaT8f4DV3UGB2ajwF8o_SDbZKbug,3183 +ZConfig/tests/test_cfgimports.py,sha256=ZTv1IVfAYOxhhgrcZojAA32520pR0r5KKtYDQy8CMro,2218 +ZConfig/tests/test_cmdline.py,sha256=50UAy3ZxCDWdcKP2TwnftXy3_Q6mZlqkQE9EPhVkmGc,8202 +ZConfig/tests/test_config.py,sha256=3z_sTTUw9gruE5ltfafQAAYUGhRAAxf-_4n7OOWnJf8,10113 +ZConfig/tests/test_cookbook.py,sha256=1bNWR3r2mV1wztjCa0iujKqlsu6LNkrEF22puvneA9E,2428 +ZConfig/tests/test_datatypes.py,sha256=_3S7T0ja-EwDFp7bdpQ_dZF358iOyL5pGgh_Ojf46sw,16234 +ZConfig/tests/test_info.py,sha256=wRglSnY3A460FT96r4Jy4jRKTBFVKNPQ-eYiA_7wb4s,7016 +ZConfig/tests/test_loader.py,sha256=pAfX8aeANeLYBskxyNA1H88XdgSRR6GvG2HC7xLTvdI,16637 +ZConfig/tests/test_matcher.py,sha256=OMUaWmqHm4a2RKvxU8ggr3n-PPmBqP7_fKMrmU4_kLw,4218 +ZConfig/tests/test_readme.py,sha256=mvw0j1Ivg-Lh1v-eediPmq7R2Nk-W49DELkn0vmlfLY,2696 +ZConfig/tests/test_schema.py,sha256=1jQW_uMPGINhMKa2-R64qNQ1vwvewY1DGlHc-NyQMi8,50577 +ZConfig/tests/test_schema2html.py,sha256=AGfXtcmn4I_EhMWF1ThRWv8R4qE-182B4Yhz5HEtZx8,7252 +ZConfig/tests/test_schemaless.py,sha256=UkBvTyICD5YE6J_K7aLQZa1IAfj7Wtnpq8-yZLcZwXI,1274 +ZConfig/tests/test_subst.py,sha256=E33zNBck-cbGBgVneVkbSq6-X6-sx7oj5Pf7NWrIiik,3793 +ZConfig/tests/test_validator.py,sha256=rLpCmNfSbSa_UKJ7TZQH-Gj46PFxY6Ta2TvvKOczV2I,1919 +ZConfig/tests/zipsource/README.txt,sha256=v-5zXsOYnW_nHI_QDxsjIk7c5K2HvbuZTKfNOKm03GQ,108 
+ZConfig/tests/zipsource/foo/__init__.py,sha256=h61ciVTdVvu8oEUXv4dHf_Tc5XUXDH3RKB1-8fQhSsg,38 +ZConfig/tests/zipsource/foo/__pycache__/__init__.cpython-36.pyc,, +ZConfig/tests/zipsource/foo/sample/__init__.py,sha256=h61ciVTdVvu8oEUXv4dHf_Tc5XUXDH3RKB1-8fQhSsg,38 +ZConfig/tests/zipsource/foo/sample/__pycache__/__init__.cpython-36.pyc,, +ZConfig/tests/zipsource/foo/sample/__pycache__/datatypes.cpython-36.pyc,, +ZConfig/tests/zipsource/foo/sample/component.xml,sha256=h1pLsvByAVbkjDgKCW2pKBeRxbfYnBxPEC9yN5ousEw,234 +ZConfig/tests/zipsource/foo/sample/datatypes.py,sha256=vay-ORxRWftjDm7b3TNM4pjBdkuXl55SFfOxzJ1kBok,124 +ZConfig/url.py,sha256=PDzkxCutT__YywmkyzeXKkty37g9wb1bfP5PTmjDZqw,1896 +ZConfig/validator.py,sha256=4j3uHS8cksEebJxJk_RqxAjBg3oY41alhUQsA5EtC8w,2205 diff --git a/thesisenv/lib/python3.6/site-packages/ZConfig-3.3.0.dist-info/WHEEL b/thesisenv/lib/python3.6/site-packages/ZConfig-3.3.0.dist-info/WHEEL new file mode 100644 index 0000000..1001235 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZConfig-3.3.0.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.32.1) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/thesisenv/lib/python3.6/site-packages/ZConfig-3.3.0.dist-info/entry_points.txt b/thesisenv/lib/python3.6/site-packages/ZConfig-3.3.0.dist-info/entry_points.txt new file mode 100644 index 0000000..145335a --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZConfig-3.3.0.dist-info/entry_points.txt @@ -0,0 +1,4 @@ +[console_scripts] +zconfig = ZConfig.validator:main +zconfig_schema2html = ZConfig.schema2html:main + diff --git a/thesisenv/lib/python3.6/site-packages/ZConfig-3.3.0.dist-info/top_level.txt b/thesisenv/lib/python3.6/site-packages/ZConfig-3.3.0.dist-info/top_level.txt new file mode 100644 index 0000000..cf34985 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZConfig-3.3.0.dist-info/top_level.txt @@ -0,0 +1 @@ +ZConfig diff --git 
a/thesisenv/lib/python3.6/site-packages/ZConfig/__init__.py b/thesisenv/lib/python3.6/site-packages/ZConfig/__init__.py new file mode 100644 index 0000000..deba14e --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZConfig/__init__.py @@ -0,0 +1,225 @@ +############################################################################## +# +# Copyright (c) 2002, 2003 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Structured, schema-driven configuration library. + +ZConfig is a configuration library intended for general use. It +supports a hierarchical schema-driven configuration model that allows +a schema to specify data conversion routines written in Python. +ZConfig's model is very different from the model supported by the +ConfigParser module found in Python's standard library, and is more +suitable to configuration-intensive applications. + +ZConfig schema are written in an XML-based language and are able to +``import`` schema components provided by Python packages. Since +components are able to bind to conversion functions provided by Python +code in the package (or elsewhere), configuration objects can be +arbitrarily complex, with values that have been verified against +arbitrary constraints. This makes it easy for applications to +separate configuration support from configuration loading even with +configuration data being defined and consumed by a wide range of +separate packages. 
+ +""" +__docformat__ = "reStructuredText" + +version_info = (3, 0) +__version__ = ".".join([str(n) for n in version_info]) + +from ZConfig.loader import loadConfig, loadConfigFile +from ZConfig.loader import loadSchema, loadSchemaFile + +from ZConfig._compat import TextIO + + +class ConfigurationError(Exception): + """Base class for exceptions specific to the :mod:`ZConfig` package. + + All instances provide a ``message`` attribute that describes + the specific error, and a ``url`` attribute that gives the URL + of the resource the error was located in, or ``None``. + """ + + + # The 'message' attribute was deprecated for BaseException with + # Python 2.6; here we create descriptor properties to continue using it + def __set_message(self, v): + self.__dict__['message'] = v + + def __get_message(self): + return self.__dict__['message'] + + def __del_message(self): + del self.__dict__['message'] + + message = property(__get_message, __set_message, __del_message) + + def __init__(self, msg, url=None): + self.message = msg + self.url = url + Exception.__init__(self, msg) + + def __str__(self): + return self.message + + +class _ParseError(ConfigurationError): + def __init__(self, msg, url, lineno, colno=None): + self.lineno = lineno + self.colno = colno + ConfigurationError.__init__(self, msg, url) + + def __str__(self): + s = self.message + if self.url: + s += "\n(" + elif (self.lineno, self.colno) != (None, None): + s += " (" + if self.lineno: + s += "line %d" % self.lineno + if self.colno is not None: + s += ", column %d" % self.colno + if self.url: + s += " in %s)" % self.url + else: + s += ")" + elif self.url: + s += self.url + ")" + return s + + +class SchemaError(_ParseError): + """Raised when a schema contains an error. + + This exception type provides the attributes ``url``, ``lineno``, + and ``colno``, which provide the source URL, the line number, and + the column number at which the error was detected. These attributes + may be ``None`` in some cases. 
+ """ + + def __init__(self, msg, url=None, lineno=None, colno=None): + _ParseError.__init__(self, msg, url, lineno, colno) + + +class SchemaResourceError(SchemaError): + """Raised when there's an error locating a resource required by the + schema. + + Instances of this exception class add the attributes ``filename``, + ``package``, and ``path``, which hold the filename searched for + within the package being loaded, the name of the package, and the + ``__path__`` attribute of the package itself (or ``None`` if it + isn't a package or could not be imported). + """ + + def __init__(self, msg, url=None, lineno=None, colno=None, + path=None, package=None, filename=None): + self.filename = filename + self.package = package + if path is not None: + path = path[:] + self.path = path + SchemaError.__init__(self, msg, url, lineno, colno) + + def __str__(self): + s = SchemaError.__str__(self) + if self.package is not None: + s += "\n Package name: " + repr(self.package) + if self.filename is not None: + s += "\n File name: " + repr(self.filename) + if self.package is not None: + s += "\n Package path: " + repr(self.path) + return s + + +class ConfigurationSyntaxError(_ParseError): + """Exception raised when a configuration source does not conform to + the allowed syntax. + + In addition to the ``message`` and ``url`` attributes, exceptions + of this type offer the ``lineno`` attribute, which provides the + line number at which the error was detected. + """ + + +class DataConversionError(ConfigurationError, ValueError): + """Raised when a data type conversion fails with :exc:`ValueError`. + + This exception is a subclass of both :exc:`ConfigurationError` and + :exc:`ValueError`. The :func:`str` of the exception provides the + explanation from the original :exc:`ValueError`, and the line + number and URL of the value which provoked the error. 
The + following additional attributes are provided: + + ``colno`` + column number at which the value starts, or ``None`` + ``exception`` + the original :exc:`ValueError` instance + ``lineno`` + line number on which the value starts + ``message`` + :func:`str` returned by the original :exc:`ValueError` + ``value`` + original value passed to the conversion function + ``url`` + URL of the resource providing the value text + """ + + def __init__(self, exception, value, position): + ConfigurationError.__init__(self, str(exception)) + self.exception = exception + self.value = value + self.lineno, self.colno, self.url = position + + def __str__(self): + s = "%s (line %s" % (self.message, self.lineno) + if self.colno is not None: + s += ", %s" % self.colno + if self.url: + s += ", in %s)" % self.url + else: + s += ")" + return s + + +class SubstitutionSyntaxError(ConfigurationError): + """Raised when interpolation source text contains syntactical errors.""" + + +class SubstitutionReplacementError(ConfigurationSyntaxError, LookupError): + """Raised when the source text contains references to names which are + not defined in *mapping*. + + The attributes ``source`` and ``name`` provide the complete source + text and the name (converted to lower case) for which no replacement + is defined. 
+ """ + + def __init__(self, source, name, url=None, lineno=None): + self.source = source + self.name = name + ConfigurationSyntaxError.__init__( + self, "no replacement for " + repr(name), url, lineno) + + +def configureLoggers(text): + """Configure one or more loggers from configuration text.""" + schema = loadSchemaFile(TextIO(""" + + + + + """)) + + for factory in loadConfigFile(schema, TextIO(text))[0].loggers: + factory() diff --git a/thesisenv/lib/python3.6/site-packages/ZConfig/_compat.py b/thesisenv/lib/python3.6/site-packages/ZConfig/_compat.py new file mode 100644 index 0000000..c0068fd --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZConfig/_compat.py @@ -0,0 +1,101 @@ +############################################################################## +# +# Copyright (c) 2016 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## + +import sys + +PY3 = sys.version_info[0] >= 3 + +# Native string object IO +if str is not bytes: + from io import StringIO as NStringIO + string_types = str +else: + # Python 2 + from io import BytesIO as NStringIO + string_types = str, unicode + +NStringIO = NStringIO + +from io import StringIO +from io import BytesIO + +def TextIO(text): + "Return StringIO or BytesIO as appropriate" + return BytesIO(text) if isinstance(text, bytes) else StringIO(text) + +try: + import urllib2 +except ImportError: + # Python 3 support. 
+ import urllib.request as urllib2 + +urllib2 = urllib2 + +try: + from urllib import pathname2url +except ImportError: + # Python 3 support. + from urllib.request import pathname2url + +pathname2url = pathname2url + +try: + import urlparse as urlparse +except ImportError: + # Python 3 support + import urllib.parse as urlparse + +urlparse = urlparse + +if PY3: # pragma: no cover + import builtins + exec_ = getattr(builtins, "exec") + text_type = str + binary_type = bytes + maxsize = sys.maxsize + + def reraise(tp, value, tb=None): #pragma NO COVER + if value.__traceback__ is not tb: + raise value.with_traceback(tb) + raise value + +else: # pragma: no cover + text_type = unicode + binary_type = bytes + maxsize = sys.maxint + + def exec_(code, globs=None, locs=None): #pragma NO COVER + """Execute code in a namespace.""" + if globs is None: + frame = sys._getframe(1) + globs = frame.f_globals + if locs is None: + locs = frame.f_locals + del frame + elif locs is None: + locs = globs + exec("""exec code in globs, locs""") + + exec_("""def reraise(tp, value, tb=None): + raise tp, value, tb +""") + + +def raise_with_same_tb(exception): + "Raise an exception having the current traceback (if there is one)" + reraise(type(exception), exception, sys.exc_info()[2]) + +import abc +# workaround the metaclass diff in Py2/Py3 +AbstractBaseClass = abc.ABCMeta('AbstractBaseClass', (object,), {}) diff --git a/thesisenv/lib/python3.6/site-packages/ZConfig/_schema_utils.py b/thesisenv/lib/python3.6/site-packages/ZConfig/_schema_utils.py new file mode 100644 index 0000000..ec3b707 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZConfig/_schema_utils.py @@ -0,0 +1,314 @@ +############################################################################## +# +# Copyright (c) 2017 Zope Corporation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). 
A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +from __future__ import print_function + +from abc import abstractmethod +import argparse +import itertools +import sys +import textwrap + +try: + from itertools import ifilterfalse + from itertools import ifilter +except ImportError: + # Py3 + from itertools import filterfalse as ifilterfalse + ifilter = filter + +import ZConfig.loader + +from ZConfig._compat import AbstractBaseClass + +from ZConfig.datatypes import null_conversion +from ZConfig.info import SectionType +from ZConfig.info import SectionInfo +from ZConfig.info import ValueInfo +from ZConfig.info import MultiKeyInfo +from ZConfig.info import AbstractType + + +class _VisitorBuilder(object): + + def __init__(self): + self.visitors = [] + + def __call__(self, Type): + def dec(func): + self.visitors.append((Type, func)) + return func + return dec + +MARKER = object() + +class AbstractSchemaFormatter(AbstractBaseClass): + + def __init__(self, schema, stream=None): + self.stream = stream or sys.stdout + self._dt = schema.registry.find_name + + def write(self, *args): + print(*args, file=self.stream) + + @abstractmethod + def esc(self, x): + "Escape blocks of text if needed" + + def _dedent(self, text): + # dedent the text to avoid producing unwanted + # definition lists. The XML parser strips leading whitespace from + # the first line, but preserves it for subsequent lines, so for dedent + # to work we have to ignore that first line. 
+ texts = text.split("\n") + if len(texts) > 1: + trail = textwrap.dedent('\n'.join(texts[1:])) + text = texts[0] + '\n' + trail + return text + + @abstractmethod + def item_list(self): + "Context manager for listing description items" + + def _describing(self, description, after): + if description is not MARKER: + with self.described_as(): + self.description(description) + if after: + after() + + @abstractmethod + def describing(self, description=MARKER, after=None): + "description term, optional body" + + def describing_name(self, concrete_name, + description=MARKER, datatype=None, + **kwargs): + with self.describing(description): + self.concrete_name(concrete_name) + self.datatype(datatype) + + for k, v in sorted(kwargs.items()): + if v: + self.write(self.esc("(%s: %s)" % (k, v))) + + def description(self, description): + if description: + self.write(self.esc(description)) + + example = description + + @abstractmethod + def described_as(self): + "Description body context manager" + + @abstractmethod + def abstract_name(self, name): + "Abstract name" + + @abstractmethod + def concrete_name(self, *name): + "Concrete name" + + @abstractmethod + def concrete_section_name(self, *name): + "Name of a section a user can type in a config" + + def datatype(self, datatype): + self.write("(%s)" % self._dt(datatype)) + + @abstractmethod + def body(self): + "Context manager for the whole document" + + +class AbstractSchemaPrinter(AbstractBaseClass): + + + def __init__(self, schema, stream=None, allowed_names=(), excluded_names=()): + self.schema = schema + stream = stream or sys.stdout + self._explained = set() + self._seen_typenames = set() + self.fmt = self._schema_formatter(schema, stream) + + + def _make_predicate(names): + names = {x.lower() for x in names} + def predicate(name_info): + name, _ = name_info + return name and name.lower() in names + return predicate + + def _make_filter(names, filt): + iter_all = self._iter_schema_items + pred = _make_predicate(names) + 
def it(): + return filt(pred, iter_all()) + return it + + if allowed_names: + self._iter_schema_items = _make_filter(allowed_names, ifilter) + + if excluded_names: + excluded_names = {x.lower() for x in excluded_names} + self._iter_schema_items = _make_filter(excluded_names, ifilterfalse) + self._included = lambda st: st.name not in excluded_names + + @abstractmethod + def _schema_formatter(self, schema, stream): + "Return a formatter" + + def _included(self, st): + return True + + def _explain(self, st): + if st.name in self._explained: # pragma: no cover + return + + self._explained.add(st.name) + + self.fmt.description(st.description) + if not self._included(st): + return + + self.fmt.example(getattr(st, 'example', None)) + + for sub in st.getsubtypenames(): + with self.fmt.item_list(): + self.visit(None, st.getsubtype(sub)) + + def _iter_schema_items(self): + def everything(): + return itertools.chain(self.schema.itertypes(), + self.schema) + # The abstract types tend to be the most important. Since + # we only document a concrete type the first time we find it, + # and we can find extensions of abstract types beneath + # the abstract type which is itself buried under a concrete section, + # all the different permutations would be only documented once under + # that section. By exposing these first, they get documented at the top-level, + # and each concrete section that uses the abstract type gets a reference + # to it. + + def abstract_sections(base): + for name, info in base: + if isinstance(info, SectionInfo): + if info.sectiontype.isabstract(): + yield name, info + + # XXX: This isn't catching everything. Witness the + # relstorage component. 
+ elif isinstance(info, SectionType): + for x in abstract_sections(info): + yield x + return itertools.chain(abstract_sections(everything()), everything()) + + def printSchema(self): + # side-effect of building may be printing + self.buildSchema() + + def buildSchema(self): + seen = set() # prevent duplicates at the top-level + # as we find multiple abstract types + with self.fmt.body(): + with self.fmt.item_list(): + for name, info in self._iter_schema_items(): + if info in seen: + continue + seen.add(info) + self.visit(name, info) + + TypeVisitor = _VisitorBuilder() + visitors = TypeVisitor.visitors + + def visit(self, name, info): + for t, f in self.visitors: + if isinstance(info, t): + f(self, name, info) + break + else: + self._visit_default(name, info) + + @TypeVisitor(SectionType) + def _visit_SectionType(self, name, info): + if info.name in self._seen_typenames: + return + self._seen_typenames.add(info.name) + with self.fmt.describing(): + if info.datatype is not null_conversion: + self.fmt.concrete_section_name(info.name) + else: + self.fmt.abstract_name(info.name) + self.fmt.datatype(info.datatype) + + with self.fmt.described_as(): + self.fmt.description(info.description) + self.fmt.example(info.example) + + with self.fmt.item_list(): + for sub in info: + self.visit(*sub) # pragma: no cover + + + @TypeVisitor(SectionInfo) + def _visit_SectionInfo(self, name, info): + st = info.sectiontype + if st.isabstract(): + with self.fmt.describing(info.description, lambda: self._explain(st)): + self.fmt.abstract_name(st.name) + self.fmt.concrete_name(info.name) + + else: + with self.fmt.describing(): + self.fmt.concrete_section_name(info.attribute, info.name) + self.fmt.datatype(info.datatype) + + with self.fmt.described_as(): + with self.fmt.item_list(): + for sub in info.sectiontype: + self.visit(*sub) + + self.fmt.example(info.example) + + @TypeVisitor(AbstractType) + def _visit_AbstractType(self, name, info): + with self.fmt.describing(info.description, lambda: 
self._explain(info)): + self.fmt.abstract_name(info.name) + + def _visit_default(self, name, info): + # KeyInfo or MultiKeyInfo + default = info.getdefault() + if isinstance(default, ValueInfo): + default = default.value + + name = info.name + if isinstance(info, MultiKeyInfo): + name = name + " (*)" + self.fmt.describing_name(name, info.description, info.datatype, + default=default, metadefault=info.metadefault) + + del TypeVisitor + + +def load_schema(schema, package, package_file): + if not package: + schema_reader = argparse.FileType('r')(schema) + else: + schema_template = "" % ( + schema, package_file or 'component.xml') + from ZConfig._compat import TextIO + schema_reader = TextIO(schema_template) + + schema = ZConfig.loader.loadSchemaFile(schema_reader) + return schema diff --git a/thesisenv/lib/python3.6/site-packages/ZConfig/cfgparser.py b/thesisenv/lib/python3.6/site-packages/ZConfig/cfgparser.py new file mode 100644 index 0000000..d9337f7 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZConfig/cfgparser.py @@ -0,0 +1,192 @@ +############################################################################## +# +# Copyright (c) 2002, 2003 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. 
+# +############################################################################## +"""Configuration parser.""" + +import ZConfig +import ZConfig.url + +from ZConfig.substitution import isname, substitute +from ZConfig._compat import raise_with_same_tb + +class ZConfigParser(object): + + __slots__ = ('resource', 'context', 'lineno', + 'stack', 'defines', 'file', 'url') + + def __init__(self, resource, context, defines=None): + self.resource = resource + self.context = context + self.file = resource.file + self.url = resource.url + self.lineno = 0 + self.stack = [] # [(type, name, prevmatcher), ...] + if defines is None: + defines = {} + self.defines = defines + + def nextline(self): + line = self.file.readline() + if line: + self.lineno += 1 + return False, line.strip() + return True, None + + def parse(self, section): + done, line = self.nextline() + while not done: + if line[:1] in ("", "#"): + # blank line or comment + pass + + elif line[:2] == "": + self.error("malformed section end") + section = self.end_section(section, line[2:-1]) + + elif line[0] == "<": + # section start + if line[-1] != ">": + self.error("malformed section start") + section = self.start_section(section, line[1:-1]) + + elif line[0] == "%": + self.handle_directive(section, line[1:]) + + else: + self.handle_key_value(section, line) + + done, line = self.nextline() + + if self.stack: + self.error("unclosed sections not allowed") + + def start_section(self, section, rest): + isempty = rest[-1:] == "/" + if isempty: + rest = rest[:-1] + text = rest.rstrip() + # parse section start stuff here + m = _section_start_rx.match(text) + if not m: + self.error("malformed section header") + type_, name = m.group('type', 'name') + type_ = self._normalize_case(type_) + if name: + name = self._normalize_case(name) + try: + newsect = self.context.startSection(section, type_, name) + except ZConfig.ConfigurationError as e: + self.error(e.message) + + if isempty: + self.context.endSection(section, type_, 
name, newsect) + return section + + self.stack.append((type_, name, section)) + return newsect + + def end_section(self, section, rest): + if not self.stack: + self.error("unexpected section end") + type_ = self._normalize_case(rest.rstrip()) + opentype, name, prevsection = self.stack.pop() + if type_ != opentype: + self.error("unbalanced section end") + try: + self.context.endSection( + prevsection, type_, name, section) + except ZConfig.ConfigurationError as e: + self.error(e.args[0]) + return prevsection + + def handle_key_value(self, section, rest): + m = _keyvalue_rx.match(rest) + if not m: + self.error("malformed configuration data") + key, value = m.group('key', 'value') + if not value: + value = '' + else: + value = self.replace(value) + try: + section.addValue(key, value, (self.lineno, None, self.url)) + except ZConfig.ConfigurationError as e: + self.error(e.args[0]) + + def handle_directive(self, section, rest): + m = _keyvalue_rx.match(rest) + if not m: + self.error("missing or unrecognized directive") + name, arg = m.group('key', 'value') + if name not in ("define", "import", "include"): + self.error("unknown directive: " + repr(name)) + if not arg: + self.error("missing argument to %%%s directive" % name) + + getattr(self, 'handle_' + name)(section, arg) + + def handle_import(self, section, rest): + pkgname = self.replace(rest.strip()) + self.context.importSchemaComponent(pkgname) + + def handle_include(self, section, rest): + rest = self.replace(rest.strip()) + newurl = ZConfig.url.urljoin(self.url, rest) + self.context.includeConfiguration(section, newurl, self.defines) + + def handle_define(self, section, rest): + parts = rest.split(None, 1) + defname = self._normalize_case(parts[0]) + defvalue = '' + if len(parts) == 2: + defvalue = parts[1] + if defname in self.defines: + if self.defines[defname] != defvalue: + self.error("cannot redefine " + repr(defname)) + if not isname(defname): + self.error("not a substitution legal name: " + repr(defname)) + 
self.defines[defname] = self.replace(defvalue) + + def replace(self, text): + try: + return substitute(text, self.defines) + except ZConfig.SubstitutionReplacementError as e: + e.lineno = self.lineno + e.url = self.url + raise + + def error(self, message): + raise_with_same_tb( + ZConfig.ConfigurationSyntaxError( + message, self.url, self.lineno)) + + + def _normalize_case(self, string): + # This method is factored out solely to allow subclasses to modify + # the behavior of the parser. + return string.lower() + + +import re +# _name_re does not allow "(" or ")" for historical reasons. Though +# the restriction could be lifted, there seems no need to do so. +_name_re = r"[^\s()]+" +_keyvalue_rx = re.compile(r"(?P%s)\s*(?P[^\s].*)?$" + % _name_re) +_section_start_rx = re.compile(r"(?P%s)" + r"(?:\s+(?P%s))?" + r"$" + % (_name_re, _name_re)) +del re diff --git a/thesisenv/lib/python3.6/site-packages/ZConfig/cmdline.py b/thesisenv/lib/python3.6/site-packages/ZConfig/cmdline.py new file mode 100644 index 0000000..084f613 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZConfig/cmdline.py @@ -0,0 +1,212 @@ +############################################################################## +# +# Copyright (c) 2003 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## + +"""Support for command-line overrides for configuration settings. + +This module exports an extended version of the :class:`~.ConfigLoader` +class from the :mod:`ZConfig.loader` module. 
This provides support for +overriding specific settings from the configuration file from the +command line, without requiring the application to provide specific +options for everything the configuration file can include. + +Each setting is given by a value specifier string, as described by +:meth:`ExtendedConfigLoader.addOption`. +""" + +import ZConfig +import ZConfig.loader +import ZConfig.matcher + +from ZConfig._compat import raise_with_same_tb + +class ExtendedConfigLoader(ZConfig.loader.ConfigLoader): + """A :class:`~.ConfigLoader` subclass that adds support for + command-line overrides. + """ + + def __init__(self, schema): + ZConfig.loader.ConfigLoader.__init__(self, schema) + self.clopts = [] # [(optpath, value, source-position), ...] + + def addOption(self, spec, pos=None): + """Add a single value to the list of overridden values. + + The *spec* argument is a value specifier string of the form + ``optionpath=value``. For example:: + + some/path/to/key=value + + The *optionpath* specifies the "full path" to the + configuration setting: it can contain a sequence of names, + separated by ``/`` characters. Each name before the last names + a section from the configuration file, and the last name + corresponds to a key within the section identified by the + leading section names. If *optionpath* contains only one name, + it identifies a key in the top-level schema. *value* is a + string that will be treated just like a value in the + configuration file. + + A source position for the specifier may be given as *pos*. If + *pos* is specified and not ``None``, it must be a sequence of + three values. The first is the URL of the source (or some + other identifying string). The second and third are the line + number and column of the setting. These position information + is only used to construct a :exc:`~.DataConversionError` when + data conversion fails. 
+ """ + if pos is None: + pos = "", -1, -1 + if "=" not in spec: + e = ZConfig.ConfigurationSyntaxError( + "invalid configuration specifier", *pos) + e.specifier = spec + raise e + # For now, just add it to the list; not clear that checking + # against the schema at this point buys anything. + opt, val = spec.split("=", 1) + optpath = opt.split("/") + if "" in optpath: + # // is not allowed in option path + e = ZConfig.ConfigurationSyntaxError( + "'//' is not allowed in an option path", *pos) + e.specifier = spec + raise e + self.clopts.append((optpath, val, pos)) + + def createSchemaMatcher(self): + if self.clopts: + sm = ExtendedSchemaMatcher(self.schema) + sm.set_optionbag(self.cook()) + else: + sm = ZConfig.loader.ConfigLoader.createSchemaMatcher(self) + return sm + + def cook(self): + if self.clopts: + return OptionBag(self.schema, self.schema, self.clopts) + + +class OptionBag(object): + def __init__(self, schema, sectiontype, options): + self.sectiontype = sectiontype + self.schema = schema + self.keypairs = {} + self.sectitems = [] + self._basic_key = schema.registry.get("basic-key") + for item in options: + optpath, val, pos = item + name = sectiontype.keytype(optpath[0]) + if len(optpath) == 1: + self.add_value(name, val, pos) + else: + self.sectitems.append(item) + + def basic_key(self, s, pos): + try: + return self._basic_key(s) + except ValueError as e: + raise_with_same_tb(ZConfig.ConfigurationSyntaxError( + "could not convert basic-key value: " + str(e), *pos)) + + def add_value(self, name, val, pos): + if name in self.keypairs: + L = self.keypairs[name] + else: + L = [] + self.keypairs[name] = L + L.append((val, pos)) + + def __contains__(self, name): + return name in self.keypairs + + def get_key(self, name): + """Return a list of (value, pos) items for the key 'name'. + + The returned list may be empty. 
+ """ + L = self.keypairs.get(name) + if L: + del self.keypairs[name] + return L + return [] + + def keys(self): + return self.keypairs.keys() + + def get_section_info(self, type_, name): + L = [] # what pertains to the child section + R = [] # what we keep + for item in self.sectitems: + optpath, val, pos = item + s = optpath[0] + bk = self.basic_key(s, pos) + if name and self._normalize_case(s) == name: + L.append((optpath[1:], val, pos)) + elif bk == type_: # pragma: no cover + L.append((optpath[1:], val, pos)) + else: + R.append(item) + if L: + self.sectitems[:] = R + return OptionBag(self.schema, self.schema.gettype(type_), L) + + def finish(self): + if self.sectitems or self.keypairs: + raise ZConfig.ConfigurationError( + "not all command line options were consumed") + + def _normalize_case(self, string): + return string.lower() + + +class MatcherMixin(object): + + def set_optionbag(self, bag): + self.optionbag = bag + + def addValue(self, key, value, position): + try: + realkey = self.type.keytype(key) + except ValueError as e: + raise_with_same_tb(ZConfig.DataConversionError(e, key, position)) + + if realkey in self.optionbag: + return + ZConfig.matcher.BaseMatcher.addValue(self, key, value, position) + + def createChildMatcher(self, type_, name): + sm = ZConfig.matcher.BaseMatcher.createChildMatcher(self, type_, name) + bag = self.optionbag.get_section_info(type_.name, name) + if bag is not None: + sm = ExtendedSectionMatcher( + sm.info, sm.type, sm.name, sm.handlers) + sm.set_optionbag(bag) + return sm + + def finish_optionbag(self): + for key in list(self.optionbag.keys()): + for val, pos in self.optionbag.get_key(key): + ZConfig.matcher.BaseMatcher.addValue(self, key, val, pos) + self.optionbag.finish() + + +class ExtendedSectionMatcher(MatcherMixin, ZConfig.matcher.SectionMatcher): + def finish(self): + self.finish_optionbag() + return ZConfig.matcher.SectionMatcher.finish(self) + +class ExtendedSchemaMatcher(MatcherMixin, 
ZConfig.matcher.SchemaMatcher): + def finish(self): + self.finish_optionbag() + return ZConfig.matcher.SchemaMatcher.finish(self) diff --git a/thesisenv/lib/python3.6/site-packages/ZConfig/components/__init__.py b/thesisenv/lib/python3.6/site-packages/ZConfig/components/__init__.py new file mode 100644 index 0000000..f898139 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZConfig/components/__init__.py @@ -0,0 +1 @@ +# This is a Python package. diff --git a/thesisenv/lib/python3.6/site-packages/ZConfig/components/basic/__init__.py b/thesisenv/lib/python3.6/site-packages/ZConfig/components/basic/__init__.py new file mode 100644 index 0000000..f898139 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZConfig/components/basic/__init__.py @@ -0,0 +1 @@ +# This is a Python package. diff --git a/thesisenv/lib/python3.6/site-packages/ZConfig/components/basic/component.xml b/thesisenv/lib/python3.6/site-packages/ZConfig/components/basic/component.xml new file mode 100644 index 0000000..5a2195d --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZConfig/components/basic/component.xml @@ -0,0 +1,9 @@ + + + Convenient loader which causes all the "basic" components to be + loaded. + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/ZConfig/components/basic/mapping.py b/thesisenv/lib/python3.6/site-packages/ZConfig/components/basic/mapping.py new file mode 100644 index 0000000..e30c289 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZConfig/components/basic/mapping.py @@ -0,0 +1,18 @@ +############################################################################## +# +# Copyright (c) 2003 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## + +"""Python datatype for the ZConfig.components.basic.mapping section type.""" + +def mapping(section): + return section.mapping diff --git a/thesisenv/lib/python3.6/site-packages/ZConfig/components/basic/mapping.xml b/thesisenv/lib/python3.6/site-packages/ZConfig/components/basic/mapping.xml new file mode 100644 index 0000000..4426fdd --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZConfig/components/basic/mapping.xml @@ -0,0 +1,34 @@ + + + + + Section that provides a simple mapping implementation. An + application should derive a more specific section type for use + in configuration files: + + <import package="ZConfig.components.basic" + file="mapping.xml" + /> + + <sectiontype name="mapping" + extends="ZConfig.basic.mapping" + /> + + If a non-standard keytype is needed, it can be overridden as + well: + + <sectiontype name="system-map" + extends="ZConfig.basic.mapping" + keytype="mypkg.datatypes.system_name" + /> + + + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/ZConfig/components/basic/tests/__init__.py b/thesisenv/lib/python3.6/site-packages/ZConfig/components/basic/tests/__init__.py new file mode 100644 index 0000000..f898139 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZConfig/components/basic/tests/__init__.py @@ -0,0 +1 @@ +# This is a Python package. 
diff --git a/thesisenv/lib/python3.6/site-packages/ZConfig/components/basic/tests/test_mapping.py b/thesisenv/lib/python3.6/site-packages/ZConfig/components/basic/tests/test_mapping.py new file mode 100644 index 0000000..85e0890 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZConfig/components/basic/tests/test_mapping.py @@ -0,0 +1,91 @@ +############################################################################## +# +# Copyright (c) 2003 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## + +"""Tests of the 'basic' section types provided as part of +ZConfig.components.basic.""" + +import ZConfig.tests.support +import unittest + + +SIMPLE_SCHEMA = '''\ + + + + + + + +
+ +
+ + +''' + + +class BasicSectionTypeTestCase( + ZConfig.tests.support.TestHelper, unittest.TestCase): + + schema = None + + def setUp(self): + if self.schema is None: + self.__class__.schema = self.load_schema_text(SIMPLE_SCHEMA) + + def test_simple_empty_dict(self): + conf = self.load_config_text(self.schema, "") + self.assertEqual(conf.simple_dict, {}) + conf = self.load_config_text(self.schema, """\ + + # comment + + """) + self.assertEqual(conf.simple_dict, {}) + + def test_simple_dict(self): + conf = self.load_config_text(self.schema, """\ + + key-one value-one + key-two value-two + + """) + L = sorted(conf.simple_dict.items()) + self.assertEqual(L, [("key-one", "value-one"), + ("key-two", "value-two")]) + + def test_derived_dict(self): + conf = self.load_config_text(self.schema, """\ + + 1 foo + 2 bar + 42 question? + + """) + L = sorted(conf.int_dict.items()) + self.assertEqual(L, [(1, "foo"), (2, "bar"), (42, "question?")]) + + +def test_suite(): + return unittest.defaultTestLoader.loadTestsFromName(__name__) + +if __name__ == '__main__': + unittest.main(defaultTest='test_suite') diff --git a/thesisenv/lib/python3.6/site-packages/ZConfig/components/logger/__init__.py b/thesisenv/lib/python3.6/site-packages/ZConfig/components/logger/__init__.py new file mode 100644 index 0000000..c6d3310 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZConfig/components/logger/__init__.py @@ -0,0 +1,14 @@ +############################################################################## +# +# Copyright (c) 2003 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. 
+# +############################################################################## +"""ZConfig schema component package for logging configuration.""" diff --git a/thesisenv/lib/python3.6/site-packages/ZConfig/components/logger/abstract.xml b/thesisenv/lib/python3.6/site-packages/ZConfig/components/logger/abstract.xml new file mode 100644 index 0000000..301f43a --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZConfig/components/logger/abstract.xml @@ -0,0 +1,7 @@ + + + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/ZConfig/components/logger/base-logger.xml b/thesisenv/lib/python3.6/site-packages/ZConfig/components/logger/base-logger.xml new file mode 100644 index 0000000..89cd159 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZConfig/components/logger/base-logger.xml @@ -0,0 +1,58 @@ + + + + + + + Base definition for the logger types defined by + ZConfig.components.logger. This exists entirely to provide + shared key definitions and documentation. + + + level INFO + + path STDOUT + format %(levelname)s %(name)s %(message)s + + + ]]> + + + + + Verbosity setting for the logger. Values must be a name of + a level, or an integer in the range [0..50]. The names of the + levels, in order of increasing verbosity (names on the same + line are equivalent):: + + critical, fatal + error + warn, warning + info + blather + debug + trace + all + + The special name "notset", or the numeric value 0, indicates + that the setting for the parent logger should be used. + + It is strongly recommended that names be used rather than + numeric values to ensure that configuration files can be + deciphered more easily. + + + + + + Handlers to install on this logger. Each handler describes + how logging events should be presented. 
+ + + + + diff --git a/thesisenv/lib/python3.6/site-packages/ZConfig/components/logger/component.xml b/thesisenv/lib/python3.6/site-packages/ZConfig/components/logger/component.xml new file mode 100644 index 0000000..4ea9d0e --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZConfig/components/logger/component.xml @@ -0,0 +1,10 @@ + + + + + + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/ZConfig/components/logger/datatypes.py b/thesisenv/lib/python3.6/site-packages/ZConfig/components/logger/datatypes.py new file mode 100644 index 0000000..8f7ff37 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZConfig/components/logger/datatypes.py @@ -0,0 +1,39 @@ +############################################################################## +# +# Copyright (c) 2002 Zope Foundation and Contributors. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. 
+# +############################################################################## + +"""ZConfig datatypes for logging support.""" + + +_logging_levels = { + "critical": 50, + "fatal": 50, + "error": 40, + "warn": 30, + "warning": 30, + "info": 20, + "blather": 15, + "debug": 10, + "trace": 5, + "all": 1, + "notset": 0, + } + +def logging_level(value): + s = str(value).lower() + if s in _logging_levels: + return _logging_levels[s] + else: + v = int(s) + if v < 0 or v > 50: + raise ValueError("log level not in range: " + repr(v)) + return v diff --git a/thesisenv/lib/python3.6/site-packages/ZConfig/components/logger/eventlog.xml b/thesisenv/lib/python3.6/site-packages/ZConfig/components/logger/eventlog.xml new file mode 100644 index 0000000..696df30 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZConfig/components/logger/eventlog.xml @@ -0,0 +1,15 @@ + + + + + + + + Configuration for the root logger. + + + + diff --git a/thesisenv/lib/python3.6/site-packages/ZConfig/components/logger/factory.py b/thesisenv/lib/python3.6/site-packages/ZConfig/components/logger/factory.py new file mode 100644 index 0000000..dfe57b5 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZConfig/components/logger/factory.py @@ -0,0 +1,41 @@ +############################################################################## +# +# Copyright (c) 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. 
+# +############################################################################## + +_marker = object() + +from abc import abstractmethod + +from ZConfig._compat import AbstractBaseClass + +class Factory(AbstractBaseClass): + """Generic wrapper for instance construction. + + Calling the factory causes the instance to be created if it hasn't + already been created, and returns the object. Calling the factory + multiple times returns the same object. + + The instance is created using the factory's create() method, which + must be overriden by subclasses. + """ + def __init__(self): + self.instance = _marker + + def __call__(self): + if self.instance is _marker: + self.instance = self.create() + return self.instance + + @abstractmethod + def create(self): + "Subclasses must override create()" diff --git a/thesisenv/lib/python3.6/site-packages/ZConfig/components/logger/handlers.py b/thesisenv/lib/python3.6/site-packages/ZConfig/components/logger/handlers.py new file mode 100644 index 0000000..1c05f70 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZConfig/components/logger/handlers.py @@ -0,0 +1,227 @@ +############################################################################## +# +# Copyright (c) 2003 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. 
+# +############################################################################## +"""ZConfig factory datatypes for log handlers.""" + +from abc import abstractmethod +import sys + +from ZConfig._compat import urlparse + +from ZConfig.components.logger.factory import Factory + +_log_format_variables = { + 'name': '', + 'levelno': '3', + 'levelname': 'DEBUG', + 'pathname': 'apath', + 'filename': 'afile', + 'module': 'amodule', + 'lineno': 1, + 'created': 1.1, + 'asctime': 'atime', + 'msecs': 1, + 'relativeCreated': 1, + 'thread': 1, + 'message': 'amessage', + 'process': 1, + } + +def log_format(value): + value = ctrl_char_insert(value) + try: + # Make sure the format string uses only names that will be + # provided, and has reasonable type flags for each, and does + # not expect positional args. + value % _log_format_variables + except (ValueError, KeyError): + raise ValueError('Invalid log format string %s' % value) + return value + +_control_char_rewrites = {r'\n': '\n', r'\t': '\t', r'\b': '\b', + r'\f': '\f', r'\r': '\r'}.items() + +def ctrl_char_insert(value): + for pattern, replacement in _control_char_rewrites: + value = value.replace(pattern, replacement) + return value + +def resolve(name): + """Given a dotted name, returns an object imported from a Python module.""" + name = name.split('.') + used = name.pop(0) + found = __import__(used) + for n in name: + used += '.' 
+ n + try: + found = getattr(found, n) + except AttributeError: + __import__(used) + found = getattr(found, n) + return found + +class HandlerFactory(Factory): + def __init__(self, section): + Factory.__init__(self) + self.section = section + + @abstractmethod + def create_loghandler(self): + "subclasses must override create_loghandler()" + + def create(self): + import logging + logger = self.create_loghandler() + if self.section.formatter: + f = resolve(self.section.formatter) + else: + f = logging.Formatter + logger.setFormatter(f(self.section.format, self.section.dateformat)) + logger.setLevel(self.section.level) + return logger + + def getLevel(self): # pragma: no cover Is this used? + return self.section.level + +class FileHandlerFactory(HandlerFactory): + def create_loghandler(self): + from ZConfig.components.logger import loghandler + path = self.section.path + max_bytes = self.section.max_size + old_files = self.section.old_files + when = self.section.when + interval = self.section.interval + if path == "STDERR": + if max_bytes or old_files: + raise ValueError("cannot rotate STDERR") + handler = loghandler.StreamHandler(sys.stderr) + elif path == "STDOUT": + if max_bytes or old_files: + raise ValueError("cannot rotate STDOUT") + handler = loghandler.StreamHandler(sys.stdout) + elif when or max_bytes or old_files or interval: + if not old_files: + raise ValueError("old-files must be set for log rotation") + if when: + if max_bytes: + raise ValueError("can't set *both* max_bytes and when") + if not interval: + interval = 1 + handler = loghandler.TimedRotatingFileHandler( + path, when=when, interval=interval, + backupCount=old_files) + elif max_bytes: + handler = loghandler.RotatingFileHandler( + path, maxBytes=max_bytes, backupCount=old_files) + else: + raise ValueError( + "max-bytes or when must be set for log rotation") + else: + handler = loghandler.FileHandler(path) + return handler + +_syslog_facilities = { + "auth": 1, + "authpriv": 1, + "cron": 1, + 
"daemon": 1, + "kern": 1, + "lpr": 1, + "mail": 1, + "news": 1, + "security": 1, + "syslog": 1, + "user": 1, + "uucp": 1, + "local0": 1, + "local1": 1, + "local2": 1, + "local3": 1, + "local4": 1, + "local5": 1, + "local6": 1, + "local7": 1, + } + +def syslog_facility(value): + value = value.lower() + if value not in _syslog_facilities: + L = sorted(_syslog_facilities.keys()) + raise ValueError("Syslog facility must be one of " + ", ".join(L)) + return value + +class SyslogHandlerFactory(HandlerFactory): + def create_loghandler(self): + from ZConfig.components.logger import loghandler + return loghandler.SysLogHandler(self.section.address.address, + self.section.facility) + +class Win32EventLogFactory(HandlerFactory): + def create_loghandler(self): + from ZConfig.components.logger import loghandler + return loghandler.Win32EventLogHandler(self.section.appname) + +def http_handler_url(value): + scheme, netloc, path, param, query, fragment = urlparse.urlparse(value) + if scheme != 'http': + raise ValueError('url must be an http url') + if not netloc: + raise ValueError('url must specify a location') + if not path: + raise ValueError('url must specify a path') + q = [] + if param: + q.append(';') + q.append(param) + if query: + q.append('?') + q.append(query) + if fragment: + q.append('#') + q.append(fragment) + return (netloc, path + ''.join(q)) + +def get_or_post(value): + value = value.upper() + if value not in ('GET', 'POST'): + raise ValueError('method must be "GET" or "POST", instead received: ' + + repr(value)) + return value + +class HTTPHandlerFactory(HandlerFactory): + def create_loghandler(self): + from ZConfig.components.logger import loghandler + host, selector = self.section.url + return loghandler.HTTPHandler(host, selector, self.section.method) + +class SMTPHandlerFactory(HandlerFactory): + def create_loghandler(self): + from ZConfig.components.logger import loghandler + host, port = self.section.smtp_server + if not port: + mailhost = host + else: + 
mailhost = host, port + kwargs = {} + if self.section.smtp_username and self.section.smtp_password: + kwargs['credentials'] = (self.section.smtp_username, + self.section.smtp_password) + elif (self.section.smtp_username or self.section.smtp_password): + raise ValueError( + 'Either both smtp-username and smtp-password or none must be ' + 'given') + return loghandler.SMTPHandler(mailhost, + self.section.fromaddr, + self.section.toaddrs, + self.section.subject, + **kwargs) diff --git a/thesisenv/lib/python3.6/site-packages/ZConfig/components/logger/handlers.xml b/thesisenv/lib/python3.6/site-packages/ZConfig/components/logger/handlers.xml new file mode 100644 index 0000000..39491e1 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZConfig/components/logger/handlers.xml @@ -0,0 +1,107 @@ + + + + + + + + + Base type for most log handlers. This is cannot be used as a + loghandler directly since it doesn't implement the loghandler + abstract section type. + + + + Logging formatter class. The default is 'logging.Formatter'. + An alternative is 'zope.exceptions.log.Formatter', + which enhances exception tracebacks with information from + __traceback_info__ and __traceback_supplement__ variables. + + + + + + + + + path STDOUT + format %(name)s %(message)s + + ]]> + + + + + + + + + + + + + + + + + + + + + + + + + + + + + to sysadmin@example.com + to john@example.com + from zlog-user@example.com + level fatal + smtp-username john + smtp-password johnpw + + ]]> + + + + + + + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/ZConfig/components/logger/logger.py b/thesisenv/lib/python3.6/site-packages/ZConfig/components/logger/logger.py new file mode 100644 index 0000000..e786692 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZConfig/components/logger/logger.py @@ -0,0 +1,102 @@ +############################################################################## +# +# Copyright (c) 2003 Zope Foundation and Contributors. +# All Rights Reserved. 
+# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""ZConfig factory datatypes for loggers.""" + +from ZConfig.components.logger.factory import Factory + + +class LoggerFactoryBase(Factory): + """Base class for logger factories. + + Factory used to create loggers while delaying actual logger + instance construction. We need to do this because we may want to + reference a logger before actually instantiating it (for example, + to allow the app time to set an effective user). An instance of + this wrapper is a callable which, when called, returns a logger + object. + """ + + def __init__(self, section): + Factory.__init__(self) + self.level = section.level + self.handler_factories = section.handlers + + def create(self): + # set the logger up + import logging + logger = logging.getLogger(self.name) + logger.setLevel(self.level) + if self.handler_factories: + for handler_factory in self.handler_factories: + handler = handler_factory() + logger.addHandler(handler) + else: + from ZConfig.components.logger import loghandler + logger.addHandler(loghandler.NullHandler()) + return logger + + def startup(self): + # make sure we've instantiated the logger + self() + + def getLowestHandlerLevel(self): + """Return the lowest log level provided by any configured handler. + + If all handlers and the logger itself have level==NOTSET, this + returns NOTSET. 
+ """ + import logging + lowest = self.level + for factory in self.handler_factories: + level = factory.getLevel() + if level != logging.NOTSET: + if lowest == logging.NOTSET: + lowest = level + else: + lowest = min(lowest, level) + return lowest + + def reopen(self): + """Re-open any handlers for which this is a meaningful operation. + + This only works on handlers on the logger provided by this + factory directly; handlers for child loggers are not affected. + (This can be considered a bug, but is sufficient at the + moment.) + """ + logger = self() + for handler in logger.handlers: + reopen = getattr(handler, "reopen", None) + if reopen is not None and callable(reopen): + reopen() + + +class EventLogFactory(LoggerFactoryBase): + """Logger factory that returns the root logger.""" + + name = None + + +class LoggerFactory(LoggerFactoryBase): + """Logger factory that returns the named logger.""" + + def __init__(self, section): + LoggerFactoryBase.__init__(self, section) + self.name = section.name + self.propagate = section.propagate + + def create(self): + logger = LoggerFactoryBase.create(self) + logger.propagate = self.propagate + return logger diff --git a/thesisenv/lib/python3.6/site-packages/ZConfig/components/logger/logger.xml b/thesisenv/lib/python3.6/site-packages/ZConfig/components/logger/logger.xml new file mode 100644 index 0000000..41604a5 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZConfig/components/logger/logger.xml @@ -0,0 +1,39 @@ + + + + + + + + + + + + Indicates whether events that reach this logger should be + propogated toward the root of the logger hierarchy. If true + (the default), events will be passed to the logger's parent + after being handled. If false, events will be handled and the + parent will not be informed. There is not a way to control + propogation by the severity of the event. + + + + + + The dotted name of the logger. This give it a location in the + logging hierarchy. 
Most applications provide a specific set + of subsystem names for which logging is meaning; consult the + application documentation for the set of names that are + actually interesting for the application. + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/ZConfig/components/logger/loghandler.py b/thesisenv/lib/python3.6/site-packages/ZConfig/components/logger/loghandler.py new file mode 100644 index 0000000..2d9c6bd --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZConfig/components/logger/loghandler.py @@ -0,0 +1,179 @@ +############################################################################## +# +# Copyright (c) 2001 Zope Foundation and Contributors. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. 
+# +############################################################################## + +"""Handlers which can plug into a PEP 282 logger.""" + +import os + +import weakref + +from logging import Handler, StreamHandler +from logging.handlers import RotatingFileHandler as _RotatingFileHandler +from logging.handlers import TimedRotatingFileHandler \ + as _TimedRotatingFileHandler +from logging.handlers import SysLogHandler, BufferingHandler +from logging.handlers import HTTPHandler, SMTPHandler +from logging.handlers import NTEventLogHandler as Win32EventLogHandler + +# Export these, they're used in handlers.py +SysLogHandler = SysLogHandler +HTTPHandler = HTTPHandler +SMTPHandler = SMTPHandler +Win32EventLogHandler = Win32EventLogHandler + +from ZConfig._compat import maxsize + + +_reopenable_handlers = [] + +def closeFiles(): + """Reopen all logfiles managed by ZConfig configuration.""" + while _reopenable_handlers: + wr = _reopenable_handlers.pop() + h = wr() + if h is not None: + h.close() + +def reopenFiles(): + """Reopen all logfiles managed by ZConfig configuration.""" + for wr in _reopenable_handlers[:]: + h = wr() + if h is None: + try: + _reopenable_handlers.remove(wr) + except ValueError: + continue + else: + h.reopen() + +def _remove_from_reopenable(wr): + try: + _reopenable_handlers.remove(wr) + except ValueError: + pass + + +class FileHandler(StreamHandler): + """File handler which supports reopening of logs. + + Re-opening should be used instead of the 'rollover' feature of + the FileHandler from the standard library's logging package. 
+ """ + + def __init__(self, filename, mode="a"): + filename = os.path.abspath(filename) + StreamHandler.__init__(self, open(filename, mode)) + self.baseFilename = filename + self.mode = mode + self._wr = weakref.ref(self, _remove_from_reopenable) + _reopenable_handlers.append(self._wr) + + def close(self): + self.stream.close() + # This can raise a KeyError if the handler has already been + # removed, but a later error can be raised if + # StreamHandler.close() isn't called. This seems the best + # compromise. :-( + try: + StreamHandler.close(self) + except KeyError: # pragma: no cover + pass + _remove_from_reopenable(self._wr) + + def reopen(self): + self.acquire() + try: + self.stream.close() + self.stream = open(self.baseFilename, self.mode) + finally: + self.release() + + +class Win32FileHandler(FileHandler): + """File-based log handler for Windows that supports an additional 'rotate' + method. reopen() is generally useless since Windows cannot do a move on + an open file. + """ + def rotate(self, rotateFilename=None): + if not rotateFilename: + rotateFilename = self.baseFilename + ".last" + error = None + self.close() + try: + os.rename(self.baseFilename, rotateFilename) + except OSError: + pass + + self.stream = open(self.baseFilename, self.mode) + +if os.name == "nt": + # Make it the default for Windows - we install a 'reopen' handler that + # tries to rotate the logfile. 
+ FileHandler = Win32FileHandler + + +class RotatingFileHandler(_RotatingFileHandler): + + def __init__(self, *args, **kw): + _RotatingFileHandler.__init__(self, *args, **kw) + self._wr = weakref.ref(self, _remove_from_reopenable) + _reopenable_handlers.append(self._wr) + + def close(self): + _RotatingFileHandler.close(self) + _remove_from_reopenable(self._wr) + + def reopen(self): + self.doRollover() + +class TimedRotatingFileHandler(_TimedRotatingFileHandler): + + def __init__(self, *args, **kw): + _TimedRotatingFileHandler.__init__(self, *args, **kw) + self._wr = weakref.ref(self, _remove_from_reopenable) + _reopenable_handlers.append(self._wr) + + def close(self): + _TimedRotatingFileHandler.close(self) + _remove_from_reopenable(self._wr) + + def reopen(self): + self.doRollover() + + +class NullHandler(Handler): + """Handler that does nothing.""" + + def emit(self, record): + pass + + def handle(self, record): + pass + + +class StartupHandler(BufferingHandler): + """Handler which stores messages in a buffer until later. + + This is useful at startup before we can know that we can safely + write to a configuration-specified handler. + """ + + def __init__(self): + BufferingHandler.__init__(self, maxsize) + + def shouldFlush(self, record): + return False + + def flushBufferTo(self, target): + while self.buffer: + target.handle(self.buffer.pop(0)) diff --git a/thesisenv/lib/python3.6/site-packages/ZConfig/components/logger/tests/__init__.py b/thesisenv/lib/python3.6/site-packages/ZConfig/components/logger/tests/__init__.py new file mode 100644 index 0000000..f898139 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZConfig/components/logger/tests/__init__.py @@ -0,0 +1 @@ +# This is a Python package. 
diff --git a/thesisenv/lib/python3.6/site-packages/ZConfig/components/logger/tests/test_logger.py b/thesisenv/lib/python3.6/site-packages/ZConfig/components/logger/tests/test_logger.py new file mode 100644 index 0000000..b90c110 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZConfig/components/logger/tests/test_logger.py @@ -0,0 +1,863 @@ +############################################################################## +# +# Copyright (c) 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## + +"""Tests for logging configuration via ZConfig.""" + +import doctest +import logging +import os +import sys +import tempfile +import unittest + +import ZConfig + +from ZConfig.components.logger import datatypes +from ZConfig.components.logger import handlers +from ZConfig.components.logger import loghandler + +from ZConfig._compat import NStringIO as StringIO +from ZConfig._compat import maxsize + +from ZConfig.tests.support import TestHelper + + +class CustomFormatter(logging.Formatter): + def formatException(self, ei): + """Format and return the exception information as a string. + + This adds helpful advice to the end of the traceback. + """ + import traceback + sio = StringIO() + traceback.print_exception(ei[0], ei[1], ei[2], file=sio) + return sio.getvalue() + "... Don't panic!" 
+ + +def read_file(filename): + with open(filename) as f: + return f.read() + + +class LoggingTestHelper(TestHelper): + + # Not derived from unittest.TestCase; some test runners seem to + # think that means this class contains tests. + + # XXX This tries to save and restore the state of logging around + # the test. Somewhat surgical; there may be a better way. + + def setUp(self): + self._created = [] + self._old_logger = logging.getLogger() + self._old_level = self._old_logger.level + self._old_handlers = self._old_logger.handlers[:] + self._old_logger.handlers[:] = [] + self._old_logger.setLevel(logging.WARN) + + self._old_logger_dict = logging.root.manager.loggerDict.copy() + logging.root.manager.loggerDict.clear() + + def tearDown(self): + logging.root.manager.loggerDict.clear() + logging.root.manager.loggerDict.update(self._old_logger_dict) + + for h in self._old_logger.handlers: + self._old_logger.removeHandler(h) + for h in self._old_handlers: + self._old_logger.addHandler(h) # pragma: no cover + self._old_logger.setLevel(self._old_level) + + while self._created: + os.unlink(self._created.pop()) + + self.assertEqual(loghandler._reopenable_handlers, []) + loghandler.closeFiles() + loghandler._reopenable_handlers == [] + + def mktemp(self): + fd, fn = tempfile.mkstemp() + os.close(fd) + self._created.append(fn) + return fn + + def move(self, fn): + nfn = self.mktemp() + os.rename(fn, nfn) + return nfn + + _schema = None + + def get_schema(self): + if self._schema is None: + sio = StringIO(self._schematext) + self.__class__._schema = ZConfig.loadSchemaFile(sio) + return self._schema + + def get_config(self, text): + conf, handler = ZConfig.loadConfigFile(self.get_schema(), + StringIO(text)) + self.assertTrue(not handler) + return conf + + +class TestConfig(LoggingTestHelper, unittest.TestCase): + + _schematext = """ + + +
+ + """ + + def test_config_without_logger(self): + conf = self.get_config("") + self.assertTrue(conf.eventlog is None) + + def test_config_without_handlers(self): + logger = self.check_simple_logger("") + # Make sure there's a NullHandler, since a warning gets + # printed if there are no handlers: + self.assertEqual(len(logger.handlers), 1) + self.assertTrue(isinstance(logger.handlers[0], loghandler.NullHandler)) + + # And it does nothing + logger.handlers[0].emit(None) + logger.handlers[0].handle(None) + + def test_factory_without_stream(self): + factory = self.check_simple_logger_factory("\n" + " \n" + " path STDERR\n" + " \n" + " \n" + " path STDERR\n" + " level info\n" + " \n" + " \n" + " path STDERR\n" + " level debug\n" + " \n" + "") + + factory.startup() + logger = factory.instance + + factory.level = logging.NOTSET + self.assertEqual(factory.getLowestHandlerLevel(), logging.DEBUG) + logger.handlers[0].reopen = lambda: None + factory.reopen() + + def test_with_logfile(self): + fn = self.mktemp() + logger = self.check_simple_logger("\n" + " \n" + " path %s\n" + " level debug\n" + " \n" + "" % fn) + logfile = logger.handlers[0] + self.assertEqual(logfile.level, logging.DEBUG) + self.assertTrue(isinstance(logfile, loghandler.FileHandler)) + logger.removeHandler(logfile) + logfile.close() + + def test_with_stderr(self): + self.check_standard_stream("stderr") + + def test_with_stdout(self): + self.check_standard_stream("stdout") + + def test_with_rotating_logfile(self): + fn = self.mktemp() + logger = self.check_simple_logger("\n" + " \n" + " path %s\n" + " level debug\n" + " max-size 5mb\n" + " old-files 10\n" + " \n" + "" % fn) + logfile = logger.handlers[0] + self.assertEqual(logfile.level, logging.DEBUG) + self.assertEqual(logfile.backupCount, 10) + self.assertEqual(logfile.maxBytes, 5*1024*1024) + self.assertTrue(isinstance(logfile, loghandler.RotatingFileHandler)) + logger.removeHandler(logfile) + logfile.close() + + def 
test_with_timed_rotating_logfile(self): + fn = self.mktemp() + logger = self.check_simple_logger("\n" + " \n" + " path %s\n" + " level debug\n" + " when D\n" + " interval 3\n" + " old-files 11\n" + " \n" + "" % fn) + logfile = logger.handlers[0] + self.assertEqual(logfile.level, logging.DEBUG) + self.assertEqual(logfile.backupCount, 11) + self.assertEqual(logfile.interval, 86400*3) + self.assertTrue(isinstance(logfile, loghandler.TimedRotatingFileHandler)) + logger.removeHandler(logfile) + logfile.close() + + def test_with_timed_rotating_logfile_and_size_should_fail(self): + fn = self.mktemp() + self.assertRaises( + ValueError, + self.check_simple_logger, + "\n" + " \n" + " path %s\n" + " level debug\n" + " max-size 5mb\n" + " when D\n" + " old-files 10\n" + " \n" + "" % fn) + + # Mising old-files + self.assertRaisesRegex( + ValueError, + "old-files must be set", + self.check_simple_logger, + "\n" + " \n" + " path %s\n" + " level debug\n" + " max-size 5mb\n" + " when D\n" + " \n" + "" % fn) + + self.assertRaisesRegex( + ValueError, + "max-bytes or when must be set", + self.check_simple_logger, + "\n" + " \n" + " path %s\n" + " level debug\n" + " interval 1\n" + " old-files 10\n" + " \n" + "" % fn) + + + def test_with_rotating_logfile_and_STD_should_fail(self): + for path in ('STDERR', 'STDOUT'): + for param in ('old-files 10', 'max-size 5mb'): + self.assertRaises( + ValueError, + self.check_simple_logger, + "\n" + " \n" + " path %s\n" + " level debug\n" + " when D\n" + " %s\n" + " \n" + "" % (path, param)) + + + def check_standard_stream(self, name): + old_stream = getattr(sys, name) + conf = self.get_config(""" + + + level info + path %s + + + """ % name.upper()) + self.assertTrue(conf.eventlog is not None) + # The factory has already been created; make sure it picks up + # the stderr we set here when we create the logger and + # handlers: + sio = StringIO() + setattr(sys, name, sio) + try: + logger = conf.eventlog() + finally: + setattr(sys, name, old_stream) + 
logger.warning("woohoo!") + self.assertTrue(sio.getvalue().find("woohoo!") >= 0) + + def test_custom_formatter(self): + old_stream = sys.stdout + conf = self.get_config(""" + + + formatter ZConfig.components.logger.tests.test_logger.CustomFormatter + level info + path STDOUT + + + """) + sio = StringIO() + sys.stdout = sio + try: + logger = conf.eventlog() + finally: + sys.stdout = old_stream + try: + raise KeyError + except KeyError: + logger.exception("testing a KeyError") + self.assertTrue(sio.getvalue().find("KeyError") >= 0) + self.assertTrue(sio.getvalue().find("Don't panic") >= 0) + + def test_with_syslog(self): + import socket + logger = self.check_simple_logger("\n" + " \n" + " level error\n" + " facility local3\n" + " \n" + "") + syslog = logger.handlers[0] + self.assertEqual(syslog.level, logging.ERROR) + self.assertTrue(isinstance(syslog, loghandler.SysLogHandler)) + syslog.close() # avoid ResourceWarning + try: + syslog.socket.close() # ResourceWarning under 3.2 + except socket.SocketError: # pragma: no cover + pass + + def test_with_http_logger_localhost(self): + logger = self.check_simple_logger("\n" + " \n" + " level error\n" + " method post\n" + " \n" + "") + handler = logger.handlers[0] + self.assertEqual(handler.host, "localhost") + # XXX The "url" attribute of the handler is misnamed; it + # really means just the selector portion of the URL. + self.assertEqual(handler.url, "/") + self.assertEqual(handler.level, logging.ERROR) + self.assertEqual(handler.method, "POST") + self.assertTrue(isinstance(handler, loghandler.HTTPHandler)) + + def test_with_http_logger_remote_host(self): + logger = self.check_simple_logger("\n" + " \n" + " method get\n" + " url http://example.com/log/\n" + " \n" + "") + handler = logger.handlers[0] + self.assertEqual(handler.host, "example.com") + # XXX The "url" attribute of the handler is misnamed; it + # really means just the selector portion of the URL. 
+ self.assertEqual(handler.url, "/log/") + self.assertEqual(handler.level, logging.NOTSET) + self.assertEqual(handler.method, "GET") + self.assertTrue(isinstance(handler, loghandler.HTTPHandler)) + + def test_with_email_notifier(self): + logger = self.check_simple_logger("\n" + " \n" + " to sysadmin@example.com\n" + " to sa-pager@example.com\n" + " from zlog-user@example.com\n" + " level fatal\n" + " \n" + "") + handler = logger.handlers[0] + self.assertEqual(handler.toaddrs, ["sysadmin@example.com", + "sa-pager@example.com"]) + self.assertEqual(handler.fromaddr, "zlog-user@example.com") + self.assertEqual(handler.level, logging.FATAL) + + def test_with_email_notifier_with_credentials(self): + logger = self.check_simple_logger("\n" + " \n" + " to sysadmin@example.com\n" + " from zlog-user@example.com\n" + " level fatal\n" + " smtp-server foo:487\n" + " smtp-username john\n" + " smtp-password johnpw\n" + " \n" + "") + + self.assertTrue(sys.version_info >= (2, 6)) + handler = logger.handlers[0] + self.assertEqual(handler.toaddrs, ["sysadmin@example.com"]) + self.assertEqual(handler.fromaddr, "zlog-user@example.com") + self.assertEqual(handler.fromaddr, "zlog-user@example.com") + self.assertEqual(handler.level, logging.FATAL) + self.assertEqual(handler.username, 'john') + self.assertEqual(handler.password, 'johnpw') + self.assertEqual(handler.mailhost, 'foo') + self.assertEqual(handler.mailport, 487) + + def test_with_email_notifier_with_invalid_credentials(self): + self.assertRaises(ValueError, + self.check_simple_logger, + "\n" + " \n" + " to sysadmin@example.com\n" + " from zlog-user@example.com\n" + " level fatal\n" + " smtp-username john\n" + " \n" + "") + self.assertRaises(ValueError, + self.check_simple_logger, + "\n" + " \n" + " to sysadmin@example.com\n" + " from zlog-user@example.com\n" + " level fatal\n" + " smtp-password john\n" + " \n" + "") + + def check_simple_logger_factory(self, text, level=logging.INFO): + conf = self.get_config(text) + 
self.assertTrue(conf.eventlog is not None) + self.assertEqual(conf.eventlog.level, level) + return conf.eventlog + + def check_simple_logger(self, text, level=logging.INFO): + logger = self.check_simple_logger_factory(text, level)() + self.assertTrue(isinstance(logger, logging.Logger)) + self.assertEqual(len(logger.handlers), 1) + return logger + + +if os.name == 'nt': + # Though log files can be closed and re-opened on Windows, these + # tests expect to be able to move the underlying files out from + # underneath the logger while open. That's not possible on + # Windows. So we don't extend TestCase so that they don't get run. + # + # Different tests are needed that only test that close/re-open + # operations are performed by the handler; those can be run on + # any platform. + _RotateTestBase = object +else: + _RotateTestBase = unittest.TestCase + + +class TestReopeningRotatingLogfiles(LoggingTestHelper, _RotateTestBase): + + # These tests should not be run on Windows. + + handler_factory = loghandler.RotatingFileHandler + + _schematext = """ + + + + + """ + + _sampleconfig_template = """ + + name foo.bar + + path %(path0)s + level debug + max-size 1mb + old-files 10 + + + path %(path1)s + level info + max-size 1mb + old-files 3 + + + path %(path1)s + level info + when D + old-files 3 + + + + + name bar.foo + + path %(path2)s + level info + max-size 10mb + old-files 10 + + + """ + + def test_filehandler_reopen(self): + + def mkrecord(msg): + args = ["foo.bar", logging.ERROR, __file__, 42, msg, (), ()] + return logging.LogRecord(*args) + + # This goes through the reopening operation *twice* to make + # sure that we don't lose our handle on the handler the first + # time around. 
+ + fn = self.mktemp() + h = self.handler_factory(fn) + h.handle(mkrecord("message 1")) + nfn1 = self.move(fn) + h.handle(mkrecord("message 2")) + h.reopen() + h.handle(mkrecord("message 3")) + nfn2 = self.move(fn) + h.handle(mkrecord("message 4")) + h.reopen() + h.handle(mkrecord("message 5")) + h.close() + + # Check that the messages are in the right files:: + text1 = read_file(nfn1) + text2 = read_file(nfn2) + text3 = read_file(fn) + self.assertTrue("message 1" in text1) + self.assertTrue("message 2" in text1) + self.assertTrue("message 3" in text2) + self.assertTrue("message 4" in text2) + self.assertTrue("message 5" in text3) + + def test_logfile_reopening(self): + # + # This test only applies to the simple logfile reopening; it + # doesn't work the same way as the rotating logfile handler. + # + paths = self.mktemp(), self.mktemp(), self.mktemp() + d = { + "path0": paths[0], + "path1": paths[1], + "path2": paths[2], + } + text = self._sampleconfig_template % d + conf = self.get_config(text) + self.assertEqual(len(conf.loggers), 2) + # Build the loggers from the configuration, and write to them: + conf.loggers[0]().info("message 1") + conf.loggers[1]().info("message 2") + # + # We expect this to re-open the original filenames, so we'll + # have six files instead of three. + # + loghandler.reopenFiles() + # + # Write to them again: + conf.loggers[0]().info("message 3") + conf.loggers[1]().info("message 4") + # + # We expect this to re-open the original filenames, so we'll + # have nine files instead of six. 
+ # + loghandler.reopenFiles() + # + # Write to them again: + conf.loggers[0]().info("message 5") + conf.loggers[1]().info("message 6") + # + # We should now have all nine files: + for fn in paths: + fn1 = fn + ".1" + fn2 = fn + ".2" + self.assertTrue(os.path.isfile(fn), "%r must exist" % fn) + self.assertTrue(os.path.isfile(fn1), "%r must exist" % fn1) + self.assertTrue(os.path.isfile(fn2), "%r must exist" % fn2) + # + # Clean up: + for logger in conf.loggers: + logger = logger() + for handler in logger.handlers[:]: + logger.removeHandler(handler) + handler.close() + + +class TestReopeningLogfiles(TestReopeningRotatingLogfiles): + + handler_factory = loghandler.FileHandler + + _sampleconfig_template = """ + + name foo.bar + + path %(path0)s + level debug + + + path %(path1)s + level info + + + + + name bar.foo + + path %(path2)s + level info + + + """ + + def test_logfile_reopening(self): + # + # This test only applies to the simple logfile reopening; it + # doesn't work the same way as the rotating logfile handler. + # + paths = self.mktemp(), self.mktemp(), self.mktemp() + d = { + "path0": paths[0], + "path1": paths[1], + "path2": paths[2], + } + text = self._sampleconfig_template % d + conf = self.get_config(text) + self.assertEqual(len(conf.loggers), 2) + # Build the loggers from the configuration, and write to them: + conf.loggers[0]().info("message 1") + conf.loggers[1]().info("message 2") + npaths1 = [self.move(fn) for fn in paths] + # + # We expect this to re-open the original filenames, so we'll + # have six files instead of three. + # + loghandler.reopenFiles() + # + # Write to them again: + conf.loggers[0]().info("message 3") + conf.loggers[1]().info("message 4") + npaths2 = [self.move(fn) for fn in paths] + # + # We expect this to re-open the original filenames, so we'll + # have nine files instead of six. 
+ # + loghandler.reopenFiles() + # + # Write to them again: + conf.loggers[0]().info("message 5") + conf.loggers[1]().info("message 6") + # + # We should now have all nine files: + for fn in paths: + self.assertTrue(os.path.isfile(fn), "%r must exist" % fn) + for fn in npaths1: + self.assertTrue(os.path.isfile(fn), "%r must exist" % fn) + for fn in npaths2: + self.assertTrue(os.path.isfile(fn), "%r must exist" % fn) + # + # Clean up: + for logger in conf.loggers: + logger = logger() + for handler in logger.handlers[:]: + logger.removeHandler(handler) + handler.close() + + def test_filehandler_reopen_thread_safety(self): + # The reopen method needs to do locking to avoid a race condition + # with emit calls. For simplicity we replace the "acquire" and + # "release" methods with dummies that record calls to them. + + fn = self.mktemp() + h = self.handler_factory(fn) + + calls = [] + h.acquire = lambda: calls.append("acquire") + h.release = lambda: calls.append("release") + + h.reopen() + h.close() + + self.assertEqual(calls, ["acquire", "release"]) + +class TestFunctions(TestHelper, unittest.TestCase): + + def test_log_format_bad(self): + self.assertRaisesRegex(ValueError, + "Invalid log format string", + handlers.log_format, + "%{no-such-key}s") + + def test_resolve_deep(self): + old_mod = None + if hasattr(logging, 'handlers'): + # This module is nested so it hits our coverage target, + # and it doesn't alter any state + # on import, so a "reimport" is fine + del logging.handlers + old_mod = sys.modules['logging.handlers'] + del sys.modules['logging.handlers'] + try: + handlers.resolve('logging.handlers') + finally: + if old_mod is not None: + logging.handlers = old_mod + sys.modules['logging.handlers'] = old_mod + + def test_http_handler_url(self): + self.assertRaisesRegex(ValueError, + 'must be an http', + handlers.http_handler_url, 'file://foo/baz') + self.assertRaisesRegex(ValueError, + 'must specify a location', + handlers.http_handler_url, 'http://') + 
self.assertRaisesRegex(ValueError, + 'must specify a path', + handlers.http_handler_url, 'http://server') + + v = handlers.http_handler_url("http://server/path;param?q=v#fragment") + self.assertEqual(v, ('server', '/path;param?q=v#fragment')) + + def test_close_files(self): + class F(object): + closed = 0 + def close(self): + self.closed += 1 + f = F() + def wr(): + return f + + loghandler._reopenable_handlers.append(wr) + loghandler.closeFiles() + loghandler.closeFiles() + + self.assertEqual(1, f.closed) + + def test_reopen_files_missing_wref(self): + # simulate concurrent iteration that pops the ref + def wr(): + loghandler._reopenable_handlers.remove(wr) + + loghandler._reopenable_handlers.append(wr) + loghandler.reopenFiles() + + def test_logging_level(self): + # Make sure the expected names are supported; it's not clear + # how to check the values in a meaningful way. + # Just make sure they're case-insensitive. + convert = datatypes.logging_level + for name in ["notset", "all", "trace", "debug", "blather", + "info", "warn", "warning", "error", "fatal", + "critical"]: + self.assertEqual(convert(name), convert(name.upper())) + self.assertRaises(ValueError, convert, "hopefully-not-a-valid-value") + self.assertEqual(convert('10'), 10) + self.assertRaises(ValueError, convert, '100') + + def test_http_method(self): + convert = handlers.get_or_post + self.assertEqual(convert("get"), "GET") + self.assertEqual(convert("GET"), "GET") + self.assertEqual(convert("post"), "POST") + self.assertEqual(convert("POST"), "POST") + self.assertRaises(ValueError, convert, "") + self.assertRaises(ValueError, convert, "foo") + + def test_syslog_facility(self): + convert = handlers.syslog_facility + for name in ["auth", "authpriv", "cron", "daemon", "kern", + "lpr", "mail", "news", "security", "syslog", + "user", "uucp", "local0", "local1", "local2", + "local3", "local4", "local5", "local6", "local7"]: + self.assertEqual(convert(name), name) + self.assertEqual(convert(name.upper()), 
name) + self.assertRaises(ValueError, convert, "hopefully-never-a-valid-value") + + +class TestStartupHandler(unittest.TestCase): + + def test_buffer(self): + handler = loghandler.StartupHandler() + self.assertFalse(handler.shouldFlush(None)) + self.assertEqual(maxsize, handler.capacity) + + records = [] + def handle(record): + records.append(record) + handle.handle = handle + + handler.flushBufferTo(handle) + self.assertEqual([], records) + + handler.buffer.append(1) + handler.flushBufferTo(handle) + self.assertEqual([1], records) + + del handle.handle + +def test_logger_convenience_function_and_ommiting_name_to_get_root_logger(): + """ + +The ZConfig.loggers function can be used to configure one or more loggers. +We'll configure the rot logger and a non-root logger. + + >>> old_level = logging.getLogger().getEffectiveLevel() + >>> old_handler_count = len(logging.getLogger().handlers) + + >>> ZConfig.configureLoggers(''' + ... + ... level INFO + ... + ... PATH STDOUT + ... format root %(levelname)s %(name)s %(message)s + ... + ... + ... + ... + ... name ZConfig.TEST + ... level DEBUG + ... + ... PATH STDOUT + ... format test %(levelname)s %(name)s %(message)s + ... + ... + ... ''') + + >>> logging.getLogger('ZConfig.TEST').debug('test message') + test DEBUG ZConfig.TEST test message + root DEBUG ZConfig.TEST test message + + >>> logging.getLogger().getEffectiveLevel() == logging.INFO + True + >>> len(logging.getLogger().handlers) == old_handler_count + 1 + True + >>> logging.getLogger('ZConfig.TEST').getEffectiveLevel() == logging.DEBUG + True + >>> len(logging.getLogger('ZConfig.TEST').handlers) == 1 + True + +.. cleanup + + >>> logging.getLogger('ZConfig.TEST').setLevel(logging.NOTSET) + >>> logging.getLogger('ZConfig.TEST').removeHandler( + ... 
logging.getLogger('ZConfig.TEST').handlers[-1]) + >>> logging.getLogger().setLevel(old_level) + >>> logging.getLogger().removeHandler(logging.getLogger().handlers[-1]) + + + """ + +def test_suite(): + return unittest.TestSuite([ + unittest.defaultTestLoader.loadTestsFromName(__name__), + doctest.DocTestSuite() + ]) + +if __name__ == '__main__': + unittest.main(defaultTest="test_suite") diff --git a/thesisenv/lib/python3.6/site-packages/ZConfig/datatypes.py b/thesisenv/lib/python3.6/site-packages/ZConfig/datatypes.py new file mode 100644 index 0000000..aa55766 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZConfig/datatypes.py @@ -0,0 +1,528 @@ +############################################################################## +# +# Copyright (c) 2002, 2003 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Default implementation of a data type registry + +This module provides the implementation of the default data type +registry and all the standard data types supported by :mod:`ZConfig`. +A number of convenience classes are also provided to assist in the +creation of additional data types. + +A "data type registry" is an object that provides conversion functions +for data types. The interface for a :class:`registry ` is +fairly simple. + +A "conversion function" is any callable object that accepts a single +argument and returns a suitable value, or raises an exception if the +input value is not acceptable. 
:exc:`ValueError` is the preferred +exception for disallowed inputs, but any other exception will be +properly propagated. +""" + +import os +import re +import sys +import datetime + +try: + unicode +except NameError: + # Python 3 + have_unicode = False + from functools import reduce +else: + have_unicode = True + + +class MemoizedConversion(object): + """Simple memoization for potentially expensive conversions. + + This conversion helper caches each successful conversion for re-use + at a later time; failed conversions are not cached in any way, since + it is difficult to raise a meaningful exception providing + information about the specific failure. + """ + + def __init__(self, conversion): + self._memo = {} + self._conversion = conversion + + def __call__(self, value): + try: + return self._memo[value] + except KeyError: + v = self._conversion(value) + self._memo[value] = v + return v + + +class RangeCheckedConversion(object): + """Conversion helper that performs range checks on the result of + another conversion. + + Values passed to instances of this conversion are converted using + *conversion* and then range checked. *min* and *max*, if given and + not ``None``, are the inclusive endpoints of the allowed range. + Values returned by *conversion* which lay outside the range + described by *min* and *max* cause :exc:`ValueError` to be raised. + """ + + def __init__(self, conversion, min=None, max=None): + self._min = min + self._max = max + self._conversion = conversion + + def __call__(self, value): + v = self._conversion(value) + if self._min is not None and v < self._min: + raise ValueError("%s is below lower bound (%s)" + % (repr(v), repr(self._min))) + if self._max is not None and v > self._max: + raise ValueError("%s is above upper bound (%s)" + % (repr(v), repr(self._max))) + return v + + +class RegularExpressionConversion(object): + """Conversion that checks that the input matches the regular + expression *regex*. 
+ + If it matches, returns the input, otherwise raises + :exc:`ValueError`. + """ + + reason = "value did not match regular expression" + + def __init__(self, regex): + self._rx = re.compile(regex) + + def __call__(self, value): + m = self._rx.match(value) + if m and m.group() == value: + return value + else: + raise ValueError("%s: %s" % (self.reason, repr(value))) + + +def check_locale(value): + import locale + prev = locale.setlocale(locale.LC_ALL) + try: + try: + locale.setlocale(locale.LC_ALL, value) + finally: + locale.setlocale(locale.LC_ALL, prev) + except locale.Error: + raise ValueError( + 'The specified locale "%s" is not supported by your system.\n' + 'See your operating system documentation for more\n' + 'information on locale support.' % value) + else: + return value + + +class BasicKeyConversion(RegularExpressionConversion): + def __init__(self): + RegularExpressionConversion.__init__(self, "[a-zA-Z][-._a-zA-Z0-9]*") + + def __call__(self, value): + value = str(value) + return RegularExpressionConversion.__call__(self, value).lower() + + +class ASCIIConversion(RegularExpressionConversion): + def __call__(self, value): + value = RegularExpressionConversion.__call__(self, value) + if have_unicode and isinstance(value, unicode): + value = value.encode("ascii") + return value + + +_ident_re = "[_a-zA-Z][_a-zA-Z0-9]*" + +class IdentifierConversion(ASCIIConversion): + reason = "not a valid Python identifier" + + def __init__(self): + ASCIIConversion.__init__(self, _ident_re) + + +class DottedNameConversion(ASCIIConversion): + reason = "not a valid dotted name" + + def __init__(self): + ASCIIConversion.__init__(self, + r"%s(?:\.%s)*" % (_ident_re, _ident_re)) + + +class DottedNameSuffixConversion(ASCIIConversion): + reason = "not a valid dotted name or suffix" + + def __init__(self): + ASCIIConversion.__init__(self, + r"(?:%s)(?:\.%s)*|(?:\.%s)+" + % (_ident_re, _ident_re, _ident_re)) + + +def integer(value): + return int(value) + + +def 
null_conversion(value): + return value + + +def asBoolean(s): + """Convert a string value to a boolean value.""" + ss = str(s).lower() + if ss in ('yes', 'true', 'on'): + return True + elif ss in ('no', 'false', 'off'): + return False + else: + raise ValueError("not a valid boolean value: " + repr(s)) + + +def string_list(s): + """Convert a string to a list of strings using .split().""" + return s.split() + + +port_number = RangeCheckedConversion(integer, min=0, max=0xffff).__call__ + + +class InetAddress(object): + + def __init__(self, default_host): + self.DEFAULT_HOST = default_host + + def __call__(self, s): + # returns (host, port) tuple + host = '' + port = None + if ":" in s: + host, p = s.rsplit(":", 1) + if host.startswith('[') and host.endswith(']'): + # [IPv6]:port + host = host[1:-1] + elif ':' in host: + # Unbracketed IPv6 address; + # last part is not the port number + host = s + p = None + if p: # else leave port at None + port = port_number(p) + host = host.lower() + else: + try: + port = port_number(s) + except ValueError: + if len(s.split()) != 1: + raise ValueError("not a valid host name: " + repr(s)) + host = s.lower() + if not host: + host = self.DEFAULT_HOST + return host, port + + +if sys.platform[:3] == "win": + DEFAULT_HOST = "localhost" +else: + DEFAULT_HOST = "" + +inet_address = InetAddress(DEFAULT_HOST) +inet_connection_address = InetAddress("127.0.0.1") +inet_binding_address = InetAddress("") + +class SocketAddress(object): + # Parsing results in family and address + # Family can be AF_UNIX (for addresses that are path names) + # or AF_INET6 (for inet addresses with colons in them) + # or AF_INET (for all other inet addresses); + # An inet address is a (host, port) pair + # Notice that no DNS lookup is performed, so if the host + # is a DNS name, DNS lookup may end up with either IPv4 or + # IPv6 addresses, or both + def __init__(self, s): + import socket + if "/" in s or s.find(os.sep) >= 0: + self.family = getattr(socket, "AF_UNIX", 
None) + self.address = s + else: + self.family = socket.AF_INET + self.address = self._parse_address(s) + if ':' in self.address[0]: + self.family = socket.AF_INET6 + + def _parse_address(self, s): + return inet_address(s) + +class SocketBindingAddress(SocketAddress): + + def _parse_address(self, s): + return inet_binding_address(s) + +class SocketConnectionAddress(SocketAddress): + + def _parse_address(self, s): + return inet_connection_address(s) + + +def float_conversion(v): + return float(v) + + +class IpaddrOrHostname(RegularExpressionConversion): + def __init__(self): + # IP address regex from the Perl Cookbook, Recipe 6.23 (revised ed.) + # We allow underscores in hostnames although this is considered + # illegal according to RFC1034. + # Addition: IPv6 addresses are now also accepted + expr = (r"(^(\d|[01]?\d\d|2[0-4]\d|25[0-5])\." #ipaddr + r"(\d|[01]?\d\d|2[0-4]\d|25[0-5])\." #ipaddr cont'd + r"(\d|[01]?\d\d|2[0-4]\d|25[0-5])\." #ipaddr cont'd + r"(\d|[01]?\d\d|2[0-4]\d|25[0-5])$)" #ipaddr cont'd + r"|([A-Za-z_][-A-Za-z0-9_.]*[-A-Za-z0-9_])" # or hostname + r"|([0-9A-Fa-f:.]+:[0-9A-Fa-f:.]*)" # or superset of IPv6 addresses + # (requiring at least one colon) + ) + RegularExpressionConversion.__init__(self, expr) + + def __call__(self, value): + result = RegularExpressionConversion.__call__(self, value).lower() + # Use C library to validate IPv6 addresses, in particular wrt. 
+ # number of colons and number of digits per group + if ':' in result: + import socket + try: + socket.inet_pton(socket.AF_INET6, result) + except socket.error: + raise ValueError('%r is not a valid IPv6 address' % value) + return result + +def existing_directory(v): + nv = os.path.expanduser(v) + if os.path.isdir(nv): + return nv + raise ValueError('%s is not an existing directory' % v) + +def existing_path(v): + nv = os.path.expanduser(v) + if os.path.exists(nv): + return nv + raise ValueError('%s is not an existing path' % v) + +def existing_file(v): + nv = os.path.expanduser(v) + if os.path.exists(nv): + return nv + raise ValueError('%s is not an existing file' % v) + +def existing_dirpath(v): + nv = os.path.expanduser(v) + dirname = os.path.dirname(nv) + if not dirname: + # relative pathname with no directory component + return nv + if os.path.isdir(dirname): + return nv + raise ValueError('The directory named as part of the path %s ' + 'does not exist.' % v) + + +class SuffixMultiplier(object): + # d is a dictionary of suffixes to integer multipliers. If no suffixes + # match, default is the multiplier. Matches are case insensitive. Return + # values are in the fundamental unit. + def __init__(self, d, default=1): + self._d = d + self._default = default + # all keys must be the same size + def check(a, b): + if len(a) != len(b): + raise ValueError("suffix length mismatch") + return a + self._keysz = len(reduce(check, d)) + + def __call__(self, v): + v = v.lower() + for s, m in self._d.items(): + if v[-self._keysz:] == s: + return int(v[:-self._keysz]) * m + return int(v) * self._default + + +def timedelta(s): + # Unlike the standard time-interval data type, which returns a float + # number of seconds, this datatype takes a wider range of syntax and + # returns a datetime.timedelta + # + # Accepts suffixes: + # w - weeks + # d - days + # h - hours + # m - minutes + # s - seconds + # + # and all arguments may be integers or floats, positive or negative. 
+ # More than one time interval suffix value may appear on the line, but + # they should all be separated by spaces, e.g.: + # + # sleep_time 4w 2d 7h 12m 0.00001s + weeks = days = hours = minutes = seconds = 0 + for part in s.split(): + val = float(part[:-1]) + suffix = part[-1] + if suffix == 'w': + weeks = val + elif suffix == 'd': + days = val + elif suffix == 'h': + hours = val + elif suffix == 'm': + minutes = val + elif suffix == 's': + seconds = val + else: + raise TypeError('bad part %s in %s' % (part, s)) + return datetime.timedelta(weeks=weeks, days=days, hours=hours, + minutes=minutes, seconds=seconds) + + +stock_datatypes = { + "boolean": asBoolean, + "dotted-name": DottedNameConversion(), + "dotted-suffix": DottedNameSuffixConversion(), + "identifier": IdentifierConversion(), + "integer": integer, + "float": float_conversion, + "string": str, + "string-list": string_list, + "null": null_conversion, + "locale": MemoizedConversion(check_locale), + "port-number": port_number, + "basic-key": BasicKeyConversion(), + "inet-address": inet_address, + "inet-binding-address": inet_binding_address, + "inet-connection-address": inet_connection_address, + "socket-address": SocketAddress, + "socket-binding-address": SocketBindingAddress, + "socket-connection-address": SocketConnectionAddress, + "ipaddr-or-hostname":IpaddrOrHostname(), + "existing-directory":existing_directory, + "existing-path": existing_path, + "existing-file": existing_file, + "existing-dirpath": existing_dirpath, + "byte-size": SuffixMultiplier({'kb': 1024, + 'mb': 1024*1024, + 'gb': 1024*1024*1024, + }), + "time-interval": SuffixMultiplier({'s': 1, + 'm': 60, + 'h': 60*60, + 'd': 60*60*24, + }), + "timedelta": timedelta, + } + + +class Registry(object): + """Implementation of a simple type registry. 
+ + If given, *stock* should be a mapping which defines the "built-in" + data types for the registry; if omitted or ``None``, the standard + set of data types is used (see :ref:`standard-datatypes`). + """ + def __init__(self, stock=None): + if stock is None: + stock = stock_datatypes.copy() + self._stock = stock + self._other = {} + self._basic_key = None + + def find_name(self, conversion): + """Return the best name for *conversion*, which must have been returned + from *get* on this object.""" + for dct in self._other, self._stock: + for k, v in dct.items(): + if v is conversion: + return k + + # If they followed the rules, we shouldn't get here. + return str(conversion) # pragma: no cover + + def get(self, name): + """Return the type conversion routine for *name*. + + If the conversion function cannot be found, an (unspecified) + exception is raised. If the name is not provided in the stock + set of data types by this registry and has not otherwise been + registered, this method uses the :meth:`search` method to load + the conversion function. This is the only method the rest of + :mod:`ZConfig` requires. + """ + if '.' not in name: + if self._basic_key is None: + self._basic_key = self._other.get("basic-key") + if self._basic_key is None: + self._basic_key = self._stock.get("basic-key") + if self._basic_key is None: + self._basic_key = stock_datatypes["basic-key"] + name = self._basic_key(name) + t = self._stock.get(name) + if t is None: + t = self._other.get(name) + if t is None: + t = self.search(name) + return t + + def register(self, name, conversion): + """Register the data type name *name* to use the conversion function + *conversion*. + + If *name* is already registered or provided as a stock data + type, :exc:`ValueError` is raised (this includes the case when + *name* was found using the :meth:`search` method). 
+ """ + if name in self._stock: + raise ValueError("datatype name conflicts with built-in type: " + + repr(name)) + if name in self._other: + raise ValueError("datatype name already registered: " + repr(name)) + self._other[name] = conversion + + def search(self, name): + """This is a helper method for the default implementation of the + :meth:`get` method. + + If *name* is a Python dotted-name, this method loads the value + for the name by dynamically importing the containing module + and extracting the value of the name. The name must refer to a + usable conversion function. + """ + if not "." in name: + raise ValueError("unloadable datatype name: " + repr(name)) + components = name.split('.') + start = components[0] + g = {} + package = __import__(start, g, g) + modulenames = [start] + for component in components[1:]: + modulenames.append(component) + try: + package = getattr(package, component) + except AttributeError: + n = '.'.join(modulenames) + package = __import__(n, g, g, component) + self._other[name] = package + return package diff --git a/thesisenv/lib/python3.6/site-packages/ZConfig/info.py b/thesisenv/lib/python3.6/site-packages/ZConfig/info.py new file mode 100644 index 0000000..26962a5 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZConfig/info.py @@ -0,0 +1,519 @@ +############################################################################## +# +# Copyright (c) 2002, 2003 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. 
+# +############################################################################## +"""Objects that can describe a ZConfig schema.""" + +import copy +import ZConfig + +from abc import abstractmethod +from collections import OrderedDict +from functools import total_ordering + +from ZConfig._compat import AbstractBaseClass + +@total_ordering +class UnboundedThing(object): + __slots__ = () + + def __gt__(self, other): + if isinstance(other, self.__class__): + return False + return True + + def __eq__(self, other): + return isinstance(other, self.__class__) + + def __repr__(self): # pragma: no cover + return "" + +Unbounded = UnboundedThing() + + +class ValueInfo(object): + __slots__ = 'value', 'position' + + def __init__(self, value, position): + self.value = value + # position is (lineno, colno, url) + self.position = position + + def convert(self, datatype): + try: + return datatype(self.value) + except ValueError as e: + raise ZConfig.DataConversionError(e, self.value, self.position) + + +class BaseInfo(object): + """Information about a single configuration key.""" + + description = None + example = None + metadefault = None + + def __init__(self, name, datatype, minOccurs, maxOccurs, handler, + attribute): + assert maxOccurs is not None, "Use Unbounded for an upper bound, not None" + assert minOccurs is not None, "Use 0 for a lower bound, not None" + + if maxOccurs < 1: + raise ZConfig.SchemaError( + "maxOccurs must be at least 1") + if minOccurs > maxOccurs: + raise ZConfig.SchemaError( + "minOccurs cannot be more than maxOccurs") + self.name = name + self.datatype = datatype + self.minOccurs = minOccurs + self.maxOccurs = maxOccurs + self.handler = handler + self.attribute = attribute + + def __repr__(self): + clsname = self.__class__.__name__ + return "<%s for %s>" % (clsname, repr(self.name)) + + def isabstract(self): + return False + + def ismulti(self): + return self.maxOccurs > 1 + + def issection(self): + return False + + +class 
BaseKeyInfo(AbstractBaseClass, BaseInfo): + + _rawdefaults = None + + def __init__(self, name, datatype, minOccurs, maxOccurs, handler, + attribute): + BaseInfo.__init__(self, name, datatype, minOccurs, maxOccurs, + handler, attribute) + self._finished = False + + def finish(self): + if self._finished: + raise ZConfig.SchemaError( + "cannot finish KeyInfo more than once") + self._finished = True + + def adddefault(self, value, position, key=None): + if self._finished: + raise ZConfig.SchemaError( + "cannot add default values to finished KeyInfo") + # Check that the name/keyed relationship is right: + if self.name == "+" and key is None: + raise ZConfig.SchemaError( + "default values must be keyed for name='+'") + elif self.name != "+" and key is not None: + raise ZConfig.SchemaError( + "unexpected key for default value") + self.add_valueinfo(ValueInfo(value, position), key) + + @abstractmethod + def add_valueinfo(self, vi, key): + """Actually add a ValueInfo to this key-info object. + + The appropriate value of None-ness of key has already been + checked with regard to the name of the key, and has been found + permissible to add. + + This method is a requirement for subclasses, and should not be + called by client code. 
+ """ + + def prepare_raw_defaults(self): + assert self.name == "+" + if self._rawdefaults is None: + self._rawdefaults = self._default + self._default = OrderedDict() + + +class KeyInfo(BaseKeyInfo): + + _default = None + + def __init__(self, name, datatype, minOccurs, handler, attribute): + BaseKeyInfo.__init__(self, name, datatype, minOccurs, 1, + handler, attribute) + if self.name == "+": + self._default = OrderedDict() + + def add_valueinfo(self, vi, key): + if self.name == "+": + if key in self._default: + # not ideal: we're presenting the unconverted + # version of the key + raise ZConfig.SchemaError( + "duplicate default value for key %s" % repr(key)) + self._default[key] = vi + elif self._default is not None: + raise ZConfig.SchemaError( + "cannot set more than one default to key with maxOccurs == 1") + else: + self._default = vi + + def computedefault(self, keytype): + self.prepare_raw_defaults() + for k, vi in self._rawdefaults.items(): + key = ValueInfo(k, vi.position).convert(keytype) + self.add_valueinfo(vi, key) + + def getdefault(self): + # Use copy.copy() to make sure we don't allow polution of + # our internal data without having to worry about both the + # list and dictionary cases: + return copy.copy(self._default) + + +class MultiKeyInfo(BaseKeyInfo): + + def __init__(self, name, datatype, minOccurs, maxOccurs, handler, + attribute): + BaseKeyInfo.__init__(self, name, datatype, minOccurs, maxOccurs, + handler, attribute) + if self.name == "+": + self._default = OrderedDict() + else: + self._default = [] + + def add_valueinfo(self, vi, key): + if self.name == "+": + # This is a keyed value, not a simple value: + if key in self._default: + self._default[key].append(vi) + else: + self._default[key] = [vi] + else: + self._default.append(vi) + + def computedefault(self, keytype): + self.prepare_raw_defaults() + for k, vlist in self._rawdefaults.items(): + key = ValueInfo(k, vlist[0].position).convert(keytype) + for vi in vlist: + 
self.add_valueinfo(vi, key) + + def getdefault(self): + return copy.copy(self._default) + + +class SectionInfo(BaseInfo): + def __init__(self, name, sectiontype, minOccurs, maxOccurs, handler, + attribute): + # name - name of the section; one of '*', '+', or name1 + # sectiontype - SectionType instance + # minOccurs - minimum number of occurances of the section + # maxOccurs - maximum number of occurances; if > 1, name + # must be '*' or '+' + # handler - handler name called when value(s) must take effect, + # or None + # attribute - name of the attribute on the SectionValue object + if maxOccurs > 1: + if name not in ('*', '+'): + raise ZConfig.SchemaError( + "sections which can occur more than once must" + " use a name of '*' or '+'") + if not attribute: + raise ZConfig.SchemaError( + "sections which can occur more than once must" + " specify a target attribute name") + if sectiontype.isabstract(): + datatype = None + else: + datatype = sectiontype.datatype + BaseInfo.__init__(self, name, datatype, + minOccurs, maxOccurs, handler, attribute) + self.sectiontype = sectiontype + + def __repr__(self): + clsname = self.__class__.__name__ + return "<%s for %s (%s)>" % ( + clsname, self.sectiontype.name, repr(self.name)) + + def issection(self): + return True + + def allowUnnamed(self): + return self.name == "*" + + def isAllowedName(self, name): + if name == "*" or name == "+": + return False + elif self.name == "+": + return True if name else False + elif self.name == "*": + return True + else: + return name == self.name + + def getdefault(self): + # sections cannot have defaults + if self.maxOccurs > 1: + return [] + else: + return None + + +class AbstractType(object): + # This isn't actually "abstract" in the Python ABC sense, + # it's only abstract from the schema sense. This class is + # instantiated, and not expected to be subclassed. 
+ __slots__ = '_subtypes', 'name', 'description' + + def __init__(self, name): + self._subtypes = OrderedDict() + self.name = name + self.description = None + + def __iter__(self): + return iter(self._subtypes.items()) + + def addsubtype(self, type_): + self._subtypes[type_.name] = type_ + + def getsubtype(self, name): + try: + return self._subtypes[name] + except KeyError: + raise ZConfig.SchemaError("no sectiontype %s in abstracttype %s" + % (repr(name), repr(self.name))) + + def hassubtype(self, name): + """Return true iff this type has 'name' as a concrete manifestation.""" + return name in self._subtypes.keys() + + def getsubtypenames(self): + """Return the names of all concrete types as a sorted list.""" + return sorted(self._subtypes.keys()) + + def isabstract(self): + return True + + +class SectionType(object): + def __init__(self, name, keytype, valuetype, datatype, registry, types): + # name - name of the section, or '*' or '+' + # datatype - type for the section itself + # keytype - type for the keys themselves + # valuetype - default type for key values + self.name = name + self.datatype = datatype + self.keytype = keytype + self.valuetype = valuetype + self.handler = None + self.description = None + self.example = None + self.registry = registry + self._children = [] # [(key, info), ...] 
+ self._attrmap = OrderedDict() # {attribute: info, ...} + self._keymap = OrderedDict() # {key: info, ...} + self._types = types + + def gettype(self, name): + n = name.lower() + try: + return self._types[n] + except KeyError: + raise ZConfig.SchemaError("unknown type name: " + repr(name)) + + def gettypenames(self): + return list(self._types.keys()) + + def __len__(self): + return len(self._children) + + def __getitem__(self, index): + return self._children[index] + + def __iter__(self): + return iter(self._children) + + def itertypes(self): + return iter(sorted(self._types.items())) + + def _add_child(self, key, info): + # check naming constraints + assert key or info.attribute + if key and key in self._keymap: + raise ZConfig.SchemaError( + "child name %s already used" % key) + if info.attribute and info.attribute in self._attrmap: + raise ZConfig.SchemaError( + "child attribute name %s already used" % info.attribute) + # a-ok, add the item to the appropriate maps + if info.attribute: + self._attrmap[info.attribute] = info + if key: + self._keymap[key] = info + self._children.append((key, info)) + + def addkey(self, keyinfo): + self._add_child(keyinfo.name, keyinfo) + + def addsection(self, name, sectinfo): + assert name not in ("*", "+") + self._add_child(name, sectinfo) + + def getinfo(self, key): + if not key: + raise ZConfig.ConfigurationError( + "cannot match a key without a name") + try: + return self._keymap[key] + except KeyError: + raise ZConfig.ConfigurationError("no key matching " + repr(key)) + + def getrequiredtypes(self): + d = OrderedDict() + if self.name: + d[self.name] = 1 + stack = [self] + while stack: + info = stack.pop() + for key, ci in info._children: + if ci.issection(): + t = ci.sectiontype + if t.name not in d: + d[t.name] = 1 + stack.append(t) + return list(d.keys()) + + def getsectioninfo(self, type_, name): + for key, info in self._children: + if key: + if key == name: + if not info.issection(): + raise ZConfig.ConfigurationError( + 
"section name %s already in use for key" % key) + st = info.sectiontype + if st.isabstract(): + try: + st = st.getsubtype(type_) + except ZConfig.ConfigurationError: # pragma: no cover + raise ZConfig.ConfigurationError( + "section type %s not allowed for name %s" + % (repr(type_), repr(key))) + if st.name != type_: + raise ZConfig.ConfigurationError( + "name %s must be used for a %s section" + % (repr(name), repr(st.name))) + return info + # else must be a sectiontype or an abstracttype: + elif info.sectiontype.name == type_: + if not (name or info.allowUnnamed()): + raise ZConfig.ConfigurationError( + repr(type_) + " sections must be named") + return info + elif info.sectiontype.isabstract(): + st = info.sectiontype + if st.name == type_: # pragma: no cover + raise ZConfig.ConfigurationError( + "cannot define section with an abstract type") + try: + st = st.getsubtype(type_) + except ZConfig.ConfigurationError: # pragma: no cover + # not this one; maybe a different one + pass + else: + return info + raise ZConfig.ConfigurationError( + "no matching section defined for type='%s', name='%s'" % ( + type_, name)) + + def isabstract(self): + return False + + +class SchemaType(SectionType): + def __init__(self, keytype, valuetype, datatype, handler, url, + registry): + SectionType.__init__(self, None, keytype, valuetype, datatype, + registry, {}) + self._components = OrderedDict() + self.handler = handler + self.url = url + + def addtype(self, typeinfo): + n = typeinfo.name + if n in self._types: + raise ZConfig.SchemaError("type name cannot be redefined: " + + repr(typeinfo.name)) + self._types[n] = typeinfo + + def allowUnnamed(self): + return True + + def isAllowedName(self, name): + return False + + def issection(self): + return True + + def getunusedtypes(self): + alltypes = self.gettypenames() + reqtypes = self.getrequiredtypes() + for n in reqtypes: + alltypes.remove(n) + if self.name and self.name in alltypes: + # Not clear we can get here + 
alltypes.remove(self.name) # pragma: no cover. + return alltypes + + def createSectionType(self, name, keytype, valuetype, datatype): + t = SectionType(name, keytype, valuetype, datatype, + self.registry, self._types) + self.addtype(t) + return t + + def deriveSectionType(self, base, name, keytype, valuetype, datatype): + if isinstance(base, SchemaType): + raise ZConfig.SchemaError( + "cannot derive sectiontype from top-level schema") + t = self.createSectionType(name, keytype, valuetype, datatype) + t._attrmap.update(base._attrmap) + t._keymap.update(base._keymap) + t._children.extend(base._children) + for i in range(len(t._children)): + key, info = t._children[i] + if isinstance(info, BaseKeyInfo) and info.name == "+": + # need to create a new info object and recompute the + # default mapping based on the new keytype + info = copy.copy(info) + info.computedefault(t.keytype) + t._children[i] = (key, info) + return t + + def addComponent(self, name): + if name in self._components: + raise ZConfig.SchemaError("already have component %s" % name) + self._components[name] = name + + def hasComponent(self, name): + return name in self._components + + +def createDerivedSchema(base): + new = SchemaType(base.keytype, base.valuetype, base.datatype, + base.handler, base.url, base.registry) + new._components.update(base._components) + new.description = base.description + new.example = base.example + new._children[:] = base._children + new._attrmap.update(base._attrmap) + new._keymap.update(base._keymap) + new._types.update(base._types) + return new diff --git a/thesisenv/lib/python3.6/site-packages/ZConfig/loader.py b/thesisenv/lib/python3.6/site-packages/ZConfig/loader.py new file mode 100644 index 0000000..dfa2bbc --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZConfig/loader.py @@ -0,0 +1,528 @@ +############################################################################## +# +# Copyright (c) 2002, 2003 Zope Foundation and Contributors. +# All Rights Reserved. 
+# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Schema loader utility.""" + +import os.path +import re +import sys + +from abc import abstractmethod +from io import StringIO + +import ZConfig +import ZConfig.cfgparser +import ZConfig.datatypes +import ZConfig.info +import ZConfig.matcher +import ZConfig.schema +import ZConfig.url + +from ZConfig._compat import reraise +from ZConfig._compat import raise_with_same_tb +from ZConfig._compat import urllib2 +from ZConfig._compat import AbstractBaseClass +from ZConfig._compat import pathname2url + + +def loadSchema(url): + """Load a schema definition from the URL *url*. + + *url* may be a URL, absolute pathname, or relative pathname. + Fragment identifiers are not supported. + + The resulting schema object can be passed to :func:`loadConfig` or + :func:`loadConfigFile`. The schema object may be used as many + times as needed. + + .. seealso:: :class:`~.SchemaLoader`, :meth:`.BaseLoader.loadURL` + """ + return SchemaLoader().loadURL(url) + + +def loadSchemaFile(file, url=None): + """Load a schema definition from the open file object *file*. + + If *url* is given and not ``None``, it should be the URL of + resource represented by *file*. If *url* is omitted or ``None``, a + URL may be computed from the ``name`` attribute of *file*, if + present. The resulting schema object can be passed to + :func:`loadConfig` or :func:`loadConfigFile`. The schema object + may be used as many times as needed. + + .. 
seealso:: :class:`~.SchemaLoader`, :meth:`.BaseLoader.loadFile` + """ + return SchemaLoader().loadFile(file, url) + + +def loadConfig(schema, url, overrides=()): + """Load and return a configuration from a URL or pathname given by + *url*. + + *url* may be a URL, absolute pathname, or relative pathname. + Fragment identifiers are not supported. *schema* is a reference to a + schema loaded by :func:`loadSchema` or :func:`loadSchemaFile`. + + The return value is a tuple containing the configuration object and + a composite handler that, when called with a name-to-handler + mapping, calls all the handlers for the configuration. + + The optional *overrides* argument represents information derived + from command-line arguments. If given, it must be either a + sequence of value specifiers, or ``None``. A "value specifier" is + a string of the form ``optionpath=value``, for example, + ``some/path/to/key=value``. + + .. seealso:: + :meth:`.ExtendedConfigLoader.addOption` + For information on the format of value specifiers. + :class:`~.ConfigLoader` + For information about loading configs. + :meth:`.BaseLoader.loadURL` + For information about the format of *url* + """ + return _get_config_loader(schema, overrides).loadURL(url) + + +def loadConfigFile(schema, file, url=None, overrides=()): + """Load and return a configuration from an opened file object. + + If *url* is omitted, one will be computed based on the ``name`` + attribute of *file*, if it exists. If no URL can be determined, + all ``%include`` statements in the configuration must use absolute + URLs. *schema* is a reference to a schema loaded by + :func:`loadSchema` or :func:`loadSchemaFile`. + + The return value is a tuple containing the configuration object + and a composite handler that, when called with a name-to-handler + mapping, calls all the handlers for the configuration. The + *overrides* argument is the same as for the :func:`loadConfig` + function. + + .. 
seealso:: :class:`~.ConfigLoader`, :meth:`.BaseLoader.loadFile`, + :meth:`.ExtendedConfigLoader.addOption` + """ + return _get_config_loader(schema, overrides).loadFile(file, url) + + +def _get_config_loader(schema, overrides): + if overrides: + from ZConfig import cmdline + loader = cmdline.ExtendedConfigLoader(schema) + for opt in overrides: + loader.addOption(opt) + else: + loader = ConfigLoader(schema) + return loader + + +class BaseLoader(AbstractBaseClass): + """Base class for loader objects. + + This should not be instantiated + directly, as the :meth:`loadResource` method must be overridden + for the instance to be used via the public API. + """ + + def __init__(self): + pass + + def createResource(self, file, url): + """Returns a resource object for an open file and URL, given as *file* + and *url*, respectively. + + This may be overridden by a subclass if an alternate resource + implementation is desired. + """ + return Resource(file, url) + + def loadURL(self, url): + """Open and load a resource specified by the URL *url*. + + This method uses the :meth:`loadResource` method to perform the + actual load, and returns whatever that method returns. + """ + url = self.normalizeURL(url) + r = self.openResource(url) + try: + return self.loadResource(r) + finally: + r.close() + + def loadFile(self, file, url=None): + """Load from an open file object, *file*. + + If given and not ``None``, *url* should be the URL of the + resource represented by *file*. If omitted or *None*, the + ``name`` attribute of *file* is used to compute a ``file:`` + URL, if present. + + This method uses the :meth:`loadResource` method to perform the + actual load, and returns whatever that method returns. + """ + if not url: + url = _url_from_file(file) + r = self.createResource(file, url) + try: + return self.loadResource(r) + finally: + r.close() + + # utilities + + @abstractmethod + def loadResource(self, resource): + """Abstract method. 
+ + Subclasses of :class:`BaseLoader` must implement this method to + actually load the resource and return the appropriate + application-level object. + """ + + def openResource(self, url): + """Returns a resource object that represents the URL *url*. + + The URL is opened using the :func:`urllib2.urlopen` function, + and the returned resource object is created using + :meth:`createResource`. If the URL cannot be opened, + :exc:`~.ConfigurationError` is raised. + """ + # ConfigurationError exceptions raised here should be + # str()able to generate a message for an end user. + # + # XXX This should be replaced to use a local cache for remote + # resources. The policy needs to support both re-retrieve on + # change and provide the cached resource when the remote + # resource is not accessible. + url = str(url) + if url.startswith("package:"): + _, package, filename = url.split(":", 2) + file = openPackageResource(package, filename) + else: + try: + file = urllib2.urlopen(url) + except urllib2.URLError as e: + # urllib2.URLError has a particularly hostile str(), so we + # generally don't want to pass it along to the user. + self._raise_open_error(url, e.reason) # pragma: no cover + except (IOError, OSError) as e: + # Python 2.1 raises a different error from Python 2.2+, + # so we catch both to make sure we detect the situation. + self._raise_open_error(url, str(e)) + + # Python 3 support: file.read() returns bytes, so we convert it + # to an StringIO. (Can't use io.TextIOWrapper because of + # http://bugs.python.org/issue16723 and probably other bugs). + # Do this even on Python 2 to avoid keeping a network connection + # open for an unbounded amount of time and to catch IOErrors here, + # where they make sense. + try: + data = file.read() + finally: + file.close() + if isinstance(data, bytes): + # Be sure to specify an (useful) encoding so we don't get + # the system default, typically ascii. 
+ data = data.decode('utf-8') + file = StringIO(data) + return self.createResource(file, url) + + def _raise_open_error(self, url, message): + if url[:7].lower() == "file://": + what = "file" + ident = urllib2.url2pathname(url[7:]) + else: + what = "URL" + ident = url + error = ZConfig.ConfigurationError( + "error opening %s %s: %s" % (what, ident, message), + url) + raise_with_same_tb(error) + + def normalizeURL(self, url): + """Return a URL for *url* + + If *url* refers to an existing file, the corresponding + ``file:`` URL is returned. Otherwise *url* is checked + for sanity: if it does not have a schema, :exc:`ValueError` is + raised, and if it does have a fragment identifier, + :exc:`~.ConfigurationError` is raised. + + This uses :meth:`isPath` to determine whether *url* is + a URL of a filesystem path. + """ + if self.isPath(url): + url = "file://" + pathname2url(os.path.abspath(url)) + newurl, fragment = ZConfig.url.urldefrag(url) + if fragment: + raise ZConfig.ConfigurationError( + "fragment identifiers are not supported", + url) + return newurl + + # from RFC 3986: + # schema = ALPHA *( ALPHA / DIGIT / "+" / "-" / "." ) + _pathsep_rx = re.compile(r"[a-zA-Z][-+.a-zA-Z0-9]*:") + + def isPath(self, s): + """Return true if *s* should be considered a filesystem path rather + than a URL. + """ + if ":" in s: + # XXX This assumes that one-character scheme identifiers + # are always Windows drive letters; I don't know of any + # one-character scheme identifiers. + m = self._pathsep_rx.match(s) + if m is None: + return True + # Does it look like a drive letter? 
+ return len(m.group(0)) == 2 + else: + return True + + +def openPackageResource(package, path): + __import__(package) + pkg = sys.modules[package] + try: + loader = pkg.__loader__ + except AttributeError: + relpath = os.path.join(*path.split("/")) + for dirname in pkg.__path__: + filename = os.path.join(dirname, relpath) + if os.path.exists(filename): + break + else: + raise ZConfig.SchemaResourceError("schema component not found", + filename=path, + package=package, + path=pkg.__path__) + url = "file:" + pathname2url(filename) + url = ZConfig.url.urlnormalize(url) + return urllib2.urlopen(url) + else: + v, tb = (None, None) + for dirname in pkg.__path__: + loadpath = os.path.join(dirname, path) + try: + return StringIO( + loader.get_data(loadpath).decode('utf-8')) + except Exception as e: + v = ZConfig.SchemaResourceError( + "error opening schema component: " + repr(e), + filename=path, + package=package, + path=pkg.__path__) + tb = sys.exc_info()[2] + + if v is not None: + try: + reraise(type(v), v, tb) + finally: + del tb + + raise ZConfig.SchemaResourceError("schema component not found", + filename=path, + package=package, + path=pkg.__path__) + + +def _url_from_file(file_or_path): + name = getattr(file_or_path, "name", None) + if name and name[0] != "<" and name[-1] != ">": + return "file://" + pathname2url(os.path.abspath(name)) + + +class SchemaLoader(BaseLoader): + """ Loader that loads schema instances. + + All schema loaded by a :class:`SchemaLoader` will use the same + data type registry. If *registry* is provided and not ``None``, it + will be used, otherwise an instance of + :class:`ZConfig.datatypes.Registry` will be used. 
+ """ + + def __init__(self, registry=None): + if registry is None: + registry = ZConfig.datatypes.Registry() + BaseLoader.__init__(self) + self.registry = registry + self._cache = {} + + def loadResource(self, resource): + if resource.url and resource.url in self._cache: + schema = self._cache[resource.url] + else: + schema = ZConfig.schema.parseResource(resource, self) + self._cache[resource.url] = schema + return schema + + # schema parser support API + + def schemaComponentSource(self, package, filename): + parts = package.split(".") + if not parts: # pragma: no cover. can we even get here? + raise ZConfig.SchemaError( + "illegal schema component name: " + repr(package)) + if "" in parts: + # '' somewhere in the package spec; still illegal + raise ZConfig.SchemaError( + "illegal schema component name: " + repr(package)) + filename = filename or "component.xml" + try: + __import__(package) + except ImportError as e: + raise ZConfig.SchemaResourceError( + "could not load package %s: %s" % (package, str(e)), + filename=filename, + package=package) + pkg = sys.modules[package] + if not hasattr(pkg, "__path__"): + raise ZConfig.SchemaResourceError( + "import name does not refer to a package", + filename=filename, package=package) + return "package:%s:%s" % (package, filename) + + +class ConfigLoader(BaseLoader): + """Loader for configuration files. + + Each configuration file must + conform to the schema *schema*. The ``load*()`` methods + return a tuple consisting of the configuration object and a + composite handler. 
+ """ + + + def __init__(self, schema): + if schema.isabstract(): + raise ZConfig.SchemaError( + "cannot check a configuration an abstract type") + BaseLoader.__init__(self) + self.schema = schema + self._private_schema = False + + def loadResource(self, resource): + sm = self.createSchemaMatcher() + self._parse_resource(sm, resource) + result = sm.finish(), CompositeHandler(sm.handlers, self.schema) + return result + + def createSchemaMatcher(self): + return ZConfig.matcher.SchemaMatcher(self.schema) + + # config parser support API + + def startSection(self, parent, type_, name): + t = self.schema.gettype(type_) + if t.isabstract(): + raise ZConfig.ConfigurationError( + "concrete sections cannot match abstract section types;" + " found abstract type " + repr(type_)) + return parent.createChildMatcher(t, name) + + def endSection(self, parent, type_, name, matcher): + sectvalue = matcher.finish() + parent.addSection(type_, name, sectvalue) + + def importSchemaComponent(self, pkgname): + schema = self.schema + if not self._private_schema: + # replace the schema with an extended schema on the first %import + self._loader = SchemaLoader(self.schema.registry) + schema = ZConfig.info.createDerivedSchema(self.schema) + self._private_schema = True + self.schema = schema + url = self._loader.schemaComponentSource(pkgname, '') + if schema.hasComponent(url): + return + resource = self.openResource(url) + schema.addComponent(url) + try: + ZConfig.schema.parseComponent(resource, self._loader, schema) + finally: + resource.close() + + def includeConfiguration(self, section, url, defines): + url = self.normalizeURL(url) + r = self.openResource(url) + try: + self._parse_resource(section, r, defines) + finally: + r.close() + + # internal helper + + def _parse_resource(self, matcher, resource, defines=None): + parser = ZConfig.cfgparser.ZConfigParser(resource, self, defines) + parser.parse(matcher) + + +class CompositeHandler(object): + + def __init__(self, handlers, schema): + 
self._handlers = handlers + self._convert = schema.registry.get("basic-key") + + def __call__(self, handlermap): + d = {} + for name, callback in handlermap.items(): + n = self._convert(name) + if n in d: + raise ZConfig.ConfigurationError( + "handler name not unique when converted to a basic-key: " + + repr(name)) + d[n] = callback + L = [] + for handler, value in self._handlers: + if handler not in d: + L.append(handler) + if L: + raise ZConfig.ConfigurationError( + "undefined handlers: " + ", ".join(L)) + for handler, value in self._handlers: + f = d[handler] + if f is not None: + f(value) + + def __len__(self): + return len(self._handlers) + + +class Resource(object): + """Object that allows an open file object and a URL to be bound + together to ease handling. + + Instances have the attributes :attr:`file` and :attr:`url`, which + store the constructor arguments. These objects also have a + :meth:`close` method which will call :meth:`~file.close` on + *file*, then set the :attr:`file` attribute to ``None`` and the + :attr:`closed` attribute to ``True``. + + All other attributes are delegated to *file*. + """ + + closed = False + + def __init__(self, file, url): + self.file = file + self.url = url + + def close(self): + if self.file is not None: + self.file.close() + self.file = None + self.closed = True + + def __getattr__(self, name): + return getattr(self.file, name) diff --git a/thesisenv/lib/python3.6/site-packages/ZConfig/matcher.py b/thesisenv/lib/python3.6/site-packages/ZConfig/matcher.py new file mode 100644 index 0000000..2482478 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZConfig/matcher.py @@ -0,0 +1,305 @@ +############################################################################## +# +# Copyright (c) 2002, 2003 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Utility that manages the binding of configuration data to a section.""" + +import ZConfig + +from ZConfig.info import ValueInfo +from ZConfig._compat import raise_with_same_tb + + +class BaseMatcher(object): + def __init__(self, info, type_, handlers): + self.info = info + self.type = type_ + self._values = {} + for _type_key, type_info in type_: + if type_info.name == "+" and not type_info.issection(): + v = {} + elif type_info.ismulti(): + v = [] + else: + v = None + assert type_info.attribute is not None + self._values[type_info.attribute] = v + self._sectionnames = {} + self.handlers = handlers if handlers is not None else [] + + def __repr__(self): + clsname = self.__class__.__name__ + extra = "type " + repr(self.type.name) + return "<%s for %s>" % (clsname, extra) + + def addSection(self, type_, name, sectvalue): + if name: + if name in self._sectionnames: + raise ZConfig.ConfigurationError( + "section names must not be re-used within the" + " same container:" + repr(name)) + self._sectionnames[name] = name + ci = self.type.getsectioninfo(type_, name) + attr = ci.attribute + v = self._values[attr] + if ci.ismulti(): + v.append(sectvalue) + elif v is None: + self._values[attr] = sectvalue + else: # pragma: no cover + raise ZConfig.ConfigurationError( + "too many instances of %s section" % repr(ci.sectiontype.name)) + + def addValue(self, key, value, position): + try: + realkey = self.type.keytype(key) + except ValueError as e: + raise ZConfig.DataConversionError(e, key, position) + arbkey_info = None + for i in range(len(self.type)): + k, ci = self.type[i] + if k == realkey: + break + if ci.name == "+" and not ci.issection(): + 
arbkey_info = k, ci + else: + if arbkey_info is None: + raise ZConfig.ConfigurationError( + repr(key) + " is not a known key name") + k, ci = arbkey_info + if ci.issection(): # pragma: no cover + if ci.name: + extra = " in %s sections" % repr(self.type.name) + else: + extra = "" + raise ZConfig.ConfigurationError( + "%s is not a valid key name%s" % (repr(key), extra)) + + ismulti = ci.ismulti() + attr = ci.attribute + assert attr is not None + v = self._values[attr] + if v is None: + if k == '+': + v = {} # pragma: no cover + elif ismulti: + v = [] # pragma: no cover + self._values[attr] = v + elif not ismulti: + if k != '+': + raise ZConfig.ConfigurationError( + repr(key) + " does not support multiple values") + elif len(v) == ci.maxOccurs: # pragma: no cover + # This code may be impossible to hit. Previously it would + # have raised a NameError because it used an unbound + # local. + raise ZConfig.ConfigurationError( + "too many values for " + repr(ci)) + + value = ValueInfo(value, position) + if k == '+': + if ismulti: + if realkey in v: + v[realkey].append(value) + else: + v[realkey] = [value] + else: + if realkey in v: # pragma: no cover + raise ZConfig.ConfigurationError( + "too many values for " + repr(key)) + v[realkey] = value + elif ismulti: + v.append(value) + else: + self._values[attr] = value + + def createChildMatcher(self, type_, name): + ci = self.type.getsectioninfo(type_.name, name) + assert not ci.isabstract() + if not ci.isAllowedName(name): + raise ZConfig.ConfigurationError( + "%s is not an allowed name for %s sections" + % (repr(name), repr(ci.sectiontype.name))) + return SectionMatcher(ci, type_, name, self.handlers) + + def finish(self): + """Check the constraints of the section and convert to an application + object.""" + values = self._values + for key, ci in self.type: + if key: + key = repr(key) + else: + key = "section type " + repr(ci.sectiontype.name) + assert ci.attribute is not None + attr = ci.attribute + v = values[attr] + if 
ci.name == '+' and not ci.issection(): + # v is a dict + if ci.minOccurs > len(v): + raise ZConfig.ConfigurationError( + "no keys defined for the %s key/value map; at least %d" + " must be specified" % (attr, ci.minOccurs)) + if v is None and ci.minOccurs: + default = ci.getdefault() + if default is None: + raise ZConfig.ConfigurationError( + "no values for %s; %s required" % (key, ci.minOccurs)) + else: + v = values[attr] = default[:] # pragma: no cover + if ci.ismulti(): + if not v: + default = ci.getdefault() + if isinstance(default, dict): + v.update(default) + else: + v[:] = default + if len(v) < ci.minOccurs: + raise ZConfig.ConfigurationError( + "not enough values for %s; %d found, %d required" + % (key, len(v), ci.minOccurs)) + if v is None and not ci.issection(): + if ci.ismulti(): + v = ci.getdefault()[:] # pragma: no cover + else: + v = ci.getdefault() + values[attr] = v + return self.constuct() + + def constuct(self): + values = self._values + for name, ci in self.type: + assert ci.attribute is not None + attr = ci.attribute + if ci.ismulti(): + if ci.issection(): + v = [] + for s in values[attr]: + if s is not None: + st = s.getSectionDefinition() + try: + s = st.datatype(s) + except ValueError as e: + raise_with_same_tb(ZConfig.DataConversionError( + e, s, (-1, -1, None))) + + v.append(s) + elif ci.name == '+': + v = values[attr] + for key, val in v.items(): + v[key] = [vi.convert(ci.datatype) for vi in val] + else: + v = [vi.convert(ci.datatype) for vi in values[attr]] + elif ci.issection(): + if values[attr] is not None: + st = values[attr].getSectionDefinition() + try: + v = st.datatype(values[attr]) + except ValueError as e: + raise_with_same_tb(ZConfig.DataConversionError( + e, values[attr], (-1, -1, None))) + + else: + v = None + elif name == '+': + v = values[attr] + if not v: + for key, val in ci.getdefault().items(): + v[key] = val.convert(ci.datatype) + else: + for key, val in v.items(): + v[key] = val.convert(ci.datatype) + else: + v = 
values[attr] + if v is not None: + v = v.convert(ci.datatype) + values[attr] = v + if ci.handler is not None: + self.handlers.append((ci.handler, v)) + return self.createValue() + + def createValue(self): + return SectionValue(self._values, None, self) + + +class SectionMatcher(BaseMatcher): + def __init__(self, info, type_, name, handlers): + if name or info.allowUnnamed(): + self.name = name + else: + raise ZConfig.ConfigurationError( + repr(type_.name) + " sections may not be unnamed") + BaseMatcher.__init__(self, info, type_, handlers) + + def createValue(self): + return SectionValue(self._values, self.name, self) + + +class SchemaMatcher(BaseMatcher): + def __init__(self, schema): + BaseMatcher.__init__(self, schema, schema, []) + + def finish(self): + # Since there's no outer container to call datatype() + # for the schema, we convert on the way out. + v = BaseMatcher.finish(self) + v = self.type.datatype(v) + if self.type.handler is not None: + self.handlers.append((self.type.handler, v)) + return v + + +class SectionValue(object): + """Generic 'bag-of-values' object for a section. + + Derived classes should always call the SectionValue constructor + before attempting to modify self. 
+ """ + + def __init__(self, values, name, matcher): + self.__dict__.update(values) + self._name = name + self._matcher = matcher + self._attributes = tuple(values.keys()) + + def __repr__(self): + if self._name: + # probably unique for a given config file; more readable than id() + name = repr(self._name) + else: + # identify uniquely + name = "at %#x" % id(self) + clsname = self.__class__.__name__ + return "<%s for %s %s>" % (clsname, self._matcher.type.name, name) + + def __str__(self): + l = [] + attrnames = sorted([s for s in self.__dict__ if s[0] != "_"]) + for k in attrnames: + v = getattr(self, k) + l.append('%-40s: %s' % (k, v)) + return '\n'.join(l) + + def getSectionName(self): + return self._name + + def getSectionType(self): + return self._matcher.type.name + + def getSectionDefinition(self): + return self._matcher.type + + def getSectionMatcher(self): + return self._matcher + + def getSectionAttributes(self): + return self._attributes diff --git a/thesisenv/lib/python3.6/site-packages/ZConfig/schema.py b/thesisenv/lib/python3.6/site-packages/ZConfig/schema.py new file mode 100644 index 0000000..3a6b2e9 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZConfig/schema.py @@ -0,0 +1,606 @@ +############################################################################## +# +# Copyright (c) 2002, 2003 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. 
+# +############################################################################## +"""Parser for ZConfig schemas.""" + +import os +import sys +import xml.sax + +import ZConfig + +from ZConfig import info +from ZConfig import url + +from ZConfig._compat import raise_with_same_tb + +BLANK = u'' + +def parseResource(resource, loader): + parser = SchemaParser(loader, resource.url) + xml.sax.parse(resource.file, parser) + return parser._schema + + +def parseComponent(resource, loader, schema): + parser = ComponentParser(loader, resource.url, schema) + xml.sax.parse(resource.file, parser) + + +def _srepr(ob): + if isinstance(ob, type(BLANK)) and sys.version_info[0] < 3: + # drop the leading "u" from a unicode repr + return repr(ob)[1:] + return repr(ob) + + +class BaseParser(xml.sax.ContentHandler): + + _cdata_tags = "description", "metadefault", "example", "default" + _handled_tags = ("import", "abstracttype", "sectiontype", + "key", "multikey", "section", "multisection") + + _allowed_parents = { + "description": ["key", "section", "multikey", "multisection", + "sectiontype", "abstracttype", + "schema", "component"], + "example": ["schema", "sectiontype", "key", "multikey", "section", "multisection"], + "metadefault": ["key", "section", "multikey", "multisection"], + "default": ["key", "multikey"], + "import": ["schema", "component"], + "abstracttype": ["schema", "component"], + "sectiontype": ["schema", "component"], + "key": ["schema", "sectiontype"], + "multikey": ["schema", "sectiontype"], + "section": ["schema", "sectiontype"], + "multisection": ["schema", "sectiontype"], + } + + def __init__(self, loader, url): + self._registry = loader.registry + self._loader = loader + self._basic_key = self._registry.get("basic-key") + self._identifier = self._registry.get("identifier") + self._cdata = None + self._locator = None + self._prefixes = [] + self._schema = None + self._stack = [] + self._url = url + self._elem_stack = [] + + # SAX 2 ContentHandler methods + + def 
setDocumentLocator(self, locator): + self._locator = locator + + def startElement(self, name, attrs): + attrs = dict(attrs) + if self._elem_stack: + parent = self._elem_stack[-1] + if name not in self._allowed_parents: + self.error("Unknown tag " + name) + if parent not in self._allowed_parents[name]: + self.error("%s elements may not be nested in %s elements" + % (_srepr(name), _srepr(parent))) + elif name != self._top_level: + self.error("Unknown document type " + name) + + self._elem_stack.append(name) + + # self._schema is assigned to in self.start_<_top_level>, so + # most of the checks for it being None are just extra precaution. + if name == self._top_level: + if self._schema is not None: # pragma: no cover + self.error("schema element improperly nested") + getattr(self, "start_" + name)(attrs) + elif name in self._handled_tags: + if self._schema is None: # pragma: no cover + self.error(name + " element outside of schema") + getattr(self, "start_" + name)(attrs) + elif name in self._cdata_tags: + if self._schema is None: # pragma: no cover + self.error(name + " element outside of schema") + if self._cdata is not None: # pragma: no cover + # this should be handled by the earlier nesting check + self.error(name + " element improperly nested") + self._cdata = [] + self._position = None + self._attrs = attrs + + def characters(self, data): + if self._cdata is not None: + if self._position is None: + self._position = self.get_position() + self._cdata.append(data) + elif data.strip(): + self.error("unexpected non-blank character data: " + + repr(data.strip())) + + def endElement(self, name): + del self._elem_stack[-1] + if name in self._handled_tags: + getattr(self, "end_" + name)() + else: + data = ''.join(self._cdata).strip() + self._cdata = None + getattr(self, "characters_" + name)(data) + + def endDocument(self): + if self._schema is None: # pragma: no cover + # this would have to be a broken subclass + self.error("no %s found" % self._top_level) + + # helper 
    def get_position(self):
        """Return (lineno, colno, url) of the current parse position."""
        if self._locator:
            return (self._locator.getLineNumber(),
                    self._locator.getColumnNumber(),
                    (self._locator.getSystemId() or self._url))
        return None, None, self._url  # pragma: no cover

    def get_handler(self, attrs):
        """Return the converted ``handler`` attribute, or None if absent."""
        v = attrs.get("handler")
        if v is None:
            return v
        return self.basic_key(v)

    def push_prefix(self, attrs):
        """Push the dotted-name prefix declared by *attrs* onto the stack.

        A prefix starting with "." extends the prefix currently on top of
        the stack; otherwise it replaces it for the nested scope.
        """
        name = attrs.get("prefix")
        if name:
            if self._prefixes:
                convert = self._registry.get("dotted-suffix")
            else:
                convert = self._registry.get("dotted-name")
            try:
                name = convert(name)
            except ValueError as err:
                self.error("not a valid prefix: %s (%s)"
                           % (_srepr(name), str(err)))
            if name[0] == ".":
                prefix = self._prefixes[-1] + name
            else:
                prefix = name
        elif self._prefixes:
            prefix = self._prefixes[-1]
        else:
            prefix = ''
        self._prefixes.append(prefix)

    def pop_prefix(self):
        """Pop the innermost dotted-name prefix."""
        del self._prefixes[-1]

    def get_classname(self, name):
        """Resolve *name* against the current prefix if it starts with '.'."""
        name = str(name)
        if name.startswith("."):
            return self._prefixes[-1] + name
        return name

    def get_datatype(self, attrs, attrkey, default, base=None):
        """Return the datatype converter named by ``attrs[attrkey]``.

        Falls back to the converter inherited from *base* (if any),
        then to *default*.
        """
        if attrkey in attrs:
            dtname = self.get_classname(attrs[attrkey])
        else:
            convert = getattr(base, attrkey, None)
            if convert is not None:
                return convert
            dtname = default

        try:
            return self._registry.get(dtname)
        except ValueError as e:
            self.error(e.args[0])

    def get_sect_typeinfo(self, attrs, base=None):
        """Return the (keytype, valuetype, datatype) converter triple."""
        keytype = self.get_datatype(attrs, "keytype", "basic-key", base)
        valuetype = self.get_datatype(attrs, "valuetype", "string")
        datatype = self.get_datatype(attrs, "datatype", "null", base)
        return keytype, valuetype, datatype

    def get_required(self, attrs):
        """Return True/False for the ``required`` attribute ('yes'/'no')."""
        if "required" in attrs:
            v = attrs["required"]
            if v == "yes":
                return True
            elif v == "no":
                return False
            self.error("value for 'required' must be 'yes' or 'no'")
        else:
            return False

    def get_ordinality(self, attrs):
        """Return (minOccurs, maxOccurs) for a multi-valued element."""
        # used by start_multi*()
        minOccurs, maxOccurs = 0, info.Unbounded
        if self.get_required(attrs):
            minOccurs = 1
        return minOccurs, maxOccurs

    def get_sectiontype(self, attrs):
        """Look up the section type named by the ``type`` attribute."""
        type_name = attrs.get("type")
        if not type_name:
            self.error("section must specify type")
        return self._schema.gettype(type_name)

    def get_key_info(self, attrs, element):
        """Return (name, datatype, handler, attribute) for a key element."""
        any_name, name, attribute = self.get_name_info(attrs, element)
        if any_name == '*':
            self.error(element + " may not specify '*' for name")
        if not name and any_name != '+':  # pragma: no cover
            # Can we even get here?
            self.error(element + " name may not be omitted or empty")
        datatype = self.get_datatype(attrs, "datatype", "string")
        handler = self.get_handler(attrs)
        return name or any_name, datatype, handler, attribute

    def get_name_info(self, attrs, element, default=None):
        """Validate the ``name``/``attribute`` pair of an element.

        Returns (any_name, name, attribute): for wildcard names ('*'/'+')
        the first slot is set; for concrete names the second.  The
        attribute name defaults to an identifier derived from the key.
        """
        name = attrs.get("name", default)
        if not name:
            self.error(element + " name must be specified and non-empty")
        aname = attrs.get("attribute")
        if aname:
            aname = self.identifier(aname)
            if aname.startswith("getSection"):
                # reserved; used for SectionValue methods to get meta-info
                self.error("attribute names may not start with 'getSection'")
        if name in ("*", "+"):
            if not aname:
                self.error(
                    "container attribute must be specified and non-empty"
                    " when using '*' or '+' for a section name")
            return name, None, aname
        else:
            # run the keytype converter to make sure this is a valid key
            try:
                name = self._stack[-1].keytype(name)
            except ValueError as e:
                self.error("could not convert key name to keytype: " + str(e))
            if not aname:
                aname = self.basic_key(name)
                aname = self.identifier(aname.replace('-', '_'))
            return None, name, aname

    # schema loading logic

    def characters_default(self, data):
        """Collect character data for a <default> element."""
        key = self._attrs.get("key")
        self._stack[-1].adddefault(data, self._position, key)

    def characters_description(self, data):
        """Record the description text of the current element."""
        if self._stack[-1].description is not None:
            # NOTE(review): the message appears to have lost a literal
            # "<description>" tag in transit -- confirm against upstream.
            self.error(
                "at most one may be used for each element")
        self._stack[-1].description = data

    def characters_example(self, data):
        """Record the example text of the current element."""
        if self._stack[-1].example is not None:
            # NOTE(review): message likely lost an "<example>" tag -- confirm.
            self.error(
                "at most one may be used for each element")
        self._stack[-1].example = data

    def characters_metadefault(self, data):
        self._stack[-1].metadefault = data

    def start_import(self, attrs):
        """Handle <import src=...> or <import package=... [file=...]>."""
        src = attrs.get("src", "").strip()
        pkg = attrs.get("package", "").strip()
        filename = attrs.get("file", "").strip()
        if not (src or pkg):
            self.error("import must specify either src or package")
        if src and pkg:
            self.error("import may only specify one of src or package")
        if src:
            if filename:
                self.error("import may not specify file and src")
            src = url.urljoin(self._url, src)
            src, fragment = url.urldefrag(src)
            if fragment:
                self.error("import src may not include"
                           " a fragment identifier")
            schema = self._loader.loadURL(src)
            for n in schema.gettypenames():
                self._schema.addtype(schema.gettype(n))
        else:
            if os.path.dirname(filename):
                self.error("file may not include a directory part")
            pkg = self.get_classname(pkg)
            src = self._loader.schemaComponentSource(pkg, filename)
            if not self._schema.hasComponent(src):
                self._schema.addComponent(src)
                self.loadComponent(src)

    def loadComponent(self, src):
        """Parse the component at *src* into the current schema."""
        r = self._loader.openResource(src)
        parser = ComponentParser(self._loader, src, self._schema)
        try:
            xml.sax.parse(r.file, parser)
        finally:
            r.close()

    def end_import(self):
        pass

    def start_sectiontype(self, attrs):
        """Create (or derive) a section type and push it on the stack."""
        name = attrs.get("name")
        if not name:
            self.error("sectiontype name must not be omitted or empty")
        name = self.basic_key(name)
        self.push_prefix(attrs)
        if "extends" in attrs:
            basename = self.basic_key(attrs["extends"])
            base = self._schema.gettype(basename)
            if base.isabstract():
                self.error("sectiontype cannot extend an abstract type")
            keytype, valuetype, datatype = self.get_sect_typeinfo(attrs, base)
            sectinfo = self._schema.deriveSectionType(
                base, name, keytype, valuetype, datatype)
        else:
            keytype, valuetype, datatype = self.get_sect_typeinfo(attrs)
            sectinfo = self._schema.createSectionType(
                name, keytype, valuetype, datatype)
        if "implements" in attrs:
            ifname = self.basic_key(attrs["implements"])
            interface = self._schema.gettype(ifname)
            if not interface.isabstract():
                self.error(
                    "type specified by implements is not an abstracttype")
            interface.addsubtype(sectinfo)
        self._stack.append(sectinfo)

    def end_sectiontype(self):
        self.pop_prefix()
        self._stack.pop()

    def start_section(self, attrs):
        """Handle a <section> element (single occurrence)."""
        sectiontype = self.get_sectiontype(attrs)
        handler = self.get_handler(attrs)
        minOccurs = 1 if self.get_required(attrs) else 0
        any_name, name, attribute = self.get_name_info(attrs, "section", "*")
        if any_name and not attribute:  # pragma: no cover
            # It seems like this is handled by get_name_info.
            self.error(
                "attribute must be specified if section name is '*' or '+'")
        section = info.SectionInfo(any_name or name, sectiontype,
                                   minOccurs, 1, handler, attribute)
        self._stack[-1].addsection(name, section)
        self._stack.append(section)

    def end_section(self):
        self._stack.pop()

    def start_multisection(self, attrs):
        """Handle a <multisection> element (wildcard name required)."""
        sectiontype = self.get_sectiontype(attrs)
        minOccurs, maxOccurs = self.get_ordinality(attrs)
        any_name, name, attribute = self.get_name_info(attrs,
                                                       "multisection", "*")
        if any_name not in ("*", "+"):
            self.error("multisection must specify '*' or '+' for the name")
        handler = self.get_handler(attrs)
        section = info.SectionInfo(any_name or name, sectiontype,
                                   minOccurs, maxOccurs, handler, attribute)
        self._stack[-1].addsection(name, section)
        self._stack.append(section)

    def end_multisection(self):
        self._stack.pop()

    def start_abstracttype(self, attrs):
        """Register a new abstract type and push it on the stack."""
        name = attrs.get("name")
        if not name:
            self.error("abstracttype name must not be omitted or empty")
        name = self.basic_key(name)
        abstype = info.AbstractType(name)
        self._schema.addtype(abstype)
        self._stack.append(abstype)

    def end_abstracttype(self):
        self._stack.pop()

    def start_key(self, attrs):
        """Handle a <key> element; wildcard '+' keys finish in end_key()."""
        name, datatype, handler, attribute = self.get_key_info(attrs, "key")
        minOccurs = 1 if self.get_required(attrs) else 0
        key = info.KeyInfo(name, datatype, minOccurs, handler, attribute)
        if "default" in attrs:
            if minOccurs:
                self.error("required key cannot have a default value")
            key.adddefault(str(attrs["default"]).strip(),
                           self.get_position())
        if name != "+":
            key.finish()
        self._stack[-1].addkey(key)
        self._stack.append(key)

    def end_key(self):
        key = self._stack.pop()
        if key.name == "+":
            # wildcard keys need the enclosing keytype to compute defaults
            key.computedefault(self._stack[-1].keytype)
            key.finish()

    def start_multikey(self, attrs):
        """Handle a <multikey> element (defaults come from child elements)."""
        if "default" in attrs:
            self.error("default values for multikey must be given using"
                       " 'default' elements")
        name, datatype, handler, attribute = self.get_key_info(attrs,
                                                               "multikey")
        minOccurs, maxOccurs = self.get_ordinality(attrs)
        key = info.MultiKeyInfo(name, datatype, minOccurs, maxOccurs,
                                handler, attribute)
        self._stack[-1].addkey(key)
        self._stack.append(key)

    def end_multikey(self):
        multikey = self._stack.pop()
        if multikey.name == "+":
            multikey.computedefault(self._stack[-1].keytype)
            multikey.finish()

    # datatype conversion wrappers

    def basic_key(self, s):
        """Convert *s* with the basic-key datatype, reporting errors here."""
        try:
            return self._basic_key(s)
        except ValueError as e:
            self.error(str(e))

    def identifier(self, s):
        """Convert *s* with the identifier datatype, reporting errors here."""
        try:
            return self._identifier(s)
        except ValueError as e:
            self.error(str(e))

    # exception setup helpers

    def initerror(self, e):
        """Attach the current parse position to exception *e*."""
        if self._locator is not None:
            e.colno = self._locator.getColumnNumber()
            e.lineno = self._locator.getLineNumber()
            e.url = self._locator.getSystemId()
        return e

    def error(self, message):
        """Raise a SchemaError carrying the current position."""
        raise_with_same_tb(self.initerror(ZConfig.SchemaError(message)))


class SchemaParser(BaseParser):

    # needed by startElement() and endElement()
    _handled_tags = BaseParser._handled_tags + ("schema",)
    _top_level = "schema"
__init__(self, loader, url, extending_parser=None): + BaseParser.__init__(self, loader, url) + self._extending_parser = extending_parser + self._base_keytypes = [] + self._base_datatypes = [] + self._descriptions = [] + + def start_schema(self, attrs): + self.push_prefix(attrs) + handler = self.get_handler(attrs) + keytype, valuetype, datatype = self.get_sect_typeinfo(attrs) + + if self._extending_parser is None: + # We're not being inherited, so we need to create the schema + self._schema = info.SchemaType(keytype, valuetype, datatype, + handler, self._url, self._registry) + else: + # Parse into the extending ("subclass") parser's schema + self._schema = self._extending_parser._schema + + self._stack = [self._schema] + + if "extends" in attrs: + sources = attrs["extends"].split() + sources.reverse() + + for src in sources: + src = url.urljoin(self._url, src) + src, fragment = url.urldefrag(src) + if fragment: + self.error("schema extends many not include" + " a fragment identifier") + self.extendSchema(src) + + # Inherit keytype from bases, if unspecified and not conflicting + if self._base_keytypes and "keytype" not in attrs: + keytype = self._base_keytypes[0] + for kt in self._base_keytypes[1:]: + if kt is not keytype: + self.error("base schemas have conflicting keytypes," + " but no keytype was specified in the" + " extending schema") + + # Inherit datatype from bases, if unspecified and not conflicting + if self._base_datatypes and "datatype" not in attrs: + datatype = self._base_datatypes[0] + for dt in self._base_datatypes[1:]: + if dt is not datatype: + self.error("base schemas have conflicting datatypes," + " but no datatype was specified in the" + " extending schema") + + # Reset the schema types to our own, while we parse the schema body + self._schema.keytype = keytype + self._schema.valuetype = valuetype + self._schema.datatype = datatype + + # Update base key/datatypes for the "extending" parser + if self._extending_parser is not None: + 
self._extending_parser._base_keytypes.append(keytype) + self._extending_parser._base_datatypes.append(datatype) + + def extendSchema(self, src): + parser = SchemaParser(self._loader, src, self) + r = self._loader.openResource(src) + try: + xml.sax.parse(r.file, parser) + finally: + r.close() + + def end_schema(self): + del self._stack[-1] + assert not self._stack + self.pop_prefix() + assert not self._prefixes + schema = self._schema + if self._extending_parser is None: + # Top-level schema: + if self._descriptions and not schema.description: + # Use the last one, since the base schemas are processed in + # reverse order. + schema.description = self._descriptions[-1] + elif schema.description: + self._extending_parser._descriptions.append(schema.description) + schema.description = None + + +class ComponentParser(BaseParser): + + _handled_tags = BaseParser._handled_tags + ("component",) + _top_level = "component" + + def __init__(self, loader, url, schema): + BaseParser.__init__(self, loader, url) + self._parent = schema + + def characters_description(self, data): + if self._stack: + self._stack[-1].description = data + + def start_key(self, attrs): + self._check_not_toplevel("key") + BaseParser.start_key(self, attrs) + + def start_multikey(self, attrs): + self._check_not_toplevel("multikey") + BaseParser.start_multikey(self, attrs) + + def start_section(self, attrs): + self._check_not_toplevel("section") + BaseParser.start_section(self, attrs) + + def start_multisection(self, attrs): + self._check_not_toplevel("multisection") + BaseParser.start_multisection(self, attrs) + + def start_component(self, attrs): + self._schema = self._parent + self.push_prefix(attrs) + + def end_component(self): + self.pop_prefix() + + def _check_not_toplevel(self, what): + if not self._stack: # pragma: no cover + # we can't get here because the elements that call + # this function have specified _allowed_parents that are + # checked first + self.error("cannot define top-level %s in a 
schema %s" + % (what, self._top_level)) diff --git a/thesisenv/lib/python3.6/site-packages/ZConfig/schema2html.py b/thesisenv/lib/python3.6/site-packages/ZConfig/schema2html.py new file mode 100644 index 0000000..99eb8e1 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZConfig/schema2html.py @@ -0,0 +1,152 @@ +############################################################################## +# +# Copyright (c) 2003 Zope Corporation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +from __future__ import print_function + +import argparse +from contextlib import contextmanager +try: + import html +except ImportError: + # Py2 + import cgi as html +import sys + +from ZConfig._schema_utils import AbstractSchemaPrinter +from ZConfig._schema_utils import AbstractSchemaFormatter +from ZConfig._schema_utils import MARKER +from ZConfig._schema_utils import load_schema +from ZConfig.sphinx import RstSchemaPrinter + +class HtmlSchemaFormatter(AbstractSchemaFormatter): + + def esc(self, x): + return html.escape(str(x)) + + @contextmanager + def _simple_tag(self, tag): + self.write("<%s>" % tag) + yield + self.write("" % tag) + + def item_list(self): + return self._simple_tag("dl") + + @contextmanager + def describing(self, description=MARKER, after=None): + with self._simple_tag("dt"): + yield + self._describing(description, after) + + def described_as(self): + return self._simple_tag("dd") + + def abstract_name(self, name): + self.write("", name, "") + + def concrete_name(self, *name): + self.write("", 
*name) + self.write("") + + def concrete_section_name(self, *name): + name = ' '.join(name) + self.write("", self.esc("<%s>" % name), "") + + def datatype(self, datatype): + self.write("(%s)" % self._dt(datatype)) + + def example(self, text): + if not text: + return + + with self._simple_tag("p"): + with self._simple_tag("i"): + self.write("Example:") + with self._simple_tag("pre"): + self.write(self.esc(self._dedent(text))) + + @contextmanager + def body(self): + self.write(''' + + ''') + yield + self.write('') + +class HtmlSchemaPrinter(AbstractSchemaPrinter): + + _schema_formatter = HtmlSchemaFormatter + +def main(argv=None): + argv = argv if argv is not None else sys.argv[1:] + + argparser = argparse.ArgumentParser( + description="Print an HTML version of a schema") + argparser.add_argument( + "schema", + metavar='[SCHEMA-OR-PACKAGE]', + help="The schema to print. By default, a file. Optionally, a Python package." + " If not given, defaults to reading a schema file from stdin", + default="-" + ) + argparser.add_argument( + "--out", "-o", + help="Write the schema to this file; if not given, write to stdout", + type=argparse.FileType('w')) + argparser.add_argument( + "--package", + action='store_true', + default=False, + help="The SCHEMA-OR-PACKAGE argument indicates a Python package instead of a file." + " The component.xml (by default) from the package will be read.") + argparser.add_argument( + "--package-file", + action="store", + default="component.xml", + help="When PACKAGE is given, this can specify the file inside it to load.") + + argparser.add_argument( + "--members", + action="store", + nargs="*", + help="Only output sections and types in this list (and reachable from it)") + + if RstSchemaPrinter: + argparser.add_argument( + "--format", + action="store", + choices=('html', 'xml'), # XXX Can we get actual valid RST out? 
+ default="HTML", + help="What output format to produce" + ) + + args = argparser.parse_args(argv) + + out = args.out or sys.stdout + + schema = load_schema(args.schema, args.package, args.package_file) + + printer_factory = HtmlSchemaPrinter + if hasattr(args, 'format') and args.format == 'xml': + printer_factory = RstSchemaPrinter + + + printer_factory(schema, out, allowed_names=args.members).printSchema() + + + return 0 + +if __name__ == '__main__': + main() diff --git a/thesisenv/lib/python3.6/site-packages/ZConfig/schemaless.py b/thesisenv/lib/python3.6/site-packages/ZConfig/schemaless.py new file mode 100644 index 0000000..f444427 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZConfig/schemaless.py @@ -0,0 +1,115 @@ +############################################################################## +# +# Copyright (c) 2007 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""\ +Support for working with ZConfig data without a schema. 
"""\
Support for working with ZConfig data without a schema.

"""
__docformat__ = "reStructuredText"

# The cfgparser import is needed only by the Parser subclass at the bottom;
# it is guarded so this module's data classes remain usable standalone.
try:
    import ZConfig.cfgparser
except ImportError:  # pragma: no cover - only hit outside the ZConfig tree
    ZConfig = None


def loadConfigFile(file, url=None):
    """Parse *file* (open for reading) and return the top-level Section."""
    c = Context()
    Parser(Resource(file, url), c).parse(c.top)
    return c.top


class Resource(object):
    """Pairing of an open file with the URL it was loaded from."""

    def __init__(self, file, url=''):
        self.file, self.url = file, url


class Section(dict):
    """A configuration section: key -> list-of-values, plus child sections.

    ``type`` and ``name`` hold the section type and optional name;
    ``sections`` lists child Section objects in source order.
    """

    # %import package names recorded on the top-level section (in order).
    imports = ()

    def __init__(self, type='', name='', data=None, sections=None):
        dict.__init__(self)
        if data:
            self.update(data)
        self.sections = sections or []
        self.type, self.name = type, name

    def addValue(self, key, value, *args):
        """Append *value* to the list of values for *key* (parser hook)."""
        if key in self:
            self[key].append(value)
        else:
            self[key] = [value]

    def __str__(self, pre=''):
        """Serialize this section (and children) back to ZConfig text.

        Keys are emitted sorted and before child sections; *pre* is the
        current indentation prefix (two spaces per nesting level, matching
        the ``pre[:-2]`` dedent below).
        """
        result = []

        if self.imports:
            for pkgname in self.imports:
                result.append('%import ' + pkgname)
            result.append('')

        if self.type:
            if self.name:
                start = '%s<%s %s>' % (pre, self.type, self.name)
            else:
                start = '%s<%s>' % (pre, self.type)
            result.append(start)
            pre += '  '

        lst = sorted(self.items())
        for name, values in lst:
            for value in values:
                result.append('%s%s %s' % (pre, name, value))

        if self.sections and self:
            # blank line between this section's keys and its children
            result.append('')

        for section in self.sections:
            result.append(section.__str__(pre))

        if self.type:
            pre = pre[:-2]
            # BUG FIX: the closing tag had been reduced to '%s' with a
            # two-item argument tuple, which raises TypeError for every
            # typed section.  Restored to emit '</type>'.
            result.append('%s</%s>' % (pre, self.type))
            result.append('')

        result = '\n'.join(result).rstrip()
        if not pre:
            result += '\n'
        return result


class Context(object):
    """cfgparser callback context that builds the schemaless Section tree."""

    def __init__(self):
        self.top = Section()
        self.sections = []

    def startSection(self, container, type_, name):
        newsec = Section(type_, name)
        container.sections.append(newsec)
        return newsec

    def endSection(self, container, type_, name, newsect):
        pass

    def importSchemaComponent(self, pkgname):
        # Record each %import once, preserving first-seen order.
        if pkgname not in self.top.imports:
            self.top.imports += (pkgname, )

    def includeConfiguration(self, section, newurl, defines):
        raise NotImplementedError('includes are not supported')


if ZConfig is not None:
    class Parser(ZConfig.cfgparser.ZConfigParser):
        """Config parser that rejects %define (schemaless limitation)."""

        def handle_define(self, section, rest):
            raise NotImplementedError('defines are not supported')
handle_define(self, section, rest): + raise NotImplementedError('defines are not supported') diff --git a/thesisenv/lib/python3.6/site-packages/ZConfig/schemaless.txt b/thesisenv/lib/python3.6/site-packages/ZConfig/schemaless.txt new file mode 100644 index 0000000..18c9070 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZConfig/schemaless.txt @@ -0,0 +1,293 @@ +================================= +Using ZConfig data without schema +================================= + +Sometimes it's useful to use ZConfig configuration data without a +schema. This is most interesting when assembling a configuration from +fragments, as some buildout recipes do. This is not recommended for +general application use. + +The ``ZConfig.schemaless`` module provides some support for working +without schema. Something things are not (currently) supported, +including the %define and %include directives. The %import directive +is supported. + +This module provides basic support for loading configuration, +inspecting and modifying it, and re-serializing the result. + + >>> from ZConfig import schemaless + +There is a single function which loads configuration data from a file +open for reading. Let's take a look at this, and what it returns:: + + >>> config_text = ''' + ... + ... some-key some-value + ... + ... some-key another-value + ... + ...
+ ... key1 value1.1 + ... key1 value1.2 + ... key2 value2 + ... + ... + ... another key + ... another value + ... + ...
+ ... + ... another-key whee! + ... + ... + ... + ... nothing here + ... + ... + ... ''' + + >>> from ZConfig._compat import NStringIO as StringIO + >>> config = schemaless.loadConfigFile(StringIO(config_text)) + +The `config` object is a mapping from top-level keys to lists of +values:: + + >>> config["some-key"] + ['some-value', 'another-value'] + + >>> config["another-key"] + ['whee!'] + + >>> config["no-such-key-in-the-config"] + Traceback (most recent call last): + KeyError: 'no-such-key-in-the-config' + + >>> lst = list(config) + >>> lst.sort() + >>> lst + ['another-key', 'some-key'] + +There is also a ``sections`` attribute that lists child sections:: + + >>> len(config.sections) + 2 + +Let's take a look at one of the sections. Like the top-level +configuration, the section maps keys + + >>> section = config.sections[0] + + >>> section["key1"] + ['value1.1', 'value1.2'] + + >>> section["key2"] + ['value2'] + + >>> section["no-such-key-in-the-config"] + Traceback (most recent call last): + KeyError: 'no-such-key-in-the-config' + + >>> lst = list(section) + >>> lst.sort() + >>> lst + ['key1', 'key2'] + +Child sections are again available via the ``sections`` attribute:: + + >>> len(section.sections) + 1 + +In addition, the section has ``type`` and ``name`` attributes that +record the type and name of the section as ZConfig understands them:: + + >>> section.type + 'section' + + >>> print(section.name) + None + +Let's look at the named section from our example, so we can see the +name:: + + >>> section = config.sections[1] + >>> section.type + 'another' + >>> section.name + 'named' + +We can also mutate the configuration, adding new keys and values as +desired:: + + >>> config["new-key"] = ["new-value-1", "new-value-2"] + >>> config["some-key"].append("third-value") + +New sections can also be added:: + + >>> section = schemaless.Section("sectiontype", "my-name") + >>> section["key"] = ["value"] + >>> config.sections.insert(1, section) + +The configuration can 
be re-serialized using ``str()``:: + + >>> print(str(config)) + another-key whee! + new-key new-value-1 + new-key new-value-2 + some-key some-value + some-key another-value + some-key third-value + +
+ key1 value1.1 + key1 value1.2 + key2 value2 + + + another key + another value + +
+ + + key value + + + + nothing here + + + +Note that some adjustments have been made: + +- key/value pairs come before child sections + +- keys are sorted at each level + +- blank lines are removed, with new blank lines inserted to preserve + some semblance of readability + +These are all presentation changes, but not essential changes to the +configuration data. The ordering of sections is not modified in +rendering, nor are the values for a single key re-ordered within a +section or top-level configuration. + + +Support for %import +------------------- + +Imports are supported, and are re-ordered in much the same way that +other elements of a configuration are:: + + >>> config_text = ''' + ... + ... %import some.package + ... + ...
+ ... + ... %import another.package + ... + ... + ... some value + ... + ... + ...
+ ... + ... some-key some-value + ... + ... ''' + + >>> config = schemaless.loadConfigFile(StringIO(config_text)) + + >>> print(config) + %import some.package + %import another.package + + some-key some-value + +
+ + some value + +
+ + +The imports are also available as the ``imports`` attribute of the +configuration object:: + + >>> config.imports + ('some.package', 'another.package') + +Multiple imports of the same name are removed:: + + >>> config_text = ''' + ... + ... %import some.package + ... %import another.package + ... %import some.package + ... + ... ''' + + >>> config = schemaless.loadConfigFile(StringIO(config_text)) + + >>> print(config) + %import some.package + %import another.package + + + >>> config.imports + ('some.package', 'another.package') + + +Limitations +----------- + +There are some limitations of handling ZConfig-based configurations +using the ``ZConfig.schemaless`` module. Some of these are +implementation issues, and may be corrected in the future: + +- %define is not supported. + +- %include is not supported. + +Others are a function of not processing the schema, and can't easily +be avoided: + +- normalization of keys based on keytypes specified in the or + elements of the schema if not performed. + + If the transformation of a key might affect the behavior controlled + by the resulting configuration, the generated configuration may not + be equivalent. Examples of this are unusual, but exist. + +Limitations related to the non-processing of the schema cannot be +detected by the ``ZConfig.schemaless``, so no errors are reported in +these situations. + +For the strictly syntactic limitations, we do get errors when the +input data requires they be supported. Let's look at both the %define +and %include handling. + +When %define is used in the input configuration, an exception is +raised when loading the configuration:: + + >>> config_text = ''' + ... + ... %define somename somevalue + ... + ... ''' + + >>> schemaless.loadConfigFile(StringIO(config_text)) + Traceback (most recent call last): + NotImplementedError: defines are not supported + +A similar exception is raised for %include:: + + >>> config_text = ''' + ... + ... %include some/other/file.conf + ... + ... 
##############################################################################
#
# Copyright (c) 2017 Zope Corporation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL).  A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
# Sphinx integration: renders a ZConfig schema into docutils nodes and
# registers the ``zconfig`` directive.
from __future__ import print_function, absolute_import


from contextlib import contextmanager

try:
    from docutils import nodes
    import docutils.utils
    import docutils.frontend
    import docutils.parsers.rst
    from docutils.parsers.rst import Directive
except ImportError:  # pragma: no cover
    # docutils is optional; callers are expected to test
    # ``if RstSchemaPrinter:`` before using any of this machinery.
    RstSchemaPrinter = None
    RstSchemaFormatter = None
else:

    from ZConfig._compat import string_types
    from ZConfig._schema_utils import load_schema
    from ZConfig._schema_utils import AbstractSchemaFormatter
    from ZConfig._schema_utils import AbstractSchemaPrinter
    from ZConfig._schema_utils import MARKER

    class RstSchemaFormatter(AbstractSchemaFormatter):
        """Formatter that builds a docutils document instead of text."""

        # docutils settings object; replaced per-instance in __init__.
        settings = None

        def __init__(self, schema, stream=None):
            super(RstSchemaFormatter, self).__init__(schema, stream)
            self.document = None
            self._current_node = None
            self._nodes = []
            # Default settings suitable for stand-alone RST parsing.
            self.settings = docutils.frontend.OptionParser(
                components=(docutils.parsers.rst.Parser,)).get_default_values()

        def esc(self, text):
            # No escaping needed: text becomes docutils Text nodes.
            return text

        def _parsed(self, text, name='Schema'):
            """Parse *text* as RST and return the resulting child nodes."""
            document = docutils.utils.new_document(
                name,
                settings=self.settings)

            parser = docutils.parsers.rst.Parser()
            parser.parse(text, document)
            return document.children

        def write(self, *texts):
            """Append strings (as Text nodes) or parsed nodes to the tree."""
            for text in texts:
                if isinstance(text, string_types):
                    # Pad with spaces so adjacent writes don't run together.
                    self._current_node += nodes.Text(' ' + text + ' ', text)
                else:
                    # Already parsed
                    self._current_node += text

        def description(self, text):
            if not text:
                return

            self.write(self._parsed(self._dedent(text), "description"))

        def example(self, text):
            """Render *text* as a literal "Example::" block."""
            if not text:
                return

            dedented = self._dedent(text)
            example = "Example::\n\n\t" + '\n\t'.join(dedented.split('\n'))
            self.write(self._parsed(example, "example"))

        @contextmanager
        def item_list(self):
            # Open a definition list; nested writes land inside it.
            old_node = self._current_node
            self._current_node = nodes.definition_list()
            old_node += self._current_node
            yield
            self._current_node = old_node

        @contextmanager
        def describing(self, description=MARKER, after=None):
            """Open a definition-list item; the yielded writes form the term."""
            dl = self._current_node
            assert isinstance(dl, nodes.definition_list), dl
            item = nodes.definition_list_item()
            dl += item
            term = nodes.term()
            item += term
            self._current_node = term

            yield

            # We must now have either a description (so we call
            # described_as) or they must call described_as
            # des
            self._current_node = item

            self._describing(description, after)

        @contextmanager
        def described_as(self):
            """Open the definition part of the current list item."""
            item = self._current_node
            assert isinstance(item, nodes.definition_list_item), item

            definition = nodes.definition()
            para = nodes.paragraph()
            definition += para
            item += definition
            self._current_node = para

            yield

            # When this is done, we're back to the list
            self._current_node = item.parent

        def abstract_name(self, name):
            self._current_node += nodes.emphasis(text=name, rawsource=name)

        def concrete_name(self, *name):
            name = ' '.join(name)
            self._current_node += nodes.strong(text=name, rawsource=name)

        def concrete_section_name(self, *name):
            name = ' '.join(name)
            return self.concrete_name("<" + name + ">")

        @contextmanager
        def body(self):
            # The document itself is the root node for all writes.
            self.document = self._current_node = docutils.utils.new_document(
                "Schema",
                settings=self.settings)
            yield

    class RstSchemaPrinter(AbstractSchemaPrinter):
        _schema_formatter = RstSchemaFormatter

        def printSchema(self):
            super(RstSchemaPrinter, self).printSchema()
            print(self.fmt.document.pformat(), file=self.fmt.stream)

    class SchemaToRstDirective(Directive):
        """``zconfig`` directive: embed a schema description in Sphinx docs."""
        required_arguments = 1
        optional_arguments = 2
        option_spec = {
            'file': str,
            'members': str,
            'excluded-members': str,
        }

        def run(self):
            schema = load_schema(self.arguments[0],
                                 True, self.options.get('file'))

            members = ()
            if 'members' in self.options:
                members = self.options['members'].split()

            excluded_members = ()
            if 'excluded-members' in self.options:
                excluded_members = self.options['excluded-members'].split()

            printer = RstSchemaPrinter(schema, allowed_names=members,
                                       excluded_names=excluded_members)
            # Reuse the hosting document's settings for consistent parsing.
            printer.fmt.settings = self.state.document.settings

            printer.buildSchema()

            return printer.fmt.document.children

    # NOTE(review): placement of ``setup`` relative to the try/else block is
    # ambiguous in the corrupted source; kept inside the else-block since it
    # references SchemaToRstDirective -- confirm against upstream.
    def setup(app):  # pragma: no cover
        "Sphinx extension entry point to add the zconfig directive."
        app.add_directive("zconfig", SchemaToRstDirective)
A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Shell-style string substitution helper.""" + +import os +import ZConfig + + +def substitute(s, mapping): + """Substitute values from *mapping* into *s*. + + *mapping* can be a :class:`dict` or any type that supports the + ``get()`` method of the mapping protocol. Replacement values are + copied into the result without further interpretation. Raises + :exc:`~.SubstitutionSyntaxError` if there are malformed constructs + in *s*. + """ + + if "$" in s: + result = '' + rest = s + while rest: + p, name, namecase, rest, vtype = _split(rest) + result += p + if name: + v = None + if vtype == 'define': + v = mapping.get(name) + if vtype == 'env': + v = os.getenv(namecase) + + if v is None: + raise ZConfig.SubstitutionReplacementError(s, namecase) + result += v + return result + else: + return s + + +def isname(s): + """Returns ``True`` if *s* is a valid name for a substitution + text, otherwise returns ``False``. 
+ """ + + m = _name_match(s) + if m: + return m.group() == s + else: + return False + + +def _split(s): + # Return a four tuple: prefix, name, namecase, suffix + # - prefix is text that can be used literally in the result (may be '') + # - name is a referenced name, or None + # - namecase is the name with case preserved + # - suffix is trailling text that may contain additional references + # (may be '' or None) + if "$" in s: + i = s.find("$") + c = s[i+1:i+2] + if c == "": + raise ZConfig.SubstitutionSyntaxError( + "illegal lone '$' at end of source") + if c == "$": + return s[:i+1], None, None, s[i+2:], None + prefix = s[:i] + vtype = 'define' + if c == "{": + m = _name_match(s, i + 2) + if not m: + raise ZConfig.SubstitutionSyntaxError( + "'${' not followed by name") + name = m.group(0) + i = m.end() + 1 + if not s.startswith("}", i - 1): + raise ZConfig.SubstitutionSyntaxError( + "'${%s' not followed by '}'" % name) + elif c == "(": + m = _name_match(s, i + 2) + if not m: + raise ZConfig.SubstitutionSyntaxError( + "'$(' not followed by name") + name = m.group(0) + i = m.end() + 1 + if not s.startswith(")", i - 1): + raise ZConfig.SubstitutionSyntaxError( + "'$(%s' not followed by ')'" % name) + vtype = 'env' + else: + m = _name_match(s, i+1) + if not m: + raise ZConfig.SubstitutionSyntaxError( + "'$' not followed by '$' or name") + name = m.group(0) + i = m.end() + return prefix, name.lower(), name, s[i:], vtype + else: + return s, None, None, None, None + + +import re +_name_match = re.compile(r"[a-zA-Z_][a-zA-Z0-9_]*").match +del re diff --git a/thesisenv/lib/python3.6/site-packages/ZConfig/tests/__init__.py b/thesisenv/lib/python3.6/site-packages/ZConfig/tests/__init__.py new file mode 100644 index 0000000..40eb211 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZConfig/tests/__init__.py @@ -0,0 +1,17 @@ +############################################################################## +# +# Copyright (c) 2002, 2003 Zope Foundation and Contributors. 
+# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Tests for the configuration data structures and loader. + +$Id: __init__.py,v 1.2 2003/01/03 21:05:56 fdrake Exp $ +""" diff --git a/thesisenv/lib/python3.6/site-packages/ZConfig/tests/bad-component.xml b/thesisenv/lib/python3.6/site-packages/ZConfig/tests/bad-component.xml new file mode 100644 index 0000000..a80c1d6 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZConfig/tests/bad-component.xml @@ -0,0 +1,5 @@ + + +
+ + diff --git a/thesisenv/lib/python3.6/site-packages/ZConfig/tests/bad-component2.xml b/thesisenv/lib/python3.6/site-packages/ZConfig/tests/bad-component2.xml new file mode 100644 index 0000000..e5dff55 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZConfig/tests/bad-component2.xml @@ -0,0 +1,7 @@ + + + +
+ + + diff --git a/thesisenv/lib/python3.6/site-packages/ZConfig/tests/foosample.zip b/thesisenv/lib/python3.6/site-packages/ZConfig/tests/foosample.zip new file mode 100644 index 0000000..16fedd6 Binary files /dev/null and b/thesisenv/lib/python3.6/site-packages/ZConfig/tests/foosample.zip differ diff --git a/thesisenv/lib/python3.6/site-packages/ZConfig/tests/input/base-datatype1.xml b/thesisenv/lib/python3.6/site-packages/ZConfig/tests/input/base-datatype1.xml new file mode 100644 index 0000000..d065939 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZConfig/tests/input/base-datatype1.xml @@ -0,0 +1,4 @@ + + + + diff --git a/thesisenv/lib/python3.6/site-packages/ZConfig/tests/input/base-datatype2.xml b/thesisenv/lib/python3.6/site-packages/ZConfig/tests/input/base-datatype2.xml new file mode 100644 index 0000000..076b0bc --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZConfig/tests/input/base-datatype2.xml @@ -0,0 +1,3 @@ + + + diff --git a/thesisenv/lib/python3.6/site-packages/ZConfig/tests/input/base-keytype1.xml b/thesisenv/lib/python3.6/site-packages/ZConfig/tests/input/base-keytype1.xml new file mode 100644 index 0000000..11b89cd --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZConfig/tests/input/base-keytype1.xml @@ -0,0 +1,3 @@ + + + diff --git a/thesisenv/lib/python3.6/site-packages/ZConfig/tests/input/base-keytype2.xml b/thesisenv/lib/python3.6/site-packages/ZConfig/tests/input/base-keytype2.xml new file mode 100644 index 0000000..c595342 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZConfig/tests/input/base-keytype2.xml @@ -0,0 +1,3 @@ + + + diff --git a/thesisenv/lib/python3.6/site-packages/ZConfig/tests/input/base.xml b/thesisenv/lib/python3.6/site-packages/ZConfig/tests/input/base.xml new file mode 100644 index 0000000..df195b4 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZConfig/tests/input/base.xml @@ -0,0 +1,7 @@ + + + + base description + + + diff --git 
a/thesisenv/lib/python3.6/site-packages/ZConfig/tests/input/include.conf b/thesisenv/lib/python3.6/site-packages/ZConfig/tests/input/include.conf new file mode 100644 index 0000000..52d5b56 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZConfig/tests/input/include.conf @@ -0,0 +1,4 @@ +var2 value2 +%include simple.conf +var3 value3 +var4 $name diff --git a/thesisenv/lib/python3.6/site-packages/ZConfig/tests/input/inner.conf b/thesisenv/lib/python3.6/site-packages/ZConfig/tests/input/inner.conf new file mode 100644 index 0000000..d3b9f97 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZConfig/tests/input/inner.conf @@ -0,0 +1,2 @@ +refouter $outervar +%define innervar inner diff --git a/thesisenv/lib/python3.6/site-packages/ZConfig/tests/input/library.xml b/thesisenv/lib/python3.6/site-packages/ZConfig/tests/input/library.xml new file mode 100644 index 0000000..0bc4507 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZConfig/tests/input/library.xml @@ -0,0 +1,7 @@ + + + Sample library of reusable data types. 
+ + + + diff --git a/thesisenv/lib/python3.6/site-packages/ZConfig/tests/input/logger.xml b/thesisenv/lib/python3.6/site-packages/ZConfig/tests/input/logger.xml new file mode 100644 index 0000000..46e296a --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZConfig/tests/input/logger.xml @@ -0,0 +1,12 @@ + + + + + + + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/ZConfig/tests/input/non-ascii.txt b/thesisenv/lib/python3.6/site-packages/ZConfig/tests/input/non-ascii.txt new file mode 100644 index 0000000..26b9add --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZConfig/tests/input/non-ascii.txt @@ -0,0 +1,2 @@ +# -*-coding: utf-8; mode: conf-*- +This file contains a snowman, U+2603: ☃ diff --git a/thesisenv/lib/python3.6/site-packages/ZConfig/tests/input/outer.conf b/thesisenv/lib/python3.6/site-packages/ZConfig/tests/input/outer.conf new file mode 100644 index 0000000..7d85746 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZConfig/tests/input/outer.conf @@ -0,0 +1,3 @@ +%define outervar outer +%include inner.conf +refinner $innervar diff --git a/thesisenv/lib/python3.6/site-packages/ZConfig/tests/input/simple.conf b/thesisenv/lib/python3.6/site-packages/ZConfig/tests/input/simple.conf new file mode 100644 index 0000000..6cec15e --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZConfig/tests/input/simple.conf @@ -0,0 +1,32 @@ +empty + +var1 abc +int-var 12 +float-var 12.02 +neg-int -2 + +true-var-1 true +true-var-2 on +true-var-3 yes + +false-var-1 false +false-var-2 off +false-var-3 no + +list-1 +list-2 abc +list-3 abc def ghi +list-4 [ what now? 
] + +# These test the %define mechanism: + +%define dollars $$$$ +%define empty +%define name value +%define twowords two words + +getname $name +getnametwice $name${name} +getdollars $dollars +getempty x${empty}y +getwords abc $twowords def diff --git a/thesisenv/lib/python3.6/site-packages/ZConfig/tests/input/simple.xml b/thesisenv/lib/python3.6/site-packages/ZConfig/tests/input/simple.xml new file mode 100644 index 0000000..c0703f4 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZConfig/tests/input/simple.xml @@ -0,0 +1,29 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/ZConfig/tests/input/simplesections.conf b/thesisenv/lib/python3.6/site-packages/ZConfig/tests/input/simplesections.conf new file mode 100644 index 0000000..d00023f --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZConfig/tests/input/simplesections.conf @@ -0,0 +1,40 @@ +var foo +var-0 foo-0 + +
+ var bar + var-one splat +
+ +var-1 foo-1 + +
+ var spam + var-two stuff +
+ +var-2 foo-2 + +
+ var quack! + var-three yet +
+ +var-3 foo-3 + +# An anonymous empty section: +
+ +var-4 foo-4 + +# A fairly trivial section: + + var triv + + +var-5 foo-5 + +# A minimal section: + + +var-6 foo-6 diff --git a/thesisenv/lib/python3.6/site-packages/ZConfig/tests/input/simplesections.xml b/thesisenv/lib/python3.6/site-packages/ZConfig/tests/input/simplesections.xml new file mode 100644 index 0000000..79e04e8 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZConfig/tests/input/simplesections.xml @@ -0,0 +1,63 @@ + + + + + + + + + + + + + + + + + + Multisection Example + +
+
+
+ + + + + + + + + + Description + For humans + + + + Description + + + Description + + + Description + + + Description + + + Description + +
+ Description + Section Example +
+
+
+
+
+ + 1 + 2 + + diff --git a/thesisenv/lib/python3.6/site-packages/ZConfig/tests/library/README.txt b/thesisenv/lib/python3.6/site-packages/ZConfig/tests/library/README.txt new file mode 100644 index 0000000..bec9c57 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZConfig/tests/library/README.txt @@ -0,0 +1,2 @@ +This is a sample library of configuration schema components. This is +used for testing. diff --git a/thesisenv/lib/python3.6/site-packages/ZConfig/tests/library/__init__.py b/thesisenv/lib/python3.6/site-packages/ZConfig/tests/library/__init__.py new file mode 100644 index 0000000..91b1aa4 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZConfig/tests/library/__init__.py @@ -0,0 +1 @@ +# Make this a package. diff --git a/thesisenv/lib/python3.6/site-packages/ZConfig/tests/library/thing/__init__.py b/thesisenv/lib/python3.6/site-packages/ZConfig/tests/library/thing/__init__.py new file mode 100644 index 0000000..4ab184e --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZConfig/tests/library/thing/__init__.py @@ -0,0 +1,22 @@ +############################################################################## +# +# Copyright (c) 2003 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Example of a package that extends its __path__. 
+ +$Id: __init__.py,v 1.2 2003/10/03 17:11:33 fdrake Exp $ +""" + +import os + +here = os.path.dirname(__file__) +__path__.append(os.path.join(here, "extras")) diff --git a/thesisenv/lib/python3.6/site-packages/ZConfig/tests/library/thing/component.xml b/thesisenv/lib/python3.6/site-packages/ZConfig/tests/library/thing/component.xml new file mode 100644 index 0000000..e130e75 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZConfig/tests/library/thing/component.xml @@ -0,0 +1,10 @@ + + + + + + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/ZConfig/tests/library/thing/extras/extras.xml b/thesisenv/lib/python3.6/site-packages/ZConfig/tests/library/thing/extras/extras.xml new file mode 100644 index 0000000..001b464 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZConfig/tests/library/thing/extras/extras.xml @@ -0,0 +1,5 @@ + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/ZConfig/tests/library/widget/__init__.py b/thesisenv/lib/python3.6/site-packages/ZConfig/tests/library/widget/__init__.py new file mode 100644 index 0000000..91b1aa4 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZConfig/tests/library/widget/__init__.py @@ -0,0 +1 @@ +# Make this a package. 
diff --git a/thesisenv/lib/python3.6/site-packages/ZConfig/tests/library/widget/component.xml b/thesisenv/lib/python3.6/site-packages/ZConfig/tests/library/widget/component.xml new file mode 100644 index 0000000..d74706c --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZConfig/tests/library/widget/component.xml @@ -0,0 +1,7 @@ + + + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/ZConfig/tests/library/widget/extra.xml b/thesisenv/lib/python3.6/site-packages/ZConfig/tests/library/widget/extra.xml new file mode 100644 index 0000000..5a2fe3f --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZConfig/tests/library/widget/extra.xml @@ -0,0 +1,5 @@ + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/ZConfig/tests/support.py b/thesisenv/lib/python3.6/site-packages/ZConfig/tests/support.py new file mode 100644 index 0000000..8e42965 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZConfig/tests/support.py @@ -0,0 +1,97 @@ +############################################################################## +# +# Copyright (c) 2003 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. 
+# +############################################################################## + +"""Support code shared among the tests.""" + +import contextlib +import os +import sys +import unittest + +import ZConfig + +from ZConfig.loader import ConfigLoader +from ZConfig.url import urljoin + +from ZConfig._compat import NStringIO as StringIO +from ZConfig._compat import pathname2url + +INPUT_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), "input")) +CONFIG_BASE = "file://%s/" % pathname2url(INPUT_DIR) + +def input_file(fname): + return os.path.abspath(os.path.join(INPUT_DIR, fname)) + +def with_stdin_from_input_file(fname): + input_fname = input_file(fname) + @contextlib.contextmanager + def stdin_replaced(): + old_stdin = sys.stdin + sys.stdin = open(input_fname) + try: + yield + finally: + sys.stdin.close() + sys.stdin = old_stdin + + def make_wrapper(f): + def f2(self): + with stdin_replaced(): + f(self) + return f2 + + return make_wrapper + + +class TestHelper(object): + """Utility methods which can be used with the schema support.""" + + # Not derived from unittest.TestCase; some test runners seem to + # think that means this class contains tests. 
+ + assertRaisesRegex = getattr(unittest.TestCase, 'assertRaisesRegex', + unittest.TestCase.assertRaisesRegexp) + + def load_both(self, schema_url, conf_url): + schema = self.load_schema(schema_url) + conf = self.load_config(schema, conf_url) + return schema, conf + + def load_schema(self, relurl): + self.url = urljoin(CONFIG_BASE, relurl) + self.schema = ZConfig.loadSchema(self.url) + self.assertTrue(self.schema.issection()) + return self.schema + + def load_schema_text(self, text, url=None): + sio = StringIO(text) + self.schema = ZConfig.loadSchemaFile(sio, url) + return self.schema + + def load_config(self, schema, conf_url, num_handlers=0): + conf_url = urljoin(CONFIG_BASE, conf_url) + loader = self.create_config_loader(schema) + self.conf, self.handlers = loader.loadURL(conf_url) + self.assertEqual(len(self.handlers), num_handlers) + return self.conf + + def load_config_text(self, schema, text, num_handlers=0, url=None): + sio = StringIO(text) + loader = self.create_config_loader(schema) + self.conf, self.handlers = loader.loadFile(sio, url) + self.assertEqual(len(self.handlers), num_handlers) + return self.conf + + def create_config_loader(self, schema): + return ConfigLoader(schema) diff --git a/thesisenv/lib/python3.6/site-packages/ZConfig/tests/test_cfgimports.py b/thesisenv/lib/python3.6/site-packages/ZConfig/tests/test_cfgimports.py new file mode 100644 index 0000000..6622bab --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZConfig/tests/test_cfgimports.py @@ -0,0 +1,57 @@ +############################################################################## +# +# Copyright (c) 2003 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Tests of the %import mechanism. +""" +import unittest + +import ZConfig +import ZConfig.tests.support + +from ZConfig._compat import NStringIO as StringIO + + +class TestImportFromConfiguration( + ZConfig.tests.support.TestHelper, unittest.TestCase): + + def test_simple_import(self): + schema = self.load_schema_text("") + loader = self.create_config_loader(schema) + config, _ = loader.loadFile( + StringIO("%import ZConfig.tests.library.widget\n")) + # make sure we now have a "private" schema object; the only + # way to get it is from the loader itself + self.assertTrue(schema is not loader.schema) + # make sure component types are only found on the private schema: + loader.schema.gettype("widget-b") + self.assertRaises(ZConfig.SchemaError, schema.gettype, "widget-b") + + def test_repeated_import(self): + schema = self.load_schema_text("") + loader = self.create_config_loader(schema) + config, _ = loader.loadFile( + StringIO("%import ZConfig.tests.library.widget\n" + "%import ZConfig.tests.library.widget\n")) + + def test_missing_import(self): + schema = self.load_schema_text("") + loader = self.create_config_loader(schema) + self.assertRaises(ZConfig.SchemaError, loader.loadFile, + StringIO("%import ZConfig.tests.missing\n")) + + +def test_suite(): + return unittest.defaultTestLoader.loadTestsFromName(__name__) + +if __name__ == '__main__': + unittest.main(defaultTest='test_suite') diff --git a/thesisenv/lib/python3.6/site-packages/ZConfig/tests/test_cmdline.py b/thesisenv/lib/python3.6/site-packages/ZConfig/tests/test_cmdline.py new file mode 100644 index 0000000..e36b3e8 --- /dev/null +++ 
b/thesisenv/lib/python3.6/site-packages/ZConfig/tests/test_cmdline.py @@ -0,0 +1,213 @@ +############################################################################## +# +# Copyright (c) 2003 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## + +"""Tests of the command-line integration.""" + +import unittest + +import ZConfig +import ZConfig.tests.support + +from ZConfig.cmdline import ExtendedConfigLoader + + +class CommandLineTest(ZConfig.tests.support.TestHelper, unittest.TestCase): + + clopts = () + + def create_config_loader(self, schema): + loader = ExtendedConfigLoader(schema) + for item in self.clopts: + loader.addOption(*item) + return loader + + def test_loading(self): + schema = self.load_schema_text("""\ + + + + + +
+ + """) + self.clopts = [("mykey=splat!", None), + ("section/innerkey=spoogey", None)] + bag = self.create_config_loader(schema).cook() + # Test a variety of queries on the OptionBag: + self.assertTrue("mykey" in bag) + self.assertTrue("another" not in bag) + self.assertEqual(bag.get_section_info("st", None), None) + self.assertEqual(bag.get_section_info("st", "missing-sect"), None) + # Consume everything in the OptionBag: + L = bag.get_key("mykey") + s, pos = L[0] + self.assertEqual(len(L), 1) + self.assertEqual(s, "splat!") + bag2 = bag.get_section_info("st", "section") + self.assertTrue("innerkey" in bag2) + self.assertTrue("another" not in bag2) + L = bag2.get_key("innerkey") + s, pos = L[0] + self.assertEqual(len(L), 1) + self.assertEqual(s, "spoogey") + # "Finish" to make sure everything has been consumed: + bag2.finish() + bag.finish() + + def test_named_sections(self): + schema = self.load_schema_text("""\ + + + + + + + + +
+
+ + """) + self.clopts = [("foo/k1=v1", None), ("bar/k2=v2", ("someurl", 2, 3))] + loader = self.create_config_loader(schema) + bag = loader.cook() + foo = bag.get_section_info("st2", "foo") + bar = bag.get_section_info("st2", "bar") + bag.finish() + self.assertEqual(bar.get_key("k2"), [("v2", ("someurl", 2, 3))]) + bar.finish() + # Ignore foo for now; it's not really important *when* it fails. + + # ValueErrors are converted into ConfigurationSyntaxErrors + self.assertRaisesRegex(ZConfig.ConfigurationSyntaxError, + "could not convert", + foo.basic_key, + 'invalid name', ('', 1,)) + + # missing keys return empty lists + self.assertEqual(foo.get_key('no such key'), []) + + # VE for matchers do the same conversion + matcher = loader.createSchemaMatcher() + self.assertRaisesRegex(ZConfig.DataConversionError, + "value did not match", + matcher.addValue, + 'invalid name', 'value', (1, 1, '')) + + + simple_schema = None + + def get_simple_schema(self): + if self.simple_schema is None: + self.__class__.simple_schema = self.load_schema_text("""\ + + + + + + + """) + return self.simple_schema + + def test_reading_config(self): + self.clopts = [("k1=stringvalue", None), ("k2=12", None)] + schema = self.get_simple_schema() + conf = self.load_config_text(schema, """\ + k0 stuff + k1 replaced-stuff + k2 42 + """) + self.assertEqual(conf.k0, "stuff") + self.assertEqual(conf.k1, "stringvalue") + self.assertEqual(conf.k2, 12) + self.assertEqual(conf.k3, 19) + + def test_unknown_key(self): + self.clopts = [("foo=bar", None)] + schema = self.get_simple_schema() + self.assertRaises(ZConfig.ConfigurationError, + self.load_config_text, schema, "") + + def test_too_many_keys(self): + self.clopts = [("k1=v1", None), ("k1=v2", None)] + schema = self.get_simple_schema() + self.assertRaises(ZConfig.ConfigurationError, + self.load_config_text, schema, "") + + def test_bad_datatype(self): + self.clopts = [("k2=42.0", None)] + schema = self.get_simple_schema() + 
self.assertRaises(ZConfig.DataConversionError, + self.load_config_text, schema, "") + + def test_without_clopts(self): + self.clopts = [] + schema = self.get_simple_schema() + conf = self.load_config_text(schema, "k3 42") + self.assertEqual(conf.k0, None) + self.assertEqual(conf.k1, None) + self.assertEqual(conf.k2, None) + self.assertEqual(conf.k3, 42) + + def test_section_contents(self): + schema = self.load_schema_text("""\ + + + + + + k3-v1 + k3-v2 + k3-v3 + + +
+
+ + """) + self.clopts = [("s1/k1=foo", None), + ("s2/k3=value1", None), + ("s2/k3=value2", None), + ("s1/k2=99", None), + ("s2/k3=value3", None), + ("s2/k3=value4", None), + ] + conf = self.load_config_text(schema, "\n") + self.assertEqual(conf.s1.k1, "foo") + self.assertEqual(conf.s1.k2, 99) + self.assertEqual(conf.s1.k3, ["k3-v1", "k3-v2", "k3-v3"]) + self.assertEqual(conf.s2.k1, None) + self.assertEqual(conf.s2.k2, 3) + self.assertEqual(conf.s2.k3, ["value1", "value2", "value3", "value4"]) + + self.clopts = [("path/that/dne=foo",)] + self.assertRaisesRegex(ZConfig.ConfigurationError, + "not all command line options were consumed", + self.load_config_text, + schema, "") + + def test_bad_overrides(self): + schema = self.get_simple_schema() + self.clopts = [('',)] + self.assertRaisesRegex(ZConfig.ConfigurationSyntaxError, + "invalid configuration specifier", + self.create_config_loader, + schema) + + self.clopts = [('double//slashes=value',)] + self.assertRaisesRegex(ZConfig.ConfigurationSyntaxError, + "not allowed in an option path", + self.create_config_loader, + schema) diff --git a/thesisenv/lib/python3.6/site-packages/ZConfig/tests/test_config.py b/thesisenv/lib/python3.6/site-packages/ZConfig/tests/test_config.py new file mode 100644 index 0000000..1a4bc74 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZConfig/tests/test_config.py @@ -0,0 +1,274 @@ +############################################################################## +# +# Copyright (c) 2002, 2003 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. 
+# +############################################################################## +"""Tests of the configuration data structures and loader.""" +import os +import tempfile +import unittest + +import ZConfig + +from ZConfig.tests.support import CONFIG_BASE +from ZConfig.tests.support import TestHelper + +from ZConfig._compat import NStringIO as StringIO + +class ConfigurationTestCase(TestHelper, unittest.TestCase): + + schema = None + + def get_schema(self): + if self.schema is None: + ConfigurationTestCase.schema = ZConfig.loadSchema( + CONFIG_BASE + "simple.xml") + return self.schema + + def load(self, relurl, context=None): + url = CONFIG_BASE + relurl + self.conf, self.handlers = ZConfig.loadConfig(self.get_schema(), url) + conf = self.conf + #self.assertEqual(conf.url, url) + self.assertTrue(conf.getSectionName() is None) + self.assertTrue(conf.getSectionType() is None) + #self.assertTrue(conf.delegate is None) + return conf + + def loadtext(self, text): + sio = StringIO(text) + return self.loadfile(sio) + + def loadfile(self, file_or_path): + schema = self.get_schema() + self.conf, self.handlers = ZConfig.loadConfigFile(schema, file_or_path) + return self.conf + + def check_simple_gets(self, conf): + self.assertEqual(conf.empty, '') + self.assertEqual(conf.int_var, 12) + self.assertEqual(conf.neg_int, -2) + self.assertEqual(conf.float_var, 12.02) + self.assertEqual(conf.var1, 'abc') + self.assertTrue(conf.true_var_1) + self.assertTrue(conf.true_var_2) + self.assertTrue(conf.true_var_3) + self.assertTrue(not conf.false_var_1) + self.assertTrue(not conf.false_var_2) + self.assertTrue(not conf.false_var_3) + self.assertEqual(conf.list_1, []) + self.assertEqual(conf.list_2, ['abc']) + self.assertEqual(conf.list_3, ['abc', 'def', 'ghi']) + self.assertEqual(conf.list_4, ['[', 'what', 'now?', ']']) + + def test_simple_gets(self): + conf = self.load("simple.conf") + self.check_simple_gets(conf) + + def test_type_errors(self): + Error = ZConfig.DataConversionError + 
raises = self.assertRaises + raises(Error, self.loadtext, "int-var true") + raises(Error, self.loadtext, "float-var true") + raises(Error, self.loadtext, "neg-int false") + raises(Error, self.loadtext, "true-var-1 0") + raises(Error, self.loadtext, "true-var-1 1") + with raises(Error) as e: + self.loadtext("true-var-1 -1") + + # str doesn't fail + exc = e.exception + str(exc) + self.assertIsNone(exc.colno) + self.assertIsNone(exc.url) + + exc.colno = 1 + exc.url = 'url' + self.assertIn('url', str(exc)) + + def test_simple_sections(self): + self.schema = ZConfig.loadSchema(CONFIG_BASE + "simplesections.xml") + conf = self.load("simplesections.conf") + self.assertEqual(conf.var, "foo") + # check each interleaved position between sections + for c in "0123456": + self.assertEqual(getattr(conf, "var_" +c), "foo-" + c) + sect = [sect for sect in conf.sections + if sect.getSectionName() == "name"][0] + self.assertEqual(sect.var, "bar") + self.assertEqual(sect.var_one, "splat") + self.assertTrue(sect.var_three is None) + sect = [sect for sect in conf.sections + if sect.getSectionName() == "delegate"][0] + self.assertEqual(sect.var, "spam") + self.assertEqual(sect.var_two, "stuff") + self.assertTrue(sect.var_three is None) + + def test_include(self): + conf = self.load("include.conf") + self.assertEqual(conf.var1, "abc") + self.assertEqual(conf.var2, "value2") + self.assertEqual(conf.var3, "value3") + self.assertEqual(conf.var4, "value") + + def test_includes_with_defines(self): + self.schema = ZConfig.loadSchemaFile(StringIO("""\ + + + + + """)) + conf = self.load("outer.conf") + self.assertEqual(conf.refinner, "inner") + self.assertEqual(conf.refouter, "outer") + + def test_define(self): + conf = self.load("simple.conf") + self.assertEqual(conf.getname, "value") + self.assertEqual(conf.getnametwice, "valuevalue") + self.assertEqual(conf.getdollars, "$$") + self.assertEqual(conf.getempty, "xy") + self.assertEqual(conf.getwords, "abc two words def") + + def 
test_define_errors(self): + # doesn't raise if value is equal + self.loadtext("%define a value\n%define a value\n") + + self.assertRaises(ZConfig.ConfigurationSyntaxError, + self.loadtext, "%define\n") + self.assertRaises(ZConfig.ConfigurationSyntaxError, + self.loadtext, "%define abc-def\n") + + self.assertRaises(ZConfig.SubstitutionReplacementError, + self.loadtext, + "foo $name") + + with self.assertRaises(ZConfig.ConfigurationSyntaxError) as e: + self.loadtext("%define a value\n%define a other\n") + + # str doesn't throw unexpected exceptions + exc = e.exception + self.assertIn('line', str(exc)) + self.assertNotIn('column', str(exc)) + # doesn't have these properties + self.assertIsNone(exc.colno) + self.assertIsNone(exc.url) + + # If we fill them in, we get different str output + exc.colno = 10 + exc.url = 'a url' + self.assertIn('column', str(exc)) + + # There's also a case if we don't have a line number + exc.lineno = None + self.assertNotIn('line', str(exc)) + + def test_bad_directive(self): + self.assertRaisesRegex(ZConfig.ConfigurationSyntaxError, + 'unknown directive', + self.loadtext, '%not a directive') + + self.assertRaisesRegex(ZConfig.ConfigurationSyntaxError, + 'missing or unrecognized', + self.loadtext, '%') + + def test_bad_key(self): + self.assertRaisesRegex(ZConfig.ConfigurationSyntaxError, + 'malformed configuration data', + self.loadtext, '(int-var') + + def test_bad_section(self): + self.schema = ZConfig.loadSchema(CONFIG_BASE + "simplesections.xml") + self.assertRaisesRegex(ZConfig.ConfigurationSyntaxError, + 'unexpected section end', + self.loadtext, '') + + self.assertRaisesRegex(ZConfig.ConfigurationSyntaxError, + 'unbalanced section end', + self.loadtext, '
\n') + + self.assertRaisesRegex(ZConfig.ConfigurationSyntaxError, + 'unclosed sections not allowed', + self.loadtext, '
\n') + + self.assertRaisesRegex(ZConfig.ConfigurationSyntaxError, + 'malformed section header', + self.loadtext, '\n') + + self.assertRaisesRegex(ZConfig.ConfigurationSyntaxError, + 'malformed section end', + self.loadtext, '
\n\n") + + def test_configuration_error_str(self): + + e = ZConfig.ConfigurationError('message') + self.assertEqual(e.message, 'message') + self.assertEqual('message', str(e)) + + # We can delete the message, for some reason + del e.message + + def test_fragment_ident_disallowed(self): + self.assertRaises(ZConfig.ConfigurationError, + self.load, "simplesections.conf#another") + + def test_load_from_fileobj(self): + sio = StringIO("%define name value\n" + "getname x $name y \n") + cf = self.loadfile(sio) + self.assertEqual(cf.getname, "x value y") + + def test_load_from_abspath(self): + fn = self.write_tempfile() + try: + self.check_load_from_path(fn) + finally: + os.unlink(fn) + + def test_load_from_relpath(self): + fn = self.write_tempfile() + dirname, name = os.path.split(fn) + pwd = os.getcwd() + try: + os.chdir(dirname) + self.check_load_from_path(name) + finally: + os.chdir(pwd) + os.unlink(fn) + + def write_tempfile(self): + fn = tempfile.mktemp() + fp = open(fn, "w") + fp.write("var1 value\n") + fp.close() + return fn + + def check_load_from_path(self, path): + schema = self.get_schema() + ZConfig.loadConfig(schema, path) + + +def test_suite(): + return unittest.defaultTestLoader.loadTestsFromName(__name__) + +if __name__ == '__main__': + unittest.main(defaultTest='test_suite') diff --git a/thesisenv/lib/python3.6/site-packages/ZConfig/tests/test_cookbook.py b/thesisenv/lib/python3.6/site-packages/ZConfig/tests/test_cookbook.py new file mode 100644 index 0000000..cf7191e --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZConfig/tests/test_cookbook.py @@ -0,0 +1,70 @@ +############################################################################## +# +# Copyright (c) 2004 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Tests of examples from the online cookbook, so we don't break them +down the road. Unless we really mean to. + +The ZConfig Cookbook is available online at: + + http://dev.zope.org/Zope3/ZConfig + +""" + +import ZConfig.tests.support +import unittest + + +def basic_key_mapping_password_to_passwd(key): + # Lower-case the key since that's what basic-key does: + key = key.lower() + # Now map password to passwd: + if key == "password": + key = "passwd" + return key + +def user_info_conversion(section): + return section + + +class CookbookTestCase(ZConfig.tests.support.TestHelper, unittest.TestCase): + + def test_rewriting_key_names(self): + schema = self.load_schema_text(""" + + + + + + +
+ + """ % __name__) + config = self.load_config_text(schema, """\ + + USERID 42 + USERNAME foouser + PASSWORD yeah-right + + """) + self.assertEqual(config.userinfo.userid, 42) + self.assertEqual(config.userinfo.username, "foouser") + self.assertEqual(config.userinfo.passwd, "yeah-right") + self.assertTrue(not hasattr(config.userinfo, "password")) + + +def test_suite(): + return unittest.defaultTestLoader.loadTestsFromName(__name__) + +if __name__ == '__main__': + unittest.main(defaultTest='test_suite') diff --git a/thesisenv/lib/python3.6/site-packages/ZConfig/tests/test_datatypes.py b/thesisenv/lib/python3.6/site-packages/ZConfig/tests/test_datatypes.py new file mode 100644 index 0000000..b94e745 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZConfig/tests/test_datatypes.py @@ -0,0 +1,435 @@ +############################################################################## +# +# Copyright (c) 2002, 2003 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. 
+# +############################################################################## +"""Tests of standard ZConfig datatypes.""" + +import os +import sys +import shutil +import socket +import datetime +import tempfile +import unittest + +import ZConfig.datatypes + +from ZConfig.tests.support import TestHelper + +here = os.path.abspath(__file__) + +try: + unicode +except NameError: + have_unicode = False +else: + have_unicode = True + + +class DatatypeTestCase(unittest.TestCase): + types = ZConfig.datatypes.Registry() + + def test_datatype_basickey(self): + convert = self.types.get("basic-key") + eq = self.assertEqual + raises = self.assertRaises + + eq(convert("abc"), "abc") + eq(convert("ABC_DEF.123"), "abc_def.123") + eq(convert("Abc-Def-456"), "abc-def-456") + eq(convert("Abc.Def"), "abc.def") + + raises(ValueError, convert, "_abc") + raises(ValueError, convert, "-abc") + raises(ValueError, convert, "123") + raises(ValueError, convert, "") + + def test_datatype_boolean(self): + convert = self.types.get("boolean") + check = self.assertTrue + raises = self.assertRaises + + check(convert("on")) + check(convert("true")) + check(convert("yes")) + check(not convert("off")) + check(not convert("false")) + check(not convert("no")) + raises(ValueError, convert, '0') + raises(ValueError, convert, '1') + raises(ValueError, convert, '') + raises(ValueError, convert, 'junk') + + def test_datatype_float(self): + convert = self.types.get("float") + eq = self.assertEqual + raises = self.assertRaises + + eq(convert("1"), 1.0) + self.assertTrue(type(convert(1)) is type(1.0)) + eq(convert("1.1"), 1.1) + eq(convert("50.50"), 50.50) + eq(convert("-50.50"), -50.50) + eq(convert(0), 0.0) + eq(convert("0"), 0.0) + eq(convert("-0"), 0.0) + eq(convert("0.0"), 0.0) + + raises(ValueError, convert, "junk") + raises(ValueError, convert, "0x234.1.9") + raises(ValueError, convert, "0.9-") + + # float handles inf/nan portably in both bytes and + # unicode on both Python 2.6+ and Python 3. 
Make sure conversion + # does too. + for literal in ("inf", "-inf", b"inf", b"-inf"): + eq(convert(literal), float(literal)) + + # notably, nan is not equal to itself + self.assertNotEqual(convert("nan"), float("nan")) + self.assertNotEqual(convert(b"nan"), float(b"nan")) + + def test_datatype_identifier(self): + convert = self.types.get("identifier") + raises = self.assertRaises + self.check_names(convert) + self.check_never_namelike(convert) + raises(ValueError, convert, ".abc") + + def check_names(self, convert): + eq = self.assert_ascii_equal + eq(convert, "AbcDef") + eq(convert, "a________") + eq(convert, "abc_def") + eq(convert, "int123") + eq(convert, "_abc") + eq(convert, "_123") + eq(convert, "__dict__") + + def assert_ascii_equal(self, convert, value): + v = convert(value) + self.assertEqual(v, value) + self.assertTrue(isinstance(v, str)) + if have_unicode: + unicode_value = unicode(value) + v = convert(unicode_value) + self.assertEqual(v, value) + self.assertTrue(isinstance(v, str)) + + def check_never_namelike(self, convert): + raises = self.assertRaises + raises(ValueError, convert, "2345") + raises(ValueError, convert, "23.45") + raises(ValueError, convert, ".45") + raises(ValueError, convert, "23.") + raises(ValueError, convert, "abc.") + raises(ValueError, convert, "-abc") + raises(ValueError, convert, "-123") + raises(ValueError, convert, "abc-") + raises(ValueError, convert, "123-") + raises(ValueError, convert, "-") + raises(ValueError, convert, ".") + raises(ValueError, convert, "&%$*()") + raises(ValueError, convert, "") + + def test_datatype_dotted_name(self): + convert = self.types.get("dotted-name") + raises = self.assertRaises + self.check_names(convert) + self.check_dotted_names(convert) + self.check_never_namelike(convert) + raises(ValueError, convert, "abc.") + raises(ValueError, convert, ".abc.") + raises(ValueError, convert, "abc.def.") + raises(ValueError, convert, ".abc.def.") + raises(ValueError, convert, ".abc.def") + + def 
test_datatype_dotted_suffix(self): + convert = self.types.get("dotted-suffix") + eq = self.assert_ascii_equal + raises = self.assertRaises + self.check_names(convert) + self.check_dotted_names(convert) + self.check_never_namelike(convert) + eq(convert, ".a") + eq(convert, ".a.b") + eq(convert, ".a.b.c.d.e.f.g.h.i.j.k.l.m.n.o") + raises(ValueError, convert, "abc.") + raises(ValueError, convert, ".abc.") + raises(ValueError, convert, "abc.def.") + raises(ValueError, convert, ".abc.def.") + + def check_dotted_names(self, convert): + eq = self.assert_ascii_equal + eq(convert, "abc.def") + eq(convert, "abc.def.ghi") + eq(convert, "a.d.g.g.g.g.g.g.g") + + def test_datatype_inet_address(self): + convert = self.types.get("inet-address") + eq = self.assertEqual + defhost = ZConfig.datatypes.DEFAULT_HOST + eq(convert("Host.Example.Com:80"), ("host.example.com", 80)) + eq(convert("Host.Example.Com:0"), ("host.example.com", 0)) + eq(convert(":80"), (defhost, 80)) + eq(convert("80"), (defhost, 80)) + eq(convert("[::1]:80"), ("::1", 80)) + eq(convert("host.EXAMPLE.com"), ("host.example.com", None)) + eq(convert("2001::ABCD"), ("2001::abcd", None)) + self.assertRaises(ValueError, convert, "40 # foo") + + def test_datatype_inet_binding_address(self): + convert = self.types.get("inet-binding-address") + eq = self.assertEqual + defhost = "" + eq(convert("Host.Example.Com:80"), ("host.example.com", 80)) + eq(convert(":80"), (defhost, 80)) + eq(convert("80"), (defhost, 80)) + eq(convert("host.EXAMPLE.com"), ("host.example.com", None)) + self.assertRaises(ValueError, convert, "40 # foo") + + def test_datatype_inet_connection_address(self): + convert = self.types.get("inet-connection-address") + eq = self.assertEqual + defhost = "127.0.0.1" + eq(convert("Host.Example.Com:80"), ("host.example.com", 80)) + eq(convert(":80"), (defhost, 80)) + eq(convert("80"), (defhost, 80)) + eq(convert("host.EXAMPLE.com"), ("host.example.com", None)) + self.assertRaises(ValueError, convert, "40 # foo") + 
+ def test_datatype_integer(self): + convert = self.types.get("integer") + eq = self.assertEqual + raises = self.assertRaises + + eq(convert('-100'), -100) + eq(convert('-1'), -1) + eq(convert('-0'), 0) + eq(convert('0'), 0) + eq(convert('1'), 1) + eq(convert('100'), 100) + eq(convert('65535'), 65535) + eq(convert('65536'), 65536) + + raises(ValueError, convert, 'abc') + raises(ValueError, convert, '-0xabc') + raises(ValueError, convert, '') + raises(ValueError, convert, '123 456') + raises(ValueError, convert, '123-') + + def test_datatype_locale(self): + convert = self.types.get("locale") + # Python supports "C" even when the _locale module is not available + self.assertEqual(convert("C"), "C") + self.assertRaises(ValueError, convert, "locale-does-not-exist") + + def test_datatype_port(self): + convert = self.types.get("port-number") + eq = self.assertEqual + raises = self.assertRaises + + raises(ValueError, convert, '-1') + eq(convert('0'), 0) + eq(convert('1'), 1) + eq(convert('80'), 80) + eq(convert('1023'), 1023) + eq(convert('1024'), 1024) + eq(convert('60000'), 60000) + eq(convert('65535'), 0xffff) + raises(ValueError, convert, '65536') + + def test_datatype_socket_address(self): + convert = self.types.get("socket-address") + eq = self.assertEqual + AF_INET = socket.AF_INET + AF_INET6 = socket.AF_INET6 + defhost = ZConfig.datatypes.DEFAULT_HOST + + def check(value, family, address): + a = convert(value) + self.assertEqual(a.family, family) + self.assertEqual(a.address, address) + + check("Host.Example.Com:80", AF_INET, ("host.example.com", 80)) + check(":80", AF_INET, (defhost, 80)) + check("80", AF_INET, (defhost, 80)) + check("host.EXAMPLE.com", AF_INET, ("host.example.com",None)) + check("::1", AF_INET6,("::1", None)) + check("[::]:80", AF_INET6,("::", 80)) + a1 = convert("/tmp/var/@345.4") + a2 = convert("/tmp/var/@345.4:80") + self.assertEqual(a1.address, "/tmp/var/@345.4") + self.assertEqual(a2.address, "/tmp/var/@345.4:80") + if hasattr(socket, 
"AF_UNIX"): + self.assertEqual(a1.family, socket.AF_UNIX) + self.assertEqual(a2.family, socket.AF_UNIX) + else: # pragma: no cover + self.assertTrue(a1.family is None) + self.assertTrue(a2.family is None) + + convert = self.types.get('socket-binding-address') + check(":80", AF_INET, (defhost, 80)) + + convert = self.types.get('socket-connection-address') + check(":80", AF_INET, ("127.0.0.1", 80)) + + def test_ipaddr_or_hostname(self): + convert = self.types.get('ipaddr-or-hostname') + eq = self.assertEqual + raises = self.assertRaises + eq(convert('hostname'), 'hostname') + eq(convert('hostname.com'), 'hostname.com') + eq(convert('www.hostname.com'), 'www.hostname.com') + eq(convert('HOSTNAME'), 'hostname') + eq(convert('HOSTNAME.COM'), 'hostname.com') + eq(convert('WWW.HOSTNAME.COM'), 'www.hostname.com') + eq(convert('127.0.0.1'), '127.0.0.1') + eq(convert('::1'), '::1') + eq(convert('2001:DB8:1234:4567:89AB:cdef:0:1'), '2001:db8:1234:4567:89ab:cdef:0:1') + eq(convert('2001:DB8:1234:4567::10.11.12.13'), '2001:db8:1234:4567::10.11.12.13') + raises(ValueError, convert, '1hostnamewithleadingnumeric') + raises(ValueError, convert, '255.255') + raises(ValueError, convert, '12345678') + raises(ValueError, convert, '999.999.999.999') + raises(ValueError, convert, 'a!badhostname') + raises(ValueError, convert, '2001:DB8:0123:4567:89AB:cdef:0:1:2') + raises(ValueError, convert, '2001:DB8:0123:4567::10.11.12.13.14') + + def test_existing_directory(self): + convert = self.types.get('existing-directory') + eq = self.assertEqual + raises = self.assertRaises + eq(convert('.'), '.') + eq(convert(os.path.dirname(here)), os.path.dirname(here)) + raises(ValueError, convert, tempfile.mktemp()) + + def test_existing_file(self): + convert = self.types.get('existing-file') + eq = self.assertEqual + raises = self.assertRaises + eq(convert('.'), '.') + eq(convert(here), here) + raises(ValueError, convert, tempfile.mktemp()) + + def test_existing_path(self): + convert = 
self.types.get('existing-path') + eq = self.assertEqual + raises = self.assertRaises + eq(convert('.'), '.') + eq(convert(here), here) + eq(convert(os.path.dirname(here)), os.path.dirname(here)) + raises(ValueError, convert, tempfile.mktemp()) + + def test_existing_dirpath(self): + convert = self.types.get('existing-dirpath') + eq = self.assertEqual + raises = self.assertRaises + eq(convert('.'), '.') + eq(convert(here), here) + raises(ValueError, convert, '/a/hopefully/nonexistent/path') + raises(ValueError, convert, here + '/bogus') + + def test_byte_size(self): + eq = self.assertEqual + raises = self.assertRaises + convert = self.types.get('byte-size') + eq(convert('128'), 128) + eq(convert('128KB'), 128*1024) + eq(convert('128MB'), 128*1024*1024) + eq(convert('128GB'), 128*1024*1024*1024) + raises(ValueError, convert, '128TB') + eq(convert('128'), 128) + eq(convert('128kb'), 128*1024) + eq(convert('128mb'), 128*1024*1024) + eq(convert('128gb'), 128*1024*1024*1024) + raises(ValueError, convert, '128tb') + + def test_time_interval(self): + eq = self.assertEqual + raises = self.assertRaises + convert = self.types.get('time-interval') + eq(convert('120'), 120) + eq(convert('120S'), 120) + eq(convert('120M'), 120*60) + eq(convert('120H'), 120*60*60) + eq(convert('120D'), 120*60*60*24) + raises(ValueError, convert, '120W') + eq(convert('120'), 120) + eq(convert('120s'), 120) + eq(convert('120m'), 120*60) + eq(convert('120h'), 120*60*60) + eq(convert('120d'), 120*60*60*24) + raises(ValueError, convert, '120w') + + def test_timedelta(self): + eq = self.assertEqual + raises = self.assertRaises + convert = self.types.get('timedelta') + eq(convert('4w'), datetime.timedelta(weeks=4)) + eq(convert('2d'), datetime.timedelta(days=2)) + eq(convert('7h'), datetime.timedelta(hours=7)) + eq(convert('12m'), datetime.timedelta(minutes=12)) + eq(convert('14s'), datetime.timedelta(seconds=14)) + eq(convert('4w 2d 7h 12m 14s'), + datetime.timedelta(2, 14, minutes=12, hours=7, 
weeks=4)) + + raises(TypeError, convert, '1y') + +class RegistryTestCase(TestHelper, unittest.TestCase): + + def test_registry_does_not_mask_toplevel_imports(self): + old_sys_path = sys.path[:] + tmpdir = tempfile.mkdtemp(prefix="test_datatypes_") + fn = os.path.join(tmpdir, "datatypes.py") + f = open(fn, "w") + f.write(TEST_DATATYPE_SOURCE) + f.close() + registry = ZConfig.datatypes.Registry() + + # we really want the temp area to override everything else: + sys.path.insert(0, tmpdir) + try: + datatype = registry.get("datatypes.my_sample_datatype") + finally: + shutil.rmtree(tmpdir) + sys.path[:] = old_sys_path + self.assertEqual(datatype, 42) + + + def test_register_shadow(self): + reg = ZConfig.datatypes.Registry() + self.assertRaisesRegex(ValueError, + "conflicts with built-in type", + reg.register, + 'integer', None) + + reg.register("foobar", None) + self.assertRaisesRegex(ValueError, + "already registered", + reg.register, + 'foobar', None) + + def test_get_fallback_basic_key(self): + reg = ZConfig.datatypes.Registry({}) + self.assertIsNone(reg._basic_key) + self.assertRaisesRegex(ValueError, + "unloadable datatype name", + reg.get, + 'integer') + self.assertIsNotNone(reg._basic_key) + +TEST_DATATYPE_SOURCE = """ +# sample datatypes file + +my_sample_datatype = 42 +""" diff --git a/thesisenv/lib/python3.6/site-packages/ZConfig/tests/test_info.py b/thesisenv/lib/python3.6/site-packages/ZConfig/tests/test_info.py new file mode 100644 index 0000000..beb5f04 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZConfig/tests/test_info.py @@ -0,0 +1,216 @@ +############################################################################## +# +# Copyright (c) 2017 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## + +import unittest + +from ZConfig import SchemaError +from ZConfig import ConfigurationError + +from ZConfig.info import Unbounded +from ZConfig.info import BaseInfo +from ZConfig.info import BaseKeyInfo +from ZConfig.info import KeyInfo +from ZConfig.info import SectionInfo +from ZConfig.info import AbstractType +from ZConfig.info import SectionType +from ZConfig.info import SchemaType + +from ZConfig.tests.support import TestHelper + + +class UnboundTestCase(unittest.TestCase): + + def test_order(self): + self.assertGreater(Unbounded, self) + self.assertFalse(Unbounded > Unbounded) + self.assertEqual(Unbounded, Unbounded) + +class InfoMixin(TestHelper): + + Class = None + + default_kwargs = {'name': '', 'datatype': None, 'handler': None, + 'minOccurs': 0, 'maxOccurs': Unbounded, 'attribute': None} + + def make_one(self, **kwargs): + args = self.default_kwargs.copy() + args.update(kwargs) + return self.Class(**args) + + +class BaseInfoTestCase(InfoMixin, unittest.TestCase): + + Class = BaseInfo + + def test_constructor_error(self): + self.assertRaisesRegex(SchemaError, + 'maxOccurs', + self.make_one, + maxOccurs=0, minOccurs=0) + + # This case doesn't really make sense + self.assertRaisesRegex(SchemaError, + 'minOccurs', + self.make_one, + maxOccurs=1, + minOccurs=2) + + def test_repr(self): + # just doesn't raise + repr(self.make_one()) + +class BaseKeyInfoTestCase(InfoMixin, unittest.TestCase): + + class Class(BaseKeyInfo): + def add_valueinfo(self, vi, key): + "This wont actually be called" + + def test_cant_instantiate(self): + self.Class = BaseKeyInfo + with self.assertRaises(TypeError): + self.make_one() + del self.Class + + def 
test_finish(self): + info = self.make_one(minOccurs=1) + info.finish() + with self.assertRaises(SchemaError): + info.finish() + + def test_adddefaultc(self): + info = self.make_one(name='foo', minOccurs=1) + self.assertRaisesRegex(SchemaError, + 'unexpected key for default', + info.adddefault, + None, None, key='key') + +class KeyInfoTestCase(InfoMixin, unittest.TestCase): + + Class = KeyInfo + default_kwargs = InfoMixin.default_kwargs.copy() + default_kwargs.pop('maxOccurs') + + def test_add_with_default(self): + info = self.make_one(minOccurs=1, name='name') + info.adddefault('value', None) + self.assertRaisesRegex(SchemaError, + 'cannot set more than one', + info.adddefault, + 'value', None) + +class SectionInfoTestCase(InfoMixin, unittest.TestCase): + + Class = SectionInfo + + class MockSectionType(object): + name = None + @classmethod + def isabstract(cls): + return True + + default_kwargs = InfoMixin.default_kwargs.copy() + default_kwargs.pop('datatype') + default_kwargs['sectiontype'] = MockSectionType + + def test_constructor_error(self): + self.assertRaisesRegex(SchemaError, + 'must use a name', + self.make_one, + name='name', maxOccurs=2) + self.assertRaisesRegex(SchemaError, + 'must specify a target attribute', + self.make_one, + name='*', maxOccurs=2) + + def test_misc(self): + info = self.make_one(maxOccurs=1) + repr(info) + self.assertFalse(info.isAllowedName('*')) + self.assertFalse(info.isAllowedName('+')) + +class AbstractTypeTestCase(unittest.TestCase): + + def test_subtypes(self): + + t = AbstractType('name') + self.assertFalse(t.hassubtype('foo')) + self.assertEqual([], list(t.getsubtypenames())) + + self.name = 'foo' + t.addsubtype(self) + self.assertTrue(t.hassubtype('foo')) + +class SectionTypeTestCase(TestHelper, unittest.TestCase): + + def make_one(self, name='', keytype=None, valuetype=None, + datatype=None, registry={}, types=None): + return SectionType(name, keytype, valuetype, datatype, registry, types) + + def 
test_getinfo_no_key(self): + info = self.make_one() + self.assertRaisesRegex(ConfigurationError, + "cannot match a key without a name", + info.getinfo, + None) + + def test_required_types_with_name(self): + info = self.make_one(name='foo') + self.assertEqual(['foo'], info.getrequiredtypes()) + + def test_getsectioninfo(self): + class MockChild(object): + _issection = False + def issection(self): + return self._issection + child = MockChild() + + info = self.make_one() + + info._children.append(('foo', child)) + + self.assertRaisesRegex(ConfigurationError, + 'already in use for key', + info.getsectioninfo, + None, 'foo') + + self.assertRaisesRegex(ConfigurationError, + 'no matching section', + info.getsectioninfo, + None, 'baz') + +class SchemaTypeTestCase(TestHelper, unittest.TestCase): + + def test_various(self): + class Mock(object): + pass + + mock = Mock() + schema = SchemaType(None, None, None, None, 'url', {}) + + mock.name = 'name' + schema.addtype(mock) + with self.assertRaises(SchemaError): + schema.addtype(mock) + + self.assertTrue(schema.allowUnnamed()) + self.assertFalse(schema.isAllowedName(None)) + + with self.assertRaises(SchemaError): + schema.deriveSectionType(schema, None, None, None, None) + + schema.addComponent('name') + self.assertRaisesRegex(SchemaError, + 'already have component', + schema.addComponent, + 'name') diff --git a/thesisenv/lib/python3.6/site-packages/ZConfig/tests/test_loader.py b/thesisenv/lib/python3.6/site-packages/ZConfig/tests/test_loader.py new file mode 100644 index 0000000..cd3db2d --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZConfig/tests/test_loader.py @@ -0,0 +1,420 @@ +############################################################################## +# +# Copyright (c) 2002, 2003 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Tests of ZConfig.loader classes and helper functions.""" + +import os.path +import sys +import tempfile +import unittest + +import ZConfig +import ZConfig.loader +import ZConfig.url + +from ZConfig._compat import NStringIO as StringIO +from ZConfig._compat import urllib2 + +from ZConfig.tests.support import CONFIG_BASE, TestHelper + + +myfile = os.path.abspath(__file__) +LIBRARY_DIR = os.path.join(os.path.dirname(myfile), "library") + + +class LoaderTestCase(TestHelper, unittest.TestCase): + + def test_open_resource_non_ascii(self): + # Files are decoded using utf-8 on open + loader = ZConfig.loader.SchemaLoader() + url = ZConfig.url.urljoin(CONFIG_BASE, "non-ascii.txt") + stream = loader.openResource(url) + val = stream.read() + self.assertEqual( + val, + u'# -*-coding: utf-8; mode: conf-*-\n' + u'This file contains a snowman, U+2603: \u2603\n' + ) + + def test_schema_caching(self): + loader = ZConfig.loader.SchemaLoader() + url = ZConfig.url.urljoin(CONFIG_BASE, "simple.xml") + schema1 = loader.loadURL(url) + schema2 = loader.loadURL(url) + self.assertIs(schema1, schema2) + + def test_simple_import_with_cache(self): + loader = ZConfig.loader.SchemaLoader() + url1 = ZConfig.url.urljoin(CONFIG_BASE, "library.xml") + schema1 = loader.loadURL(url1) + sio = StringIO("" + " " + "
" + "") + url2 = ZConfig.url.urljoin(CONFIG_BASE, "stringio") + schema2 = loader.loadFile(sio, url2) + self.assertTrue(schema1.gettype("type-a") is schema2.gettype("type-a")) + + def test_schema_loader_source_errors(self): + loader = ZConfig.loader.SchemaLoader() + self.assertRaisesRegex(ZConfig.SchemaError, + "illegal schema component name", + loader.schemaComponentSource, + '', None) + self.assertRaisesRegex(ZConfig.SchemaError, + "illegal schema component name", + loader.schemaComponentSource, + 'foo..bar', None) + + def test_config_loader_abstract_schema(self): + class MockSchema(object): + _abstract = True + def isabstract(self): + return self._abstract + def gettype(self, _t): + return self + + self.assertRaisesRegex(ZConfig.SchemaError, + "abstract type", + ZConfig.loader.ConfigLoader, + MockSchema()) + + s = MockSchema() + s._abstract = False + + loader = ZConfig.loader.ConfigLoader(s) + s._abstract = True + + self.assertRaisesRegex(ZConfig.ConfigurationError, + "cannot match abstract section", + loader.startSection, + None, None, None) + + def test_simple_import_using_prefix(self): + self.load_schema_text("""\ + + + + """) + + def test_import_errors(self): + # must specify exactly one of package or src + self.assertRaises(ZConfig.SchemaError, ZConfig.loadSchemaFile, + StringIO("")) + self.assertRaises(ZConfig.SchemaError, ZConfig.loadSchemaFile, + StringIO("" + " " + "")) + # cannot specify src and file + self.assertRaises(ZConfig.SchemaError, ZConfig.loadSchemaFile, + StringIO("" + " " + "")) + # cannot specify module as package + sio = StringIO("" + " " + "") + with self.assertRaises(ZConfig.SchemaResourceError) as ctx: + ZConfig.loadSchemaFile(sio) + + e = ctx.exception + self.assertEqual(e.filename, "component.xml") + self.assertEqual(e.package, "ZConfig.tests.test_loader") + self.assertTrue(e.path is None) + # make sure the str() doesn't raise an unexpected exception + str(e) + + def test_import_from_package(self): + loader = 
ZConfig.loader.SchemaLoader() + sio = StringIO("" + " " + "") + schema = loader.loadFile(sio) + self.assertTrue(schema.gettype("widget-a") is not None) + + def test_import_from_package_with_file(self): + loader = ZConfig.loader.SchemaLoader() + sio = StringIO("" + " " + "") + schema = loader.loadFile(sio) + self.assertTrue(schema.gettype("extra-type") is not None) + + def test_import_from_package_extra_directory(self): + loader = ZConfig.loader.SchemaLoader() + sio = StringIO("" + " " + "") + schema = loader.loadFile(sio) + self.assertTrue(schema.gettype("extra-thing") is not None) + + def test_import_from_package_with_missing_file(self): + loader = ZConfig.loader.SchemaLoader() + sio = StringIO("" + " " + "") + with self.assertRaises(ZConfig.SchemaResourceError) as ctx: + loader.loadFile(sio) + e = ctx.exception + self.assertEqual(e.filename, "notthere.xml") + self.assertEqual(e.package, "ZConfig.tests.library.widget") + self.assertTrue(e.path) + # make sure the str() doesn't raise an unexpected exception + str(e) + + def test_import_from_package_with_directory_file(self): + loader = ZConfig.loader.SchemaLoader() + sio = StringIO("" + " " + "") + self.assertRaises(ZConfig.SchemaError, loader.loadFile, sio) + + def test_import_two_components_one_package(self): + loader = ZConfig.loader.SchemaLoader() + sio = StringIO("" + " " + " " + "") + schema = loader.loadFile(sio) + schema.gettype("widget-a") + schema.gettype("extra-type") + + def test_import_component_twice_1(self): + # Make sure we can import a component twice from a schema. + # This is most likely to occur when the component is imported + # from each of two other components, or from the top-level + # schema and a component. 
+ loader = ZConfig.loader.SchemaLoader() + sio = StringIO("" + " " + " " + "") + schema = loader.loadFile(sio) + schema.gettype("widget-a") + + def test_import_component_twice_2(self): + # Make sure we can import a component from a config file even + # if it has already been imported from the schema. + loader = ZConfig.loader.SchemaLoader() + sio = StringIO("" + " " + "") + schema = loader.loadFile(sio) + loader = ZConfig.loader.ConfigLoader(schema) + sio = StringIO("%import ZConfig.tests.library.widget") + loader.loadFile(sio) + + def test_urlsplit_urlunsplit(self): + # Extracted from Python's test.test_urlparse module: + for url, parsed, split in [ + ('http://www.python.org', + ('http', 'www.python.org', '', '', '', ''), + ('http', 'www.python.org', '', '', '')), + ('http://www.python.org#abc', + ('http', 'www.python.org', '', '', '', 'abc'), + ('http', 'www.python.org', '', '', 'abc')), + ('http://www.python.org/#abc', + ('http', 'www.python.org', '/', '', '', 'abc'), + ('http', 'www.python.org', '/', '', 'abc')), + ("http://a/b/c/d;p?q#f", + ('http', 'a', '/b/c/d', 'p', 'q', 'f'), + ('http', 'a', '/b/c/d;p', 'q', 'f')), + ('file:///tmp/junk.txt', + ('file', '', '/tmp/junk.txt', '', '', ''), + ('file', '', '/tmp/junk.txt', '', '')), + ]: + result = ZConfig.url.urlsplit(url) + self.assertEqual(result, split) + result2 = ZConfig.url.urlunsplit(result) + self.assertEqual(result2, url) + + def test_file_url_normalization(self): + self.assertEqual( + ZConfig.url.urlnormalize("file:/abc/def"), + "file:///abc/def") + self.assertEqual( + ZConfig.url.urlunsplit(("file", "", "/abc/def", "", "")), + "file:///abc/def") + self.assertEqual( + ZConfig.url.urljoin("file:/abc/", "def"), + "file:///abc/def") + self.assertEqual( + ZConfig.url.urldefrag("file:/abc/def#frag"), + ("file:///abc/def", "frag")) + + def test_url_from_file(self): + class MockFile(object): + name = 'path' + self.assertEqual('file://', + ZConfig.loader._url_from_file(MockFile)[:7]) + + def 
test_isPath(self): + assertTrue = self.assertTrue + isPath = ZConfig.loader.SchemaLoader().isPath + assertTrue(isPath("abc")) + assertTrue(isPath("abc/def")) + assertTrue(isPath("/abc")) + assertTrue(isPath("/abc/def")) + assertTrue(isPath(r"\abc")) + assertTrue(isPath(r"\abc\def")) + assertTrue(isPath(r"c:\abc\def")) + assertTrue(isPath("/ab:cd")) + assertTrue(isPath(r"\ab:cd")) + assertTrue(isPath("long name with spaces")) + assertTrue(isPath("long name:with spaces")) + assertTrue(not isPath("ab:cd")) + assertTrue(not isPath("http://www.example.com/")) + assertTrue(not isPath("http://www.example.com/sample.conf")) + assertTrue(not isPath("file:///etc/zope/zope.conf")) + assertTrue(not isPath("file:///c|/foo/bar.conf")) + + +class TestNonExistentResources(unittest.TestCase): + + # XXX Not sure if this is the best approach for these. These + # tests make sure that the error reported by ZConfig for missing + # resources is handled in a consistent way. Since ZConfig uses + # urllib2.urlopen() for opening all resources, what we do is + # replace that function with one that always raises an exception. + # Since urllib2.urlopen() can raise either IOError or OSError + # (depending on the version of Python), we run test for each + # exception. urllib2.urlopen() is restored after running the + # test. 
+ + def setUp(self): + self.urllib2_urlopen = urllib2.urlopen + urllib2.urlopen = self.fake_urlopen + + def tearDown(self): + urllib2.urlopen = self.urllib2_urlopen + + def fake_urlopen(self, url): + raise self.error() + + def test_nonexistent_file_ioerror(self): + self.error = IOError + self.check_nonexistent_file() + + def test_nonexistent_file_oserror(self): + self.error = OSError + self.check_nonexistent_file() + + def check_nonexistent_file(self): + fn = tempfile.mktemp() + schema = ZConfig.loadSchemaFile(StringIO("")) + self.assertRaises(ZConfig.ConfigurationError, + ZConfig.loadSchema, fn) + self.assertRaises(ZConfig.ConfigurationError, + ZConfig.loadConfig, schema, fn) + self.assertRaises(ZConfig.ConfigurationError, + ZConfig.loadConfigFile, schema, + StringIO("%include " + fn)) + self.assertRaises(ZConfig.ConfigurationError, + ZConfig.loadSchema, + "http://www.zope.org/no-such-document/") + self.assertRaises(ZConfig.ConfigurationError, + ZConfig.loadConfig, schema, + "http://www.zope.org/no-such-document/") + + +class TestResourcesInZip(unittest.TestCase): + + def setUp(self): + self.old_path = sys.path[:] + # now add our sample EGG to sys.path: + zipfile = os.path.join(os.path.dirname(myfile), "foosample.zip") + sys.path.append(zipfile) + + def tearDown(self): + sys.path[:] = self.old_path + + def test_zip_import_component_from_schema(self): + sio = StringIO(''' + + + +
+ + ''') + schema = ZConfig.loadSchemaFile(sio) + t = schema.gettype("sample") + self.assertFalse(t.isabstract()) + + def test_zip_import_component_from_config(self): + sio = StringIO(''' + + +
+ + ''') + schema = ZConfig.loadSchemaFile(sio) + + value = ''' + %import foo.sample + + data value + + ''' + sio = StringIO(value) + config, _ = ZConfig.loadConfigFile(schema, sio) + self.assertEqual(config.something.data, "| value |") + + sio = StringIO(value) + with self.assertRaises(ZConfig.ConfigurationSyntaxError): + ZConfig.loadConfigFile(schema, sio, + overrides=["sample/data=othervalue"]) + +class TestOpenPackageResource(TestHelper, unittest.TestCase): + + magic_name = 'not a valid import name' + + def setUp(self): + sys.modules[self.magic_name] = self + + def tearDown(self): + del sys.modules[self.magic_name] + + def test_package_loader_resource_error(self): + class MockLoader(object): + pass + self.__loader__ = MockLoader() + self.__path__ = ['dir'] + + self.assertRaisesRegex(ZConfig.SchemaResourceError, + "error opening schema component", + ZConfig.loader.openPackageResource, + self.magic_name, 'a path') + + # Now with an empty path + self.__path__ = [] + self.assertRaisesRegex(ZConfig.SchemaResourceError, + "schema component not found", + ZConfig.loader.openPackageResource, + self.magic_name, 'a path') + + def test_resource(self): + r = ZConfig.loader.Resource(self, None) + self.assertEqual(self.magic_name, r.magic_name) diff --git a/thesisenv/lib/python3.6/site-packages/ZConfig/tests/test_matcher.py b/thesisenv/lib/python3.6/site-packages/ZConfig/tests/test_matcher.py new file mode 100644 index 0000000..53d4805 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZConfig/tests/test_matcher.py @@ -0,0 +1,142 @@ +############################################################################## +# +# Copyright (c) 2017 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## + +import unittest + +from ZConfig import ConfigurationError +from ZConfig import DataConversionError + +from ZConfig.matcher import SectionValue +from ZConfig.matcher import SectionMatcher +from ZConfig.matcher import BaseMatcher + +from ZConfig.tests.support import TestHelper + + +class SectionValueTestCase(unittest.TestCase): + + def test_repr(self): + class MockMatcher(object): + type = None + + matcher = MockMatcher() + matcher.type = MockMatcher() + matcher.type.name = 'matcher' + + sv = SectionValue({}, 'name', matcher) + self.assertIn('name', repr(sv)) + + sv = SectionValue({}, None, matcher) + self.assertIn('at', repr(sv)) + + self.assertIs(matcher, sv.getSectionMatcher()) + + def test_str(self): + d = {'k': 'v'} + sv = SectionValue(d, None, None) + self.assertEqual( + 'k : v', + str(sv)) + +class SectionMatcherTestCase(TestHelper, unittest.TestCase): + + def test_constructor_error(self): + class Mock(object): + name = 'name' + def allowUnnamed(self): + return False + mock = Mock() + self.assertRaisesRegex(ConfigurationError, + "sections may not be unnamed", + SectionMatcher, + mock, mock, None, None) + +class BaseMatcherTestCase(TestHelper, unittest.TestCase): + + def test_repr(self): + class Mock(dict): + name = 'name' + + matcher = BaseMatcher(None, Mock(), None) + repr(matcher) + + def test_duplicate_section_names(self): + class Mock(dict): + name = 'name' + + matcher = BaseMatcher(None, Mock(), None) + matcher._sectionnames['foo'] = None + + self.assertRaisesRegex(ConfigurationError, + "section names must not be re-used", + matcher.addSection, + None, 'foo', None) + + def test_construct_errors(self): + class MockType(object): 
+ attribute = 'attr' + + _multi = True + _section = True + + def ismulti(self): + return self._multi + + def issection(self): + return self._section + + type_ = [] + matcher = BaseMatcher(None, type_, None) + type_.append( ('key', MockType() ) ) + + class MockSection(object): + def getSectionDefinition(self): + return self + + def datatype(self, _s): + raise ValueError() + + matcher._values['attr'] = [MockSection()] + + with self.assertRaises(DataConversionError): + matcher.constuct() + + type_[0][1]._multi = False + matcher._values['attr'] = MockSection() + with self.assertRaises(DataConversionError): + matcher.constuct() + + + def test_create_child_bad_name(self): + + class MockType(list): + name = 'foo' + sectiontype = None + + def getsectioninfo(self, type_name, name): + return self + + def isabstract(self): + return False + + def isAllowedName(self, name): + return False + + t = MockType() + t.sectiontype = MockType() + matcher = BaseMatcher(None, t, None) + self.assertRaisesRegex(ConfigurationError, + 'is not an allowed name', + matcher.createChildMatcher, + MockType(), 'ignored') diff --git a/thesisenv/lib/python3.6/site-packages/ZConfig/tests/test_readme.py b/thesisenv/lib/python3.6/site-packages/ZConfig/tests/test_readme.py new file mode 100644 index 0000000..f9613aa --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZConfig/tests/test_readme.py @@ -0,0 +1,87 @@ +############################################################################## +# +# Copyright (c) 2009 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. 
+# +############################################################################## +import doctest +import manuel.capture +import manuel.doctest +import manuel.testing +import os +import os.path +import unittest +import logging + + +options = doctest.REPORT_NDIFF | doctest.ELLIPSIS + +old = {} + +def setUp(test): + logger = logging.getLogger() + old['level'] = logger.level + old['handlers'] = logger.handlers[:] + +def tearDown(test): + logger = logging.getLogger() + logger.level = old['level'] + logger.handlers = old['handlers'] + +def findRoot(): + here = os.path.dirname(os.path.abspath(__file__)) + while not os.path.exists(os.path.join(here, 'setup.py')): + prev, here = here, os.path.dirname(here) + if here == prev: + # Let's avoid infinite loops at root + raise AssertionError('could not find my setup.py') + return here + +def docSetUp(test): + # Python 2 makes __path__ and __file__ relative in some + # cases (such as when we're executing with the 'ZConfig' + # directory on sys.path as CWD). This breaks finding + # schema components when we change directories. 
+ import ZConfig.components.logger as logger + logger.__file__ = os.path.abspath(logger.__file__) + logger.__path__ = [os.path.abspath(x) for x in logger.__path__] + + old['pwd'] = os.getcwd() + doc_path = os.path.join( + findRoot(), + 'doc') + os.chdir(doc_path) + setUp(test) + +def docTearDown(test): + os.chdir(old['pwd']) + tearDown(test) + old.clear() + +def test_suite(): + root = findRoot() + plugins = manuel.doctest.Manuel(optionflags=options) + plugins += manuel.capture.Manuel() + return unittest.TestSuite([ + manuel.testing.TestSuite( + plugins, + os.path.join(root, 'README.rst'), + setUp=setUp, tearDown=tearDown, + ), + manuel.testing.TestSuite( + plugins, + os.path.join(root, 'doc', 'using-logging.rst'), + globs={'resetLoggers': lambda: tearDown(None)}, + setUp=docSetUp, tearDown=docTearDown, + ), + ]) + +if __name__ == '__main__': + unittest.main(defaultTest='test_suite') diff --git a/thesisenv/lib/python3.6/site-packages/ZConfig/tests/test_schema.py b/thesisenv/lib/python3.6/site-packages/ZConfig/tests/test_schema.py new file mode 100644 index 0000000..09550dc --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZConfig/tests/test_schema.py @@ -0,0 +1,1335 @@ +############################################################################## +# +# Copyright (c) 2002, 2003 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. 
+# +############################################################################## +"""Tests of ZConfig schemas.""" + +import unittest + +import ZConfig + +from ZConfig.tests.support import TestHelper, CONFIG_BASE + + +def uppercase(value): + return str(value).upper() + +def appsection(value): + return MySection(value) + +def get_foo(section): + return section.foo + +class MySection: + def __init__(self, value): + self.conf = value + + +def get_section_attributes(section): + L = list(section.getSectionAttributes()) + return sorted(L) + + +class SchemaTestCase(TestHelper, unittest.TestCase): + """Tests of the basic schema support itself.""" + + + def test_minimal_schema(self): + schema = self.load_schema_text("") + self.assertEqual(len(schema), 0) + self.assertRaises(IndexError, + lambda schema=schema: schema[0]) + self.assertRaises(ZConfig.ConfigurationError, + schema.getinfo, "foo") + + def test_simple(self): + schema, conf = self.load_both("simple.xml", "simple.conf") + self._verifySimpleConf(conf) + + def _verifySimpleConf(self,conf): + eq = self.assertEqual + eq(conf.var1, 'abc') + eq(conf.int_var, 12) + eq(conf.float_var, 12.02) + eq(conf.neg_int, -2) + + check = self.assertTrue + check(conf.true_var_1) + check(conf.true_var_2) + check(conf.true_var_3) + check(not conf.false_var_1) + check(not conf.false_var_2) + check(not conf.false_var_3) + + def test_app_datatype(self): + dtname = __name__ + ".uppercase" + schema = self.load_schema_text("""\ + + + + + abc + abc + + + not + lower + case + + + """ % (dtname, dtname, dtname, dtname)) + conf = self.load_config_text(schema, """\ + a qwerty + c upp + c er + c case + """) + eq = self.assertEqual + eq(conf.a, 'QWERTY') + eq(conf.b, 'ABC') + eq(conf.c, ['UPP', 'ER', 'CASE']) + eq(conf.d, ['NOT', 'LOWER', 'CASE']) + eq(get_section_attributes(conf), + ["a", "b", "c", "d"]) + + def test_app_sectiontype(self): + schema = self.load_schema_text("""\ + + + + +
+ + """ % __name__) + conf = self.load_config_text(schema, """\ + + sample 42 + + """) + self.assertTrue(isinstance(conf, MySection)) + o1 = conf.conf.sect + self.assertTrue(isinstance(o1, MySection)) + self.assertEqual(o1.conf.sample, 42) + + def test_empty_sections(self): + schema = self.load_schema_text("""\ + + +
+
+ + """) + conf = self.load_config_text(schema, """\ +
+
+
+ """) + self.assertTrue(conf.s1 is not None) + self.assertTrue(conf.s2 is not None) + self.assertEqual(get_section_attributes(conf), + ["s1", "s2"]) + + def test_deeply_nested_sections(self): + schema = self.load_schema_text("""\ + + + + + + +
+ + + +
+ +
+ + """) + conf = self.load_config_text(schema, """\ + + key sect3-value + + key sect2-value + + + + """) + eq = self.assertEqual + eq(conf.sect.sect.sect.key, "type1-value") + eq(conf.sect.sect.key, "sect2-value") + eq(conf.sect.key, "sect3-value") + eq(get_section_attributes(conf), + ["sect"]) + eq(get_section_attributes(conf.sect), + ["key", "sect"]) + eq(get_section_attributes(conf.sect.sect), + ["key", "sect"]) + eq(get_section_attributes(conf.sect.sect.sect), + ["key"]) + + def test_multivalued_keys(self): + schema = self.load_schema_text("""\ + + + + 1 + 2 + + + 3 + 4 + 5 + + + + """) + conf = self.load_config_text(schema, """\ + a foo + a bar + c 41 + c 42 + c 43 + """, num_handlers=2) + L = [] + self.handlers({'abc': L.append, + 'DEF': L.append}) + self.assertEqual(L, [['foo', 'bar'], conf]) + L = [] + self.handlers({'abc': None, + 'DEF': L.append}) + self.assertEqual(L, [conf]) + self.assertEqual(conf.a, ['foo', 'bar']) + self.assertEqual(conf.b, [1, 2]) + self.assertEqual(conf.c, [41, 42, 43]) + self.assertEqual(conf.d, []) + self.assertEqual(get_section_attributes(conf), + ["a", "b", "c", "d"]) + + def test_multikey_required(self): + schema = self.load_schema_text("""\ + + + + """) + self.assertRaises(ZConfig.ConfigurationError, + self.load_config_text, schema, "") + + def test_multisection_required(self): + schema = self.load_schema_text("""\ + + + + + """) + self.assertRaises(ZConfig.ConfigurationError, + self.load_config_text, schema, "") + + def test_key_required_but_missing(self): + schema = self.load_schema_text("""\ + + + + """) + self.assertRaises(ZConfig.ConfigurationError, + self.load_config_text, schema, "") + + def test_section_required_but_missing(self): + schema = self.load_schema_text("""\ + + +
+ + """) + self.assertRaises(ZConfig.ConfigurationError, + self.load_config_text, schema, "") + + def test_key_default_element(self): + self.assertRaises( + ZConfig.SchemaError, self.load_schema_text, """\ + + + text + + + """) + + def test_bad_handler_maps(self): + schema = self.load_schema_text("""\ + + + + + """) + conf = self.load_config_text(schema, """\ + a foo + b bar + """, num_handlers=2) + self.assertEqual(get_section_attributes(conf), + ["a", "b"]) + self.assertRaises(ZConfig.ConfigurationError, + self.handlers, {'abc': id, 'ABC': id, 'def': id}) + self.assertRaises(ZConfig.ConfigurationError, + self.handlers, {}) + + def test_handler_ordering(self): + schema = self.load_schema_text("""\ + + + + +
+ +
+ + """) + conf = self.load_config_text(schema, """\ + + + + """, num_handlers=3) + L = [] + self.handlers({'a': L.append, + 'b': L.append, + 'c': L.append}) + outer = conf.sect_outer + inner = outer.sect_inner + self.assertEqual(L, [inner, outer, conf]) + + def test_duplicate_section_names(self): + schema = self.load_schema_text("""\ + + + +
+ +
+ + """) + self.assertRaises(ZConfig.ConfigurationError, self.load_config_text, + schema, """\ + + + """) + conf = self.load_config_text(schema, """\ + + + + """) + + def test_disallowed_duplicate_attribute(self): + self.assertRaises(ZConfig.SchemaError, self.load_schema_text, """\ + + + + + """) + + def test_unknown_datatype_name(self): + self.assertRaises(ZConfig.SchemaError, + self.load_schema_text, "") + + def test_load_abstracttype(self): + schema = self.load_schema_text("""\ + + + This is an abstract section type. + + + + + + + + + + """) + # check the types that get defined + t = schema.gettype("group") + self.assertTrue(t.isabstract()) + t1 = schema.gettype("t1") + self.assertTrue(not t1.isabstract()) + self.assertTrue(t.getsubtype("t1") is t1) + t2 = schema.gettype("t2") + self.assertTrue(not t2.isabstract()) + self.assertTrue(t.getsubtype("t2") is t2) + self.assertRaises(ZConfig.ConfigurationError, t.getsubtype, "group") + self.assertTrue(t1 is not t2) + # try loading a config that relies on this schema + conf = self.load_config_text(schema, """\ + + + k1 value1 + + + + k2 value2 + + """) + eq = self.assertEqual + eq(get_section_attributes(conf), ["g"]) + eq(len(conf.g), 4) + eq(conf.g[0].k1, "default1") + eq(conf.g[1].k1, "value1") + eq(conf.g[2].k2, "default2") + eq(conf.g[3].k2, "value2") + + # white box: + self.assertTrue(conf.g[0].getSectionDefinition() is t1) + self.assertTrue(conf.g[1].getSectionDefinition() is t1) + self.assertTrue(conf.g[2].getSectionDefinition() is t2) + self.assertTrue(conf.g[3].getSectionDefinition() is t2) + + def test_abstracttype_extension(self): + schema = self.load_schema_text("""\ + + + +
+ + """) + abstype = schema.gettype("group") + self.assertTrue(schema.gettype("extra") is abstype.getsubtype("extra")) + + # make sure we can use the extension in a config: + conf = self.load_config_text(schema, "") + self.assertEqual(conf.thing.getSectionType(), "extra") + self.assertEqual(get_section_attributes(conf), ["thing"]) + self.assertEqual(get_section_attributes(conf.thing), []) + + def test_abstracttype_extension_errors(self): + # specifying a non-existant abstracttype + self.assertRaises(ZConfig.SchemaError, self.load_schema_text, """\ + + + + """) + # specifying something that isn't an abstracttype + self.assertRaises(ZConfig.SchemaError, self.load_schema_text, """\ + + + + + """) + + def test_arbitrary_key(self): + schema = self.load_schema_text("""\ + + + + """) + conf = self.load_config_text(schema, "some-key 42") + self.assertEqual(conf.keymap, {'some-key': 42}) + self.assertEqual(get_section_attributes(conf), ["keymap"]) + + def test_arbitrary_multikey_required(self): + schema = self.load_schema_text("""\ + + + + """) + conf = self.load_config_text(schema, """\ + some-key 42 + some-key 43 + """) + self.assertEqual(conf.keymap, {'some-key': [42, 43]}) + + def test_arbitrary_multikey_optional(self): + schema = self.load_schema_text("""\ + + + + +
+ + """) + conf = self.load_config_text(schema, """\ + + some-key 42 + some-key 43 + + """) + self.assertEqual(conf.stuff.keymap, {'some-key': ['42', '43']}) + self.assertEqual(get_section_attributes(conf), ["stuff"]) + + def test_arbitrary_multikey_optional_empty(self): + schema = self.load_schema_text("""\ + + + + +
+ + """) + conf = self.load_config_text(schema, "") + self.assertEqual(conf.stuff.keymap, {}) + + def test_arbitrary_multikey_with_defaults(self): + schema = self.load_schema_text("""\ + + + value-a1 + value-a2 + value-b + + + """) + conf = self.load_config_text(schema, "") + self.assertEqual(conf.keymap, {'a': ['value-a1', 'value-a2'], + 'b': ['value-b']}) + + def test_arbitrary_multikey_with_unkeyed_default(self): + self.assertRaises(ZConfig.SchemaError, + self.load_schema_text, """\ + + + value-a1 + + + """) + + def test_arbitrary_key_with_defaults(self): + schema = self.load_schema_text("""\ + + + value-a + value-b + + + """) + conf = self.load_config_text(schema, "") + self.assertEqual(conf.keymap, {'a': 'value-a', 'b': 'value-b'}) + + def test_arbitrary_key_with_unkeyed_default(self): + self.assertRaises(ZConfig.SchemaError, + self.load_schema_text, """\ + + + value-a1 + + + """) + + def test_arbitrary_keys_with_others(self): + schema = self.load_schema_text("""\ + + + + + + """) + conf = self.load_config_text(schema, """\ + some-key 42 + k2 3 + """) + self.assertEqual(conf.k1, 'v1') + self.assertEqual(conf.k2, 3) + self.assertEqual(conf.keymap, {'some-key': 42}) + self.assertEqual(get_section_attributes(conf), + ["k1", "k2", "keymap"]) + + def test_arbitrary_key_missing(self): + schema = self.load_schema_text("""\ + + + + """) + self.assertRaises(ZConfig.ConfigurationError, + self.load_config_text, schema, "# empty config file") + + def test_arbitrary_key_bad_schema(self): + self.assertRaises(ZConfig.SchemaError, self.load_schema_text, """\ + + + + + """) + + def test_getrequiredtypes(self): + schema = self.load_schema("library.xml") + self.assertEqual(schema.getrequiredtypes(), []) + + schema = self.load_schema_text("""\ + + + +
+ + """) + L = sorted(schema.getrequiredtypes()) + self.assertEqual(L, ["used"]) + + def test_getunusedtypes(self): + schema = self.load_schema("library.xml") + L = sorted(schema.getunusedtypes()) + self.assertEqual(L, ["type-a", "type-b"]) + + schema = self.load_schema_text("""\ + + + +
+ + """) + self.assertEqual(schema.getunusedtypes(), ["unused"]) + + def test_section_value_mutation(self): + schema, conf = self.load_both("simple.xml", "simple.conf") + orig = conf.empty + new = [] + conf.empty = new + self.assertTrue(conf.empty is new) + + def test_simple_anonymous_section(self): + schema = self.load_schema_text("""\ + + + + +
+ + """) + conf = self.load_config_text(schema, "") + self.assertEqual(conf.attr.key, "value") + + def test_simple_anonymous_section_without_name(self): + # make sure we get the same behavior without name='*' + schema = self.load_schema_text("""\ + + + + +
+ + """) + conf = self.load_config_text(schema, "") + self.assertEqual(conf.attr.key, "value") + + def test_simple_anynamed_section(self): + schema = self.load_schema_text("""\ + + + + +
+ + """) + conf = self.load_config_text(schema, "") + self.assertEqual(conf.attr.key, "value") + self.assertEqual(conf.attr.getSectionName(), "name") + + # if we omit the name, it's an error + self.assertRaises(ZConfig.ConfigurationError, + self.load_config_text, schema, "") + + def test_nested_abstract_sectiontype(self): + schema = self.load_schema_text("""\ + + + + +
+ +
+ + """) + conf = self.load_config_text(schema, """\ + + + + """) + + def test_nested_abstract_sectiontype_without_name(self): + # make sure we get the same behavior without name='*' + schema = self.load_schema_text("""\ + + + + +
+ +
+ + """) + conf = self.load_config_text(schema, """\ + + + + """) + + def test_reserved_attribute_prefix(self): + template = """\ + + + %s + + """ + def check(thing, self=self, template=template): + text = template % thing + self.assertRaises(ZConfig.SchemaError, + self.load_schema_text, text) + + check("") + check("") + check("") + check("") + check("
") + check("
") + check("") + check("") + + def test_sectiontype_as_schema(self): + schema = self.load_schema_text("""\ + + + + + + +
+ + + """) + t = schema.gettype("t") + conf = self.load_config_text(t, "") + self.assertEqual(conf.tkey, "tkey-default") + self.assertEqual(conf.section.skey, "skey-default") + self.assertEqual(get_section_attributes(conf), ["section", "tkey"]) + self.assertEqual(get_section_attributes(conf.section), ["skey"]) + + def test_datatype_conversion_error(self): + schema_url = "file:///tmp/fake-url-1.xml" + config_url = "file:///tmp/fake-url-2.xml" + schema = self.load_schema_text("""\ + + + + """, url=schema_url) + e = self.get_data_conversion_error( + schema, "", config_url) + self.assertEqual(e.url, schema_url) + self.assertEqual(e.lineno, 2) + + e = self.get_data_conversion_error(schema, """\ + # comment + + key splat + """, config_url) + self.assertEqual(e.url, config_url) + self.assertEqual(e.lineno, 3) + + def get_data_conversion_error(self, schema, src, url): + with self.assertRaises(ZConfig.DataConversionError) as e: + self.load_config_text(schema, src, url=url) + + return e.exception + + def test_numeric_section_name(self): + schema = self.load_schema_text("""\ + + + + + """) + conf = self.load_config_text(schema, "") + self.assertEqual(len(conf.things), 1) + + def test_sectiontype_extension(self): + schema = self.load_schema_text("""\ + + + + + + + +
+ + """) + conf = self.load_config_text(schema, """\ + + k1 k1-value + k2 k2-value + + """) + eq = self.assertEqual + eq(conf.s.k1, "k1-value") + eq(conf.s.k2, "k2-value") + eq(get_section_attributes(conf), ["s"]) + eq(get_section_attributes(conf.s), ["k1", "k2"]) + + def test_sectiontype_extension_errors(self): + # cannot override key from base + self.assertRaises(ZConfig.SchemaError, self.load_schema_text, """\ + + + + + + + + + """) + # cannot extend non-existing section + self.assertRaises(ZConfig.SchemaError, self.load_schema_text, """\ + + + + """) + # cannot extend abstract type + self.assertRaises(ZConfig.SchemaError, self.load_schema_text, """\ + + + + + """) + + def test_sectiontype_derived_keytype(self): + # make sure that a derived section type inherits the keytype + # of its base + schema = self.load_schema_text("""\ + + + + + + +
+ + """) + conf = self.load_config_text(schema, """\ + + foo bar + Foo BAR + + """) + self.assertEqual(conf.foo.foo, "bar") + self.assertEqual(conf.foo.Foo, "BAR") + self.assertEqual(get_section_attributes(conf.foo), ["Foo", "foo"]) + + def test_sectiontype_override_keytype(self): + schema = self.load_schema_text("""\ + + + + + +
+
+ + """) + conf = self.load_config_text(schema, """\ + + ident1 foo + Ident2 bar + + + EXAMPLE.COM foo + + """) + L = sorted(conf.base.map.items()) + self.assertEqual(L, [("Ident2", "bar"), ("ident1", "foo")]) + L = sorted(conf.derived.map.items()) + self.assertEqual(L, [("example.com", "foo")]) + self.assertEqual(get_section_attributes(conf), ["base", "derived"]) + + def test_keytype_applies_to_default_key(self): + schema = self.load_schema_text("""\ + + + + 42 + 24 + + +
+ + """) + conf = self.load_config_text(schema, "") + items = sorted(conf.sect.mapping.items()) + self.assertEqual(items, [("bar", "24"), ("foo", "42")]) + + def test_duplicate_default_key_checked_in_schema(self): + self.assertRaises(ZConfig.SchemaError, + self.load_schema_text, """\ + + + + 42 + 24 + + +
+ + """) + + def test_default_keys_rechecked_clash_in_derived_sectiontype(self): + # If the default values associated with a can't + # be supported by a new keytype for a derived sectiontype, an + # error should be indicated. + self.assertRaises(ZConfig.SchemaError, + self.load_schema_text, """\ + + + + 42 + 42 + + + + + +
+ + """) + + def test_default_keys_rechecked_dont_clash_in_derived_sectiontype(self): + # If the default values associated with a can't + # be supported by a new keytype for a derived sectiontype, an + # error should be indicated. + schema = self.load_schema_text("""\ + + + + 42 + 42 + + + + + +
+
+ + """) + conf = self.load_config_text(schema, """\ + + + """) + base = sorted(conf.base.mapping.items()) + self.assertEqual(base, [("Foo", ["42"]), ("foo", ["42"])]) + sect = sorted(conf.sect.mapping.items()) + self.assertEqual(sect, [("foo", ["42", "42"])]) + + def test_sectiontype_inherited_datatype(self): + schema = self.load_schema_text("""\ + + + + + +
+ + """) + conf = self.load_config_text(schema, """\ + + foo bar + + """) + self.assertEqual(conf.splat, "bar") + + def test_schema_keytype(self): + schema = self.load_schema_text("""\ + + + + """) + conf = self.load_config_text(schema, + "host.example.com 127.0.0.1\n" + "www.example.org 127.0.0.2\n") + table = conf.table + self.assertEqual(len(table), 2) + L = sorted(table.items()) + self.assertEqual(L, [("host.example.com", "127.0.0.1"), + ("www.example.org", "127.0.0.2")]) + + self.assertRaises(ZConfig.ConfigurationError, + self.load_config_text, schema, "abc. 127.0.0.1") + + def test_keytype_identifier(self): + schema = self.load_schema_text("""\ + + + + + """) + conf = self.load_config_text(schema, + "Foo Foo-value\n" + "foo foo-value\n") + self.assertEqual(conf.foo, "foo-value") + self.assertEqual(conf.Foo, "Foo-value") + self.assertEqual(get_section_attributes(conf), ["Foo", "foo"]) + # key mis-match based on case: + self.assertRaises(ZConfig.ConfigurationError, + self.load_config_text, schema, "FOO frob\n") + # attribute names conflict, since the keytype isn't used to + # generate attribute names + self.assertRaises(ZConfig.SchemaError, + self.load_schema_text, """\ + + + + + """) + + def test_datatype_casesensitivity(self): + self.load_schema_text("") + + def test_simple_extends(self): + schema = self.load_schema_text("""\ + +
+ + """ % (CONFIG_BASE, CONFIG_BASE)) + self._verifySimpleConf(self.load_config(schema, "simple.conf")) + + def test_extends_fragment_failure(self): + self.assertRaises(ZConfig.SchemaError, + self.load_schema_text, + "" % CONFIG_BASE) + + def test_extends_description_override(self): + schema = self.load_schema_text("""\ + + + overriding description + +
+
+ + """ % (CONFIG_BASE, CONFIG_BASE)) + description = schema.description.strip() + self.assertEqual(description, "overriding description") + + def test_extends_description_first_extended_wins(self): + schema = self.load_schema_text("""\ + +
+
+ + """ % (CONFIG_BASE, CONFIG_BASE)) + description = schema.description.strip() + self.assertEqual(description, "base description") + + def test_multi_extends_implicit_OK(self): + self.load_schema_text("""\ + +
+
+ + """ % (CONFIG_BASE, CONFIG_BASE)) + + def test_multi_extends_explicit_datatype_OK(self): + self.load_schema_text("""\ + +
+
+ + """ % (CONFIG_BASE, CONFIG_BASE)) + + def test_multi_extends_explicit_keytype_OK(self): + self.load_schema_text("""\ + +
+
+ + """ % (CONFIG_BASE, CONFIG_BASE, __name__)) + + def test_multi_extends_datatype_conflict(self): + self.assertRaises(ZConfig.SchemaError, + self.load_schema_text, """\ + + """ % (CONFIG_BASE, CONFIG_BASE)) + + def test_multi_extends_keytype_conflict(self): + self.assertRaises(ZConfig.SchemaError, + self.load_schema_text, """\ + + """ % (CONFIG_BASE, CONFIG_BASE)) + + def test_multiple_descriptions_is_error(self): + self.assertRaises(ZConfig.SchemaError, + self.load_schema_text, """\ + + foo + bar + + """) + + def test_srepr(self): + from ZConfig.schema import _srepr + FOO = u'foo' + self.assertEqual(_srepr('foo'), "'foo'") + self.assertEqual(_srepr(FOO), "'foo'") + + def test_schema_example(self): + schema = self.load_schema_text("""\ + + This is an example + + """) + self.assertEqual(schema.example, 'This is an example') + + def test_key_example(self): + schema = self.load_schema_text("""\ + + + + This is an example + + + + """) + self.assertEqual(schema.gettype('abc').getinfo('def').example, 'This is an example') + + def test_multikey_example(self): + schema = self.load_schema_text("""\ + + + + This is an example + + + + """) + self.assertEqual(schema.gettype('abc').getinfo('def').example, 'This is an example') + + def test_sectiontype_example(self): + schema = self.load_schema_text("""\ + + + This is an example + +
+ + """) + self.assertEqual(schema.gettype('abc').example, 'This is an example') + + def test_multiple_examples_is_error(self): + self.assertRaises(ZConfig.SchemaError, self.load_schema_text, """\ + + This is an example + This is an example + + """) + + def test_section_example(self): + schema = self.load_schema_text("""\ + + + This is a sectiontype example + +
+ This is an example +
+
+ """) + self.assertEqual(schema.getinfo('def').sectiontype.example, 'This is a sectiontype example') + self.assertEqual(schema.getinfo('def').example, 'This is an example') + + def test_multisection_example(self): + schema = self.load_schema_text("""\ + + + This is a sectiontype example + + + This is an example + + + """) + self.assertEqual(schema[0][1].sectiontype.example, 'This is a sectiontype example') + self.assertEqual(schema[0][1].example, 'This is an example') + + def checkErrorText(self, schema, error_text): + self.assertRaisesRegex(ZConfig.SchemaError, error_text, + self.load_schema_text, schema) + + def test_error_bad_parent(self): + self.checkErrorText( + "", + "Unknown tag") + + def test_error_unknown_doc(self): + self.checkErrorText("", "Unknown document type") + + def test_error_extra_cdata(self): + self.checkErrorText("text", + "non-blank character data") + + + def test_error_subclass(self): + import ZConfig.schema + import ZConfig.datatypes + class MockLoader(object): + registry = ZConfig.datatypes.Registry() + parser = ZConfig.schema.SchemaParser(MockLoader(), 'url') + parser.push_prefix({'prefix': __name__}) + parser.push_prefix({'prefix': '.' + __name__}) + + def cv(n): + raise ValueError() + MockLoader.registry._stock['dotted-suffix'] = cv + + self.assertRaises(ZConfig.SchemaError, + parser.push_prefix, + {'prefix': __name__}) + + self.assertRaises(ZConfig.SchemaError, + parser.basic_key, + "not a basic key") + + self.assertRaises(ZConfig.SchemaError, + parser.identifier, + "not an identifier") + + def test_error_required_value(self): + self.checkErrorText( + """ + + + + """, + "value for 'required' must be") + + def test_error_section(self): + self.checkErrorText( + """ + +
+ """, + "section must specify type") + + def test_error_multisection(self): + self.checkErrorText( + """ + + + + """, + "multisection must specify .* for the name") + + def test_error_multikey(self): + + self.checkErrorText( + """ + + + + """, + "default values for multikey must be given") + + def test_error_key_info(self): + + self.checkErrorText( + """ + + + + """, + "required key cannot have a default") + + self.checkErrorText( + """ + + + + """, + r"key may not specify '\*' for name") + + self.checkErrorText( + """ + + + + """, + "name must be specified and non-empty") + + self.checkErrorText( + """ + + + + """, + "container attribute must be specified") + + self.checkErrorText( + """ + + + + """, + "could not convert key name to keytype") + + def test_error_import_fragment(self): + self.checkErrorText( + """ + + + """, + "may not include a fragment identifier") + + def test_error_sectiontype(self): + self.checkErrorText( + """ + + + """, + "sectiontype name must not be omitted or empty") + + def test_error_abstracttype(self): + self.checkErrorText( + """ + + + """, + "abstracttype name must not be omitted or empty") + + def test_metadefault(self): + self.load_schema_text( + """ + + + a default + + + """) + + def test_error_component_section(self): + self.checkErrorText( + """ + + + """, + "elements may not be nested") + + self.load_schema_text( + """ + + + + """) + + +def test_suite(): + return unittest.defaultTestLoader.loadTestsFromName(__name__) + +if __name__ == '__main__': + unittest.main(defaultTest='test_suite') diff --git a/thesisenv/lib/python3.6/site-packages/ZConfig/tests/test_schema2html.py b/thesisenv/lib/python3.6/site-packages/ZConfig/tests/test_schema2html.py new file mode 100644 index 0000000..3ab1fe3 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZConfig/tests/test_schema2html.py @@ -0,0 +1,227 @@ +############################################################################## +# +# Copyright (c) 2017 Zope Foundation and 
Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +from __future__ import absolute_import + +import contextlib +import sys +import textwrap +import unittest + +import docutils +import docutils.utils +import docutils.frontend +import docutils.parsers.rst +import docutils.parsers.rst.directives + + + +try: + # Note that we're purposely using the old + # StringIO object on Python 2 because it auto-converts + # Unicode to str, which io.BytesIO and io.StringIO don't + # but which normal printing to default sys.stdout *does* + from cStringIO import StringIO +except ImportError: + from ZConfig._compat import NStringIO as StringIO + + +from ZConfig import schema2html + +from ZConfig.sphinx import SchemaToRstDirective +docutils.parsers.rst.directives.register_directive("zconfig", SchemaToRstDirective) +from ZConfig.sphinx import RstSchemaFormatter + +from .support import input_file +from .support import with_stdin_from_input_file + + +@contextlib.contextmanager +def stdout_replaced(buf): + old_stdout = sys.stdout + sys.stdout = buf + try: + yield + finally: + sys.stdout = old_stdout + + +def run_transform(*args): + if '--out' not in args and '-o' not in args: + buf = StringIO() + with stdout_replaced(buf): + schema2html.main(args) + return buf + return schema2html.main(args) # pragma: no cover + +if schema2html.RstSchemaPrinter: + def run_transform_rst(*args): + args += ('--format', 'xml') + return run_transform(*args) +else: + def run_transform_rst(*args): + pass + +class TestSchema2HTML(unittest.TestCase): + 
+ def test_schema_only(self): + res = run_transform(input_file('simple.xml')) + self.assertIn('', res.getvalue()) + run_transform_rst(input_file('simple.xml')) + + @with_stdin_from_input_file('simple.xml') + def test_schema_only_redirect(self): + res = run_transform("-") + self.assertIn('', res.getvalue()) + + def test_cover_all_schemas(self): + for name in ('base-datatype1.xml', + 'base-datatype2.xml', + 'base-keytype1.xml', + 'base-keytype2.xml', + 'base.xml', + 'library.xml', + 'simplesections.xml',): + res = run_transform(input_file(name)) + self.assertIn('', res.getvalue()) + run_transform_rst(input_file(name)) + + def test_html_section_example(self): + name = 'simplesections.xml' + res = run_transform(input_file(name)) + out = res.getvalue() + self.assertIn('Section Example', out) + self.assertIn('Multisection Example', out) + + def test_rst_section_example(self): + name = 'simplesections.xml' + res = run_transform_rst(input_file(name)) + out = res.getvalue() + self.assertIn('Section Example', out) + self.assertIn('Multisection Example', out) + + def test_cover_logging_components(self): + res = run_transform('--package', 'ZConfig.components.logger') + self.assertIn('eventlog', res.getvalue()) + run_transform_rst('--package', 'ZConfig.components.logger') + +class TestRst(unittest.TestCase): + + def _parse(self, text): + document = docutils.utils.new_document( + "Schema", + settings=docutils.frontend.OptionParser( + components=(docutils.parsers.rst.Parser,) + ).get_default_values()) + + parser = docutils.parsers.rst.Parser() + text = textwrap.dedent(text) + parser.parse(text, document) + return document + + def test_parse_package(self): + text = """ + Document + ======== + .. 
zconfig:: ZConfig.components.logger + + """ + document = self._parse(text) + doc_text = document.astext() + # Check that it produced output + self.assertIn("SMTPHandler", doc_text) + self.assertIn("Example:", doc_text) + + def test_parse_package_file(self): + text = """ + Document + ======== + .. zconfig:: ZConfig.components.logger + :file: base-logger.xml + + """ + document = self._parse(text) + doc_text = document.astext() + # Check that it produced output, limited to + # just that one file. + self.assertNotIn("SMTPHandler", doc_text) + self.assertIn("base-logger", doc_text) + self.assertIn("Base definition", doc_text) + self.assertIn("Example:", doc_text) + + def test_parse_package_limited_names(self): + text = """ + Document + ======== + .. zconfig:: ZConfig.components.logger + :members: syslog logfile + """ + document = self._parse(text) + doc_text = document.astext() + + # Check that it produced output, limited to + # just that one part of the tree + self.assertNotIn("SMTPHandler", doc_text) + self.assertIn("syslog", doc_text) + self.assertIn("SyslogHandlerFactory", doc_text) + self.assertIn("FileHandlerFactory", doc_text) + + def test_parse_package_excluded_names(self): + text = """ + Document + ======== + .. zconfig:: ZConfig.components.logger + :members: ZConfig.logger.base-logger + :excluded-members: ZConfig.logger.handler + """ + document = self._parse(text) + doc_text = document.astext() + + # Check that it produced output, limited to + # just that one part of the tree + # In this case, the root base-logger, but the handlers subtree + # was excluded. + self.assertIn("zconfig.logger.base-logger", doc_text) + self.assertNotIn("SMTPHandler", doc_text) + self.assertNotIn("syslog", doc_text) + self.assertNotIn("SyslogHandlerFactory", doc_text) + self.assertNotIn("FileHandlerFactory", doc_text) + + + def test_description_dedent(self): + text = """No leading whitespace on this line. + But this line has whitespace. + As does this one. 
+ """ + written = [] + class FUT(RstSchemaFormatter): + def __init__(self): + pass + def _parsed(self, text, _): + return text + def write(self, *texts): + written.extend(texts) + fut = FUT() + fut.description(text) + + dedented = ("""No leading whitespace on this line.\n""" + """But this line has whitespace.\n""" + """As does this one.\n""") + + self.assertEqual(written[0], dedented) + +def test_suite(): + return unittest.defaultTestLoader.loadTestsFromName(__name__) + +if __name__ == '__main__': + unittest.main(defaultTest='test_suite') diff --git a/thesisenv/lib/python3.6/site-packages/ZConfig/tests/test_schemaless.py b/thesisenv/lib/python3.6/site-packages/ZConfig/tests/test_schemaless.py new file mode 100644 index 0000000..ebc0114 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZConfig/tests/test_schemaless.py @@ -0,0 +1,42 @@ +############################################################################## +# +# Copyright (c) 2007 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""\ +Test driver for ZConfig.schemaless. 
+ +""" +__docformat__ = "reStructuredText" + +import manuel.doctest +import manuel.testing +import unittest + +from ZConfig.schemaless import Section + +class TestSection(unittest.TestCase): + + def test_init_with_data(self): + s = Section(data={'k': 'v'}) + self.assertDictEqual(s, {'k': 'v'}) + + +def test_suite(): + return unittest.TestSuite([ + unittest.defaultTestLoader.loadTestsFromName(__name__), + manuel.testing.TestSuite( + manuel.doctest.Manuel(), + '../schemaless.txt'), + ]) + +if __name__ == '__main__': + unittest.main(defaultTest='test_suite') diff --git a/thesisenv/lib/python3.6/site-packages/ZConfig/tests/test_subst.py b/thesisenv/lib/python3.6/site-packages/ZConfig/tests/test_subst.py new file mode 100644 index 0000000..33339e9 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZConfig/tests/test_subst.py @@ -0,0 +1,109 @@ +############################################################################## +# +# Copyright (c) 2002, 2003 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Tests of the string interpolation module.""" + +# This is needed to support Python 2.1. 
+from __future__ import nested_scopes +import os + +import unittest + +from ZConfig import SubstitutionReplacementError, SubstitutionSyntaxError +from ZConfig.substitution import isname, substitute + + +class SubstitutionTestCase(unittest.TestCase): + def test_simple_names(self): + d = {"name": "value", + "name1": "abc", + "name_": "def", + "_123": "ghi"} + def check(s, v): + self.assertEqual(substitute(s, d), v) + check("$name", "value") + check(" $name ", " value ") + check("${name}", "value") + check(" ${name} ", " value ") + check("$name$name", "valuevalue") + check("$name1$name", "abcvalue") + check("$name_$name", "defvalue") + check("$_123$name", "ghivalue") + check("$name $name", "value value") + check("$name1 $name", "abc value") + check("$name_ $name", "def value") + check("$_123 $name", "ghi value") + check("splat", "splat") + check("$$", "$") + check("$$$name$$", "$value$") + + # Check for an ENV var + self.assertEqual(substitute("$(PATH)", d), os.getenv("PATH")) + + def test_undefined_names(self): + d = {"name": "value"} + self.assertRaises(SubstitutionReplacementError, + substitute, "$splat", d) + self.assertRaises(SubstitutionReplacementError, + substitute, "$splat1", d) + self.assertRaises(SubstitutionReplacementError, + substitute, "$splat_", d) + + # An undefined ENV should also rise + self.assertRaises(SubstitutionReplacementError, + substitute, "$(MY_SUPER_PATH)", d) + + def test_syntax_errors(self): + d = {"name": "${next"} + def check(s): + self.assertRaises(SubstitutionSyntaxError, + substitute, s, d) + check("${") + check("${name") + check("${1name}") + check("${ name}") + check("$(") + check("$(name") + check("$(1name)") + check("$( name)") + + def test_edge_cases(self): + # It's debatable what should happen for these cases, so we'll + # follow the lead of the Bourne shell here. 
+ def check(s): + self.assertRaises(SubstitutionSyntaxError, + substitute, s, {}) + check("$1") + check("$") + check("$ stuff") + + def test_non_nesting(self): + d = {"name": "$value"} + self.assertEqual(substitute("$name", d), "$value") + + def test_isname(self): + self.assertTrue(isname("abc")) + self.assertTrue(isname("abc_def")) + self.assertTrue(isname("_abc")) + self.assertTrue(isname("abc_")) + self.assertTrue(not isname("abc-def")) + self.assertTrue(not isname("-def")) + self.assertTrue(not isname("abc-")) + self.assertTrue(not isname("")) + + +def test_suite(): + return unittest.defaultTestLoader.loadTestsFromName(__name__) + +if __name__ == '__main__': + unittest.main(defaultTest='test_suite') diff --git a/thesisenv/lib/python3.6/site-packages/ZConfig/tests/test_validator.py b/thesisenv/lib/python3.6/site-packages/ZConfig/tests/test_validator.py new file mode 100644 index 0000000..fa1e4e1 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZConfig/tests/test_validator.py @@ -0,0 +1,57 @@ +############################################################################## +# +# Copyright (c) 2017 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. 
+# +############################################################################## +from __future__ import absolute_import + +import unittest + +from ZConfig import validator + +from .support import input_file +from .support import with_stdin_from_input_file + +def run_validator(*args): + return validator.main(args) + +class TestValidator(unittest.TestCase): + + def test_no_schema(self): + self.assertRaises(SystemExit, + run_validator) + + def test_schema_only(self): + res = run_validator("--schema", input_file('simple.xml')) + self.assertEqual(res, 0) + + @with_stdin_from_input_file('simple.conf') + def test_schema_only_redirect(self): + res = run_validator("--schema", input_file('simple.xml')) + self.assertEqual(res, 0) + + def test_good_config(self): + res = run_validator("--schema", input_file('simple.xml'), + input_file('simple.conf'), + input_file('simple.conf')) + self.assertEqual(res, 0) + + def test_bad_config(self): + res = run_validator("--schema", input_file("simple.xml"), + input_file("outer.conf")) + self.assertEqual(res, 1) + + +def test_suite(): + return unittest.defaultTestLoader.loadTestsFromName(__name__) + +if __name__ == '__main__': + unittest.main(defaultTest='test_suite') diff --git a/thesisenv/lib/python3.6/site-packages/ZConfig/tests/zipsource/README.txt b/thesisenv/lib/python3.6/site-packages/ZConfig/tests/zipsource/README.txt new file mode 100644 index 0000000..f6a74ae --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZConfig/tests/zipsource/README.txt @@ -0,0 +1,2 @@ +This directory contains a sample package that is used to create the +'foosample.zip' file used in the tests. 
diff --git a/thesisenv/lib/python3.6/site-packages/ZConfig/tests/zipsource/foo/__init__.py b/thesisenv/lib/python3.6/site-packages/ZConfig/tests/zipsource/foo/__init__.py new file mode 100644 index 0000000..196d378 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZConfig/tests/zipsource/foo/__init__.py @@ -0,0 +1 @@ +# This directory is a Python package. diff --git a/thesisenv/lib/python3.6/site-packages/ZConfig/tests/zipsource/foo/sample/__init__.py b/thesisenv/lib/python3.6/site-packages/ZConfig/tests/zipsource/foo/sample/__init__.py new file mode 100644 index 0000000..196d378 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZConfig/tests/zipsource/foo/sample/__init__.py @@ -0,0 +1 @@ +# This directory is a Python package. diff --git a/thesisenv/lib/python3.6/site-packages/ZConfig/tests/zipsource/foo/sample/component.xml b/thesisenv/lib/python3.6/site-packages/ZConfig/tests/zipsource/foo/sample/component.xml new file mode 100644 index 0000000..934d950 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZConfig/tests/zipsource/foo/sample/component.xml @@ -0,0 +1,9 @@ + + + + + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/ZConfig/tests/zipsource/foo/sample/datatypes.py b/thesisenv/lib/python3.6/site-packages/ZConfig/tests/zipsource/foo/sample/datatypes.py new file mode 100644 index 0000000..5ac962c --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZConfig/tests/zipsource/foo/sample/datatypes.py @@ -0,0 +1,7 @@ +"""Sample datatypes used for testing. 
+""" +__docformat__ = "reStructuredText" + + +def data(value): + return "| %s |" % value diff --git a/thesisenv/lib/python3.6/site-packages/ZConfig/url.py b/thesisenv/lib/python3.6/site-packages/ZConfig/url.py new file mode 100644 index 0000000..bd6b598 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZConfig/url.py @@ -0,0 +1,55 @@ +############################################################################## +# +# Copyright (c) 2002, 2003 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""urlparse-like helpers that normalize file: URLs. + +ZConfig and urllib2 expect file: URLs to consistently use the '//' +hostpart seperator; the functions here enforce this constraint. +""" + +from ZConfig._compat import urlparse as _urlparse + +urlsplit = _urlparse.urlsplit + +def urlnormalize(url): + lc = url.lower() + if lc.startswith("file:/") and not lc.startswith("file:///"): + url = "file://" + url[5:] + return url + + +def urlunsplit(parts): + parts = list(parts) + parts.insert(3, '') + url = _urlparse.urlunparse(tuple(parts)) + if (parts[0] == "file" + and url.startswith("file:/") + and not url.startswith("file:///")): + # It may not be possible to get here anymore with + # modern urlparse, at least not on posix? 
+ url = "file://" + url[5:] # pragma: no cover + return url + + +def urldefrag(url): + url, fragment = _urlparse.urldefrag(url) + return urlnormalize(url), fragment + + +def urljoin(base, relurl): + url = _urlparse.urljoin(base, relurl) + if url.startswith("file:/") and not url.startswith("file:///"): + # It may not be possible to get here anymore with + # modern urlparse, at least not on posix? + url = "file://" + url[5:] # pragma: no cover + return url diff --git a/thesisenv/lib/python3.6/site-packages/ZConfig/validator.py b/thesisenv/lib/python3.6/site-packages/ZConfig/validator.py new file mode 100644 index 0000000..ddfa529 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZConfig/validator.py @@ -0,0 +1,75 @@ +############################################################################## +# +# Copyright (c) 2003 Zope Corporation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## + +"""Script to check validity of a configuration file. +""" + +from __future__ import print_function +import argparse +import sys + + +import ZConfig + + +def main(args=None): + optparser = argparse.ArgumentParser( + description="Script to check validity of a configuration file", + epilog=""" + Each file named on the command line is checked for syntactical errors + and schema conformance. The schema must be specified. If no files + are specified and standard input is not a TTY, standard in is treated + as a configuration file. 
Specifying a schema and no configuration + files causes the schema to be checked.""", + ) + + optparser.add_argument( + "-s", "--schema", dest="schema", + required=True, + help="use the schema in FILE (can be a URL)", + metavar="FILE" + ) + + optparser.add_argument( + "file", + nargs='*', + help="Optional configuration file to check", + type=argparse.FileType('r'), + ) + + options = optparser.parse_args(args=args) + + schema = ZConfig.loadSchema(options.schema) + + if not options.file: + if sys.stdin.isatty(): + # just checking the schema + return 0 + + # stdin is a pipe + options.file = [sys.stdin] + + errors = False + for f in options.file: + try: + ZConfig.loadConfigFile(schema, f) + except ZConfig.ConfigurationError as e: + print(str(e), file=sys.stderr) + errors = True + + return int(errors) + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/thesisenv/lib/python3.6/site-packages/ZEO-5.2.0-py3.6.egg-info/PKG-INFO b/thesisenv/lib/python3.6/site-packages/ZEO-5.2.0-py3.6.egg-info/PKG-INFO new file mode 100644 index 0000000..85a7113 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO-5.2.0-py3.6.egg-info/PKG-INFO @@ -0,0 +1,1006 @@ +Metadata-Version: 2.1 +Name: ZEO +Version: 5.2.0 +Summary: ZEO - Single-server client-server database server for ZODB +Home-page: https://github.com/zopefoundation/ZEO +Author: Zope Foundation and Contributors +Author-email: zodb@googlegroups.com +License: ZPL 2.1 +Description: ============================================================ + ZEO - Single-server client-server database server for ZODB + ============================================================ + + ZEO is a client-server storage for `ZODB `_ for + sharing a single storage among many clients. When you use ZEO, a + lower-level storage, typically a file storage, is opened in the ZEO + server process. Client programs connect to this process using a ZEO + ClientStorage. ZEO provides a consistent view of the database to all + clients. 
The ZEO client and server communicate using a custom + protocol layered on top of TCP. + + Some alternatives to ZEO: + + - `NEO `_ is a distributed-server + client-server storage. + + - `RelStorage `_ + leverages the RDBMS servers to provide a client-server storage. + + .. contents:: + + Introduction + ============ + + There are several features that affect the behavior of + ZEO. This section describes how a few of these features + work. Subsequent sections describe how to configure every option. + + Client cache + ------------ + + Each ZEO client keeps an on-disk cache of recently used data records + to avoid fetching those records from the server each time they are + requested. It is usually faster to read the objects from disk than it + is to fetch them over the network. The cache can also provide + read-only copies of objects during server outages. + + The cache may be persistent or transient. If the cache is persistent, + then the cache files are retained for use after process restarts. A + non-persistent cache uses temporary files that are removed when the + client storage is closed. + + The client cache size is configured when the ClientStorage is created. + The default size is 20MB, but the right size depends entirely on the + particular database. Setting the cache size too small can hurt + performance, but in most cases making it too big just wastes disk + space. + + ZEO uses invalidations for cache consistency. Every time an object is + modified, the server sends a message to each client informing it of + the change. The client will discard the object from its cache when it + receives an invalidation. (It's actually a little more complicated, + but we won't get into that here.) + + Each time a client connects to a server, it must verify that its cache + contents are still valid. (It did not receive any invalidation + messages while it was disconnected.) This involves asking the server + to replay invalidations it missed. 
If it's been disconnected too long, + it discards its cache. + + + Invalidation queue + ------------------ + + The ZEO server keeps a queue of recent invalidation messages in + memory. When a client connects to the server, it sends the timestamp + of the most recent invalidation message it has received. If that + message is still in the invalidation queue, then the server sends the + client all the missing invalidations. + + The default size of the invalidation queue is 100. If the + invalidation queue is larger, it will be more likely that a client + that reconnects will be able to verify its cache using the queue. On + the other hand, a large queue uses more memory on the server to store + the message. Invalidation messages tend to be small, perhaps a few + hundred bytes each on average; it depends on the number of objects + modified by a transaction. + + You can also provide an invalidation age when configuring the + server. In this case, if the invalidation queue is too small, but a + client has been disconnected for a time interval that is less than the + invalidation age, then invalidations are replayed by iterating over + the lower-level storage on the server. If the age is too high, and + clients are disconnected for a long time, then this can put a lot of + load on the server. + + Transaction timeouts + -------------------- + + A ZEO server can be configured to timeout a transaction if it takes + too long to complete. Only a single transaction can commit at a time; + so if one transaction takes too long, all other clients will be + delayed waiting for it. In the extreme, a client can hang during the + commit process. If the client hangs, the server will be unable to + commit other transactions until it restarts. A well-behaved client + will not hang, but the server can be configured with a transaction + timeout to guard against bugs that cause a client to hang. 
+ + If any transaction exceeds the timeout threshold, the client's + connection to the server will be closed and the transaction aborted. + Once the transaction is aborted, the server can start processing other + client's requests. Most transactions should take very little time to + commit. The timer begins for a transaction after all the data has + been sent to the server. At this point, the cost of commit should be + dominated by the cost of writing data to disk; it should be unusual + for a commit to take longer than 1 second. A transaction timeout of + 30 seconds should tolerate heavy load and slow communications between + client and server, while guarding against hung servers. + + When a transaction times out, the client can be left in an awkward + position. If the timeout occurs during the second phase of the two + phase commit, the client will log a panic message. This should only + cause problems if the client transaction involved multiple storages. + If it did, it is possible that some storages committed the client + changes and others did not. + + Connection management + --------------------- + + A ZEO client manages its connection to the ZEO server. If it loses + the connection, it attempts to reconnect. While + it is disconnected, it can satisfy some reads by using its cache. + + The client can be configured with multiple server addresses. In this + case, it assumes that each server has identical content and will use + any server that is available. It is possible to configure the client + to accept a read-only connection to one of these servers if no + read-write connection is available. If it has a read-only connection, + it will continue to poll for a read-write connection. + + If a single address resolves to multiple IPv4 or IPv6 addresses, + the client will connect to an arbitrary of these addresses. + + SSL + --- + + ZEO supports the use of SSL connections between servers and clients, + including certificate authentication. 
We're still understanding use + cases for this, so details of operation may change. + + Installing software + =================== + + ZEO is installed like any other Python package using pip, buildout, or + other Python packaging tools. + + Running the server + ================== + + Typically, the ZEO server is run using the ``runzeo`` script that's + installed as part of a ZEO installation. The ``runzeo`` script + accepts command line options, the most important of which is the + ``-C`` (``--configuration``) option. ZEO servers are best configured + via configuration files. The ``runzeo`` script also accepts some + command-line arguments for ad-hoc configurations, but there's an + easier way to run an ad-hoc server described below. For more on + configuring a ZEO server see `Server configuration`_ below. + + Server quick-start/ad-hoc operation + ----------------------------------- + + You can quickly start a ZEO server from a Python prompt:: + + import ZEO + address, stop = ZEO.server() + + This runs a ZEO server on a dynamic address and using an in-memory + storage. + + We can then create a ZEO client connection using the address + returned:: + + connection = ZEO.connection(addr) + + This is a ZODB connection for a database opened on a client storage + instance created on the fly. This is a shorthand for:: + + db = ZEO.DB(addr) + connection = db.open() + + Which is a short-hand for:: + + client_storage = ZEO.client(addr) + + import ZODB + db = ZODB.db(client_storage) + connection = db.open() + + If you exit the Python process, the storage exits as well, as it's run + in an in-process thread. + + You shut down the server more cleanly by calling the stop function + returned by the ``ZEO.server`` function. + + To have data stored persistently, you can specify a file-storage path + name using a ``path`` parameter. If you want blob support, you can + specify a blob-file directory using the ``blob_dir`` directory. 
+ + You can also supply a port to listen on, full storage configuration + and ZEO server configuration options to the ``ZEO.server`` + function. See it's documentation string for more information. + + Server configuration + -------------------- + + The script ``runzeo`` runs the ZEO server. The server can be + configured using command-line arguments or a configuration file. This + document only describes the configuration file. Run ``runzeo`` + -h to see the list of command-line arguments. + + The configuration file specifies the underlying storage the server + uses, the address it binds to, and a few other optional parameters. + An example is:: + + + address zeo.example.com:8090 + + + + path /var/tmp/Data.fs + + + + + path /var/tmp/zeo.log + format %(asctime)s %(message)s + + + + The format is similar to the Apache configuration format. Individual + settings have a name, 1 or more spaces and a value, as in:: + + address zeo.example.com:8090 + + Settings are grouped into hierarchical sections. + + The example above configures a server to use a file storage from + ``/var/tmp/Data.fs``. The server listens on port ``8090`` of + ``zeo.example.com``. The ZEO server writes its log file to + ``/var/tmp/zeo.log`` and uses a custom format for each line. Assuming the + example configuration it stored in ``zeo.config``, you can run a server by + typing:: + + runzeo -C zeo.config + + A configuration file consists of a ```` section and a storage + section, where the storage section can use any of the valid ZODB + storage types. It may also contain an event log configuration. See + `ZODB documentation `_ for information on + configuring storages. + + The ``zeo`` section must list the address. All the other keys are + optional. + + address + The address at which the server should listen. This can be in + the form 'host:port' to signify a TCP/IP connection or a + pathname string to signify a Unix domain socket connection (at + least one '/' is required). 
A hostname may be a DNS name or a + dotted IP address. If the hostname is omitted, the platform's + default behavior is used when binding the listening socket ('' + is passed to socket.bind() as the hostname portion of the + address). + + read-only + Flag indicating whether the server should operate in read-only + mode. Defaults to false. Note that even if the server is + operating in writable mode, individual storages may still be + read-only. But if the server is in read-only mode, no write + operations are allowed, even if the storages are writable. Note + that pack() is considered a read-only operation. + + invalidation-queue-size + The storage server keeps a queue of the objects modified by the + last N transactions, where N == invalidation_queue_size. This + queue is used to support client cache verification when a client + disconnects for a short period of time. + + invalidation-age + The maximum age of a client for which quick-verification + invalidations will be provided by iterating over the served + storage. This option should only be used if the served storage + supports efficient iteration from a starting point near the + end of the transaction history (e.g. end of file). + + transaction-timeout + The maximum amount of time, in seconds, to wait for a + transaction to commit after acquiring the storage lock, + specified in seconds. If the transaction takes too long, the + client connection will be closed and the transaction aborted. + + This defaults to 30 seconds. + + client-conflict-resolution + Flag indicating that clients should perform conflict + resolution. This option defaults to false. + + msgpack + Use `msgpack `_ to serialize + and de-serialize ZEO protocol messages. + + An advantage of using msgpack for ZEO communication is that + it's a tiny bit faster and a ZEO server can support Python 2 + or Python 3 clients (but not both). + + msgpack can also be enabled by setting the ``ZEO_MSGPACK`` + environment to a non-empty string. 
+ + Server SSL configuration + ~~~~~~~~~~~~~~~~~~~~~~~~ + + A server can optionally support SSL. Do do so, include a `ssl` + subsection of the ZEO section, as in:: + + + address zeo.example.com:8090 + + certificate server_certificate.pem + key server_certificate_key.pem + + + + + path /var/tmp/Data.fs + + + + + path /var/tmp/zeo.log + format %(asctime)s %(message)s + + + + The ``ssl`` section has settings: + + certificate + The path to an SSL certificate file for the server. (required) + + key + The path to the SSL key file for the server certificate (if not + included in certificate file). + + password-function + The dotted name if an importable function that, when imported, returns + the password needed to unlock the key (if the key requires a password.) + + authenticate + The path to a file or directory containing client certificates + to authenticate. ((See the ``cafile`` and ``capath`` + parameters in the Python documentation for + ``ssl.SSLContext.load_verify_locations``.) + + If this setting is used. then certificate authentication is + used to authenticate clients. A client must be configured + with one of the certificates supplied using this setting. + + This option assumes that you're using self-signed certificates. + + Running the ZEO server as a daemon + ---------------------------------- + + In an operational setting, you will want to run the ZEO server as a + daemon process that is restarted when it dies. ``runzeo`` makes no + attempt to implement a well behaved daemon. It expects that + functionality to be provided by a wrapper like `zdaemon + `_ or `supervisord + `_. + + Rotating log files + ------------------ + + ``runzeo`` will re-initialize its logging subsystem when it receives a + SIGUSR2 signal. If you are using the standard event logger, you + should first rename the log file and then send the signal to the + server. The server will continue writing to the renamed log file + until it receives the signal. 
After it receives the signal, the + server will create a new file with the old name and write to it. + + ZEO Clients + =========== + + To use a ZEO server, you need to connect to it using a ZEO client + storage. You create client storages either using a Python API or + using a ZODB storage configuration in a ZODB storage configuration + section. + + Python API for creating a ZEO client storage + -------------------------------------------- + + To create a client storage from Python, use the ``ZEO.client`` + function:: + + import ZEO + client = ZEO.client(8200) + + In the example above, we created a client that connected to a storage + listening on port 8200 on local host. The first argument is an + address, or list of addresses to connect to. There are many additional + options, documented below that should be given as keyword arguments. + + Addresses can be: + + - A host/port tuple + + - An integer, which implies that the host is '127.0.0.1' + + - A unix domain socket file name. + + Options: + + cache_size + The cache size in bytes. This defaults to a 20MB. + + cache + The ZEO cache to be used. This can be a file name, which will + cause a persistent standard persistent ZEO cache to be used and + stored in the given name. This can also be an object that + implements ``ZEO.interfaces.ICache``. + + If not specified, then a non-persistent cache will be used. + + blob_dir + The name of a directory to hold/cache blob data downloaded from the + server. This must be provided if blobs are to be used. (Of + course, the server storage must be configured to use blobs as + well.) + + shared_blob_dir + A client can use a network files system (or a local directory if + the server runs on the same machine) to share a blob directory with + the server. This allows downloading of blobs (except via a + distributed file system) to be avoided. + + blob_cache_size + The size of the blob cache in bytes. IF unset, then blobs will + accumulate. 
If set, then blobs are removed when the total size + exceeds this amount. Blobs accessed least recently are removed + first. + + blob_cache_size_check + The total size of data to be downloaded to trigger blob cache size + reduction. The default is 10 (percent). This controls how often to + remove blobs from the cache. + + ssl + An ``ssl.SSLContext`` object used to make SSL connections. + + ssl_server_hostname + Host name to use for SSL host name checks. + + If using SSL and if host name checking is enabled in the given SSL + context then use this as the value to check. If an address is a + host/port pair, then this defaults to the host in the address. + + read_only + Set to true for a read-only connection. + + If false (the default), then request a read/write connection. + + This option is ignored if ``read_only_fallback`` is set to a true value. + + read_only_fallback + Set to true, then prefer a read/write connection, but be willing to + use a read-only connection. This defaults to a false value. + + If ``read_only_fallback`` is set, then ``read_only`` is ignored. + + server_sync + Flag, false by default, indicating whether the ``sync`` method + should make a server request. The ``sync`` method is called at the + start of explicitly begin transactions. Making a server requests assures + that any invalidations outstanding at the beginning of a + transaction are processed. + + Setting this to True is important when application activity is + spread over multiple ZEO clients. The classic example of this is + when a web browser makes a request to an application server (ZEO + client) that makes a change and then makes a request to another + application server that depends on the change. + + Setting this to True makes transactions a little slower because of + the added server round trip. For transactions that don't otherwise + need to access the storage server, the impact can be significant. 
+ + wait_timeout + How long to wait for an initial connection, defaulting to 30 + seconds. If an initial connection can't be made within this time + limit, then creation of the client storage will fail with a + ``ZEO.Exceptions.ClientDisconnected`` exception. + + After the initial connection, if the client is disconnected: + + - In-flight server requests will fail with a + ``ZEO.Exceptions.ClientDisconnected`` exception. + + - New requests will block for up to ``wait_timeout`` waiting for a + connection to be established before failing with a + ``ZEO.Exceptions.ClientDisconnected`` exception. + + client_label + A short string to display in *server* logs for an event relating to + this client. This can be helpful when debugging. + + disconnect_poll + The delay in seconds between attempts to connect to the + server, in seconds. Defaults to 1 second. + + Configuration strings/files + --------------------------- + + ZODB databases and storages can be configured using configuration + files, or strings (extracted from configuration files). They use the + same syntax as the server configuration files described above, but + with different sections and options. + + An application that used ZODB might configure it's database using a + string like:: + + + cache-size-bytes 1000MB + + + path /var/lib/Data.fs + + + + In this example, we configured a ZODB database with a object cache + size of 1GB. Inside the database, we configured a file storage. The + ``filestorage`` section provided file-storage parameters. We saw a + similar section in the storage-server configuration example in `Server + configuration`_. + + To configure a client storage, you use a ``clientstorage`` section, + but first you have to import it's definition, because ZEO isn't built + into ZODB. Here's an example:: + + + cache-size-bytes 1000MB + + %import ZEO + + + server 8200 + + + + In this example, we defined a client storage that connected to a + server on port 8200. 
+ + The following settings are supported: + + cache-size + The cache size in bytes, KB or MB. This defaults to a 20MB. + Optional ``KB`` or ``MB`` suffixes can (and usually are) used to + specify units other than bytes. + + cache-path + The file path of a persistent cache file + + blob-dir + The name of a directory to hold/cache blob data downloaded from the + server. This must be provided if blobs are to be used. (Of + course, the server storage must be configured to use blobs as + well.) + + shared-blob-dir + A client can use a network files system (or a local directory if + the server runs on the same machine) to share a blob directory with + the server. This allows downloading of blobs (except via a + distributed file system) to be avoided. + + blob-cache-size + The size of the blob cache in bytes. IF unset, then blobs will + accumulate. If set, then blobs are removed when the total size + exceeds this amount. Blobs accessed least recently are removed + first. + + blob-cache-size-check + The total size of data to be downloaded to trigger blob cache size + reduction. The default is 10 (percent). This controls how often to + remove blobs from the cache. + + read-only + Set to true for a read-only connection. + + If false (the default), then request a read/write connection. + + This option is ignored if ``read_only_fallback`` is set to a true value. + + read-only-fallback + Set to true, then prefer a read/write connection, but be willing to + use a read-only connection. This defaults to a false value. + + If ``read_only_fallback`` is set, then ``read_only`` is ignored. + + server-sync + Sets the ``server_sync`` option described above. + + wait_timeout + How long to wait for an initial connection, defaulting to 30 + seconds. If an initial connection can't be made within this time + limit, then creation of the client storage will fail with a + ``ZEO.Exceptions.ClientDisconnected`` exception. 
+ + After the initial connection, if the client is disconnected: + + - In-flight server requests will fail with a + ``ZEO.Exceptions.ClientDisconnected`` exception. + + - New requests will block for up to ``wait_timeout`` waiting for a + connection to be established before failing with a + ``ZEO.Exceptions.ClientDisconnected`` exception. + + client_label + A short string to display in *server* logs for an event relating to + this client. This can be helpful when debugging. + + disconnect_poll + The delay in seconds between attempts to connect to the + server, in seconds. Defaults to 1 second. + + Client SSL configuration + ~~~~~~~~~~~~~~~~~~~~~~~~ + + An ``ssl`` subsection can be used to enable and configure SSL, as in:: + + %import ZEO + + + server zeo.example.com8200 + + + + + In the example above, SSL is enabled in it's simplest form: + + - The client expects the server to have a signed certificate, which the + client validates. + + - The server server host name ``zeo.example.com`` is checked against + the server's certificate. + + A number of settings can be provided to configure SSL: + + certificate + The path to an SSL certificate file for the client. This is + needed to allow the server to authenticate the client. + + key + The path to the SSL key file for the client certificate (if not + included in the certificate file). + + password-function + A dotted name if an importable function that, when imported, returns + the password needed to unlock the key (if the key requires a password.) + + authenticate + The path to a file or directory containing server certificates + to authenticate. ((See the ``cafile`` and ``capath`` + parameters in the Python documentation for + ``ssl.SSLContext.load_verify_locations``.) + + If this setting is used. then certificate authentication is + used to authenticate the server. The server must be configured + with one of the certificates supplied using this setting. 
+ + check-hostname + This is a boolean setting that defaults to true. Verify the + host name in the server certificate is as expected. + + server-hostname + The expected server host name. This defaults to the host name + used in the server address. This option must be used when + ``check-hostname`` is true and when a server address has no host + name (localhost, or unix domain socket) or when there is more + than one server and server hostnames differ. + + Using this setting implies a true value for the ``check-hostname`` setting. + + Changelog + ========= + + 5.2.0 (2018-03-28) + ------------------ + + - Fixed: The quickstart/ad-hoc/play ZEO server relied on test + dependencies. See `issue 105 + `_. + + - Disallow passing strings as addresses to ClientStorage under Windows + because string addresses are used for unix-domain sockets, which + aren't supported on Windows. See `issue 107 + `_. + + - Renamed all ``async`` attributes to ``async_`` for compatibility + with Python 3.7. See `issue 104 + `_. + + - Fixed to work with some changes made in ZODB 5.4.0. + + Client-side updates are incuded for ZODB 5.4.0 or databases that + already had ``zodbpickle.binary`` OIDs. See `issue 113 + `_. + + - ZEO now uses pickle protocol 3 for both Python 2 and Python 3. + (Previously protocol 1 was used for Python 2.) This matches the + change in ZODB 5.4.0. + + 5.1.2 (2018-03-27) + ------------------ + + - Fix: ZEO didn't work with a change in ZODB 5.4.0. + + (Allow ``zodbpickle.binary`` to be used in RPC requests, which is + necessary for compatibility with ZODB 5.4.0 on Python 2. See `issue + 107 `_.) + + 5.1.1 (2017-12-18) + ------------------ + + - All classes are new-style classes on Python 2 (they were already + new-style on Python 3). This improves performance on PyPy. See + `issue 86 `_. + + - Fixed removing UNIX socket files under Python 2 with ZConfig 3.2.0. + See `issue 90 `_. 
+ + 5.1.0 (2017-04-03) + ------------------ + + - Added support for serializing ZEO messages using `msgpack + `_ rather than pickle. This helps + pave the way to supporting `byteserver + `_, but it also allows ZEO + servers to support Python 2 or 3 clients (but not both at the same + time) and may provide a small performance improvement. + + - Possibly fixed the deprecated and untested zeoctl script. + + - Removed zeopasswd, which no longer makes sense given that ZEO + authentication was removed, in favor of SSL. + + 5.0.4 (2016-11-18) + ------------------ + + - Fixed: ZEO needed changes to work with recent transaction changes. + + ZEO now works with the latest versions of ZODB and transaction + + 5.0.3 (2016-11-18) + ------------------ + + - Temporarily require non-quite-current versions of ZODB and + transaction until we can sort out some recent breakage. + + 5.0.2 (2016-11-02) + ------------------ + + - Provide much better performance on Python 2. + + - Provide better error messages when pip tries to install ZEO on an + unsupported Python version. See `issue 75 + `_. + + 5.0.1 (2016-09-06) + ------------------ + + Packaging-related doc fix + + 5.0.0 (2016-09-06) + ------------------ + + This is a major ZEO revision, which replaces the ZEO network protocol + implementation. + + New features: + + - SSL support + + - Optional client-side conflict resolution. + + - Lots of mostly internal clean ups. + + - ``ClientStorage``server-sync`` configuration option and + ``server_sync`` constructor argument to force a server round trip at + the beginning of transactions to wait for any outstanding + invalidations at the start of the transaction to be delivered. + + - Client disconnect errors are now transient errors. When + applications retry jobs that raise transient errors, jobs (e.g. web + requests) with disconnect errors will be retried. 
Together with + blocking synchronous ZEO server calls for a limited time while + disconnected, this change should allow brief disconnections due to + server restart to avoid generating client-visible errors (e.g. 500 + web responses). + + - ClientStorage prefetch method to prefetch oids. + + When oids are prefetched, requests are made at once, but the caller + doesn't block waiting for the results. Rather, then the caller + later tries to fetch data for one of the object ids, it's either + delivered right away from the ZEO cache, if the prefetch for the + object id has completed, or the caller blocks until the inflight + prefetch completes. (No new request is made.) + + Dropped features: + + - The ZEO authentication protocol. + + This will be replaced by new authentication mechanims leveraging SSL. + + - The ZEO monitor server. + + - Full cache verification. + + - Client suppprt for servers older than ZODB 3.9 + + - Server support for clients older than ZEO 4.2.0 + + 5.0.0b0 (2016-08-18) + -------------------- + + - Added a ``ClientStorage`` ``server-sync`` configuration option and + ``server_sync`` constructor argument to force a server round trip at + the beginning of transactions to wait for any outstanding + invalidations at the start of the transaction to be delivered. + + - When creating an ad hoc server, a log file isn't created by + default. You must pass a ``log`` option specifying a log file name. + + - The ZEO server register method now returns the storage last + transaction, allowing the client to avoid an extra round trip during + cache verification. + + - Client disconnect errors are now transient errors. When + applications retry jobs that raise transient errors, jobs (e.g. web + requests) with disconnect errors will be retried. Together with + blocking synchronous ZEO server calls for a limited time while + disconnected, this change should allow brief disconnections due to + server restart to avoid generating client-visible errors (e.g. 
500 + web responses). + + - Fixed bugs in using the ZEO 5 client with ZEO 4 servers. + + 5.0.0a2 (2016-07-30) + -------------------- + + - Added the ability to pass credentials when creating client storages. + + This is experimental in that passing credentials will cause + connections to an ordinary ZEO server to fail, but it facilitates + experimentation with custom ZEO servers. Doing this with custom ZEO + clients would have been awkward due to the many levels of + composition involved. + + In the future, we expect to support server security plugins that + consume credentials for authentication (typically over SSL). + + Note that credentials are opaque to ZEO. They can be any object with + a true value. The client mearly passes them to the server, which + will someday pass them to a plugin. + + 5.0.0a1 (2016-07-21) + -------------------- + + - Added a ClientStorage prefetch method to prefetch oids. + + When oids are prefetched, requests are made at once, but the caller + doesn't block waiting for the results. Rather, then the caller + later tries to fetch data for one of the object ids, it's either + delivered right away from the ZEO cache, if the prefetch for the + object id has completed, or the caller blocks until the inflight + prefetch completes. (No new request is made.) + + - Fixed: SSL clients of servers with signed certs didn't load default + certs and were unable to connect. + + 5.0.0a0 (2016-07-08) + -------------------- + + This is a major ZEO revision, which replaces the ZEO network protocol + implementation. + + New features: + + - SSL support + + - Optional client-side conflict resolution. + + - Lots of mostly internal clean ups. + + Dropped features: + + - The ZEO authentication protocol. + + This will be replaced by new authentication mechanims leveraging SSL. + + - The ZEO monitor server. + + - Full cache verification. 
+ + - Client suppprt for servers older than ZODB 3.9 + + - Server support for clients older than ZEO 4.2.0 + + 4.2.0 (2016-06-15) + ------------------ + + - Changed loadBefore to operate more like load behaved, especially + with regard to the load lock. This allowes ZEO to work with the + upcoming ZODB 5, which used loadbefore rather than load. + + Reimplemented load using loadBefore, thus testing loadBefore + extensively via existing tests. + + - Other changes to work with ZODB 5 (as well as ZODB 4) + + - Fixed: the ZEO cache loadBefore method failed to utilize current data. + + - Drop support for Python 2.6 and 3.2. + + - Fix AttributeError: 'ZEOServer' object has no attribute 'server' when + StorageServer creation fails. + + 4.2.0b1 (2015-06-05) + -------------------- + + - Add support for PyPy. + + 4.1.0 (2015-01-06) + ------------------ + + - Add support for Python 3.4. + + - Added a new ``ruok`` client protocol for getting server status on + the ZEO port without creating a full-blown client connection and + without logging in the server log. + + - Log errors on server side even if using multi threaded delay. + + 4.0.0 (2013-08-18) + ------------------ + + - Avoid reading excess random bytes when setting up an auth_digest session. + + - Optimize socket address enumeration in ZEO client (avoid non-TCP types). + + - Improve Travis CI testing support. + + - Assign names to all threads for better runtime debugging. + + - Fix "assignment to keyword" error under Py3k in 'ZEO.scripts.zeoqueue'. + + 4.0.0b1 (2013-05-20) + -------------------- + + - Depend on ZODB >= 4.0.0b2 + + - Add support for Python 3.2 / 3.3. + + 4.0.0a1 (2012-11-19) + -------------------- + + First (in a long time) separate ZEO release. + + Since ZODB 3.10.5: + + - Storage servers now emit Serving and Closed events so subscribers + can discover addresses when dynamic port assignment (bind to port 0) + is used. 
This could, for example, be used to update address + information in a ZooKeeper database. + + - Client storages have a method, new_addr, that can be used to change + the server address(es). This can be used, for example, to update a + dynamically determined server address from information in a + ZooKeeper database. + +Keywords: database,zodb +Platform: any +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: Zope Public License +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Topic :: Database +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Classifier: Operating System :: Microsoft :: Windows +Classifier: Operating System :: Unix +Classifier: Framework :: ZODB +Requires-Python: >=2.7.9,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.* +Provides-Extra: test +Provides-Extra: uvloop +Provides-Extra: msgpack diff --git a/thesisenv/lib/python3.6/site-packages/ZEO-5.2.0-py3.6.egg-info/SOURCES.txt b/thesisenv/lib/python3.6/site-packages/ZEO-5.2.0-py3.6.egg-info/SOURCES.txt new file mode 100644 index 0000000..ab9002d --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO-5.2.0-py3.6.egg-info/SOURCES.txt @@ -0,0 +1,141 @@ +.travis.yml +CHANGES.rst +COPYING +COPYRIGHT.txt +LICENSE.txt +MANIFEST.in +README.rst +asyncio-todo.rst +bootstrap.py +buildout.cfg +log.ini +perf.py +pip-delete-this-directory.txt +release.py +setup.cfg +setup.py +tox.ini +doc/HOWTO-Blobs-NFS.txt +doc/zeo-client-cache-tracing.txt +doc/zeo-client-cache.txt +src/ZEO/ClientStorage.py +src/ZEO/Exceptions.py +src/ZEO/StorageServer.py +src/ZEO/TransactionBuffer.py +src/ZEO/__init__.py +src/ZEO/_compat.py 
+src/ZEO/_forker.py +src/ZEO/cache.py +src/ZEO/component.xml +src/ZEO/hash.py +src/ZEO/interfaces.py +src/ZEO/monitor.py +src/ZEO/nagios.py +src/ZEO/nagios.rst +src/ZEO/ordering.rst +src/ZEO/protocol.txt +src/ZEO/runzeo.py +src/ZEO/schema.xml +src/ZEO/server.xml +src/ZEO/shortrepr.py +src/ZEO/util.py +src/ZEO/version.txt +src/ZEO/zconfig.py +src/ZEO/zeoctl.py +src/ZEO/zeoctl.xml +src/ZEO.egg-info/PKG-INFO +src/ZEO.egg-info/SOURCES.txt +src/ZEO.egg-info/dependency_links.txt +src/ZEO.egg-info/entry_points.txt +src/ZEO.egg-info/not-zip-safe +src/ZEO.egg-info/requires.txt +src/ZEO.egg-info/top_level.txt +src/ZEO/asyncio/README.rst +src/ZEO/asyncio/__init__.py +src/ZEO/asyncio/base.py +src/ZEO/asyncio/client.py +src/ZEO/asyncio/compat.py +src/ZEO/asyncio/marshal.py +src/ZEO/asyncio/mtacceptor.py +src/ZEO/asyncio/server.py +src/ZEO/asyncio/testing.py +src/ZEO/asyncio/tests.py +src/ZEO/scripts/README.txt +src/ZEO/scripts/__init__.py +src/ZEO/scripts/cache_simul.py +src/ZEO/scripts/cache_stats.py +src/ZEO/scripts/parsezeolog.py +src/ZEO/scripts/tests.py +src/ZEO/scripts/timeout.py +src/ZEO/scripts/zeopack.py +src/ZEO/scripts/zeopack.test +src/ZEO/scripts/zeoqueue.py +src/ZEO/scripts/zeoreplay.py +src/ZEO/scripts/zeoserverlog.py +src/ZEO/scripts/zeoup.py +src/ZEO/tests/Cache.py +src/ZEO/tests/CommitLockTests.py +src/ZEO/tests/ConnectionTests.py +src/ZEO/tests/InvalidationTests.py +src/ZEO/tests/IterationTests.py +src/ZEO/tests/TestThread.py +src/ZEO/tests/ThreadTests.py +src/ZEO/tests/__init__.py +src/ZEO/tests/client-config.test +src/ZEO/tests/client.pem +src/ZEO/tests/client_key.pem +src/ZEO/tests/drop_cache_rather_than_verify.txt +src/ZEO/tests/dynamic_server_ports.test +src/ZEO/tests/forker.py +src/ZEO/tests/invalidation-age.txt +src/ZEO/tests/new_addr.test +src/ZEO/tests/protocols.test +src/ZEO/tests/server.pem +src/ZEO/tests/server.pem.csr +src/ZEO/tests/server_key.pem +src/ZEO/tests/serverpw.pem +src/ZEO/tests/serverpw_key.pem +src/ZEO/tests/servertesting.py 
+src/ZEO/tests/speed.py +src/ZEO/tests/stress.py +src/ZEO/tests/testConfig.py +src/ZEO/tests/testConnection.py +src/ZEO/tests/testConversionSupport.py +src/ZEO/tests/testTransactionBuffer.py +src/ZEO/tests/testZEO.py +src/ZEO/tests/testZEO2.py +src/ZEO/tests/testZEOOptions.py +src/ZEO/tests/testZEOServer.py +src/ZEO/tests/test_cache.py +src/ZEO/tests/test_client_credentials.py +src/ZEO/tests/test_client_side_conflict_resolution.py +src/ZEO/tests/test_marshal.py +src/ZEO/tests/test_sync.py +src/ZEO/tests/testssl.py +src/ZEO/tests/threaded.py +src/ZEO/tests/utils.py +src/ZEO/tests/zdoptions.test +src/ZEO/tests/zeo-fan-out.test +src/ZEO/tests/zeo_blob_cache.test +src/ZEO/tests/ZEO4/README.rst +src/ZEO/tests/ZEO4/StorageServer.py +src/ZEO/tests/ZEO4/__init__.py +src/ZEO/tests/ZEO4/component.xml +src/ZEO/tests/ZEO4/hash.py +src/ZEO/tests/ZEO4/monitor.py +src/ZEO/tests/ZEO4/runzeo.py +src/ZEO/tests/ZEO4/schema.xml +src/ZEO/tests/ZEO4/auth/__init__.py +src/ZEO/tests/ZEO4/auth/auth_digest.py +src/ZEO/tests/ZEO4/auth/base.py +src/ZEO/tests/ZEO4/auth/hmac.py +src/ZEO/tests/ZEO4/zrpc/__init__.py +src/ZEO/tests/ZEO4/zrpc/_hmac.py +src/ZEO/tests/ZEO4/zrpc/client.py +src/ZEO/tests/ZEO4/zrpc/connection.py +src/ZEO/tests/ZEO4/zrpc/error.py +src/ZEO/tests/ZEO4/zrpc/log.py +src/ZEO/tests/ZEO4/zrpc/marshal.py +src/ZEO/tests/ZEO4/zrpc/server.py +src/ZEO/tests/ZEO4/zrpc/smac.py +src/ZEO/tests/ZEO4/zrpc/trigger.py \ No newline at end of file diff --git a/thesisenv/lib/python3.6/site-packages/ZEO-5.2.0-py3.6.egg-info/dependency_links.txt b/thesisenv/lib/python3.6/site-packages/ZEO-5.2.0-py3.6.egg-info/dependency_links.txt new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO-5.2.0-py3.6.egg-info/dependency_links.txt @@ -0,0 +1 @@ + diff --git a/thesisenv/lib/python3.6/site-packages/ZEO-5.2.0-py3.6.egg-info/entry_points.txt b/thesisenv/lib/python3.6/site-packages/ZEO-5.2.0-py3.6.egg-info/entry_points.txt new file mode 100644 index 
0000000..7f9f735 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO-5.2.0-py3.6.egg-info/entry_points.txt @@ -0,0 +1,7 @@ + + [console_scripts] + zeopack = ZEO.scripts.zeopack:main + runzeo = ZEO.runzeo:main + zeoctl = ZEO.zeoctl:main + zeo-nagios = ZEO.nagios:main + \ No newline at end of file diff --git a/thesisenv/lib/python3.6/site-packages/ZEO-5.2.0-py3.6.egg-info/installed-files.txt b/thesisenv/lib/python3.6/site-packages/ZEO-5.2.0-py3.6.egg-info/installed-files.txt new file mode 100644 index 0000000..ac084ea --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO-5.2.0-py3.6.egg-info/installed-files.txt @@ -0,0 +1,209 @@ +../../../../bin/runzeo +../../../../bin/zeo-nagios +../../../../bin/zeoctl +../../../../bin/zeopack +../ZEO/ClientStorage.py +../ZEO/Exceptions.py +../ZEO/StorageServer.py +../ZEO/TransactionBuffer.py +../ZEO/__init__.py +../ZEO/__pycache__/ClientStorage.cpython-36.pyc +../ZEO/__pycache__/Exceptions.cpython-36.pyc +../ZEO/__pycache__/StorageServer.cpython-36.pyc +../ZEO/__pycache__/TransactionBuffer.cpython-36.pyc +../ZEO/__pycache__/__init__.cpython-36.pyc +../ZEO/__pycache__/_compat.cpython-36.pyc +../ZEO/__pycache__/_forker.cpython-36.pyc +../ZEO/__pycache__/cache.cpython-36.pyc +../ZEO/__pycache__/hash.cpython-36.pyc +../ZEO/__pycache__/interfaces.cpython-36.pyc +../ZEO/__pycache__/monitor.cpython-36.pyc +../ZEO/__pycache__/nagios.cpython-36.pyc +../ZEO/__pycache__/runzeo.cpython-36.pyc +../ZEO/__pycache__/shortrepr.cpython-36.pyc +../ZEO/__pycache__/util.cpython-36.pyc +../ZEO/__pycache__/zconfig.cpython-36.pyc +../ZEO/__pycache__/zeoctl.cpython-36.pyc +../ZEO/_compat.py +../ZEO/_forker.py +../ZEO/asyncio/README.rst +../ZEO/asyncio/__init__.py +../ZEO/asyncio/__pycache__/__init__.cpython-36.pyc +../ZEO/asyncio/__pycache__/base.cpython-36.pyc +../ZEO/asyncio/__pycache__/client.cpython-36.pyc +../ZEO/asyncio/__pycache__/compat.cpython-36.pyc +../ZEO/asyncio/__pycache__/marshal.cpython-36.pyc 
+../ZEO/asyncio/__pycache__/mtacceptor.cpython-36.pyc +../ZEO/asyncio/__pycache__/server.cpython-36.pyc +../ZEO/asyncio/__pycache__/testing.cpython-36.pyc +../ZEO/asyncio/__pycache__/tests.cpython-36.pyc +../ZEO/asyncio/base.py +../ZEO/asyncio/client.py +../ZEO/asyncio/compat.py +../ZEO/asyncio/marshal.py +../ZEO/asyncio/mtacceptor.py +../ZEO/asyncio/server.py +../ZEO/asyncio/testing.py +../ZEO/asyncio/tests.py +../ZEO/cache.py +../ZEO/component.xml +../ZEO/hash.py +../ZEO/interfaces.py +../ZEO/monitor.py +../ZEO/nagios.py +../ZEO/nagios.rst +../ZEO/ordering.rst +../ZEO/protocol.txt +../ZEO/runzeo.py +../ZEO/schema.xml +../ZEO/scripts/README.txt +../ZEO/scripts/__init__.py +../ZEO/scripts/__pycache__/__init__.cpython-36.pyc +../ZEO/scripts/__pycache__/cache_simul.cpython-36.pyc +../ZEO/scripts/__pycache__/cache_stats.cpython-36.pyc +../ZEO/scripts/__pycache__/parsezeolog.cpython-36.pyc +../ZEO/scripts/__pycache__/tests.cpython-36.pyc +../ZEO/scripts/__pycache__/timeout.cpython-36.pyc +../ZEO/scripts/__pycache__/zeopack.cpython-36.pyc +../ZEO/scripts/__pycache__/zeoqueue.cpython-36.pyc +../ZEO/scripts/__pycache__/zeoreplay.cpython-36.pyc +../ZEO/scripts/__pycache__/zeoserverlog.cpython-36.pyc +../ZEO/scripts/__pycache__/zeoup.cpython-36.pyc +../ZEO/scripts/cache_simul.py +../ZEO/scripts/cache_stats.py +../ZEO/scripts/parsezeolog.py +../ZEO/scripts/tests.py +../ZEO/scripts/timeout.py +../ZEO/scripts/zeopack.py +../ZEO/scripts/zeopack.test +../ZEO/scripts/zeoqueue.py +../ZEO/scripts/zeoreplay.py +../ZEO/scripts/zeoserverlog.py +../ZEO/scripts/zeoup.py +../ZEO/server.xml +../ZEO/shortrepr.py +../ZEO/tests/Cache.py +../ZEO/tests/CommitLockTests.py +../ZEO/tests/ConnectionTests.py +../ZEO/tests/InvalidationTests.py +../ZEO/tests/IterationTests.py +../ZEO/tests/TestThread.py +../ZEO/tests/ThreadTests.py +../ZEO/tests/ZEO4/README.rst +../ZEO/tests/ZEO4/StorageServer.py +../ZEO/tests/ZEO4/__init__.py +../ZEO/tests/ZEO4/__pycache__/StorageServer.cpython-36.pyc 
+../ZEO/tests/ZEO4/__pycache__/__init__.cpython-36.pyc +../ZEO/tests/ZEO4/__pycache__/hash.cpython-36.pyc +../ZEO/tests/ZEO4/__pycache__/monitor.cpython-36.pyc +../ZEO/tests/ZEO4/__pycache__/runzeo.cpython-36.pyc +../ZEO/tests/ZEO4/auth/__init__.py +../ZEO/tests/ZEO4/auth/__pycache__/__init__.cpython-36.pyc +../ZEO/tests/ZEO4/auth/__pycache__/auth_digest.cpython-36.pyc +../ZEO/tests/ZEO4/auth/__pycache__/base.cpython-36.pyc +../ZEO/tests/ZEO4/auth/__pycache__/hmac.cpython-36.pyc +../ZEO/tests/ZEO4/auth/auth_digest.py +../ZEO/tests/ZEO4/auth/base.py +../ZEO/tests/ZEO4/auth/hmac.py +../ZEO/tests/ZEO4/component.xml +../ZEO/tests/ZEO4/hash.py +../ZEO/tests/ZEO4/monitor.py +../ZEO/tests/ZEO4/runzeo.py +../ZEO/tests/ZEO4/schema.xml +../ZEO/tests/ZEO4/zrpc/__init__.py +../ZEO/tests/ZEO4/zrpc/__pycache__/__init__.cpython-36.pyc +../ZEO/tests/ZEO4/zrpc/__pycache__/_hmac.cpython-36.pyc +../ZEO/tests/ZEO4/zrpc/__pycache__/client.cpython-36.pyc +../ZEO/tests/ZEO4/zrpc/__pycache__/connection.cpython-36.pyc +../ZEO/tests/ZEO4/zrpc/__pycache__/error.cpython-36.pyc +../ZEO/tests/ZEO4/zrpc/__pycache__/log.cpython-36.pyc +../ZEO/tests/ZEO4/zrpc/__pycache__/marshal.cpython-36.pyc +../ZEO/tests/ZEO4/zrpc/__pycache__/server.cpython-36.pyc +../ZEO/tests/ZEO4/zrpc/__pycache__/smac.cpython-36.pyc +../ZEO/tests/ZEO4/zrpc/__pycache__/trigger.cpython-36.pyc +../ZEO/tests/ZEO4/zrpc/_hmac.py +../ZEO/tests/ZEO4/zrpc/client.py +../ZEO/tests/ZEO4/zrpc/connection.py +../ZEO/tests/ZEO4/zrpc/error.py +../ZEO/tests/ZEO4/zrpc/log.py +../ZEO/tests/ZEO4/zrpc/marshal.py +../ZEO/tests/ZEO4/zrpc/server.py +../ZEO/tests/ZEO4/zrpc/smac.py +../ZEO/tests/ZEO4/zrpc/trigger.py +../ZEO/tests/__init__.py +../ZEO/tests/__pycache__/Cache.cpython-36.pyc +../ZEO/tests/__pycache__/CommitLockTests.cpython-36.pyc +../ZEO/tests/__pycache__/ConnectionTests.cpython-36.pyc +../ZEO/tests/__pycache__/InvalidationTests.cpython-36.pyc +../ZEO/tests/__pycache__/IterationTests.cpython-36.pyc 
+../ZEO/tests/__pycache__/TestThread.cpython-36.pyc +../ZEO/tests/__pycache__/ThreadTests.cpython-36.pyc +../ZEO/tests/__pycache__/__init__.cpython-36.pyc +../ZEO/tests/__pycache__/forker.cpython-36.pyc +../ZEO/tests/__pycache__/servertesting.cpython-36.pyc +../ZEO/tests/__pycache__/speed.cpython-36.pyc +../ZEO/tests/__pycache__/stress.cpython-36.pyc +../ZEO/tests/__pycache__/testConfig.cpython-36.pyc +../ZEO/tests/__pycache__/testConnection.cpython-36.pyc +../ZEO/tests/__pycache__/testConversionSupport.cpython-36.pyc +../ZEO/tests/__pycache__/testTransactionBuffer.cpython-36.pyc +../ZEO/tests/__pycache__/testZEO.cpython-36.pyc +../ZEO/tests/__pycache__/testZEO2.cpython-36.pyc +../ZEO/tests/__pycache__/testZEOOptions.cpython-36.pyc +../ZEO/tests/__pycache__/testZEOServer.cpython-36.pyc +../ZEO/tests/__pycache__/test_cache.cpython-36.pyc +../ZEO/tests/__pycache__/test_client_credentials.cpython-36.pyc +../ZEO/tests/__pycache__/test_client_side_conflict_resolution.cpython-36.pyc +../ZEO/tests/__pycache__/test_marshal.cpython-36.pyc +../ZEO/tests/__pycache__/test_sync.cpython-36.pyc +../ZEO/tests/__pycache__/testssl.cpython-36.pyc +../ZEO/tests/__pycache__/threaded.cpython-36.pyc +../ZEO/tests/__pycache__/utils.cpython-36.pyc +../ZEO/tests/client-config.test +../ZEO/tests/client.pem +../ZEO/tests/client_key.pem +../ZEO/tests/drop_cache_rather_than_verify.txt +../ZEO/tests/dynamic_server_ports.test +../ZEO/tests/forker.py +../ZEO/tests/invalidation-age.txt +../ZEO/tests/new_addr.test +../ZEO/tests/protocols.test +../ZEO/tests/server.pem +../ZEO/tests/server.pem.csr +../ZEO/tests/server_key.pem +../ZEO/tests/serverpw.pem +../ZEO/tests/serverpw_key.pem +../ZEO/tests/servertesting.py +../ZEO/tests/speed.py +../ZEO/tests/stress.py +../ZEO/tests/testConfig.py +../ZEO/tests/testConnection.py +../ZEO/tests/testConversionSupport.py +../ZEO/tests/testTransactionBuffer.py +../ZEO/tests/testZEO.py +../ZEO/tests/testZEO2.py +../ZEO/tests/testZEOOptions.py 
+../ZEO/tests/testZEOServer.py +../ZEO/tests/test_cache.py +../ZEO/tests/test_client_credentials.py +../ZEO/tests/test_client_side_conflict_resolution.py +../ZEO/tests/test_marshal.py +../ZEO/tests/test_sync.py +../ZEO/tests/testssl.py +../ZEO/tests/threaded.py +../ZEO/tests/utils.py +../ZEO/tests/zdoptions.test +../ZEO/tests/zeo-fan-out.test +../ZEO/tests/zeo_blob_cache.test +../ZEO/util.py +../ZEO/version.txt +../ZEO/zconfig.py +../ZEO/zeoctl.py +../ZEO/zeoctl.xml +PKG-INFO +SOURCES.txt +dependency_links.txt +entry_points.txt +not-zip-safe +requires.txt +top_level.txt diff --git a/thesisenv/lib/python3.6/site-packages/ZEO-5.2.0-py3.6.egg-info/not-zip-safe b/thesisenv/lib/python3.6/site-packages/ZEO-5.2.0-py3.6.egg-info/not-zip-safe new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO-5.2.0-py3.6.egg-info/not-zip-safe @@ -0,0 +1 @@ + diff --git a/thesisenv/lib/python3.6/site-packages/ZEO-5.2.0-py3.6.egg-info/requires.txt b/thesisenv/lib/python3.6/site-packages/ZEO-5.2.0-py3.6.egg-info/requires.txt new file mode 100644 index 0000000..ad03530 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO-5.2.0-py3.6.egg-info/requires.txt @@ -0,0 +1,26 @@ +ZODB>=5.1.1 +six +transaction>=2.0.3 +persistent>=4.1.0 +zc.lockfile +ZConfig +zdaemon +zope.interface + +[:python_version == "2.7"] +futures +trollius + +[msgpack] +msgpack-python + +[test] +zope.testing +manuel +random2 +mock +msgpack-python +zope.testrunner + +[uvloop] +uvloop>=0.5.1 diff --git a/thesisenv/lib/python3.6/site-packages/ZEO-5.2.0-py3.6.egg-info/top_level.txt b/thesisenv/lib/python3.6/site-packages/ZEO-5.2.0-py3.6.egg-info/top_level.txt new file mode 100644 index 0000000..306bb40 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO-5.2.0-py3.6.egg-info/top_level.txt @@ -0,0 +1 @@ +ZEO diff --git a/thesisenv/lib/python3.6/site-packages/ZEO/ClientStorage.py b/thesisenv/lib/python3.6/site-packages/ZEO/ClientStorage.py new file mode 100644 
index 0000000..4dfb494 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO/ClientStorage.py @@ -0,0 +1,1275 @@ +############################################################################## +# +# Copyright (c) 2001, 2002, 2003 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## +"""The ClientStorage class and the exceptions that it may raise. + +Public contents of this module: + +ClientStorage -- the main class, implementing the Storage API + +""" +import logging +import os +import re +import socket +import stat +import sys +import threading +import time +import weakref +from binascii import hexlify + +import BTrees.OOBTree + +import zc.lockfile +import ZODB +import ZODB.BaseStorage +import ZODB.ConflictResolution +import ZODB.interfaces +import zope.interface +import six + +from persistent.TimeStamp import TimeStamp +from ZEO._compat import get_ident +from ZEO._compat import WIN +from ZEO.Exceptions import ClientDisconnected +from ZEO.TransactionBuffer import TransactionBuffer +from ZODB import POSException +from ZODB import utils + +import ZEO.asyncio.client +import ZEO.cache + +logger = logging.getLogger(__name__) + +def tid2time(tid): + return str(TimeStamp(tid)) + +def get_timestamp(prev_ts=None): + """Internal helper to return a unique TimeStamp instance. + + If the optional argument is not None, it must be a TimeStamp; the + return value is then guaranteed to be at least 1 microsecond later + the argument. 
+ + """ + t = time.time() + t = TimeStamp(*time.gmtime(t)[:5] + (t % 60,)) + if prev_ts is not None: + t = t.laterThan(prev_ts) + return t + +MB = 1024**2 + +@zope.interface.implementer(ZODB.interfaces.IMultiCommitStorage) +class ClientStorage(ZODB.ConflictResolution.ConflictResolvingStorage): + """A storage class that is a network client to a remote storage. + + This is a faithful implementation of the Storage API. + + This class is thread-safe; transactions are serialized in + tpc_begin(). + + """ + + def __init__(self, addr, storage='1', cache_size=20 * MB, + name='', wait_timeout=None, + disconnect_poll=None, + read_only=0, read_only_fallback=0, + blob_dir=None, shared_blob_dir=False, + blob_cache_size=None, blob_cache_size_check=10, + client_label=None, + cache=None, + ssl = None, ssl_server_hostname=None, + # Mostly ignored backward-compatability options + client=None, var=None, + min_disconnect_poll=1, max_disconnect_poll=None, + wait=True, + drop_cache_rather_verify=True, + credentials=None, + server_sync=False, + # The ZODB-define ZConfig support may ball these: + username=None, password=None, realm=None, + # For tests: + _client_factory=ZEO.asyncio.client.ClientThread, + ): + """ClientStorage constructor. + + This is typically invoked from a custom_zodb.py file. + + All arguments except addr should be keyword arguments. + Arguments: + + addr + The server address(es). This is either a list of + addresses or a single address. Each address can be a + (hostname, port) tuple to signify a TCP/IP connection or + a pathname string to signify a Unix domain socket + connection. A hostname may be a DNS name or a dotted IP + address. Required. + + storage + The server storage name, defaulting to '1'. The name must + match one of the storage names supported by the server(s) + specified by the addr argument. + + cache_size + The disk cache size, defaulting to 20 megabytes. + This is passed to the ClientCache constructor. 
+ + name + The storage name, defaulting to a combination of the + address and the server storage name. This is used to + construct the response to getName() + + cache + A cache object or a name, relative to the current working + directory, used to construct persistent cache filenames. + Defaults to None, in which case the cache is not + persistent. See ClientCache for more info. + + wait_timeout + Maximum time to wait for results, including connecting. + + read_only + A flag indicating whether this should be a + read-only storage, defaulting to false (i.e. writing is + allowed by default). + + read_only_fallback + A flag indicating whether a read-only + remote storage should be acceptable as a fallback when no + writable storages are available. Defaults to false. At + most one of read_only and read_only_fallback should be + true. + + blob_dir + directory path for blob data. 'blob data' is data that + is retrieved via the loadBlob API. + + shared_blob_dir + Flag whether the blob_dir is a server-shared filesystem + that should be used instead of transferring blob data over + ZEO protocol. + + blob_cache_size + Maximum size of the ZEO blob cache, in bytes. If not set, then + the cache size isn't checked and the blob directory will + grow without bound. + + This option is ignored if shared_blob_dir is true. + + blob_cache_size_check + ZEO check size as percent of blob_cache_size. The ZEO + cache size will be checked when this many bytes have been + loaded into the cache. Defaults to 10% of the blob cache + size. This option is ignored if shared_blob_dir is true. + + client_label + A label to include in server log messages for the client. + + Note that the authentication protocol is defined by the server + and is detected by the ClientStorage upon connecting (see + testConnection() and doAuth() for details). 
+ + """ + + assert not username or password or realm + + if isinstance(addr, int): + addr = ('127.0.0.1', addr) + + self.__name__ = name or str(addr) # Standard convention for storages + + if isinstance(addr, six.string_types): + if WIN: + raise ValueError("Unix sockets are not available on Windows") + addr = [addr] + elif (isinstance(addr, tuple) and len(addr) == 2 and + isinstance(addr[0], six.string_types) and isinstance(addr[1], int)): + addr = [addr] + + logger.info( + "%s %s (pid=%d) created %s/%s for storage: %r", + self.__name__, + self.__class__.__name__, + os.getpid(), + read_only and "RO" or "RW", + read_only_fallback and "fallback" or "normal", + storage, + ) + + self._is_read_only = read_only + self._read_only_fallback = read_only_fallback + + self._addr = addr # For tests + + self._iterators = weakref.WeakValueDictionary() + self._iterator_ids = set() + self._storage = storage + + # _server_addr is used by sortKey() + self._server_addr = None + + self._client_label = client_label + + self._info = {'length': 0, 'size': 0, 'name': 'ZEO Client', + 'supportsUndo': 0, 'interfaces': ()} + + self._db = None + + self._oids = [] # List of pre-fetched oids from server + + cache = self._cache = open_cache( + cache, var, client, storage, cache_size) + + # XXX need to check for POSIX-ness here + self.blob_dir = blob_dir + self.shared_blob_dir = shared_blob_dir + + if blob_dir is not None: + # Avoid doing this import unless we need it, as it + # currently requires pywin32 on Windows. 
+ import ZODB.blob + if shared_blob_dir: + self.fshelper = ZODB.blob.FilesystemHelper(blob_dir) + else: + if 'zeocache' not in ZODB.blob.LAYOUTS: + ZODB.blob.LAYOUTS['zeocache'] = BlobCacheLayout() + self.fshelper = ZODB.blob.FilesystemHelper( + blob_dir, layout_name='zeocache') + self.fshelper.create() + self.fshelper.checkSecure() + else: + self.fshelper = None + + self._blob_cache_size = blob_cache_size + self._blob_data_bytes_loaded = 0 + if blob_cache_size is not None: + assert blob_cache_size_check < 100 + self._blob_cache_size_check = ( + blob_cache_size * blob_cache_size_check // 100) + self._check_blob_size() + + self.server_sync = server_sync + + self._server = _client_factory( + addr, self, cache, storage, + ZEO.asyncio.client.Fallback if read_only_fallback else read_only, + wait_timeout or 30, + ssl = ssl, ssl_server_hostname=ssl_server_hostname, + credentials=credentials, + ) + self._call = self._server.call + self._async = self._server.async_ + self._async_iter = self._server.async_iter + self._wait = self._server.wait + + self._commit_lock = threading.Lock() + + if wait: + try: + self._wait() + except Exception: + # No point in keeping the server going of the storage + # creation fails + self._server.close() + raise + + def new_addr(self, addr): + self._addr = addr + self._server.new_addrs(self._normalize_addr(addr)) + + def _normalize_addr(self, addr): + if isinstance(addr, int): + addr = ('127.0.0.1', addr) + + if isinstance(addr, str): + addr = [addr] + elif (isinstance(addr, tuple) and len(addr) == 2 and + isinstance(addr[0], str) and isinstance(addr[1], int)): + addr = [addr] + return addr + + def close(self): + "Storage API: finalize the storage, releasing external resources." 
+ self._server.close() + + if self._check_blob_size_thread is not None: + self._check_blob_size_thread.join() + + _check_blob_size_thread = None + def _check_blob_size(self, bytes=None): + if self._blob_cache_size is None: + return + if self.shared_blob_dir or not self.blob_dir: + return + + if (bytes is not None) and (bytes < self._blob_cache_size_check): + return + + self._blob_data_bytes_loaded = 0 + + target = max(self._blob_cache_size - self._blob_cache_size_check, 0) + + check_blob_size_thread = threading.Thread( + target=_check_blob_cache_size, + args=(self.blob_dir, target), + name="%s zeo client check blob size thread" % self.__name__, + ) + check_blob_size_thread.setDaemon(True) + check_blob_size_thread.start() + self._check_blob_size_thread = check_blob_size_thread + + def registerDB(self, db): + """Storage API: register a database for invalidation messages. + + This is called by ZODB.DB (and by some tests). + + The storage isn't really ready to use until after this call. + """ + super(ClientStorage, self).registerDB(db) + self._db = db + + def is_connected(self, test=False): + """Return whether the storage is currently connected to a server.""" + return self._server.is_connected() + + def sync(self): + # The separate async thread should keep us up to date + pass + + _connection_generation = 0 + def notify_connected(self, conn, info): + reconnected = self._connection_generation + self.set_server_addr(conn.get_peername()) + self.protocol_version = conn.protocol_version + self._is_read_only = conn.is_read_only() + + # invalidate our db cache + if self._db is not None: + self._db.invalidateCache() + + logger.info("%s %s to storage: %s", + self.__name__, + 'Reconnected' if self._connection_generation + else 'Connected', + self._server_addr) + + self._connection_generation += 1 + + if self._client_label: + conn.call_async_from_same_thread( + 'set_client_label', self._client_label) + + self._info.update(info) + + for iface in ( + 
ZODB.interfaces.IStorageRestoreable, + ZODB.interfaces.IStorageIteration, + ZODB.interfaces.IStorageUndoable, + ZODB.interfaces.IStorageCurrentRecordIteration, + ZODB.interfaces.IBlobStorage, + ZODB.interfaces.IExternalGC, + ): + if (iface.__module__, iface.__name__) in self._info.get( + 'interfaces', ()): + zope.interface.alsoProvides(self, iface) + + if self.protocol_version[1:] >= b'5': + self.ping = lambda : self._call('ping') + else: + self.ping = lambda : self._call('lastTransaction') + + if self.server_sync: + self.sync = self.ping + + def set_server_addr(self, addr): + # Normalize server address and convert to string + if isinstance(addr, str): + self._server_addr = addr + else: + assert isinstance(addr, tuple) + # If the server is on a remote host, we need to guarantee + # that all clients used the same name for the server. If + # they don't, the sortKey() may be different for each client. + # The best solution seems to be the official name reported + # by gethostbyaddr(). + host = addr[0] + try: + canonical, aliases, addrs = socket.gethostbyaddr(host) + except socket.error as err: + logger.debug("%s Error resolving host: %s (%s)", + self.__name__, host, err) + canonical = host + self._server_addr = str((canonical, addr[1])) + + def sortKey(self): + # XXX sortKey should be explicit, possibly based on database name. + + # If the client isn't connected to anything, it can't have a + # valid sortKey(). Raise an error to stop the transaction early. + if self._server_addr is None: + raise ClientDisconnected + else: + return '%s:%s' % (self._storage, self._server_addr) + + def notify_disconnected(self): + """Internal: notify that the server connection was terminated. + + This is called by ConnectionManager when the connection is + closed or when certain problems with the connection occur. 
+ + """ + logger.info("%s Disconnected from storage: %r", + self.__name__, self._server_addr) + self._iterator_gc(True) + self._connection_generation += 1 + self._is_read_only = self._server.is_read_only() + + def __len__(self): + """Return the size of the storage.""" + # TODO: Is this method used? + return self._info['length'] + + def getName(self): + """Storage API: return the storage name as a string. + + The return value consists of two parts: the name as determined + by the name and addr argments to the ClientStorage + constructor, and the string 'connected' or 'disconnected' in + parentheses indicating whether the storage is (currently) + connected. + + """ + return "%s (%s)" % ( + self.__name__, + self.is_connected() and "connected" or "disconnected") + + def getSize(self): + """Storage API: an approximate size of the database, in bytes.""" + return self._info['size'] + + def supportsUndo(self): + """Storage API: return whether we support undo.""" + return self._info['supportsUndo'] + + def is_read_only(self): + """Storage API: return whether we are in read-only mode. + """ + return self._is_read_only or self._server.is_read_only() + + isReadOnly = is_read_only + + def _check_trans(self, trans, meth): + """Internal helper to check a transaction argument for sanity.""" + if self._is_read_only: + raise POSException.ReadOnlyError() + + try: + buf = trans.data(self) + except KeyError: + buf = None + + if buf is None: + raise POSException.StorageTransactionError( + "Transaction not committing", meth, trans) + + if buf.connection_generation != self._connection_generation: + # We were disconneected, so this one is poisoned + raise ClientDisconnected(meth, 'on a disconnected transaction') + + return buf + + def history(self, oid, size=1): + """Storage API: return a sequence of HistoryEntry objects. + """ + return self._call('history', oid, size) + + def record_iternext(self, next=None): + """Storage API: get the next database record. 
+ + This is part of the conversion-support API. + """ + return self._call('record_iternext', next) + + def getTid(self, oid): + # XXX deprecated: but ZODB tests use this. They shouldn't + return self._call('getTid', oid) + + def loadSerial(self, oid, serial): + """Storage API: load a historical revision of an object.""" + return self._call('loadSerial', oid, serial) + + def load(self, oid, version=''): + result = self.loadBefore(oid, utils.maxtid) + if result is None: + raise POSException.POSKeyError(oid) + return result[:2] + + def loadBefore(self, oid, tid): + result = self._cache.loadBefore(oid, tid) + if result: + return result + + return self._server.load_before(oid, tid) + + def prefetch(self, oids, tid): + self._server.prefetch(oids, tid) + + def new_oid(self): + """Storage API: return a new object identifier. + """ + if self._is_read_only: + raise POSException.ReadOnlyError() + + while 1: + try: + return self._oids.pop() + except IndexError: + pass # We ran out. We need to get some more. + + self._oids[:0] = reversed(self._call('new_oids')) + + def pack(self, t=None, referencesf=None, wait=1, days=0): + """Storage API: pack the storage. + + Deviations from the Storage API: the referencesf argument is + ignored; two additional optional arguments wait and days are + provided: + + wait -- a flag indicating whether to wait for the pack to + complete; defaults to true. + + days -- a number of days to subtract from the pack time; + defaults to zero. + + """ + # TODO: Is it okay that read-only connections allow pack()? 
+ # rf argument ignored; server will provide its own implementation + if t is None: + t = time.time() + t = t - (days * 86400) + return self._call('pack', t, wait) + + def store(self, oid, serial, data, version, txn): + """Storage API: store data for an object.""" + assert not version + + tbuf = self._check_trans(txn, 'store') + self._async('storea', oid, serial, data, id(txn)) + tbuf.store(oid, data) + + def checkCurrentSerialInTransaction(self, oid, serial, transaction): + self._check_trans(transaction, 'checkCurrentSerialInTransaction') + self._async( + 'checkCurrentSerialInTransaction', oid, serial, id(transaction)) + + def storeBlob(self, oid, serial, data, blobfilename, version, txn): + """Storage API: store a blob object.""" + assert not version + tbuf = self._check_trans(txn, 'storeBlob') + + # Grab the file right away. That way, if we don't have enough + # room for a copy, we'll know now rather than in tpc_finish. + # Also, this releaves the client of having to manage the file + # (or the directory contianing it). + self.fshelper.getPathForOID(oid, create=True) + fd, target = self.fshelper.blob_mkstemp(oid, serial) + os.close(fd) + + # It's a bit odd (and impossible on windows) to rename over + # an existing file. We'll use the temporary file name as a base. + target += '-' + ZODB.blob.rename_or_copy_blob(blobfilename, target) + os.remove(target[:-1]) + + serials = self.store(oid, serial, data, '', txn) + if self.shared_blob_dir: + self._async( + 'storeBlobShared', + oid, serial, data, os.path.basename(target), id(txn)) + else: + + # Store a blob to the server. We don't want to real all of + # the data into memory, so we use a message iterator. This + # allows us to read the blob data as needed. 
+ + def store(): + yield ('storeBlobStart', ()) + f = open(target, 'rb') + while 1: + chunk = f.read(59000) + if not chunk: + break + yield ('storeBlobChunk', (chunk, )) + f.close() + yield ('storeBlobEnd', (oid, serial, data, id(txn))) + + self._async_iter(store()) + tbuf.storeBlob(oid, target) + + return serials + + def receiveBlobStart(self, oid, serial): + blob_filename = self.fshelper.getBlobFilename(oid, serial) + assert not os.path.exists(blob_filename) + lockfilename = os.path.join(os.path.dirname(blob_filename), '.lock') + assert os.path.exists(lockfilename) + blob_filename += '.dl' + assert not os.path.exists(blob_filename) + f = open(blob_filename, 'wb') + f.close() + + def receiveBlobChunk(self, oid, serial, chunk): + blob_filename = self.fshelper.getBlobFilename(oid, serial)+'.dl' + assert os.path.exists(blob_filename) + f = open(blob_filename, 'r+b') + f.seek(0, 2) + f.write(chunk) + f.close() + self._blob_data_bytes_loaded += len(chunk) + self._check_blob_size(self._blob_data_bytes_loaded) + + def receiveBlobStop(self, oid, serial): + blob_filename = self.fshelper.getBlobFilename(oid, serial) + os.rename(blob_filename+'.dl', blob_filename) + os.chmod(blob_filename, stat.S_IREAD) + + def deleteObject(self, oid, serial, txn): + tbuf = self._check_trans(txn, 'deleteObject') + self._async('deleteObject', oid, serial, id(txn)) + tbuf.store(oid, None) + + def loadBlob(self, oid, serial): + # Load a blob. If it isn't present and we have a shared blob + # directory, then assume that it doesn't exist on the server + # and return None. + + if self.fshelper is None: + raise POSException.Unsupported("No blob cache directory is " + "configured.") + + blob_filename = self.fshelper.getBlobFilename(oid, serial) + if self.shared_blob_dir: + if os.path.exists(blob_filename): + return blob_filename + else: + # We're using a server shared cache. If the file isn't + # here, it's not anywhere. 
+ raise POSException.POSKeyError( + "No blob file at %s" % blob_filename, oid, serial) + + if os.path.exists(blob_filename): + return _accessed(blob_filename) + + # First, we'll create the directory for this oid, if it doesn't exist. + self.fshelper.createPathForOID(oid) + + # OK, it's not here and we (or someone) needs to get it. We + # want to avoid getting it multiple times. We want to avoid + # getting it multiple times even accross separate client + # processes on the same machine. We'll use file locking. + + lock = _lock_blob(blob_filename) + try: + # We got the lock, so it's our job to download it. First, + # we'll double check that someone didn't download it while we + # were getting the lock: + + if os.path.exists(blob_filename): + return _accessed(blob_filename) + + # Ask the server to send it to us. When this function + # returns, it will have been sent. (The recieving will + # have been handled by the asyncore thread.) + + self._call('sendBlob', oid, serial) + + if os.path.exists(blob_filename): + return _accessed(blob_filename) + + raise POSException.POSKeyError("No blob file", oid, serial) + + finally: + lock.close() + + def openCommittedBlobFile(self, oid, serial, blob=None): + blob_filename = self.loadBlob(oid, serial) + try: + if blob is None: + return open(blob_filename, 'rb') + else: + return ZODB.blob.BlobFile(blob_filename, 'r', blob) + except (IOError): + # The file got removed while we were opening. + # Fall through and try again with the protection of the lock. + pass + + lock = _lock_blob(blob_filename) + try: + blob_filename = self.fshelper.getBlobFilename(oid, serial) + if not os.path.exists(blob_filename): + if self.shared_blob_dir: + # We're using a server shared cache. If the file isn't + # here, it's not anywhere. 
+ raise POSException.POSKeyError("No blob file", oid, serial) + self._call('sendBlob', oid, serial) + if not os.path.exists(blob_filename): + raise POSException.POSKeyError("No blob file", oid, serial) + + _accessed(blob_filename) + if blob is None: + return open(blob_filename, 'rb') + else: + return ZODB.blob.BlobFile(blob_filename, 'r', blob) + finally: + lock.close() + + + def temporaryDirectory(self): + return self.fshelper.temp_dir + + def tpc_vote(self, txn): + """Storage API: vote on a transaction. + """ + tbuf = self._check_trans(txn, 'tpc_vote') + try: + + conflicts = True + vote_attempts = 0 + while conflicts and vote_attempts < 9: # 9? Mainly avoid inf. loop + conflicts = False + for oid in self._call('vote', id(txn)) or (): + if isinstance(oid, dict): + # Conflict, let's try to resolve it + conflicts = True + conflict = oid + oid = conflict['oid'] + committed, read = conflict['serials'] + data = self.tryToResolveConflict( + oid, committed, read, conflict['data']) + self._async('storea', oid, committed, data, id(txn)) + tbuf.resolve(oid, data) + else: + tbuf.server_resolve(oid) + + vote_attempts += 1 + + except POSException.StorageTransactionError: + # Hm, we got disconnected and reconnected bwtween + # _check_trans and voting. Let's chack the transaction again: + self._check_trans(txn, 'tpc_vote') + raise + + except POSException.ConflictError as err: + oid = getattr(err, 'oid', None) + if oid is not None: + # This is a band-aid to help recover from a situation + # that shouldn't happen. A Client somehow misses some + # invalidations and has out of date data in its + # cache. We need some whay to invalidate the cache + # entry without invalidations. So, if we see a + # (unresolved) conflict error, we assume that the + # cache entry is bad and invalidate it. 
+ self._cache.invalidate(oid, None) + raise + + if tbuf.exception: + raise tbuf.exception + + if tbuf.server_resolved or tbuf.client_resolved: + return list(tbuf.server_resolved) + list(tbuf.client_resolved) + else: + return None + + def tpc_transaction(self): + return self._transaction + + def tpc_begin(self, txn, tid=None, status=' '): + """Storage API: begin a transaction.""" + if self._is_read_only: + raise POSException.ReadOnlyError() + + try: + tbuf = txn.data(self) + except AttributeError: + # Gaaaa. This is a recovery transaction. Work around this + # until we can think of something better. XXX + tb = {} + txn.data = tb.__getitem__ + txn.set_data = tb.__setitem__ + except KeyError: + pass + else: + if tbuf is not None: + raise POSException.StorageTransactionError( + "Duplicate tpc_begin calls for same transaction") + + txn.set_data(self, TransactionBuffer(self._connection_generation)) + + # XXX we'd like to allow multiple transactions at a time at some point, + # but for now, due to server limitations, TCBOO. + self._commit_lock.acquire() + self._tbuf = txn.data(self) + + try: + self._async( + 'tpc_begin', id(txn), + txn.user, txn.description, txn.extension, tid, status) + except ClientDisconnected: + self.tpc_end(txn) + raise + + def tpc_end(self, txn): + tbuf = txn.data(self) + if tbuf is not None: + tbuf.close() + txn.set_data(self, None) + self._commit_lock.release() + + def lastTransaction(self): + return self._cache.getLastTid() + + def tpc_abort(self, txn, timeout=None): + """Storage API: abort a transaction. + + (The timeout keyword argument is for tests to wat longer than + they normally would.) + """ + try: + tbuf = txn.data(self) + except KeyError: + return + + try: + # Caution: Are there any exceptions that should prevent an + # abort from occurring? It seems wrong to swallow them + # all, yet you want to be sure that other abort logic is + # executed regardless. 
+ try: + # It's tempting to make an asynchronous call here, but + # it's useful for it to be synchronous because, if we + # failed due to a disconnect, synchronous calls will + # wait a little while in hopes of reconnecting. If + # we're able to reconnect and retry the transaction, + # ten it might succeed! + self._call('tpc_abort', id(txn), timeout=timeout) + except ClientDisconnected: + logger.debug("%s ClientDisconnected in tpc_abort() ignored", + self.__name__) + finally: + self._iterator_gc() + self.tpc_end(txn) + + def tpc_finish(self, txn, f=lambda tid: None): + """Storage API: finish a transaction.""" + tbuf = self._check_trans(txn, 'tpc_finish') + + try: + tid = self._server.tpc_finish(id(txn), tbuf, f) + finally: + self.tpc_end(txn) + self._iterator_gc() + + self._update_blob_cache(tbuf, tid) + + return tid + + def _update_blob_cache(self, tbuf, tid): + """Internal helper move blobs updated by a transaction to the cache. + """ + + # Not sure why _update_cache() would be called on a closed storage. + if self._cache is None: + return + + if self.fshelper is not None: + blobs = tbuf.blobs + had_blobs = False + while blobs: + oid, blobfilename = blobs.pop() + self._blob_data_bytes_loaded += os.stat(blobfilename).st_size + targetpath = self.fshelper.getPathForOID(oid, create=True) + target_blob_file_name = self.fshelper.getBlobFilename(oid, tid) + lock = _lock_blob(target_blob_file_name) + try: + ZODB.blob.rename_or_copy_blob( + blobfilename, + target_blob_file_name, + ) + finally: + lock.close() + had_blobs = True + + if had_blobs: + self._check_blob_size(self._blob_data_bytes_loaded) + + def undo(self, trans_id, txn): + """Storage API: undo a transaction. + + This is executed in a transactional context. It has no effect + until the transaction is committed. It can be undone itself. + + Zope uses this to implement undo unless it is not supported by + a storage. 
+ + """ + self._check_trans(txn, 'undo') + self._async('undoa', trans_id, id(txn)) + + def undoInfo(self, first=0, last=-20, specification=None): + """Storage API: return undo information.""" + return self._call('undoInfo', first, last, specification) + + def undoLog(self, first=0, last=-20, filter=None): + """Storage API: return a sequence of TransactionDescription objects. + + The filter argument should be None or left unspecified, since + it is impossible to pass the filter function to the server to + be executed there. If filter is not None, an empty sequence + is returned. + + """ + if filter is not None: + return [] + return self._call('undoLog', first, last) + + # Recovery support + + def copyTransactionsFrom(self, other, verbose=0): + """Copy transactions from another storage. + + This is typically used for converting data from one storage to + another. `other` must have an .iterator() method. + """ + ZODB.BaseStorage.copy(other, self, verbose) + + def restore(self, oid, serial, data, version, prev_txn, transaction): + """Write data already committed in a separate database.""" + assert not version + self._check_trans(transaction, 'restore') + self._async('restorea', oid, serial, data, prev_txn, id(transaction)) + + # Below are methods invoked by the StorageServer + + def serialnos(self, args): + """Server callback to pass a list of changed (oid, serial) pairs. 
+ """ + self._tbuf.serialnos(args) + + def info(self, dict): + """Server callback to update the info dictionary.""" + self._info.update(dict) + + def invalidateCache(self): + if self._db is not None: + self._db.invalidateCache() + + def invalidateTransaction(self, tid, oids): + """Server callback: Invalidate objects modified by tid.""" + if self._db is not None: + self._db.invalidate(tid, oids) + + # IStorageIteration + + def iterator(self, start=None, stop=None): + """Return an IStorageTransactionInformation iterator.""" + # iids are "iterator IDs" that can be used to query an iterator whose + # status is held on the server. + iid = self._call('iterator_start', start, stop) + return self._setup_iterator(TransactionIterator, iid) + + def _setup_iterator(self, factory, iid, *args): + self._iterators[iid] = iterator = factory(self, iid, *args) + self._iterator_ids.add(iid) + return iterator + + def _forget_iterator(self, iid): + self._iterators.pop(iid, None) + self._iterator_ids.remove(iid) + + def _iterator_gc(self, disconnected=False): + if not self._iterator_ids: + return + + if disconnected: + for i in self._iterators.values(): + i._iid = -1 + self._iterators.clear() + self._iterator_ids.clear() + return + + # Recall that self._iterators is a WeakValueDictionary. Under + # non-refcounted implementations like PyPy, this means that + # unreachable iterators (and their IDs) may still be in this + # map for some arbitrary period of time (until the next + # garbage collection occurs.) This is fine: the server + # supports being asked to GC the same iterator ID more than + # once. Iterator ids can be reused, but only after a server + # restart, after which we had already been called with + # `disconnected` True and so had cleared out our map anyway, + # plus we simply replace whatever is in the map if we get a + # duplicate id---and duplicates at that point would be dead + # objects waiting to be cleaned up. 
So there's never any risk + # of confusing TransactionIterator objects that are in use. + iids = self._iterator_ids - set(self._iterators) + # let tests know we've been called: + self._iterators._last_gc = time.time() + if iids: + try: + self._async('iterator_gc', list(iids)) + except ClientDisconnected: + # If we get disconnected, all of the iterators on the + # server are thrown away. We should clear ours too: + return self._iterator_gc(True) + self._iterator_ids -= iids + + def server_status(self): + return self._call('server_status') + +class TransactionIterator(object): + + def __init__(self, storage, iid, *args): + self._storage = storage + self._iid = iid + self._ended = False + + def __iter__(self): + return self + + def __next__(self): + if self._ended: + raise StopIteration() + + if self._iid < 0: + raise ClientDisconnected("Disconnected iterator") + + tx_data = self._storage._call('iterator_next', self._iid) + if tx_data is None: + # The iterator is exhausted, and the server has already + # disposed it. 
+ self._ended = True + self._storage._forget_iterator(self._iid) + raise StopIteration() + + return ClientStorageTransactionInformation( + self._storage, self, *tx_data) + + next = __next__ + + +class ClientStorageTransactionInformation(ZODB.BaseStorage.TransactionRecord): + + def __init__(self, storage, txiter, tid, status, user, description, + extension): + self._storage = storage + self._txiter = txiter + self._completed = False + self._riid = None + + self.tid = tid + self.status = status + self.user = user + self.description = description + self.extension = extension + + def __iter__(self): + riid = self._storage._call('iterator_record_start', + self._txiter._iid, self.tid) + return self._storage._setup_iterator(RecordIterator, riid) + + +class RecordIterator(object): + + def __init__(self, storage, riid): + self._riid = riid + self._completed = False + self._storage = storage + + def __iter__(self): + return self + + def __next__(self): + if self._completed: + # We finished iteration once already and the server can't know + # about the iteration anymore. + raise StopIteration() + item = self._storage._call('iterator_record_next', self._riid) + if item is None: + # The iterator is exhausted, and the server has already + # disposed it. + self._completed = True + raise StopIteration() + return ZODB.BaseStorage.DataRecord(*item) + + next = __next__ + + +class BlobCacheLayout(object): + + size = 997 + + def oid_to_path(self, oid): + return str(utils.u64(oid) % self.size) + + def getBlobFilePath(self, oid, tid): + base, rem = divmod(utils.u64(oid), self.size) + return os.path.join( + str(rem), + "%s.%s%s" % (base, hexlify(tid).decode('ascii'), + ZODB.blob.BLOB_SUFFIX) + ) + +def _accessed(filename): + try: + os.utime(filename, (time.time(), os.stat(filename).st_mtime)) + except OSError: + pass # We tried. 
:) + return filename + +cache_file_name = re.compile(r'\d+$').match +def _check_blob_cache_size(blob_dir, target): + + logger = logging.getLogger(__name__+'.check_blob_cache') + + with open(os.path.join(blob_dir, ZODB.blob.LAYOUT_MARKER)) as layout_file: + layout = layout_file.read().strip() + if not layout == 'zeocache': + logger.critical("Invalid blob directory layout %s", layout) + raise ValueError("Invalid blob directory layout", layout) + + attempt_path = os.path.join(blob_dir, 'check_size.attempt') + + try: + check_lock = zc.lockfile.LockFile( + os.path.join(blob_dir, 'check_size.lock')) + except zc.lockfile.LockError: + try: + time.sleep(1) + check_lock = zc.lockfile.LockFile( + os.path.join(blob_dir, 'check_size.lock')) + except zc.lockfile.LockError: + # Someone is already cleaning up, so don't bother + logger.debug("%s Another thread is checking the blob cache size.", + get_ident()) + open(attempt_path, 'w').close() # Mark that we tried + return + + logger.debug("%s Checking blob cache size. 
(target: %s)", + get_ident(), target) + + try: + while 1: + size = 0 + blob_suffix = ZODB.blob.BLOB_SUFFIX + files_by_atime = BTrees.OOBTree.BTree() + + for dirname in os.listdir(blob_dir): + if not cache_file_name(dirname): + continue + base = os.path.join(blob_dir, dirname) + if not os.path.isdir(base): + continue + for file_name in os.listdir(base): + if not file_name.endswith(blob_suffix): + continue + file_path = os.path.join(base, file_name) + if not os.path.isfile(file_path): + continue + stat = os.stat(file_path) + size += stat.st_size + t = stat.st_atime + if t not in files_by_atime: + files_by_atime[t] = [] + files_by_atime[t].append(os.path.join(dirname, file_name)) + + logger.debug("%s blob cache size: %s", get_ident(), size) + + if size <= target: + if os.path.isfile(attempt_path): + try: + os.remove(attempt_path) + except OSError: + pass # Sigh, windows + continue + logger.debug("%s -->", get_ident()) + break + + while size > target and files_by_atime: + for file_name in files_by_atime.pop(files_by_atime.minKey()): + file_name = os.path.join(blob_dir, file_name) + lockfilename = os.path.join(os.path.dirname(file_name), + '.lock') + try: + lock = zc.lockfile.LockFile(lockfilename) + except zc.lockfile.LockError: + logger.debug("%s Skipping locked %s", + get_ident(), + os.path.basename(file_name)) + continue # In use, skip + + try: + fsize = os.stat(file_name).st_size + try: + ZODB.blob.remove_committed(file_name) + except OSError as v: + pass # probably open on windows + else: + size -= fsize + finally: + lock.close() + + if size <= target: + break + + logger.debug("%s reduced blob cache size: %s", + get_ident(), size) + + finally: + check_lock.close() + +def check_blob_size_script(args=None): + if args is None: + args = sys.argv[1:] + blob_dir, target = args + _check_blob_cache_size(blob_dir, int(target)) + +def _lock_blob(path): + lockfilename = os.path.join(os.path.dirname(path), '.lock') + n = 0 + while 1: + try: + return 
zc.lockfile.LockFile(lockfilename) + except zc.lockfile.LockError: + time.sleep(0.01) + n += 1 + if n > 60000: + raise + else: + break + +def open_cache(cache, var, client, storage, cache_size): + if isinstance(cache, (None.__class__, str)): + from ZEO.cache import ClientCache + if cache is None: + if client: + cache = os.path.join(var or os.getcwd(), + "%s-%s.zec" % (client, storage)) + else: + # ephemeral cache + return ClientCache(None, cache_size) + + cache = ClientCache(cache, cache_size) + + return cache diff --git a/thesisenv/lib/python3.6/site-packages/ZEO/Exceptions.py b/thesisenv/lib/python3.6/site-packages/ZEO/Exceptions.py new file mode 100644 index 0000000..a5618d0 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO/Exceptions.py @@ -0,0 +1,43 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## +"""Exceptions for ZEO.""" + +import transaction.interfaces + +from ZODB.POSException import StorageError + +class ClientStorageError(StorageError): + """An error occurred in the ZEO Client Storage. + """ + +class UnrecognizedResult(ClientStorageError): + """A server call returned an unrecognized result. + """ + +class ClientDisconnected(ClientStorageError, + transaction.interfaces.TransientError): + """The database storage is disconnected from the storage. + """ + +class AuthError(StorageError): + """The client provided invalid authentication credentials. 
+ """ + +class ProtocolError(ClientStorageError): + """A client contacted a server with an incomparible protocol + """ + +class ServerException(ClientStorageError): + """ + """ diff --git a/thesisenv/lib/python3.6/site-packages/ZEO/StorageServer.py b/thesisenv/lib/python3.6/site-packages/ZEO/StorageServer.py new file mode 100644 index 0000000..5f521ac --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO/StorageServer.py @@ -0,0 +1,1255 @@ +############################################################################## +# +# Copyright (c) 2001, 2002, 2003 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## +"""The StorageServer class and the exception that it may raise. + +This server acts as a front-end for one or more real storages, like +file storage or Berkeley storage. + +TODO: Need some basic access control-- a declaration of the methods +exported for invocation by the server. 
+""" +import codecs +import itertools +import logging +import os +import socket +import sys +import tempfile +import threading +import time +import warnings +import ZEO.asyncio.server +import ZODB.blob +import ZODB.event +import ZODB.serialize +import ZODB.TimeStamp +import zope.interface +import six + +from ZEO._compat import Pickler, Unpickler, PY3, BytesIO +from ZEO.Exceptions import AuthError +from ZEO.monitor import StorageStats +from ZEO.asyncio.server import Delay, MTDelay, Result +from ZODB.Connection import TransactionMetaData +from ZODB.loglevels import BLATHER +from ZODB.POSException import StorageError, StorageTransactionError +from ZODB.POSException import TransactionError, ReadOnlyError, ConflictError +from ZODB.serialize import referencesf +from ZODB.utils import oid_repr, p64, u64, z64, Lock, RLock + +if os.environ.get("ZEO_MTACCEPTOR"): # mainly for tests + from .asyncio.mtacceptor import Acceptor +else: + from .asyncio.server import Acceptor + +logger = logging.getLogger('ZEO.StorageServer') + +def log(message, level=logging.INFO, label='', exc_info=False): + """Internal helper to log a message.""" + if label: + message = "(%s) %s" % (label, message) + logger.log(level, message, exc_info=exc_info) + + +class StorageServerError(StorageError): + """Error reported when an unpicklable exception is raised.""" + +registered_methods = set(( 'get_info', 'lastTransaction', + 'getInvalidations', 'new_oids', 'pack', 'loadBefore', 'storea', + 'checkCurrentSerialInTransaction', 'restorea', 'storeBlobStart', + 'storeBlobChunk', 'storeBlobEnd', 'storeBlobShared', + 'deleteObject', 'tpc_begin', 'vote', 'tpc_finish', 'tpc_abort', + 'history', 'record_iternext', 'sendBlob', 'getTid', 'loadSerial', + 'new_oid', 'undoa', 'undoLog', 'undoInfo', 'iterator_start', + 'iterator_next', 'iterator_record_start', 'iterator_record_next', + 'iterator_gc', 'server_status', 'set_client_label', 'ping')) + +class ZEOStorage(object): + """Proxy to underlying storage for a single 
remote client.""" + + connected = connection = stats = storage = storage_id = transaction = None + blob_tempfile = None + log_label = 'unconnected' + locked = False # Don't have storage lock + verifying = 0 + + def __init__(self, server, read_only=0): + self.server = server + self.client_conflict_resolution = server.client_conflict_resolution + # timeout and stats will be initialized in register() + self.read_only = read_only + self._iterators = {} + self._iterator_ids = itertools.count() + # Stores the last item that was handed out for a + # transaction iterator. + self._txn_iterators_last = {} + + def set_database(self, database): + self.database = database + + def notify_connected(self, conn): + self.connection = conn + self.call_soon_threadsafe = conn.call_soon_threadsafe + self.connected = True + assert conn.protocol_version is not None + self.log_label = _addr_label(conn.addr) + self.async_ = conn.async_ + self.async_threadsafe = conn.async_threadsafe + + def notify_disconnected(self): + # When this storage closes, we must ensure that it aborts + # any pending transaction. 
+ if self.transaction is not None: + self.log("disconnected during %s transaction" + % (self.locked and 'locked' or 'unlocked')) + self.tpc_abort(self.transaction.id) + else: + self.log("disconnected") + + self.connected = False + self.server.close_conn(self) + + def __repr__(self): + tid = self.transaction and repr(self.transaction.id) + if self.storage: + stid = (self.tpc_transaction() and + repr(self.tpc_transaction().id)) + else: + stid = None + name = self.__class__.__name__ + return "<%s %X trans=%s s_trans=%s>" % (name, id(self), tid, stid) + + def log(self, msg, level=logging.INFO, exc_info=False): + log(msg, level=level, label=self.log_label, exc_info=exc_info) + + def setup_delegation(self): + """Delegate several methods to the storage + """ + # Called from register + + storage = self.storage + + info = self.get_info() + + if not info['supportsUndo']: + self.undoLog = self.undoInfo = lambda *a,**k: () + + # XXX deprecated: but ZODB tests use getTid. They shouldn't + self.getTid = storage.getTid + + self.loadSerial = storage.loadSerial + record_iternext = getattr(storage, 'record_iternext', None) + if record_iternext is not None: + self.record_iternext = record_iternext + self.lastTransaction = storage.lastTransaction + + try: + self.tpc_transaction = storage.tpc_transaction + except AttributeError: + if hasattr(storage, '_transaction'): + log("Storage %r doesn't have a tpc_transaction method.\n" + "See ZEO.interfaces.IServeable." + "Falling back to using _transaction attribute, which\n." + "is icky.", + logging.ERROR) + self.tpc_transaction = lambda : storage._transaction + else: + raise + + self.connection.methods = registered_methods + + def history(self,tid,size=1): + # This caters for storages which still accept + # a version parameter. 
+ return self.storage.history(tid,size=size) + + def _check_tid(self, tid, exc=None): + if self.read_only: + raise ReadOnlyError() + if self.transaction is None: + caller = sys._getframe().f_back.f_code.co_name + self.log("no current transaction: %s()" % caller, + level=logging.WARNING) + if exc is not None: + raise exc(None, tid) + else: + return 0 + if self.transaction.id != tid: + caller = sys._getframe().f_back.f_code.co_name + self.log("%s(%s) invalid; current transaction = %s" % + (caller, repr(tid), repr(self.transaction.id)), + logging.WARNING) + if exc is not None: + raise exc(self.transaction.id, tid) + else: + return 0 + return 1 + + def register(self, storage_id, read_only): + """Select the storage that this client will use + + This method must be the first one called by the client. + For authenticated storages this method will be called by the client + immediately after authentication is finished. + """ + if self.storage is not None: + self.log("duplicate register() call") + raise ValueError("duplicate register() call") + + storage = self.server.storages.get(storage_id) + if storage is None: + self.log("unknown storage_id: %s" % storage_id) + raise ValueError("unknown storage: %s" % storage_id) + + if not read_only and (self.read_only or storage.isReadOnly()): + raise ReadOnlyError() + + self.read_only = self.read_only or read_only + self.storage_id = storage_id + self.storage = storage + self.setup_delegation() + self.stats = self.server.register_connection(storage_id, self) + self.lock_manager = self.server.lock_managers[storage_id] + + return self.lastTransaction() + + def get_info(self): + storage = self.storage + + supportsUndo = (getattr(storage, 'supportsUndo', lambda : False)() + and self.connection.protocol_version[1:] >= b'310') + + # Communicate the backend storage interfaces to the client + storage_provides = zope.interface.providedBy(storage) + interfaces = [] + for candidate in storage_provides.__iro__: + 
interfaces.append((candidate.__module__, candidate.__name__)) + + return {'length': len(storage), + 'size': storage.getSize(), + 'name': storage.getName(), + 'supportsUndo': supportsUndo, + 'supports_record_iternext': hasattr(self, 'record_iternext'), + 'interfaces': tuple(interfaces), + } + + def get_size_info(self): + return {'length': len(self.storage), + 'size': self.storage.getSize(), + } + + def loadBefore(self, oid, tid): + self.stats.loads += 1 + return self.storage.loadBefore(oid, tid) + + def getInvalidations(self, tid): + invtid, invlist = self.server.get_invalidations(self.storage_id, tid) + if invtid is None: + return None + self.log("Return %d invalidations up to tid %s" + % (len(invlist), u64(invtid))) + return invtid, invlist + + def pack(self, time, wait=1): + # Yes, you can pack a read-only server or storage! + if wait: + return run_in_thread(self._pack_impl, time) + else: + # If the client isn't waiting for a reply, start a thread + # and forget about it. + t = threading.Thread(target=self._pack_impl, args=(time,)) + t.setName("zeo storage packing thread") + t.start() + return None + + def _pack_impl(self, time): + self.log("pack(time=%s) started..." 
% repr(time)) + self.storage.pack(time, referencesf) + self.log("pack(time=%s) complete" % repr(time)) + # Broadcast new size statistics + self.server.broadcast_info(self.storage_id, self.get_size_info()) + + def new_oids(self, n=100): + """Return a sequence of n new oids, where n defaults to 100""" + n = min(n, 100) + if self.read_only: + raise ReadOnlyError() + if n <= 0: + n = 1 + return [self.storage.new_oid() for i in range(n)] + + # undoLog and undoInfo are potentially slow methods + + def undoInfo(self, first, last, spec): + return run_in_thread(self.storage.undoInfo, first, last, spec) + + def undoLog(self, first, last): + return run_in_thread(self.storage.undoLog, first, last) + + def tpc_begin(self, id, user, description, ext, tid=None, status=" "): + if self.read_only: + raise ReadOnlyError() + if self.transaction is not None: + if self.transaction.id == id: + self.log("duplicate tpc_begin(%s)" % repr(id)) + return + else: + raise StorageTransactionError("Multiple simultaneous tpc_begin" + " requests from one client.") + + t = TransactionMetaData(user, description, ext) + t.id = id + + self.serials = [] + self.conflicts = {} + self.invalidated = [] + self.txnlog = CommitLog() + self.blob_log = [] + self.tid = tid + self.status = status + self.stats.active_txns += 1 + + # Assign the transaction attribute last. This is so we don't + # think we've entered TPC until everything is set. Why? + # Because if we have an error after this, the server will + # think it is in TPC and the client will think it isn't. At + # that point, the client will keep trying to enter TPC and + # server won't let it. Errors *after* the tpc_begin call will + # cause the client to abort the transaction. + # (Also see https://bugs.launchpad.net/zodb/+bug/374737.) 
+ self.transaction = t + + def tpc_finish(self, id): + if not self._check_tid(id): + return + assert self.locked, "finished called wo lock" + + self.stats.commits += 1 + self.storage.tpc_finish(self.transaction, self._invalidate) + self.async_('info', self.get_size_info()) + # Note that the tid is still current because we still hold the + # commit lock. We'll relinquish it in _clear_transaction. + tid = self.storage.lastTransaction() + # Return the tid, for cache invalidation optimization + return Result(tid, self._clear_transaction) + + def _invalidate(self, tid): + self.server.invalidate(self, self.storage_id, tid, self.invalidated) + + def tpc_abort(self, tid): + if not self._check_tid(tid): + return + self.stats.aborts += 1 + self.storage.tpc_abort(self.transaction) + self._clear_transaction() + + def _clear_transaction(self): + # Common code at end of tpc_finish() and tpc_abort() + self.lock_manager.release(self) + self.transaction = None + self.stats.active_txns -= 1 + if self.txnlog is not None: + self.txnlog.close() + self.txnlog = None + for oid, oldserial, data, blobfilename in self.blob_log: + ZODB.blob.remove_committed(blobfilename) + del self.blob_log + + def vote(self, tid): + self._check_tid(tid, exc=StorageTransactionError) + return self.lock_manager.lock(self, self._vote) + + def _vote(self, delay=None): + # Called from client thread + + if not self.connected: + return # We're disconnected + + try: + self.log( + "Preparing to commit transaction: %d objects, %d bytes" + % (self.txnlog.stores, self.txnlog.size()), + level=BLATHER) + + if (self.tid is not None) or (self.status != ' '): + self.storage.tpc_begin(self.transaction, + self.tid, self.status) + else: + self.storage.tpc_begin(self.transaction) + + for op, args in self.txnlog: + getattr(self, op)(*args) + + # Blob support + while self.blob_log: + oid, oldserial, data, blobfilename = self.blob_log.pop() + self._store(oid, oldserial, data, blobfilename) + + + if not self.conflicts: + try: + 
serials = self.storage.tpc_vote(self.transaction) + except ConflictError as err: + if (self.client_conflict_resolution and + err.oid and err.serials and err.data + ): + self.conflicts[err.oid] = dict( + oid=err.oid, serials=err.serials, data=err.data) + else: + raise + else: + if serials: + self.serials.extend(serials) + + if self.conflicts: + self.storage.tpc_abort(self.transaction) + return list(self.conflicts.values()) + else: + self.locked = True # signal to lock manager to hold lock + return self.serials + + except Exception as err: + self.storage.tpc_abort(self.transaction) + self._clear_transaction() + + if isinstance(err, ConflictError): + self.stats.conflicts += 1 + self.log("conflict error %s" % err, BLATHER) + + if not isinstance(err, TransactionError): + logger.exception("While voting") + + raise + + # The public methods of the ZEO client API do not do the real work. + # They defer work until after the storage lock has been acquired. + # Most of the real implementations are in methods beginning with + # an _. 
+ + def deleteObject(self, oid, serial, id): + self._check_tid(id, exc=StorageTransactionError) + self.stats.stores += 1 + self.txnlog.delete(oid, serial) + + def storea(self, oid, serial, data, id): + self._check_tid(id, exc=StorageTransactionError) + self.stats.stores += 1 + self.txnlog.store(oid, serial, data) + + def checkCurrentSerialInTransaction(self, oid, serial, id): + self._check_tid(id, exc=StorageTransactionError) + self.txnlog.checkread(oid, serial) + + def restorea(self, oid, serial, data, prev_txn, id): + self._check_tid(id, exc=StorageTransactionError) + self.stats.stores += 1 + self.txnlog.restore(oid, serial, data, prev_txn) + + def storeBlobStart(self): + assert self.blob_tempfile is None + self.blob_tempfile = tempfile.mkstemp( + dir=self.storage.temporaryDirectory()) + + def storeBlobChunk(self, chunk): + os.write(self.blob_tempfile[0], chunk) + + def storeBlobEnd(self, oid, serial, data, id): + self._check_tid(id, exc=StorageTransactionError) + assert self.txnlog is not None # effectively not allowed after undo + fd, tempname = self.blob_tempfile + self.blob_tempfile = None + os.close(fd) + self.blob_log.append((oid, serial, data, tempname)) + + def storeBlobShared(self, oid, serial, data, filename, id): + self._check_tid(id, exc=StorageTransactionError) + assert self.txnlog is not None # effectively not allowed after undo + + # Reconstruct the full path from the filename in the OID directory + if (os.path.sep in filename + or not (filename.endswith('.tmp') + or filename[:-1].endswith('.tmp') + ) + ): + logger.critical( + "We're under attack! 
(bad filename to storeBlobShared, %r)", + filename) + raise ValueError(filename) + + filename = os.path.join(self.storage.fshelper.getPathForOID(oid), + filename) + self.blob_log.append((oid, serial, data, filename)) + + def sendBlob(self, oid, serial): + blobfilename = self.storage.loadBlob(oid, serial) + + def store(): + yield ('receiveBlobStart', (oid, serial)) + with open(blobfilename, 'rb') as f: + while 1: + chunk = f.read(59000) + if not chunk: + break + yield ('receiveBlobChunk', (oid, serial, chunk, )) + yield ('receiveBlobStop', (oid, serial)) + + self.connection.call_async_iter(store()) + + def undo(*a, **k): + raise NotImplementedError + + def undoa(self, trans_id, tid): + self._check_tid(tid, exc=StorageTransactionError) + self.txnlog.undo(trans_id) + + def _delete(self, oid, serial): + self.storage.deleteObject(oid, serial, self.transaction) + + def _checkread(self, oid, serial): + self.storage.checkCurrentSerialInTransaction( + oid, serial, self.transaction) + + def _store(self, oid, serial, data, blobfile=None): + try: + if blobfile is None: + self.storage.store(oid, serial, data, '', self.transaction) + else: + self.storage.storeBlob( + oid, serial, data, blobfile, '', self.transaction) + except ConflictError as err: + if self.client_conflict_resolution and err.serials: + self.conflicts[oid] = dict( + oid=oid, serials=err.serials, data=data) + else: + raise + else: + if oid in self.conflicts: + del self.conflicts[oid] + + if serial != b"\0\0\0\0\0\0\0\0": + self.invalidated.append(oid) + + def _restore(self, oid, serial, data, prev_txn): + self.storage.restore(oid, serial, data, '', prev_txn, + self.transaction) + + def _undo(self, trans_id): + tid, oids = self.storage.undo(trans_id, self.transaction) + self.invalidated.extend(oids) + self.serials.extend(oids) + + # IStorageIteration support + + def iterator_start(self, start, stop): + iid = next(self._iterator_ids) + self._iterators[iid] = iter(self.storage.iterator(start, stop)) + return iid + + 
def iterator_next(self, iid): + iterator = self._iterators[iid] + try: + info = next(iterator) + except StopIteration: + del self._iterators[iid] + item = None + if iid in self._txn_iterators_last: + del self._txn_iterators_last[iid] + else: + item = (info.tid, + info.status, + info.user, + info.description, + info.extension) + # Keep a reference to the last iterator result to allow starting a + # record iterator off it. + self._txn_iterators_last[iid] = info + return item + + def iterator_record_start(self, txn_iid, tid): + record_iid = next(self._iterator_ids) + txn_info = self._txn_iterators_last[txn_iid] + if txn_info.tid != tid: + raise Exception( + 'Out-of-order request for record iterator for transaction %r' + % tid) + self._iterators[record_iid] = iter(txn_info) + return record_iid + + def iterator_record_next(self, iid): + iterator = self._iterators[iid] + try: + info = next(iterator) + except StopIteration: + del self._iterators[iid] + item = None + else: + item = (info.oid, + info.tid, + info.data, + info.data_txn) + return item + + def iterator_gc(self, iids): + for iid in iids: + self._iterators.pop(iid, None) + + def server_status(self): + return self.server.server_status(self.storage_id) + + def set_client_label(self, label): + self.log_label = str(label)+' '+_addr_label(self.connection.addr) + + def ruok(self): + return self.server.ruok() + + def ping(self): + pass + +class StorageServerDB(object): + """Adapter from StorageServerDB to ZODB.interfaces.IStorageWrapper + + This is used in a ZEO fan-out situation, where a storage server + calls registerDB on a ClientStorage. + + Note that this is called from the Client-storage's IO thread, so + always a separate thread from the storge-server connections. 
+ """ + + def __init__(self, server, storage_id): + self.server = server + self.storage_id = storage_id + self.references = ZODB.serialize.referencesf + + def invalidate(self, tid, oids, version=''): + if version: + raise StorageServerError("Versions aren't supported.") + storage_id = self.storage_id + self.server.invalidate(None, storage_id, tid, oids) + + def invalidateCache(self): + self.server._invalidateCache(self.storage_id) + + transform_record_data = untransform_record_data = lambda self, data: data + +class StorageServer(object): + + """The server side implementation of ZEO. + + The StorageServer is the 'manager' for incoming connections. Each + connection is associated with its own ZEOStorage instance (defined + below). The StorageServer may handle multiple storages; each + ZEOStorage instance only handles a single storage. + """ + + def __init__(self, addr, storages, + read_only=0, + invalidation_queue_size=100, + invalidation_age=None, + transaction_timeout=None, + ssl=None, + client_conflict_resolution=False, + Acceptor=Acceptor, + msgpack=False, + ): + """StorageServer constructor. + + This is typically invoked from the start.py script. + + Arguments (the first two are required and positional): + + addr -- the address at which the server should listen. This + can be a tuple (host, port) to signify a TCP/IP connection + or a pathname string to signify a Unix domain socket + connection. A hostname may be a DNS name or a dotted IP + address. + + storages -- a dictionary giving the storage(s) to handle. The + keys are the storage names, the values are the storage + instances, typically FileStorage or Berkeley storage + instances. By convention, storage names are typically + strings representing small integers starting at '1'. + + read_only -- an optional flag saying whether the server should + operate in read-only mode. Defaults to false. Note that + even if the server is operating in writable mode, + individual storages may still be read-only. 
But if the + server is in read-only mode, no write operations are + allowed, even if the storages are writable. Note that + pack() is considered a read-only operation. + + invalidation_queue_size -- The storage server keeps a queue + of the objects modified by the last N transactions, where + N == invalidation_queue_size. This queue is used to + speed client cache verification when a client disconnects + for a short period of time. + + invalidation_age -- + If the invalidation queue isn't big enough to support a + quick verification, but the last transaction seen by a + client is younger than the invalidation age, then + invalidations will be computed by iterating over + transactions later than the given transaction. + + transaction_timeout -- The maximum amount of time to wait for + a transaction to commit after acquiring the storage lock. + If the transaction takes too long, the client connection + will be closed and the transaction aborted. + """ + + self.storages = storages + msg = ", ".join( + ["%s:%s:%s" % (name, storage.isReadOnly() and "RO" or "RW", + storage.getName()) + for name, storage in storages.items()]) + log("%s created %s with storages: %s" % + (self.__class__.__name__, read_only and "RO" or "RW", msg)) + + + self._lock = Lock() + self.ssl = ssl # For dev convenience + + self.read_only = read_only + self.database = None + + # A list, by server, of at most invalidation_queue_size invalidations. + # The list is kept in sorted order with the most recent + # invalidation at the front. The list never has more than + # self.invq_bound elements. 
+ self.invq_bound = invalidation_queue_size + self.invq = {} + + self.zeo_storages_by_storage_id = {} # {storage_id -> [ZEOStorage]} + self.lock_managers = {} # {storage_id -> LockManager} + self.stats = {} # {storage_id -> StorageStats} + for name, storage in storages.items(): + self._setup_invq(name, storage) + storage.registerDB(StorageServerDB(self, name)) + if client_conflict_resolution: + # XXX this may go away later, when storages grow + # configuration for this. + storage.tryToResolveConflict = never_resolve_conflict + self.zeo_storages_by_storage_id[name] = [] + self.stats[name] = stats = StorageStats( + self.zeo_storages_by_storage_id[name]) + if transaction_timeout is None: + # An object with no-op methods + timeout = StubTimeoutThread() + else: + timeout = TimeoutThread(transaction_timeout) + timeout.setName("TimeoutThread for %s" % name) + timeout.start() + self.lock_managers[name] = LockManager(name, stats, timeout) + + self.invalidation_age = invalidation_age + self.client_conflict_resolution = client_conflict_resolution + + if addr is not None: + self.acceptor = Acceptor(self, addr, ssl, msgpack) + if isinstance(addr, tuple) and addr[0]: + self.addr = self.acceptor.addr + else: + self.addr = addr + self.loop = self.acceptor.loop + ZODB.event.notify(Serving(self, address=self.acceptor.addr)) + + def create_client_handler(self): + return ZEOStorage(self, self.read_only) + + def _setup_invq(self, name, storage): + lastInvalidations = getattr(storage, 'lastInvalidations', None) + if lastInvalidations is None: + # Using None below doesn't look right, but the first + # element in invq is never used. See get_invalidations. + # (If it was used, it would generate an error, which would + # be good. :) Doing this allows clients that were up to + # date when a server was restarted to pick up transactions + # it subsequently missed. 
+ self.invq[name] = [(storage.lastTransaction() or z64, None)] + else: + self.invq[name] = list(lastInvalidations(self.invq_bound)) + self.invq[name].reverse() + + def register_connection(self, storage_id, zeo_storage): + """Internal: register a ZEOStorage with a particular storage. + + This is called by ZEOStorage.register(). + + The dictionary self.zeo_storages_by_storage_id maps each + storage name to a list of current ZEOStorages for that + storage; this information is needed to handle invalidation. + This function updates this dictionary. + + Returns the timeout and stats objects for the appropriate storage. + """ + self.zeo_storages_by_storage_id[storage_id].append(zeo_storage) + return self.stats[storage_id] + + def _invalidateCache(self, storage_id): + """We need to invalidate any caches we have. + + This basically means telling our clients to + invalidate/revalidate their caches. We do this by closing them + and making them reconnect. + """ + + # This method is called from foreign threads. We have to + # worry about interaction with the main thread. + + # Rebuild invq + self._setup_invq(storage_id, self.storages[storage_id]) + + # Make a copy since we are going to be mutating the + # connections indirectoy by closing them. We don't care about + # later transactions since they will have to validate their + # caches anyway. + for zs in self.zeo_storages_by_storage_id[storage_id][:]: + zs.call_soon_threadsafe(zs.connection.close) + + def invalidate(self, zeo_storage, storage_id, tid, invalidated): + """Internal: broadcast invalidations to clients. + + This is called from several ZEOStorage methods. + + invalidated is a sequence of oids. + """ + + # This method can be called from foreign threads. We have to + # worry about interaction with the main thread. 
+ + invq = self.invq[storage_id] + if len(invq) >= self.invq_bound: + invq.pop() + invq.insert(0, (tid, invalidated)) + + for zs in self.zeo_storages_by_storage_id[storage_id]: + if zs is not zeo_storage: + zs.async_threadsafe('invalidateTransaction', tid, invalidated) + + def broadcast_info(self, storage_id, info): + """Internal: broadcast info to clients. + """ + for zs in self.zeo_storages_by_storage_id[storage_id]: + zs.async_threadsafe('info', info) + + def get_invalidations(self, storage_id, tid): + """Return a tid and list of all objects invalidation since tid. + + The tid is the most recent transaction id seen by the client. + + Returns None if it is unable to provide a complete list + of invalidations for tid. In this case, client should + do full cache verification. + + XXX This API is stupid. It would be better to simply return a + list of oid-tid pairs. With this API, we can't really use the + tid returned and have to discard all versions for an OID. If + we used the max tid, then loadBefore results from the cache + might be incorrect. + """ + + # We make a copy of invq because it might be modified by a + # foreign (other than main thread) calling invalidate above. 
+ invq = self.invq[storage_id][:] + + oids = set() + latest_tid = None + if invq and invq[-1][0] <= tid: + # We have needed data in the queue + for _tid, L in invq: + if _tid <= tid: + break + oids.update(L) + latest_tid = invq[0][0] + elif (self.invalidation_age and + (self.invalidation_age > + (time.time()-ZODB.TimeStamp.TimeStamp(tid).timeTime()) + ) + ): + for t in self.storages[storage_id].iterator(p64(u64(tid)+1)): + for r in t: + oids.add(r.oid) + latest_tid = t.tid + elif not invq: + log("invq empty") + else: + log("tid to old for invq %s < %s" % (u64(tid), u64(invq[-1][0]))) + + return latest_tid, list(oids) + + __thread = None + def start_thread(self, daemon=True): + self.__thread = thread = threading.Thread(target=self.loop) + thread.setName("StorageServer(%s)" % _addr_label(self.addr)) + thread.setDaemon(daemon) + thread.start() + + __closed = False + def close(self, join_timeout=1): + """Close the dispatcher so that there are no new connections. + + This is only called from the test suite, AFAICT. + """ + if self.__closed: + return + self.__closed = True + + # Stop accepting connections + self.acceptor.close() + + ZODB.event.notify(Closed(self)) + + # Close open client connections + for sid, zeo_storages in self.zeo_storages_by_storage_id.items(): + for zs in zeo_storages[:]: + try: + logger.debug("Closing %s", zs.connection) + zs.call_soon_threadsafe(zs.connection.close) + except Exception: + logger.exception("closing connection %r", zs) + + for name, storage in six.iteritems(self.storages): + logger.info("closing storage %r", name) + storage.close() + + if self.__thread is not None: + self.__thread.join(join_timeout) + + def close_conn(self, zeo_storage): + """Remove the given zeo_storage from self.zeo_storages_by_storage_id. + + This is the inverse of register_connection(). 
+ """ + for zeo_storages in self.zeo_storages_by_storage_id.values(): + if zeo_storage in zeo_storages: + zeo_storages.remove(zeo_storage) + + def server_status(self, storage_id): + status = self.stats[storage_id].__dict__.copy() + status['connections'] = len(status['connections']) + lock_manager = self.lock_managers[storage_id] + status['waiting'] = len(lock_manager.waiting) + status['timeout-thread-is-alive'] = lock_manager.timeout.isAlive() + last_transaction = self.storages[storage_id].lastTransaction() + last_transaction_hex = codecs.encode(last_transaction, 'hex_codec') + if PY3: + # doctests and maybe clients expect a str, not bytes + last_transaction_hex = str(last_transaction_hex, 'ascii') + status['last-transaction'] = last_transaction_hex + return status + + def ruok(self): + return dict((storage_id, self.server_status(storage_id)) + for storage_id in self.storages) + +class StubTimeoutThread(object): + + def begin(self, client): + pass + + def end(self, client): + pass + + isAlive = lambda self: 'stub' + + +class TimeoutThread(threading.Thread): + """Monitors transaction progress and generates timeouts.""" + + # There is one TimeoutThread per storage, because there's one + # transaction lock per storage. + + def __init__(self, timeout): + threading.Thread.__init__(self) + self.setName("TimeoutThread") + self.setDaemon(1) + self._timeout = timeout + self._client = None + self._deadline = None + self._cond = threading.Condition() # Protects _client and _deadline + + def begin(self, client): + # Called from the restart code the "main" thread, whenever the + # storage lock is being acquired. + with self._cond: + assert self._client is None + self._client = client + self._deadline = time.time() + self._timeout + self._cond.notify() + + def end(self, client): + # Called from the "main" thread whenever the storage lock is + # being released. 
+ with self._cond: + assert self._client is not None + assert self._client is client + self._client = None + self._deadline = None + + def run(self): + # Code running in the thread. + while 1: + with self._cond: + while self._deadline is None: + self._cond.wait() + howlong = self._deadline - time.time() + if howlong <= 0: + # Prevent reporting timeout more than once + self._deadline = None + client = self._client # For the howlong <= 0 branch below + + if howlong <= 0: + client.log("Transaction timeout after %s seconds" % + self._timeout, logging.CRITICAL) + try: + client.call_soon_threadsafe(client.connection.close) + except: + client.log("Timeout failure", logging.CRITICAL, + exc_info=sys.exc_info()) + self.end(client) + else: + time.sleep(howlong) + + +def run_in_thread(method, *args): + t = SlowMethodThread(method, args) + t.start() + return t.delay + + +class SlowMethodThread(threading.Thread): + """Thread to run potentially slow storage methods. + + Clients can use the delay attribute to access the MTDelay object + used to send a zrpc response at the right time. + """ + + # Some storage methods can take a long time to complete. If we + # run these methods in response to an I/O event, they + # will block all other server activity until they complete. To + # avoid blocking, we spawn a separate thread, return an MTDelay() + # object, and have the thread reply() when it finishes. 
+ + def __init__(self, method, args): + threading.Thread.__init__(self) + self.setName("SlowMethodThread for %s" % method.__name__) + self._method = method + self._args = args + self.delay = MTDelay() + + def run(self): + try: + result = self._method(*self._args) + except (SystemExit, KeyboardInterrupt): + raise + except Exception: + self.delay.error(sys.exc_info()) + else: + self.delay.reply(result) + + +def _addr_label(addr): + if isinstance(addr, six.binary_type): + return addr.decode('ascii') + if isinstance(addr, six.string_types): + return addr + else: + host, port = addr + return str(host) + ":" + str(port) + +class CommitLog(object): + + def __init__(self): + self.file = tempfile.TemporaryFile(suffix=".comit-log") + self.pickler = Pickler(self.file, 1) + self.pickler.fast = 1 + self.stores = 0 + + def size(self): + return self.file.tell() + + def delete(self, oid, serial): + self.pickler.dump(('_delete', (oid, serial))) + self.stores += 1 + + def checkread(self, oid, serial): + self.pickler.dump(('_checkread', (oid, serial))) + self.stores += 1 + + def store(self, oid, serial, data): + self.pickler.dump(('_store', (oid, serial, data))) + self.stores += 1 + + def restore(self, oid, serial, data, prev_txn): + self.pickler.dump(('_restore', (oid, serial, data, prev_txn))) + self.stores += 1 + + def undo(self, transaction_id): + self.pickler.dump(('_undo', (transaction_id, ))) + self.stores += 1 + + def __iter__(self): + self.file.seek(0) + unpickler = Unpickler(self.file) + for i in range(self.stores): + yield unpickler.load() + + def close(self): + if self.file: + self.file.close() + self.file = None + +class ServerEvent(object): + + def __init__(self, server, **kw): + self.__dict__.update(kw) + self.server = server + +class Serving(ServerEvent): + pass + +class Closed(ServerEvent): + pass + +def never_resolve_conflict(oid, committedSerial, oldSerial, newpickle, + committedData=b''): + raise ConflictError(oid=oid, serials=(committedSerial, oldSerial), + 
data=newpickle) + +class LockManager(object): + + def __init__(self, storage_id, stats, timeout): + self.storage_id = storage_id + self.stats = stats + self.timeout = timeout + self.locked = None + self.waiting = {} # {ZEOStorage -> (func, delay)} + self._lock = RLock() + + def lock(self, zs, func): + """Call the given function with the commit lock. + + If we can get the lock right away, return the result of + calling the function. + + If we can't get the lock right away, return a delay + + The function must set ``locked`` on the zeo-storage to + indicate that the zeo-storage should be locked. Otherwise, + the lock isn't held pas the call. + """ + with self._lock: + if self._can_lock(zs): + self._locked(zs) + else: + if any(w for w in self.waiting if w is zs): + raise StorageTransactionError("Already voting (waiting)") + + delay = Delay() + self.waiting[zs] = (func, delay) + self._log_waiting( + zs, "(%r) queue lock: transactions waiting: %s") + + return delay + + try: + result = func() + except Exception: + self.release(zs) + raise + else: + if not zs.locked: + self.release(zs) + return result + + def _lock_waiting(self, zs): + waiting = None + with self._lock: + if self.locked is zs: + assert zs.locked + return + + if self._can_lock(zs): + waiting = self.waiting.pop(zs, None) + if waiting: + self._locked(zs) + + if waiting: + func, delay = waiting + try: + result = func() + except Exception: + delay.error(sys.exc_info()) + self.release(zs) + else: + delay.reply(result) + if not zs.locked: + self.release(zs) + + def release(self, zs): + with self._lock: + locked = self.locked + if locked is zs: + self._unlocked(zs) + + for zs in list(self.waiting): + zs.call_soon_threadsafe(self._lock_waiting, zs) + + else: + if self.waiting.pop(zs, None): + self._log_waiting( + zs, "(%r) dequeue lock: transactions waiting: %s") + + def _log_waiting(self, zs, message): + l = len(self.waiting) + zs.log(message % (self.storage_id, l), + logging.CRITICAL if l > 9 else ( + 
logging.WARNING if l > 3 else logging.DEBUG) + ) + + def _can_lock(self, zs): + locked = self.locked + + if locked is zs: + raise StorageTransactionError("Already voting (locked)") + + if locked is not None: + if not locked.connected: + locked.log("Still locked after disconnected. Unlocking.", + logging.CRITICAL) + if locked.transaction: + locked.storage.tpc_abort(locked.transaction) + + self._unlocked(locked) + locked = None + + # Note that locked.locked may not be true here, because + # .lock may be set in the lock callback, but may not have + # been set yet. This aspect of the API may need more + # thought. :/ + + return locked is None + + def _locked(self, zs): + self.locked = zs + self.stats.lock_time = time.time() + self._log_waiting(zs, "(%r) lock: transactions waiting: %s") + self.timeout.begin(zs) + return True + + def _unlocked(self, zs): + assert self.locked is zs + self.timeout.end(zs) + self.locked = self.stats.lock_time = None + zs.locked = False + self._log_waiting(zs, "(%r) unlock: transactions waiting: %s") diff --git a/thesisenv/lib/python3.6/site-packages/ZEO/TransactionBuffer.py b/thesisenv/lib/python3.6/site-packages/ZEO/TransactionBuffer.py new file mode 100644 index 0000000..f7e4b03 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO/TransactionBuffer.py @@ -0,0 +1,108 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## +"""A TransactionBuffer store transaction updates until commit or abort. + +A transaction may generate enough data that it is not practical to +always hold pending updates in memory. Instead, a TransactionBuffer +is used to store the data until a commit or abort. +""" + +# A faster implementation might store trans data in memory until it +# reaches a certain size. + +import os +import tempfile +import ZODB.blob + +from ZEO._compat import Pickler, Unpickler + +class TransactionBuffer(object): + + # The TransactionBuffer is used by client storage to hold update + # data until the tpc_finish(). It is only used by a single + # thread, because only one thread can be in the two-phase commit + # at one time. + + def __init__(self, connection_generation): + self.connection_generation = connection_generation + self.file = tempfile.TemporaryFile(suffix=".tbuf") + self.count = 0 + self.size = 0 + self.blobs = [] + # It's safe to use a fast pickler because the only objects + # stored are builtin types -- strings or None. 
+ self.pickler = Pickler(self.file, 1) + self.pickler.fast = 1 + self.server_resolved = set() # {oid} + self.client_resolved = {} # {oid -> buffer_record_number} + self.exception = None + + def close(self): + self.file.close() + + def store(self, oid, data): + """Store oid, version, data for later retrieval""" + self.pickler.dump((oid, data)) + self.count += 1 + # Estimate per-record cache size + self.size = self.size + (data and len(data) or 0) + 31 + + def resolve(self, oid, data): + """Record client-resolved data + """ + self.store(oid, data) + self.client_resolved[oid] = self.count - 1 + + def server_resolve(self, oid): + self.server_resolved.add(oid) + + def storeBlob(self, oid, blobfilename): + self.blobs.append((oid, blobfilename)) + + def __iter__(self): + self.file.seek(0) + unpickler = Unpickler(self.file) + server_resolved = self.server_resolved + client_resolved = self.client_resolved + + # Gaaaa, this is awkward. There can be entries in serials that + # aren't in the buffer, because undo. Entries can be repeated + # in the buffer, because ZODB. (Maybe this is a bug now, but + # it may be a feature later. 
+ + seen = set() + for i in range(self.count): + oid, data = unpickler.load() + if client_resolved.get(oid, i) == i: + seen.add(oid) + yield oid, data, oid in server_resolved + + # We may have leftover oids because undo + for oid in server_resolved: + if oid not in seen: + yield oid, None, True + + + # Support ZEO4: + + def serialnos(self, args): + for oid in args: + if isinstance(oid, bytes): + self.server_resolved.add(oid) + else: + oid, serial = oid + if isinstance(serial, Exception): + self.exception = serial + elif serial == b'rs': + self.server_resolved.add(oid) diff --git a/thesisenv/lib/python3.6/site-packages/ZEO/__init__.py b/thesisenv/lib/python3.6/site-packages/ZEO/__init__.py new file mode 100644 index 0000000..f0796e4 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO/__init__.py @@ -0,0 +1,90 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## +"""ZEO -- Zope Enterprise Objects. + +See the file README.txt in this directory for an overview. 
+ +ZEO is now part of ZODB; ZODB's home on the web is + + http://wiki.zope.org/ZODB + +""" + +def client(*args, **kw): + import ZEO.ClientStorage + return ZEO.ClientStorage.ClientStorage(*args, **kw) + +def DB(*args, **kw): + s = client(*args, **kw) + try: + import ZODB + return ZODB.DB(s) + except Exception: + s.close() + raise + +def connection(*args, **kw): + db = DB(*args, **kw) + try: + return db.open_then_close_db_when_connection_closes() + except Exception: + db.close() + raise + +def server(path=None, blob_dir=None, storage_conf=None, zeo_conf=None, + port=0, threaded=True, **kw): + """Convenience function to start a server for interactive exploration + + This fuction starts a ZEO server, given a storage configuration or + a file-storage path and blob directory. You can also supply a ZEO + configuration string or a port. If neither a ZEO port or + configuration is supplied, a port is chosen randomly. + + The server address and a stop function are returned. The address + can be passed to ZEO.ClientStorage.ClientStorage or ZEO.DB to + create a client to the server. The stop function can be called + without arguments to stop the server. + + Arguments: + + path + A file-storage path. This argument is ignored if a storage + configuration is supplied. + + blob_dir + A blob directory path. This argument is ignored if a storage + configuration is supplied. + + storage_conf + A storage configuration string. If none is supplied, then at + least a file-storage path must be supplied and the storage + configuration will be generated from the file-storage path and + the blob directory. + + zeo_conf + A ZEO server configuration string. + + port + If no ZEO configuration is supplied, the one will be computed + from the port. If no port is supplied, one will be chosedn + dynamically. 
+ + """ + import ZEO._forker as forker + if storage_conf is None and path is None: + storage_conf = '\n' + + return forker.start_zeo_server( + storage_conf, zeo_conf, port, keep=True, path=path, + blob_dir=blob_dir, suicide=False, threaded=threaded, **kw) diff --git a/thesisenv/lib/python3.6/site-packages/ZEO/_compat.py b/thesisenv/lib/python3.6/site-packages/ZEO/_compat.py new file mode 100644 index 0000000..3030723 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO/_compat.py @@ -0,0 +1,67 @@ +############################################################################## +# +# Copyright (c) 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## +"""Python versions compatiblity +""" +import sys +import platform + +PY3 = sys.version_info[0] >= 3 +PY32 = sys.version_info[:2] == (3, 2) +PYPY = getattr(platform, 'python_implementation', lambda: None)() == 'PyPy' +WIN = sys.platform.startswith('win') + +if PY3: + from zodbpickle.pickle import Pickler, Unpickler as _Unpickler, dump, dumps, loads + class Unpickler(_Unpickler): + # Py3: Python 3 doesn't allow assignments to find_global, + # instead, find_class can be overridden + + find_global = None + + def find_class(self, modulename, name): + if self.find_global is None: + return super(Unpickler, self).find_class(modulename, name) + return self.find_global(modulename, name) +else: + try: + import zodbpickle.fastpickle as cPickle + except ImportError: + import zodbpickle.pickle as cPickle + Pickler = cPickle.Pickler + 
Unpickler = cPickle.Unpickler + dump = cPickle.dump + dumps = cPickle.dumps + loads = cPickle.loads + +# String and Bytes IO +from ZODB._compat import BytesIO + +if PY3: + + import _thread as thread + if PY32: + from threading import _get_ident as get_ident + else: + from threading import get_ident + + +else: + + import thread + from thread import get_ident + +try: + from cStringIO import StringIO +except: + from io import StringIO diff --git a/thesisenv/lib/python3.6/site-packages/ZEO/_forker.py b/thesisenv/lib/python3.6/site-packages/ZEO/_forker.py new file mode 100644 index 0000000..b941025 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO/_forker.py @@ -0,0 +1,292 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## +"""Library for forking storage server and connecting client storage""" +from __future__ import print_function +import gc +import os +import sys +import multiprocessing +import logging +import tempfile + +from six.moves.queue import Empty +import six + +from ZEO._compat import StringIO + +logger = logging.getLogger('ZEO.tests.forker') + +DEBUG = os.environ.get('ZEO_TEST_SERVER_DEBUG') + +ZEO4_SERVER = os.environ.get('ZEO4_SERVER') + +class ZEOConfig(object): + """Class to generate ZEO configuration file. 
""" + + def __init__(self, addr, log=None, **options): + if log: + if isinstance(log, str): + self.logpath = log + elif isinstance(addr, str): + self.logpath = addr+'.log' + else: + self.logpath = 'server.log' + + if not isinstance(addr, six.string_types): + addr = '%s:%s' % addr + + self.log = log + self.address = addr + self.read_only = None + self.loglevel = 'INFO' + self.__dict__.update(options) + + def dump(self, f): + print("", file=f) + print("address " + self.address, file=f) + if self.read_only is not None: + print("read-only", self.read_only and "true" or "false", file=f) + + for name in ( + 'invalidation_queue_size', 'invalidation_age', + 'transaction_timeout', 'pid_filename', 'msgpack', + 'ssl_certificate', 'ssl_key', 'client_conflict_resolution', + ): + v = getattr(self, name, None) + if v: + print(name.replace('_', '-'), v, file=f) + + print("", file=f) + + if self.log: + print(""" + + level %s + + path %s + + + """ % (self.loglevel, self.logpath), file=f) + + def __str__(self): + f = StringIO() + self.dump(f) + return f.getvalue() + + +def runner(config, qin, qout, timeout=None, + debug=False, name=None, + keep=False, protocol=None): + + if debug or DEBUG: + debug_logging() + + old_protocol = None + if protocol: + import ZEO.asyncio.server + old_protocol = ZEO.asyncio.server.best_protocol_version + ZEO.asyncio.server.best_protocol_version = protocol + old_protocols = ZEO.asyncio.server.ServerProtocol.protocols + ZEO.asyncio.server.ServerProtocol.protocols = tuple(sorted( + set(old_protocols) | set([protocol]) + )) + + try: + import threading + + if ZEO4_SERVER: + # XXX: test dependency. In practice this is + # probably ok + from ZEO.tests.ZEO4 import runzeo + else: + from . 
import runzeo + + options = runzeo.ZEOOptions() + options.realize(['-C', config]) + server = runzeo.ZEOServer(options) + globals()[(name if name else 'last') + '_server'] = server + server.open_storages() + server.clear_socket() + server.create_server() + logger.debug('SERVER CREATED') + if ZEO4_SERVER: + qout.put(server.server.addr) + else: + qout.put(server.server.acceptor.addr) + logger.debug('ADDRESS SENT') + thread = threading.Thread( + target=server.server.loop, kwargs=dict(timeout=.2), + name=(None if name is None else name + '-server'), + ) + thread.setDaemon(True) + thread.start() + os.remove(config) + + try: + qin.get(timeout=timeout) # wait for shutdown + except Empty: + pass + server.server.close() + thread.join(3) + + if not keep: + # Try to cleanup storage files + for storage in server.server.storages.values(): + try: + storage.cleanup() + except AttributeError: + pass + + qout.put(thread.is_alive()) + + except Exception: + logger.exception("In server thread") + + finally: + if old_protocol: + ZEO.asyncio.server.best_protocol_version = old_protocol + ZEO.asyncio.server.ServerProtocol.protocols = old_protocols + +def stop_runner(thread, config, qin, qout, stop_timeout=19, pid=None): + qin.put('stop') + try: + dirty = qout.get(timeout=stop_timeout) + except Empty: + print("WARNING Couldn't stop server", file=sys.stderr) + if hasattr(thread, 'terminate'): + thread.terminate() + os.waitpid(thread.pid, 0) + else: + if dirty: + print("WARNING SERVER DIDN'T STOP CLEANLY", file=sys.stderr) + + # The runner thread didn't stop. 
If it was a process, + # give it some time to exit + if hasattr(thread, 'pid') and thread.pid: + os.waitpid(thread.pid, 0) + + thread.join(stop_timeout) + + gc.collect() + +def start_zeo_server(storage_conf=None, zeo_conf=None, port=None, keep=False, + path='Data.fs', protocol=None, blob_dir=None, + suicide=True, debug=False, + threaded=False, start_timeout=33, name=None, log=None, + show_config=False): + """Start a ZEO server in a separate process. + + Takes two positional arguments a string containing the storage conf + and a ZEOConfig object. + + Returns the ZEO address, the test server address, the pid, and the path + to the config file. + """ + + if not storage_conf: + storage_conf = '\npath %s\n' % path + + if blob_dir: + storage_conf = '\nblob-dir %s\n%s\n' % ( + blob_dir, storage_conf) + + if zeo_conf is None or isinstance(zeo_conf, dict): + if port is None: + port = 0 + + if isinstance(port, int): + addr = '127.0.0.1', port + else: + addr = port + + z = ZEOConfig(addr, log=log) + if zeo_conf: + z.__dict__.update(zeo_conf) + zeo_conf = str(z) + + zeo_conf = str(zeo_conf) + '\n\n' + storage_conf + if show_config: + print(zeo_conf) + + # Store the config info in a temp file. 
+ fd, tmpfile = tempfile.mkstemp(".conf", prefix='ZEO_forker', dir=os.getcwd()) + with os.fdopen(fd, 'w') as fp: + fp.write(zeo_conf) + + if threaded: + from threading import Thread + from six.moves.queue import Queue + else: + from multiprocessing import Process as Thread + Queue = ThreadlessQueue + + qin = Queue() + qout = Queue() + thread = Thread( + target=runner, + args=[tmpfile, qin, qout, 999 if suicide else None], + kwargs=dict(debug=debug, name=name, protocol=protocol, keep=keep), + name=(None if name is None else name + '-server-runner'), + ) + thread.daemon = True + thread.start() + try: + addr = qout.get(timeout=start_timeout) + except Exception: + whine("SERVER FAILED TO START") + if thread.is_alive(): + whine("Server thread/process is still running") + elif not threaded: + whine("Exit status", thread.exitcode) + raise + + def stop(stop_timeout=99): + stop_runner(thread, tmpfile, qin, qout, stop_timeout) + + return addr, stop + + +def shutdown_zeo_server(stop): + stop() + + +def debug_logging(logger='ZEO', stream='stderr', level=logging.DEBUG): + handler = logging.StreamHandler(getattr(sys, stream)) + logger = logging.getLogger(logger) + logger.addHandler(handler) + logger.setLevel(level) + + def stop(): + logger.removeHandler(handler) + logger.setLevel(logging.NOTSET) + + return stop + +def whine(*message): + print(*message, file=sys.stderr) + sys.stderr.flush() + +class ThreadlessQueue(object): + + def __init__(self): + self.cin, self.cout = multiprocessing.Pipe(False) + + def put(self, v): + self.cout.send(v) + + def get(self, timeout=None): + if self.cin.poll(timeout): + return self.cin.recv() + else: + raise Empty() diff --git a/thesisenv/lib/python3.6/site-packages/ZEO/asyncio/README.rst b/thesisenv/lib/python3.6/site-packages/ZEO/asyncio/README.rst new file mode 100644 index 0000000..1c8e875 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO/asyncio/README.rst @@ -0,0 +1,78 @@ +================================ +asyncio-based 
networking for ZEO +================================ + +This package provides the networking interface for ZEO. It provides a +somewhat RPC-like API. + +Notes +===== + +Sending data immediately: ayncio vs asyncore +-------------------------------------------- + +The previous ZEO networking implementation used the ``asyncore`` library. +When writing with asyncore, writes were done only from the event loop. +This meant that when sending data, code would have to "wake up" the +event loop, typically after adding data to some sort of output buffer. + +Asyncio takes an entirely different and saner approach. When an +application wants to send data, it writes to a transport. All +interactions with a transport (in a correct application) are from the +same thread, which is also the thread running any event loop. +Transports are always either idle or sending data. When idle, the +transport writes to the outout socket immediately. If not all data +isn't sent, then it buffers it and becomes sending. If a transport is +sending, then we know that the socket isn't ready for more data, so +``write`` can just buffer the data. There's no point in waking up the +event loop, because the socket will do so when it's ready for more +data. + +An exception to the paragraph above occurs when operations cross +threads, as occures for most client operations and when a transaction +commits on the server and results have to be sent to other clients. In +these cases, a call_soon_threadsafe method is used which queues an +operation and has to wake up an event loop to process it. + +Server threading +---------------- + +There are currently two server implementations, an implementation that +used a thread per client (and a thread to listen for connections), +``ZEO.asyncio.mtacceptor.Acceptor``, and an implementation that uses a +single networking thread, ``ZEO.asyncio.server.Acceptor``. The +implementation is selected by changing an import in +``ZEO.StorageServer``. 
The currently-used implementation is +``ZEO.asyncio.server.Acceptor``, although this sentance is likely to +rot, so check the import to be sure. (Maybe this should be configurable.) + +ZEO switched to a multi-threaded implementation several years ago +because it was found to improve performance for large databases using +magnetic disks. Because client threads are always working on behalf of +a single client, there's not really an issue with making blocking +calls, such as executing slow I/O operations. + +Initially, the asyncio-based implementation used a multi-threaded +server. A simple thread accepted connections and handed accepted +sockets to ``create_connection``. This became a problem when SSL was +added because ``create_connection`` sets up SSL conections as client +connections, and doesn't provide an option to create server +connections. + +In response, I created an ``asyncio.Server``-based implementation. +This required using a single thread. This was a pretty trivial +change, however, it led to the tests becoming unstable to the point +that it was impossible to run all tests without some failing. One +test was broken due to a ``asyncio.Server`` `bug +`_. It's unclear whether the test +instability is due to ``asyncio.Server`` problems or due to latent +test (or ZEO) bugs, but even after beating the tests mostly into +submission, tests failures are more likely when using +``asyncio.Server``. Beatings will continue. + +While fighting test failures using ``asyncio.Server``, the +multi-threaded implementation was updated to use a monkey patch to +allow it to create SSL server connections. Aside from the real risk of a +monkey patch, this works very well. + +Both implementations seem to perform about the same. 
diff --git a/thesisenv/lib/python3.6/site-packages/ZEO/asyncio/__init__.py b/thesisenv/lib/python3.6/site-packages/ZEO/asyncio/__init__.py new file mode 100644 index 0000000..792d600 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO/asyncio/__init__.py @@ -0,0 +1 @@ +# diff --git a/thesisenv/lib/python3.6/site-packages/ZEO/asyncio/base.py b/thesisenv/lib/python3.6/site-packages/ZEO/asyncio/base.py new file mode 100644 index 0000000..3d85618 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO/asyncio/base.py @@ -0,0 +1,167 @@ +from .._compat import PY3 + +if PY3: + import asyncio +else: + import trollius as asyncio + +import logging +import socket +from struct import unpack +import sys + +logger = logging.getLogger(__name__) + +INET_FAMILIES = socket.AF_INET, socket.AF_INET6 + +class Protocol(asyncio.Protocol): + """asyncio low-level ZEO base interface + """ + + # All of the code in this class runs in a single dedicated + # thread. Thus, we can mostly avoid worrying about interleaved + # operations. + + # One place where special care was required was in cache setup on + # connect. See finish connect below. 
+ + transport = protocol_version = None + + def __init__(self, loop, addr): + self.loop = loop + self.addr = addr + self.input = [] # Input buffer when assembling messages + self.output = [] # Output buffer when paused + self.paused = [] # Paused indicator, mutable to avoid attr lookup + + # Handle the first message, the protocol handshake, differently + self.message_received = self.first_message_received + + def __repr__(self): + return self.name + + closed = False + def close(self): + if not self.closed: + self.closed = True + if self.transport is not None: + self.transport.close() + + def connection_made(self, transport): + logger.info("Connected %s", self) + + + if sys.version_info < (3, 6): + sock = transport.get_extra_info('socket') + if sock is not None and sock.family in INET_FAMILIES: + # See https://bugs.python.org/issue27456 :( + sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, True) + self.transport = transport + + paused = self.paused + output = self.output + append = output.append + writelines = transport.writelines + from struct import pack + + def write(message): + if paused: + append(message) + else: + writelines((pack(">I", len(message)), message)) + + self._write = write + + def writeit(data): + # Note, don't worry about combining messages. Iters + # will be used with blobs, in which case, the individual + # messages will be big to begin with. + data = iter(data) + for message in data: + writelines((pack(">I", len(message)), message)) + if paused: + append(data) + break + + self._writeit = writeit + + got = 0 + want = 4 + getting_size = True + def data_received(self, data): + + # Low-level input handler collects data into sized messages. + + # Note that the logic below assume that when new data pushes + # us over what we want, we process it in one call until we + # need more, because we assume that excess data is all in the + # last item of self.input. This is why the exception handling + # in the while loop is critical. 
Without it, an exception + # might cause us to exit before processing all of the data we + # should, when then causes the logic to be broken in + # subsequent calls. + + self.got += len(data) + self.input.append(data) + while self.got >= self.want: + try: + extra = self.got - self.want + if extra == 0: + collected = b''.join(self.input) + self.input = [] + else: + input = self.input + self.input = [input[-1][-extra:]] + input[-1] = input[-1][:-extra] + collected = b''.join(input) + + self.got = extra + + if self.getting_size: + # we were recieving the message size + assert self.want == 4 + self.want = unpack(">I", collected)[0] + self.getting_size = False + else: + self.want = 4 + self.getting_size = True + self.message_received(collected) + except Exception: + logger.exception("data_received %s %s %s", + self.want, self.got, self.getting_size) + + def first_message_received(self, protocol_version): + # Handler for first/handshake message, set up in __init__ + del self.message_received # use default handler from here on + self.finish_connect(protocol_version) + + def call_async(self, method, args): + self._write(self.encode(0, True, method, args)) + + def call_async_iter(self, it): + self._writeit(self.encode(0, True, method, args) + for method, args in it) + + def pause_writing(self): + self.paused.append(1) + + def resume_writing(self): + paused = self.paused + del paused[:] + output = self.output + writelines = self.transport.writelines + from struct import pack + while output and not paused: + message = output.pop(0) + if isinstance(message, bytes): + writelines((pack(">I", len(message)), message)) + else: + data = message + for message in data: + writelines((pack(">I", len(message)), message)) + if paused: # paused again. Put iter back. 
+ output.insert(0, data) + break + + def get_peername(self): + return self.transport.get_extra_info('peername') diff --git a/thesisenv/lib/python3.6/site-packages/ZEO/asyncio/client.py b/thesisenv/lib/python3.6/site-packages/ZEO/asyncio/client.py new file mode 100644 index 0000000..38c6651 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO/asyncio/client.py @@ -0,0 +1,910 @@ +from ZEO.Exceptions import ClientDisconnected, ServerException +import concurrent.futures +import functools +import logging +import random +import threading + +import ZODB.event +import ZODB.POSException + +import ZEO.Exceptions +import ZEO.interfaces + +from . import base +from .compat import asyncio, new_event_loop +from .marshal import encoder, decoder + +logger = logging.getLogger(__name__) + +Fallback = object() + +local_random = random.Random() # use separate generator to facilitate tests + +def future_generator(func): + """Decorates a generator that generates futures + """ + + @functools.wraps(func) + def call_generator(*args, **kw): + gen = func(*args, **kw) + try: + f = next(gen) + except StopIteration: + gen.close() + else: + def store(gen, future): + @future.add_done_callback + def _(future): + try: + try: + result = future.result() + except Exception as exc: + f = gen.throw(exc) + else: + f = gen.send(result) + except StopIteration: + gen.close() + else: + store(gen, f) + + store(gen, f) + + return call_generator + +class Protocol(base.Protocol): + """asyncio low-level ZEO client interface + """ + + # All of the code in this class runs in a single dedicated + # thread. Thus, we can mostly avoid worrying about interleaved + # operations. + + # One place where special care was required was in cache setup on + # connect. See finish connect below. 
+ + protocols = b'309', b'310', b'3101', b'4', b'5' + + def __init__(self, loop, + addr, client, storage_key, read_only, connect_poll=1, + heartbeat_interval=60, ssl=None, ssl_server_hostname=None, + credentials=None): + """Create a client interface + + addr is either a host,port tuple or a string file name. + + client is a ClientStorage. It must be thread safe. + + cache is a ZEO.interfaces.IClientCache. + """ + super(Protocol, self).__init__(loop, addr) + self.storage_key = storage_key + self.read_only = read_only + self.name = "%s(%r, %r, %r)" % ( + self.__class__.__name__, addr, storage_key, read_only) + self.client = client + self.connect_poll = connect_poll + self.heartbeat_interval = heartbeat_interval + self.futures = {} # { message_id -> future } + self.ssl = ssl + self.ssl_server_hostname = ssl_server_hostname + self.credentials = credentials + + self.connect() + + def close(self): + if not self.closed: + self.closed = True + self._connecting.cancel() + if self.transport is not None: + self.transport.close() + for future in self.pop_futures(): + future.set_exception(ClientDisconnected("Closed")) + + def pop_futures(self): + # Remove and return futures from self.futures. The caller + # will finalize them in some way and callbacks may modify + # self.futures. 
+ futures = list(self.futures.values()) + self.futures.clear() + return futures + + def protocol_factory(self): + return self + + def connect(self): + if isinstance(self.addr, tuple): + host, port = self.addr + cr = self.loop.create_connection( + self.protocol_factory, host or '127.0.0.1', port, + ssl=self.ssl, server_hostname=self.ssl_server_hostname) + else: + cr = self.loop.create_unix_connection( + self.protocol_factory, self.addr, ssl=self.ssl) + + self._connecting = cr = asyncio.ensure_future(cr, loop=self.loop) + + @cr.add_done_callback + def done_connecting(future): + if future.exception() is not None: + logger.info("Connection to %r failed, retrying, %s", + self.addr, future.exception()) + # keep trying + if not self.closed: + self.loop.call_later( + self.connect_poll + local_random.random(), + self.connect, + ) + + def connection_made(self, transport): + super(Protocol, self).connection_made(transport) + self.heartbeat(write=False) + + def connection_lost(self, exc): + logger.debug('connection_lost %r', exc) + self.heartbeat_handle.cancel() + if self.closed: + for f in self.pop_futures(): + f.cancel() + else: + # We have to be careful processing the futures, because + # exception callbacks might modufy them. + for f in self.pop_futures(): + f.set_exception(ClientDisconnected(exc or 'connection lost')) + self.closed = True + self.client.disconnected(self) + + @future_generator + def finish_connect(self, protocol_version): + # The future implementation we use differs from + # asyncio.Future in that callbacks are called immediately, + # rather than using the loops call_soon. We want to avoid a + # race between invalidations and cache initialization. In + # particular, after getting a response from lastTransaction or + # getInvalidations, we want to make sure we set the cache's + # lastTid before processing (and possibly missing) subsequent + # invalidations. 
+ + version = min(protocol_version[1:], self.protocols[-1]) + if version not in self.protocols: + self.client.register_failed( + self, ZEO.Exceptions.ProtocolError(protocol_version)) + return + + self.protocol_version = protocol_version[:1] + version + self.encode = encoder(protocol_version) + self.decode = decoder(protocol_version) + self.heartbeat_bytes = self.encode(-1, 0, '.reply', None) + + self._write(self.protocol_version) + + credentials = (self.credentials,) if self.credentials else () + + try: + try: + server_tid = yield self.fut( + 'register', self.storage_key, + self.read_only if self.read_only is not Fallback else False, + *credentials) + except ZODB.POSException.ReadOnlyError: + if self.read_only is Fallback: + self.read_only = True + server_tid = yield self.fut( + 'register', self.storage_key, True, *credentials) + else: + raise + else: + if self.read_only is Fallback: + self.read_only = False + except Exception as exc: + self.client.register_failed(self, exc) + else: + self.client.registered(self, server_tid) + + exception_type_type = type(Exception) + def message_received(self, data): + msgid, async_, name, args = self.decode(data) + if name == '.reply': + future = self.futures.pop(msgid) + if async_: # ZEO 5 exception + class_, args = args + factory = exc_factories.get(class_) + if factory: + exc = factory(class_, args) + if not isinstance(exc, unlogged_exceptions): + logger.error("%s from server: %s:%s", + self.name, class_, args) + else: + exc = ServerException(class_, args) + future.set_exception(exc) + elif (isinstance(args, tuple) and len(args) > 1 and + type(args[0]) == self.exception_type_type and + issubclass(args[0], Exception) + ): + if not issubclass(args[0], unlogged_exceptions): + logger.error("%s from server: %s.%s:%s", + self.name, + args[0].__module__, + args[0].__name__, + args[1]) + future.set_exception(args[1]) + else: + future.set_result(args) + else: + assert async_ # clients only get async calls + if name in 
self.client_methods: + getattr(self.client, name)(*args) + else: + raise AttributeError(name) + + message_id = 0 + def call(self, future, method, args): + self.message_id += 1 + self.futures[self.message_id] = future + self._write(self.encode(self.message_id, False, method, args)) + return future + + def fut(self, method, *args): + return self.call(Fut(), method, args) + + def load_before(self, oid, tid): + # Special-case loadBefore, so we collapse outstanding requests + message_id = (oid, tid) + future = self.futures.get(message_id) + if future is None: + future = asyncio.Future(loop=self.loop) + self.futures[message_id] = future + self._write( + self.encode(message_id, False, 'loadBefore', (oid, tid))) + return future + + # Methods called by the server. + # WARNING WARNING we can't call methods that call back to us + # syncronously, as that would lead to DEADLOCK! + + client_methods = ( + 'invalidateTransaction', 'serialnos', 'info', + 'receiveBlobStart', 'receiveBlobChunk', 'receiveBlobStop', + # plus: notify_connected, notify_disconnected + ) + client_delegated = client_methods[2:] + + def heartbeat(self, write=True): + if write: + self._write(self.heartbeat_bytes) + self.heartbeat_handle = self.loop.call_later( + self.heartbeat_interval, self.heartbeat) + +def create_Exception(class_, args): + return exc_classes[class_](*args) + +def create_ConflictError(class_, args): + exc = exc_classes[class_]( + message = args['message'], + oid = args['oid'], + serials = args['serials'], + ) + exc.class_name = args.get('class_name') + return exc + +def create_BTreesConflictError(class_, args): + return ZODB.POSException.BTreesConflictError( + p1 = args['p1'], + p2 = args['p2'], + p3 = args['p3'], + reason = args['reason'], + ) + +def create_MultipleUndoErrors(class_, args): + return ZODB.POSException.MultipleUndoErrors(args['_errs']) + +exc_classes = { + 'builtins.KeyError': KeyError, + 'builtins.TypeError': TypeError, + 'exceptions.KeyError': KeyError, + 
'exceptions.TypeError': TypeError, + 'ZODB.POSException.ConflictError': ZODB.POSException.ConflictError, + 'ZODB.POSException.POSKeyError': ZODB.POSException.POSKeyError, + 'ZODB.POSException.ReadConflictError': ZODB.POSException.ReadConflictError, + 'ZODB.POSException.ReadOnlyError': ZODB.POSException.ReadOnlyError, + 'ZODB.POSException.StorageTransactionError': + ZODB.POSException.StorageTransactionError, + } +exc_factories = { + 'builtins.KeyError': create_Exception, + 'builtins.TypeError': create_Exception, + 'exceptions.KeyError': create_Exception, + 'exceptions.TypeError': create_Exception, + 'ZODB.POSException.BTreesConflictError': create_BTreesConflictError, + 'ZODB.POSException.ConflictError': create_ConflictError, + 'ZODB.POSException.MultipleUndoErrors': create_MultipleUndoErrors, + 'ZODB.POSException.POSKeyError': create_Exception, + 'ZODB.POSException.ReadConflictError': create_ConflictError, + 'ZODB.POSException.ReadOnlyError': create_Exception, + 'ZODB.POSException.StorageTransactionError': create_Exception, + } +unlogged_exceptions = (ZODB.POSException.POSKeyError, + ZODB.POSException.ConflictError) +class Client(object): + """asyncio low-level ZEO client interface + """ + + # All of the code in this class runs in a single dedicated + # thread. Thus, we can mostly avoid worrying about interleaved + # operations. + + # One place where special care was required was in cache setup on + # connect. + + protocol = None + ready = None # Tri-value: None=Never connected, True=connected, + # False=Disconnected + + def __init__(self, loop, + addrs, client, cache, storage_key, read_only, connect_poll, + register_failed_poll=9, + ssl=None, ssl_server_hostname=None, credentials=None): + """Create a client interface + + addr is either a host,port tuple or a string file name. + + client is a ClientStorage. It must be thread safe. + + cache is a ZEO.interfaces.IClientCache. 
+ """ + self.loop = loop + self.addrs = addrs + self.storage_key = storage_key + self.read_only = read_only + self.connect_poll = connect_poll + self.register_failed_poll = register_failed_poll + self.client = client + self.ssl = ssl + self.ssl_server_hostname = ssl_server_hostname + self.credentials = credentials + for name in Protocol.client_delegated: + setattr(self, name, getattr(client, name)) + self.cache = cache + self.protocols = () + self.disconnected(None) + + # Work around odd behavior of ZEO4 server. It may send + # invalidations for transactions later than the result of + # getInvalidations. While we support ZEO 4 servers, we'll + # need to keep an invalidation queue. :( + self.verify_invalidation_queue = [] + + def new_addrs(self, addrs): + self.addrs = addrs + if self.trying_to_connect(): + self.disconnected(None) + + def trying_to_connect(self): + """Return whether we're trying to connect + + Either because we're disconnected, or because we're connected + read-only, but want a writable connection if we can get one. 
+ """ + return (not self.ready or + self.is_read_only() and self.read_only is Fallback) + + closed = False + def close(self): + if not self.closed: + self.closed = True + self.ready = False + if self.protocol is not None: + self.protocol.close() + self.cache.close() + self._clear_protocols() + + def _clear_protocols(self, protocol=None): + for p in self.protocols: + if p is not protocol: + p.close() + self.protocols = () + + def disconnected(self, protocol=None): + logger.debug('disconnected %r %r', self, protocol) + if protocol is None or protocol is self.protocol: + if protocol is self.protocol and protocol is not None: + self.client.notify_disconnected() + if self.ready: + self.ready = False + self.connected = concurrent.futures.Future() + self.protocol = None + self._clear_protocols() + + if all(p.closed for p in self.protocols): + self.try_connecting() + + def upgrade(self, protocol): + self.ready = False + self.connected = concurrent.futures.Future() + self.protocol.close() + self.protocol = protocol + self._clear_protocols(protocol) + + def try_connecting(self): + logger.debug('try_connecting') + if not self.closed: + self.protocols = [ + Protocol(self.loop, addr, self, + self.storage_key, self.read_only, self.connect_poll, + ssl=self.ssl, + ssl_server_hostname=self.ssl_server_hostname, + credentials=self.credentials, + ) + for addr in self.addrs + ] + + def registered(self, protocol, server_tid): + if self.protocol is None: + self.protocol = protocol + if not (self.read_only is Fallback and protocol.read_only): + # We're happy with this protocol. Tell the others to + # stop trying. + self._clear_protocols(protocol) + self.verify(server_tid) + elif (self.read_only is Fallback and not protocol.read_only and + self.protocol.read_only): + self.upgrade(protocol) + self.verify(server_tid) + else: + protocol.close() # too late, we went home with another + + def register_failed(self, protocol, exc): + # A protocol failed registration. That's weird. 
If they've all + # failed, we should try again in a bit. + if protocol is not self: + protocol.close() + logger.exception("Registration or cache validation failed, %s", exc) + if (self.protocol is None and not + any(not p.closed for p in self.protocols) + ): + self.loop.call_later( + self.register_failed_poll + local_random.random(), + self.try_connecting) + + verify_result = None # for tests + + @future_generator + def verify(self, server_tid): + self.verify_invalidation_queue = [] # See comment in init :( + + protocol = self.protocol + if server_tid is None: + server_tid = yield protocol.fut('lastTransaction') + + try: + cache = self.cache + if cache: + cache_tid = cache.getLastTid() + if not cache_tid: + self.verify_result = "Non-empty cache w/o tid" + logger.error("Non-empty cache w/o tid -- clearing") + cache.clear() + self.client.invalidateCache() + elif cache_tid > server_tid: + self.verify_result = "Cache newer than server" + logger.critical( + 'Client has seen newer transactions than server!') + raise AssertionError("Server behind client, %r < %r, %s", + server_tid, cache_tid, protocol) + elif cache_tid == server_tid: + self.verify_result = "Cache up to date" + else: + vdata = yield protocol.fut('getInvalidations', cache_tid) + if vdata: + self.verify_result = "quick verification" + server_tid, oids = vdata + for oid in oids: + cache.invalidate(oid, None) + self.client.invalidateTransaction(server_tid, oids) + else: + # cache is too old + self.verify_result = "cache too old, clearing" + try: + ZODB.event.notify( + ZEO.interfaces.StaleCache(self.client)) + except Exception: + logger.exception("sending StaleCache event") + logger.critical( + "%s dropping stale cache", + getattr(self.client, '__name__', ''), + ) + self.cache.clear() + self.client.invalidateCache() + else: + self.verify_result = "empty cache" + + except Exception as exc: + del self.protocol + self.register_failed(protocol, exc) + else: + # The cache is validated and the last tid we got from 
the server. + # Set ready so we apply any invalidations that follow. + # We've been ignoring them up to this point. + self.cache.setLastTid(server_tid) + self.ready = True + + # Gaaaa, ZEO 4 work around. See comment in __init__. :( + for tid, oids in self.verify_invalidation_queue: + if tid > server_tid: + self.invalidateTransaction(tid, oids) + self.verify_invalidation_queue = [] + + try: + info = yield protocol.fut('get_info') + except Exception as exc: + # This is weird. We were connected and verified our cache, but + # Now we errored getting info. + + # XXX Need a test fpr this. The lone before is what we + # had, but it's wrong. + self.register_failed(self, exc) + + else: + self.client.notify_connected(self, info) + self.connected.set_result(None) + + def get_peername(self): + return self.protocol.get_peername() + + def call_async_threadsafe(self, future, wait_ready, method, args): + if self.ready: + self.protocol.call_async(method, args) + future.set_result(None) + else: + future.set_exception(ClientDisconnected()) + + def call_async_from_same_thread(self, method, *args): + return self.protocol.call_async(method, args) + + def call_async_iter_threadsafe(self, future, wait_ready, it): + if self.ready: + self.protocol.call_async_iter(it) + future.set_result(None) + else: + future.set_exception(ClientDisconnected()) + + def _when_ready(self, func, result_future, *args): + + if self.ready is None: + # We started without waiting for a connection. 
(prob tests :( ) + result_future.set_exception(ClientDisconnected("never connected")) + else: + @self.connected.add_done_callback + def done(future): + e = future.exception() + if e is not None: + future.set_exception(e) + else: + if self.ready: + func(result_future, *args) + else: + self._when_ready(func, result_future, *args) + + def call_threadsafe(self, future, wait_ready, method, args): + if self.ready: + self.protocol.call(future, method, args) + elif wait_ready: + self._when_ready( + self.call_threadsafe, future, wait_ready, method, args) + else: + future.set_exception(ClientDisconnected()) + + # Special methods because they update the cache. + + @future_generator + def load_before_threadsafe(self, future, wait_ready, oid, tid): + data = self.cache.loadBefore(oid, tid) + if data is not None: + future.set_result(data) + elif self.ready: + try: + data = yield self.protocol.load_before(oid, tid) + except Exception as exc: + future.set_exception(exc) + else: + future.set_result(data) + if data: + data, start, end = data + self.cache.store(oid, start, end, data) + elif wait_ready: + self._when_ready( + self.load_before_threadsafe, future, wait_ready, oid, tid) + else: + future.set_exception(ClientDisconnected()) + + @future_generator + def _prefetch(self, oid, tid): + try: + data = yield self.protocol.load_before(oid, tid) + if data: + data, start, end = data + self.cache.store(oid, start, end, data) + except Exception: + logger.exception("prefetch %r %r" % (oid, tid)) + + def prefetch(self, future, wait_ready, oids, tid): + if self.ready: + for oid in oids: + if self.cache.loadBefore(oid, tid) is None: + self._prefetch(oid, tid) + + future.set_result(None) + else: + future.set_exception(ClientDisconnected()) + + @future_generator + def tpc_finish_threadsafe(self, future, wait_ready, tid, updates, f): + if self.ready: + try: + tid = yield self.protocol.fut('tpc_finish', tid) + cache = self.cache + for oid, data, resolved in updates: + cache.invalidate(oid, tid) + 
if data and not resolved: + cache.store(oid, tid, None, data) + cache.setLastTid(tid) + except Exception as exc: + future.set_exception(exc) + + # At this point, our cache is in an inconsistent + # state. We need to reconnect in hopes of + # recovering to a consistent state. + self.protocol.close() + self.disconnected(self.protocol) + else: + f(tid) + future.set_result(tid) + else: + future.set_exception(ClientDisconnected()) + + def close_threadsafe(self, future, _): + self.close() + future.set_result(None) + + def invalidateTransaction(self, tid, oids): + if self.ready: + for oid in oids: + self.cache.invalidate(oid, tid) + self.client.invalidateTransaction(tid, oids) + self.cache.setLastTid(tid) + else: + self.verify_invalidation_queue.append((tid, oids)) + + def serialnos(self, serials): + # Method called by ZEO4 storage servers. + + # Before delegating, check for errors (likely ConflictErrors) + # and invalidate the oids they're associated with. In the + # past, this was done by the client, but now we control the + # cache and this is our last chance, as the client won't call + # back into us when there's an error. 
+ for oid in serials: + if isinstance(oid, bytes): + self.cache.invalidate(oid, None) + else: + oid, serial = oid + if isinstance(serial, Exception) or serial == b'rs': + self.cache.invalidate(oid, None) + + self.client.serialnos(serials) + + @property + def protocol_version(self): + return self.protocol.protocol_version + + def is_read_only(self): + try: + protocol = self.protocol + except AttributeError: + return self.read_only + else: + if protocol is None: + return self.read_only + else: + return protocol.read_only + +class ClientRunner(object): + + def set_options(self, addrs, wrapper, cache, storage_key, read_only, + timeout=30, disconnect_poll=1, + **kwargs): + self.__args = (addrs, wrapper, cache, storage_key, read_only, + disconnect_poll) + self.__kwargs = kwargs + self.timeout = timeout + + def setup_delegation(self, loop): + self.loop = loop + self.client = Client(loop, *self.__args, **self.__kwargs) + self.call_threadsafe = self.client.call_threadsafe + self.call_async_threadsafe = self.client.call_async_threadsafe + + from concurrent.futures import Future + call_soon_threadsafe = loop.call_soon_threadsafe + + def call(meth, *args, **kw): + timeout = kw.pop('timeout', None) + assert not kw + + # Some explanation of the code below. + # Timeouts on Python 2 are expensive, so we try to avoid + # them if we're connected. The 3rd argument below is a + # wait flag. If false, and we're disconnected, we fail + # immediately. If that happens, then we try again with the + # wait flag set to True and wait with the default timeout. 
+ result = Future() + call_soon_threadsafe(meth, result, timeout is not None, *args) + try: + return self.wait_for_result(result, timeout) + except ClientDisconnected: + if timeout is None: + result = Future() + call_soon_threadsafe(meth, result, True, *args) + return self.wait_for_result(result, self.timeout) + else: + raise + + self.__call = call + + def wait_for_result(self, future, timeout): + try: + return future.result(timeout) + except concurrent.futures.TimeoutError: + if not self.client.ready: + raise ClientDisconnected("timed out waiting for connection") + else: + raise + + def call(self, method, *args, **kw): + return self.__call(self.call_threadsafe, method, args, **kw) + + def call_future(self, method, *args): + # for tests + result = concurrent.futures.Future() + self.loop.call_soon_threadsafe( + self.call_threadsafe, result, True, method, args) + return result + + def async_(self, method, *args): + return self.__call(self.call_async_threadsafe, method, args) + + def async_iter(self, it): + return self.__call(self.client.call_async_iter_threadsafe, it) + + def prefetch(self, oids, tid): + return self.__call(self.client.prefetch, oids, tid) + + def load_before(self, oid, tid): + return self.__call(self.client.load_before_threadsafe, oid, tid) + + def tpc_finish(self, tid, updates, f): + return self.__call(self.client.tpc_finish_threadsafe, tid, updates, f) + + def is_connected(self): + return self.client.ready + + def is_read_only(self): + try: + protocol = self.client.protocol + except AttributeError: + return True + else: + if protocol is None: + return True + else: + return protocol.read_only + + def close(self): + self.__call(self.client.close_threadsafe) + + # Short circuit from now on. We're closed. 
+ def call_closed(*a, **k): + raise ClientDisconnected('closed') + + self.__call = call_closed + + def apply_threadsafe(self, future, wait_ready, func, *args): + try: + future.set_result(func(*args)) + except Exception as exc: + future.set_exception(exc) + + def new_addrs(self, addrs): + # This usually doesn't have an immediate effect, since the + # addrs aren't used until the client disconnects.xs + self.__call(self.apply_threadsafe, self.client.new_addrs, addrs) + + def wait(self, timeout=None): + if timeout is None: + timeout = self.timeout + self.wait_for_result(self.client.connected, timeout) + +class ClientThread(ClientRunner): + """Thread wrapper for client interface + + A ClientProtocol is run in a dedicated thread. + + Calls to it are made in a thread-safe fashion. + """ + + def __init__(self, addrs, client, cache, + storage_key='1', read_only=False, timeout=30, + disconnect_poll=1, ssl=None, ssl_server_hostname=None, + credentials=None): + self.set_options(addrs, client, cache, storage_key, read_only, + timeout, disconnect_poll, + ssl=ssl, ssl_server_hostname=ssl_server_hostname, + credentials=credentials) + self.thread = threading.Thread( + target=self.run, + name="%s zeo client networking thread" % client.__name__, + ) + self.thread.setDaemon(True) + self.started = threading.Event() + self.thread.start() + self.started.wait() + if self.exception: + raise self.exception + + exception = None + def run(self): + loop = None + try: + loop = new_event_loop() + self.setup_delegation(loop) + self.started.set() + loop.run_forever() + except Exception as exc: + raise + logger.exception("Client thread") + self.exception = exc + finally: + if not self.closed: + self.closed = True + try: + if self.client.ready: + self.client.ready = False + self.client.client.notify_disconnected() + except AttributeError: + pass + logger.critical("Client loop stopped unexpectedly") + if loop is not None: + loop.close() + logger.debug('Stopping client thread') + + closed = False + 
def close(self): + if not self.closed: + self.closed = True + super(ClientThread, self).close() + self.loop.call_soon_threadsafe(self.loop.stop) + self.thread.join(9) + if self.exception: + raise self.exception + +class Fut(object): + """Lightweight future that calls it's callback immediately rather than soon + """ + + def add_done_callback(self, cb): + self.cb = cb + + exc = None + def set_exception(self, exc): + self.exc = exc + self.cb(self) + + def set_result(self, result): + self._result = result + self.cb(self) + + def result(self): + if self.exc: + raise self.exc + else: + return self._result diff --git a/thesisenv/lib/python3.6/site-packages/ZEO/asyncio/compat.py b/thesisenv/lib/python3.6/site-packages/ZEO/asyncio/compat.py new file mode 100644 index 0000000..7e5a18d --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO/asyncio/compat.py @@ -0,0 +1,10 @@ +from .._compat import PY3 +if PY3: + import asyncio + try: + from uvloop import new_event_loop + except ImportError: + from asyncio import new_event_loop +else: + import trollius as asyncio + from trollius import new_event_loop diff --git a/thesisenv/lib/python3.6/site-packages/ZEO/asyncio/marshal.py b/thesisenv/lib/python3.6/site-packages/ZEO/asyncio/marshal.py new file mode 100644 index 0000000..a4dd81d --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO/asyncio/marshal.py @@ -0,0 +1,168 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## +"""Support for marshaling ZEO messages + +Not to be confused with marshaling objects in ZODB. + +We currently use pickle. In the future, we may use a +Python-independent format, or possibly a minimal pickle subset. +""" + +import logging + +from .._compat import Unpickler, Pickler, BytesIO, PY3, PYPY +from ..shortrepr import short_repr + +PY2 = not PY3 +logger = logging.getLogger(__name__) + +def encoder(protocol, server=False): + """Return a non-thread-safe encoder + """ + + if protocol[:1] == b'M': + from msgpack import packb + default = server_default if server else None + def encode(*args): + return packb( + args, use_bin_type=True, default=default) + + return encode + else: + assert protocol[:1] == b'Z' + + f = BytesIO() + getvalue = f.getvalue + seek = f.seek + truncate = f.truncate + pickler = Pickler(f, 3) + pickler.fast = 1 + dump = pickler.dump + def encode(*args): + seek(0) + truncate() + dump(args) + return getvalue() + + return encode + +def encode(*args): + + return encoder(b'Z')(*args) + +def decoder(protocol): + if protocol[:1] == b'M': + from msgpack import unpackb + def msgpack_decode(data): + """Decodes msg and returns its parts""" + return unpackb(data, encoding='utf-8', use_list=False) + return msgpack_decode + else: + assert protocol[:1] == b'Z' + return pickle_decode + +def pickle_decode(msg): + """Decodes msg and returns its parts""" + unpickler = Unpickler(BytesIO(msg)) + unpickler.find_global = find_global + try: + # PyPy, zodbpickle, the non-c-accelerated version + unpickler.find_class = find_global + except AttributeError: + pass + try: + return unpickler.load() # msgid, flags, name, args + except: + 
logger.error("can't decode message: %s" % short_repr(msg)) + raise + +def server_decoder(protocol): + if protocol[:1] == b'M': + return decoder(protocol) + else: + assert protocol[:1] == b'Z' + return pickle_server_decode + +def pickle_server_decode(msg): + """Decodes msg and returns its parts""" + unpickler = Unpickler(BytesIO(msg)) + unpickler.find_global = server_find_global + try: + # PyPy, zodbpickle, the non-c-accelerated version + unpickler.find_class = server_find_global + except AttributeError: + pass + + try: + return unpickler.load() # msgid, flags, name, args + except: + logger.error("can't decode message: %s" % short_repr(msg)) + raise + +def server_default(obj): + if isinstance(obj, Exception): + return reduce_exception(obj) + else: + return obj + +def reduce_exception(exc): + class_ = exc.__class__ + class_ = "%s.%s" % (class_.__module__, class_.__name__) + return class_, exc.__dict__ or exc.args + +_globals = globals() +_silly = ('__doc__',) + +exception_type_type = type(Exception) + +_SAFE_MODULE_NAMES = ('ZopeUndo.Prefix', 'copy_reg', '__builtin__', 'zodbpickle') + +def find_global(module, name): + """Helper for message unpickler""" + try: + m = __import__(module, _globals, _globals, _silly) + except ImportError as msg: + raise ImportError("import error %s: %s" % (module, msg)) + + try: + r = getattr(m, name) + except AttributeError: + raise ImportError("module %s has no global %s" % (module, name)) + + safe = getattr(r, '__no_side_effects__', 0) or (PY2 and module in _SAFE_MODULE_NAMES) + if safe: + return r + + # TODO: is there a better way to do this? 
+ if type(r) == exception_type_type and issubclass(r, Exception): + return r + + raise ImportError("Unsafe global: %s.%s" % (module, name)) + +def server_find_global(module, name): + """Helper for message unpickler""" + if module not in _SAFE_MODULE_NAMES: + raise ImportError("Module not allowed: %s" % (module,)) + + try: + m = __import__(module, _globals, _globals, _silly) + except ImportError as msg: + raise ImportError("import error %s: %s" % (module, msg)) + + try: + r = getattr(m, name) + except AttributeError: + raise ImportError("module %s has no global %s" % (module, name)) + + return r diff --git a/thesisenv/lib/python3.6/site-packages/ZEO/asyncio/mtacceptor.py b/thesisenv/lib/python3.6/site-packages/ZEO/asyncio/mtacceptor.py new file mode 100644 index 0000000..a08b869 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO/asyncio/mtacceptor.py @@ -0,0 +1,222 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## +"""Multi-threaded server connectin acceptor + +Each connection is run in it's own thread. Testing serveral years ago +suggsted that this was a win, but ZODB shootout and another +lower-level tests suggest otherwise. It's really unclear, which is +why we're keeping this around for now. + +Asyncio doesn't let you accept connections in one thread and handle +them in another. 
To get around this, we have a listener implemented +using asyncore, but when we get a connection, we hand the socket to +asyncio. This worked well until we added SSL support. (Even then, it +worked on Mac OS X for some reason.) + +SSL + non-blocking sockets requires special care, which asyncio +provides. Unfortunately, create_connection, assumes it's creating a +client connection. It would be easy to fix this, +http://bugs.python.org/issue27392, but it's hard to justify the fix to +get it accepted, so we won't bother for now. This currently uses a +horrible monley patch to work with SSL. + +To use this module, replace:: + + from .asyncio.server import Acceptor + +with:: + + from .asyncio.mtacceptor import Acceptor + +in ZEO.StorageServer. +""" +import asyncore +import socket +import threading +import time + +from .compat import asyncio, new_event_loop +from .server import ServerProtocol + +# _has_dualstack: True if the dual-stack sockets are supported +try: + # Check whether IPv6 sockets can be created + s = socket.socket(socket.AF_INET6, socket.SOCK_STREAM) +except (socket.error, AttributeError): + _has_dualstack = False +else: + # Check whether enabling dualstack (disabling v6only) works + try: + s.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_V6ONLY, False) + except (socket.error, AttributeError): + _has_dualstack = False + else: + _has_dualstack = True + s.close() + del s + +import logging + +logger = logging.getLogger(__name__) + +class Acceptor(asyncore.dispatcher): + """A server that accepts incoming RPC connections + + And creates a separate thread for each. 
+ """ + + def __init__(self, storage_server, addr, ssl, msgpack): + self.storage_server = storage_server + self.addr = addr + self.__socket_map = {} + asyncore.dispatcher.__init__(self, map=self.__socket_map) + + self.ssl_context = ssl + self.msgpack = msgpack + self._open_socket() + + def _open_socket(self): + addr = self.addr + + if type(addr) == tuple: + if addr[0] == '' and _has_dualstack: + # Wildcard listen on all interfaces, both IPv4 and + # IPv6 if possible + self.create_socket(socket.AF_INET6, socket.SOCK_STREAM) + self.socket.setsockopt( + socket.IPPROTO_IPV6, socket.IPV6_V6ONLY, False) + elif ':' in addr[0]: + self.create_socket(socket.AF_INET6, socket.SOCK_STREAM) + if _has_dualstack: + # On Linux, IPV6_V6ONLY is off by default. + # If the user explicitly asked for IPv6, don't bind to IPv4 + self.socket.setsockopt( + socket.IPPROTO_IPV6, socket.IPV6_V6ONLY, True) + else: + self.create_socket(socket.AF_INET, socket.SOCK_STREAM) + else: + self.create_socket(socket.AF_UNIX, socket.SOCK_STREAM) + + self.set_reuse_addr() + + for i in range(25): + try: + self.bind(addr) + except Exception as exc: + logger.info("bind on %s failed %s waiting", addr, i) + if i == 24: + raise + else: + time.sleep(5) + except: + logger.exception('binding') + raise + else: + break + + if isinstance(addr, tuple) and addr[1] == 0: + self.addr = addr = self.socket.getsockname()[:2] + + logger.info("listening on %s", str(addr)) + self.listen(5) + + def writable(self): + return 0 + + def readable(self): + return 1 + + def handle_accept(self): + try: + sock, addr = self.accept() + except socket.error as msg: + logger.info("accepted failed: %s", msg) + return + + + # We could short-circuit the attempt below in some edge cases + # and avoid a log message by checking for addr being None. + # Unfortunately, our test for the code below, + # quick_close_doesnt_kill_server, causes addr to be None and + # we'd have to write a test for the non-None case, which is + # *even* harder to provoke. 
:/ So we'll leave things as they + # are for now. + + # It might be better to check whether the socket has been + # closed, but I don't see a way to do that. :( + + # Drop flow-info from IPv6 addresses + if addr: # Sometimes None on Mac. See above. + addr = addr[:2] + + try: + logger.debug("new connection %s" % (addr,)) + + def run(): + loop = new_event_loop() + zs = self.storage_server.create_client_handler() + protocol = ServerProtocol(loop, self.addr, zs, self.msgpack) + protocol.stop = loop.stop + + if self.ssl_context is None: + cr = loop.create_connection((lambda : protocol), sock=sock) + else: + if hasattr(loop, 'connect_accepted_socket'): + cr = loop.connect_accepted_socket( + (lambda : protocol), sock, ssl=self.ssl_context) + else: + ####################################################### + # XXX See http://bugs.python.org/issue27392 :( + _make_ssl_transport = loop._make_ssl_transport + def make_ssl_transport(*a, **kw): + kw['server_side'] = True + return _make_ssl_transport(*a, **kw) + loop._make_ssl_transport = make_ssl_transport + # + ####################################################### + cr = loop.create_connection( + (lambda : protocol), sock=sock, + ssl=self.ssl_context, + server_hostname='' + ) + + asyncio.ensure_future(cr, loop=loop) + loop.run_forever() + loop.close() + + thread = threading.Thread(target=run, name='zeo_client_hander') + thread.setDaemon(True) + thread.start() + except Exception: + if sock.fileno() in self.__socket_map: + del self.__socket_map[sock.fileno()] + logger.exception("Error in handle_accept") + else: + logger.info("connect from %s", repr(addr)) + + def loop(self, timeout=30.0): + try: + asyncore.loop(map=self.__socket_map, timeout=timeout) + except Exception: + if not self.__closed: + raise # Unexpected exc + + logger.debug('acceptor %s loop stopped', self.addr) + + __closed = False + def close(self): + if not self.__closed: + self.__closed = True + asyncore.dispatcher.close(self) + logger.debug("Closed accepter, %s", 
len(self.__socket_map)) diff --git a/thesisenv/lib/python3.6/site-packages/ZEO/asyncio/server.py b/thesisenv/lib/python3.6/site-packages/ZEO/asyncio/server.py new file mode 100644 index 0000000..5a5dfa1 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO/asyncio/server.py @@ -0,0 +1,285 @@ +import json +import logging +import os +import random +import threading +import ZODB.POSException + +logger = logging.getLogger(__name__) + +from ..shortrepr import short_repr + +from . import base +from .compat import asyncio, new_event_loop +from .marshal import server_decoder, encoder, reduce_exception + +class ServerProtocol(base.Protocol): + """asyncio low-level ZEO server interface + """ + + protocols = (b'5', ) + + name = 'server protocol' + methods = set(('register', )) + + unlogged_exception_types = ( + ZODB.POSException.POSKeyError, + ) + + def __init__(self, loop, addr, zeo_storage, msgpack): + """Create a server's client interface + """ + super(ServerProtocol, self).__init__(loop, addr) + self.zeo_storage = zeo_storage + + self.announce_protocol = ( + (b'M' if msgpack else b'Z') + best_protocol_version + ) + + closed = False + def close(self): + logger.debug("Closing server protocol") + if not self.closed: + self.closed = True + if self.transport is not None: + self.transport.close() + + connected = None # for tests + def connection_made(self, transport): + self.connected = True + super(ServerProtocol, self).connection_made(transport) + self._write(self.announce_protocol) + + def connection_lost(self, exc): + self.connected = False + if exc: + logger.error("Disconnected %s:%s", exc.__class__.__name__, exc) + self.zeo_storage.notify_disconnected() + self.stop() + + def stop(self): + pass # Might be replaced when running a thread per client + + def finish_connect(self, protocol_version): + if protocol_version == b'ruok': + self._write(json.dumps(self.zeo_storage.ruok()).encode("ascii")) + self.close() + else: + version = protocol_version[1:] + if version in 
self.protocols: + logger.info("received handshake %r" % + str(protocol_version.decode('ascii'))) + self.protocol_version = protocol_version + self.encode = encoder(protocol_version, True) + self.decode = server_decoder(protocol_version) + self.zeo_storage.notify_connected(self) + else: + logger.error("bad handshake %s" % short_repr(protocol_version)) + self.close() + + def call_soon_threadsafe(self, func, *args): + try: + self.loop.call_soon_threadsafe(func, *args) + except RuntimeError: + if self.connected: + logger.exception("call_soon_threadsafe failed while connected") + + def message_received(self, message): + try: + message_id, async_, name, args = self.decode(message) + except Exception: + logger.exception("Can't deserialize message") + self.close() + return + + if message_id == -1: + return # keep-alive + + if name not in self.methods: + logger.error('Invalid method, %r', name) + self.close() + + try: + result = getattr(self.zeo_storage, name)(*args) + except Exception as exc: + if not isinstance(exc, self.unlogged_exception_types): + logger.exception( + "Bad %srequest, %r", 'async ' if async_ else '', name) + if async_: + return self.close() # No way to recover/cry for help + else: + return self.send_error(message_id, exc) + + if not async_: + self.send_reply(message_id, result) + + def send_reply(self, message_id, result, send_error=False, flag=0): + try: + result = self.encode(message_id, flag, '.reply', result) + except Exception: + if isinstance(result, Delay): + result.set_sender(message_id, self) + return + else: + logger.exception("Unpicklable response %r", result) + if not send_error: + self.send_error( + message_id, + ValueError("Couldn't pickle response"), + True) + + self._write(result) + + def send_reply_threadsafe(self, message_id, result): + self.loop.call_soon_threadsafe(self.reply, message_id, result) + + def send_error(self, message_id, exc, send_error=False): + """Abstracting here so we can make this cleaner in the future + """ + 
self.send_reply(message_id, reduce_exception(exc), send_error, 2) + + def async_(self, method, *args): + self.call_async(method, args) + + def async_threadsafe(self, method, *args): + self.call_soon_threadsafe(self.call_async, method, args) + +best_protocol_version = os.environ.get( + 'ZEO_SERVER_PROTOCOL', + ServerProtocol.protocols[-1].decode('utf-8')).encode('utf-8') +assert best_protocol_version in ServerProtocol.protocols + +def new_connection(loop, addr, socket, zeo_storage, msgpack): + protocol = ServerProtocol(loop, addr, zeo_storage, msgpack) + cr = loop.create_connection((lambda : protocol), sock=socket) + asyncio.ensure_future(cr, loop=loop) + +class Delay(object): + """Used to delay response to client for synchronous calls. + + When a synchronous call is made and the original handler returns + without handling the call, it returns a Delay object that prevents + the mainloop from sending a response. + """ + + msgid = protocol = sent = None + + def set_sender(self, msgid, protocol): + self.msgid = msgid + self.protocol = protocol + + def reply(self, obj): + self.sent = 'reply' + if self.protocol: + self.protocol.send_reply(self.msgid, obj) + + def error(self, exc_info): + self.sent = 'error' + logger.error("Error raised in delayed method", exc_info=exc_info) + if self.protocol: + self.protocol.send_error(self.msgid, exc_info[1]) + + def __repr__(self): + return "%s[%s, %r, %r, %r]" % ( + self.__class__.__name__, id(self), + self.msgid, self.protocol, self.sent) + + def __reduce__(self): + raise TypeError("Can't pickle delays.") + +class Result(Delay): + + def __init__(self, *args): + self.args = args + + def set_sender(self, msgid, protocol): + reply, callback = self.args + protocol.send_reply(msgid, reply) + callback() + +class MTDelay(Delay): + + def __init__(self): + self.ready = threading.Event() + + def set_sender(self, *args): + Delay.set_sender(self, *args) + self.ready.set() + + def reply(self, obj): + self.ready.wait() + 
self.protocol.call_soon_threadsafe( + self.protocol.send_reply, self.msgid, obj) + + def error(self, exc_info): + self.ready.wait() + self.protocol.call_soon_threadsafe(Delay.error, self, exc_info) + + +class Acceptor(object): + + def __init__(self, storage_server, addr, ssl, msgpack): + self.storage_server = storage_server + self.addr = addr + self.ssl_context = ssl + self.msgpack = msgpack + self.event_loop = loop = new_event_loop() + + if isinstance(addr, tuple): + cr = loop.create_server(self.factory, addr[0], addr[1], + reuse_address=True, ssl=ssl) + else: + cr = loop.create_unix_server(self.factory, addr, ssl=ssl) + + f = asyncio.ensure_future(cr, loop=loop) + server = loop.run_until_complete(f) + + self.server = server + if isinstance(addr, tuple) and addr[1] == 0: + addrs = [s.getsockname() for s in server.sockets] + addrs = [a for a in addrs if len(a) == len(addr)] + if addrs: + self.addr = addrs[0] + else: + self.addr = server.sockets[0].getsockname()[:len(addr)] + + logger.info("listening on %s", str(addr)) + + def factory(self): + try: + logger.debug("Accepted connection") + zs = self.storage_server.create_client_handler() + protocol = ServerProtocol( + self.event_loop, self.addr, zs, self.msgpack) + except Exception: + logger.exception("Failure in protocol factory") + + return protocol + + def loop(self, timeout=None): + self.event_loop.run_forever() + self.event_loop.close() + + closed = False + def close(self): + if not self.closed: + self.closed = True + self.event_loop.call_soon_threadsafe(self._close) + + def _close(self): + loop = self.event_loop + + self.server.close() + + f = asyncio.ensure_future(self.server.wait_closed(), loop=loop) + @f.add_done_callback + def server_closed(f): + # stop the loop when the server closes: + loop.call_soon(loop.stop) + + def timeout(): + logger.warning("Timed out closing asyncio.Server") + loop.call_soon(loop.stop) + + # But if the server doesn't close in a second, stop the loop anyway. 
+ loop.call_later(1, timeout) diff --git a/thesisenv/lib/python3.6/site-packages/ZEO/asyncio/testing.py b/thesisenv/lib/python3.6/site-packages/ZEO/asyncio/testing.py new file mode 100644 index 0000000..61abd9f --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO/asyncio/testing.py @@ -0,0 +1,176 @@ +from .._compat import PY3 + +if PY3: + import asyncio +else: + import trollius as asyncio + +try: + ConnectionRefusedError +except NameError: + class ConnectionRefusedError(OSError): + pass + +import pprint + +class Loop(object): + + protocol = transport = None + + def __init__(self, addrs=(), debug=True): + self.addrs = addrs + self.get_debug = lambda : debug + self.connecting = {} + self.later = [] + self.exceptions = [] + + def call_soon(self, func, *args): + func(*args) + + def _connect(self, future, protocol_factory): + self.protocol = protocol = protocol_factory() + self.transport = transport = Transport(protocol) + protocol.connection_made(transport) + future.set_result((transport, protocol)) + + def connect_connecting(self, addr): + future, protocol_factory = self.connecting.pop(addr) + self._connect(future, protocol_factory) + + def fail_connecting(self, addr): + future, protocol_factory = self.connecting.pop(addr) + if not future.cancelled(): + future.set_exception(ConnectionRefusedError()) + + def create_connection( + self, protocol_factory, host=None, port=None, sock=None, + ssl=None, server_hostname=None + ): + future = asyncio.Future(loop=self) + if sock is None: + addr = host, port + if addr in self.addrs: + self._connect(future, protocol_factory) + else: + self.connecting[addr] = future, protocol_factory + else: + self._connect(future, protocol_factory) + + return future + + def create_unix_connection(self, protocol_factory, path): + future = asyncio.Future(loop=self) + if path in self.addrs: + self._connect(future, protocol_factory) + else: + self.connecting[path] = future, protocol_factory + + return future + + def call_soon_threadsafe(self, 
func, *args): + func(*args) + return Handle() + + def call_later(self, delay, func, *args): + handle = Handle() + self.later.append((delay, func, args, handle)) + return handle + + def call_exception_handler(self, context): + self.exceptions.append(context) + + closed = False + def close(self): + self.closed = True + + stopped = False + def stop(self): + self.stopped = True + +class Handle(object): + + cancelled = False + + def cancel(self): + self.cancelled = True + +class Transport(object): + + capacity = 1 << 64 + paused = False + extra = dict(peername='1.2.3.4', sockname=('127.0.0.1', 4200), socket=None) + + def __init__(self, protocol): + self.data = [] + self.protocol = protocol + + def write(self, data): + self.data.append(data) + self.check_pause() + + def writelines(self, lines): + self.data.extend(lines) + self.check_pause() + + def check_pause(self): + if len(self.data) > self.capacity and not self.paused: + self.paused = True + self.protocol.pause_writing() + + def pop(self, count=None): + if count: + r = self.data[:count] + del self.data[:count] + else: + r = self.data[:] + del self.data[:] + self.check_resume() + return r + + def check_resume(self): + if len(self.data) < self.capacity and self.paused: + self.paused = False + self.protocol.resume_writing() + + closed = False + def close(self): + self.closed = True + + def get_extra_info(self, name): + return self.extra[name] + +class AsyncRPC(object): + """Adapt an asyncio API to an RPC to help hysterical tests + """ + def __init__(self, api): + self.api = api + + def __getattr__(self, name): + return lambda *a, **kw: self.api.call(name, *a, **kw) + +class ClientRunner(object): + + def __init__(self, addr, client, cache, storage, read_only, timeout, + **kw): + self.addr = addr + self.client = client + self.cache = cache + self.storage = storage + self.read_only = read_only + self.timeout = timeout, + for name in kw: + self.__dict__[name] = kw[name] + + def start(self, wait=True): + pass + + def 
call(self, method, *args, **kw): + return getattr(self, method)(*args) + + async_ = async_iter = call + + def wait(self, timeout=None): + pass + + def close(self): + pass diff --git a/thesisenv/lib/python3.6/site-packages/ZEO/asyncio/tests.py b/thesisenv/lib/python3.6/site-packages/ZEO/asyncio/tests.py new file mode 100644 index 0000000..dd378e2 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO/asyncio/tests.py @@ -0,0 +1,877 @@ +from .._compat import PY3 + +if PY3: + import asyncio +else: + import trollius as asyncio + +from zope.testing import setupstack +from concurrent.futures import Future +import mock +from ZODB.POSException import ReadOnlyError +from ZODB.utils import maxtid + +import collections +import logging +import struct +import unittest + +from ..Exceptions import ClientDisconnected, ProtocolError + +from .testing import Loop +from .client import ClientRunner, Fallback +from .server import new_connection, best_protocol_version +from .marshal import encoder, decoder + +class Base(object): + + enc = b'Z' + seq_type = list + + def setUp(self): + super(Base, self).setUp() + self.encode = encoder(self.enc) + self.decode = decoder(self.enc) + + def unsized(self, data, unpickle=False): + result = [] + while data: + size, message = data[:2] + data = data[2:] + self.assertEqual(struct.unpack(">I", size)[0], len(message)) + if unpickle: + message = self.decode(message) + result.append(message) + + if len(result) == 1: + result = result[0] + return result + + def parse(self, data): + return self.unsized(data, True) + + target = None + def send(self, method, *args, **kw): + target = kw.pop('target', self.target) + called = kw.pop('called', True) + no_output = kw.pop('no_output', True) + self.assertFalse(kw) + + self.loop.protocol.data_received( + sized(self.encode(0, True, method, args))) + if target is not None: + target = getattr(target, method) + if called: + target.assert_called_with(*args) + target.reset_mock() + else: + 
self.assertFalse(target.called) + if no_output: + self.assertFalse(self.loop.transport.pop()) + + def pop(self, count=None, parse=True): + return self.unsized(self.loop.transport.pop(count), parse) + +class ClientTests(Base, setupstack.TestCase, ClientRunner): + + maxDiff = None + + def tearDown(self): + self.client.close() + super(ClientTests, self) + + def start(self, + addrs=(('127.0.0.1', 8200), ), loop_addrs=None, + read_only=False, + finish_start=False, + ): + # To create a client, we need to specify an address, a client + # object and a cache. + + wrapper = mock.Mock() + self.target = wrapper + cache = MemoryCache() + self.set_options(addrs, wrapper, cache, 'TEST', read_only, timeout=1) + + # We can also provide an event loop. We'll use a testing loop + # so we don't have to actually make any network connection. + loop = Loop(addrs if loop_addrs is None else loop_addrs) + self.setup_delegation(loop) + self.assertFalse(wrapper.notify_disconnected.called) + protocol = loop.protocol + transport = loop.transport + + if finish_start: + protocol.data_received(sized(self.enc + b'3101')) + self.assertEqual(self.pop(2, False), self.enc + b'3101') + self.respond(1, None) + self.respond(2, 'a'*8) + self.pop(4) + self.assertEqual(self.pop(), (3, False, 'get_info', ())) + self.respond(3, dict(length=42)) + + return (wrapper, cache, self.loop, self.client, protocol, transport) + + def respond(self, message_id, result, async_=False): + self.loop.protocol.data_received( + sized(self.encode(message_id, async_, '.reply', result))) + + def wait_for_result(self, future, timeout): + if future.done() and future.exception() is not None: + raise future.exception() + return future + + def testClientBasics(self): + + # Here, we'll go through the basic usage of the asyncio ZEO + # network client. The client is responsible for the core + # functionality of a ZEO client storage. The client storage + # is largely just a wrapper around the asyncio client. 
+ + wrapper, cache, loop, client, protocol, transport = self.start() + self.assertFalse(wrapper.notify_disconnected.called) + + # The client isn't connected until the server sends it some data. + self.assertFalse(client.connected.done() or transport.data) + + # The server sends the client it's protocol. In this case, + # it's a very high one. The client will send it's highest that + # it can use. + protocol.data_received(sized(self.enc + b'99999')) + + # The client sends back a handshake, and registers the + # storage, and requests the last transaction. + self.assertEqual(self.pop(2, False), self.enc + b'5') + self.assertEqual(self.pop(), (1, False, 'register', ('TEST', False))) + + # The client isn't connected until it initializes it's cache: + self.assertFalse(client.connected.done() or transport.data) + + # If we try to make calls while the client is *initially* + # connecting, we get an error. This is because some dufus + # decided to create a client storage without waiting for it to + # connect. 
+ self.assertRaises(ClientDisconnected, self.call, 'foo', 1, 2) + + # When the client is reconnecting, it's ready flag is set to False and + # it queues calls: + client.ready = False + f1 = self.call('foo', 1, 2) + self.assertFalse(f1.done()) + + # If we try to make an async call, we get an immediate error: + self.assertRaises(ClientDisconnected, self.async_, 'bar', 3, 4) + + # The wrapper object (ClientStorage) hasn't been notified: + self.assertFalse(wrapper.notify_connected.called) + + # Let's respond to the register call: + self.respond(1, None) + + # The client requests the last transaction: + self.assertEqual(self.pop(), (2, False, 'lastTransaction', ())) + + # We respond + self.respond(2, 'a'*8) + + # After verification, the client requests info: + self.assertEqual(self.pop(), (3, False, 'get_info', ())) + self.respond(3, dict(length=42)) + + # Now we're connected, the cache was initialized, and the + # queued message has been sent: + self.assertTrue(client.connected.done()) + self.assertEqual(cache.getLastTid(), 'a'*8) + self.assertEqual(self.pop(), (4, False, 'foo', (1, 2))) + + # The wrapper object (ClientStorage) has been notified: + wrapper.notify_connected.assert_called_with(client, {'length': 42}) + + self.respond(4, 42) + self.assertEqual(f1.result(), 42) + + # Now we can make async calls: + f2 = self.async_('bar', 3, 4) + self.assertTrue(f2.done() and f2.exception() is None) + self.assertEqual(self.pop(), (0, True, 'bar', (3, 4))) + + # Loading objects gets special handling to leverage the cache. + loaded = self.load_before(b'1'*8, maxtid) + + # The data wasn't in the cache, so we made a server call: + self.assertEqual(self.pop(), ((b'1'*8, maxtid), False, 'loadBefore', (b'1'*8, maxtid))) + # Note load_before uses the oid as the message id. 
+ self.respond((b'1'*8, maxtid), (b'data', b'a'*8, None)) + self.assertEqual(loaded.result(), (b'data', b'a'*8, None)) + + # If we make another request, it will be satisfied from the cache: + loaded = self.load_before(b'1'*8, maxtid) + self.assertEqual(loaded.result(), (b'data', b'a'*8, None)) + self.assertFalse(transport.data) + + # Let's send an invalidation: + self.send('invalidateTransaction', b'b'*8, self.seq_type([b'1'*8])) + + # Now, if we try to load current again, we'll make a server request. + loaded = self.load_before(b'1'*8, maxtid) + + # Note that if we make another request for the same object, + # the requests will be collapsed: + loaded2 = self.load_before(b'1'*8, maxtid) + + self.assertEqual(self.pop(), ((b'1'*8, maxtid), False, 'loadBefore', (b'1'*8, maxtid))) + self.respond((b'1'*8, maxtid), (b'data2', b'b'*8, None)) + self.assertEqual(loaded.result(), (b'data2', b'b'*8, None)) + self.assertEqual(loaded2.result(), (b'data2', b'b'*8, None)) + + # Loading non-current data may also be satisfied from cache + loaded = self.load_before(b'1'*8, b'b'*8) + self.assertEqual(loaded.result(), (b'data', b'a'*8, b'b'*8)) + self.assertFalse(transport.data) + loaded = self.load_before(b'1'*8, b'c'*8) + self.assertEqual(loaded.result(), (b'data2', b'b'*8, None)) + self.assertFalse(transport.data) + loaded = self.load_before(b'1'*8, b'_'*8) + + self.assertEqual(self.pop(), ((b'1'*8, b'_'*8), False, 'loadBefore', (b'1'*8, b'_'*8))) + self.respond((b'1'*8, b'_'*8), (b'data0', b'^'*8, b'_'*8)) + self.assertEqual(loaded.result(), (b'data0', b'^'*8, b'_'*8)) + + # When committing transactions, we need to update the cache + # with committed data. To do this, we pass a (oid, data, resolved) + # iteratable to tpc_finish_threadsafe. 
+ + tids = [] + def finished_cb(tid): + tids.append(tid) + + committed = self.tpc_finish( + b'd'*8, + [(b'2'*8, 'committed 2', False), + (b'1'*8, 'committed 3', True), + (b'4'*8, 'committed 4', False), + ], + finished_cb) + self.assertFalse(committed.done() or + cache.load(b'2'*8) or + cache.load(b'4'*8)) + self.assertEqual(cache.load(b'1'*8), (b'data2', b'b'*8)) + self.assertEqual(self.pop(), + (5, False, 'tpc_finish', (b'd'*8,))) + self.respond(5, b'e'*8) + self.assertEqual(committed.result(), b'e'*8) + self.assertEqual(cache.load(b'1'*8), None) + self.assertEqual(cache.load(b'2'*8), ('committed 2', b'e'*8)) + self.assertEqual(cache.load(b'4'*8), ('committed 4', b'e'*8)) + self.assertEqual(tids.pop(), b'e'*8) + + # If the protocol is disconnected, it will reconnect and will + # resolve outstanding requests with exceptions: + loaded = self.load_before(b'1'*8, maxtid) + f1 = self.call('foo', 1, 2) + self.assertFalse(loaded.done() or f1.done()) + self.assertEqual( + self.pop(), + [((b'11111111', b'\x7f\xff\xff\xff\xff\xff\xff\xff'), + False, 'loadBefore', (b'1'*8, maxtid)), + (6, False, 'foo', (1, 2))], + ) + exc = TypeError(43) + + self.assertFalse(wrapper.notify_disconnected.called) + wrapper.notify_connected.reset_mock() + protocol.connection_lost(exc) + wrapper.notify_disconnected.assert_called_with() + + self.assertTrue(isinstance(loaded.exception(), ClientDisconnected)) + self.assertEqual(loaded.exception().args, (exc,)) + self.assertTrue(isinstance(f1.exception(), ClientDisconnected)) + self.assertEqual(f1.exception().args, (exc,)) + + # Because we reconnected, a new protocol and transport were created: + self.assertTrue(protocol is not loop.protocol) + self.assertTrue(transport is not loop.transport) + protocol = loop.protocol + transport = loop.transport + + # and we have a new incomplete connect future: + self.assertFalse(client.connected.done() or transport.data) + + # This time we'll send a lower protocol version. 
The client + # will send it back, because it's lower than the client's + # protocol: + protocol.data_received(sized(self.enc + b'310')) + self.assertEqual(self.unsized(transport.pop(2)), self.enc + b'310') + self.assertEqual(self.pop(), (1, False, 'register', ('TEST', False))) + self.assertFalse(wrapper.notify_connected.called) + + # If the register response is a tid, then the client won't + # request lastTransaction + self.respond(1, b'e'*8) + self.assertEqual(self.pop(), (2, False, 'get_info', ())) + self.respond(2, dict(length=42)) + + # Because the server tid matches the cache tid, we're done connecting + wrapper.notify_connected.assert_called_with(client, {'length': 42}) + self.assertTrue(client.connected.done() and not transport.data) + self.assertEqual(cache.getLastTid(), b'e'*8) + + # Because we were able to update the cache, we didn't have to + # invalidate the database cache: + self.assertFalse(wrapper.invalidateTransaction.called) + + # The close method closes the connection and cache: + client.close() + self.assertTrue(transport.closed and cache.closed) + + # The client doesn't reconnect + self.assertEqual(loop.protocol, protocol) + self.assertEqual(loop.transport, transport) + + def test_cache_behind(self): + wrapper, cache, loop, client, protocol, transport = self.start() + + cache.setLastTid(b'a'*8) + cache.store(b'4'*8, b'a'*8, None, '4 data') + cache.store(b'2'*8, b'a'*8, None, '2 data') + + self.assertFalse(client.connected.done() or transport.data) + protocol.data_received(sized(self.enc + b'3101')) + self.assertEqual(self.unsized(transport.pop(2)), self.enc + b'3101') + self.respond(1, None) + self.respond(2, b'e'*8) + self.pop(4) + + # We have to verify the cache, so we're not done connecting: + self.assertFalse(client.connected.done()) + self.assertEqual(self.pop(), (3, False, 'getInvalidations', (b'a'*8, ))) + self.respond(3, (b'e'*8, [b'4'*8])) + + self.assertEqual(self.pop(), (4, False, 'get_info', ())) + self.respond(4, dict(length=42)) + 
+ # Now that verification is done, we're done connecting + self.assertTrue(client.connected.done() and not transport.data) + self.assertEqual(cache.getLastTid(), b'e'*8) + + # And the cache has been updated: + self.assertEqual(cache.load(b'2'*8), + ('2 data', b'a'*8)) # unchanged + self.assertEqual(cache.load(b'4'*8), None) + + # Because we were able to update the cache, we didn't have to + # invalidate the database cache: + self.assertFalse(wrapper.invalidateCache.called) + + def test_cache_way_behind(self): + wrapper, cache, loop, client, protocol, transport = self.start() + + cache.setLastTid(b'a'*8) + cache.store(b'4'*8, b'a'*8, None, '4 data') + self.assertTrue(cache) + + self.assertFalse(client.connected.done() or transport.data) + protocol.data_received(sized(self.enc + b'3101')) + self.assertEqual(self.unsized(transport.pop(2)), self.enc + b'3101') + self.respond(1, None) + self.respond(2, b'e'*8) + self.pop(4) + + # We have to verify the cache, so we're not done connecting: + self.assertFalse(client.connected.done()) + self.assertEqual(self.pop(), (3, False, 'getInvalidations', (b'a'*8, ))) + + # We respond None, indicating that we're too far out of date: + self.respond(3, None) + + self.assertEqual(self.pop(), (4, False, 'get_info', ())) + self.respond(4, dict(length=42)) + + # Now that verification is done, we're done connecting + self.assertTrue(client.connected.done() and not transport.data) + self.assertEqual(cache.getLastTid(), b'e'*8) + + # But the cache is now empty and we invalidated the database cache + self.assertFalse(cache) + wrapper.invalidateCache.assert_called_with() + + def test_multiple_addresses(self): + # We can pass multiple addresses to client constructor + addrs = [('1.2.3.4', 8200), ('2.2.3.4', 8200)] + wrapper, cache, loop, client, protocol, transport = self.start( + addrs, ()) + + # We haven't connected yet + self.assertTrue(protocol is None and transport is None) + + # There are 2 connection attempts outstanding: + 
self.assertEqual(sorted(loop.connecting), addrs) + + # We cause the first one to fail: + loop.fail_connecting(addrs[0]) + self.assertEqual(sorted(loop.connecting), addrs[1:]) + + # The failed connection is attempted in the future: + delay, func, args, _ = loop.later.pop(0) + self.assertTrue(1 <= delay <= 2) + func(*args) + self.assertEqual(sorted(loop.connecting), addrs) + + # Let's connect the second address + loop.connect_connecting(addrs[1]) + self.assertEqual(sorted(loop.connecting), addrs[:1]) + protocol = loop.protocol + transport = loop.transport + protocol.data_received(sized(self.enc + b'3101')) + self.assertEqual(self.unsized(transport.pop(2)), self.enc + b'3101') + self.respond(1, None) + + # Now, when the first connection fails, it won't be retried, + # because we're already connected. + # (first in later is heartbeat) + self.assertEqual(sorted(loop.later[1:]), []) + loop.fail_connecting(addrs[0]) + self.assertEqual(sorted(loop.connecting), []) + self.assertEqual(sorted(loop.later[1:]), []) + + def test_bad_server_tid(self): + # If in verification we get a server_tid behing the cache's, make sure + # we retry the connection later. 
+ wrapper, cache, loop, client, protocol, transport = self.start() + cache.store(b'4'*8, b'a'*8, None, '4 data') + cache.setLastTid('b'*8) + protocol.data_received(sized(self.enc + b'3101')) + self.assertEqual(self.unsized(transport.pop(2)), self.enc + b'3101') + self.respond(1, None) + self.respond(2, 'a'*8) + self.pop() + self.assertFalse(client.connected.done() or transport.data) + delay, func, args, _ = loop.later.pop(1) # first in later is heartbeat + self.assertTrue(8 < delay < 10) + self.assertEqual(len(loop.later), 1) # first in later is heartbeat + func(*args) # connect again + self.assertFalse(protocol is loop.protocol) + self.assertFalse(transport is loop.transport) + protocol = loop.protocol + transport = loop.transport + protocol.data_received(sized(self.enc + b'3101')) + self.assertEqual(self.unsized(transport.pop(2)), self.enc + b'3101') + self.respond(1, None) + self.respond(2, 'b'*8) + self.pop(4) + self.assertEqual(self.pop(), (3, False, 'get_info', ())) + self.respond(3, dict(length=42)) + self.assertTrue(client.connected.done() and not transport.data) + self.assertTrue(client.ready) + + def test_readonly_fallback(self): + addrs = [('1.2.3.4', 8200), ('2.2.3.4', 8200)] + wrapper, cache, loop, client, protocol, transport = self.start( + addrs, (), read_only=Fallback) + + self.assertTrue(self.is_read_only()) + + # We'll treat the first address as read-only and we'll let it connect: + loop.connect_connecting(addrs[0]) + protocol, transport = loop.protocol, loop.transport + protocol.data_received(sized(self.enc + b'3101')) + self.assertEqual(self.unsized(transport.pop(2)), self.enc + b'3101') + # We see that the client tried a writable connection: + self.assertEqual(self.pop(), + (1, False, 'register', ('TEST', False))) + # We respond with a read-only exception: + self.respond(1, ('ZODB.POSException.ReadOnlyError', ()), True) + self.assertTrue(self.is_read_only()) + + # The client tries for a read-only connection: + self.assertEqual(self.pop(), (2, 
False, 'register', ('TEST', True))) + # We respond with successfully: + self.respond(2, None) + self.pop(2) + self.respond(3, 'b'*8) + self.assertTrue(self.is_read_only()) + + # At this point, the client is ready and using the protocol, + # and the protocol is read-only: + self.assertTrue(client.ready) + self.assertEqual(client.protocol, protocol) + self.assertEqual(protocol.read_only, True) + connected = client.connected + + # The client asks for info, and we respond: + self.assertEqual(self.pop(), (4, False, 'get_info', ())) + self.respond(4, dict(length=42)) + + self.assertTrue(connected.done()) + + # We connect the second address: + loop.connect_connecting(addrs[1]) + loop.protocol.data_received(sized(self.enc + b'3101')) + self.assertEqual(self.unsized(loop.transport.pop(2)), self.enc + b'3101') + self.assertEqual(self.parse(loop.transport.pop()), + (1, False, 'register', ('TEST', False))) + self.assertTrue(self.is_read_only()) + + # We respond and the writable connection succeeds: + self.respond(1, None) + + # at this point, a lastTransaction request is emitted: + + self.assertEqual(self.parse(loop.transport.pop()), + (2, False, 'lastTransaction', ())) + self.assertFalse(self.is_read_only()) + + # Now, the original protocol is closed, and the client is + # no-longer ready: + self.assertFalse(client.ready) + self.assertFalse(client.protocol is protocol) + self.assertEqual(client.protocol, loop.protocol) + self.assertEqual(protocol.closed, True) + self.assertTrue(client.connected is not connected) + self.assertFalse(client.connected.done()) + protocol, transport = loop.protocol, loop.transport + self.assertEqual(protocol.read_only, False) + + # Now, we finish verification + self.respond(2, 'b'*8) + self.respond(3, dict(length=42)) + self.assertTrue(client.ready) + self.assertTrue(client.connected.done()) + + def test_invalidations_while_verifying(self): + # While we're verifying, invalidations are ignored + wrapper, cache, loop, client, protocol, transport = 
self.start() + protocol.data_received(sized(self.enc + b'3101')) + self.assertEqual(self.unsized(transport.pop(2)), self.enc + b'3101') + self.respond(1, None) + self.pop(4) + self.send('invalidateTransaction', b'b'*8, [b'1'*8], called=False) + self.respond(2, b'a'*8) + self.send('invalidateTransaction', b'c'*8, self.seq_type([b'1'*8]), + no_output=False) + self.assertEqual(self.pop(), (3, False, 'get_info', ())) + + # We'll disconnect: + protocol.connection_lost(Exception("lost")) + self.assertTrue(protocol is not loop.protocol) + self.assertTrue(transport is not loop.transport) + protocol = loop.protocol + transport = loop.transport + + # Similarly, invalidations aren't processed while reconnecting: + + protocol.data_received(sized(self.enc + b'3101')) + self.assertEqual(self.unsized(transport.pop(2)), self.enc + b'3101') + self.respond(1, None) + self.pop(4) + self.send('invalidateTransaction', b'd'*8, [b'1'*8], called=False) + self.respond(2, b'c'*8) + self.send('invalidateTransaction', b'e'*8, self.seq_type([b'1'*8]), + no_output=False) + self.assertEqual(self.pop(), (3, False, 'get_info', ())) + + def test_flow_control(self): + # When sending a lot of data (blobs), we don't want to fill up + # memory behind a slow socket. Asycio's flow control helper + # seems a bit complicated. We'd rather pass an iterator that's + # consumed as we can. + + wrapper, cache, loop, client, protocol, transport = self.start( + finish_start=True) + + # Give the transport a small capacity: + transport.capacity = 2 + self.async_('foo') + self.async_('bar') + self.async_('baz') + self.async_('splat') + + # The first 2 were sent, but the remaining were queued. 
+ self.assertEqual(self.pop(), + [(0, True, 'foo', ()), (0, True, 'bar', ())]) + + # But popping them allowed sending to resume: + self.assertEqual(self.pop(), + [(0, True, 'baz', ()), (0, True, 'splat', ())]) + + # This is especially handy with iterators: + self.async_iter((name, ()) for name in 'abcde') + self.assertEqual(self.pop(), [(0, True, 'a', ()), (0, True, 'b', ())]) + self.assertEqual(self.pop(), [(0, True, 'c', ()), (0, True, 'd', ())]) + self.assertEqual(self.pop(), (0, True, 'e', ())) + self.assertEqual(self.pop(), []) + + def test_bad_protocol(self): + wrapper, cache, loop, client, protocol, transport = self.start() + with mock.patch("ZEO.asyncio.client.logger.error") as error: + self.assertFalse(error.called) + protocol.data_received(sized(self.enc + b'200')) + self.assertTrue(isinstance(error.call_args[0][1], ProtocolError)) + + + def test_get_peername(self): + wrapper, cache, loop, client, protocol, transport = self.start( + finish_start=True) + self.assertEqual(client.get_peername(), '1.2.3.4') + + def test_call_async_from_same_thread(self): + # There are a few (1?) cases where we call into client storage + # where it needs to call back asyncronously. Because we're + # calling from the same thread, we don't need to use a futurte. + wrapper, cache, loop, client, protocol, transport = self.start( + finish_start=True) + + client.call_async_from_same_thread('foo', 1) + self.assertEqual(self.pop(), (0, True, 'foo', (1, ))) + + def test_ClientDisconnected_on_call_timeout(self): + wrapper, cache, loop, client, protocol, transport = self.start() + self.wait_for_result = super(ClientTests, self).wait_for_result + self.assertRaises(ClientDisconnected, self.call, 'foo') + client.ready = False + self.assertRaises(ClientDisconnected, self.call, 'foo') + + def test_errors_in_data_received(self): + # There was a bug in ZEO.async.client.Protocol.data_recieved + # that caused it to fail badly if errors were raised while + # handling data. 
+ + wrapper, cache, loop, client, protocol, transport =self.start( + finish_start=True) + + wrapper.receiveBlobStart.side_effect = ValueError('test') + + chunk = 'x' * 99999 + try: + loop.protocol.data_received( + sized( + self.encode(0, True, 'receiveBlobStart', ('oid', 'serial')) + ) + + sized( + self.encode( + 0, True, 'receiveBlobChunk', ('oid', 'serial', chunk)) + ) + ) + except ValueError: + pass + loop.protocol.data_received(sized( + self.encode(0, True, 'receiveBlobStop', ('oid', 'serial')) + )) + wrapper.receiveBlobChunk.assert_called_with('oid', 'serial', chunk) + wrapper.receiveBlobStop.assert_called_with('oid', 'serial') + + def test_heartbeat(self): + # Protocols run heartbeats on a configurable (sort of) + # heartbeat interval, which defaults to every 60 seconds. + wrapper, cache, loop, client, protocol, transport = self.start( + finish_start=True) + + delay, func, args, handle = loop.later.pop() + self.assertEqual( + (delay, func, args, handle), + (60, protocol.heartbeat, (), protocol.heartbeat_handle), + ) + self.assertFalse(loop.later or handle.cancelled) + + # The heartbeat function sends heartbeat data and reschedules itself. 
+ func() + self.assertEqual(self.pop(), (-1, 0, '.reply', None)) + self.assertTrue(protocol.heartbeat_handle != handle) + + delay, func, args, handle = loop.later.pop() + self.assertEqual( + (delay, func, args, handle), + (60, protocol.heartbeat, (), protocol.heartbeat_handle), + ) + self.assertFalse(loop.later or handle.cancelled) + + # The heartbeat is cancelled when the protocol connection is lost: + protocol.connection_lost(None) + self.assertTrue(handle.cancelled) + +class MsgpackClientTests(ClientTests): + enc = b'M' + seq_type = tuple + +class MemoryCache(object): + + def __init__(self): + # { oid -> [(start, end, data)] } + self.data = collections.defaultdict(list) + self.last_tid = None + + clear = __init__ + + closed = False + def close(self): + self.closed = True + + def __len__(self): + return len(self.data) + + def load(self, oid): + revisions = self.data[oid] + if revisions: + start, end, data = revisions[-1] + if not end: + return data, start + return None + + def store(self, oid, start_tid, end_tid, data): + assert start_tid is not None + revisions = self.data[oid] + data = (start_tid, end_tid, data) + if not revisions or data != revisions[-1]: + revisions.append(data) + revisions.sort() + + def loadBefore(self, oid, tid): + for start, end, data in self.data[oid]: + if start < tid and (end is None or end >= tid): + return data, start, end + + def invalidate(self, oid, tid): + revisions = self.data[oid] + if revisions: + if tid is None: + del revisions[:] + else: + start, end, data = revisions[-1] + if end is None: + revisions[-1] = start, tid, data + + def getLastTid(self): + return self.last_tid + + def setLastTid(self, tid): + self.last_tid = tid + + +class ServerTests(Base, setupstack.TestCase): + + # The server side of things is pretty simple compared to the + # client, because it's the clien't job to make and keep + # connections. Servers are pretty passive. 
+ + def connect(self, finish=False): + protocol = server_protocol(self.enc == b'M') + self.loop = protocol.loop + self.target = protocol.zeo_storage + if finish: + self.assertEqual(self.pop(parse=False), + self.enc + best_protocol_version) + protocol.data_received(sized(self.enc + b'5')) + return protocol + + message_id = 0 + target = None + def call(self, meth, *args, **kw): + if kw: + expect = kw.pop('expect', self) + target = kw.pop('target', self.target) + self.assertFalse(kw) + + if target is not None: + target = getattr(target, meth) + if expect is not self: + target.return_value = expect + + self.message_id += 1 + self.loop.protocol.data_received( + sized(self.encode(self.message_id, False, meth, args))) + + if target is not None: + target.assert_called_once_with(*args) + target.reset_mock() + + if expect is not self: + self.assertEqual(self.pop(), + (self.message_id, False, '.reply', expect)) + + def testServerBasics(self): + # A simple listening thread accepts connections. It creats + # asyncio connections by calling ZEO.asyncio.new_connection: + protocol = self.connect() + self.assertFalse(protocol.zeo_storage.notify_connected.called) + + # The server sends it's protocol. 
+ self.assertEqual(self.pop(parse=False), + self.enc + best_protocol_version) + + # The client sends it's protocol: + protocol.data_received(sized(self.enc + b'5')) + + self.assertEqual(protocol.protocol_version, self.enc + b'5') + + protocol.zeo_storage.notify_connected.assert_called_once_with(protocol) + + # The client registers: + self.call('register', False, expect=None) + + # It does other things, like, send hearbeats: + protocol.data_received(sized(b'(J\xff\xff\xff\xffK\x00U\x06.replyNt.')) + + # The client can make async calls: + self.send('register') + + # Let's close the connection + self.assertFalse(protocol.zeo_storage.notify_disconnected.called) + protocol.connection_lost(None) + protocol.zeo_storage.notify_disconnected.assert_called_once_with() + + def test_invalid_methods(self): + protocol = self.connect(True) + protocol.zeo_storage.notify_connected.assert_called_once_with(protocol) + + # If we try to call a methid that isn't in the protocol's + # white list, it will disconnect: + self.assertFalse(protocol.loop.transport.closed) + self.call('foo', target=None) + self.assertTrue(protocol.loop.transport.closed) + +class MsgpackServerTests(ServerTests): + enc = b'M' + seq_type = tuple + +def server_protocol(msgpack, + zeo_storage=None, + protocol_version=None, + addr=('1.2.3.4', '42'), + ): + if zeo_storage is None: + zeo_storage = mock.Mock() + loop = Loop() + sock = () # anything not None + new_connection(loop, addr, sock, zeo_storage, msgpack) + if protocol_version: + loop.protocol.data_received(sized(protocol_version)) + return loop.protocol + +def response(*data): + return sized(self.encode(*data)) + +def sized(message): + return struct.pack(">I", len(message)) + message + +class Logging(object): + + def __init__(self, level=logging.ERROR): + self.level = level + + def __enter__(self): + self.handler = logging.StreamHandler() + logging.getLogger().addHandler(self.handler) + logging.getLogger().setLevel(self.level) + + def __exit__(self, *args): + 
logging.getLogger().removeHandler(self.handler) + logging.getLogger().setLevel(logging.NOTSET) + + +def test_suite(): + suite = unittest.TestSuite() + suite.addTest(unittest.makeSuite(ClientTests)) + suite.addTest(unittest.makeSuite(ServerTests)) + suite.addTest(unittest.makeSuite(MsgpackClientTests)) + suite.addTest(unittest.makeSuite(MsgpackServerTests)) + return suite diff --git a/thesisenv/lib/python3.6/site-packages/ZEO/cache.py b/thesisenv/lib/python3.6/site-packages/ZEO/cache.py new file mode 100644 index 0000000..553a8c2 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO/cache.py @@ -0,0 +1,835 @@ +############################################################################## +# +# Copyright (c) 2003 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Disk-based client cache for ZEO. + +ClientCache exposes an API used by the ZEO client storage. FileCache stores +objects on disk using a 2-tuple of oid and tid as key. + +ClientCache's API is similar to a storage API, with methods like load(), +store(), and invalidate(). It manages in-memory data structures that allow +it to map this richer API onto the simple key-based API of the lower-level +FileCache. 
+""" +from __future__ import print_function +from struct import pack, unpack + +import BTrees.LLBTree +import BTrees.LOBTree +import logging +import os +import tempfile +import time + +import ZODB.fsIndex +import zc.lockfile +from ZODB.utils import p64, u64, z64, RLock +import six +from ._compat import PYPY + +logger = logging.getLogger("ZEO.cache") + +# A disk-based cache for ZEO clients. +# +# This class provides an interface to a persistent, disk-based cache +# used by ZEO clients to store copies of database records from the +# server. +# +# The details of the constructor as unspecified at this point. +# +# Each entry in the cache is valid for a particular range of transaction +# ids. The lower bound is the transaction that wrote the data. The +# upper bound is the next transaction that wrote a revision of the +# object. If the data is current, the upper bound is stored as None; +# the data is considered current until an invalidate() call is made. +# +# It is an error to call store() twice with the same object without an +# intervening invalidate() to set the upper bound on the first cache +# entry. Perhaps it will be necessary to have a call the removes +# something from the cache outright, without keeping a non-current +# entry. + +# Cache verification +# +# When the client is connected to the server, it receives +# invalidations every time an object is modified. When the client is +# disconnected then reconnects, it must perform cache verification to make +# sure its cached data is synchronized with the storage's current state. +# +# quick verification +# full verification +# + + +# FileCache stores a cache in a single on-disk file. +# +# On-disk cache structure. +# +# The file begins with a 12-byte header. The first four bytes are the +# file's magic number - ZEC3 - indicating zeo cache version 4. The +# next eight bytes are the last transaction id. + +magic = b"ZEC3" +ZEC_HEADER_SIZE = 12 + +# Maximum block size. 
Note that while we are doing a store, we may +# need to write a free block that is almost twice as big. If we die +# in the middle of a store, then we need to split the large free records +# while opening. +max_block_size = (1<<31) - 1 + + +# After the header, the file contains a contiguous sequence of blocks. All +# blocks begin with a one-byte status indicator: +# +# b'a' +# Allocated. The block holds an object; the next 4 bytes are >I +# format total block size. +# +# b'f' +# Free. The block is free; the next 4 bytes are >I format total +# block size. +# +# b'1', b'2', b'3', b'4' +# The block is free, and consists of 1, 2, 3 or 4 bytes total. +# +# "Total" includes the status byte, and size bytes. There are no +# empty (size 0) blocks. + + +# Allocated blocks have more structure: +# +# 1 byte allocation status (b'a'). +# 4 bytes block size, >I format. +# 8 byte oid +# 8 byte start_tid +# 8 byte end_tid +# 2 byte version length must be 0 +# 4 byte data size +# data +# 8 byte redundant oid for error detection. +allocated_record_overhead = 43 + +# The cache's currentofs goes around the file, circularly, forever. +# It's always the starting offset of some block. +# +# When a new object is added to the cache, it's stored beginning at +# currentofs, and currentofs moves just beyond it. As many contiguous +# blocks needed to make enough room for the new object are evicted, +# starting at currentofs. Exception: if currentofs is close enough +# to the end of the file that the new object can't fit in one +# contiguous chunk, currentofs is reset to ZEC_HEADER_SIZE first. + +# Under PyPy, the available dict specializations perform significantly +# better (faster) than the pure-Python BTree implementation. They may +# use less memory too. And we don't require any of the special BTree features... 
+_current_index_type = ZODB.fsIndex.fsIndex if not PYPY else dict +_noncurrent_index_type = BTrees.LOBTree.LOBTree if not PYPY else dict +# ...except at this leaf level +_noncurrent_bucket_type = BTrees.LLBTree.LLBucket + +class ClientCache(object): + """A simple in-memory cache.""" + + # The default size of 200MB makes a lot more sense than the traditional + # default of 20MB. The default here is misleading, though, since + # ClientStorage is the only user of ClientCache, and it always passes an + # explicit size of its own choosing. + def __init__(self, path=None, size=200*1024**2, rearrange=.8): + + # - `path`: filepath for the cache file, or None (in which case + # a temp file will be created) + self.path = path + + # - `maxsize`: total size of the cache file + # We set to the minimum size of less than the minimum. + size = max(size, ZEC_HEADER_SIZE) + self.maxsize = size + + # rearrange: if we read a current record and it's more than + # rearrange*size from the end, then copy it forward to keep it + # from being evicted. + self.rearrange = rearrange * size + + # The number of records in the cache. + self._len = 0 + + # {oid -> pos} + self.current = _current_index_type() + + # {oid -> {tid->pos}} + # Note that caches in the wild seem to have very little non-current + # data, so this would seem to have little impact on memory consumption. + # I wonder if we even need to store non-current data in the cache. + self.noncurrent = _noncurrent_index_type() + + # tid for the most recent transaction we know about. This is also + # stored near the start of the file. + self.tid = z64 + + # Always the offset into the file of the start of a block. + # New and relocated objects are always written starting at + # currentofs. + self.currentofs = ZEC_HEADER_SIZE + + self._lock = RLock() + + # self.f is the open file object. 
+ # When we're not reusing an existing file, self.f is left None + # here -- the scan() method must be called then to open the file + # (and it sets self.f). + + fsize = ZEC_HEADER_SIZE + if path: + self._lock_file = zc.lockfile.LockFile(path + '.lock') + if not os.path.exists(path): + # Create a small empty file. We'll make it bigger in _initfile. + self.f = open(path, 'wb+') + self.f.write(magic+z64) + logger.info("created persistent cache file %r", path) + else: + fsize = os.path.getsize(self.path) + self.f = open(path, 'rb+') + logger.info("reusing persistent cache file %r", path) + else: + # Create a small empty file. We'll make it bigger in _initfile. + self.f = tempfile.TemporaryFile() + self.f.write(magic+z64) + logger.info("created temporary cache file %r", self.f.name) + + try: + self._initfile(fsize) + except: + self.f.close() + if not path: + raise # unrecoverable temp file error :( + badpath = path+'.bad' + if os.path.exists(badpath): + logger.critical( + 'Removing bad cache file: %r (prev bad exists).', + path, exc_info=1) + os.remove(path) + else: + logger.critical('Moving bad cache file to %r.', + badpath, exc_info=1) + os.rename(path, badpath) + self.f = open(path, 'wb+') + self.f.write(magic+z64) + self._initfile(ZEC_HEADER_SIZE) + + # Statistics: _n_adds, _n_added_bytes, + # _n_evicts, _n_evicted_bytes, + # _n_accesses + self.clearStats() + + self._setup_trace(path) + + # Backward compatibility. Client code used to have to use the fc + # attr to get to the file cache to get cache stats. + @property + def fc(self): + return self + + def clear(self): + with self._lock: + self.f.seek(ZEC_HEADER_SIZE) + self.f.truncate() + self._initfile(ZEC_HEADER_SIZE) + + ## + # Scan the current contents of the cache file, calling `install` + # for each object found in the cache. This method should only + # be called once to initialize the cache from disk. 
+ def _initfile(self, fsize): + maxsize = self.maxsize + f = self.f + read = f.read + seek = f.seek + write = f.write + seek(0) + if read(4) != magic: + seek(0) + raise ValueError("unexpected magic number: %r" % read(4)) + self.tid = read(8) + if len(self.tid) != 8: + raise ValueError("cache file too small -- no tid at start") + + # Populate .filemap and .key2entry to reflect what's currently in the + # file, and tell our parent about it too (via the `install` callback). + # Remember the location of the largest free block. That seems a + # decent place to start currentofs. + + self.current = _current_index_type() + self.noncurrent = _noncurrent_index_type() + l = 0 + last = ofs = ZEC_HEADER_SIZE + first_free_offset = 0 + current = self.current + status = b' ' + while ofs < fsize: + seek(ofs) + status = read(1) + if status == b'a': + size, oid, start_tid, end_tid, lver = unpack( + ">I8s8s8sH", read(30)) + if ofs+size <= maxsize: + if end_tid == z64: + assert oid not in current, (ofs, f.tell()) + current[oid] = ofs + else: + assert start_tid < end_tid, (ofs, f.tell()) + self._set_noncurrent(oid, start_tid, ofs) + assert lver == 0, "Versions aren't supported" + l += 1 + else: + # free block + if first_free_offset == 0: + first_free_offset = ofs + if status == b'f': + size, = unpack(">I", read(4)) + if size > max_block_size: + # Oops, we either have an old cache, or a we + # crashed while storing. Split this block into two. + assert size <= max_block_size*2 + seek(ofs+max_block_size) + write(b'f'+pack(">I", size-max_block_size)) + seek(ofs) + write(b'f'+pack(">I", max_block_size)) + sync(f) + elif status in b'1234': + size = int(status) + else: + raise ValueError("unknown status byte value %s in client " + "cache file" % 0, hex(ord(status))) + + last = ofs + ofs += size + + if ofs >= maxsize: + # Oops, the file was bigger before. + if ofs > maxsize: + # The last record is too big. 
Replace it with a smaller + # free record + size = maxsize-last + seek(last) + if size > 4: + write(b'f'+pack(">I", size)) + else: + write("012345"[size].encode()) + sync(f) + ofs = maxsize + break + + if fsize < maxsize: + assert ofs==fsize + # Make sure the OS really saves enough bytes for the file. + seek(self.maxsize - 1) + write(b'x') + + # add as many free blocks as are needed to fill the space + seek(ofs) + nfree = maxsize - ZEC_HEADER_SIZE + for i in range(0, nfree, max_block_size): + block_size = min(max_block_size, nfree-i) + write(b'f' + pack(">I", block_size)) + seek(block_size-5, 1) + sync(self.f) + + # There is always data to read and + assert last and (status in b' f1234') + first_free_offset = last + else: + assert ofs==maxsize + if maxsize < fsize: + seek(maxsize) + f.truncate() + + # We use the first_free_offset because it is most likelyt the + # place where we last wrote. + self.currentofs = first_free_offset or ZEC_HEADER_SIZE + self._len = l + + def _set_noncurrent(self, oid, tid, ofs): + noncurrent_for_oid = self.noncurrent.get(u64(oid)) + if noncurrent_for_oid is None: + noncurrent_for_oid = _noncurrent_bucket_type() + self.noncurrent[u64(oid)] = noncurrent_for_oid + noncurrent_for_oid[u64(tid)] = ofs + + def _del_noncurrent(self, oid, tid): + try: + noncurrent_for_oid = self.noncurrent[u64(oid)] + del noncurrent_for_oid[u64(tid)] + if not noncurrent_for_oid: + del self.noncurrent[u64(oid)] + except KeyError: + logger.error("Couldn't find non-current %r", (oid, tid)) + + + def clearStats(self): + self._n_adds = self._n_added_bytes = 0 + self._n_evicts = self._n_evicted_bytes = 0 + self._n_accesses = 0 + + def getStats(self): + return (self._n_adds, self._n_added_bytes, + self._n_evicts, self._n_evicted_bytes, + self._n_accesses + ) + + ## + # The number of objects currently in the cache. + def __len__(self): + return self._len + + ## + # Close the underlying file. No methods accessing the cache should be + # used after this. 
+ def close(self): + self._unsetup_trace() + f = self.f + self.f = None + if f is not None: + sync(f) + f.close() + + if hasattr(self,'_lock_file'): + self._lock_file.close() + + ## + # Evict objects as necessary to free up at least nbytes bytes, + # starting at currentofs. If currentofs is closer than nbytes to + # the end of the file, currentofs is reset to ZEC_HEADER_SIZE first. + # The number of bytes actually freed may be (and probably will be) + # greater than nbytes, and is _makeroom's return value. The file is not + # altered by _makeroom. filemap and key2entry are updated to reflect the + # evictions, and it's the caller's responsibility both to fiddle + # the file, and to update filemap, to account for all the space + # freed (starting at currentofs when _makeroom returns, and + # spanning the number of bytes retured by _makeroom). + def _makeroom(self, nbytes): + assert 0 < nbytes <= self.maxsize - ZEC_HEADER_SIZE, ( + nbytes, self.maxsize) + if self.currentofs + nbytes > self.maxsize: + self.currentofs = ZEC_HEADER_SIZE + ofs = self.currentofs + seek = self.f.seek + read = self.f.read + current = self.current + while nbytes > 0: + seek(ofs) + status = read(1) + if status == b'a': + size, oid, start_tid, end_tid = unpack(">I8s8s8s", read(28)) + self._n_evicts += 1 + self._n_evicted_bytes += size + if end_tid == z64: + del current[oid] + else: + self._del_noncurrent(oid, start_tid) + self._len -= 1 + else: + if status == b'f': + size = unpack(">I", read(4))[0] + else: + assert status in b'1234' + size = int(status) + ofs += size + nbytes -= size + return ofs - self.currentofs + + ## + # Update our idea of the most recent tid. This is stored in the + # instance, and also written out near the start of the cache file. The + # new tid must be strictly greater than our current idea of the most + # recent tid. 
+ def setLastTid(self, tid): + with self._lock: + if (not tid) or (tid == z64): + return + if (tid <= self.tid) and self._len: + if tid == self.tid: + return # Be a little forgiving + raise ValueError("new last tid (%s) must be greater than " + "previous one (%s)" + % (u64(tid), u64(self.tid))) + assert isinstance(tid, bytes) and len(tid) == 8, tid + self.tid = tid + self.f.seek(len(magic)) + self.f.write(tid) + self.f.flush() + + ## + # Return the last transaction seen by the cache. + # @return a transaction id + # @defreturn string, or 8 nulls if no transaction is yet known + def getLastTid(self): + with self._lock: + return self.tid + + ## + # Return the current data record for oid. + # @param oid object id + # @return (data record, serial number, tid), or None if the object is not + # in the cache + # @defreturn 3-tuple: (string, string, string) + def load(self, oid, before_tid=None): + with self._lock: + ofs = self.current.get(oid) + if ofs is None: + self._trace(0x20, oid) + return None + self.f.seek(ofs) + read = self.f.read + status = read(1) + assert status == b'a', (ofs, self.f.tell(), oid) + size, saved_oid, tid, end_tid, lver, ldata = unpack( + ">I8s8s8sHI", read(34)) + assert saved_oid == oid, (ofs, self.f.tell(), oid, saved_oid) + assert end_tid == z64, (ofs, self.f.tell(), oid, tid, end_tid) + assert lver == 0, "Versions aren't supported" + + if before_tid and tid >= before_tid: + return None + + data = read(ldata) + assert len(data) == ldata, ( + ofs, self.f.tell(), oid, len(data), ldata) + + # WARNING: The following assert changes the file position. + # We must not depend on this below or we'll fail in optimized mode. 
+ assert read(8) == oid, (ofs, self.f.tell(), oid) + + self._n_accesses += 1 + self._trace(0x22, oid, tid, end_tid, ldata) + + ofsofs = self.currentofs - ofs + if ofsofs < 0: + ofsofs += self.maxsize + + if (ofsofs > self.rearrange and + self.maxsize > 10*len(data) and + size > 4): + # The record is far back and might get evicted, but it's + # valuable, so move it forward. + + # Remove fromn old loc: + del self.current[oid] + self.f.seek(ofs) + self.f.write(b'f'+pack(">I", size)) + + # Write to new location: + self._store(oid, tid, None, data, size) + + return data, tid + + ## + # Return a non-current revision of oid that was current before tid. + # @param oid object id + # @param tid id of transaction that wrote next revision of oid + # @return data record, serial number, start tid, and end tid + # @defreturn 4-tuple: (string, string, string, string) + def loadBefore(self, oid, before_tid): + with self._lock: + noncurrent_for_oid = self.noncurrent.get(u64(oid)) + if noncurrent_for_oid is None: + result = self.load(oid, before_tid) + if result: + return result[0], result[1], None + else: + self._trace(0x24, oid, "", before_tid) + return result + + items = noncurrent_for_oid.items(None, u64(before_tid)-1) + if not items: + result = self.load(oid, before_tid) + if result: + return result[0], result[1], None + else: + self._trace(0x24, oid, "", before_tid) + return result + + tid, ofs = items[-1] + + self.f.seek(ofs) + read = self.f.read + status = read(1) + assert status == b'a', (ofs, self.f.tell(), oid, before_tid) + size, saved_oid, saved_tid, end_tid, lver, ldata = unpack( + ">I8s8s8sHI", read(34)) + assert saved_oid == oid, (ofs, self.f.tell(), oid, saved_oid) + assert saved_tid == p64(tid), ( + ofs, self.f.tell(), oid, saved_tid, tid) + assert end_tid != z64, (ofs, self.f.tell(), oid) + assert lver == 0, "Versions aren't supported" + data = read(ldata) + assert len(data) == ldata, (ofs, self.f.tell()) + + # WARNING: The following assert changes the file 
position. + # We must not depend on this below or we'll fail in optimized mode. + assert read(8) == oid, (ofs, self.f.tell(), oid) + + if end_tid < before_tid: + result = self.load(oid, before_tid) + if result: + return result[0], result[1], None + else: + self._trace(0x24, oid, "", before_tid) + return result + + self._n_accesses += 1 + self._trace(0x26, oid, "", saved_tid) + return data, saved_tid, end_tid + + ## + # Store a new data record in the cache. + # @param oid object id + # @param start_tid the id of the transaction that wrote this revision + # @param end_tid the id of the transaction that created the next + # revision of oid. If end_tid is None, the data is + # current. + # @param data the actual data + def store(self, oid, start_tid, end_tid, data): + with self._lock: + seek = self.f.seek + if end_tid is None: + ofs = self.current.get(oid) + if ofs: + seek(ofs) + read = self.f.read + status = read(1) + assert status == b'a', (ofs, self.f.tell(), oid) + size, saved_oid, saved_tid, end_tid = unpack( + ">I8s8s8s", read(28)) + assert saved_oid == oid, ( + ofs, self.f.tell(), oid, saved_oid) + assert end_tid == z64, (ofs, self.f.tell(), oid) + if saved_tid == start_tid: + return + raise ValueError("already have current data for oid") + else: + noncurrent_for_oid = self.noncurrent.get(u64(oid)) + if noncurrent_for_oid and ( + u64(start_tid) in noncurrent_for_oid): + return + + size = allocated_record_overhead + len(data) + + # A number of cache simulation experiments all concluded that the + # 2nd-level ZEO cache got a much higher hit rate if "very large" + # objects simply weren't cached. For now, we ignore the request + # only if the entire cache file is too small to hold the object. 
+ if size >= min(max_block_size, self.maxsize - ZEC_HEADER_SIZE): + return + + self._n_adds += 1 + self._n_added_bytes += size + self._len += 1 + + self._store(oid, start_tid, end_tid, data, size) + + if end_tid: + self._trace(0x54, oid, start_tid, end_tid, dlen=len(data)) + else: + self._trace(0x52, oid, start_tid, dlen=len(data)) + + def _store(self, oid, start_tid, end_tid, data, size): + # Low-level store used by store and load + + # In the next line, we ask for an extra to make sure we always + # have a free block after the new alocated block. This free + # block acts as a ring pointer, so that on restart, we start + # where we left off. + nfreebytes = self._makeroom(size+1) + + assert size <= nfreebytes, (size, nfreebytes) + excess = nfreebytes - size + # If there's any excess (which is likely), we need to record a + # free block following the end of the data record. That isn't + # expensive -- it's all a contiguous write. + if excess == 0: + extra = b'' + elif excess < 5: + extra = "01234"[excess].encode() + else: + extra = b'f' + pack(">I", excess) + + ofs = self.currentofs + seek = self.f.seek + seek(ofs) + write = self.f.write + + # Before writing data, we'll write a free block for the space freed. + # We'll come back with a last atomic write to rewrite the start of the + # allocated-block header. + write(b'f'+pack(">I", nfreebytes)) + + # Now write the rest of the allocation block header and object data. + write(pack(">8s8s8sHI", oid, start_tid, end_tid or z64, 0, len(data))) + write(data) + write(oid) + write(extra) + + # Now, we'll go back and rewrite the beginning of the + # allocated block header. + seek(ofs) + write(b'a'+pack(">I", size)) + + if end_tid: + self._set_noncurrent(oid, start_tid, ofs) + else: + self.current[oid] = ofs + + self.currentofs += size + + + ## + # If `tid` is None, + # forget all knowledge of `oid`. (`tid` can be None only for + # invalidations generated by startup cache verification.) 
If `tid` + # isn't None, and we had current + # data for `oid`, stop believing we have current data, and mark the + # data we had as being valid only up to `tid`. In all other cases, do + # nothing. + # + # Paramters: + # + # - oid object id + # - tid the id of the transaction that wrote a new revision of oid, + # or None to forget all cached info about oid. + def invalidate(self, oid, tid): + with self._lock: + ofs = self.current.get(oid) + if ofs is None: + # 0x10 == invalidate (miss) + self._trace(0x10, oid, tid) + return + + self.f.seek(ofs) + read = self.f.read + status = read(1) + assert status == b'a', (ofs, self.f.tell(), oid) + size, saved_oid, saved_tid, end_tid = unpack(">I8s8s8s", read(28)) + assert saved_oid == oid, (ofs, self.f.tell(), oid, saved_oid) + assert end_tid == z64, (ofs, self.f.tell(), oid) + del self.current[oid] + if tid is None: + self.f.seek(ofs) + self.f.write(b'f'+pack(">I", size)) + # 0x1E = invalidate (hit, discarding current or non-current) + self._trace(0x1E, oid, tid) + self._len -= 1 + else: + if tid == saved_tid: + logger.warning( + "Ignoring invalidation with same tid as current") + return + self.f.seek(ofs+21) + self.f.write(tid) + self._set_noncurrent(oid, saved_tid, ofs) + # 0x1C = invalidate (hit, saving non-current) + self._trace(0x1C, oid, tid) + + ## + # Generates (oid, serial) oairs for all objects in the + # cache. This generator is used by cache verification. + def contents(self): + # May need to materialize list instead of iterating; + # depends on whether the caller may change the cache. 
+ seek = self.f.seek + read = self.f.read + for oid, ofs in six.iteritems(self.current): + seek(ofs) + status = read(1) + assert status == b'a', (ofs, self.f.tell(), oid) + size, saved_oid, tid, end_tid = unpack(">I8s8s8s", read(28)) + assert saved_oid == oid, (ofs, self.f.tell(), oid, saved_oid) + assert end_tid == z64, (ofs, self.f.tell(), oid) + yield oid, tid + + def dump(self): + from ZODB.utils import oid_repr + print("cache size", len(self)) + L = list(self.contents()) + L.sort() + for oid, tid in L: + print(oid_repr(oid), oid_repr(tid)) + print("dll contents") + L = list(self) + L.sort(lambda x, y: cmp(x.key, y.key)) + for x in L: + end_tid = x.end_tid or z64 + print(oid_repr(x.key[0]), oid_repr(x.key[1]), oid_repr(end_tid)) + print() + + # If `path` isn't None (== we're using a persistent cache file), and + # envar ZEO_CACHE_TRACE is set to a non-empty value, try to open + # path+'.trace' as a trace file, and store the file object in + # self._tracefile. If not, or we can't write to the trace file, disable + # tracing by setting self._trace to a dummy function, and set + # self._tracefile to None. + _tracefile = None + def _trace(self, *a, **kw): + pass + + def _setup_trace(self, path): + _tracefile = None + if path and os.environ.get("ZEO_CACHE_TRACE"): + tfn = path + ".trace" + try: + _tracefile = open(tfn, "ab") + except IOError as msg: + logger.warning("cannot write tracefile %r (%s)", tfn, msg) + else: + logger.info("opened tracefile %r", tfn) + + if _tracefile is None: + return + + now = time.time + def _trace(code, oid=b"", tid=z64, end_tid=z64, dlen=0): + # The code argument is two hex digits; bits 0 and 7 must be zero. + # The first hex digit shows the operation, the second the outcome. + # This method has been carefully tuned to be as fast as possible. + # Note: when tracing is disabled, this method is hidden by a dummy. 
+ encoded = (dlen << 8) + code + if tid is None: + tid = z64 + if end_tid is None: + end_tid = z64 + try: + _tracefile.write( + pack(">iiH8s8s", + int(now()), encoded, len(oid), tid, end_tid) + oid, + ) + except: + print(repr(tid), repr(end_tid)) + raise + + self._trace = _trace + self._tracefile = _tracefile + _trace(0x00) + + def _unsetup_trace(self): + if self._tracefile is not None: + del self._trace + self._tracefile.close() + del self._tracefile + +def sync(f): + f.flush() + +if hasattr(os, 'fsync'): + def sync(f): + f.flush() + os.fsync(f.fileno()) diff --git a/thesisenv/lib/python3.6/site-packages/ZEO/component.xml b/thesisenv/lib/python3.6/site-packages/ZEO/component.xml new file mode 100644 index 0000000..bce46d3 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO/component.xml @@ -0,0 +1,176 @@ + + + + + + + + + The full path to an SSL certificate file. + + + + + + The full path to an SSL key file for the client certificate. + + + + + + Dotted name of importable function for retrieving a password + for the client certificate key. + + + + + + Path to a file or directory containing server certificates to be + authenticated. + + + + + + Verify the host name in the server certificate is as expected. + + + + + + Host name to use for SSL host name checks. + + If ``check-hostname`` is true then use this as the + value to check. If an address is a host/port pair, then this + defaults to the host in the address. + + + + + + + +
+ + + + + + The cache size in bytes, KB or MB. This defaults to a 20MB. + Optional ``KB`` or ``MB`` suffixes can (and usually are) used to + specify units other than bytes. + + + + + + The file path of a persistent cache file + + + + + + Path name to the blob cache directory. + + + + + + Tells whether the cache is a shared writable directory + and that the ZEO protocol should not transfer the file + but only the filename when committing. + + + + + + Maximum size of the ZEO blob cache, in bytes. If not set, then + the cache size isn't checked and the blob directory will + grow without bound. + + This option is ignored if shared_blob_dir is true. + + + + + + ZEO check size as percent of blob_cache_size. The ZEO + cache size will be checked when this many bytes have been + loaded into the cache. Defaults to 10% of the blob cache + size. This option is ignored if shared_blob_dir is true. + + + + + + A flag indicating whether this should be a read-only storage, + defaulting to false (i.e. writing is allowed by default). + + + + + + A flag indicating whether a read-only remote storage should be + acceptable as a fallback when no writable storages are + available. Defaults to false. At most one of read_only and + read_only_fallback should be true. + + + + + + A flag indicating whether calls to sync() should make a server + request, thus causing the storage to wait for any outstanding + invalidations. The sync method is called when transactions are + explicitly begun. + + + + + + How long to wait for an initial connection, defaulting to 30 + seconds. If an initial connection can't be made within this time + limit, then creation of the client storage will fail with a + ``ZEO.Exceptions.ClientDisconnected`` exception. + + After the initial connection, if the client is disconnected: + + - In-flight server requests will fail with a + ``ZEO.Exceptions.ClientDisconnected`` exception. 
+ + - New requests will block for up to ``wait_timeout`` waiting for a + connection to be established before failing with a + ``ZEO.Exceptions.ClientDisconnected`` exception. + + + + + + A label for the client in server logs + + + + + + + + The name of the storage that the client wants to use. If the + ZEO server serves more than one storage, the client selects + the storage it wants to use by name. The default name is '1', + which is also the default name for the ZEO server. + + + + + + The storage name. If unspecified, the address of the server + will be used as the name. + + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/ZEO/hash.py b/thesisenv/lib/python3.6/site-packages/ZEO/hash.py new file mode 100644 index 0000000..1cd42d8 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO/hash.py @@ -0,0 +1,27 @@ +############################################################################## +# +# Copyright (c) 2008 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## + +"""In Python 2.6, the "sha" and "md5" modules have been deprecated +in favor of using hashlib for both. 
This class allows for compatibility +between versions.""" + +try: + import hashlib + sha1 = hashlib.sha1 + new = sha1 +except ImportError: + import sha + sha1 = sha.new + new = sha1 + digest_size = sha.digest_size diff --git a/thesisenv/lib/python3.6/site-packages/ZEO/interfaces.py b/thesisenv/lib/python3.6/site-packages/ZEO/interfaces.py new file mode 100644 index 0000000..ca9a7c6 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO/interfaces.py @@ -0,0 +1,111 @@ +############################################################################## +# +# Copyright (c) 2006 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## + +import zope.interface + +class StaleCache(object): + """A ZEO cache is stale and requires verification. + """ + + def __init__(self, storage): + self.storage = storage + +class IClientCache(zope.interface.Interface): + """Client cache interface. + + Note that caches need to be thread safe. + """ + + def close(): + """Close the cache + """ + + def load(oid): + """Get current data for object + + Returns data and serial, or None. + """ + + def __len__(): + """Retirn the number of items in the cache. + """ + + def store(oid, start_tid, end_tid, data): + """Store data for the object + + The start_tid is the transaction that committed this data. + + The end_tid is the tid of the next transaction that modified + the objects, or None if this is the current version. 
+ """ + + def loadBefore(oid, tid): + """Load the data for the object last modified before the tid + + Returns the data, and start and end tids. + """ + + def invalidate(oid, tid): + """Invalidate data for the object + + If ``tid`` is None, forget all knowledge of `oid`. (``tid`` + can be None only for invalidations generated by startup cache + verification.) + + If ``tid`` isn't None, and we had current data for ``oid``, + stop believing we have current data, and mark the data we had + as being valid only up to `tid`. In all other cases, do + nothing. + """ + + def getLastTid(): + """Get the last tid seen by the cache + + This is the cached last tid we've seen from the server. + + This method may be called from multiple threads. (It's assumed + to be trivial.) + """ + + def setLastTid(tid): + """Save the last tid sent by the server + """ + + def clear(): + """Clear/empty the cache + """ + +class IServeable(zope.interface.Interface): + """Interface provided by storages that can be served by ZEO + """ + + def tpc_transaction(): + """The current transaction being committed. + + If a storage is participating in a two-phase commit, then + return the transaction (object) being committed. Otherwise + return None. + """ + + def lastInvalidations(size): + """Get recent transaction invalidations + + This method is optional and is used to get invalidations + performed by the most recent transactions. + + An iterable of up to size entries must be returned, where each + entry is a transaction id and a sequence of object-id/empty-string + pairs describing the objects written by the + transaction, in chronological order. 
+ """ diff --git a/thesisenv/lib/python3.6/site-packages/ZEO/monitor.py b/thesisenv/lib/python3.6/site-packages/ZEO/monitor.py new file mode 100644 index 0000000..314c681 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO/monitor.py @@ -0,0 +1,113 @@ +############################################################################## +# +# Copyright (c) 2003 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## +"""Monitor behavior of ZEO server and record statistics. +""" +from __future__ import print_function +from __future__ import print_function +from __future__ import print_function +from __future__ import print_function +from __future__ import print_function +from __future__ import print_function +from __future__ import print_function +from __future__ import print_function +from __future__ import print_function +from __future__ import print_function +from __future__ import print_function +from __future__ import print_function +from __future__ import print_function +from __future__ import print_function +from __future__ import print_function +from __future__ import print_function + +import asyncore +import socket +import time +import logging + +zeo_version = 'unknown' +try: + import pkg_resources +except ImportError: + pass +else: + zeo_dist = pkg_resources.working_set.find( + pkg_resources.Requirement.parse('ZODB3') + ) + if zeo_dist is not None: + zeo_version = zeo_dist.version + +class StorageStats(object): + """Per-storage usage statistics.""" + + def __init__(self, 
connections=None): + self.connections = connections + self.loads = 0 + self.stores = 0 + self.commits = 0 + self.aborts = 0 + self.active_txns = 0 + self.lock_time = None + self.conflicts = 0 + self.conflicts_resolved = 0 + self.start = time.ctime() + + @property + def clients(self): + return len(self.connections) + + def parse(self, s): + # parse the dump format + lines = s.split("\n") + for line in lines: + field, value = line.split(":", 1) + if field == "Server started": + self.start = value + elif field == "Clients": + # Hack because we use this both on the server and on + # the client where there are no connections. + self.connections = [0] * int(value) + elif field == "Clients verifying": + self.verifying_clients = int(value) + elif field == "Active transactions": + self.active_txns = int(value) + elif field == "Commit lock held for": + # This assumes + self.lock_time = time.time() - int(value) + elif field == "Commits": + self.commits = int(value) + elif field == "Aborts": + self.aborts = int(value) + elif field == "Loads": + self.loads = int(value) + elif field == "Stores": + self.stores = int(value) + elif field == "Conflicts": + self.conflicts = int(value) + elif field == "Conflicts resolved": + self.conflicts_resolved = int(value) + + def dump(self, f): + print("Server started:", self.start, file=f) + print("Clients:", self.clients, file=f) + print("Clients verifying:", self.verifying_clients, file=f) + print("Active transactions:", self.active_txns, file=f) + if self.lock_time: + howlong = time.time() - self.lock_time + print("Commit lock held for:", int(howlong), file=f) + print("Commits:", self.commits, file=f) + print("Aborts:", self.aborts, file=f) + print("Loads:", self.loads, file=f) + print("Stores:", self.stores, file=f) + print("Conflicts:", self.conflicts, file=f) + print("Conflicts resolved:", self.conflicts_resolved, file=f) diff --git a/thesisenv/lib/python3.6/site-packages/ZEO/nagios.py b/thesisenv/lib/python3.6/site-packages/ZEO/nagios.py 
new file mode 100644 index 0000000..6beeef4 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO/nagios.py @@ -0,0 +1,143 @@ +from __future__ import print_function +############################################################################## +# +# Copyright (c) 2011 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""%prog [options] address + +Where the address is an IPV6 address of the form: [addr]:port, an IPV4 +address of the form: addr:port, or the name of a unix-domain socket file. +""" +import json +import optparse +import os +import re +import socket +import struct +import sys +import time + +NO_TRANSACTION = '0'*16 + +nodiff_names = 'active_txns connections waiting'.split() +diff_names = 'aborts commits conflicts conflicts_resolved loads stores'.split() + +per_times = dict(seconds=1.0, minutes=60.0, hours=3600.0, days=86400.0) + +def new_metric(metrics, storage_id, name, value): + if storage_id == '1': + label = name + else: + if ' ' in storage_id: + label = "'%s:%s'" % (storage_id, name) + else: + label = "%s:%s" % (storage_id, name) + metrics.append("%s=%s" % (label, value)) + +def result(messages, metrics=(), status=None): + if metrics: + messages[0] += '|' + metrics[0] + if len(metrics) > 1: + messages.append('| ' + '\n '.join(metrics[1:])) + print('\n'.join(messages)) + return status + +def error(message): + return result((message, ), (), 2) + +def warn(message): + return result((message, ), (), 1) + +def check(addr, output_metrics, status, per): + m = 
re.match(r'\[(\S+)\]:(\d+)$', addr) + if m: + addr = m.group(1), int(m.group(2)) + s = socket.socket(socket.AF_INET6, socket.SOCK_STREAM) + else: + m = re.match(r'(\S+):(\d+)$', addr) + if m: + addr = m.group(1), int(m.group(2)) + s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + else: + s = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) + try: + s.connect(addr) + except socket.error as err: + return error("Can't connect %s" % err) + + s.sendall(b'\x00\x00\x00\x04ruok') + proto = s.recv(struct.unpack(">I", s.recv(4))[0]) + datas = s.recv(struct.unpack(">I", s.recv(4))[0]) + s.close() + data = json.loads(datas.decode("ascii")) + if not data: + return warn("No storages") + + metrics = [] + messages = [] + level = 0 + if output_metrics: + for storage_id, sdata in sorted(data.items()): + for name in nodiff_names: + new_metric(metrics, storage_id, name, sdata[name]) + + if status: + now = time.time() + if os.path.exists(status): + dt = now - os.stat(status).st_mtime + if dt > 0: # sanity :) + with open(status) as f: # Read previous + old = json.loads(f.read()) + dt /= per_times[per] + for storage_id, sdata in sorted(data.items()): + sdata['sameple-time'] = now + if storage_id in old: + sold = old[storage_id] + for name in diff_names: + v = (sdata[name] - sold[name]) / dt + new_metric(metrics, storage_id, name, v) + with open(status, 'w') as f: # save current + f.write(json.dumps(data)) + + for storage_id, sdata in sorted(data.items()): + if sdata['last-transaction'] == NO_TRANSACTION: + messages.append("Empty storage %r" % storage_id) + level = max(level, 1) + if not messages: + messages.append('OK') + return result(messages, metrics, level or None) + +def main(args=None): + if args is None: + args = sys.argv[1:] + + parser = optparse.OptionParser(__doc__) + parser.add_option( + '-m', '--output-metrics', action="store_true", + help="Output metrics.", + ) + parser.add_option( + '-s', '--status-path', + help="Path to status file, needed to get rate metrics", + ) 
+ parser.add_option( + '-u', '--time-units', type='choice', default='minutes', + choices=['seconds', 'minutes', 'hours', 'days'], + help="Time unit for rate metrics", + ) + (options, args) = parser.parse_args(args) + [addr] = args + return check( + addr, options.output_metrics, options.status_path, options.time_units) + +if __name__ == '__main__': + main() diff --git a/thesisenv/lib/python3.6/site-packages/ZEO/nagios.rst b/thesisenv/lib/python3.6/site-packages/ZEO/nagios.rst new file mode 100644 index 0000000..8d83c0f --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO/nagios.rst @@ -0,0 +1,151 @@ +ZEO Nagios plugin +================= + +ZEO includes a script that provides a nagios monitor plugin: + + >>> import pkg_resources, time + >>> nagios = pkg_resources.load_entry_point( + ... 'ZEO', 'console_scripts', 'zeo-nagios') + +In it's simplest form, the script just checks if it can get status: + + >>> import ZEO + >>> addr, stop = ZEO.server('test.fs', threaded=False) + >>> saddr = ':'.join(map(str, addr)) # (host, port) -> host:port + + >>> nagios([saddr]) + Empty storage u'1' + 1 + +The storage was empty. In that case, the monitor warned as much. + +Let's add some data: + + >>> ZEO.DB(addr).close() + >>> nagios([saddr]) + OK + +If we stop the server, we'll error: + + >>> stop() + >>> nagios([saddr]) + Can't connect [Errno 61] Connection refused + 2 + +Metrics +------- + +The monitor will optionally output server metric data. There are 2 +kinds of metrics it can output, level and rate metric. If we use the +-m/--output-metrics option, we'll just get rate metrics: + + >>> addr, stop = ZEO.server('test.fs', threaded=False) + >>> saddr = ':'.join(map(str, addr)) # (host, port) -> host:port + >>> nagios([saddr, '-m']) + OK|active_txns=0 + | connections=0 + waiting=0 + +We only got the metrics that are levels, like current number of +connections. If we want rate metrics, we need to be able to save +values from run to run. 
We need to use the -s/--status-path option to +specify the name of a file for status information: + + >>> nagios([saddr, '-m', '-sstatus']) + OK|active_txns=0 + | connections=0 + waiting=0 + +We still didn't get any rate metrics, because we've only run once. +Let's actually do something with the database and then make another +sample. + + >>> db = ZEO.DB(addr) + >>> nagios([saddr, '-m', '-sstatus']) + OK|active_txns=0 + | connections=1 + waiting=0 + aborts=0.0 + commits=0.0 + conflicts=0.0 + conflicts_resolved=0.0 + loads=81.226297803 + stores=0.0 + +Note that this time, we saw that there was a connection. + +The ZEO.nagios module provides a check function that can be used by +other monitors (e.g. that get address data from ZooKeeper). It takes: + +- Address string, + +- Metrics flag. + +- Status file name (or None), and + +- Time units for rate metrics + +:: + + >>> import ZEO.nagios + >>> ZEO.nagios.check(saddr, True, 'status', 'seconds') + OK|active_txns=0 + | connections=1 + waiting=0 + aborts=0.0 + commits=0.0 + conflicts=0.0 + conflicts_resolved=0.0 + loads=0.0 + stores=0.0 + + >>> db.close() + >>> stop() + +Multi-storage servers +--------------------- + +A ZEO server can host multiple servers. (This is a feature that will +likely be dropped in the future.) When this is the case, the monitor +profixes metrics with a storage id. + + >>> addr, stop = ZEO.server( + ... storage_conf = """ + ... + ... + ... + ... + ... 
""", threaded=False) + >>> saddr = ':'.join(map(str, addr)) # (host, port) -> host:port + >>> nagios([saddr, '-m', '-sstatus']) + Empty storage u'first'|first:active_txns=0 + Empty storage u'second' + | first:connections=0 + first:waiting=0 + second:active_txns=0 + second:connections=0 + second:waiting=0 + 1 + >>> nagios([saddr, '-m', '-sstatus']) + Empty storage u'first'|first:active_txns=0 + Empty storage u'second' + | first:connections=0 + first:waiting=0 + second:active_txns=0 + second:connections=0 + second:waiting=0 + first:aborts=0.0 + first:commits=0.0 + first:conflicts=0.0 + first:conflicts_resolved=0.0 + first:loads=42.42 + first:stores=0.0 + second:aborts=0.0 + second:commits=0.0 + second:conflicts=0.0 + second:conflicts_resolved=0.0 + second:loads=42.42 + second:stores=0.0 + 1 + + >>> stop() diff --git a/thesisenv/lib/python3.6/site-packages/ZEO/ordering.rst b/thesisenv/lib/python3.6/site-packages/ZEO/ordering.rst new file mode 100644 index 0000000..19c076d --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO/ordering.rst @@ -0,0 +1,276 @@ +============================== +Response ordering requirements +============================== + +ZEO servers are logically concurrent because they serve multiple +clients in multiple threads. Because of this, we have to make sure +that information about object histories remain consistent. + +An object history is a sequence of object revisions. Each revision has +a tid, which is essentially a time stamp. + +We load objects using either ``load``, which returns the current +object. or ``loadBefore``, which returns the object before a specific time/tid. + +When we cache revisions, we record the tid and the next/end tid, which +may be None. The end tid is important for choosing a revision for +``loadBefore``, as well as for determining whether a cached value is +current, for ``load``. + +Because the client and server are multi-threaded, the client may see +data out of order. Let's consider some scenarios. 
In these +scenarios + +Scenarios +========= + +When considering ordering scenarioes, we'll consider 2 different +client behaviors, traditional (T) and loadBefore (B). + +The *traditional* behaviors is that used in ZODB 4. It uses the storage +``load(oid)`` method to load objects if it hasn't seen an invalidation +for the object. If it has seen an invalidation, it uses +``loadBefore(oid, START)``, where ``START`` is the transaction time of +the first invalidation it's seen. If it hasn't seen an invalidation +*for an object*, it uses ``load(oid)`` and then checks again for an +invalidation. If it sees an invalidation, then it retries using +``loadBefore``. This approach **assumes that invalidations for a tid +are returned before loads for a tid**. + +The *loadBefore* behavior, used in ZODB5, always determines +transaction start time, ``START`` at the beginning of a transaction by +calling the storage's ``sync`` method and then querying the storage's +``lastTransaction`` method (and adding 1). It loads objects +exclusively using ``loadBefore(oid, START)``. + +Scenario 1, Invalidations seen after loads for transaction +---------------------------------------------------------- + +This scenario could occur because the commits are for a different +client, and a hypothetical; server doesn't block loads while +committing, or sends invalidations in a way that might delay them (but +not send them out of order). + +T1 + + - client starts a transaction + + - client load(O1) gets O1-T1 + + - client load(O2) + + - Server commits O2-T2 + + - Server loads (O2-T2) + + - Client gets O2-T2, updates the client cache, and completes load + + - Client sees invalidation for O2-T2. If the + client is smart, it doesn't update the cache. + + The transaction now has inconsistent data, because it should have + loaded whatever O2 was before T2. Because the invalidation came + in after O2 was loaded, the load was unaffected. + + B1 + + - client starts a transaction. 
Sets START to T1+1 + + - client loadBefore(O1, T1+1) gets O1-T1, T1, None + + - client loadBefore(O2, T1+1) + + - Server commits O2-T2 + + - Server loadBefore(O2, T1+1) -> O2-T0-T2 + + (assuming that the revision of O2 before T2 was T0) + + - Client gets O2-T0-T2, updates cache. + + - Client sees invalidation for O2-T2. No update to the cache is + necessary. + + In this scenario, loadBefore prevents reading incorrect data. + +A variation on this scenario is that client sees invalidations +tpc_finish in another thread after loads for the same transaction. + +Scenario 2, Client sees invalidations for later transaction before load result +------------------------------------------------------------------------------ + +T2 + + - client starts a transaction + + - client load(O1) gets O1-T1 + + - client load(O2) + + - Server loads (O2-T0) + + - Server commits O2-T2 + + - Client sees invalidation for O2-T2. O2 isn't in the cache, so + nothing to do. + + - Client gets O2-T0, updates the client cache, and completes load + + The cache is now incorrect. It has O2-T0-None, meaning it thinks + O2-T0 is current. + + The transaction is OK, because it got a consistent value for O2. + +B2 + + - client starts a transaction. Sets START to T1+1 + + - client loadBefore(O1, T1+1) gets O1-T1, T1, None + + - client loadBefore(O2, T1+1) + + - Server loadBefore(O2, T1+1) -> O2-T0-None + + - Server commits O2-T2 + + - Client sees invalidation for O2-T2. O2 isn't in the cache, so + nothing to do. + + - Client gets O2-T0-None, and completes load + + ZEO 4 doesn't cache loadBefore results with no ending transaction. + + Assume ZEO 5 updates the client cache. + + For ZEO 5, the cache is now incorrect. It has O2-T0-None, meaning + it thinks O2-T0 is current. + + The transaction is OK, because it got a consistent value for O2. + + In this case, ``loadBefore`` didn't prevent an invalid cache value. 
+ +Scenario 3, client sees invalidation after lastTransaction result +------------------------------------------------------------------ + +(This doesn't effect the traditional behavior.) + +B3 + + - The client cache has a last tid of T1. + + - ZODB calls sync() then calls lastTransaction. Is so configured, + ZEO calls lastTransaction on the server. This is mainly to make a + round trip to get in-flight invalidations. We don't necessarily + need to use the value. In fact, in protocol 5, we could just add a + sync method that just makes a round trip, but does nothing else. + + - Server commits O1-T2, O2-T2. + + - Server reads and returns T2. (It doesn't mater what it returns + + - client sets START to T1+1, because lastTransaction is based on + what's in the cache, which is based on invalidations. + + - Client loadBefore(O1, T2+1), finds O1-T1-None in cache and uses + it. + + - Client gets invalidation for O1-T2. Updates cache to O1-T1-T2. + + - Client loadBefore(O2, T1+1), gets O2-T1-None + + This is OK, as long as the client doesn't do anything with the + lastTransaction result in ``sync``. + +Implementation notes +=================== + +ZEO 4 +----- + +The ZEO 4 server sends data to the client in correct order with +respect to loads and invalidations (or tpc_finish results). This is a +consequence of the fact that invalidations are sent in a callback +called when the storage lock is held, blocking loads while committing, +and, fact that client requests, for a particular client, are +handled by a single thread on the server, and that all output for a +client goes through a thread-safe queue. + +Invalidations are sent from different threads than clients. Outgoing +data is queued, however, using Python lists, which are protected by +the GIL. This means that the serialization provided though storage +locks is preserved by the way that server outputs are queued. **The +queueing mechanism is in part a consequence of the way asyncore, used +by ZEO4, works. 
+ +In ZEO 4 clients, invalidations and loads are handled by separate +threads. This means that even though data arive in order, they may not +be processed in order, + +T1 + The existing servers mitigate this by blocking loads while + committing. On the client, this is still a potential issue because loads + and invalidations are handled by separate threads, however, locks are + used on the client to assure that invalidations are processed before + blocked loads complete. + +T2 + Existing storage servers serialize commits (and thus sending of + invalidations) and loads. As with scenario T1, threading on the + client can cause load results and invalidations to be processed out + of order. To mitigate this, the client uses a load lock to track + when loads are invalidated while in flight and doesn't save to the + cache when they are. This is bad on multiple levels. It serializes + loads even when there are multiple threads. It may prevent writing + to the cache unnecessarily, if the invalidation is for a revision + before the one that was loaded. + +B2 + Here, we avoid incorrect returned values and incorrect cache at the + cost of caching nothing. + + ZEO 4.2.0 addressed this by using the same locking strategy for + ``loadBefore`` that was used for ``load``, thus mitigating B2 the + same way it mitigates T2. + +ZEO 5 +----- + +In ZEO(/ZODB) 5, we want to get more concurrency, both on the client, +and on the server. On the client, cache invalidations and loads are +done by the same thread, which makes things a bit simpler. This let's +us get rid of the client load lock and prevents the scenarios above +with existing servers. + +On the client, we'd like to stop serializing loads and commits. We'd +like commits (tpc_finish calls) to be in flight with loads (and with +other commits). 
In the current protocol, tpc_finish, load and +loadBefore are all synchronous calls that are handled by a single +thread on the server, so these calls end up being serialized on the +server anyway. + +The server-side hndling of invalidations is a bit tricker in ZEO 5 +because there isn't a thread-safe queue of outgoing messages in ZEO 5 +as there was in ZEO 4. The natural approach in ZEO 5 would be to use +asyncio's ``call_soon_threadsafe`` to send invalidations in a client's +thread. This could easily cause invalidations to be sent after loads. +As shown above, this isn't a problem for ZODB 5, at least assuming +that invalidations arrive in order. This would be a problem for +ZODB 4. For this reason, we require ZODB 5 for ZEO 5. + +Note that this approach can't cause invalidations to be sent early, +because they could only be sent by the thread that's busy loading, so +scenario 2 wouldn't happen. + +B2 + Because the server send invalidations by calling + ``call_soon_threadsafe``, it's impoossible for invalidations to be + send while a load request is being handled. + +The main server opportunity is allowing commits for separate oids to +happen concurrently. This wouldn't effect the invalidation/load +ordering though. + +It would be nice not to block loads while making tpc_finish calls, but +storages do this anyway now, so there's nothing to be done about it +now. Storage locking requirements aren't well specified, and probably +should be rethought in light of ZODB5/loadBefore. diff --git a/thesisenv/lib/python3.6/site-packages/ZEO/protocol.txt b/thesisenv/lib/python3.6/site-packages/ZEO/protocol.txt new file mode 100644 index 0000000..3254084 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO/protocol.txt @@ -0,0 +1,55 @@ +ZEO Network Protocol (sans authentication) +========================================== + +This document describes the ZEO network protocol. It assumes that the +optional authentication protocol isn't used. 
At the lowest +level, the protocol consists of sized messages. All communication +between the client and server consists of sized messages. A sized +message consists of a 4-byte unsigned big-endian content length, +followed by the content. There are two subprotocols, for protocol +negotiation, and for normal operation. The normal operation protocol +is a basic RPC protocol. + +In the protocol negotiation phase, the server sends a protocol +identifier to the client. The client chooses a protocol to use to the +server. The client or the server can fail if it doesn't like the +protocol string sent by the other party. After sending their protocol +strings, the client and server switch to RPC mode. + +The RPC protocol uses messages that are pickled tuples consisting of: + +message_id + The message id is used to match replies with requests, allowing + multiple outstanding synchronous requests. + +async_flag + An integer 0 for a regular (2-way) request and 1 for a one-way + request. Two-way requests have a reply. One way requests don't. + ZRS tries to use as many one-way requests as possible to avoid + network round trips. + +name + The name of a method to call. If this is the special string + ".reply", then the message is interpreted as a return from a + synchronous call. + +args + A tuple of positional arguments or returned values. + +After making a connection and negotiating the protocol, the following +interactions occur: + +- The client requests the authentication protocol by calling + getAuthProtocol. For this discussion, we'll assume the server + returns None. Note that if the server doesn't require + authentication, this step is optional. + +- The client calls register passing a storage identifier and a + read-only flag. The server doesn't return a value, but it may raise + an exception either if the storage doesn't exist, or if the + storage is readonly and the read-only flag passed by the client is + false. 
+ +At this point, the client and server send each other messages as +needed. The client may make regular or one-way calls to the +server. The server sends replies and one-way calls to the client. diff --git a/thesisenv/lib/python3.6/site-packages/ZEO/runzeo.py b/thesisenv/lib/python3.6/site-packages/ZEO/runzeo.py new file mode 100644 index 0000000..9df9112 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO/runzeo.py @@ -0,0 +1,399 @@ +############################################################################## +# +# Copyright (c) 2001, 2002, 2003 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## +"""Start the ZEO storage server. + +Usage: %s [-C URL] [-a ADDRESS] [-f FILENAME] [-h] + +Options: +-C/--configuration URL -- configuration file or URL +-a/--address ADDRESS -- server address of the form PORT, HOST:PORT, or PATH + (a PATH must contain at least one "/") +-f/--filename FILENAME -- filename for FileStorage +-t/--timeout TIMEOUT -- transaction timeout in seconds (default no timeout) +-h/--help -- print this usage message and exit +--pid-file PATH -- relative path to output file containing this process's pid; + default $(INSTANCE_HOME)/var/ZEO.pid but only if envar + INSTANCE_HOME is defined + +Unless -C is specified, -a and -f are required. +""" +from __future__ import print_function + +# The code here is designed to be reused by other, similar servers. 
+ +import os +import sys +import signal +import socket +import logging + +import six + +import ZConfig.datatypes +from zdaemon.zdoptions import ZDOptions + +logger = logging.getLogger('ZEO.runzeo') +_pid = str(os.getpid()) + +def log(msg, level=logging.INFO, exc_info=False): + """Internal: generic logging function.""" + message = "(%s) %s" % (_pid, msg) + logger.log(level, message, exc_info=exc_info) + +def parse_binding_address(arg): + # Caution: Not part of the official ZConfig API. + obj = ZConfig.datatypes.SocketBindingAddress(arg) + return obj.family, obj.address + +def windows_shutdown_handler(): + # Called by the signal mechanism on Windows to perform shutdown. + import asyncore + asyncore.close_all() + +class ZEOOptionsMixin(object): + + storages = None + + def handle_address(self, arg): + self.family, self.address = parse_binding_address(arg) + + def handle_filename(self, arg): + from ZODB.config import FileStorage # That's a FileStorage *opener*! + class FSConfig(object): + def __init__(self, name, path): + self._name = name + self.path = path + self.stop = None + def getSectionName(self): + return self._name + if not self.storages: + self.storages = [] + name = str(1 + len(self.storages)) + conf = FileStorage(FSConfig(name, arg)) + self.storages.append(conf) + + testing_exit_immediately = False + def handle_test(self, *args): + self.testing_exit_immediately = True + + def add_zeo_options(self): + self.add(None, None, None, "test", self.handle_test) + self.add(None, None, "a:", "address=", self.handle_address) + self.add(None, None, "f:", "filename=", self.handle_filename) + self.add("family", "zeo.address.family") + self.add("address", "zeo.address.address", + required="no server address specified; use -a or -C") + self.add("read_only", "zeo.read_only", default=0) + self.add("client_conflict_resolution", + "zeo.client_conflict_resolution", + default=0) + self.add("msgpack", "zeo.msgpack", default=0) + self.add("invalidation_queue_size", 
"zeo.invalidation_queue_size", + default=100) + self.add("invalidation_age", "zeo.invalidation_age") + self.add("transaction_timeout", "zeo.transaction_timeout", + "t:", "timeout=", float) + self.add('pid_file', 'zeo.pid_filename', + None, 'pid-file=') + self.add("ssl", "zeo.ssl") + +class ZEOOptions(ZDOptions, ZEOOptionsMixin): + + __doc__ = __doc__ + + logsectionname = "eventlog" + schemadir = os.path.dirname(__file__) + + def __init__(self): + ZDOptions.__init__(self) + self.add_zeo_options() + self.add("storages", "storages", + required="no storages specified; use -f or -C") + + def realize(self, *a, **k): + ZDOptions.realize(self, *a, **k) + nunnamed = [s for s in self.storages if s.name is None] + if nunnamed: + if len(nunnamed) > 1: + return self.usage("No more than one storage may be unnamed.") + if [s for s in self.storages if s.name == '1']: + return self.usage( + "Can't have an unnamed storage and a storage named 1.") + for s in self.storages: + if s.name is None: + s.name = '1' + break + + +class ZEOServer(object): + + def __init__(self, options): + self.options = options + self.server = None + + def main(self): + self.setup_default_logging() + self.check_socket() + self.clear_socket() + self.make_pidfile() + try: + self.open_storages() + self.setup_signals() + self.create_server() + self.loop_forever() + finally: + self.close_server() + self.clear_socket() + self.remove_pidfile() + + def setup_default_logging(self): + if self.options.config_logger is not None: + return + # No log file is configured; default to stderr. 
+ root = logging.getLogger() + root.setLevel(logging.INFO) + fmt = logging.Formatter( + "------\n%(asctime)s %(levelname)s %(name)s %(message)s", + "%Y-%m-%dT%H:%M:%S") + handler = logging.StreamHandler() + handler.setFormatter(fmt) + root.addHandler(handler) + + def check_socket(self): + if (isinstance(self.options.address, tuple) and + self.options.address[1] is None): + self.options.address = self.options.address[0], 0 + return + + if self.can_connect(self.options.family, self.options.address): + self.options.usage("address %s already in use" % + repr(self.options.address)) + + def can_connect(self, family, address): + s = socket.socket(family, socket.SOCK_STREAM) + try: + s.connect(address) + except socket.error: + return 0 + else: + s.close() + return 1 + + def clear_socket(self): + if isinstance(self.options.address, six.string_types): + try: + os.unlink(self.options.address) + except os.error: + pass + + def open_storages(self): + self.storages = {} + for opener in self.options.storages: + log("opening storage %r using %s" + % (opener.name, opener.__class__.__name__)) + self.storages[opener.name] = opener.open() + + def setup_signals(self): + """Set up signal handlers. + + The signal handler for SIGFOO is a method handle_sigfoo(). + If no handler method is defined for a signal, the signal + action is not changed from its initial value. The handler + method is called without additional arguments. + """ + if os.name != "posix": + if os.name == "nt": + self.setup_win32_signals() + return + if hasattr(signal, 'SIGXFSZ'): + signal.signal(signal.SIGXFSZ, signal.SIG_IGN) # Special case + init_signames() + for sig, name in signames.items(): + method = getattr(self, "handle_" + name.lower(), None) + if method is not None: + def wrapper(sig_dummy, frame_dummy, method=method): + method() + signal.signal(sig, wrapper) + + def setup_win32_signals(self): + # Borrow the Zope Signals package win32 support, if available. 
+ # Signals does a check/log for the availability of pywin32. + try: + import Signals.Signals + except ImportError: + logger.debug("Signals package not found. " + "Windows-specific signal handler " + "will *not* be installed.") + return + SignalHandler = Signals.Signals.SignalHandler + if SignalHandler is not None: # may be None if no pywin32. + SignalHandler.registerHandler(signal.SIGTERM, + windows_shutdown_handler) + SignalHandler.registerHandler(signal.SIGINT, + windows_shutdown_handler) + SIGUSR2 = 12 # not in signal module on Windows. + SignalHandler.registerHandler(SIGUSR2, self.handle_sigusr2) + + def create_server(self): + self.server = create_server(self.storages, self.options) + + def loop_forever(self): + if self.options.testing_exit_immediately: + print("testing exit immediately") + else: + self.server.loop() + + def close_server(self): + if self.server is not None: + self.server.close() + + def handle_sigterm(self): + log("terminated by SIGTERM") + sys.exit(0) + + def handle_sigint(self): + log("terminated by SIGINT") + sys.exit(0) + + def handle_sighup(self): + log("restarted by SIGHUP") + sys.exit(1) + + def handle_sigusr2(self): + # log rotation signal - do the same as Zope 2.7/2.8... + if self.options.config_logger is None or os.name not in ("posix", "nt"): + log("received SIGUSR2, but it was not handled!", + level=logging.WARNING) + return + + loggers = [self.options.config_logger] + + if os.name == "posix": + for l in loggers: + l.reopen() + log("Log files reopened successfully", level=logging.INFO) + else: # nt - same rotation code as in Zope's Signals/Signals.py + for l in loggers: + for f in l.handler_factories: + handler = f() + if hasattr(handler, 'rotate') and callable(handler.rotate): + handler.rotate() + log("Log files rotation complete", level=logging.INFO) + + def _get_pidfile(self): + pidfile = self.options.pid_file + # 'pidfile' is marked as not required. 
+ if not pidfile: + # Try to find a reasonable location if the pidfile is not + # set. If we are running in a Zope environment, we can + # safely assume INSTANCE_HOME. + instance_home = os.environ.get("INSTANCE_HOME") + if not instance_home: + # If all our attempts failed, just log a message and + # proceed. + logger.debug("'pidfile' option not set, and 'INSTANCE_HOME' " + "environment variable could not be found. " + "Cannot guess pidfile location.") + return + self.options.pid_file = os.path.join(instance_home, + "var", "ZEO.pid") + + def make_pidfile(self): + if not self.options.read_only: + self._get_pidfile() + pidfile = self.options.pid_file + if pidfile is None: + return + pid = os.getpid() + try: + if os.path.exists(pidfile): + os.unlink(pidfile) + f = open(pidfile, 'w') + print(pid, file=f) + f.close() + log("created PID file '%s'" % pidfile) + except IOError: + logger.error("PID file '%s' cannot be opened" % pidfile) + + def remove_pidfile(self): + if not self.options.read_only: + pidfile = self.options.pid_file + if pidfile is None: + return + try: + if os.path.exists(pidfile): + os.unlink(pidfile) + log("removed PID file '%s'" % pidfile) + except IOError: + logger.error("PID file '%s' could not be removed" % pidfile) + + +def create_server(storages, options): + from .StorageServer import StorageServer + return StorageServer( + options.address, + storages, + read_only = options.read_only, + client_conflict_resolution=options.client_conflict_resolution, + msgpack=(options.msgpack if isinstance(options.msgpack, bool) + else os.environ.get('ZEO_MSGPACK')), + invalidation_queue_size = options.invalidation_queue_size, + invalidation_age = options.invalidation_age, + transaction_timeout = options.transaction_timeout, + ssl = options.ssl, + ) + + +# Signal names + +signames = None + +def signame(sig): + """Return a symbolic name for a signal. + + Return "signal NNN" if there is no corresponding SIG name in the + signal module. 
+ """ + + if signames is None: + init_signames() + return signames.get(sig) or "signal %d" % sig + +def init_signames(): + global signames + signames = {} + for name, sig in signal.__dict__.items(): + k_startswith = getattr(name, "startswith", None) + if k_startswith is None: + continue + if k_startswith("SIG") and not k_startswith("SIG_"): + signames[sig] = name + + +# Main program + +def main(args=None): + options = ZEOOptions() + options.realize(args) + s = ZEOServer(options) + s.main() + +def run(args): + options = ZEOOptions() + options.realize(args) + s = ZEOServer(options) + s.run() + +if __name__ == "__main__": + main() diff --git a/thesisenv/lib/python3.6/site-packages/ZEO/schema.xml b/thesisenv/lib/python3.6/site-packages/ZEO/schema.xml new file mode 100644 index 0000000..ca15272 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO/schema.xml @@ -0,0 +1,40 @@ + + + + + + This schema describes the configuration of the ZEO storage server + process. + + + + + + + + + + + + + + +
+ +
+ + + + One or more storages that are provided by the ZEO server. The + section names are used as the storage names, and must be unique + within each ZEO storage server. Traditionally, these names + represent small integers starting at '1'. + + + +
+ + diff --git a/thesisenv/lib/python3.6/site-packages/ZEO/scripts/README.txt b/thesisenv/lib/python3.6/site-packages/ZEO/scripts/README.txt new file mode 100644 index 0000000..d43d83f --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO/scripts/README.txt @@ -0,0 +1,64 @@ +This directory contains a collection of utilities for working with +ZEO. Some are more useful than others. If you install ZODB using +distutils ("python setup.py install"), some of these will be +installed. + +Unless otherwise noted, these scripts are invoked with the name of the +Data.fs file as their only argument. Example: checkbtrees.py data.fs. + + +parsezeolog.py -- parse BLATHER logs from ZEO server + +This script may be obsolete. It has not been tested against the +current log output of the ZEO server. + +Reports on the time and size of transactions committed by a ZEO +server, by inspecting log messages at BLATHER level. + + + +timeout.py -- script to test transaction timeout + +usage: timeout.py address delay [storage-name] + +This script connects to a storage, begins a transaction, calls store() +and tpc_vote(), and then sleeps forever. This should trigger the +transaction timeout feature of the server. + + +zeopack.py -- pack a ZEO server + +The script connects to a server and calls pack() on a specific +storage. See the script for usage details. + + +zeoreplay.py -- experimental script to replay transactions from a ZEO log + +Like parsezeolog.py, this may be obsolete because it was written +against an earlier version of the ZEO server. See the script for +usage details. + + +zeoup.py + +usage: zeoup.py [options] + +The test will connect to a ZEO server, load the root object, and +attempt to update the zeoup counter in the root. It will report +success if it updates to counter or if it gets a ConflictError. A +ConflictError is considered a success, because the client was able to +start a transaction. + +See the script for details about the options. 
+ + + +zeoserverlog.py -- analyze ZEO server log for performance statistics + +See the module docstring for details; there are a large number of +options. New in ZODB3 3.1.4. + + +zeoqueue.py -- report number of clients currently waiting in the ZEO queue + +See the module docstring for details. diff --git a/thesisenv/lib/python3.6/site-packages/ZEO/scripts/__init__.py b/thesisenv/lib/python3.6/site-packages/ZEO/scripts/__init__.py new file mode 100644 index 0000000..792d600 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO/scripts/__init__.py @@ -0,0 +1 @@ +# diff --git a/thesisenv/lib/python3.6/site-packages/ZEO/scripts/cache_simul.py b/thesisenv/lib/python3.6/site-packages/ZEO/scripts/cache_simul.py new file mode 100644 index 0000000..bcbec9e --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO/scripts/cache_simul.py @@ -0,0 +1,581 @@ +#! /usr/bin/env python +############################################################################## +# +# Copyright (c) 2001-2005 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## +""" +Cache simulation. + + +Note: + +- The simulation isn't perfect. 
+ +- The simulation will be far off if the trace file + was created starting with a non-empty cache +""" +from __future__ import print_function, absolute_import + +import bisect +import struct +import re +import sys +import ZEO.cache +import argparse + +from ZODB.utils import z64 + +from .cache_stats import add_interval_argument +from .cache_stats import add_tracefile_argument + +# we assign ctime locally to facilitate test replacement! +from time import ctime +import six + + +def main(args=None): + if args is None: + args = sys.argv[1:] + # Parse options. + MB = 1<<20 + parser = argparse.ArgumentParser(description=__doc__) + parser.add_argument("--size", "-s", + default=20*MB, dest="cachelimit", + type=lambda s: int(float(s)*MB), + help="cache size in MB (default 20MB)") + add_interval_argument(parser) + parser.add_argument("--rearrange", "-r", + default=0.8, type=float, + help="rearrange factor") + add_tracefile_argument(parser) + + simclass = CircularCacheSimulation + + options = parser.parse_args(args) + + f = options.tracefile + interval_step = options.interval + + # Create simulation object. + sim = simclass(options.cachelimit, options.rearrange) + interval_sim = simclass(options.cachelimit, options.rearrange) + + # Print output header. + sim.printheader() + + # Read trace file, simulating cache behavior. + f_read = f.read + unpack = struct.unpack + FMT = ">iiH8s8s" + FMT_SIZE = struct.calcsize(FMT) + assert FMT_SIZE == 26 + + last_interval = None + while 1: + # Read a record and decode it. + r = f_read(FMT_SIZE) + if len(r) < FMT_SIZE: + break + ts, code, oidlen, start_tid, end_tid = unpack(FMT, r) + if ts == 0: + # Must be a misaligned record caused by a crash; skip 8 bytes + # and try again. Why 8? Lost in the mist of history. + f.seek(f.tell() - FMT_SIZE + 8) + continue + oid = f_read(oidlen) + if len(oid) < oidlen: + break + # Decode the code. 
+ dlen, version, code = ((code & 0x7fffff00) >> 8, + code & 0x80, + code & 0x7e) + # And pass it to the simulation. + this_interval = int(ts) // interval_step + if this_interval != last_interval: + if last_interval is not None: + interval_sim.report() + interval_sim.restart() + if not interval_sim.warm: + sim.restart() + last_interval = this_interval + sim.event(ts, dlen, version, code, oid, start_tid, end_tid) + interval_sim.event(ts, dlen, version, code, oid, start_tid, end_tid) + + f.close() + # Finish simulation. + interval_sim.report() + sim.finish() + +class Simulation(object): + """Base class for simulations. + + The driver program calls: event(), printheader(), finish(). + + The standard event() method calls these additional methods: + write(), load(), inval(), report(), restart(); the standard + finish() method also calls report(). + """ + + def __init__(self, cachelimit, rearrange): + self.cachelimit = cachelimit + self.rearrange = rearrange + # Initialize global statistics. + self.epoch = None + self.warm = False + self.total_loads = 0 + self.total_hits = 0 # subclass must increment + self.total_invals = 0 # subclass must increment + self.total_writes = 0 + if not hasattr(self, "extras"): + self.extras = (self.extraname,) + self.format = self.format + " %7s" * len(self.extras) + # Reset per-run statistics and set up simulation data. + self.restart() + + def restart(self): + # Reset per-run statistics. + self.loads = 0 + self.hits = 0 # subclass must increment + self.invals = 0 # subclass must increment + self.writes = 0 + self.ts0 = None + + def event(self, ts, dlen, _version, code, oid, + start_tid, end_tid): + # Record first and last timestamp seen. + if self.ts0 is None: + self.ts0 = ts + if self.epoch is None: + self.epoch = ts + self.ts1 = ts + + # Simulate cache behavior. 
Caution: the codes in the trace file + # record whether the actual cache missed or hit on each load, but + # that bears no necessary relationship to whether the simulated cache + # will hit or miss. Relatedly, if the actual cache needed to store + # an object, the simulated cache may not need to (it may already + # have the data). + action = code & 0x70 + if action & 0x20: + # Load. + self.loads += 1 + self.total_loads += 1 + # Asserting that dlen is 0 iff it's a load miss. + # assert (dlen == 0) == (code in (0x20, 0x24)) + self.load(oid, dlen, start_tid, code) + elif action & 0x40: + # Store. + assert dlen + self.write(oid, dlen, start_tid, end_tid) + elif action & 0x10: + # Invalidate. + self.inval(oid, start_tid) + elif action == 0x00: + # Restart. + self.restart() + else: + raise ValueError("unknown trace code 0x%x" % code) + + def write(self, oid, size, start_tid, end_tid): + pass + + def load(self, oid, size, start_tid, code): + # Must increment .hits and .total_hits as appropriate. + pass + + def inval(self, oid, start_tid): + # Must increment .invals and .total_invals as appropriate. 
+ pass + + format = "%12s %6s %7s %7s %6s %6s %7s" + + # Subclass should override extraname to name known instance variables; + # if extraname is 'foo', both self.foo and self.total_foo must exist: + extraname = "*** please override ***" + + def printheader(self): + print("%s, cache size %s bytes" % (self.__class__.__name__, + addcommas(self.cachelimit))) + self.extraheader() + extranames = tuple([s.upper() for s in self.extras]) + args = ("START TIME", "DUR.", "LOADS", "HITS", + "INVALS", "WRITES", "HITRATE") + extranames + print(self.format % args) + + def extraheader(self): + pass + + nreports = 0 + + def report(self): + if not hasattr(self, 'ts1'): + return + self.nreports += 1 + args = (ctime(self.ts0)[4:-8], + duration(self.ts1 - self.ts0), + self.loads, self.hits, self.invals, self.writes, + hitrate(self.loads, self.hits)) + args += tuple([getattr(self, name) for name in self.extras]) + print(self.format % args) + + def finish(self): + # Make sure that the last line of output ends with "OVERALL". This + # makes it much easier for another program parsing the output to + # find summary statistics. + print('-'*74) + if self.nreports < 2: + self.report() + else: + self.report() + args = ( + ctime(self.epoch)[4:-8], + duration(self.ts1 - self.epoch), + self.total_loads, + self.total_hits, + self.total_invals, + self.total_writes, + hitrate(self.total_loads, self.total_hits)) + args += tuple([getattr(self, "total_" + name) + for name in self.extras]) + print(self.format % args) + + +# For use in CircularCacheSimulation. +class CircularCacheEntry(object): + __slots__ = ( + # object key: an (oid, start_tid) pair, where start_tid is the + # tid of the transaction that created this revision of oid + 'key', + + # tid of transaction that created the next revision; z64 iff + # this is the current revision + 'end_tid', + + # Offset from start of file to the object's data record; this + # includes all overhead bytes (status byte, size bytes, etc). 
+ 'offset', + ) + + def __init__(self, key, end_tid, offset): + self.key = key + self.end_tid = end_tid + self.offset = offset + +from ZEO.cache import ZEC_HEADER_SIZE + +class CircularCacheSimulation(Simulation): + """Simulate the ZEO 3.0 cache.""" + + # The cache is managed as a single file with a pointer that + # goes around the file, circularly, forever. New objects + # are written at the current pointer, evicting whatever was + # there previously. + + extras = "evicts", "inuse" + + evicts = 0 + + def __init__(self, cachelimit, rearrange): + from ZEO import cache + + Simulation.__init__(self, cachelimit, rearrange) + self.total_evicts = 0 # number of cache evictions + + # Current offset in file. + self.offset = ZEC_HEADER_SIZE + + # Map offset in file to (size, CircularCacheEntry) pair, or to + # (size, None) if the offset starts a free block. + self.filemap = {ZEC_HEADER_SIZE: (self.cachelimit - ZEC_HEADER_SIZE, + None)} + # Map key to CircularCacheEntry. A key is an (oid, tid) pair. + self.key2entry = {} + + # Map oid to tid of current revision. + self.current = {} + + # Map oid to list of (start_tid, end_tid) pairs in sorted order. + # Used to find matching key for load of non-current data. + self.noncurrent = {} + + # The number of overhead bytes needed to store an object pickle + # on disk (all bytes beyond those needed for the object pickle). + self.overhead = ZEO.cache.allocated_record_overhead + + # save evictions so we can replay them, if necessary + self.evicted = {} + + def restart(self): + Simulation.restart(self) + if self.evicts: + self.warm = True + self.evicts = 0 + self.evicted_hit = self.evicted_miss = 0 + + evicted_hit = evicted_miss = 0 + def load(self, oid, size, tid, code): + if (code == 0x20) or (code == 0x22): + # Trying to load current revision. 
+ if oid in self.current: # else it's a cache miss + self.hits += 1 + self.total_hits += 1 + + tid = self.current[oid] + entry = self.key2entry[(oid, tid)] + offset_offset = self.offset - entry.offset + if offset_offset < 0: + offset_offset += self.cachelimit + assert offset_offset >= 0 + + if offset_offset > self.rearrange * self.cachelimit: + # we haven't accessed it in a while. Move it forward + size = self.filemap[entry.offset][0] + self._remove(*entry.key) + self.add(oid, size, tid) + + elif oid in self.evicted: + size, e = self.evicted[oid] + self.write(oid, size, e.key[1], z64, 1) + self.evicted_hit += 1 + else: + self.evicted_miss += 1 + + return + + # May or may not be trying to load current revision. + cur_tid = self.current.get(oid) + if cur_tid == tid: + self.hits += 1 + self.total_hits += 1 + return + + # It's a load for non-current data. Do we know about this oid? + L = self.noncurrent.get(oid) + if L is None: + return # cache miss + i = bisect.bisect_left(L, (tid, None)) + if i == 0: + # This tid is smaller than any we know about -- miss. + return + lo, hi = L[i-1] + assert lo < tid + if tid > hi: + # No data in the right tid range -- miss. + return + # Cache hit. + self.hits += 1 + self.total_hits += 1 + + # (oid, tid) is in the cache. Remove it: take it out of key2entry, + # and in `filemap` mark the space it occupied as being free. The + # caller is responsible for removing it from `current` or `noncurrent`. 
+ def _remove(self, oid, tid): + key = oid, tid + e = self.key2entry.pop(key) + pos = e.offset + size, _e = self.filemap[pos] + assert e is _e + self.filemap[pos] = size, None + + def _remove_noncurrent_revisions(self, oid): + noncurrent_list = self.noncurrent.get(oid) + if noncurrent_list: + self.invals += len(noncurrent_list) + self.total_invals += len(noncurrent_list) + for start_tid, end_tid in noncurrent_list: + self._remove(oid, start_tid) + del self.noncurrent[oid] + + def inval(self, oid, tid): + if tid == z64: + # This is part of startup cache verification: forget everything + # about this oid. + self._remove_noncurrent_revisions(oid) + + if oid in self.evicted: + del self.evicted[oid] + + cur_tid = self.current.get(oid) + if cur_tid is None: + # We don't have current data, so nothing more to do. + return + + # We had current data for oid, but no longer. + self.invals += 1 + self.total_invals += 1 + del self.current[oid] + if tid == z64: + # Startup cache verification: forget this oid entirely. + self._remove(oid, cur_tid) + return + + # Our current data becomes non-current data. + # Add the validity range to the list of non-current data for oid. + assert cur_tid < tid + L = self.noncurrent.setdefault(oid, []) + bisect.insort_left(L, (cur_tid, tid)) + # Update the end of oid's validity range in its CircularCacheEntry. + e = self.key2entry[oid, cur_tid] + assert e.end_tid == z64 + e.end_tid = tid + + def write(self, oid, size, start_tid, end_tid, evhit=0): + if end_tid == z64: + # Storing current revision. + if oid in self.current: # we already have it in cache + if evhit: + import pdb; pdb.set_trace() + raise ValueError('WTF') + return + self.current[oid] = start_tid + self.writes += 1 + self.total_writes += 1 + self.add(oid, size, start_tid) + return + if evhit: + import pdb; pdb.set_trace() + raise ValueError('WTF') + # Storing non-current revision. 
+ L = self.noncurrent.setdefault(oid, []) + p = start_tid, end_tid + if p in L: + return # we already have it in cache + bisect.insort_left(L, p) + self.writes += 1 + self.total_writes += 1 + self.add(oid, size, start_tid, end_tid) + + # Add `oid` to the cache, evicting objects as needed to make room. + # This updates `filemap` and `key2entry`; it's the caller's + # responsibilty to update `current` or `noncurrent` appropriately. + def add(self, oid, size, start_tid, end_tid=z64): + key = oid, start_tid + assert key not in self.key2entry + size += self.overhead + avail = self.makeroom(size+1) # see cache.py + e = CircularCacheEntry(key, end_tid, self.offset) + self.filemap[self.offset] = size, e + self.key2entry[key] = e + self.offset += size + # All the space made available must be accounted for in filemap. + excess = avail - size + if excess: + self.filemap[self.offset] = excess, None + + # Evict enough objects to make at least `need` contiguous bytes, starting + # at `self.offset`, available. Evicted objects are removed from + # `filemap`, `key2entry`, `current` and `noncurrent`. The caller is + # responsible for adding new entries to `filemap` to account for all + # the freed bytes, and for advancing `self.offset`. The number of bytes + # freed is the return value, and will be >= need. 
+ def makeroom(self, need): + if self.offset + need > self.cachelimit: + self.offset = ZEC_HEADER_SIZE + pos = self.offset + while need > 0: + assert pos < self.cachelimit + size, e = self.filemap.pop(pos) + if e: # there is an object here (else it's already free space) + self.evicts += 1 + self.total_evicts += 1 + assert pos == e.offset + _e = self.key2entry.pop(e.key) + assert e is _e + oid, start_tid = e.key + if e.end_tid == z64: + del self.current[oid] + self.evicted[oid] = size-self.overhead, e + else: + L = self.noncurrent[oid] + L.remove((start_tid, e.end_tid)) + need -= size + pos += size + return pos - self.offset # total number of bytes freed + + def report(self): + self.check() + free = used = total = 0 + for size, e in six.itervalues(self.filemap): + total += size + if e: + used += size + else: + free += size + + self.inuse = round(100.0 * used / total, 1) + self.total_inuse = self.inuse + Simulation.report(self) + #print self.evicted_hit, self.evicted_miss + + def check(self): + oidcount = 0 + pos = ZEC_HEADER_SIZE + while pos < self.cachelimit: + size, e = self.filemap[pos] + if e: + oidcount += 1 + assert self.key2entry[e.key].offset == pos + pos += size + assert oidcount == len(self.key2entry) + assert pos == self.cachelimit + + def dump(self): + print(len(self.filemap)) + L = list(self.filemap) + L.sort() + for k in L: + v = self.filemap[k] + print(k, v[0], repr(v[1])) + + +def roundup(size): + k = MINSIZE + while k < size: + k += k + return k + +def hitrate(loads, hits): + if loads < 1: + return 'n/a' + return "%5.1f%%" % (100.0 * hits / loads) + +def duration(secs): + mm, ss = divmod(secs, 60) + hh, mm = divmod(mm, 60) + if hh: + return "%d:%02d:%02d" % (hh, mm, ss) + if mm: + return "%d:%02d" % (mm, ss) + return "%d" % ss + +nre = re.compile('([=-]?)(\d+)([.]\d*)?').match +def addcommas(n): + sign, s, d = nre(str(n)).group(1, 2, 3) + if d == '.0': + d = '' + + result = s[-3:] + s = s[:-3] + while s: + result = s[-3:]+','+result + s = s[:-3] + + 
return (sign or '') + result + (d or '') + +import random + +def maybe(f, p=0.5): + if random.random() < p: + f() + +if __name__ == "__main__": + sys.exit(main()) diff --git a/thesisenv/lib/python3.6/site-packages/ZEO/scripts/cache_stats.py b/thesisenv/lib/python3.6/site-packages/ZEO/scripts/cache_stats.py new file mode 100644 index 0000000..293994a --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO/scripts/cache_stats.py @@ -0,0 +1,381 @@ +from __future__ import print_function +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## +"""Trace file statistics analyzer. + +File format: + +Each record is 26 bytes, plus a variable number of bytes to store an oid, +with the following layout. Numbers are big-endian integers. + +Offset Size Contents + +0 4 timestamp (seconds since 1/1/1970) +4 3 data size, in 256-byte increments, rounded up +7 1 code (see below) +8 2 object id length +10 8 start tid +18 8 end tid +26 variable object id + +The code at offset 7 packs three fields: + +Mask bits Contents + +0x80 1 set if there was a non-empty version string +0x7e 6 function and outcome code +0x01 1 current cache file (0 or 1) + +The "current cache file" bit is no longer used; it refers to a 2-file +cache scheme used before ZODB 3.3. + +The function and outcome codes are documented in detail at the end of +this file in the 'explain' dictionary. 
Note that the keys there (and +also the arguments to _trace() in ClientStorage.py) are 'code & 0x7e', +i.e. the low bit is always zero. +""" +import sys +import time +import argparse +import struct +import gzip + +# we assign ctime locally to facilitate test replacement! +from time import ctime +import six + +def add_interval_argument(parser): + def _interval(a): + interval = int(60 * float(a)) + if interval <= 0: + interval = 60 + elif interval > 3600: + interval = 3600 + return interval + parser.add_argument("--interval", "-i", + default=15*60, type=_interval, + help="summarizing interval in minutes (default 15; max 60)") + +def add_tracefile_argument(parser): + + class GzipFileType(argparse.FileType): + def __init__(self): + super(GzipFileType, self).__init__(mode='rb') + + def __call__(self, s): + f = super(GzipFileType, self).__call__(s) + if s.endswith(".gz"): + f = gzip.GzipFile(filename=s, fileobj=f) + return f + + parser.add_argument("tracefile", type=GzipFileType(), + help="The trace to read; may be gzipped") + +def main(args=None): + if args is None: + args = sys.argv[1:] + # Parse options + parser = argparse.ArgumentParser(description="Trace file statistics analyzer", + # Our -h, short for --load-histogram + # conflicts with default for help, so we handle + # manually. 
+ add_help=False) + verbose_group = parser.add_mutually_exclusive_group() + verbose_group.add_argument('--verbose', '-v', + default=False, action='store_true', + help="Be verbose; print each record") + verbose_group.add_argument('--quiet', '-q', + default=False, action='store_true', + help="Reduce output; don't print summaries") + parser.add_argument("--sizes", '-s', + default=False, action="store_true", dest="print_size_histogram", + help="print histogram of object sizes") + parser.add_argument("--no-stats", '-S', + default=True, action="store_false", dest="dostats", + help="don't print statistics") + parser.add_argument("--load-histogram", "-h", + default=False, action="store_true", dest="print_histogram", + help="print histogram of object load frequencies") + parser.add_argument("--check", "-X", + default=False, action="store_true", dest="heuristic", + help=" enable heuristic checking for misaligned records: oids > 2**32" + " will be rejected; this requires the tracefile to be seekable") + add_interval_argument(parser) + add_tracefile_argument(parser) + + if '--help' in args: + parser.print_help() + sys.exit(2) + + options = parser.parse_args(args) + + f = options.tracefile + + rt0 = time.time() + bycode = {} # map code to count of occurrences + byinterval = {} # map code to count in current interval + records = 0 # number of trace records read + versions = 0 # number of trace records with versions + datarecords = 0 # number of records with dlen set + datasize = 0 # sum of dlen across records with dlen set + oids = {} # map oid to number of times it was loaded + bysize = {} # map data size to number of loads + bysizew = {} # map data size to number of writes + total_loads = 0 + t0 = None # first timestamp seen + te = None # most recent timestamp seen + h0 = None # timestamp at start of current interval + he = None # timestamp at end of current interval + thisinterval = None # generally te//interval + f_read = f.read + unpack = struct.unpack + FMT = ">iiH8s8s" + 
FMT_SIZE = struct.calcsize(FMT) + assert FMT_SIZE == 26 + # Read file, gathering statistics, and printing each record if verbose. + print(' '*16, "%7s %7s %7s %7s" % ('loads', 'hits', 'inv(h)', 'writes'), end=' ') + print('hitrate') + try: + while 1: + r = f_read(FMT_SIZE) + if len(r) < FMT_SIZE: + break + ts, code, oidlen, start_tid, end_tid = unpack(FMT, r) + if ts == 0: + # Must be a misaligned record caused by a crash. + if not options.quiet: + print("Skipping 8 bytes at offset", f.tell() - FMT_SIZE) + f.seek(f.tell() - FMT_SIZE + 8) + continue + oid = f_read(oidlen) + if len(oid) < oidlen: + break + records += 1 + if t0 is None: + t0 = ts + thisinterval = t0 // options.interval + h0 = he = ts + te = ts + if ts // options.interval != thisinterval: + if not options.quiet: + dumpbyinterval(byinterval, h0, he) + byinterval = {} + thisinterval = ts // options.interval + h0 = ts + he = ts + dlen, code = (code & 0x7fffff00) >> 8, code & 0xff + if dlen: + datarecords += 1 + datasize += dlen + if code & 0x80: + version = 'V' + versions += 1 + else: + version = '-' + code &= 0x7e + bycode[code] = bycode.get(code, 0) + 1 + byinterval[code] = byinterval.get(code, 0) + 1 + if dlen: + if code & 0x70 == 0x20: # All loads + bysize[dlen] = d = bysize.get(dlen) or {} + d[oid] = d.get(oid, 0) + 1 + elif code & 0x70 == 0x50: # All stores + bysizew[dlen] = d = bysizew.get(dlen) or {} + d[oid] = d.get(oid, 0) + 1 + if options.verbose: + print("%s %02x %s %016x %016x %c%s" % ( + ctime(ts)[4:-5], + code, + oid_repr(oid), + U64(start_tid), + U64(end_tid), + version, + dlen and (' '+str(dlen)) or "")) + if code & 0x70 == 0x20: + oids[oid] = oids.get(oid, 0) + 1 + total_loads += 1 + elif code == 0x00: # restart + if not options.quiet: + dumpbyinterval(byinterval, h0, he) + byinterval = {} + thisinterval = ts // options.interval + h0 = he = ts + if not options.quiet: + print(ctime(ts)[4:-5], end=' ') + print('='*20, "Restart", '='*20) + except KeyboardInterrupt: + print("\nInterrupted. 
Stats so far:\n") + + end_pos = f.tell() + f.close() + rte = time.time() + if not options.quiet: + dumpbyinterval(byinterval, h0, he) + + # Error if nothing was read + if not records: + print("No records processed", file=sys.stderr) + return 1 + + # Print statistics + if options.dostats: + print() + print("Read %s trace records (%s bytes) in %.1f seconds" % ( + addcommas(records), addcommas(end_pos), rte-rt0)) + print("Versions: %s records used a version" % addcommas(versions)) + print("First time: %s" % ctime(t0)) + print("Last time: %s" % ctime(te)) + print("Duration: %s seconds" % addcommas(te-t0)) + print("Data recs: %s (%.1f%%), average size %d bytes" % ( + addcommas(datarecords), + 100.0 * datarecords / records, + datasize / datarecords)) + print("Hit rate: %.1f%% (load hits / loads)" % hitrate(bycode)) + print() + codes = sorted(bycode.keys()) + print("%13s %4s %s" % ("Count", "Code", "Function (action)")) + for code in codes: + print("%13s %02x %s" % ( + addcommas(bycode.get(code, 0)), + code, + explain.get(code) or "*** unknown code ***")) + + # Print histogram. + if options.print_histogram: + print() + print("Histogram of object load frequency") + total = len(oids) + print("Unique oids: %s" % addcommas(total)) + print("Total loads: %s" % addcommas(total_loads)) + s = addcommas(total) + width = max(len(s), len("objects")) + fmt = "%5d %" + str(width) + "s %5.1f%% %5.1f%% %5.1f%%" + hdr = "%5s %" + str(width) + "s %6s %6s %6s" + print(hdr % ("loads", "objects", "%obj", "%load", "%cum")) + cum = 0.0 + for binsize, count in histogram(oids): + obj_percent = 100.0 * count / total + load_percent = 100.0 * count * binsize / total_loads + cum += load_percent + print(fmt % (binsize, addcommas(count), + obj_percent, load_percent, cum)) + + # Print size histogram. 
+ if options.print_size_histogram: + print() + print("Histograms of object sizes") + print() + dumpbysize(bysizew, "written", "writes") + dumpbysize(bysize, "loaded", "loads") + +def dumpbysize(bysize, how, how2): + print() + print("Unique sizes %s: %s" % (how, addcommas(len(bysize)))) + print("%10s %6s %6s" % ("size", "objs", how2)) + sizes = sorted(bysize.keys()) + for size in sizes: + loads = 0 + for n in six.itervalues(bysize[size]): + loads += n + print("%10s %6d %6d" % (addcommas(size), + len(bysize.get(size, "")), + loads)) + +def dumpbyinterval(byinterval, h0, he): + loads = hits = invals = writes = 0 + for code in byinterval: + if code & 0x20: + n = byinterval[code] + loads += n + if code in (0x22, 0x26): + hits += n + elif code & 0x40: + writes += byinterval[code] + elif code & 0x10: + if code != 0x10: + invals += byinterval[code] + + if loads: + hr = "%5.1f%%" % (100.0 * hits / loads) + else: + hr = 'n/a' + + print("%s-%s %7s %7s %7s %7s %7s" % ( + ctime(h0)[4:-8], ctime(he)[14:-8], + loads, hits, invals, writes, hr)) + +def hitrate(bycode): + loads = hits = 0 + for code in bycode: + if code & 0x70 == 0x20: + n = bycode[code] + loads += n + if code in (0x22, 0x26): + hits += n + if loads: + return 100.0 * hits / loads + else: + return 0.0 + +def histogram(d): + bins = {} + for v in six.itervalues(d): + bins[v] = bins.get(v, 0) + 1 + L = sorted(bins.items()) + return L + +def U64(s): + return struct.unpack(">Q", s)[0] + +def oid_repr(oid): + if isinstance(oid, six.binary_type) and len(oid) == 8: + return '%16x' % U64(oid) + else: + return repr(oid) + +def addcommas(n): + sign, s = '', str(n) + if s[0] == '-': + sign, s = '-', s[1:] + i = len(s) - 3 + while i > 0: + s = s[:i] + ',' + s[i:] + i -= 3 + return sign + s + +explain = { + # The first hex digit shows the operation, the second the outcome. + # If the second digit is in "02468" then it is a 'miss'. + # If it is in "ACE" then it is a 'hit'. 
+ + 0x00: "_setup_trace (initialization)", + + 0x10: "invalidate (miss)", + 0x1A: "invalidate (hit, version)", + 0x1C: "invalidate (hit, saving non-current)", + # 0x1E can occur during startup verification. + 0x1E: "invalidate (hit, discarding current or non-current)", + + 0x20: "load (miss)", + 0x22: "load (hit)", + 0x24: "load (non-current, miss)", + 0x26: "load (non-current, hit)", + + 0x50: "store (version)", + 0x52: "store (current, non-version)", + 0x54: "store (non-current)", + } + +if __name__ == "__main__": + sys.exit(main()) diff --git a/thesisenv/lib/python3.6/site-packages/ZEO/scripts/parsezeolog.py b/thesisenv/lib/python3.6/site-packages/ZEO/scripts/parsezeolog.py new file mode 100644 index 0000000..f30708d --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO/scripts/parsezeolog.py @@ -0,0 +1,140 @@ +#!/usr/bin/env python2.3 + +"""Parse the BLATHER logging generated by ZEO2. + +An example of the log format is: +2002-04-15T13:05:29 BLATHER(-100) ZEO Server storea(3235680, [714], 235339406490168806) ('10.0.26.30', 45514) +""" +from __future__ import print_function +from __future__ import print_function +from __future__ import print_function +from __future__ import print_function +from __future__ import print_function + +import re +import time + +rx_time = re.compile('(\d\d\d\d-\d\d-\d\d)T(\d\d:\d\d:\d\d)') + +def parse_time(line): + """Return the time portion of a zLOG line in seconds or None.""" + mo = rx_time.match(line) + if mo is None: + return None + date, time_ = mo.group(1, 2) + date_l = [int(elt) for elt in date.split('-')] + time_l = [int(elt) for elt in time_.split(':')] + return int(time.mktime(date_l + time_l + [0, 0, 0])) + +rx_meth = re.compile("zrpc:\d+ calling (\w+)\((.*)") + +def parse_method(line): + pass + +def parse_line(line): + """Parse a log entry and return time, method info, and client.""" + t = parse_time(line) + if t is None: + return None, None + mo = rx_meth.search(line) + if mo is None: + return None, None + 
meth_name = mo.group(1) + meth_args = mo.group(2).strip() + if meth_args.endswith(')'): + meth_args = meth_args[:-1] + meth_args = [s.strip() for s in meth_args.split(",")] + m = meth_name, tuple(meth_args) + return t, m + +class TStats(object): + + counter = 1 + + def __init__(self): + self.id = TStats.counter + TStats.counter += 1 + + fields = ("time", "vote", "done", "user", "path") + fmt = "%-24s %5s %5s %-15s %s" + hdr = fmt % fields + + def report(self): + """Print a report about the transaction""" + t = time.ctime(self.begin) + if hasattr(self, "vote"): + d_vote = self.vote - self.begin + else: + d_vote = "*" + if hasattr(self, "finish"): + d_finish = self.finish - self.begin + else: + d_finish = "*" + print(self.fmt % (time.ctime(self.begin), d_vote, d_finish, + self.user, self.url)) + +class TransactionParser(object): + + def __init__(self): + self.txns = {} + self.skipped = 0 + + def parse(self, line): + t, m = parse_line(line) + if t is None: + return + name = m[0] + meth = getattr(self, name, None) + if meth is not None: + meth(t, m[1]) + + def tpc_begin(self, time, args): + t = TStats() + t.begin = time + t.user = args[1] + t.url = args[2] + t.objects = [] + tid = eval(args[0]) + self.txns[tid] = t + + def get_txn(self, args): + tid = eval(args[0]) + try: + return self.txns[tid] + except KeyError: + print("uknown tid", repr(tid)) + return None + + def tpc_finish(self, time, args): + t = self.get_txn(args) + if t is None: + return + t.finish = time + + def vote(self, time, args): + t = self.get_txn(args) + if t is None: + return + t.vote = time + + def get_txns(self): + L = [(t.id, t) for t in self.txns.values()] + L.sort() + return [t for (id, t) in L] + +if __name__ == "__main__": + import fileinput + + p = TransactionParser() + i = 0 + for line in fileinput.input(): + i += 1 + try: + p.parse(line) + except: + print("line", i) + raise + print("Transaction: %d" % len(p.txns)) + print(TStats.hdr) + for txn in p.get_txns(): + txn.report() diff --git 
a/thesisenv/lib/python3.6/site-packages/ZEO/scripts/tests.py b/thesisenv/lib/python3.6/site-packages/ZEO/scripts/tests.py new file mode 100644 index 0000000..67de27a --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO/scripts/tests.py @@ -0,0 +1,29 @@ +############################################################################## +# +# Copyright (c) 2004 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +from __future__ import print_function +import doctest, re, unittest +from zope.testing import renormalizing + +def test_suite(): + return unittest.TestSuite(( + doctest.DocFileSuite( + 'zeopack.test', + checker=renormalizing.RENormalizing([ + (re.compile('usage: Usage: '), 'Usage: '), # Py 2.4 + (re.compile('options:'), 'Options:'), # Py 2.4 + ]), + globs={'print_function': print_function}, + ), + )) + diff --git a/thesisenv/lib/python3.6/site-packages/ZEO/scripts/timeout.py b/thesisenv/lib/python3.6/site-packages/ZEO/scripts/timeout.py new file mode 100644 index 0000000..a5a9164 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO/scripts/timeout.py @@ -0,0 +1,72 @@ +#!/usr/bin/env python2.3 + +"""Transaction timeout test script. + +This script connects to a storage, begins a transaction, calls store() +and tpc_vote(), and then sleeps forever. This should trigger the +transaction timeout feature of the server. 
+ +usage: timeout.py address delay [storage-name] + +""" +from __future__ import print_function +from __future__ import print_function +from __future__ import print_function +from __future__ import print_function +from __future__ import print_function + +import sys +import time + +from ZODB.Connection import TransactionMetaData +from ZODB.tests.MinPO import MinPO +from ZODB.tests.StorageTestBase import zodb_pickle +from ZEO.ClientStorage import ClientStorage + +ZERO = '\0'*8 + +def main(): + if len(sys.argv) not in (3, 4): + sys.stderr.write("Usage: timeout.py address delay [storage-name]\n" % + sys.argv[0]) + sys.exit(2) + + hostport = sys.argv[1] + delay = float(sys.argv[2]) + if sys.argv[3:]: + name = sys.argv[3] + else: + name = "1" + + if "/" in hostport: + address = hostport + else: + if ":" in hostport: + i = hostport.index(":") + host, port = hostport[:i], hostport[i+1:] + else: + host, port = "", hostport + port = int(port) + address = (host, port) + + print("Connecting to %s..." % repr(address)) + storage = ClientStorage(address, name) + print("Connected. Now starting a transaction...") + + oid = storage.new_oid() + revid = ZERO + data = MinPO("timeout.py") + pickled_data = zodb_pickle(data) + t = TransactionMetaData() + t.user = "timeout.py" + storage.tpc_begin(t) + storage.store(oid, revid, pickled_data, '', t) + print("Stored. Now voting...") + storage.tpc_vote(t) + + print("Voted; now sleeping %s..." 
% delay) + time.sleep(delay) + print("Done.") + +if __name__ == "__main__": + main() diff --git a/thesisenv/lib/python3.6/site-packages/ZEO/scripts/zeopack.py b/thesisenv/lib/python3.6/site-packages/ZEO/scripts/zeopack.py new file mode 100644 index 0000000..478bbe4 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO/scripts/zeopack.py @@ -0,0 +1,183 @@ +#!/usr/bin/env python2.3 + +import logging +import optparse +import socket +import sys +import time +import traceback +import ZEO.ClientStorage +from six.moves import map +from six.moves import zip + +usage = """Usage: %prog [options] [servers] + +Pack one or more storages hosted by ZEO servers. + +The positional arguments specify 0 or more tcp servers to pack, where +each is of the form: + + host:port[:name] + +""" + +WAIT = 10 # wait no more than 10 seconds for client to connect + +def _main(args=None, prog=None): + if args is None: + args = sys.argv[1:] + + parser = optparse.OptionParser(usage, prog=prog) + + parser.add_option( + "-d", "--days", dest="days", type='int', default=0, + help=("Pack objects that are older than this number of days") + ) + + parser.add_option( + "-t", "--time", dest="time", + help=("Time of day to pack to of the form: HH[:MM[:SS]]. " + "Defaults to current time.") + ) + + parser.add_option( + "-u", "--unix", dest="unix_sockets", action="append", + help=("A unix-domain-socket server to connect to, of the form: " + "path[:name]") + ) + + parser.remove_option('-h') + parser.add_option( + "-h", dest="host", + help=("Deprecated: " + "Used with the -p and -S options, specified the host to " + "connect to.") + ) + + parser.add_option( + "-p", type="int", dest="port", + help=("Deprecated: " + "Used with the -h and -S options, specifies " + "the port to connect to.") + ) + + parser.add_option( + "-S", dest="name", default='1', + help=("Deprecated: Used with the -h and -p, options, or with the " + "-U option specified the storage name to use. 
Defaults to 1.") + ) + + parser.add_option( + "-U", dest="unix", + help=("Deprecated: Used with the -S option, " + "Unix-domain socket to connect to.") + ) + + if not args: + parser.print_help() + return + + def error(message): + sys.stderr.write("Error:\n%s\n" % message) + sys.exit(1) + + options, args = parser.parse_args(args) + + packt = time.time() + if options.time: + time_ = list(map(int, options.time.split(':'))) + if len(time_) == 1: + time_ += (0, 0) + elif len(time_) == 2: + time_ += (0,) + elif len(time_) > 3: + error("Invalid time value: %r" % options.time) + + packt = time.localtime(packt) + packt = time.mktime(packt[:3]+tuple(time_)+packt[6:]) + + packt -= options.days * 86400 + + servers = [] + + if options.host: + if not options.port: + error("If host (-h) is specified then a port (-p) must be " + "specified as well.") + servers.append(((options.host, options.port), options.name)) + elif options.port: + servers.append(((socket.gethostname(), options.port), options.name)) + + if options.unix: + servers.append((options.unix, options.name)) + + for server in args: + data = server.split(':') + if len(data) in (2, 3): + host = data[0] + try: + port = int(data[1]) + except ValueError: + error("Invalid port in server specification: %r" % server) + addr = host, port + if len(data) == 2: + name = '1' + else: + name = data[2] + else: + error("Invalid server specification: %r" % server) + + servers.append((addr, name)) + + for server in options.unix_sockets or (): + data = server.split(':') + if len(data) == 1: + addr = data[0] + name = '1' + elif len(data) == 2: + addr = data[0] + name = data[1] + else: + error("Invalid server specification: %r" % server) + + servers.append((addr, name)) + + if not servers: + error("No servers specified.") + + for addr, name in servers: + try: + cs = ZEO.ClientStorage.ClientStorage( + addr, storage=name, wait=False, read_only=1) + for i in range(60): + if cs.is_connected(): + break + time.sleep(1) + else: + 
sys.stderr.write("Couldn't connect to: %r\n" + % ((addr, name), )) + cs.close() + continue + cs.pack(packt, wait=True) + cs.close() + except: + traceback.print_exception(*(sys.exc_info()+(99, sys.stderr))) + error("Error packing storage %s in %r" % (name, addr)) + +def main(*args): + root_logger = logging.getLogger() + old_level = root_logger.getEffectiveLevel() + logging.getLogger().setLevel(logging.WARNING) + handler = logging.StreamHandler(sys.stdout) + handler.setFormatter(logging.Formatter( + "%(name)s %(levelname)s %(message)s")) + logging.getLogger().addHandler(handler) + try: + _main(*args) + finally: + logging.getLogger().setLevel(old_level) + logging.getLogger().removeHandler(handler) + +if __name__ == "__main__": + main() + diff --git a/thesisenv/lib/python3.6/site-packages/ZEO/scripts/zeopack.test b/thesisenv/lib/python3.6/site-packages/ZEO/scripts/zeopack.test new file mode 100644 index 0000000..8b4677d --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO/scripts/zeopack.test @@ -0,0 +1,279 @@ +zeopack +======= + +The zeopack script can be used to pack one or more storages. It uses +ClientStorage to do this. To test it's behavior, we'll replace the +normal ClientStorage with a fake one that echos information we'll want +for our test: + + >>> class ClientStorage: + ... connect_wait = 0 + ... def __init__(self, *args, **kw): + ... if args[0] == 'bad': + ... import logging + ... logging.getLogger('test.ClientStorage').error( + ... "I hate this address, %r", args[0]) + ... raise ValueError("Bad address") + ... print("ClientStorage(%s %s)" % ( + ... repr(args)[1:-1], + ... ', '.join("%s=%r" % i for i in sorted(kw.items())), + ... )) + ... def pack(self, t=None, *args, **kw): + ... now = time.localtime(time.time()) + ... local_midnight = time.mktime(now[:3]+(0, 0, 0)+now[6:]) + ... t -= local_midnight # adjust for tz + ... t += 86400*7 # add a week to make sure we're positive + ... print("pack(%r,%s %s)" % ( + ... t, repr(args)[1:-1], + ... 
', '.join("%s=%r" % i for i in sorted(kw.items())), + ... )) + ... def is_connected(self): + ... self.connect_wait -= 1 + ... print('is_connected', self.connect_wait < 0) + ... return self.connect_wait < 0 + ... def close(self): + ... print("close()") + + >>> import ZEO + >>> ClientStorage_orig = ZEO.ClientStorage.ClientStorage + >>> ZEO.ClientStorage.ClientStorage = ClientStorage + +Now, we're ready to try the script: + + >>> from ZEO.scripts.zeopack import main + +If we call it with no arguments, we get help: + +>>> import os; os.environ['COLUMNS'] = '80' # for consistent optparse output +>>> main([], 'zeopack') +Usage: zeopack [options] [servers] + +Pack one or more storages hosted by ZEO servers. + +The positional arguments specify 0 or more tcp servers to pack, where +each is of the form: + + host:port[:name] + + + +Options: + -d DAYS, --days=DAYS Pack objects that are older than this number of days + -t TIME, --time=TIME Time of day to pack to of the form: HH[:MM[:SS]]. + Defaults to current time. + -u UNIX_SOCKETS, --unix=UNIX_SOCKETS + A unix-domain-socket server to connect to, of the + form: path[:name] + -h HOST Deprecated: Used with the -p and -S options, specified + the host to connect to. + -p PORT Deprecated: Used with the -h and -S options, specifies + the port to connect to. + -S NAME Deprecated: Used with the -h and -p, options, or with + the -U option specified the storage name to use. + Defaults to 1. + -U UNIX Deprecated: Used with the -S option, Unix-domain + socket to connect to. + +Since packing involves time, we'd better have our way with it. Replace +time.time() with a function that always returns the same value. The +value is timezone dependent. + + >>> import time + >>> time_orig = time.time + >>> time.time = lambda : time.mktime((2009, 3, 24, 10, 55, 17, 1, 83, -1)) + >>> sleep_orig = time.sleep + >>> def sleep(t): + ... 
print('sleep(%r)' % t) + >>> time.sleep = sleep + +Normally, we pass one or more TCP server specifications: + + >>> main(["host1:8100", "host1:8100:2"]) + ClientStorage(('host1', 8100), read_only=1, storage='1', wait=False) + is_connected True + pack(644117.0, wait=True) + close() + ClientStorage(('host1', 8100), read_only=1, storage='2', wait=False) + is_connected True + pack(644117.0, wait=True) + close() + +We can also pass unix-domain-sockey servers using the -u option: + + >>> main(["-ufoo", "-ubar:spam", "host1:8100", "host1:8100:2"]) + ClientStorage(('host1', 8100), read_only=1, storage='1', wait=False) + is_connected True + pack(644117.0, wait=True) + close() + ClientStorage(('host1', 8100), read_only=1, storage='2', wait=False) + is_connected True + pack(644117.0, wait=True) + close() + ClientStorage('foo', read_only=1, storage='1', wait=False) + is_connected True + pack(644117.0, wait=True) + close() + ClientStorage('bar', read_only=1, storage='spam', wait=False) + is_connected True + pack(644117.0, wait=True) + close() + +The -d option causes a pack time the given number of days earlier to +be used: + + >>> main(["-ufoo", "-ubar:spam", "-d3", "host1:8100", "host1:8100:2"]) + ClientStorage(('host1', 8100), read_only=1, storage='1', wait=False) + is_connected True + pack(384917.0, wait=True) + close() + ClientStorage(('host1', 8100), read_only=1, storage='2', wait=False) + is_connected True + pack(384917.0, wait=True) + close() + ClientStorage('foo', read_only=1, storage='1', wait=False) + is_connected True + pack(384917.0, wait=True) + close() + ClientStorage('bar', read_only=1, storage='spam', wait=False) + is_connected True + pack(384917.0, wait=True) + close() + +The -t option allows us to control the time of day: + + >>> main(["-ufoo", "-d3", "-t1:30", "host1:8100:2"]) + ClientStorage(('host1', 8100), read_only=1, storage='2', wait=False) + is_connected True + pack(351000.0, wait=True) + close() + ClientStorage('foo', read_only=1, storage='1', 
wait=False) + is_connected True + pack(351000.0, wait=True) + close() + +Connection timeout +------------------ + +The zeopack script tells ClientStorage not to wait for connections +before returning from the constructor, but will time out after 60 +seconds of waiting for a connect. + + >>> ClientStorage.connect_wait = 3 + >>> main(["-d3", "-t1:30", "host1:8100:2"]) + ClientStorage(('host1', 8100), read_only=1, storage='2', wait=False) + is_connected False + sleep(1) + is_connected False + sleep(1) + is_connected False + sleep(1) + is_connected True + pack(351000.0, wait=True) + close() + + >>> def call_main(args): + ... import sys + ... old_stderr = sys.stderr + ... sys.stderr = sys.stdout + ... try: + ... try: + ... main(args) + ... except SystemExit as v: + ... print("Exited", v) + ... finally: + ... sys.stderr = old_stderr + + >>> ClientStorage.connect_wait = 999 + >>> call_main(["-d3", "-t1:30", "host1:8100", "host1:8100:2"]) + ... # doctest: +ELLIPSIS + ClientStorage(('host1', 8100), read_only=1, storage='1', wait=False) + is_connected False + sleep(1) + ... + is_connected False + sleep(1) + Couldn't connect to: (('host1', 8100), '1') + close() + ClientStorage(('host1', 8100), read_only=1, storage='2', wait=False) + is_connected False + sleep(1) + ... 
+ is_connected False + sleep(1) + Couldn't connect to: (('host1', 8100), '2') + close() + + >>> ClientStorage.connect_wait = 0 + + +Legacy support +-------------- + + >>> main(["-d3", "-h", "host1", "-p", "8100", "-S", "2"]) + ClientStorage(('host1', 8100), read_only=1, storage='2', wait=False) + is_connected True + pack(384917.0, wait=True) + close() + + >>> import socket + >>> old_gethostname = socket.gethostname + >>> socket.gethostname = lambda : 'test.host.com' + >>> main(["-d3", "-p", "8100"]) + ClientStorage(('test.host.com', 8100), read_only=1, storage='1', wait=False) + is_connected True + pack(384917.0, wait=True) + close() + >>> socket.gethostname = old_gethostname + + >>> main(["-d3", "-U", "foo/bar", "-S", "2"]) + ClientStorage('foo/bar', read_only=1, storage='2', wait=False) + is_connected True + pack(384917.0, wait=True) + close() + +Error handling +-------------- + + >>> call_main(["-d3"]) + Error: + No servers specified. + Exited 1 + + >>> call_main(["-d3", "a"]) + Error: + Invalid server specification: 'a' + Exited 1 + + >>> call_main(["-d3", "a:b:c:d"]) + Error: + Invalid server specification: 'a:b:c:d' + Exited 1 + + >>> call_main(["-d3", "a:b:2"]) + Error: + Invalid port in server specification: 'a:b:2' + Exited 1 + + >>> call_main(["-d3", "-u", "a:b:2"]) + Error: + Invalid server specification: 'a:b:2' + Exited 1 + + >>> call_main(["-d3", "-u", "bad"]) # doctest: +ELLIPSIS + test.ClientStorage ERROR I hate this address, 'bad' + Traceback (most recent call last): + ... + ValueError: Bad address + Error: + Error packing storage 1 in 'bad' + Exited 1 + + + +Note that in the previous example, the first line was output through logging. + +.. 
tear down + + >>> ZEO.ClientStorage.ClientStorage = ClientStorage_orig + >>> time.time = time_orig + >>> time.sleep = sleep_orig diff --git a/thesisenv/lib/python3.6/site-packages/ZEO/scripts/zeoqueue.py b/thesisenv/lib/python3.6/site-packages/ZEO/scripts/zeoqueue.py new file mode 100644 index 0000000..c424710 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO/scripts/zeoqueue.py @@ -0,0 +1,396 @@ +#!/usr/bin/env python2.3 + +"""Report on the number of currently waiting clients in the ZEO queue. + +Usage: %(PROGRAM)s [options] logfile + +Options: + -h / --help + Print this help text and exit. + + -v / --verbose + Verbose output + + -f file + --file file + Use the specified file to store the incremental state as a pickle. If + not given, %(STATEFILE)s is used. + + -r / --reset + Reset the state of the tool. This blows away any existing state + pickle file and then exits -- it does not parse the file. Use this + when you rotate log files so that the next run will parse from the + beginning of the file. 
+""" +from __future__ import print_function + +import os +import re +import sys +import time +import errno +import getopt +from ZEO._compat import load, dump + +COMMASPACE = ', ' +STATEFILE = 'zeoqueue.pck' +PROGRAM = sys.argv[0] + + + +tcre = re.compile(r""" + (?P + \d{4}- # year + \d{2}- # month + \d{2}) # day + T # separator + (?P + \d{2}: # hour + \d{2}: # minute + \d{2}) # second + """, re.VERBOSE) + +ccre = re.compile(r""" + zrpc-conn:(?P\d+.\d+.\d+.\d+:\d+)\s+ + calling\s+ + (?P + \w+) # the method + \( # args open paren + \' # string quote start + (?P + \S+) # first argument -- usually the tid + \' # end of string + (?P + .*) # rest of line + """, re.VERBOSE) + +wcre = re.compile(r'Clients waiting: (?P\d+)') + + + +def parse_time(line): + """Return the time portion of a zLOG line in seconds or None.""" + mo = tcre.match(line) + if mo is None: + return None + date, time_ = mo.group('ymd', 'hms') + date_l = [int(elt) for elt in date.split('-')] + time_l = [int(elt) for elt in time_.split(':')] + return int(time.mktime(date_l + time_l + [0, 0, 0])) + + +class Txn(object): + """Track status of single transaction.""" + def __init__(self, tid): + self.tid = tid + self.hint = None + self.begin = None + self.vote = None + self.abort = None + self.finish = None + self.voters = [] + + def isactive(self): + if self.begin and not (self.abort or self.finish): + return True + else: + return False + + + +class Status(object): + """Track status of ZEO server by replaying log records. + + We want to keep track of several events: + + - The last committed transaction. + - The last committed or aborted transaction. + - The last transaction that got the lock but didn't finish. + - The client address doing the first vote of a transaction. + - The number of currently active transactions. + - The number of reported queued transactions. + - Client restarts. + - Number of current connections (but this might not be useful). 
+ + We can observe these events by reading the following sorts of log + entries: + + 2002-12-16T06:16:05 BLATHER(-100) zrpc:12649 calling + tpc_begin('\x03I\x90((\xdbp\xd5', '', 'QueueCatal... + + 2002-12-16T06:16:06 BLATHER(-100) zrpc:12649 calling + vote('\x03I\x90((\xdbp\xd5') + + 2002-12-16T06:16:06 BLATHER(-100) zrpc:12649 calling + tpc_finish('\x03I\x90((\xdbp\xd5') + + 2002-12-16T10:46:10 INFO(0) ZSS:12649:1 Transaction blocked waiting + for storage. Clients waiting: 1. + + 2002-12-16T06:15:57 BLATHER(-100) zrpc:12649 connect from + ('10.0.26.54', 48983): + + 2002-12-16T10:30:09 INFO(0) ZSS:12649:1 disconnected + """ + + def __init__(self): + self.lineno = 0 + self.pos = 0 + self.reset() + + def reset(self): + self.commit = None + self.commit_or_abort = None + self.last_unfinished = None + self.n_active = 0 + self.n_blocked = 0 + self.n_conns = 0 + self.t_restart = None + self.txns = {} + + def iscomplete(self): + # The status report will always be complete if we encounter an + # explicit restart. + if self.t_restart is not None: + return True + # If we haven't seen a restart, assume that seeing a finished + # transaction is good enough. 
+ return self.commit is not None + + def process_file(self, fp): + if self.pos: + if VERBOSE: + print('seeking to file position', self.pos) + fp.seek(self.pos) + while True: + line = fp.readline() + if not line: + break + self.lineno += 1 + self.process(line) + self.pos = fp.tell() + + def process(self, line): + if line.find("calling") != -1: + self.process_call(line) + elif line.find("connect") != -1: + self.process_connect(line) + # test for "locked" because word may start with "B" or "b" + elif line.find("locked") != -1: + self.process_block(line) + elif line.find("Starting") != -1: + self.process_start(line) + + def process_call(self, line): + mo = ccre.search(line) + if mo is None: + return + called_method = mo.group('method') + # Exit early if we've got zeoLoad, because it's the most + # frequently called method and we don't use it. + if called_method == "zeoLoad": + return + t = parse_time(line) + meth = getattr(self, "call_%s" % called_method, None) + if meth is None: + return + client = mo.group('addr') + tid = mo.group('tid') + rest = mo.group('rest') + meth(t, client, tid, rest) + + def process_connect(self, line): + pass + + def process_block(self, line): + mo = wcre.search(line) + if mo is None: + # assume that this was a restart message for the last blocked + # transaction. 
+ self.n_blocked = 0 + else: + self.n_blocked = int(mo.group('num')) + + def process_start(self, line): + if line.find("Starting ZEO server") != -1: + self.reset() + self.t_restart = parse_time(line) + + def call_tpc_begin(self, t, client, tid, rest): + txn = Txn(tid) + txn.begin = t + if rest[0] == ',': + i = 1 + while rest[i].isspace(): + i += 1 + rest = rest[i:] + txn.hint = rest + self.txns[tid] = txn + self.n_active += 1 + self.last_unfinished = txn + + def call_vote(self, t, client, tid, rest): + txn = self.txns.get(tid) + if txn is None: + print("Oops!") + txn = self.txns[tid] = Txn(tid) + txn.vote = t + txn.voters.append(client) + + def call_tpc_abort(self, t, client, tid, rest): + txn = self.txns.get(tid) + if txn is None: + print("Oops!") + txn = self.txns[tid] = Txn(tid) + txn.abort = t + txn.voters = [] + self.n_active -= 1 + if self.commit_or_abort: + # delete the old transaction + try: + del self.txns[self.commit_or_abort.tid] + except KeyError: + pass + self.commit_or_abort = txn + + def call_tpc_finish(self, t, client, tid, rest): + txn = self.txns.get(tid) + if txn is None: + print("Oops!") + txn = self.txns[tid] = Txn(tid) + txn.finish = t + txn.voters = [] + self.n_active -= 1 + if self.commit: + # delete the old transaction + try: + del self.txns[self.commit.tid] + except KeyError: + pass + if self.commit_or_abort: + # delete the old transaction + try: + del self.txns[self.commit_or_abort.tid] + except KeyError: + pass + self.commit = self.commit_or_abort = txn + + def report(self): + print("Blocked transactions:", self.n_blocked) + if not VERBOSE: + return + if self.t_restart: + print("Server started:", time.ctime(self.t_restart)) + + if self.commit is not None: + t = self.commit_or_abort.finish + if t is None: + t = self.commit_or_abort.abort + print("Last finished transaction:", time.ctime(t)) + + # the blocked transaction should be the first one that calls vote + L = [(txn.begin, txn) for txn in self.txns.values()] + L.sort() + + for x, txn 
in L: + if txn.isactive(): + began = txn.begin + if txn.voters: + print("Blocked client (first vote):", txn.voters[0]) + print("Blocked transaction began at:", time.ctime(began)) + print("Hint:", txn.hint) + print("Idle time: %d sec" % int(time.time() - began)) + break + + + +def usage(code, msg=''): + print(__doc__ % globals(), file=sys.stderr) + if msg: + print(msg, file=sys.stderr) + sys.exit(code) + + +def main(): + global VERBOSE + + VERBOSE = 0 + file = STATEFILE + reset = False + # -0 is a secret option used for testing purposes only + seek = True + try: + opts, args = getopt.getopt(sys.argv[1:], 'vhf:r0', + ['help', 'verbose', 'file=', 'reset']) + except getopt.error as msg: + usage(1, msg) + + for opt, arg in opts: + if opt in ('-h', '--help'): + usage(0) + elif opt in ('-v', '--verbose'): + VERBOSE += 1 + elif opt in ('-f', '--file'): + file = arg + elif opt in ('-r', '--reset'): + reset = True + elif opt == '-0': + seek = False + + if reset: + # Blow away the existing state file and exit + try: + os.unlink(file) + if VERBOSE: + print('removing pickle state file', file) + except OSError as e: + if e.errno != errno.ENOENT: + raise + return + + if not args: + usage(1, 'logfile is required') + if len(args) > 1: + usage(1, 'too many arguments: %s' % COMMASPACE.join(args)) + + path = args[0] + + # Get the previous status object from the pickle file, if it is available + # and if the --reset flag wasn't given. 
+ status = None + try: + statefp = open(file, 'rb') + try: + status = load(statefp) + if VERBOSE: + print('reading status from file', file) + finally: + statefp.close() + except IOError as e: + if e.errno != errno.ENOENT: + raise + if status is None: + status = Status() + if VERBOSE: + print('using new status') + + if not seek: + status.pos = 0 + + fp = open(path, 'rb') + try: + status.process_file(fp) + finally: + fp.close() + # Save state + statefp = open(file, 'wb') + dump(status, statefp, 1) + statefp.close() + # Print the report and return the number of blocked clients in the exit + # status code. + status.report() + sys.exit(status.n_blocked) + + +if __name__ == "__main__": + main() diff --git a/thesisenv/lib/python3.6/site-packages/ZEO/scripts/zeoreplay.py b/thesisenv/lib/python3.6/site-packages/ZEO/scripts/zeoreplay.py new file mode 100644 index 0000000..5c9d7c1 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO/scripts/zeoreplay.py @@ -0,0 +1,326 @@ +#!/usr/bin/env python2.3 + +"""Parse the BLATHER logging generated by ZEO, and optionally replay it. + +Usage: zeointervals.py [options] + +Options: + + --help / -h + Print this message and exit. + + --replay=storage + -r storage + Replay the parsed transactions through the new storage + + --maxtxn=count + -m count + Parse no more than count transactions. + + --report / -p + Print a report as we're parsing. + +Unlike parsezeolog.py, this script generates timestamps for each transaction, +and sub-command in the transaction. We can use this to compare timings with +synthesized data. 
+""" +from __future__ import print_function +from __future__ import print_function +from __future__ import print_function +from __future__ import print_function +from __future__ import print_function +from __future__ import print_function +from __future__ import print_function +from __future__ import print_function +from __future__ import print_function +from __future__ import print_function + +import re +import sys +import time +import getopt +import operator +# ZEO logs measure wall-clock time so for consistency we need to do the same +#from time import clock as now +from time import time as now + +from ZODB.FileStorage import FileStorage +#from BDBStorage.BDBFullStorage import BDBFullStorage +#from Standby.primary import PrimaryStorage +#from Standby.config import RS_PORT +from ZODB.Connection import TransactionMetaData +from ZODB.utils import p64 +from functools import reduce + +datecre = re.compile('(\d\d\d\d-\d\d-\d\d)T(\d\d:\d\d:\d\d)') +methcre = re.compile("ZEO Server (\w+)\((.*)\) \('(.*)', (\d+)") + +class StopParsing(Exception): + pass + + + +def usage(code, msg=''): + print(__doc__) + if msg: + print(msg) + sys.exit(code) + + + +def parse_time(line): + """Return the time portion of a zLOG line in seconds or None.""" + mo = datecre.match(line) + if mo is None: + return None + date, time_ = mo.group(1, 2) + date_l = [int(elt) for elt in date.split('-')] + time_l = [int(elt) for elt in time_.split(':')] + return int(time.mktime(date_l + time_l + [0, 0, 0])) + + +def parse_line(line): + """Parse a log entry and return time, method info, and client.""" + t = parse_time(line) + if t is None: + return None, None, None + mo = methcre.search(line) + if mo is None: + return None, None, None + meth_name = mo.group(1) + meth_args = mo.group(2) + meth_args = [s.strip() for s in meth_args.split(',')] + m = meth_name, tuple(meth_args) + c = mo.group(3), mo.group(4) + return t, m, c + + + +class StoreStat(object): + def __init__(self, when, oid, size): + self.when = 
when + self.oid = oid + self.size = size + + # Crufty + def __getitem__(self, i): + if i == 0: return self.oid + if i == 1: return self.size + raise IndexError + + +class TxnStat(object): + def __init__(self): + self._begintime = None + self._finishtime = None + self._aborttime = None + self._url = None + self._objects = [] + + def tpc_begin(self, when, args, client): + self._begintime = when + # args are txnid, user, description (looks like it's always a url) + self._url = args[2] + + def storea(self, when, args, client): + oid = int(args[0]) + # args[1] is "[numbytes]" + size = int(args[1][1:-1]) + s = StoreStat(when, oid, size) + self._objects.append(s) + + def tpc_abort(self, when): + self._aborttime = when + + def tpc_finish(self, when): + self._finishtime = when + + + +# Mapping oid -> revid +_revids = {} + +class ReplayTxn(TxnStat): + def __init__(self, storage): + self._storage = storage + self._replaydelta = 0 + TxnStat.__init__(self) + + def replay(self): + ZERO = '\0'*8 + t0 = now() + t = TransactionMetaData() + self._storage.tpc_begin(t) + for obj in self._objects: + oid = obj.oid + revid = _revids.get(oid, ZERO) + # BAW: simulate a pickle of the given size + data = 'x' * obj.size + # BAW: ignore versions for now + newrevid = self._storage.store(p64(oid), revid, data, '', t) + _revids[oid] = newrevid + if self._aborttime: + self._storage.tpc_abort(t) + origdelta = self._aborttime - self._begintime + else: + self._storage.tpc_vote(t) + self._storage.tpc_finish(t) + origdelta = self._finishtime - self._begintime + t1 = now() + # Shows how many seconds behind (positive) or ahead (negative) of the + # original reply our local update took + self._replaydelta = t1 - t0 - origdelta + + + +class ZEOParser(object): + def __init__(self, maxtxns=-1, report=1, storage=None): + self.__txns = [] + self.__curtxn = {} + self.__skipped = 0 + self.__maxtxns = maxtxns + self.__finishedtxns = 0 + self.__report = report + self.__storage = storage + + def parse(self, line): 
+ t, m, c = parse_line(line) + if t is None: + # Skip this line + return + name = m[0] + meth = getattr(self, name, None) + if meth is not None: + meth(t, m[1], c) + + def tpc_begin(self, when, args, client): + txn = ReplayTxn(self.__storage) + self.__curtxn[client] = txn + meth = getattr(txn, 'tpc_begin', None) + if meth is not None: + meth(when, args, client) + + def storea(self, when, args, client): + txn = self.__curtxn.get(client) + if txn is None: + self.__skipped += 1 + return + meth = getattr(txn, 'storea', None) + if meth is not None: + meth(when, args, client) + + def tpc_finish(self, when, args, client): + txn = self.__curtxn.get(client) + if txn is None: + self.__skipped += 1 + return + meth = getattr(txn, 'tpc_finish', None) + if meth is not None: + meth(when) + if self.__report: + self.report(txn) + self.__txns.append(txn) + self.__curtxn[client] = None + self.__finishedtxns += 1 + if self.__maxtxns > 0 and self.__finishedtxns >= self.__maxtxns: + raise StopParsing + + def report(self, txn): + """Print a report about the transaction""" + if txn._objects: + bytes = reduce(operator.add, [size for oid, size in txn._objects]) + else: + bytes = 0 + print('%s %s %4d %10d %s %s' % ( + txn._begintime, txn._finishtime - txn._begintime, + len(txn._objects), + bytes, + time.ctime(txn._begintime), + txn._url)) + + def replay(self): + for txn in self.__txns: + txn.replay() + # How many fell behind? 
+ slower = [] + faster = [] + for txn in self.__txns: + if txn._replaydelta > 0: + slower.append(txn) + else: + faster.append(txn) + print(len(slower), 'laggards,', len(faster), 'on-time or faster') + # Find some averages + if slower: + sum = reduce(operator.add, + [txn._replaydelta for txn in slower], 0) + print('average slower txn was:', float(sum) / len(slower)) + if faster: + sum = reduce(operator.add, + [txn._replaydelta for txn in faster], 0) + print('average faster txn was:', float(sum) / len(faster)) + + + +def main(): + try: + opts, args = getopt.getopt( + sys.argv[1:], + 'hr:pm:', + ['help', 'replay=', 'report', 'maxtxns=']) + except getopt.error as e: + usage(1, e) + + if args: + usage(1) + + replay = 0 + maxtxns = -1 + report = 0 + storagefile = None + for opt, arg in opts: + if opt in ('-h', '--help'): + usage(0) + elif opt in ('-r', '--replay'): + replay = 1 + storagefile = arg + elif opt in ('-p', '--report'): + report = 1 + elif opt in ('-m', '--maxtxns'): + try: + maxtxns = int(arg) + except ValueError: + usage(1, 'Bad -m argument: %s' % arg) + + if replay: + storage = FileStorage(storagefile) + #storage = BDBFullStorage(storagefile) + #storage = PrimaryStorage('yyz', storage, RS_PORT) + t0 = now() + p = ZEOParser(maxtxns, report, storage) + i = 0 + while 1: + line = sys.stdin.readline() + if not line: + break + i += 1 + try: + p.parse(line) + except StopParsing: + break + except: + print('input file line:', i) + raise + t1 = now() + print('total parse time:', t1-t0) + t2 = now() + if replay: + p.replay() + t3 = now() + print('total replay time:', t3-t2) + print('total time:', t3-t0) + + + +if __name__ == '__main__': + main() diff --git a/thesisenv/lib/python3.6/site-packages/ZEO/scripts/zeoserverlog.py b/thesisenv/lib/python3.6/site-packages/ZEO/scripts/zeoserverlog.py new file mode 100644 index 0000000..f2f1441 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO/scripts/zeoserverlog.py @@ -0,0 +1,566 @@ +#!/usr/bin/env python2.3 + 
+############################################################################## +# +# Copyright (c) 2003 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Tools for analyzing ZEO Server logs. + +This script contains a number of commands, implemented by command +functions. To run a command, give the command name and it's arguments +as arguments to this script. + +Commands: + + blocked_times file threshold + + Output a summary of episodes where thransactions were blocked + when the episode lasted at least threshold seconds. + + The file may be a file name or - to read from standard input. 
+ The file may also be a command: + + script blocked_times 'bunzip2 = 0 + + if blocking and waiting == 1: + t1 = time(line) + t2 = t1 + + if not blocking and last_blocking: + last_wait = 0 + t2 = time(line) + cid = idre.search(line).group(1) + + if waiting == 0: + d = sub(t1, time(line)) + if d >= thresh: + print(t1, sub(t1, t2), cid, d) + t1 = t2 = cid = blocking = waiting = last_wait = max_wait = 0 + + last_blocking = blocking + +connidre = re.compile(r' zrpc-conn:(\d+.\d+.\d+.\d+:\d+) ') +def time_calls(f): + f, thresh = f + if f == '-': + f = sys.stdin + else: + f = xopen(f) + + thresh = float(thresh) + t1 = None + maxd = 0 + + for line in f: + line = line.strip() + + if ' calling ' in line: + t1 = time(line) + elif ' returns ' in line and t1 is not None: + d = sub(t1, time(line)) + if d >= thresh: + print(t1, d, connidre.search(line).group(1)) + maxd = max(maxd, d) + t1 = None + + print(maxd) + +def xopen(f): + if f == '-': + return sys.stdin + if ' ' in f: + return os.popen(f, 'r') + return open(f) + +def time_tpc(f): + f, thresh = f + if f == '-': + f = sys.stdin + else: + f = xopen(f) + + thresh = float(thresh) + transactions = {} + + for line in f: + line = line.strip() + + if ' calling vote(' in line: + cid = connidre.search(line).group(1) + transactions[cid] = time(line), + elif ' vote returns None' in line: + cid = connidre.search(line).group(1) + transactions[cid] += time(line), 'n' + elif ' vote() raised' in line: + cid = connidre.search(line).group(1) + transactions[cid] += time(line), 'e' + elif ' vote returns ' in line: + # delayed, skip + cid = connidre.search(line).group(1) + transactions[cid] += time(line), 'd' + elif ' calling tpc_abort(' in line: + cid = connidre.search(line).group(1) + if cid in transactions: + t1, t2, vs = transactions[cid] + t = time(line) + d = sub(t1, t) + if d >= thresh: + print('a', t1, cid, sub(t1, t2), vs, sub(t2, t)) + del transactions[cid] + elif ' calling tpc_finish(' in line: + if cid in transactions: + cid = 
connidre.search(line).group(1) + transactions[cid] += time(line), + elif ' tpc_finish returns ' in line: + if cid in transactions: + t1, t2, vs, t3 = transactions[cid] + t = time(line) + d = sub(t1, t) + if d >= thresh: + print('c', t1, cid, sub(t1, t2), vs, sub(t2, t3), sub(t3, t)) + del transactions[cid] + + +newobre = re.compile(r"storea\(.*, '\\x00\\x00\\x00\\x00\\x00") +def time_trans(f): + f, thresh = f + if f == '-': + f = sys.stdin + else: + f = xopen(f) + + thresh = float(thresh) + transactions = {} + + for line in f: + line = line.strip() + + if ' calling tpc_begin(' in line: + cid = connidre.search(line).group(1) + transactions[cid] = time(line), [0, 0] + if ' calling storea(' in line: + cid = connidre.search(line).group(1) + if cid in transactions: + transactions[cid][1][0] += 1 + if not newobre.search(line): + transactions[cid][1][1] += 1 + + elif ' calling vote(' in line: + cid = connidre.search(line).group(1) + if cid in transactions: + transactions[cid] += time(line), + elif ' vote returns None' in line: + cid = connidre.search(line).group(1) + if cid in transactions: + transactions[cid] += time(line), 'n' + elif ' vote() raised' in line: + cid = connidre.search(line).group(1) + if cid in transactions: + transactions[cid] += time(line), 'e' + elif ' vote returns ' in line: + # delayed, skip + cid = connidre.search(line).group(1) + if cid in transactions: + transactions[cid] += time(line), 'd' + elif ' calling tpc_abort(' in line: + cid = connidre.search(line).group(1) + if cid in transactions: + try: + t0, (stores, old), t1, t2, vs = transactions[cid] + except ValueError: + pass + else: + t = time(line) + d = sub(t1, t) + if d >= thresh: + print(t1, cid, "%s/%s" % (stores, old), \ + sub(t0, t1), sub(t1, t2), vs, \ + sub(t2, t), 'abort') + del transactions[cid] + elif ' calling tpc_finish(' in line: + if cid in transactions: + cid = connidre.search(line).group(1) + transactions[cid] += time(line), + elif ' tpc_finish returns ' in line: + if cid in 
transactions: + t0, (stores, old), t1, t2, vs, t3 = transactions[cid] + t = time(line) + d = sub(t1, t) + if d >= thresh: + print(t1, cid, "%s/%s" % (stores, old), \ + sub(t0, t1), sub(t1, t2), vs, \ + sub(t2, t3), sub(t3, t)) + del transactions[cid] + +def minute(f, slice=16, detail=1, summary=1): + f, = f + + if f == '-': + f = sys.stdin + else: + f = xopen(f) + + cols = ["time", "reads", "stores", "commits", "aborts", "txns"] + fmt = "%18s %6s %6s %7s %6s %6s" + print(fmt % cols) + print(fmt % ["-"*len(col) for col in cols]) + + mlast = r = s = c = a = cl = None + rs = [] + ss = [] + cs = [] + aborts = [] + ts = [] + cls = [] + + for line in f: + line = line.strip() + if (line.find('returns') > 0 + or line.find('storea') > 0 + or line.find('tpc_abort') > 0 + ): + client = connidre.search(line).group(1) + m = line[:slice] + if m != mlast: + if mlast: + if detail: + print(fmt % (mlast, len(cl), r, s, c, a, a+c)) + cls.append(len(cl)) + rs.append(r) + ss.append(s) + cs.append(c) + aborts.append(a) + ts.append(c+a) + mlast = m + r = s = c = a = 0 + cl = {} + if line.find('zeoLoad') > 0: + r += 1 + cl[client] = 1 + elif line.find('storea') > 0: + s += 1 + cl[client] = 1 + elif line.find('tpc_finish') > 0: + c += 1 + cl[client] = 1 + elif line.find('tpc_abort') > 0: + a += 1 + cl[client] = 1 + + if mlast: + if detail: + print(fmt % (mlast, len(cl), r, s, c, a, a+c)) + cls.append(len(cl)) + rs.append(r) + ss.append(s) + cs.append(c) + aborts.append(a) + ts.append(c+a) + + if summary: + print() + print('Summary: \t', '\t'.join(('min', '10%', '25%', 'med', + '75%', '90%', 'max', 'mean'))) + print("n=%6d\t" % len(cls), '-'*62) + print('Clients: \t', '\t'.join(map(str,stats(cls)))) + print('Reads: \t', '\t'.join(map(str,stats(rs)))) + print('Stores: \t', '\t'.join(map(str,stats(ss)))) + print('Commits: \t', '\t'.join(map(str,stats(cs)))) + print('Aborts: \t', '\t'.join(map(str,stats(aborts)))) + print('Trans: \t', '\t'.join(map(str,stats(ts)))) + +def stats(s): + s.sort() 
+ min = s[0] + max = s[-1] + n = len(s) + out = [min] + ni = n + 1 + for p in .1, .25, .5, .75, .90: + lp = ni*p + l = int(lp) + if lp < 1 or lp > n: + out.append('-') + elif abs(lp-l) < .00001: + out.append(s[l-1]) + else: + out.append(int(s[l-1] + (lp - l) * (s[l] - s[l-1]))) + + mean = 0.0 + for v in s: + mean += v + + out.extend([max, int(mean/n)]) + + return out + +def minutes(f): + minute(f, 16, detail=0) + +def hour(f): + minute(f, 13) + +def day(f): + minute(f, 10) + +def hours(f): + minute(f, 13, detail=0) + +def days(f): + minute(f, 10, detail=0) + + +new_connection_idre = re.compile( + r"new connection \('(\d+.\d+.\d+.\d+)', (\d+)\):") +def verify(f): + f, = f + + if f == '-': + f = sys.stdin + else: + f = xopen(f) + + t1 = None + nv = {} + for line in f: + if line.find('new connection') > 0: + m = new_connection_idre.search(line) + cid = "%s:%s" % (m.group(1), m.group(2)) + nv[cid] = [time(line), 0] + elif line.find('calling zeoVerify(') > 0: + cid = connidre.search(line).group(1) + nv[cid][1] += 1 + elif line.find('calling endZeoVerify()') > 0: + cid = connidre.search(line).group(1) + t1, n = nv[cid] + if n: + d = sub(t1, time(line)) + print(cid, t1, n, d, n and (d*1000.0/n) or '-') + +def recovery(f): + f, = f + + if f == '-': + f = sys.stdin + else: + f = xopen(f) + + last = '' + trans = [] + n = 0 + for line in f: + n += 1 + if line.find('RecoveryServer') < 0: + continue + l = line.find('sending transaction ') + if l > 0 and last.find('sending transaction ') > 0: + trans.append(line[l+20:].strip()) + else: + if trans: + if len(trans) > 1: + print(" ... %s similar records skipped ..." % ( + len(trans) - 1)) + print(n, last.strip()) + trans=[] + print(n, line.strip()) + last = line + + if len(trans) > 1: + print(" ... %s similar records skipped ..." 
% ( + len(trans) - 1)) + print(n, last.strip()) + + + +if __name__ == '__main__': + globals()[sys.argv[1]](sys.argv[2:]) diff --git a/thesisenv/lib/python3.6/site-packages/ZEO/scripts/zeoup.py b/thesisenv/lib/python3.6/site-packages/ZEO/scripts/zeoup.py new file mode 100644 index 0000000..65bbdfc --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO/scripts/zeoup.py @@ -0,0 +1,156 @@ +#!/usr/bin/env python2.3 + +"""Make sure a ZEO server is running. + +usage: zeoup.py [options] + +The test will connect to a ZEO server, load the root object, and attempt to +update the zeoup counter in the root. It will report success if it updates +the counter or if it gets a ConflictError. A ConflictError is considered a +success, because the client was able to start a transaction. + +Options: + + -p port -- port to connect to + + -h host -- host to connect to (default is current host) + + -S storage -- storage name (default '1') + + -U path -- Unix-domain socket to connect to + + --nowrite -- Do not update the zeoup counter. + + -1 -- Connect to a ZEO 1.0 server. + +You must specify either -p and -h or -U. +""" +from __future__ import print_function +from __future__ import print_function +from __future__ import print_function +from __future__ import print_function +from __future__ import print_function + +import getopt +import logging +import socket +import sys +import time + +from persistent.mapping import PersistentMapping +import transaction + +import ZODB +from ZODB.POSException import ConflictError +from ZODB.tests.MinPO import MinPO +from ZEO.ClientStorage import ClientStorage + +ZEO_VERSION = 2 + +def setup_logging(): + # Set up logging to stderr which will show messages originating + # at severity ERROR or higher. 
+ root = logging.getLogger() + root.setLevel(logging.ERROR) + fmt = logging.Formatter( + "------\n%(asctime)s %(levelname)s %(name)s %(message)s", + "%Y-%m-%dT%H:%M:%S") + handler = logging.StreamHandler() + handler.setFormatter(fmt) + root.addHandler(handler) + +def check_server(addr, storage, write): + t0 = time.time() + if ZEO_VERSION == 2: + # TODO: should do retries w/ exponential backoff. + cs = ClientStorage(addr, storage=storage, wait=0, + read_only=(not write)) + else: + cs = ClientStorage(addr, storage=storage, debug=1, + wait_for_server_on_startup=1) + # _startup() is an artifact of the way ZEO 1.0 works. The + # ClientStorage doesn't get fully initialized until registerDB() + # is called. The only thing we care about, though, is that + # registerDB() calls _startup(). + + if write: + db = ZODB.DB(cs) + cn = db.open() + root = cn.root() + try: + # We store the data in a special `monitor' dict under the root, + # where other tools may also store such heartbeat and bookkeeping + # type data. 
+ monitor = root.get('monitor') + if monitor is None: + monitor = root['monitor'] = PersistentMapping() + obj = monitor['zeoup'] = monitor.get('zeoup', MinPO(0)) + obj.value += 1 + transaction.commit() + except ConflictError: + pass + cn.close() + db.close() + else: + data, serial = cs.load("\0\0\0\0\0\0\0\0", "") + cs.close() + t1 = time.time() + print("Elapsed time: %.2f" % (t1 - t0)) + +def usage(exit=1): + print(__doc__) + print(" ".join(sys.argv)) + sys.exit(exit) + +def main(): + host = None + port = None + unix = None + write = 1 + storage = '1' + try: + opts, args = getopt.getopt(sys.argv[1:], 'p:h:U:S:1', + ['nowrite']) + for o, a in opts: + if o == '-p': + port = int(a) + elif o == '-h': + host = a + elif o == '-U': + unix = a + elif o == '-S': + storage = a + elif o == '--nowrite': + write = 0 + elif o == '-1': + ZEO_VERSION = 1 + except Exception as err: + s = str(err) + if s: + s = ": " + s + print(err.__class__.__name__ + s) + usage() + + if unix is not None: + addr = unix + else: + if host is None: + host = socket.gethostname() + if port is None: + usage() + addr = host, port + + setup_logging() + check_server(addr, storage, write) + +if __name__ == "__main__": + try: + main() + except SystemExit: + raise + except Exception as err: + s = str(err) + if s: + s = ": " + s + print(err.__class__.__name__ + s) + sys.exit(1) diff --git a/thesisenv/lib/python3.6/site-packages/ZEO/server.xml b/thesisenv/lib/python3.6/site-packages/ZEO/server.xml new file mode 100644 index 0000000..12fa112 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO/server.xml @@ -0,0 +1,133 @@ + + + + + + + The full path to an SSL certificate file. + + + + + + The full path to an SSL key file for the server certificate. + + + + + + Dotted name of importable function for retrieving a password + for the client certificate key. + + + + + + Path to a file or directory containing client certificates to + be authenticated. 
This can also be - or SIGNED to require + signed client certificates. + + + + + + + +
+ + + The content of a ZEO section describe operational parameters + of a ZEO server except for the storage(s) to be served. + + + + + The address at which the server should listen. This can be in + the form 'host:port' to signify a TCP/IP connection or a + pathname string to signify a Unix domain socket connection (at + least one '/' is required). A hostname may be a DNS name or a + dotted IP address. If the hostname is omitted, the platform's + default behavior is used when binding the listening socket ('' + is passed to socket.bind() as the hostname portion of the + address). + + + + + + Flag indicating whether the server should operate in read-only + mode. Defaults to false. Note that even if the server is + operating in writable mode, individual storages may still be + read-only. But if the server is in read-only mode, no write + operations are allowed, even if the storages are writable. Note + that pack() is considered a read-only operation. + + + + + + The storage server keeps a queue of the objects modified by the + last N transactions, where N == invalidation_queue_size. This + queue is used to speed client cache verification when a client + disconnects for a short period of time. + + + + + + The maximum age of a client for which quick-verification + invalidations will be provided by iterating over the served + storage. This option should only be used if the served storage + supports efficient iteration from a starting point near the + end of the transaction history (e.g. end of file). + + + + + + The maximum amount of time to wait for a transaction to commit + after acquiring the storage lock, specified in seconds. If the + transaction takes too long, the client connection will be closed + and the transaction aborted. + + + + + + The full path to the file in which to write the ZEO server's Process ID + at startup. If omitted, $INSTANCE/var/ZEO.pid is used. 
+ + $INSTANCE/var/ZEO.pid (or $clienthome/ZEO.pid) + + + + + Flag indicating whether the server should return conflict + errors to the client, for resolution there. + + + + + + Use msgpack to serialize and de-serialize ZEO protocol messages. + + An advantage of using msgpack for ZEO communication is that + it's a tiny bit faster and a ZEO server can support Python 2 + or Python 3 clients (but not both). + + msgpack can also be enabled by setting the ``ZEO_MSGPACK`` + environment to a non-empty string. + + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/ZEO/shortrepr.py b/thesisenv/lib/python3.6/site-packages/ZEO/shortrepr.py new file mode 100644 index 0000000..9f24bb8 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO/shortrepr.py @@ -0,0 +1,56 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## + +REPR_LIMIT = 60 + +def short_repr(obj): + "Return an object repr limited to REPR_LIMIT bytes." + + # Some of the objects being repr'd are large strings. A lot of memory + # would be wasted to repr them and then truncate, so they are treated + # specially in this function. + # Also handle short repr of a tuple containing a long string. + + # This strategy works well for arguments to StorageServer methods. + # The oid is usually first and will get included in its entirety. 
+ # The pickle is near the beginning, too, and you can often fit the + # module name in the pickle. + + if isinstance(obj, str): + if len(obj) > REPR_LIMIT: + r = repr(obj[:REPR_LIMIT]) + else: + r = repr(obj) + if len(r) > REPR_LIMIT: + r = r[:REPR_LIMIT-4] + '...' + r[-1] + return r + elif isinstance(obj, (list, tuple)): + elts = [] + size = 0 + for elt in obj: + r = short_repr(elt) + elts.append(r) + size += len(r) + if size > REPR_LIMIT: + break + if isinstance(obj, tuple): + r = "(%s)" % (", ".join(elts)) + else: + r = "[%s]" % (", ".join(elts)) + else: + r = repr(obj) + if len(r) > REPR_LIMIT: + return r[:REPR_LIMIT] + '...' + else: + return r diff --git a/thesisenv/lib/python3.6/site-packages/ZEO/tests/Cache.py b/thesisenv/lib/python3.6/site-packages/ZEO/tests/Cache.py new file mode 100644 index 0000000..d38c844 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO/tests/Cache.py @@ -0,0 +1,50 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## +"""Tests of the ZEO cache""" + +from ZODB.Connection import TransactionMetaData +from ZODB.tests.MinPO import MinPO +from ZODB.tests.StorageTestBase import zodb_unpickle + +class TransUndoStorageWithCache(object): + + def checkUndoInvalidation(self): + oid = self._storage.new_oid() + revid = self._dostore(oid, data=MinPO(23)) + revid = self._dostore(oid, revid=revid, data=MinPO(24)) + revid = self._dostore(oid, revid=revid, data=MinPO(25)) + + info = self._storage.undoInfo() + if not info: + # Preserved this comment, but don't understand it: + # "Perhaps we have an old storage implementation that + # does do the negative nonsense." + info = self._storage.undoInfo(0, 20) + tid = info[0]['id'] + + # Now start an undo transaction + t = TransactionMetaData() + t.note(u'undo1') + oids = self._begin_undos_vote(t, tid) + + # Make sure this doesn't load invalid data into the cache + self._storage.load(oid, '') + + self._storage.tpc_finish(t) + + [uoid] = oids + assert uoid == oid + data, revid = self._storage.load(oid, '') + obj = zodb_unpickle(data) + assert obj == MinPO(24) diff --git a/thesisenv/lib/python3.6/site-packages/ZEO/tests/CommitLockTests.py b/thesisenv/lib/python3.6/site-packages/ZEO/tests/CommitLockTests.py new file mode 100644 index 0000000..ba98af1 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO/tests/CommitLockTests.py @@ -0,0 +1,180 @@ +############################################################################## +# +# Copyright (c) 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). 
A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## +"""Tests of the distributed commit lock.""" + +import threading +import time + +from persistent.TimeStamp import TimeStamp +from ZODB.Connection import TransactionMetaData +from ZODB.tests.StorageTestBase import zodb_pickle, MinPO + +import ZEO.ClientStorage +from ZEO.Exceptions import ClientDisconnected +from ZEO.tests.TestThread import TestThread + +ZERO = b'\0'*8 + +class DummyDB(object): + def invalidate(self, *args, **kwargs): + pass + + transform_record_data = untransform_record_data = lambda self, data: data + +class WorkerThread(TestThread): + + # run the entire test in a thread so that the blocking call for + # tpc_vote() doesn't hang the test suite. + + def __init__(self, test, storage, trans): + self.storage = storage + self.trans = trans + self.ready = threading.Event() + TestThread.__init__(self, test) + + def testrun(self): + try: + self.storage.tpc_begin(self.trans) + oid = self.storage.new_oid() + p = zodb_pickle(MinPO("c")) + self.storage.store(oid, ZERO, p, '', self.trans) + oid = self.storage.new_oid() + p = zodb_pickle(MinPO("c")) + self.storage.store(oid, ZERO, p, '', self.trans) + self.myvote() + self.storage.tpc_finish(self.trans) + except ClientDisconnected: + pass + + def myvote(self): + # The vote() call is synchronous, which makes it difficult to + # coordinate the action of multiple threads that all call + # vote(). This method sends the vote call, then sets the + # event saying vote was called, then waits for the vote + # response. 
+ + future = self.storage._server.call_future('vote', id(self.trans)) + self.ready.set() + future.result(9) + +class CommitLockTests(object): + + NUM_CLIENTS = 5 + + # The commit lock tests verify that the storage successfully + # blocks and restarts transactions when there is contention for a + # single storage. There are a lot of cases to cover. + + # The general flow of these tests is to start a transaction by + # getting far enough into 2PC to acquire the commit lock. Then + # begin one or more other connections that also want to commit. + # This causes the commit lock code to be exercised. Once the + # other connections are started, the first transaction completes. + + def _cleanup(self): + for store, trans in self._storages: + store.tpc_abort(trans) + store.close() + self._storages = [] + + def _start_txn(self): + txn = TransactionMetaData() + self._storage.tpc_begin(txn) + oid = self._storage.new_oid() + self._storage.store(oid, ZERO, zodb_pickle(MinPO(1)), '', txn) + return oid, txn + + def _begin_threads(self): + # Start a second transaction on a different connection without + # blocking the test thread. Returns only after each thread has + # set it's ready event. + self._storages = [] + self._threads = [] + + for i in range(self.NUM_CLIENTS): + storage = self._duplicate_client() + txn = TransactionMetaData() + tid = self._get_timestamp() + + t = WorkerThread(self, storage, txn) + self._threads.append(t) + t.start() + t.ready.wait() + + # Close one of the connections abnormally to test server response + if i == 0: + storage.close() + else: + self._storages.append((storage, txn)) + + def _finish_threads(self): + for t in self._threads: + t.cleanup() + + def _duplicate_client(self): + "Open another ClientStorage to the same server." + # It's hard to find the actual address. + # The rpc mgr addr attribute is a list. Each element in the + # list is a socket domain (AF_INET, AF_UNIX, etc.) and an + # address. 
+ addr = self._storage._addr + new = ZEO.ClientStorage.ClientStorage( + addr, wait=1, **self._client_options()) + new.registerDB(DummyDB()) + return new + + def _get_timestamp(self): + t = time.time() + t = TimeStamp(*time.gmtime(t)[:5]+(t%60,)) + return repr(t) + +class CommitLockVoteTests(CommitLockTests): + + def checkCommitLockVoteFinish(self): + oid, txn = self._start_txn() + self._storage.tpc_vote(txn) + + self._begin_threads() + + self._storage.tpc_finish(txn) + self._storage.load(oid, '') + + self._finish_threads() + + self._dostore() + self._cleanup() + + def checkCommitLockVoteAbort(self): + oid, txn = self._start_txn() + self._storage.tpc_vote(txn) + + self._begin_threads() + + self._storage.tpc_abort(txn) + + self._finish_threads() + + self._dostore() + self._cleanup() + + def checkCommitLockVoteClose(self): + oid, txn = self._start_txn() + self._storage.tpc_vote(txn) + + self._begin_threads() + + self._storage.close() + + self._finish_threads() + self._cleanup() diff --git a/thesisenv/lib/python3.6/site-packages/ZEO/tests/ConnectionTests.py b/thesisenv/lib/python3.6/site-packages/ZEO/tests/ConnectionTests.py new file mode 100644 index 0000000..f7ad8a5 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO/tests/ConnectionTests.py @@ -0,0 +1,1118 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## +import concurrent.futures +import contextlib +import os +import time +import socket +import threading +import logging + +from ZEO.ClientStorage import ClientStorage +from ZEO.Exceptions import ClientDisconnected +from ZEO.asyncio.marshal import encode +from ZEO.tests import forker + +from ZODB.Connection import TransactionMetaData +from ZODB.DB import DB +from ZODB.POSException import ReadOnlyError, ConflictError +from ZODB.tests.StorageTestBase import StorageTestBase +from ZODB.tests.MinPO import MinPO +from ZODB.tests.StorageTestBase import zodb_pickle, zodb_unpickle +import ZODB.tests.util + +import transaction + +from . import testssl + +logger = logging.getLogger('ZEO.tests.ConnectionTests') + +ZERO = '\0'*8 + +class TestClientStorage(ClientStorage): + + test_connection = False + + connection_count_for_tests = 0 + + def notify_connected(self, conn, info): + ClientStorage.notify_connected(self, conn, info) + self.connection_count_for_tests += 1 + self.verify_result = conn.verify_result + +class DummyDB(object): + def invalidate(self, *args, **kwargs): + pass + + def invalidateCache(self): + pass + + transform_record_data = untransform_record_data = lambda self, data: data + + +class CommonSetupTearDown(StorageTestBase): + """Common boilerplate""" + + __super_setUp = StorageTestBase.setUp + __super_tearDown = StorageTestBase.tearDown + keep = 0 + invq = None + timeout = None + db_class = DummyDB + + def setUp(self, before=None): + """Test setup for connection tests. + + This starts only one server; a test may start more servers by + calling self._newAddr() and then self.startServer(index=i) + for i in 1, 2, ... 
+ """ + self.__super_setUp() + logging.info("setUp() %s", self.id()) + self.file = 'storage_conf' + self._servers = [] + self.caches = [] + self.addr = [('127.0.0.1', 0)] + self.startServer() + + def tearDown(self): + """Try to cause the tests to halt""" + + if getattr(self, '_storage', None) is not None: + self._storage.close() + if hasattr(self._storage, 'cleanup'): + logging.debug("cleanup storage %s" % + self._storage.__name__) + self._storage.cleanup() + for stop in self._servers: + stop() + + for c in self.caches: + for i in 0, 1: + for ext in "", ".trace", ".lock": + path = "%s-%s.zec%s" % (c, "1", ext) + # On Windows before 2.3, we don't have a way to wait for + # the spawned server(s) to close, and they inherited + # file descriptors for our open files. So long as those + # processes are alive, we can't delete the files. Try + # a few times then give up. + need_to_delete = False + if os.path.exists(path): + need_to_delete = True + for dummy in range(5): + try: + os.unlink(path) + except: + time.sleep(0.5) + else: + need_to_delete = False + break + if need_to_delete: + os.unlink(path) # sometimes this is just gonna fail + self.__super_tearDown() + + def _newAddr(self): + self.addr.append(self._getAddr()) + + def _getAddr(self): + return '127.0.0.1', forker.get_port(self) + + def getConfig(self, path, create, read_only): + raise NotImplementedError + + cache_id = 1 + + def openClientStorage(self, cache=None, cache_size=200000, wait=1, + read_only=0, read_only_fallback=0, + username=None, password=None, realm=None): + if cache is None: + cache = str(self.__class__.cache_id) + self.__class__.cache_id += 1 + self.caches.append(cache) + storage = TestClientStorage(self.addr, + client=cache, + var='.', + cache_size=cache_size, + wait=wait, + min_disconnect_poll=0.1, + read_only=read_only, + read_only_fallback=read_only_fallback, + **self._client_options()) + storage.registerDB(DummyDB()) + return storage + + def _client_options(self): + return {} + + def 
getServerConfig(self, addr, ro_svr): + zconf = forker.ZEOConfig(addr, log='server.log') + if ro_svr: + zconf.read_only = 1 + if self.invq: + zconf.invalidation_queue_size = self.invq + if self.timeout: + zconf.transaction_timeout = self.timeout + return zconf + + def startServer(self, create=1, index=0, read_only=0, ro_svr=0, keep=None, + path=None, **kw): + addr = self.addr[index] + logging.info("startServer(create=%d, index=%d, read_only=%d) @ %s" % + (create, index, read_only, addr)) + if path is None: + path = "%s.%d" % (self.file, index) + sconf = self.getConfig(path, create, read_only) + zconf = self.getServerConfig(addr, ro_svr) + if keep is None: + keep = self.keep + zeoport, stop = forker.start_zeo_server( + sconf, zconf, addr[1], keep, **kw) + self._servers.append(stop) + if addr[1] == 0: + self.addr[index] = zeoport + + def shutdownServer(self, index=0): + logging.info("shutdownServer(index=%d) @ %s" % + (index, self._servers[index])) + stop = self._servers[index] + if stop is not None: + stop() + self._servers[index] = lambda : None + + def pollUp(self, timeout=30.0, storage=None): + if storage is None: + storage = self._storage + storage.server_status() + + def pollDown(self, timeout=30.0): + # Poll until we're disconnected. + now = time.time() + giveup = now + timeout + while self._storage.is_connected(): + now = time.time() + if now > giveup: + self.fail("timed out waiting for storage to disconnect") + time.sleep(0.1) + + +class ConnectionTests(CommonSetupTearDown): + """Tests that explicitly manage the server process. + + To test the cache or re-connection, these test cases explicit + start and stop a ZEO storage server. 
    """

    def checkMultipleAddresses(self):
        # A client configured with several candidate server addresses
        # must still be able to commit via whichever server is live.
        for i in range(4):
            self._newAddr()
        self._storage = self.openClientStorage('test', 100000)
        oid = self._storage.new_oid()
        obj = MinPO(12)
        self._dostore(oid, data=obj)
        self._storage.close()

    def checkReadOnlyClient(self):
        # Open a read-only client to a read-write server; stores fail

        # Start a read-only client for a read-write server
        self._storage = self.openClientStorage(read_only=1)
        # Stores should fail here
        self.assertRaises(ReadOnlyError, self._dostore)
        self._storage.close()

    def checkReadOnlyServer(self):
        # Open a read-only client to a read-only *server*; stores fail

        # We don't want the read-write server created by setUp()
        self.shutdownServer()
        self._servers = []
        # Start a read-only server
        self.startServer(create=0, index=0, ro_svr=1)
        # Start a read-only client
        self._storage = self.openClientStorage(read_only=1)
        # Stores should fail here
        self.assertRaises(ReadOnlyError, self._dostore)
        self._storage.close()
        # Get rid of the 'test left new threads behind' warning
        time.sleep(0.1)

    def checkReadOnlyFallbackWritable(self):
        # Open a fallback client to a read-write server; stores succeed

        # Start a read-only-fallback client for a read-write server
        self._storage = self.openClientStorage(read_only_fallback=1)
        # Stores should succeed here
        self._dostore()
        self._storage.close()

    def checkReadOnlyFallbackReadOnlyServer(self):
        # Open a fallback client to a read-only *server*; stores fail

        # We don't want the read-write server created by setUp()
        self.shutdownServer()
        self._servers = []
        # Start a read-only server
        self.startServer(create=0, index=0, ro_svr=1)
        # Start a read-only-fallback client
        self._storage = self.openClientStorage(read_only_fallback=1)
        self.assertTrue(self._storage.isReadOnly())
        # Stores should fail here
        self.assertRaises(ReadOnlyError, self._dostore)
        self._storage.close()


    def checkDisconnectionError(self):
# Make sure we get a ClientDisconnected when we try to read an + # object when we're not connected to a storage server and the + # object is not in the cache. + self.shutdownServer() + self._storage = self.openClientStorage('test', 1000, wait=0) + with short_timeout(self): + self.assertRaises(ClientDisconnected, + self._storage.load, b'fredwash', '') + self._storage.close() + + def checkBasicPersistence(self): + # Verify cached data persists across client storage instances. + + # To verify that the cache is being used, the test closes the + # server and then starts a new client with the server down. + # When the server is down, a load() gets the data from its cache. + + self._storage = self.openClientStorage('test', 100000) + oid = self._storage.new_oid() + obj = MinPO(12) + revid1 = self._dostore(oid, data=obj) + self._storage.close() + self.shutdownServer() + self._storage = self.openClientStorage('test', 100000, wait=0) + data, revid2 = self._storage.load(oid, '') + self.assertEqual(zodb_unpickle(data), MinPO(12)) + self.assertEqual(revid1, revid2) + self._storage.close() + + def checkDisconnectedCacheWorks(self): + # Check that the cache works when the client is disconnected. + self._storage = self.openClientStorage('test') + oid1 = self._storage.new_oid() + obj1 = MinPO("1" * 500) + self._dostore(oid1, data=obj1) + oid2 = self._storage.new_oid() + obj2 = MinPO("2" * 500) + self._dostore(oid2, data=obj2) + expected1 = self._storage.load(oid1, '') + expected2 = self._storage.load(oid2, '') + + # Shut it all down, and try loading from the persistent cache file + # without a server present. 
+ self._storage.close() + self.shutdownServer() + self._storage = self.openClientStorage('test', wait=False) + self.assertEqual(expected1, self._storage.load(oid1, '')) + self.assertEqual(expected2, self._storage.load(oid2, '')) + self._storage.close() + + def checkDisconnectedCacheFails(self): + # Like checkDisconnectedCacheWorks above, except the cache + # file is so small that only one object can be remembered. + self._storage = self.openClientStorage('test', cache_size=900) + oid1 = self._storage.new_oid() + obj1 = MinPO("1" * 500) + self._dostore(oid1, data=obj1) + oid2 = self._storage.new_oid() + obj2 = MinPO("2" * 500) + # The cache file is so small that adding oid2 will evict oid1. + self._dostore(oid2, data=obj2) + expected2 = self._storage.load(oid2, '') + + # Shut it all down, and try loading from the persistent cache file + # without a server present. + self._storage.close() + self.shutdownServer() + self._storage = self.openClientStorage('test', cache_size=900, + wait=False) + # oid2 should still be in cache. + self.assertEqual(expected2, self._storage.load(oid2, '')) + # But oid1 should have been purged, so that trying to load it will + # try to fetch it from the (non-existent) ZEO server. + with short_timeout(self): + self.assertRaises(ClientDisconnected, self._storage.load, oid1, '') + self._storage.close() + + def checkVerificationInvalidationPersists(self): + # This tests a subtle invalidation bug from ZODB 3.3: + # invalidations processed as part of ZEO cache verification acted + # kinda OK wrt the in-memory cache structures, but had no effect + # on the cache file. So opening the file cache again could + # incorrectly believe that a previously invalidated object was + # still current. This takes some effort to set up. + + # First, using a persistent cache ('test'), create an object + # MinPO(13). We used to see this again at the end of this test, + # despite that we modify it, and despite that it gets invalidated + # in 'test', before the end. 
+ self._storage = self.openClientStorage('test') + oid = self._storage.new_oid() + obj = MinPO(13) + self._dostore(oid, data=obj) + self._storage.close() + + # Now modify obj via a temp connection. `test` won't learn about + # this until we open a connection using `test` again. + self._storage = self.openClientStorage() + pickle, rev = self._storage.load(oid, '') + newobj = zodb_unpickle(pickle) + self.assertEqual(newobj, obj) + newobj.value = 42 # .value *should* be 42 forever after now, not 13 + self._dostore(oid, data=newobj, revid=rev) + self._storage.close() + + # Open 'test' again. `oid` in this cache should be (and is) + # invalidated during cache verification. The bug was that it + # got invalidated (kinda) in memory, but not in the cache file. + self._storage = self.openClientStorage('test') + + # The invalidation happened already. Now create and store a new + # object before closing this storage: this is so `test` believes + # it's seen transactions beyond the one that invalidated `oid`, so + # that the *next* time we open `test` it doesn't process another + # invalidation for `oid`. It's also important that we not try to + # load `oid` now: because it's been (kinda) invalidated in the + # cache's memory structures, loading it now would fetch the + # current revision from the server, thus hiding the bug. + obj2 = MinPO(666) + oid2 = self._storage.new_oid() + self._dostore(oid2, data=obj2) + self._storage.close() + + # Finally, open `test` again and load `oid`. `test` believes + # it's beyond the transaction that modified `oid`, so its view + # of whether it has an up-to-date `oid` comes solely from the disk + # file, unaffected by cache verification. + self._storage = self.openClientStorage('test') + pickle, rev = self._storage.load(oid, '') + newobj_copy = zodb_unpickle(pickle) + # This used to fail, with + # AssertionError: MinPO(13) != MinPO(42) + # That is, `test` retained a stale revision of the object on disk. 
+ self.assertEqual(newobj_copy, newobj) + self._storage.close() + + def checkBadMessage1(self): + # not even close to a real message + self._bad_message(b"salty") + + def checkBadMessage2(self): + # just like a real message, but with an unpicklable argument + global Hack + class Hack(object): + pass + + msg = encode(1, 0, "foo", (Hack(),)) + self._bad_message(msg) + del Hack + + def _bad_message(self, msg): + # Establish a connection, then send the server an ill-formatted + # request. Verify that the connection is closed and that it is + # possible to establish a new connection. + + self._storage = self.openClientStorage() + self._dostore() + + generation = self._storage._connection_generation + + future = concurrent.futures.Future() + + def write(): + try: + self._storage._server.client.protocol._write(msg) + except Exception as exc: + future.set_exception(exc) + else: + future.set_result(None) + + # break into the internals to send a bogus message + self._storage._server.loop.call_soon_threadsafe(write) + future.result() + + # If we manage to call _dostore before the server disconnects + # us, we'll get a ClientDisconnected error. When we retry, it + # will succeed. It will succeed because: + # - _dostore calls tpc_abort + # - tpc_abort makes a synchronous call to the server to abort + # the transaction + # - when disconnected, synchronous calls are blocked for a little + # while while reconnecting (or they timeout of it takes too long). + try: + self._dostore() + except ClientDisconnected: + self._dostore() + + self.assertTrue(self._storage._connection_generation > generation) + + # Test case for multiple storages participating in a single + # transaction. This is not really a connection test, but it needs + # about the same infrastructure (several storage servers). + + # TODO: with the current ZEO code, this occasionally fails. + # That's the point of this test. 
:-) + + def NOcheckMultiStorageTransaction(self): + # Configuration parameters (larger values mean more likely deadlocks) + N = 2 + # These don't *have* to be all the same, but it's convenient this way + self.nservers = N + self.nthreads = N + self.ntrans = N + self.nobj = N + + # Start extra servers + for i in range(1, self.nservers): + self._newAddr() + self.startServer(index=i) + + # Spawn threads that each do some transactions on all storages + threads = [] + try: + for i in range(self.nthreads): + t = MSTThread(self, "T%d" % i) + threads.append(t) + t.start() + # Wait for all threads to finish + for t in threads: + t.join(60) + self.assertFalse(t.isAlive(), "%s didn't die" % t.getName()) + finally: + for t in threads: + t.closeclients() + + def checkCrossDBInvalidations(self): + db1 = DB(self.openClientStorage()) + c1 = db1.open() + r1 = c1.root() + + r1["a"] = MinPO("a") + transaction.commit() + self.assertEqual(r1._p_state, 0) # up-to-date + + db2 = DB(self.openClientStorage()) + r2 = db2.open().root() + + self.assertEqual(r2["a"].value, "a") + + r2["b"] = MinPO("b") + transaction.commit() + + # Make sure the invalidation is received in the other client. + # We've had problems with this timing out on "slow" and/or "very + # busy" machines, so we increase the sleep time on each trip, and + # are willing to wait quite a long time. + for i in range(20): + c1.sync() + if r1._p_state == -1: + break + time.sleep(i / 10.0) + self.assertEqual(r1._p_state, -1) # ghost + + r1.keys() # unghostify + self.assertEqual(r1._p_serial, r2._p_serial) + self.assertEqual(r1["b"].value, "b") + + db2.close() + db1.close() + + def checkCheckForOutOfDateServer(self): + # We don't want to connect a client to a server if the client + # has seen newer transactions. 
+ self._storage = self.openClientStorage() + self._dostore() + self.shutdownServer() + with short_timeout(self): + self.assertRaises(ClientDisconnected, + self._storage.load, b'\0'*8, '') + + self.startServer() + + # No matter how long we wait, the client won't reconnect: + time.sleep(2) + with short_timeout(self): + self.assertRaises(ClientDisconnected, + self._storage.load, b'\0'*8, '') + +class SSLConnectionTests(ConnectionTests): + + def getServerConfig(self, addr, ro_svr): + return testssl.server_config.replace( + '127.0.0.1:0', + '{}: {}\nread-only {}'.format( + addr[0], addr[1], 'true' if ro_svr else 'false')) + + def _client_options(self): + return {'ssl': testssl.client_ssl()} + + +class InvqTests(CommonSetupTearDown): + invq = 3 + + def checkQuickVerificationWith2Clients(self): + perstorage = self.openClientStorage(cache="test", cache_size=4000) + + self._storage = self.openClientStorage() + oid = self._storage.new_oid() + oid2 = self._storage.new_oid() + # When we create a new storage, it should always do a full + # verification + self.assertEqual(self._storage.verify_result, "empty cache") + # do two storages of the object to make sure an invalidation + # message is generated + revid = self._dostore(oid) + revid = self._dostore(oid, revid) + # Create a second object and revision to guarantee it doesn't + # show up in the list of invalidations sent when perstore restarts. 
+ revid2 = self._dostore(oid2) + revid2 = self._dostore(oid2, revid2) + + forker.wait_until( + lambda : + perstorage.lastTransaction() == self._storage.lastTransaction()) + + perstorage.load(oid, '') + perstorage.close() + + forker.wait_until(lambda : os.path.exists('test-1.zec')) + + revid = self._dostore(oid, revid) + + perstorage = self.openClientStorage(cache="test") + self.assertEqual(perstorage.verify_result, "quick verification") + + self.assertEqual(perstorage.load(oid, ''), + self._storage.load(oid, '')) + perstorage.close() + + def checkVerificationWith2ClientsInvqOverflow(self): + perstorage = self.openClientStorage(cache="test") + self.assertEqual(perstorage.verify_result, "empty cache") + + self._storage = self.openClientStorage() + oid = self._storage.new_oid() + # When we create a new storage, it should always do a full + # verification + self.assertEqual(self._storage.verify_result, "empty cache") + # do two storages of the object to make sure an invalidation + # message is generated + revid = self._dostore(oid) + revid = self._dostore(oid, revid) + forker.wait_until( + "Client has seen all of the transactions from the server", + lambda : + perstorage.lastTransaction() == self._storage.lastTransaction() + ) + perstorage.load(oid, '') + perstorage.close() + + # the test code sets invq bound to 2 + for i in range(5): + revid = self._dostore(oid, revid) + + perstorage = self.openClientStorage(cache="test") + self.assertEqual(perstorage.verify_result, "cache too old, clearing") + self.assertEqual(self._storage.load(oid, '')[1], revid) + self.assertEqual(perstorage.load(oid, ''), + self._storage.load(oid, '')) + + perstorage.close() + +class ReconnectionTests(CommonSetupTearDown): + # The setUp() starts a server automatically. In order for its + # state to persist, we set the class variable keep to 1. In + # order for its state to be cleaned up, the last startServer() + # call in the test must pass keep=0. 
+ keep = 1 + invq = 2 + + def checkReadOnlyStorage(self): + # Open a read-only client to a read-only *storage*; stores fail + + # We don't want the read-write server created by setUp() + self.shutdownServer() + self._servers = [] + # Start a read-only server + self.startServer(create=0, index=0, read_only=1, keep=0) + # Start a read-only client + self._storage = self.openClientStorage(read_only=1) + # Stores should fail here + self.assertRaises(ReadOnlyError, self._dostore) + + def checkReadOnlyFallbackReadOnlyStorage(self): + # Open a fallback client to a read-only *storage*; stores fail + + # We don't want the read-write server created by setUp() + self.shutdownServer() + self._servers = [] + # Start a read-only server + self.startServer(create=0, index=0, read_only=1, keep=0) + # Start a read-only-fallback client + self._storage = self.openClientStorage(read_only_fallback=1) + # Stores should fail here + self.assertRaises(ReadOnlyError, self._dostore) + + # TODO: Compare checkReconnectXXX() here to checkReconnection() + # further down. Is the code here hopelessly naive, or is + # checkReconnection() overwrought? 
+ + def checkReconnectWritable(self): + # A read-write client reconnects to a read-write server + + # Start a client + self._storage = self.openClientStorage() + # Stores should succeed here + self._dostore() + + # Shut down the server + self.shutdownServer() + self._servers = [] + # Poll until the client disconnects + self.pollDown() + # Stores should fail now + with short_timeout(self): + self.assertRaises(ClientDisconnected, self._dostore) + + # Restart the server + self.startServer(create=0) + # Poll until the client connects + self.pollUp() + # Stores should succeed here + self._dostore() + self._storage.close() + + def checkReconnectReadOnly(self): + # A read-only client reconnects from a read-write to a + # read-only server + + # Start a client + self._storage = self.openClientStorage(read_only=1) + # Stores should fail here + self.assertRaises(ReadOnlyError, self._dostore) + + # Shut down the server + self.shutdownServer() + self._servers = [] + # Poll until the client disconnects + self.pollDown() + # Stores should still fail + self.assertRaises(ReadOnlyError, self._dostore) + + # Restart the server + self.startServer(create=0, read_only=1, keep=0) + # Poll until the client connects + self.pollUp() + # Stores should still fail + self.assertRaises(ReadOnlyError, self._dostore) + + def checkReconnectFallback(self): + # A fallback client reconnects from a read-write to a + # read-only server + + # Start a client in fallback mode + self._storage = self.openClientStorage(read_only_fallback=1) + # Stores should succeed here + self._dostore() + + # Shut down the server + self.shutdownServer() + self._servers = [] + # Poll until the client disconnects + self.pollDown() + # Stores should fail now + with short_timeout(self): + self.assertRaises(ClientDisconnected, self._dostore) + + # Restart the server + self.startServer(create=0, read_only=1, keep=0) + # Poll until the client connects + self.pollUp() + # Stores should fail here + self.assertRaises(ReadOnlyError, 
self._dostore)

    def checkReconnectUpgrade(self):
        # A fallback client reconnects from a read-only to a
        # read-write server

        # We don't want the read-write server created by setUp()
        self.shutdownServer()
        self._servers = []
        # Start a read-only server
        self.startServer(create=0, read_only=1)
        # Start a client in fallback mode
        self._storage = self.openClientStorage(read_only_fallback=1)
        # Stores should fail here
        self.assertRaises(ReadOnlyError, self._dostore)

        # Shut down the server
        self.shutdownServer()
        self._servers = []
        # Poll until the client disconnects
        self.pollDown()

        # Accesses should fail now
        with short_timeout(self):
            self.assertRaises(ClientDisconnected, self._storage.ping)

        # Restart the server, this time read-write
        self.startServer(create=0, keep=0)
        # Poll until the client connects
        self.pollUp()
        # Stores should now succeed
        self._dostore()

    def checkReconnectSwitch(self):
        # A fallback client initially connects to a read-only server,
        # then discovers a read-write server and switches to that

        # We don't want the read-write server created by setUp()
        self.shutdownServer()
        self._servers = []
        # Allocate a second address (for the second server)
        self._newAddr()

        # Start a read-only server
        self.startServer(create=0, index=0, read_only=1, keep=0)
        # Start a client in fallback mode
        self._storage = self.openClientStorage(read_only_fallback=1)
        # Stores should fail here
        self.assertRaises(ReadOnlyError, self._dostore)

        # Start a read-write server
        self.startServer(index=1, read_only=0, keep=0)
        # After a while, stores should work
        for i in range(300): # Try for 30 seconds
            try:
                self._dostore()
                break
            except (ClientDisconnected, ReadOnlyError):
                # If the client isn't connected at all, sync() returns
                # quickly and the test fails because it doesn't wait
                # long enough for the client.
+ time.sleep(0.1) + else: + self.fail("Couldn't store after starting a read-write server") + + def checkNoVerificationOnServerRestart(self): + self._storage = self.openClientStorage() + # When we create a new storage, it should always do a full + # verification + self.assertEqual(self._storage.verify_result, "empty cache") + self._dostore() + self.shutdownServer() + self.pollDown() + self._storage.verify_result = None + self.startServer(create=0, keep=0) + self.pollUp() + # There were no transactions committed, so no verification + # should be needed. + self.assertEqual(self._storage.verify_result, "Cache up to date") + + def checkNoVerificationOnServerRestartWith2Clients(self): + perstorage = self.openClientStorage(cache="test") + self.assertEqual(perstorage.verify_result, "empty cache") + + self._storage = self.openClientStorage() + oid = self._storage.new_oid() + # When we create a new storage, it should always do a full + # verification + self.assertEqual(self._storage.verify_result, "empty cache") + # do two storages of the object to make sure an invalidation + # message is generated + revid = self._dostore(oid) + revid = self._dostore(oid, revid) + forker.wait_until( + "Client has seen all of the transactions from the server", + lambda : + perstorage.lastTransaction() == self._storage.lastTransaction() + ) + perstorage.load(oid, '') + self.shutdownServer() + self.pollDown() + self._storage.verify_result = None + perstorage.verify_result = None + logging.info('2ALLBEEF') + self.startServer(create=0, keep=0) + self.pollUp() + self.pollUp(storage=perstorage) + # There were no transactions committed, so no verification + # should be needed. 
+ self.assertEqual(self._storage.verify_result, "Cache up to date") + self.assertEqual(perstorage.verify_result, "Cache up to date") + perstorage.close() + self._storage.close() + + def checkDisconnectedAbort(self): + self._storage = self.openClientStorage() + self._dostore() + oids = [self._storage.new_oid() for i in range(5)] + txn = TransactionMetaData() + self._storage.tpc_begin(txn) + for oid in oids: + data = zodb_pickle(MinPO(oid)) + self._storage.store(oid, None, data, '', txn) + self.shutdownServer() + with short_timeout(self): + self.assertRaises(ClientDisconnected, self._storage.tpc_vote, txn) + self.startServer(create=0) + self._storage.tpc_abort(txn) + self._dostore() + + # This test is supposed to cover the following error, although + # I don't have much confidence that it does. The likely + # explanation for the error is that the _tbuf contained + # objects that weren't in the _seriald, because the client was + # interrupted waiting for tpc_vote() to return. When the next + # transaction committed, it tried to do something with the + # bogus _tbuf entries. The explanation is wrong/incomplete, + # because tpc_begin() should clear the _tbuf. + + # 2003-01-15T15:44:19 ERROR(200) ZODB A storage error occurred + # in the last phase of a two-phase commit. This shouldn't happen. + + # Traceback (innermost last): + # Module ZODB.Transaction, line 359, in _finish_one + # Module ZODB.Connection, line 691, in tpc_finish + # Module ZEO.ClientStorage, line 679, in tpc_finish + # Module ZEO.ClientStorage, line 709, in _update_cache + # KeyError: ... + + + def checkReconnection(self): + # Check that the client reconnects when a server restarts. 

        self._storage = self.openClientStorage()
        oid = self._storage.new_oid()
        obj = MinPO(12)
        self._dostore(oid, data=obj)
        logging.info("checkReconnection(): About to shutdown server")
        self.shutdownServer()
        logging.info("checkReconnection(): About to restart server")
        self.startServer(create=0)
        forker.wait_until('reconnect', self._storage.is_connected)
        oid = self._storage.new_oid()
        obj = MinPO(12)
        # Retry the store until the reconnected client accepts it.
        while 1:
            try:
                self._dostore(oid, data=obj)
                break
            except ClientDisconnected:
                # Maybe the exception mess is better now
                logging.info("checkReconnection(): Error after"
                             " server restart; retrying.", exc_info=True)
                transaction.abort()
                # Give the other thread a chance to run.
                time.sleep(0.1)
        logging.info("checkReconnection(): finished")
        self._storage.close()

    def checkMultipleServers(self):
        # Crude test-- just start two servers and do a commit at each one.

        self._newAddr()
        self._storage = self.openClientStorage('test', 100000)
        self._dostore()
        self.shutdownServer(index=0)

        # When we start the second server, we use the data file from
        # the original server so that the new server is a replica of
        # the original.  We need this because ClientStorage won't use
        # a server if the server's last transaction is earlier than
        # what the client has seen.
        self.startServer(index=1, path=self.file+'.0', create=False)

        # If we can still store after shutting down one of the
        # servers, we must be reconnecting to the other server.
+ + did_a_store = 0 + for i in range(10): + try: + self._dostore() + did_a_store = 1 + break + except ClientDisconnected: + time.sleep(0.5) + self.assertTrue(did_a_store) + self._storage.close() + +class TimeoutTests(CommonSetupTearDown): + timeout = 1 + + def checkTimeout(self): + self._storage = storage = self.openClientStorage() + txn = TransactionMetaData() + storage.tpc_begin(txn) + storage.tpc_vote(txn) + time.sleep(2) + with short_timeout(self): + self.assertRaises(ClientDisconnected, storage.tpc_finish, txn) + + # Make sure it's logged as CRITICAL + with open("server.log") as f: + for line in f: + if (('Transaction timeout after' in line) and + ('CRITICAL ZEO.StorageServer' in line) + ): + break + else: + self.fail('bad logging') + + storage.close() + + def checkTimeoutOnAbort(self): + storage = self.openClientStorage() + txn = TransactionMetaData() + storage.tpc_begin(txn) + storage.tpc_vote(txn) + storage.tpc_abort(txn) + storage.close() + + def checkTimeoutOnAbortNoLock(self): + storage = self.openClientStorage() + txn = TransactionMetaData() + storage.tpc_begin(txn) + storage.tpc_abort(txn) + storage.close() + + def checkTimeoutAfterVote(self): + self._storage = storage = self.openClientStorage() + # Assert that the zeo cache is empty + self.assertTrue(not list(storage._cache.contents())) + # Create the object + oid = storage.new_oid() + obj = MinPO(7) + # Now do a store, sleeping before the finish so as to cause a timeout + t = TransactionMetaData() + old_connection_count = storage.connection_count_for_tests + storage.tpc_begin(t) + revid1 = storage.store(oid, ZERO, zodb_pickle(obj), '', t) + storage.tpc_vote(t) + # Now sleep long enough for the storage to time out + time.sleep(3) + self.assertTrue( + (not storage.is_connected()) + or + (storage.connection_count_for_tests > old_connection_count) + ) + storage._wait() + self.assertTrue(storage.is_connected()) + # We expect finish to fail + self.assertRaises(ClientDisconnected, storage.tpc_finish, t) + # 
The cache should still be empty + self.assertTrue(not list(storage._cache.contents())) + # Load should fail since the object should not be in either the cache + # or the server. + self.assertRaises(KeyError, storage.load, oid, '') + +class MSTThread(threading.Thread): + + __super_init = threading.Thread.__init__ + + def __init__(self, testcase, name): + self.__super_init(name=name) + self.testcase = testcase + self.clients = [] + + def run(self): + tname = self.getName() + testcase = self.testcase + + # Create client connections to each server + clients = self.clients + for i in range(len(testcase.addr)): + c = testcase.openClientStorage(addr=testcase.addr[i]) + c.__name = "C%d" % i + clients.append(c) + + for i in range(testcase.ntrans): + # Because we want a transaction spanning all storages, + # we can't use _dostore(). This is several _dostore() calls + # expanded in-line (mostly). + + # Create oid->serial mappings + for c in clients: + c.__oids = [] + c.__serials = {} + + # Begin a transaction + t = TransactionMetaData() + for c in clients: + #print("%s.%s.%s begin" % (tname, c.__name, i)) + c.tpc_begin(t) + + for j in range(testcase.nobj): + for c in clients: + # Create and store a new object on each server + oid = c.new_oid() + c.__oids.append(oid) + data = MinPO("%s.%s.t%d.o%d" % (tname, c.__name, i, j)) + #print(data.value) + data = zodb_pickle(data) + c.store(oid, ZERO, data, '', t) + + # Vote on all servers and handle serials + for c in clients: + #print("%s.%s.%s vote" % (tname, c.__name, i)) + c.tpc_vote(t) + + # Finish on all servers + for c in clients: + #print("%s.%s.%s finish\n" % (tname, c.__name, i)) + c.tpc_finish(t) + + for c in clients: + # Check that we got serials for all oids + for oid in c.__oids: + testcase.assertIn(oid, c.__serials) + # Check that we got serials for no other oids + for oid in c.__serials.keys(): + testcase.assertIn(oid, c.__oids) + + def closeclients(self): + # Close clients opened by run() + for c in self.clients: + 
try: + c.close() + except: + pass + + +@contextlib.contextmanager +def short_timeout(self): + old = self._storage._server.timeout + self._storage._server.timeout = 1 + yield + self._storage._server.timeout = old + +# Run IPv6 tests if V6 sockets are supported +try: + with socket.socket(socket.AF_INET6, socket.SOCK_STREAM) as s: + pass +except (socket.error, AttributeError): + pass +else: + class V6Setup(object): + def _getAddr(self): + return '::1', forker.get_port(self) + + _g = globals() + for name, value in tuple(_g.items()): + if isinstance(value, type) and issubclass(value, CommonSetupTearDown): + _g[name+"V6"] = type(name+"V6", (V6Setup, value), {}) diff --git a/thesisenv/lib/python3.6/site-packages/ZEO/tests/InvalidationTests.py b/thesisenv/lib/python3.6/site-packages/ZEO/tests/InvalidationTests.py new file mode 100644 index 0000000..028d013 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO/tests/InvalidationTests.py @@ -0,0 +1,511 @@ +############################################################################## +# +# Copyright (c) 2003 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## + +import threading +import time +from random import Random + +import transaction + +from BTrees.check import check, display +from BTrees.OOBTree import OOBTree + +from ZEO.tests.TestThread import TestThread + +from ZODB.DB import DB +from ZODB.POSException import ReadConflictError, ConflictError + +# The tests here let several threads have a go at one or more database +# instances simultaneously. Each thread appends a disjoint (from the +# other threads) sequence of increasing integers to an OOBTree, one at +# at time (per thread). This provokes lots of conflicts, and BTrees +# work hard at conflict resolution too. An OOBTree is used because +# that flavor has the smallest maximum bucket size, and so splits buckets +# more often than other BTree flavors. +# +# When these tests were first written, they provoked an amazing number +# of obscure timing-related bugs in cache consistency logic, revealed +# by failure of the BTree to pass internal consistency checks at the end, +# and/or by failure of the BTree to contain all the keys the threads +# thought they added (i.e., the keys for which transaction.commit() +# did not raise any exception). + +class FailableThread(TestThread): + + # mixin class + # subclass must provide + # - self.stop attribute (an event) + # - self._testrun() method + + # TestThread.run() invokes testrun(). + def testrun(self): + try: + self._testrun() + except: + # Report the failure here to all the other threads, so + # that they stop quickly. + self.stop.set() + raise + + +class StressTask(object): + # Append integers startnum, startnum + step, startnum + 2*step, ... + # to 'tree'. 
If sleep is given, sleep + # that long after each append. At the end, instance var .added_keys + # is a list of the ints the thread believes it added successfully. + def __init__(self, db, threadnum, startnum, step=2, sleep=None): + self.db = db + self.threadnum = threadnum + self.startnum = startnum + self.step = step + self.sleep = sleep + self.added_keys = [] + self.tm = transaction.TransactionManager() + self.cn = self.db.open(transaction_manager=self.tm) + self.cn.sync() + + def doStep(self): + tree = self.cn.root()["tree"] + key = self.startnum + tree[key] = self.threadnum + + def commit(self): + cn = self.cn + key = self.startnum + self.tm.get().note(u"add key %s" % key) + try: + self.tm.get().commit() + except ConflictError as msg: + self.tm.abort() + else: + if self.sleep: + time.sleep(self.sleep) + self.added_keys.append(key) + self.startnum += self.step + + def cleanup(self): + self.tm.get().abort() + self.cn.close() + +def _runTasks(rounds, *tasks): + '''run *task* interleaved for *rounds* rounds.''' + def commit(run, actions): + actions.append(':') + for t in run: + t.commit() + del run[:] + r = Random() + r.seed(1064589285) # make it deterministic + run = [] + actions = [] + try: + for i in range(rounds): + t = r.choice(tasks) + if t in run: + commit(run, actions) + run.append(t) + t.doStep() + actions.append(repr(t.startnum)) + commit(run,actions) + # stderr.write(' '.join(actions)+'\n') + finally: + for t in tasks: + t.cleanup() + + +class StressThread(FailableThread): + + # Append integers startnum, startnum + step, startnum + 2*step, ... + # to 'tree' until Event stop is set. If sleep is given, sleep + # that long after each append. At the end, instance var .added_keys + # is a list of the ints the thread believes it added successfully. 
+ def __init__(self, testcase, db, stop, threadnum, commitdict, + startnum, step=2, sleep=None): + TestThread.__init__(self, testcase) + self.db = db + self.stop = stop + self.threadnum = threadnum + self.startnum = startnum + self.step = step + self.sleep = sleep + self.added_keys = [] + self.commitdict = commitdict + + def _testrun(self): + tm = transaction.TransactionManager() + cn = self.db.open(transaction_manager=tm) + while not self.stop.isSet(): + try: + tree = cn.root()["tree"] + break + except (ConflictError, KeyError): + tm.abort() + key = self.startnum + while not self.stop.isSet(): + try: + tree[key] = self.threadnum + tm.get().note(u"add key %s" % key) + tm.commit() + self.commitdict[self] = 1 + if self.sleep: + time.sleep(self.sleep) + except (ReadConflictError, ConflictError) as msg: + tm.abort() + else: + self.added_keys.append(key) + key += self.step + cn.close() + +class LargeUpdatesThread(FailableThread): + + # A thread that performs a lot of updates. It attempts to modify + # more than 25 objects so that it can test code that runs vote + # in a separate thread when it modifies more than 25 objects. + + def __init__(self, test, db, stop, threadnum, commitdict, startnum, + step=2, sleep=None): + TestThread.__init__(self, test) + self.db = db + self.stop = stop + self.threadnum = threadnum + self.startnum = startnum + self.step = step + self.sleep = sleep + self.added_keys = [] + self.commitdict = commitdict + + def _testrun(self): + cn = self.db.open() + while not self.stop.isSet(): + try: + tree = cn.root()["tree"] + break + except (ConflictError, KeyError): + # print("%d getting tree abort" % self.threadnum) + transaction.abort() + + keys_added = {} # set of keys we commit + tkeys = [] + while not self.stop.isSet(): + + # The test picks 50 keys spread across many buckets. + # self.startnum and self.step ensure that all threads use + # disjoint key sets, to minimize conflict errors. 
+ + nkeys = len(tkeys) + if nkeys < 50: + tkeys = list(range(self.startnum, 3000, self.step)) + nkeys = len(tkeys) + step = max(int(nkeys / 50), 1) + keys = [tkeys[i] for i in range(0, nkeys, step)] + for key in keys: + try: + tree[key] = self.threadnum + except (ReadConflictError, ConflictError) as msg: + # print("%d setting key %s" % (self.threadnum, msg)) + transaction.abort() + break + else: + # print("%d set #%d" % (self.threadnum, len(keys))) + transaction.get().note(u"keys %s" % ", ".join(map(str, keys))) + try: + transaction.commit() + self.commitdict[self] = 1 + if self.sleep: + time.sleep(self.sleep) + except ConflictError as msg: + # print("%d commit %s" % (self.threadnum, msg)) + transaction.abort() + continue + for k in keys: + tkeys.remove(k) + keys_added[k] = 1 + self.added_keys = keys_added.keys() + cn.close() + +class InvalidationTests(object): + + # Minimum # of seconds the main thread lets the workers run. The + # test stops as soon as this much time has elapsed, and all threads + # have managed to commit a change. + MINTIME = 10 + + # Maximum # of seconds the main thread lets the workers run. We + # stop after this long has elapsed regardless of whether all threads + # have managed to commit a change. + MAXTIME = 300 + + StressThread = StressThread + + def _check_tree(self, cn, tree): + # Make sure the BTree is sane at the C level. + retries = 3 + while retries: + retries -= 1 + try: + check(tree) + tree._check() + except ReadConflictError: + if retries: + transaction.abort() + else: + raise + except: + display(tree) + raise + + def _check_threads(self, tree, *threads): + # Make sure the thread's view of the world is consistent with + # the actual database state. 
+ + expected_keys = [] + errormsgs = [] + err = errormsgs.append + + for t in threads: + if not t.added_keys: + err("thread %d didn't add any keys" % t.threadnum) + expected_keys.extend(t.added_keys) + expected_keys.sort() + + for i in range(100): + tree._p_jar.sync() + actual_keys = list(tree.keys()) + if expected_keys == actual_keys: + break + time.sleep(.1) + else: + err("expected keys != actual keys") + for k in expected_keys: + if k not in actual_keys: + err("key %s expected but not in tree" % k) + for k in actual_keys: + if k not in expected_keys: + err("key %s in tree but not expected" % k) + + self.fail('\n'.join(errormsgs)) + + def go(self, stop, commitdict, *threads): + # Run the threads + for t in threads: + t.start() + delay = self.MINTIME + start = time.time() + while time.time() - start <= self.MAXTIME: + stop.wait(delay) + if stop.isSet(): + # Some thread failed. Stop right now. + break + delay = 2.0 + if len(commitdict) >= len(threads): + break + # Some thread still hasn't managed to commit anything. + stop.set() + # Give all the threads some time to stop before trying to clean up. + # cleanup() will cause the test to fail if some thread ended with + # an uncaught exception, and unittest will call the base class + # tearDown then immediately, but if other threads are still + # running that can lead to a cascade of spurious exceptions. 
+ for t in threads: + t.join(30) + for t in threads: + t.cleanup(10) + + def checkConcurrentUpdates2Storages_emulated(self): + self._storage = storage1 = self.openClientStorage() + db1 = DB(storage1) + storage2 = self.openClientStorage() + db2 = DB(storage2) + + cn = db1.open() + tree = cn.root()["tree"] = OOBTree() + transaction.commit() + # DM: allow time for invalidations to come in and process them + time.sleep(0.1) + + # Run two threads that update the BTree + t1 = StressTask(db1, 1, 1,) + t2 = StressTask(db2, 2, 2,) + _runTasks(100, t1, t2) + + cn.sync() + self._check_tree(cn, tree) + self._check_threads(tree, t1, t2) + + cn.close() + db1.close() + db2.close() + + def checkConcurrentUpdates2Storages(self): + self._storage = storage1 = self.openClientStorage() + db1 = DB(storage1) + storage2 = self.openClientStorage() + db2 = DB(storage2) + stop = threading.Event() + + cn = db1.open() + tree = cn.root()["tree"] = OOBTree() + transaction.commit() + cn.close() + + # Run two threads that update the BTree + cd = {} + t1 = self.StressThread(self, db1, stop, 1, cd, 1) + t2 = self.StressThread(self, db2, stop, 2, cd, 2) + self.go(stop, cd, t1, t2) + + while db1.lastTransaction() != db2.lastTransaction(): + db1._storage.sync() + db2._storage.sync() + + cn = db1.open() + tree = cn.root()["tree"] + self._check_tree(cn, tree) + self._check_threads(tree, t1, t2) + + cn.close() + db1.close() + db2.close() + + def checkConcurrentUpdates19Storages(self): + n = 19 + dbs = [DB(self.openClientStorage()) for i in range(n)] + self._storage = dbs[0].storage + stop = threading.Event() + + cn = dbs[0].open() + tree = cn.root()["tree"] = OOBTree() + transaction.commit() + cn.close() + + # Run threads that update the BTree + cd = {} + threads = [self.StressThread(self, dbs[i], stop, i, cd, i, n) + for i in range(n)] + self.go(stop, cd, *threads) + + while len(set(db.lastTransaction() for db in dbs)) > 1: + _ = [db._storage.sync() for db in dbs] + + cn = dbs[0].open() + tree = 
cn.root()["tree"] + self._check_tree(cn, tree) + self._check_threads(tree, *threads) + + cn.close() + _ = [db.close() for db in dbs] + + def checkConcurrentUpdates1Storage(self): + self._storage = storage1 = self.openClientStorage() + db1 = DB(storage1) + stop = threading.Event() + + cn = db1.open() + tree = cn.root()["tree"] = OOBTree() + transaction.commit() + cn.close() + + # Run two threads that update the BTree + cd = {} + t1 = self.StressThread(self, db1, stop, 1, cd, 1, sleep=0.01) + t2 = self.StressThread(self, db1, stop, 2, cd, 2, sleep=0.01) + self.go(stop, cd, t1, t2) + + cn = db1.open() + tree = cn.root()["tree"] + self._check_tree(cn, tree) + self._check_threads(tree, t1, t2) + + cn.close() + db1.close() + + def checkConcurrentUpdates2StoragesMT(self): + self._storage = storage1 = self.openClientStorage() + db1 = DB(storage1) + db2 = DB(self.openClientStorage()) + stop = threading.Event() + + cn = db1.open() + tree = cn.root()["tree"] = OOBTree() + transaction.commit() + cn.close() + + # Run three threads that update the BTree. + # Two of the threads share a single storage so that it + # is possible for both threads to read the same object + # at the same time. + + cd = {} + t1 = self.StressThread(self, db1, stop, 1, cd, 1, 3) + t2 = self.StressThread(self, db2, stop, 2, cd, 2, 3, 0.01) + t3 = self.StressThread(self, db2, stop, 3, cd, 3, 3, 0.01) + self.go(stop, cd, t1, t2, t3) + + while db1.lastTransaction() != db2.lastTransaction(): + time.sleep(.1) + + time.sleep(.1) + cn = db1.open() + tree = cn.root()["tree"] + self._check_tree(cn, tree) + self._check_threads(tree, t1, t2, t3) + + cn.close() + db1.close() + db2.close() + + def checkConcurrentLargeUpdates(self): + # Use 3 threads like the 2StorageMT test above. 
+ self._storage = storage1 = self.openClientStorage() + db1 = DB(storage1) + db2 = DB(self.openClientStorage()) + stop = threading.Event() + + cn = db1.open() + tree = cn.root()["tree"] = OOBTree() + for i in range(0, 3000, 2): + tree[i] = 0 + transaction.commit() + cn.close() + + # Run three threads that update the BTree. + # Two of the threads share a single storage so that it + # is possible for both threads to read the same object + # at the same time. + + cd = {} + t1 = LargeUpdatesThread(self, db1, stop, 1, cd, 1, 3, 0.02) + t2 = LargeUpdatesThread(self, db2, stop, 2, cd, 2, 3, 0.01) + t3 = LargeUpdatesThread(self, db2, stop, 3, cd, 3, 3, 0.01) + self.go(stop, cd, t1, t2, t3) + + while db1.lastTransaction() != db2.lastTransaction(): + db1._storage.sync() + db2._storage.sync() + + cn = db1.open() + tree = cn.root()["tree"] + self._check_tree(cn, tree) + + # Purge the tree of the dummy entries mapping to 0. + losers = [k for k, v in tree.items() if v == 0] + for k in losers: + del tree[k] + transaction.commit() + + self._check_threads(tree, t1, t2, t3) + + cn.close() + db1.close() + db2.close() diff --git a/thesisenv/lib/python3.6/site-packages/ZEO/tests/IterationTests.py b/thesisenv/lib/python3.6/site-packages/ZEO/tests/IterationTests.py new file mode 100644 index 0000000..74056dc --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO/tests/IterationTests.py @@ -0,0 +1,223 @@ +############################################################################## +# +# Copyright (c) 2008 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## +"""ZEO iterator protocol tests.""" + +import transaction +import six +import gc + +from ZODB.Connection import TransactionMetaData + +from ..asyncio.testing import AsyncRPC + +class IterationTests(object): + + def _assertIteratorIdsEmpty(self): + # Account for the need to run a GC collection + # under non-refcounted implementations like PyPy + # for storage._iterator_gc to fully do its job. + # First, confirm that it ran + self.assertTrue(self._storage._iterators._last_gc > 0) + gc_enabled = gc.isenabled() + # make sure there's no race conditions cleaning out the weak refs + gc.disable() + try: + self.assertEqual(0, len(self._storage._iterator_ids)) + except AssertionError: + # Ok, we have ids. That should also mean that the + # weak dictionary has the same length. + + self.assertEqual(len(self._storage._iterators), + len(self._storage._iterator_ids)) + # Now if we do a collection and re-ask for iterator_gc + # everything goes away as expected. + gc.enable() + gc.collect() + gc.collect() # sometimes PyPy needs it twice to clear weak refs + + self._storage._iterator_gc() + + self.assertEqual(len(self._storage._iterators), + len(self._storage._iterator_ids)) + self.assertEqual(0, len(self._storage._iterator_ids)) + finally: + if gc_enabled: + gc.enable() + else: + gc.disable() + + def checkIteratorGCProtocol(self): + # Test garbage collection on protocol level. + server = AsyncRPC(self._storage._server) + + iid = server.iterator_start(None, None) + # None signals the end of iteration. + self.assertEqual(None, server.iterator_next(iid)) + # The server has disposed the iterator already. 
+ self.assertRaises(KeyError, server.iterator_next, iid) + + iid = server.iterator_start(None, None) + # This time, we tell the server to throw the iterator away. + server.iterator_gc([iid]) + self.assertRaises(KeyError, server.iterator_next, iid) + + def checkIteratorExhaustionStorage(self): + # Test the storage's garbage collection mechanism. + self._dostore() + iterator = self._storage.iterator() + + # At this point, a wrapping iterator might not have called the CS + # iterator yet. We'll consume one item to make sure this happens. + six.advance_iterator(iterator) + self.assertEqual(1, len(self._storage._iterator_ids)) + iid = list(self._storage._iterator_ids)[0] + self.assertEqual([], list(iterator)) + self.assertEqual(0, len(self._storage._iterator_ids)) + + # The iterator has run through, so the server has already disposed it. + self.assertRaises(KeyError, self._storage._call, 'iterator_next', iid) + + def checkIteratorGCSpanTransactions(self): + # Keep a hard reference to the iterator so it won't be automatically + # garbage collected at the transaction boundary. + self._dostore() + iterator = self._storage.iterator() + self._dostore() + # As the iterator was not garbage collected, we can still use it. (We + # don't see the transaction we just wrote being picked up, because + # iterators only see the state from the point in time when they were + # created.) + self.assertTrue(list(iterator)) + + def checkIteratorGCStorageCommitting(self): + # We want the iterator to be garbage-collected, so we don't keep any + # hard references to it. The storage tracks its ID, though. + + # The odd little jig we do below arises from the fact that the + # CS iterator may not be constructed right away if the CS is wrapped. + # We need to actually do some iteration to get the iterator created. + # We do a store to make sure the iterator isn't exhausted right away. 
+ self._dostore() + six.advance_iterator(self._storage.iterator()) + + self.assertEqual(1, len(self._storage._iterator_ids)) + iid = list(self._storage._iterator_ids)[0] + + # GC happens at the transaction boundary. After that, both the storage + # and the server have forgotten the iterator. + self._storage._iterators._last_gc = -1 + self._dostore() + self._assertIteratorIdsEmpty() + self.assertRaises(KeyError, self._storage._call, 'iterator_next', iid) + + def checkIteratorGCStorageTPCAborting(self): + # The odd little jig we do below arises from the fact that the + # CS iterator may not be constructed right away if the CS is wrapped. + # We need to actually do some iteration to get the iterator created. + # We do a store to make sure the iterator isn't exhausted right away. + self._dostore() + six.advance_iterator(self._storage.iterator()) + + iid = list(self._storage._iterator_ids)[0] + + t = TransactionMetaData() + self._storage._iterators._last_gc = -1 + self._storage.tpc_begin(t) + self._storage.tpc_abort(t) + self._assertIteratorIdsEmpty() + self.assertRaises(KeyError, self._storage._call, 'iterator_next', iid) + + def checkIteratorGCStorageDisconnect(self): + + # The odd little jig we do below arises from the fact that the + # CS iterator may not be constructed right away if the CS is wrapped. + # We need to actually do some iteration to get the iterator created. + # We do a store to make sure the iterator isn't exhausted right away. + self._dostore() + six.advance_iterator(self._storage.iterator()) + + iid = list(self._storage._iterator_ids)[0] + t = TransactionMetaData() + self._storage.tpc_begin(t) + # Show that after disconnecting, the client side GCs the iterators + # as well. I'm calling this directly to avoid accidentally + # calling tpc_abort implicitly. 
+ self._storage.notify_disconnected() + self.assertEqual(0, len(self._storage._iterator_ids)) + + def checkIteratorParallel(self): + self._dostore() + self._dostore() + iter1 = self._storage.iterator() + iter2 = self._storage.iterator() + txn_info1 = six.advance_iterator(iter1) + txn_info2 = six.advance_iterator(iter2) + self.assertEqual(txn_info1.tid, txn_info2.tid) + txn_info1 = six.advance_iterator(iter1) + txn_info2 = six.advance_iterator(iter2) + self.assertEqual(txn_info1.tid, txn_info2.tid) + self.assertRaises(StopIteration, next, iter1) + self.assertRaises(StopIteration, next, iter2) + + +def iterator_sane_after_reconnect(): + r"""Make sure that iterators are invalidated on disconnect. + +Start a server: + + >>> addr, adminaddr = start_server( + ... '\npath fs\n', keep=1) + +Open a client storage to it and commit a some transactions: + + >>> import ZEO, ZODB, transaction + >>> client = ZEO.client(addr) + >>> db = ZODB.DB(client) + >>> conn = db.open() + >>> for i in range(10): + ... conn.root().i = i + ... transaction.commit() + +Create an iterator: + + >>> it = client.iterator() + >>> tid1 = it.next().tid + +Restart the storage: + + >>> stop_server(adminaddr) + >>> wait_disconnected(client) + >>> _ = start_server('\npath fs\n', addr=addr) + >>> wait_connected(client) + +Now, we'll create a second iterator: + + >>> it2 = client.iterator() + +If we try to advance the first iterator, we should get an error: + + >>> it.next().tid > tid1 + Traceback (most recent call last): + ... 
+ ClientDisconnected: Disconnected iterator + +The second iterator should be peachy: + + >>> it2.next().tid == tid1 + True + +Cleanup: + + >>> db.close() + """ diff --git a/thesisenv/lib/python3.6/site-packages/ZEO/tests/TestThread.py b/thesisenv/lib/python3.6/site-packages/ZEO/tests/TestThread.py new file mode 100644 index 0000000..0cfdfb8 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO/tests/TestThread.py @@ -0,0 +1,57 @@ +############################################################################## +# +# Copyright (c) 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## +"""A Thread base class for use with unittest.""" +import threading +import sys +import six + +class TestThread(threading.Thread): + """Base class for defining threads that run from unittest. + + The subclass should define a testrun() method instead of a run() + method. + + Call cleanup() when the test is done with the thread, instead of join(). + If the thread exits with an uncaught exception, it's captured and + re-raised when cleanup() is called. cleanup() should be called by + the main thread! Trying to tell unittest that a test failed from + another thread creates a nightmare of timing-depending cascading + failures and missed errors (tracebacks that show up on the screen, + but don't cause unittest to believe the test failed). + + cleanup() also joins the thread. 
If the thread ended without raising + an uncaught exception, and the join doesn't succeed in the timeout + period, then the test is made to fail with a "Thread still alive" + message. + """ + + def __init__(self, testcase): + threading.Thread.__init__(self) + # In case this thread hangs, don't stop Python from exiting. + self.setDaemon(1) + self._exc_info = None + self._testcase = testcase + + def run(self): + try: + self.testrun() + except: + self._exc_info = sys.exc_info() + + def cleanup(self, timeout=15): + self.join(timeout) + if self._exc_info: + six.reraise(self._exc_info[0], self._exc_info[1], self._exc_info[2]) + if self.isAlive(): + self._testcase.fail("Thread did not finish: %s" % self) diff --git a/thesisenv/lib/python3.6/site-packages/ZEO/tests/ThreadTests.py b/thesisenv/lib/python3.6/site-packages/ZEO/tests/ThreadTests.py new file mode 100644 index 0000000..df4a5ad --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO/tests/ThreadTests.py @@ -0,0 +1,132 @@ +############################################################################## +# +# Copyright (c) 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## +"""Compromising positions involving threads.""" + +import threading + +from ZODB.Connection import TransactionMetaData +from ZODB.tests.StorageTestBase import zodb_pickle, MinPO +import ZEO.Exceptions + +ZERO = '\0'*8 + +class BasicThread(threading.Thread): + def __init__(self, storage, doNextEvent, threadStartedEvent): + self.storage = storage + self.trans = TransactionMetaData() + self.doNextEvent = doNextEvent + self.threadStartedEvent = threadStartedEvent + self.gotValueError = 0 + self.gotDisconnected = 0 + threading.Thread.__init__(self) + self.setDaemon(1) + + def join(self): + threading.Thread.join(self, 10) + assert not self.isAlive() + + +class GetsThroughVoteThread(BasicThread): + # This thread gets partially through a transaction before it turns + # execution over to another thread. We're trying to establish that a + # tpc_finish() after a storage has been closed by another thread will get + # a ClientStorageError error. + # + # This class gets does a tpc_begin(), store(), tpc_vote() and is waiting + # to do the tpc_finish() when the other thread closes the storage. 
+ def run(self): + self.storage.tpc_begin(self.trans) + oid = self.storage.new_oid() + self.storage.store(oid, ZERO, zodb_pickle(MinPO("c")), '', self.trans) + self.storage.tpc_vote(self.trans) + self.threadStartedEvent.set() + self.doNextEvent.wait(10) + try: + self.storage.tpc_finish(self.trans) + except ZEO.Exceptions.ClientStorageError: + self.gotValueError = 1 + self.storage.tpc_abort(self.trans) + + +class GetsThroughBeginThread(BasicThread): + # This class is like the above except that it is intended to be run when + # another thread is already in a tpc_begin(). Thus, this thread will + # block in the tpc_begin until another thread closes the storage. When + # that happens, this one will get disconnected too. + def run(self): + try: + self.storage.tpc_begin(self.trans) + except ZEO.Exceptions.ClientStorageError: + self.gotValueError = 1 + + +class ThreadTests(object): + # Thread 1 should start a transaction, but not get all the way through it. + # Main thread should close the connection. Thread 1 should then get + # disconnected. + def checkDisconnectedOnThread2Close(self): + doNextEvent = threading.Event() + threadStartedEvent = threading.Event() + thread1 = GetsThroughVoteThread(self._storage, + doNextEvent, threadStartedEvent) + thread1.start() + threadStartedEvent.wait(10) + self._storage.close() + doNextEvent.set() + thread1.join() + self.assertEqual(thread1.gotValueError, 1) + + # Thread 1 should start a transaction, but not get all the way through + # it. While thread 1 is in the middle of the transaction, a second thread + # should start a transaction, and it will block in the tcp_begin() -- + # because thread 1 has acquired the lock in its tpc_begin(). Now the main + # thread closes the storage and both sub-threads should get disconnected. 
+ def checkSecondBeginFails(self): + doNextEvent = threading.Event() + threadStartedEvent = threading.Event() + thread1 = GetsThroughVoteThread(self._storage, + doNextEvent, threadStartedEvent) + thread2 = GetsThroughBeginThread(self._storage, + doNextEvent, threadStartedEvent) + thread1.start() + threadStartedEvent.wait(1) + thread2.start() + self._storage.close() + doNextEvent.set() + thread1.join() + thread2.join() + self.assertEqual(thread1.gotValueError, 1) + self.assertEqual(thread2.gotValueError, 1) + + # Run a bunch of threads doing small and large stores in parallel + def checkMTStores(self): + threads = [] + for i in range(5): + t = threading.Thread(target=self.mtstorehelper) + threads.append(t) + t.start() + for t in threads: + t.join(30) + for i in threads: + self.assertFalse(t.isAlive()) + + # Helper for checkMTStores + def mtstorehelper(self): + name = threading.currentThread().getName() + objs = [] + for i in range(10): + objs.append(MinPO("X" * 200000)) + objs.append(MinPO("X")) + for obj in objs: + self._dostore(data=obj) diff --git a/thesisenv/lib/python3.6/site-packages/ZEO/tests/ZEO4/README.rst b/thesisenv/lib/python3.6/site-packages/ZEO/tests/ZEO4/README.rst new file mode 100644 index 0000000..701e8b8 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO/tests/ZEO4/README.rst @@ -0,0 +1,10 @@ +====================== +Copy of ZEO 4 server +====================== + +This copy was made by first converting the ZEO 4 server code to use +relative imports. The code was tested with ZEO 4 before copying. It +was unchanged aside from the relative imports. + +The ZEO 4 server is used for tests if the ZEO4_SERVER environment +variable is set to a non-empty value. 
diff --git a/thesisenv/lib/python3.6/site-packages/ZEO/tests/ZEO4/StorageServer.py b/thesisenv/lib/python3.6/site-packages/ZEO/tests/ZEO4/StorageServer.py new file mode 100644 index 0000000..d5c3ab4 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO/tests/ZEO4/StorageServer.py @@ -0,0 +1,1637 @@ +############################################################################## +# +# Copyright (c) 2001, 2002, 2003 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## +"""The StorageServer class and the exception that it may raise. + +This server acts as a front-end for one or more real storages, like +file storage or Berkeley storage. + +TODO: Need some basic access control-- a declaration of the methods +exported for invocation by the server. 
+""" +import asyncore +import codecs +import itertools +import logging +import os +import sys +import tempfile +import threading +import time +import warnings +from .zrpc.error import DisconnectedError +import ZODB.blob +import ZODB.event +import ZODB.serialize +import ZODB.TimeStamp +import zope.interface +import six + +from ZEO._compat import Pickler, Unpickler, PY3, BytesIO +from ZEO.Exceptions import AuthError +from .monitor import StorageStats, StatsServer +from .zrpc.connection import ManagedServerConnection, Delay, MTDelay, Result +from .zrpc.server import Dispatcher +from ZODB.Connection import TransactionMetaData +from ZODB.loglevels import BLATHER +from ZODB.POSException import StorageError, StorageTransactionError +from ZODB.POSException import TransactionError, ReadOnlyError, ConflictError +from ZODB.serialize import referencesf +from ZODB.utils import oid_repr, p64, u64, z64 + +ResolvedSerial = b'rs' + +logger = logging.getLogger('ZEO.StorageServer') + +def log(message, level=logging.INFO, label='', exc_info=False): + """Internal helper to log a message.""" + if label: + message = "(%s) %s" % (label, message) + logger.log(level, message, exc_info=exc_info) + + +class StorageServerError(StorageError): + """Error reported when an unpicklable exception is raised.""" + + +class ZEOStorage(object): + """Proxy to underlying storage for a single remote client.""" + + # A list of extension methods. A subclass with extra methods + # should override. 
+ extensions = [] + + def __init__(self, server, read_only=0, auth_realm=None): + self.server = server + # timeout and stats will be initialized in register() + self.stats = None + self.connection = None + self.client = None + self.storage = None + self.storage_id = "uninitialized" + self.transaction = None + self.read_only = read_only + self.log_label = 'unconnected' + self.locked = False # Don't have storage lock + self.verifying = 0 + self.store_failed = 0 + self.authenticated = 0 + self.auth_realm = auth_realm + self.blob_tempfile = None + # The authentication protocol may define extra methods. + self._extensions = {} + for func in self.extensions: + self._extensions[func.__name__] = None + self._iterators = {} + self._iterator_ids = itertools.count() + # Stores the last item that was handed out for a + # transaction iterator. + self._txn_iterators_last = {} + + def _finish_auth(self, authenticated): + if not self.auth_realm: + return 1 + self.authenticated = authenticated + return authenticated + + def set_database(self, database): + self.database = database + + def notifyConnected(self, conn): + self.connection = conn + assert conn.peer_protocol_version is not None + if conn.peer_protocol_version < b'Z309': + self.client = ClientStub308(conn) + conn.register_object(ZEOStorage308Adapter(self)) + else: + self.client = ClientStub(conn) + self.log_label = _addr_label(conn.addr) + + def notifyDisconnected(self): + # When this storage closes, we must ensure that it aborts + # any pending transaction. 
+ if self.transaction is not None: + self.log("disconnected during %s transaction" + % (self.locked and 'locked' or 'unlocked')) + self.tpc_abort(self.transaction.id) + else: + self.log("disconnected") + + self.connection = None + + def __repr__(self): + tid = self.transaction and repr(self.transaction.id) + if self.storage: + stid = (self.tpc_transaction() and + repr(self.tpc_transaction().id)) + else: + stid = None + name = self.__class__.__name__ + return "<%s %X trans=%s s_trans=%s>" % (name, id(self), tid, stid) + + def log(self, msg, level=logging.INFO, exc_info=False): + log(msg, level=level, label=self.log_label, exc_info=exc_info) + + def setup_delegation(self): + """Delegate several methods to the storage + """ + # Called from register + + storage = self.storage + + info = self.get_info() + + if not info['supportsUndo']: + self.undoLog = self.undoInfo = lambda *a,**k: () + + self.getTid = storage.getTid + self.load = storage.load + self.loadSerial = storage.loadSerial + record_iternext = getattr(storage, 'record_iternext', None) + if record_iternext is not None: + self.record_iternext = record_iternext + + try: + fn = storage.getExtensionMethods + except AttributeError: + pass # no extension methods + else: + d = fn() + self._extensions.update(d) + for name in d: + assert not hasattr(self, name) + setattr(self, name, getattr(storage, name)) + self.lastTransaction = storage.lastTransaction + + try: + self.tpc_transaction = storage.tpc_transaction + except AttributeError: + if hasattr(storage, '_transaction'): + log("Storage %r doesn't have a tpc_transaction method.\n" + "See ZEO.interfaces.IServeable." + "Falling back to using _transaction attribute, which\n." + "is icky.", + logging.ERROR) + self.tpc_transaction = lambda : storage._transaction + else: + raise + + def history(self,tid,size=1): + # This caters for storages which still accept + # a version parameter. 
+ return self.storage.history(tid,size=size) + + def _check_tid(self, tid, exc=None): + if self.read_only: + raise ReadOnlyError() + if self.transaction is None: + caller = sys._getframe().f_back.f_code.co_name + self.log("no current transaction: %s()" % caller, + level=logging.WARNING) + if exc is not None: + raise exc(None, tid) + else: + return 0 + if self.transaction.id != tid: + caller = sys._getframe().f_back.f_code.co_name + self.log("%s(%s) invalid; current transaction = %s" % + (caller, repr(tid), repr(self.transaction.id)), + logging.WARNING) + if exc is not None: + raise exc(self.transaction.id, tid) + else: + return 0 + return 1 + + def getAuthProtocol(self): + """Return string specifying name of authentication module to use. + + The module name should be auth_%s where %s is auth_protocol.""" + protocol = self.server.auth_protocol + if not protocol or protocol == 'none': + return None + return protocol + + def register(self, storage_id, read_only): + """Select the storage that this client will use + + This method must be the first one called by the client. + For authenticated storages this method will be called by the client + immediately after authentication is finished. 
+ """ + if self.auth_realm and not self.authenticated: + raise AuthError("Client was never authenticated with server!") + + if self.storage is not None: + self.log("duplicate register() call") + raise ValueError("duplicate register() call") + + storage = self.server.storages.get(storage_id) + if storage is None: + self.log("unknown storage_id: %s" % storage_id) + raise ValueError("unknown storage: %s" % storage_id) + + if not read_only and (self.read_only or storage.isReadOnly()): + raise ReadOnlyError() + + self.read_only = self.read_only or read_only + self.storage_id = storage_id + self.storage = storage + self.setup_delegation() + self.stats = self.server.register_connection(storage_id, self) + + def get_info(self): + storage = self.storage + + + supportsUndo = (getattr(storage, 'supportsUndo', lambda : False)() + and self.connection.peer_protocol_version >= b'Z310') + + # Communicate the backend storage interfaces to the client + storage_provides = zope.interface.providedBy(storage) + interfaces = [] + for candidate in storage_provides.__iro__: + interfaces.append((candidate.__module__, candidate.__name__)) + + return {'length': len(storage), + 'size': storage.getSize(), + 'name': storage.getName(), + 'supportsUndo': supportsUndo, + 'extensionMethods': self.getExtensionMethods(), + 'supports_record_iternext': hasattr(self, 'record_iternext'), + 'interfaces': tuple(interfaces), + } + + def get_size_info(self): + return {'length': len(self.storage), + 'size': self.storage.getSize(), + } + + def getExtensionMethods(self): + return self._extensions + + def loadEx(self, oid): + self.stats.loads += 1 + return self.storage.load(oid, '') + + def loadBefore(self, oid, tid): + self.stats.loads += 1 + return self.storage.loadBefore(oid, tid) + + def getInvalidations(self, tid): + invtid, invlist = self.server.get_invalidations(self.storage_id, tid) + if invtid is None: + return None + self.log("Return %d invalidations up to tid %s" + % (len(invlist), u64(invtid))) + 
return invtid, invlist + + def verify(self, oid, tid): + try: + t = self.getTid(oid) + except KeyError: + self.client.invalidateVerify(oid) + else: + if tid != t: + self.client.invalidateVerify(oid) + + def zeoVerify(self, oid, s): + if not self.verifying: + self.verifying = 1 + self.stats.verifying_clients += 1 + try: + os = self.getTid(oid) + except KeyError: + self.client.invalidateVerify((oid, '')) + # It's not clear what we should do now. The KeyError + # could be caused by an object uncreation, in which case + # invalidation is right. It could be an application bug + # that left a dangling reference, in which case it's bad. + else: + if s != os: + self.client.invalidateVerify((oid, '')) + + def endZeoVerify(self): + if self.verifying: + self.stats.verifying_clients -= 1 + self.verifying = 0 + self.client.endVerify() + + def pack(self, time, wait=1): + # Yes, you can pack a read-only server or storage! + if wait: + return run_in_thread(self._pack_impl, time) + else: + # If the client isn't waiting for a reply, start a thread + # and forget about it. + t = threading.Thread(target=self._pack_impl, args=(time,)) + t.setName("zeo storage packing thread") + t.start() + return None + + def _pack_impl(self, time): + self.log("pack(time=%s) started..." 
% repr(time)) + self.storage.pack(time, referencesf) + self.log("pack(time=%s) complete" % repr(time)) + # Broadcast new size statistics + self.server.invalidate(0, self.storage_id, None, + (), self.get_size_info()) + + def new_oids(self, n=100): + """Return a sequence of n new oids, where n defaults to 100""" + n = min(n, 100) + if self.read_only: + raise ReadOnlyError() + if n <= 0: + n = 1 + return [self.storage.new_oid() for i in range(n)] + + # undoLog and undoInfo are potentially slow methods + + def undoInfo(self, first, last, spec): + return run_in_thread(self.storage.undoInfo, first, last, spec) + + def undoLog(self, first, last): + return run_in_thread(self.storage.undoLog, first, last) + + def tpc_begin(self, id, user, description, ext, tid=None, status=" "): + if self.read_only: + raise ReadOnlyError() + if self.transaction is not None: + if self.transaction.id == id: + self.log("duplicate tpc_begin(%s)" % repr(id)) + return + else: + raise StorageTransactionError("Multiple simultaneous tpc_begin" + " requests from one client.") + + t = TransactionMetaData(user, description, ext) + t.id = id + + self.serials = [] + self.invalidated = [] + self.txnlog = CommitLog() + self.blob_log = [] + self.tid = tid + self.status = status + self.store_failed = 0 + self.stats.active_txns += 1 + + # Assign the transaction attribute last. This is so we don't + # think we've entered TPC until everything is set. Why? + # Because if we have an error after this, the server will + # think it is in TPC and the client will think it isn't. At + # that point, the client will keep trying to enter TPC and + # server won't let it. Errors *after* the tpc_begin call will + # cause the client to abort the transaction. + # (Also see https://bugs.launchpad.net/zodb/+bug/374737.) 
+ self.transaction = t + + def tpc_finish(self, id): + if not self._check_tid(id): + return + assert self.locked, "finished called wo lock" + + self.stats.commits += 1 + self.storage.tpc_finish(self.transaction, self._invalidate) + # Note that the tid is still current because we still hold the + # commit lock. We'll relinquish it in _clear_transaction. + tid = self.storage.lastTransaction() + # Return the tid, for cache invalidation optimization + return Result(tid, self._clear_transaction) + + def _invalidate(self, tid): + if self.invalidated: + self.server.invalidate(self, self.storage_id, tid, + self.invalidated, self.get_size_info()) + + def tpc_abort(self, tid): + if not self._check_tid(tid): + return + self.stats.aborts += 1 + self.storage.tpc_abort(self.transaction) + self._clear_transaction() + + def _clear_transaction(self): + # Common code at end of tpc_finish() and tpc_abort() + if self.locked: + self.server.unlock_storage(self) + self.locked = 0 + if self.transaction is not None: + self.server.stop_waiting(self) + self.transaction = None + self.stats.active_txns -= 1 + if self.txnlog is not None: + self.txnlog.close() + self.txnlog = None + for oid, oldserial, data, blobfilename in self.blob_log: + ZODB.blob.remove_committed(blobfilename) + del self.blob_log + + def vote(self, tid): + self._check_tid(tid, exc=StorageTransactionError) + if self.locked or self.server.already_waiting(self): + raise StorageTransactionError( + 'Already voting (%s)' % (self.locked and 'locked' or 'waiting') + ) + return self._try_to_vote() + + def _try_to_vote(self, delay=None): + if self.connection is None: + return # We're disconnected + if delay is not None and delay.sent: + # as a consequence of the unlocking strategy, _try_to_vote + # may be called multiple times for delayed + # transactions. The first call will mark the delay as + # sent. We should skip if the delay was already sent. 
+ return + self.locked, delay = self.server.lock_storage(self, delay) + if self.locked: + try: + self.log( + "Preparing to commit transaction: %d objects, %d bytes" + % (self.txnlog.stores, self.txnlog.size()), + level=BLATHER) + + if (self.tid is not None) or (self.status != ' '): + self.storage.tpc_begin(self.transaction, + self.tid, self.status) + else: + self.storage.tpc_begin(self.transaction) + + for op, args in self.txnlog: + if not getattr(self, op)(*args): + break + + + # Blob support + while self.blob_log and not self.store_failed: + oid, oldserial, data, blobfilename = self.blob_log.pop() + self._store(oid, oldserial, data, blobfilename) + + if not self.store_failed: + # Only call tpc_vote of no store call failed, + # otherwise the serialnos() call will deliver an + # exception that will be handled by the client in + # its tpc_vote() method. + serials = self.storage.tpc_vote(self.transaction) + if serials: + self.serials.extend(serials) + + self.client.serialnos(self.serials) + + except Exception: + self.storage.tpc_abort(self.transaction) + self._clear_transaction() + if delay is not None: + delay.error(sys.exc_info()) + else: + raise + else: + if delay is not None: + delay.reply(None) + else: + return None + + else: + return delay + + def _unlock_callback(self, delay): + connection = self.connection + if connection is None: + self.server.stop_waiting(self) + else: + connection.call_from_thread(self._try_to_vote, delay) + + # The public methods of the ZEO client API do not do the real work. + # They defer work until after the storage lock has been acquired. + # Most of the real implementations are in methods beginning with + # an _. 
+ + def deleteObject(self, oid, serial, id): + self._check_tid(id, exc=StorageTransactionError) + self.stats.stores += 1 + self.txnlog.delete(oid, serial) + + def storea(self, oid, serial, data, id): + self._check_tid(id, exc=StorageTransactionError) + self.stats.stores += 1 + self.txnlog.store(oid, serial, data) + + def checkCurrentSerialInTransaction(self, oid, serial, id): + self._check_tid(id, exc=StorageTransactionError) + self.txnlog.checkread(oid, serial) + + def restorea(self, oid, serial, data, prev_txn, id): + self._check_tid(id, exc=StorageTransactionError) + self.stats.stores += 1 + self.txnlog.restore(oid, serial, data, prev_txn) + + def storeBlobStart(self): + assert self.blob_tempfile is None + self.blob_tempfile = tempfile.mkstemp( + dir=self.storage.temporaryDirectory()) + + def storeBlobChunk(self, chunk): + os.write(self.blob_tempfile[0], chunk) + + def storeBlobEnd(self, oid, serial, data, id): + self._check_tid(id, exc=StorageTransactionError) + assert self.txnlog is not None # effectively not allowed after undo + fd, tempname = self.blob_tempfile + self.blob_tempfile = None + os.close(fd) + self.blob_log.append((oid, serial, data, tempname)) + + def storeBlobShared(self, oid, serial, data, filename, id): + self._check_tid(id, exc=StorageTransactionError) + assert self.txnlog is not None # effectively not allowed after undo + + # Reconstruct the full path from the filename in the OID directory + if (os.path.sep in filename + or not (filename.endswith('.tmp') + or filename[:-1].endswith('.tmp') + ) + ): + logger.critical( + "We're under attack! 
(bad filename to storeBlobShared, %r)", + filename) + raise ValueError(filename) + + filename = os.path.join(self.storage.fshelper.getPathForOID(oid), + filename) + self.blob_log.append((oid, serial, data, filename)) + + def sendBlob(self, oid, serial): + self.client.storeBlob(oid, serial, self.storage.loadBlob(oid, serial)) + + def undo(*a, **k): + raise NotImplementedError + + def undoa(self, trans_id, tid): + self._check_tid(tid, exc=StorageTransactionError) + self.txnlog.undo(trans_id) + + def _op_error(self, oid, err, op): + self.store_failed = 1 + if isinstance(err, ConflictError): + self.stats.conflicts += 1 + self.log("conflict error oid=%s msg=%s" % + (oid_repr(oid), str(err)), BLATHER) + if not isinstance(err, TransactionError): + # Unexpected errors are logged and passed to the client + self.log("%s error: %s, %s" % ((op,)+ sys.exc_info()[:2]), + logging.ERROR, exc_info=True) + err = self._marshal_error(err) + # The exception is reported back as newserial for this oid + self.serials.append((oid, err)) + + def _delete(self, oid, serial): + err = None + try: + self.storage.deleteObject(oid, serial, self.transaction) + except (SystemExit, KeyboardInterrupt): + raise + except Exception as e: + err = e + self._op_error(oid, err, 'delete') + + return err is None + + def _checkread(self, oid, serial): + err = None + try: + self.storage.checkCurrentSerialInTransaction( + oid, serial, self.transaction) + except (SystemExit, KeyboardInterrupt): + raise + except Exception as e: + err = e + self._op_error(oid, err, 'checkCurrentSerialInTransaction') + + return err is None + + def _store(self, oid, serial, data, blobfile=None): + err = None + try: + if blobfile is None: + newserial = self.storage.store( + oid, serial, data, '', self.transaction) + else: + newserial = self.storage.storeBlob( + oid, serial, data, blobfile, '', self.transaction) + except (SystemExit, KeyboardInterrupt): + raise + except Exception as error: + self._op_error(oid, error, 'store') + err = 
error + else: + if serial != b"\0\0\0\0\0\0\0\0": + self.invalidated.append(oid) + + if isinstance(newserial, bytes): + newserial = [(oid, newserial)] + + for oid, s in newserial or (): + + if s == ResolvedSerial: + self.stats.conflicts_resolved += 1 + self.log("conflict resolved oid=%s" + % oid_repr(oid), BLATHER) + + self.serials.append((oid, s)) + + return err is None + + def _restore(self, oid, serial, data, prev_txn): + err = None + try: + self.storage.restore(oid, serial, data, '', prev_txn, + self.transaction) + except (SystemExit, KeyboardInterrupt): + raise + except Exception as err: + self._op_error(oid, err, 'restore') + + return err is None + + def _undo(self, trans_id): + err = None + try: + tid, oids = self.storage.undo(trans_id, self.transaction) + except (SystemExit, KeyboardInterrupt): + raise + except Exception as e: + err = e + self._op_error(z64, err, 'undo') + else: + self.invalidated.extend(oids) + self.serials.extend((oid, ResolvedSerial) for oid in oids) + + return err is None + + def _marshal_error(self, error): + # Try to pickle the exception. If it can't be pickled, + # the RPC response would fail, so use something that can be pickled. 
+ if PY3: + pickler = Pickler(BytesIO(), 3) + else: + # The pure-python version requires at least one argument (PyPy) + pickler = Pickler(0) + pickler.fast = 1 + try: + pickler.dump(error) + except: + msg = "Couldn't pickle storage exception: %s" % repr(error) + self.log(msg, logging.ERROR) + error = StorageServerError(msg) + return error + + # IStorageIteration support + + def iterator_start(self, start, stop): + iid = next(self._iterator_ids) + self._iterators[iid] = iter(self.storage.iterator(start, stop)) + return iid + + def iterator_next(self, iid): + iterator = self._iterators[iid] + try: + info = next(iterator) + except StopIteration: + del self._iterators[iid] + item = None + if iid in self._txn_iterators_last: + del self._txn_iterators_last[iid] + else: + item = (info.tid, + info.status, + info.user, + info.description, + info.extension) + # Keep a reference to the last iterator result to allow starting a + # record iterator off it. + self._txn_iterators_last[iid] = info + return item + + def iterator_record_start(self, txn_iid, tid): + record_iid = next(self._iterator_ids) + txn_info = self._txn_iterators_last[txn_iid] + if txn_info.tid != tid: + raise Exception( + 'Out-of-order request for record iterator for transaction %r' + % tid) + self._iterators[record_iid] = iter(txn_info) + return record_iid + + def iterator_record_next(self, iid): + iterator = self._iterators[iid] + try: + info = next(iterator) + except StopIteration: + del self._iterators[iid] + item = None + else: + item = (info.oid, + info.tid, + info.data, + info.data_txn) + return item + + def iterator_gc(self, iids): + for iid in iids: + self._iterators.pop(iid, None) + + def server_status(self): + return self.server.server_status(self.storage_id) + + def set_client_label(self, label): + self.log_label = str(label)+' '+_addr_label(self.connection.addr) + +class StorageServerDB(object): + + def __init__(self, server, storage_id): + self.server = server + self.storage_id = storage_id + 
self.references = ZODB.serialize.referencesf + + def invalidate(self, tid, oids, version=''): + if version: + raise StorageServerError("Versions aren't supported.") + storage_id = self.storage_id + self.server.invalidate(None, storage_id, tid, oids) + + def invalidateCache(self): + self.server._invalidateCache(self.storage_id) + + transform_record_data = untransform_record_data = lambda self, data: data + +class StorageServer(object): + + """The server side implementation of ZEO. + + The StorageServer is the 'manager' for incoming connections. Each + connection is associated with its own ZEOStorage instance (defined + below). The StorageServer may handle multiple storages; each + ZEOStorage instance only handles a single storage. + """ + + # Classes we instantiate. A subclass might override. + + from .zrpc.server import Dispatcher as DispatcherClass + ZEOStorageClass = ZEOStorage + ManagedServerConnectionClass = ManagedServerConnection + + def __init__(self, addr, storages, + read_only=0, + invalidation_queue_size=100, + invalidation_age=None, + transaction_timeout=None, + monitor_address=None, + auth_protocol=None, + auth_database=None, + auth_realm=None, + ): + """StorageServer constructor. + + This is typically invoked from the start.py script. + + Arguments (the first two are required and positional): + + addr -- the address at which the server should listen. This + can be a tuple (host, port) to signify a TCP/IP connection + or a pathname string to signify a Unix domain socket + connection. A hostname may be a DNS name or a dotted IP + address. + + storages -- a dictionary giving the storage(s) to handle. The + keys are the storage names, the values are the storage + instances, typically FileStorage or Berkeley storage + instances. By convention, storage names are typically + strings representing small integers starting at '1'. + + read_only -- an optional flag saying whether the server should + operate in read-only mode. Defaults to false. 
Note that + even if the server is operating in writable mode, + individual storages may still be read-only. But if the + server is in read-only mode, no write operations are + allowed, even if the storages are writable. Note that + pack() is considered a read-only operation. + + invalidation_queue_size -- The storage server keeps a queue + of the objects modified by the last N transactions, where + N == invalidation_queue_size. This queue is used to + speed client cache verification when a client disconnects + for a short period of time. + + invalidation_age -- + If the invalidation queue isn't big enough to support a + quick verification, but the last transaction seen by a + client is younger than the invalidation age, then + invalidations will be computed by iterating over + transactions later than the given transaction. + + transaction_timeout -- The maximum amount of time to wait for + a transaction to commit after acquiring the storage lock. + If the transaction takes too long, the client connection + will be closed and the transaction aborted. + + monitor_address -- The address at which the monitor server + should listen. If specified, a monitor server is started. + The monitor server provides server statistics in a simple + text format. + + auth_protocol -- The name of the authentication protocol to use. + Examples are "digest" and "srp". + + auth_database -- The name of the password database filename. + It should be in a format compatible with the authentication + protocol used; for instance, "sha" and "srp" require different + formats. + + Note that to implement an authentication protocol, a server + and client authentication mechanism must be implemented in a + auth_* module, which should be stored inside the "auth" + subdirectory. This module may also define a DatabaseClass + variable that should indicate what database should be used + by the authenticator. 
+ """ + + self.addr = addr + self.storages = storages + msg = ", ".join( + ["%s:%s:%s" % (name, storage.isReadOnly() and "RO" or "RW", + storage.getName()) + for name, storage in storages.items()]) + log("%s created %s with storages: %s" % + (self.__class__.__name__, read_only and "RO" or "RW", msg)) + + + self._lock = threading.Lock() + self._commit_locks = {} + self._waiting = dict((name, []) for name in storages) + + self.read_only = read_only + self.auth_protocol = auth_protocol + self.auth_database = auth_database + self.auth_realm = auth_realm + self.database = None + if auth_protocol: + self._setup_auth(auth_protocol) + # A list, by server, of at most invalidation_queue_size invalidations. + # The list is kept in sorted order with the most recent + # invalidation at the front. The list never has more than + # self.invq_bound elements. + self.invq_bound = invalidation_queue_size + self.invq = {} + for name, storage in storages.items(): + self._setup_invq(name, storage) + storage.registerDB(StorageServerDB(self, name)) + self.invalidation_age = invalidation_age + self.connections = {} + self.socket_map = {} + self.dispatcher = self.DispatcherClass( + addr, factory=self.new_connection, map=self.socket_map) + if len(self.addr) == 2 and self.addr[1] == 0 and self.addr[0]: + self.addr = self.dispatcher.socket.getsockname() + ZODB.event.notify( + Serving(self, address=self.dispatcher.socket.getsockname())) + self.stats = {} + self.timeouts = {} + for name in self.storages.keys(): + self.connections[name] = [] + self.stats[name] = StorageStats(self.connections[name]) + if transaction_timeout is None: + # An object with no-op methods + timeout = StubTimeoutThread() + else: + timeout = TimeoutThread(transaction_timeout) + timeout.setName("TimeoutThread for %s" % name) + timeout.start() + self.timeouts[name] = timeout + if monitor_address: + warnings.warn( + "The monitor server is deprecated. 
Use the server_status\n" + "ZEO method instead.", + DeprecationWarning) + self.monitor = StatsServer(monitor_address, self.stats) + else: + self.monitor = None + + def _setup_invq(self, name, storage): + lastInvalidations = getattr(storage, 'lastInvalidations', None) + if lastInvalidations is None: + # Using None below doesn't look right, but the first + # element in invq is never used. See get_invalidations. + # (If it was used, it would generate an error, which would + # be good. :) Doing this allows clients that were up to + # date when a server was restarted to pick up transactions + # it subsequently missed. + self.invq[name] = [(storage.lastTransaction() or z64, None)] + else: + self.invq[name] = list(lastInvalidations(self.invq_bound)) + self.invq[name].reverse() + + + def _setup_auth(self, protocol): + # Can't be done in global scope, because of cyclic references + from .auth import get_module + + name = self.__class__.__name__ + + module = get_module(protocol) + if not module: + log("%s: no such an auth protocol: %s" % (name, protocol)) + return + + storage_class, client, db_class = module + + if not storage_class or not issubclass(storage_class, ZEOStorage): + log(("%s: %s isn't a valid protocol, must have a StorageClass" % + (name, protocol))) + self.auth_protocol = None + return + self.ZEOStorageClass = storage_class + + log("%s: using auth protocol: %s" % (name, protocol)) + + # We create a Database instance here for use with the authenticator + # modules. Having one instance allows it to be shared between multiple + # storages, avoiding the need to bloat each with a new authenticator + # Database that would contain the same info, and also avoiding any + # possibly synchronization issues between them. 
+ self.database = db_class(self.auth_database) + if self.database.realm != self.auth_realm: + raise ValueError("password database realm %r " + "does not match storage realm %r" + % (self.database.realm, self.auth_realm)) + + + def new_connection(self, sock, addr): + """Internal: factory to create a new connection. + + This is called by the Dispatcher class in ZEO.zrpc.server + whenever accept() returns a socket for a new incoming + connection. + """ + if self.auth_protocol and self.database: + zstorage = self.ZEOStorageClass(self, self.read_only, + auth_realm=self.auth_realm) + zstorage.set_database(self.database) + else: + zstorage = self.ZEOStorageClass(self, self.read_only) + + c = self.ManagedServerConnectionClass(sock, addr, zstorage, self) + log("new connection %s: %s" % (addr, repr(c)), logging.DEBUG) + return c + + def register_connection(self, storage_id, conn): + """Internal: register a connection with a particular storage. + + This is called by ZEOStorage.register(). + + The dictionary self.connections maps each storage name to a + list of current connections for that storage; this information + is needed to handle invalidation. This function updates this + dictionary. + + Returns the timeout and stats objects for the appropriate storage. + """ + self.connections[storage_id].append(conn) + return self.stats[storage_id] + + def _invalidateCache(self, storage_id): + """We need to invalidate any caches we have. + + This basically means telling our clients to + invalidate/revalidate their caches. We do this by closing them + and making them reconnect. + """ + + # This method can be called from foreign threads. We have to + # worry about interaction with the main thread. + + # 1. We modify self.invq which is read by get_invalidations + # below. This is why get_invalidations makes a copy of + # self.invq. + + # 2. We access connections. There are two dangers: + # + # a. We miss a new connection. 
This is not a problem because + # if a client connects after we get the list of connections, + # then it will have to read the invalidation queue, which + # has already been reset. + # + # b. A connection is closes while we are iterating. This + # doesn't matter, bacause we can call should_close on a closed + # connection. + + # Rebuild invq + self._setup_invq(storage_id, self.storages[storage_id]) + + # Make a copy since we are going to be mutating the + # connections indirectoy by closing them. We don't care about + # later transactions since they will have to validate their + # caches anyway. + for p in self.connections[storage_id][:]: + try: + p.connection.should_close() + p.connection.trigger.pull_trigger() + except DisconnectedError: + pass + + + def invalidate(self, conn, storage_id, tid, invalidated=(), info=None): + """Internal: broadcast info and invalidations to clients. + + This is called from several ZEOStorage methods. + + invalidated is a sequence of oids. + + This can do three different things: + + - If the invalidated argument is non-empty, it broadcasts + invalidateTransaction() messages to all clients of the given + storage except the current client (the conn argument). + + - If the invalidated argument is empty and the info argument + is a non-empty dictionary, it broadcasts info() messages to + all clients of the given storage, including the current + client. + + - If both the invalidated argument and the info argument are + non-empty, it broadcasts invalidateTransaction() messages to all + clients except the current, and sends an info() message to + the current client. + + """ + + # This method can be called from foreign threads. We have to + # worry about interaction with the main thread. + + # 1. We modify self.invq which is read by get_invalidations + # below. This is why get_invalidations makes a copy of + # self.invq. + + # 2. We access connections. There are two dangers: + # + # a. We miss a new connection. 
This is not a problem because + # we are called while the storage lock is held. A new + # connection that tries to read data won't read committed + # data without first recieving an invalidation. Also, if a + # client connects after getting the list of connections, + # then it will have to read the invalidation queue, which + # has been updated to reflect the invalidations. + # + # b. A connection is closes while we are iterating. We'll need + # to cactch and ignore Disconnected errors. + + + if invalidated: + invq = self.invq[storage_id] + if len(invq) >= self.invq_bound: + invq.pop() + invq.insert(0, (tid, invalidated)) + + for p in self.connections[storage_id]: + try: + if invalidated and p is not conn: + p.client.invalidateTransaction(tid, invalidated) + elif info is not None: + p.client.info(info) + except DisconnectedError: + pass + + def get_invalidations(self, storage_id, tid): + """Return a tid and list of all objects invalidation since tid. + + The tid is the most recent transaction id seen by the client. + + Returns None if it is unable to provide a complete list + of invalidations for tid. In this case, client should + do full cache verification. + """ + + # We make a copy of invq because it might be modified by a + # foreign (other than main thread) calling invalidate above. 
+ invq = self.invq[storage_id][:] + + oids = set() + latest_tid = None + if invq and invq[-1][0] <= tid: + # We have needed data in the queue + for _tid, L in invq: + if _tid <= tid: + break + oids.update(L) + latest_tid = invq[0][0] + elif (self.invalidation_age and + (self.invalidation_age > + (time.time()-ZODB.TimeStamp.TimeStamp(tid).timeTime()) + ) + ): + for t in self.storages[storage_id].iterator(p64(u64(tid)+1)): + for r in t: + oids.add(r.oid) + latest_tid = t.tid + elif not invq: + log("invq empty") + else: + log("tid to old for invq %s < %s" % (u64(tid), u64(invq[-1][0]))) + + return latest_tid, list(oids) + + def loop(self, timeout=30): + try: + asyncore.loop(timeout, map=self.socket_map) + except Exception: + if not self.__closed: + raise # Unexpected exc + + __thread = None + def start_thread(self, daemon=True): + self.__thread = thread = threading.Thread(target=self.loop) + thread.setName("StorageServer(%s)" % _addr_label(self.addr)) + thread.setDaemon(daemon) + thread.start() + + __closed = False + def close(self, join_timeout=1): + """Close the dispatcher so that there are no new connections. + + This is only called from the test suite, AFAICT. + """ + if self.__closed: + return + self.__closed = True + + # Stop accepting connections + self.dispatcher.close() + if self.monitor is not None: + self.monitor.close() + + ZODB.event.notify(Closed(self)) + + # Close open client connections + for sid, connections in self.connections.items(): + for conn in connections[:]: + try: + conn.connection.close() + except: + pass + + for name, storage in six.iteritems(self.storages): + logger.info("closing storage %r", name) + storage.close() + + if self.__thread is not None: + self.__thread.join(join_timeout) + + def close_conn(self, conn): + """Internal: remove the given connection from self.connections. + + This is the inverse of register_connection(). 
+ """ + for cl in self.connections.values(): + if conn.obj in cl: + cl.remove(conn.obj) + + def lock_storage(self, zeostore, delay): + storage_id = zeostore.storage_id + waiting = self._waiting[storage_id] + with self._lock: + + if storage_id in self._commit_locks: + # The lock is held by another zeostore + + locked = self._commit_locks[storage_id] + + assert locked is not zeostore, (storage_id, delay) + + if locked.connection is None: + locked.log("Still locked after disconnected. Unlocking.", + logging.CRITICAL) + if locked.transaction: + locked.storage.tpc_abort(locked.transaction) + del self._commit_locks[storage_id] + # yuck: have to manipulate lock to appease with :( + self._lock.release() + try: + return self.lock_storage(zeostore, delay) + finally: + self._lock.acquire() + + if delay is None: + # New request, queue it + assert not [i for i in waiting if i[0] is zeostore + ], "already waiting" + delay = Delay() + waiting.append((zeostore, delay)) + zeostore.log("(%r) queue lock: transactions waiting: %s" + % (storage_id, len(waiting)), + _level_for_waiting(waiting) + ) + + return False, delay + else: + self._commit_locks[storage_id] = zeostore + self.timeouts[storage_id].begin(zeostore) + self.stats[storage_id].lock_time = time.time() + if delay is not None: + # we were waiting, stop + waiting[:] = [i for i in waiting if i[0] is not zeostore] + zeostore.log("(%r) lock: transactions waiting: %s" + % (storage_id, len(waiting)), + _level_for_waiting(waiting) + ) + return True, delay + + def unlock_storage(self, zeostore): + storage_id = zeostore.storage_id + waiting = self._waiting[storage_id] + with self._lock: + assert self._commit_locks[storage_id] is zeostore + del self._commit_locks[storage_id] + self.timeouts[storage_id].end(zeostore) + self.stats[storage_id].lock_time = None + callbacks = waiting[:] + + if callbacks: + assert not [i for i in waiting if i[0] is zeostore + ], "waiting while unlocking" + zeostore.log("(%r) unlock: transactions waiting: %s" 
+ % (storage_id, len(callbacks)), + _level_for_waiting(callbacks) + ) + + for zeostore, delay in callbacks: + try: + zeostore._unlock_callback(delay) + except (SystemExit, KeyboardInterrupt): + raise + except Exception: + logger.exception("Calling unlock callback") + + + def stop_waiting(self, zeostore): + storage_id = zeostore.storage_id + waiting = self._waiting[storage_id] + with self._lock: + new_waiting = [i for i in waiting if i[0] is not zeostore] + if len(new_waiting) == len(waiting): + return + waiting[:] = new_waiting + + zeostore.log("(%r) dequeue lock: transactions waiting: %s" + % (storage_id, len(waiting)), + _level_for_waiting(waiting) + ) + + def already_waiting(self, zeostore): + storage_id = zeostore.storage_id + waiting = self._waiting[storage_id] + with self._lock: + return bool([i for i in waiting if i[0] is zeostore]) + + def server_status(self, storage_id): + status = self.stats[storage_id].__dict__.copy() + status['connections'] = len(status['connections']) + status['waiting'] = len(self._waiting[storage_id]) + status['timeout-thread-is-alive'] = self.timeouts[storage_id].isAlive() + last_transaction = self.storages[storage_id].lastTransaction() + last_transaction_hex = codecs.encode(last_transaction, 'hex_codec') + if PY3: + # doctests and maybe clients expect a str, not bytes + last_transaction_hex = str(last_transaction_hex, 'ascii') + status['last-transaction'] = last_transaction_hex + return status + + def ruok(self): + return dict((storage_id, self.server_status(storage_id)) + for storage_id in self.storages) + +def _level_for_waiting(waiting): + if len(waiting) > 9: + return logging.CRITICAL + if len(waiting) > 3: + return logging.WARNING + else: + return logging.DEBUG + +class StubTimeoutThread(object): + + def begin(self, client): + pass + + def end(self, client): + pass + + isAlive = lambda self: 'stub' + + +class TimeoutThread(threading.Thread): + """Monitors transaction progress and generates timeouts.""" + + # There is one 
TimeoutThread per storage, because there's one + # transaction lock per storage. + + def __init__(self, timeout): + threading.Thread.__init__(self) + self.setName("TimeoutThread") + self.setDaemon(1) + self._timeout = timeout + self._client = None + self._deadline = None + self._cond = threading.Condition() # Protects _client and _deadline + + def begin(self, client): + # Called from the restart code the "main" thread, whenever the + # storage lock is being acquired. (Serialized by asyncore.) + with self._cond: + assert self._client is None + self._client = client + self._deadline = time.time() + self._timeout + self._cond.notify() + + def end(self, client): + # Called from the "main" thread whenever the storage lock is + # being released. (Serialized by asyncore.) + with self._cond: + assert self._client is not None + assert self._client is client + self._client = None + self._deadline = None + + def run(self): + # Code running in the thread. + while 1: + with self._cond: + while self._deadline is None: + self._cond.wait() + howlong = self._deadline - time.time() + if howlong <= 0: + # Prevent reporting timeout more than once + self._deadline = None + client = self._client # For the howlong <= 0 branch below + + if howlong <= 0: + client.log("Transaction timeout after %s seconds" % + self._timeout, logging.CRITICAL) + try: + client.connection.call_from_thread(client.connection.close) + except: + client.log("Timeout failure", logging.CRITICAL, + exc_info=sys.exc_info()) + self.end(client) + else: + time.sleep(howlong) + + +def run_in_thread(method, *args): + t = SlowMethodThread(method, args) + t.start() + return t.delay + + +class SlowMethodThread(threading.Thread): + """Thread to run potentially slow storage methods. + + Clients can use the delay attribute to access the MTDelay object + used to send a zrpc response at the right time. + """ + + # Some storage methods can take a long time to complete. 
If we + # run these methods via a standard asyncore read handler, they + # will block all other server activity until they complete. To + # avoid blocking, we spawn a separate thread, return an MTDelay() + # object, and have the thread reply() when it finishes. + + def __init__(self, method, args): + threading.Thread.__init__(self) + self.setName("SlowMethodThread for %s" % method.__name__) + self._method = method + self._args = args + self.delay = MTDelay() + + def run(self): + try: + result = self._method(*self._args) + except (SystemExit, KeyboardInterrupt): + raise + except Exception: + self.delay.error(sys.exc_info()) + else: + self.delay.reply(result) + + +class ClientStub(object): + + def __init__(self, rpc): + self.rpc = rpc + + def beginVerify(self): + self.rpc.callAsync('beginVerify') + + def invalidateVerify(self, args): + self.rpc.callAsync('invalidateVerify', args) + + def endVerify(self): + self.rpc.callAsync('endVerify') + + def invalidateTransaction(self, tid, args): + # Note that this method is *always* called from a different + # thread than self.rpc's async thread. It is the only method + # for which this is true and requires special consideration! + + # callAsyncNoSend is important here because: + # - callAsyncNoPoll isn't appropriate because + # the network thread may not wake up for a long time, + # delaying invalidations for too long. (This is demonstrateed + # by a test failure.) + # - callAsync isn't appropriate because (on the server) it tries + # to write to the socket. If self.rpc's network thread also + # tries to write at the ame time, we can run into problems + # because handle_write isn't thread safe. 
+ self.rpc.callAsyncNoSend('invalidateTransaction', tid, args) + + def serialnos(self, arg): + self.rpc.callAsyncNoPoll('serialnos', arg) + + def info(self, arg): + self.rpc.callAsyncNoPoll('info', arg) + + def storeBlob(self, oid, serial, blobfilename): + + def store(): + yield ('receiveBlobStart', (oid, serial)) + f = open(blobfilename, 'rb') + while 1: + chunk = f.read(59000) + if not chunk: + break + yield ('receiveBlobChunk', (oid, serial, chunk, )) + f.close() + yield ('receiveBlobStop', (oid, serial)) + + self.rpc.callAsyncIterator(store()) + +class ClientStub308(ClientStub): + + def invalidateTransaction(self, tid, args): + ClientStub.invalidateTransaction( + self, tid, [(arg, '') for arg in args]) + + def invalidateVerify(self, oid): + ClientStub.invalidateVerify(self, (oid, '')) + +class ZEOStorage308Adapter(object): + + def __init__(self, storage): + self.storage = storage + + def __eq__(self, other): + return self is other or self.storage is other + + def getSerial(self, oid): + return self.storage.loadEx(oid)[1] # Z200 + + def history(self, oid, version, size=1): + if version: + raise ValueError("Versions aren't supported.") + return self.storage.history(oid, size=size) + + def getInvalidations(self, tid): + result = self.storage.getInvalidations(tid) + if result is not None: + result = result[0], [(oid, '') for oid in result[1]] + return result + + def verify(self, oid, version, tid): + if version: + raise StorageServerError("Versions aren't supported.") + return self.storage.verify(oid, tid) + + def loadEx(self, oid, version=''): + if version: + raise StorageServerError("Versions aren't supported.") + data, serial = self.storage.loadEx(oid) + return data, serial, '' + + def storea(self, oid, serial, data, version, id): + if version: + raise StorageServerError("Versions aren't supported.") + self.storage.storea(oid, serial, data, id) + + def storeBlobEnd(self, oid, serial, data, version, id): + if version: + raise StorageServerError("Versions aren't 
supported.") + self.storage.storeBlobEnd(oid, serial, data, id) + + def storeBlobShared(self, oid, serial, data, filename, version, id): + if version: + raise StorageServerError("Versions aren't supported.") + self.storage.storeBlobShared(oid, serial, data, filename, id) + + def getInfo(self): + result = self.storage.getInfo() + result['supportsVersions'] = False + return result + + def zeoVerify(self, oid, s, sv=None): + if sv: + raise StorageServerError("Versions aren't supported.") + self.storage.zeoVerify(oid, s) + + def modifiedInVersion(self, oid): + return '' + + def versions(self): + return () + + def versionEmpty(self, version): + return True + + def commitVersion(self, *a, **k): + raise NotImplementedError + + abortVersion = commitVersion + + def zeoLoad(self, oid): # Z200 + p, s = self.storage.loadEx(oid) + return p, s, '', None, None + + def __getattr__(self, name): + return getattr(self.storage, name) + +def _addr_label(addr): + if isinstance(addr, six.binary_type): + return addr.decode('ascii') + if isinstance(addr, six.string_types): + return addr + else: + host, port = addr + return str(host) + ":" + str(port) + +class CommitLog(object): + + def __init__(self): + self.file = tempfile.TemporaryFile(suffix=".comit-log") + self.pickler = Pickler(self.file, 1) + self.pickler.fast = 1 + self.stores = 0 + + def size(self): + return self.file.tell() + + def delete(self, oid, serial): + self.pickler.dump(('_delete', (oid, serial))) + self.stores += 1 + + def checkread(self, oid, serial): + self.pickler.dump(('_checkread', (oid, serial))) + self.stores += 1 + + def store(self, oid, serial, data): + self.pickler.dump(('_store', (oid, serial, data))) + self.stores += 1 + + def restore(self, oid, serial, data, prev_txn): + self.pickler.dump(('_restore', (oid, serial, data, prev_txn))) + self.stores += 1 + + def undo(self, transaction_id): + self.pickler.dump(('_undo', (transaction_id, ))) + self.stores += 1 + + def __iter__(self): + self.file.seek(0) + 
unpickler = Unpickler(self.file) + for i in range(self.stores): + yield unpickler.load() + + def close(self): + if self.file: + self.file.close() + self.file = None + +class ServerEvent(object): + + def __init__(self, server, **kw): + self.__dict__.update(kw) + self.server = server + +class Serving(ServerEvent): + pass + +class Closed(ServerEvent): + pass diff --git a/thesisenv/lib/python3.6/site-packages/ZEO/tests/ZEO4/__init__.py b/thesisenv/lib/python3.6/site-packages/ZEO/tests/ZEO4/__init__.py new file mode 100644 index 0000000..792d600 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO/tests/ZEO4/__init__.py @@ -0,0 +1 @@ +# diff --git a/thesisenv/lib/python3.6/site-packages/ZEO/tests/ZEO4/auth/__init__.py b/thesisenv/lib/python3.6/site-packages/ZEO/tests/ZEO4/auth/__init__.py new file mode 100644 index 0000000..b9b796d --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO/tests/ZEO4/auth/__init__.py @@ -0,0 +1,30 @@ +############################################################################## +# +# Copyright (c) 2003 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## + +_auth_modules = {} + +def get_module(name): + if name == 'sha': + from auth_sha import StorageClass, SHAClient, Database + return StorageClass, SHAClient, Database + elif name == 'digest': + from .auth_digest import StorageClass, DigestClient, DigestDatabase + return StorageClass, DigestClient, DigestDatabase + else: + return _auth_modules.get(name) + +def register_module(name, storage_class, client, db): + if name in _auth_modules: + raise TypeError("%s is already registred" % name) + _auth_modules[name] = storage_class, client, db diff --git a/thesisenv/lib/python3.6/site-packages/ZEO/tests/ZEO4/auth/auth_digest.py b/thesisenv/lib/python3.6/site-packages/ZEO/tests/ZEO4/auth/auth_digest.py new file mode 100644 index 0000000..3ca4abc --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO/tests/ZEO4/auth/auth_digest.py @@ -0,0 +1,142 @@ +############################################################################## +# +# Copyright (c) 2003 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## +"""Digest authentication for ZEO + +This authentication mechanism follows the design of HTTP digest +authentication (RFC 2069). 
It is a simple challenge-response protocol +that does not send passwords in the clear, but does not offer strong +security. The RFC discusses many of the limitations of this kind of +protocol. + +Guard the password database as if it contained plaintext passwords. +It stores the hash of a username and password. This does not expose +the plaintext password, but it is sensitive nonetheless. An attacker +with the hash can impersonate the real user. This is a limitation of +the simple digest scheme. + +HTTP is a stateless protocol, and ZEO is a stateful protocol. The +security requirements are quite different as a result. The HTTP +protocol uses a nonce as a challenge. The ZEO protocol requires a +separate session key that is used for message authentication. We +generate a second nonce for this purpose; the hash of nonce and +user/realm/password is used as the session key. + +TODO: I'm not sure if this is a sound approach; SRP would be preferred. +""" + +import os +import random +import struct +import time + +from .base import Database, Client +from ..StorageServer import ZEOStorage +from ZEO.Exceptions import AuthError +from ..hash import sha1 + +def get_random_bytes(n=8): + try: + b = os.urandom(n) + except NotImplementedError: + L = [chr(random.randint(0, 255)) for i in range(n)] + b = b"".join(L) + return b + +def hexdigest(s): + return sha1(s.encode()).hexdigest() + +class DigestDatabase(Database): + def __init__(self, filename, realm=None): + Database.__init__(self, filename, realm) + + # Initialize a key used to build the nonce for a challenge. + # We need one key for the lifetime of the server, so it + # is convenient to store in on the database. + self.noncekey = get_random_bytes(8) + + def _store_password(self, username, password): + dig = hexdigest("%s:%s:%s" % (username, self.realm, password)) + self._users[username] = dig + +def session_key(h_up, nonce): + # The hash itself is a bit too short to be a session key. + # HMAC wants a 64-byte key. 
We don't want to use h_up + # directly because it would never change over time. Instead + # use the hash plus part of h_up. + return (sha1(("%s:%s" % (h_up, nonce)).encode('latin-1')).digest() + + h_up.encode('utf-8')[:44]) + +class StorageClass(ZEOStorage): + def set_database(self, database): + assert isinstance(database, DigestDatabase) + self.database = database + self.noncekey = database.noncekey + + def _get_time(self): + # Return a string representing the current time. + t = int(time.time()) + return struct.pack("i", t) + + def _get_nonce(self): + # RFC 2069 recommends a nonce of the form + # H(client-IP ":" time-stamp ":" private-key) + dig = sha1() + dig.update(str(self.connection.addr).encode('latin-1')) + dig.update(self._get_time()) + dig.update(self.noncekey) + return dig.hexdigest() + + def auth_get_challenge(self): + """Return realm, challenge, and nonce.""" + self._challenge = self._get_nonce() + self._key_nonce = self._get_nonce() + return self.auth_realm, self._challenge, self._key_nonce + + def auth_response(self, resp): + # verify client response + user, challenge, response = resp + + # Since zrpc is a stateful protocol, we just store the nonce + # we sent to the client. It will need to generate a new + # nonce for a new connection anyway. 
+ if self._challenge != challenge: + raise ValueError("invalid challenge") + + # lookup user in database + h_up = self.database.get_password(user) + + # regeneration resp from user, password, and nonce + check = hexdigest("%s:%s" % (h_up, challenge)) + if check == response: + self.connection.setSessionKey(session_key(h_up, self._key_nonce)) + return self._finish_auth(check == response) + + extensions = [auth_get_challenge, auth_response] + +class DigestClient(Client): + extensions = ["auth_get_challenge", "auth_response"] + + def start(self, username, realm, password): + _realm, challenge, nonce = self.stub.auth_get_challenge() + if _realm != realm: + raise AuthError("expected realm %r, got realm %r" + % (_realm, realm)) + h_up = hexdigest("%s:%s:%s" % (username, realm, password)) + + resp_dig = hexdigest("%s:%s" % (h_up, challenge)) + result = self.stub.auth_response((username, challenge, resp_dig)) + if result: + return session_key(h_up, nonce) + else: + return None diff --git a/thesisenv/lib/python3.6/site-packages/ZEO/tests/ZEO4/auth/base.py b/thesisenv/lib/python3.6/site-packages/ZEO/tests/ZEO4/auth/base.py new file mode 100644 index 0000000..74d4b5d --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO/tests/ZEO4/auth/base.py @@ -0,0 +1,139 @@ +############################################################################## +# +# Copyright (c) 2003 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## +"""Base classes for defining an authentication protocol. 
+ +Database -- abstract base class for password database +Client -- abstract base class for authentication client +""" +from __future__ import print_function +from __future__ import print_function + +import os +from ..hash import sha1 + +class Client(object): + # Subclass should override to list the names of methods that + # will be called on the server. + extensions = [] + + def __init__(self, stub): + self.stub = stub + for m in self.extensions: + setattr(self.stub, m, self.stub.extensionMethod(m)) + +def sort(L): + """Sort a list in-place and return it.""" + L.sort() + return L + +class Database(object): + """Abstracts a password database. + + This class is used both in the authentication process (via + get_password()) and by client scripts that manage the password + database file. + + The password file is a simple, colon-separated text file mapping + usernames to password hashes. The hashes are SHA hex digests + produced from the password string. + """ + realm = None + def __init__(self, filename, realm=None): + """Creates a new Database + + filename: a string containing the full pathname of + the password database file. Must be readable by the user + running ZEO. Must be writeable by any client script that + accesses the database. 
+ + realm: the realm name (a string) + """ + self._users = {} + self.filename = filename + self.load() + if realm: + if self.realm and self.realm != realm: + raise ValueError("Specified realm %r differs from database " + "realm %r" % (realm or '', self.realm)) + else: + self.realm = realm + + def save(self, fd=None): + filename = self.filename + needs_closed = False + if not fd: + fd = open(filename, 'w') + needs_closed = True + + try: + if self.realm: + print("realm", self.realm, file=fd) + + for username in sorted(self._users.keys()): + print("%s: %s" % (username, self._users[username]), file=fd) + finally: + if needs_closed: + fd.close() + + def load(self): + filename = self.filename + if not filename: + return + + if not os.path.exists(filename): + return + + with open(filename) as fd: + L = fd.readlines() + + if not L: + return + + if L[0].startswith("realm "): + line = L.pop(0).strip() + self.realm = line[len("realm "):] + + for line in L: + username, hash = line.strip().split(":", 1) + self._users[username] = hash.strip() + + def _store_password(self, username, password): + self._users[username] = self.hash(password) + + def get_password(self, username): + """Returns password hash for specified username. 
+ + Callers must check for LookupError, which is raised in + the case of a non-existent user specified.""" + if username not in self._users: + raise LookupError("No such user: %s" % username) + return self._users[username] + + def hash(self, s): + return sha1(s.encode()).hexdigest() + + def add_user(self, username, password): + if username in self._users: + raise LookupError("User %s already exists" % username) + self._store_password(username, password) + + def del_user(self, username): + if username not in self._users: + raise LookupError("No such user: %s" % username) + del self._users[username] + + def change_password(self, username, password): + if username not in self._users: + raise LookupError("No such user: %s" % username) + self._store_password(username, password) diff --git a/thesisenv/lib/python3.6/site-packages/ZEO/tests/ZEO4/auth/hmac.py b/thesisenv/lib/python3.6/site-packages/ZEO/tests/ZEO4/auth/hmac.py new file mode 100644 index 0000000..0707acd --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO/tests/ZEO4/auth/hmac.py @@ -0,0 +1,99 @@ +"""HMAC (Keyed-Hashing for Message Authentication) Python module. + +Implements the HMAC algorithm as described by RFC 2104. +""" +from six.moves import map +from six.moves import zip + +def _strxor(s1, s2): + """Utility method. XOR the two strings s1 and s2 (must have same length). + """ + return "".join(map(lambda x, y: chr(ord(x) ^ ord(y)), s1, s2)) + +# The size of the digests returned by HMAC depends on the underlying +# hashing module used. +digest_size = None + +class HMAC(object): + """RFC2104 HMAC class. + + This supports the API for Cryptographic Hash Functions (PEP 247). + """ + + def __init__(self, key, msg = None, digestmod = None): + """Create a new HMAC object. + + key: key for the keyed hash object. + msg: Initial input for the hash, if provided. + digestmod: A module supporting PEP 247. Defaults to the md5 module. 
+ """ + if digestmod is None: + import md5 + digestmod = md5 + + self.digestmod = digestmod + self.outer = digestmod.new() + self.inner = digestmod.new() + self.digest_size = digestmod.digest_size + + blocksize = 64 + ipad = "\x36" * blocksize + opad = "\x5C" * blocksize + + if len(key) > blocksize: + key = digestmod.new(key).digest() + + key = key + chr(0) * (blocksize - len(key)) + self.outer.update(_strxor(key, opad)) + self.inner.update(_strxor(key, ipad)) + if msg is not None: + self.update(msg) + +## def clear(self): +## raise NotImplementedError("clear() method not available in HMAC.") + + def update(self, msg): + """Update this hashing object with the string msg. + """ + self.inner.update(msg) + + def copy(self): + """Return a separate copy of this hashing object. + + An update to this copy won't affect the original object. + """ + other = HMAC("") + other.digestmod = self.digestmod + other.inner = self.inner.copy() + other.outer = self.outer.copy() + return other + + def digest(self): + """Return the hash value of this hashing object. + + This returns a string containing 8-bit data. The object is + not altered in any way by this function; you can continue + updating the object after calling this function. + """ + h = self.outer.copy() + h.update(self.inner.digest()) + return h.digest() + + def hexdigest(self): + """Like digest(), but returns a string of hexadecimal digits instead. + """ + return "".join([hex(ord(x))[2:].zfill(2) + for x in tuple(self.digest())]) + +def new(key, msg = None, digestmod = None): + """Create a new hashing object and return it. + + key: The starting key for the hash. + msg: if available, will immediately be hashed into the object's starting + state. + + You can now feed arbitrary strings into the object using its update() + method, and can ask for the hash value at any time by calling its digest() + method. 
+ """ + return HMAC(key, msg, digestmod) diff --git a/thesisenv/lib/python3.6/site-packages/ZEO/tests/ZEO4/component.xml b/thesisenv/lib/python3.6/site-packages/ZEO/tests/ZEO4/component.xml new file mode 100644 index 0000000..39cf46b --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO/tests/ZEO4/component.xml @@ -0,0 +1,127 @@ + + + + + + The content of a ZEO section describe operational parameters + of a ZEO server except for the storage(s) to be served. + + + + + The address at which the server should listen. This can be in + the form 'host:port' to signify a TCP/IP connection or a + pathname string to signify a Unix domain socket connection (at + least one '/' is required). A hostname may be a DNS name or a + dotted IP address. If the hostname is omitted, the platform's + default behavior is used when binding the listening socket ('' + is passed to socket.bind() as the hostname portion of the + address). + + + + + + Flag indicating whether the server should operate in read-only + mode. Defaults to false. Note that even if the server is + operating in writable mode, individual storages may still be + read-only. But if the server is in read-only mode, no write + operations are allowed, even if the storages are writable. Note + that pack() is considered a read-only operation. + + + + + + The storage server keeps a queue of the objects modified by the + last N transactions, where N == invalidation_queue_size. This + queue is used to speed client cache verification when a client + disconnects for a short period of time. + + + + + + The maximum age of a client for which quick-verification + invalidations will be provided by iterating over the served + storage. This option should only be used if the served storage + supports efficient iteration from a starting point near the + end of the transaction history (e.g. end of file). + + + + + + The address at which the monitor server should listen. If + specified, a monitor server is started. 
The monitor server + provides server statistics in a simple text format. This can + be in the form 'host:port' to signify a TCP/IP connection or a + pathname string to signify a Unix domain socket connection (at + least one '/' is required). A hostname may be a DNS name or a + dotted IP address. If the hostname is omitted, the platform's + default behavior is used when binding the listening socket ('' + is passed to socket.bind() as the hostname portion of the + address). + + + + + + The maximum amount of time to wait for a transaction to commit + after acquiring the storage lock, specified in seconds. If the + transaction takes too long, the client connection will be closed + and the transaction aborted. + + + + + + The name of the protocol used for authentication. The + only protocol provided with ZEO is "digest," but extensions + may provide other protocols. + + + + + + The path of the database containing authentication credentials. + + + + + + The authentication realm of the server. Some authentication + schemes use a realm to identify the logical set of usernames + that are accepted by this server. + + + + + + The full path to the file in which to write the ZEO server's Process ID + at startup. If omitted, $INSTANCE/var/ZEO.pid is used. + + $INSTANCE/var/ZEO.pid (or $clienthome/ZEO.pid) + + + + + + indicates that the cache should be dropped rather than + verified when the verification optimization is not + available (e.g. when the ZEO server restarted). + + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/ZEO/tests/ZEO4/hash.py b/thesisenv/lib/python3.6/site-packages/ZEO/tests/ZEO4/hash.py new file mode 100644 index 0000000..1cd42d8 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO/tests/ZEO4/hash.py @@ -0,0 +1,27 @@ +############################################################################## +# +# Copyright (c) 2008 Zope Foundation and Contributors. +# All Rights Reserved. 
+# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## + +"""In Python 2.6, the "sha" and "md5" modules have been deprecated +in favor of using hashlib for both. This class allows for compatibility +between versions.""" + +try: + import hashlib + sha1 = hashlib.sha1 + new = sha1 +except ImportError: + import sha + sha1 = sha.new + new = sha1 + digest_size = sha.digest_size diff --git a/thesisenv/lib/python3.6/site-packages/ZEO/tests/ZEO4/monitor.py b/thesisenv/lib/python3.6/site-packages/ZEO/tests/ZEO4/monitor.py new file mode 100644 index 0000000..4efd5a9 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO/tests/ZEO4/monitor.py @@ -0,0 +1,190 @@ +############################################################################## +# +# Copyright (c) 2003 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## +"""Monitor behavior of ZEO server and record statistics. 
+""" +from __future__ import print_function +from __future__ import print_function +from __future__ import print_function +from __future__ import print_function +from __future__ import print_function +from __future__ import print_function +from __future__ import print_function +from __future__ import print_function +from __future__ import print_function +from __future__ import print_function +from __future__ import print_function +from __future__ import print_function +from __future__ import print_function +from __future__ import print_function +from __future__ import print_function +from __future__ import print_function + +import asyncore +import socket +import time +import logging + +zeo_version = 'unknown' +try: + import pkg_resources +except ImportError: + pass +else: + zeo_dist = pkg_resources.working_set.find( + pkg_resources.Requirement.parse('ZODB3') + ) + if zeo_dist is not None: + zeo_version = zeo_dist.version + +class StorageStats(object): + """Per-storage usage statistics.""" + + def __init__(self, connections=None): + self.connections = connections + self.loads = 0 + self.stores = 0 + self.commits = 0 + self.aborts = 0 + self.active_txns = 0 + self.verifying_clients = 0 + self.lock_time = None + self.conflicts = 0 + self.conflicts_resolved = 0 + self.start = time.ctime() + + @property + def clients(self): + return len(self.connections) + + def parse(self, s): + # parse the dump format + lines = s.split("\n") + for line in lines: + field, value = line.split(":", 1) + if field == "Server started": + self.start = value + elif field == "Clients": + # Hack because we use this both on the server and on + # the client where there are no connections. 
+ self.connections = [0] * int(value) + elif field == "Clients verifying": + self.verifying_clients = int(value) + elif field == "Active transactions": + self.active_txns = int(value) + elif field == "Commit lock held for": + # This assumes + self.lock_time = time.time() - int(value) + elif field == "Commits": + self.commits = int(value) + elif field == "Aborts": + self.aborts = int(value) + elif field == "Loads": + self.loads = int(value) + elif field == "Stores": + self.stores = int(value) + elif field == "Conflicts": + self.conflicts = int(value) + elif field == "Conflicts resolved": + self.conflicts_resolved = int(value) + + def dump(self, f): + print("Server started:", self.start, file=f) + print("Clients:", self.clients, file=f) + print("Clients verifying:", self.verifying_clients, file=f) + print("Active transactions:", self.active_txns, file=f) + if self.lock_time: + howlong = time.time() - self.lock_time + print("Commit lock held for:", int(howlong), file=f) + print("Commits:", self.commits, file=f) + print("Aborts:", self.aborts, file=f) + print("Loads:", self.loads, file=f) + print("Stores:", self.stores, file=f) + print("Conflicts:", self.conflicts, file=f) + print("Conflicts resolved:", self.conflicts_resolved, file=f) + +class StatsClient(asyncore.dispatcher): + + def __init__(self, sock, addr): + asyncore.dispatcher.__init__(self, sock) + self.buf = [] + self.closed = 0 + + def close(self): + self.closed = 1 + # The socket is closed after all the data is written. + # See handle_write(). 
+ + def write(self, s): + self.buf.append(s) + + def writable(self): + return len(self.buf) + + def readable(self): + return 0 + + def handle_write(self): + s = "".join(self.buf) + self.buf = [] + n = self.socket.send(s.encode('ascii')) + if n < len(s): + self.buf.append(s[:n]) + + if self.closed and not self.buf: + asyncore.dispatcher.close(self) + +class StatsServer(asyncore.dispatcher): + + StatsConnectionClass = StatsClient + + def __init__(self, addr, stats): + asyncore.dispatcher.__init__(self) + self.addr = addr + self.stats = stats + if type(self.addr) == tuple: + self.create_socket(socket.AF_INET, socket.SOCK_STREAM) + else: + self.create_socket(socket.AF_UNIX, socket.SOCK_STREAM) + self.set_reuse_addr() + logger = logging.getLogger('ZEO.monitor') + logger.info("listening on %s", repr(self.addr)) + self.bind(self.addr) + self.listen(5) + + def writable(self): + return 0 + + def readable(self): + return 1 + + def handle_accept(self): + try: + sock, addr = self.accept() + except socket.error: + return + f = self.StatsConnectionClass(sock, addr) + self.dump(f) + f.close() + + def dump(self, f): + print("ZEO monitor server version %s" % zeo_version, file=f) + print(time.ctime(), file=f) + print(file=f) + + L = sorted(self.stats.keys()) + for k in L: + stats = self.stats[k] + print("Storage:", k, file=f) + stats.dump(f) + print(file=f) diff --git a/thesisenv/lib/python3.6/site-packages/ZEO/tests/ZEO4/runzeo.py b/thesisenv/lib/python3.6/site-packages/ZEO/tests/ZEO4/runzeo.py new file mode 100644 index 0000000..f8cb989 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO/tests/ZEO4/runzeo.py @@ -0,0 +1,396 @@ +############################################################################## +# +# Copyright (c) 2001, 2002, 2003 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## +"""Start the ZEO storage server. + +Usage: %s [-C URL] [-a ADDRESS] [-f FILENAME] [-h] + +Options: +-C/--configuration URL -- configuration file or URL +-a/--address ADDRESS -- server address of the form PORT, HOST:PORT, or PATH + (a PATH must contain at least one "/") +-f/--filename FILENAME -- filename for FileStorage +-t/--timeout TIMEOUT -- transaction timeout in seconds (default no timeout) +-h/--help -- print this usage message and exit +-m/--monitor ADDRESS -- address of monitor server ([HOST:]PORT or PATH) +--pid-file PATH -- relative path to output file containing this process's pid; + default $(INSTANCE_HOME)/var/ZEO.pid but only if envar + INSTANCE_HOME is defined + +Unless -C is specified, -a and -f are required. +""" +from __future__ import print_function +from __future__ import print_function + +# The code here is designed to be reused by other, similar servers. +# For the forseeable future, it must work under Python 2.1 as well as +# 2.2 and above. + +import asyncore +import os +import sys +import signal +import socket +import logging + +import ZConfig.datatypes +from zdaemon.zdoptions import ZDOptions + +logger = logging.getLogger('ZEO.runzeo') +_pid = str(os.getpid()) + +def log(msg, level=logging.INFO, exc_info=False): + """Internal: generic logging function.""" + message = "(%s) %s" % (_pid, msg) + logger.log(level, message, exc_info=exc_info) + +def parse_binding_address(arg): + # Caution: Not part of the official ZConfig API. + obj = ZConfig.datatypes.SocketBindingAddress(arg) + return obj.family, obj.address + +def windows_shutdown_handler(): + # Called by the signal mechanism on Windows to perform shutdown. 
+ import asyncore + asyncore.close_all() + +class ZEOOptionsMixin(object): + + storages = None + + def handle_address(self, arg): + self.family, self.address = parse_binding_address(arg) + + def handle_monitor_address(self, arg): + self.monitor_family, self.monitor_address = parse_binding_address(arg) + + def handle_filename(self, arg): + from ZODB.config import FileStorage # That's a FileStorage *opener*! + class FSConfig(object): + def __init__(self, name, path): + self._name = name + self.path = path + self.stop = None + def getSectionName(self): + return self._name + if not self.storages: + self.storages = [] + name = str(1 + len(self.storages)) + conf = FileStorage(FSConfig(name, arg)) + self.storages.append(conf) + + testing_exit_immediately = False + def handle_test(self, *args): + self.testing_exit_immediately = True + + def add_zeo_options(self): + self.add(None, None, None, "test", self.handle_test) + self.add(None, None, "a:", "address=", self.handle_address) + self.add(None, None, "f:", "filename=", self.handle_filename) + self.add("family", "zeo.address.family") + self.add("address", "zeo.address.address", + required="no server address specified; use -a or -C") + self.add("read_only", "zeo.read_only", default=0) + self.add("invalidation_queue_size", "zeo.invalidation_queue_size", + default=100) + self.add("invalidation_age", "zeo.invalidation_age") + self.add("transaction_timeout", "zeo.transaction_timeout", + "t:", "timeout=", float) + self.add("monitor_address", "zeo.monitor_address.address", + "m:", "monitor=", self.handle_monitor_address) + self.add('auth_protocol', 'zeo.authentication_protocol', + None, 'auth-protocol=', default=None) + self.add('auth_database', 'zeo.authentication_database', + None, 'auth-database=') + self.add('auth_realm', 'zeo.authentication_realm', + None, 'auth-realm=') + self.add('pid_file', 'zeo.pid_filename', + None, 'pid-file=') + +class ZEOOptions(ZDOptions, ZEOOptionsMixin): + + __doc__ = __doc__ + + logsectionname = 
"eventlog" + schemadir = os.path.dirname(__file__) + + def __init__(self): + ZDOptions.__init__(self) + self.add_zeo_options() + self.add("storages", "storages", + required="no storages specified; use -f or -C") + + def realize(self, *a, **k): + ZDOptions.realize(self, *a, **k) + nunnamed = [s for s in self.storages if s.name is None] + if nunnamed: + if len(nunnamed) > 1: + return self.usage("No more than one storage may be unnamed.") + if [s for s in self.storages if s.name == '1']: + return self.usage( + "Can't have an unnamed storage and a storage named 1.") + for s in self.storages: + if s.name is None: + s.name = '1' + break + + +class ZEOServer(object): + + def __init__(self, options): + self.options = options + + def main(self): + self.setup_default_logging() + self.check_socket() + self.clear_socket() + self.make_pidfile() + try: + self.open_storages() + self.setup_signals() + self.create_server() + self.loop_forever() + finally: + self.server.close() + self.clear_socket() + self.remove_pidfile() + + def setup_default_logging(self): + if self.options.config_logger is not None: + return + # No log file is configured; default to stderr. 
+ root = logging.getLogger() + root.setLevel(logging.INFO) + fmt = logging.Formatter( + "------\n%(asctime)s %(levelname)s %(name)s %(message)s", + "%Y-%m-%dT%H:%M:%S") + handler = logging.StreamHandler() + handler.setFormatter(fmt) + root.addHandler(handler) + + def check_socket(self): + if (isinstance(self.options.address, tuple) and + self.options.address[1] is None): + self.options.address = self.options.address[0], 0 + return + if self.can_connect(self.options.family, self.options.address): + self.options.usage("address %s already in use" % + repr(self.options.address)) + + def can_connect(self, family, address): + s = socket.socket(family, socket.SOCK_STREAM) + try: + s.connect(address) + except socket.error: + return 0 + else: + s.close() + return 1 + + def clear_socket(self): + if isinstance(self.options.address, type("")): + try: + os.unlink(self.options.address) + except os.error: + pass + + def open_storages(self): + self.storages = {} + for opener in self.options.storages: + log("opening storage %r using %s" + % (opener.name, opener.__class__.__name__)) + self.storages[opener.name] = opener.open() + + def setup_signals(self): + """Set up signal handlers. + + The signal handler for SIGFOO is a method handle_sigfoo(). + If no handler method is defined for a signal, the signal + action is not changed from its initial value. The handler + method is called without additional arguments. + """ + if os.name != "posix": + if os.name == "nt": + self.setup_win32_signals() + return + if hasattr(signal, 'SIGXFSZ'): + signal.signal(signal.SIGXFSZ, signal.SIG_IGN) # Special case + init_signames() + for sig, name in signames.items(): + method = getattr(self, "handle_" + name.lower(), None) + if method is not None: + def wrapper(sig_dummy, frame_dummy, method=method): + method() + signal.signal(sig, wrapper) + + def setup_win32_signals(self): + # Borrow the Zope Signals package win32 support, if available. + # Signals does a check/log for the availability of pywin32. 
+ try: + import Signals.Signals + except ImportError: + logger.debug("Signals package not found. " + "Windows-specific signal handler " + "will *not* be installed.") + return + SignalHandler = Signals.Signals.SignalHandler + if SignalHandler is not None: # may be None if no pywin32. + SignalHandler.registerHandler(signal.SIGTERM, + windows_shutdown_handler) + SignalHandler.registerHandler(signal.SIGINT, + windows_shutdown_handler) + SIGUSR2 = 12 # not in signal module on Windows. + SignalHandler.registerHandler(SIGUSR2, self.handle_sigusr2) + + def create_server(self): + self.server = create_server(self.storages, self.options) + + def loop_forever(self): + if self.options.testing_exit_immediately: + print("testing exit immediately") + else: + self.server.loop() + + def handle_sigterm(self): + log("terminated by SIGTERM") + sys.exit(0) + + def handle_sigint(self): + log("terminated by SIGINT") + sys.exit(0) + + def handle_sighup(self): + log("restarted by SIGHUP") + sys.exit(1) + + def handle_sigusr2(self): + # log rotation signal - do the same as Zope 2.7/2.8... + if self.options.config_logger is None or os.name not in ("posix", "nt"): + log("received SIGUSR2, but it was not handled!", + level=logging.WARNING) + return + + loggers = [self.options.config_logger] + + if os.name == "posix": + for l in loggers: + l.reopen() + log("Log files reopened successfully", level=logging.INFO) + else: # nt - same rotation code as in Zope's Signals/Signals.py + for l in loggers: + for f in l.handler_factories: + handler = f() + if hasattr(handler, 'rotate') and callable(handler.rotate): + handler.rotate() + log("Log files rotation complete", level=logging.INFO) + + def _get_pidfile(self): + pidfile = self.options.pid_file + # 'pidfile' is marked as not required. + if not pidfile: + # Try to find a reasonable location if the pidfile is not + # set. If we are running in a Zope environment, we can + # safely assume INSTANCE_HOME. 
+ instance_home = os.environ.get("INSTANCE_HOME") + if not instance_home: + # If all our attempts failed, just log a message and + # proceed. + logger.debug("'pidfile' option not set, and 'INSTANCE_HOME' " + "environment variable could not be found. " + "Cannot guess pidfile location.") + return + self.options.pid_file = os.path.join(instance_home, + "var", "ZEO.pid") + + def make_pidfile(self): + if not self.options.read_only: + self._get_pidfile() + pidfile = self.options.pid_file + if pidfile is None: + return + pid = os.getpid() + try: + if os.path.exists(pidfile): + os.unlink(pidfile) + f = open(pidfile, 'w') + print(pid, file=f) + f.close() + log("created PID file '%s'" % pidfile) + except IOError: + logger.error("PID file '%s' cannot be opened" % pidfile) + + def remove_pidfile(self): + if not self.options.read_only: + pidfile = self.options.pid_file + if pidfile is None: + return + try: + if os.path.exists(pidfile): + os.unlink(pidfile) + log("removed PID file '%s'" % pidfile) + except IOError: + logger.error("PID file '%s' could not be removed" % pidfile) + + +def create_server(storages, options): + from .StorageServer import StorageServer + return StorageServer( + options.address, + storages, + read_only = options.read_only, + invalidation_queue_size = options.invalidation_queue_size, + invalidation_age = options.invalidation_age, + transaction_timeout = options.transaction_timeout, + monitor_address = options.monitor_address, + auth_protocol = options.auth_protocol, + auth_database = options.auth_database, + auth_realm = options.auth_realm, + ) + + +# Signal names + +signames = None + +def signame(sig): + """Return a symbolic name for a signal. + + Return "signal NNN" if there is no corresponding SIG name in the + signal module. 
+ """ + + if signames is None: + init_signames() + return signames.get(sig) or "signal %d" % sig + +def init_signames(): + global signames + signames = {} + for name, sig in signal.__dict__.items(): + k_startswith = getattr(name, "startswith", None) + if k_startswith is None: + continue + if k_startswith("SIG") and not k_startswith("SIG_"): + signames[sig] = name + + +# Main program + +def main(args=None): + options = ZEOOptions() + options.realize(args) + s = ZEOServer(options) + s.main() + +if __name__ == "__main__": + main() diff --git a/thesisenv/lib/python3.6/site-packages/ZEO/tests/ZEO4/schema.xml b/thesisenv/lib/python3.6/site-packages/ZEO/tests/ZEO4/schema.xml new file mode 100644 index 0000000..9cb0cae --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO/tests/ZEO4/schema.xml @@ -0,0 +1,40 @@ + + + + + + This schema describes the configuration of the ZEO storage server + process. + + + + + + + + + + + + + + +
+ +
+ + + + One or more storages that are provided by the ZEO server. The + section names are used as the storage names, and must be unique + within each ZEO storage server. Traditionally, these names + represent small integers starting at '1'. + + + +
+ + diff --git a/thesisenv/lib/python3.6/site-packages/ZEO/tests/ZEO4/zrpc/__init__.py b/thesisenv/lib/python3.6/site-packages/ZEO/tests/ZEO4/zrpc/__init__.py new file mode 100644 index 0000000..c79471e --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO/tests/ZEO4/zrpc/__init__.py @@ -0,0 +1,24 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## +# zrpc is a package with the following modules +# client -- manages connection creation to remote server +# connection -- object dispatcher +# log -- logging helper +# error -- exceptions raised by zrpc +# marshal -- internal, handles basic protocol issues +# server -- manages incoming connections from remote clients +# smac -- sized message async connections +# trigger -- medusa's trigger + +# zrpc is not an advertised subpackage of ZEO; its interfaces are internal diff --git a/thesisenv/lib/python3.6/site-packages/ZEO/tests/ZEO4/zrpc/_hmac.py b/thesisenv/lib/python3.6/site-packages/ZEO/tests/ZEO4/zrpc/_hmac.py new file mode 100644 index 0000000..888a399 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO/tests/ZEO4/zrpc/_hmac.py @@ -0,0 +1,106 @@ +# This file is a slightly modified copy of Python 2.3's Lib/hmac.py. +# This file is under the Python Software Foundation (PSF) license. + +"""HMAC (Keyed-Hashing for Message Authentication) Python module. + +Implements the HMAC algorithm as described by RFC 2104. 
+""" +from six.moves import map +from six.moves import zip + +def _strxor(s1, s2): + """Utility method. XOR the two strings s1 and s2 (must have same length). + """ + return "".join(map(lambda x, y: chr(ord(x) ^ ord(y)), s1, s2)) + +# The size of the digests returned by HMAC depends on the underlying +# hashing module used. +digest_size = None + +class HMAC(object): + """RFC2104 HMAC class. + + This supports the API for Cryptographic Hash Functions (PEP 247). + """ + + def __init__(self, key, msg = None, digestmod = None): + """Create a new HMAC object. + + key: key for the keyed hash object. + msg: Initial input for the hash, if provided. + digestmod: A module supporting PEP 247. Defaults to the md5 module. + """ + if digestmod is None: + import md5 + digestmod = md5 + + self.digestmod = digestmod + self.outer = digestmod.new() + self.inner = digestmod.new() + # Python 2.1 and 2.2 differ about the correct spelling + try: + self.digest_size = digestmod.digestsize + except AttributeError: + self.digest_size = digestmod.digest_size + + blocksize = 64 + ipad = "\x36" * blocksize + opad = "\x5C" * blocksize + + if len(key) > blocksize: + key = digestmod.new(key).digest() + + key = key + chr(0) * (blocksize - len(key)) + self.outer.update(_strxor(key, opad)) + self.inner.update(_strxor(key, ipad)) + if msg is not None: + self.update(msg) + +## def clear(self): +## raise NotImplementedError("clear() method not available in HMAC.") + + def update(self, msg): + """Update this hashing object with the string msg. + """ + self.inner.update(msg) + + def copy(self): + """Return a separate copy of this hashing object. + + An update to this copy won't affect the original object. + """ + other = HMAC("") + other.digestmod = self.digestmod + other.inner = self.inner.copy() + other.outer = self.outer.copy() + return other + + def digest(self): + """Return the hash value of this hashing object. + + This returns a string containing 8-bit data. 
The object is + not altered in any way by this function; you can continue + updating the object after calling this function. + """ + h = self.outer.copy() + h.update(self.inner.digest()) + return h.digest() + + def hexdigest(self): + """Like digest(), but returns a string of hexadecimal digits instead. + """ + return "".join([hex(ord(x))[2:].zfill(2) + for x in tuple(self.digest())]) + +def new(key, msg = None, digestmod = None): + """Create a new hashing object and return it. + + key: The starting key for the hash. + msg: if available, will immediately be hashed into the object's starting + state. + + You can now feed arbitrary strings into the object using its update() + method, and can ask for the hash value at any time by calling its digest() + method. + """ + return HMAC(key, msg, digestmod) diff --git a/thesisenv/lib/python3.6/site-packages/ZEO/tests/ZEO4/zrpc/client.py b/thesisenv/lib/python3.6/site-packages/ZEO/tests/ZEO4/zrpc/client.py new file mode 100644 index 0000000..6c448d2 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO/tests/ZEO4/zrpc/client.py @@ -0,0 +1,655 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## +import asyncore +import errno +import logging +import select +import socket +import sys +import threading +import time +from . 
import trigger + + +from .connection import ManagedClientConnection +from .log import log +from .error import DisconnectedError + +from ZODB.POSException import ReadOnlyError +from ZODB.loglevels import BLATHER +from six.moves import map + + +def client_timeout(): + return 30.0 + +def client_loop(map): + read = asyncore.read + write = asyncore.write + _exception = asyncore._exception + + while map: + try: + + # The next two lines intentionally don't use + # iterators. Other threads can close dispatchers, causeing + # the socket map to shrink. + r = e = map.keys() + w = [fd for (fd, obj) in map.items() if obj.writable()] + + try: + r, w, e = select.select(r, w, e, client_timeout()) + except (select.error, RuntimeError) as err: + # Python >= 3.3 makes select.error an alias of OSError, + # which is not subscriptable but does have the 'errno' attribute + err_errno = getattr(err, 'errno', None) or err[0] + if err_errno != errno.EINTR: + if err_errno == errno.EBADF: + + # If a connection is closed while we are + # calling select on it, we can get a bad + # file-descriptor error. We'll check for this + # case by looking for entries in r and w that + # are not in the socket map. + + if [fd for fd in r if fd not in map]: + continue + if [fd for fd in w if fd not in map]: + continue + + # Hm, on Mac OS X, we could get a run time + # error and end up here, but retrying select + # would work. Let's try: + select.select(r, w, e, 0) + # we survived, keep going :) + continue + + raise + else: + continue + + if not map: + break + + if not (r or w or e): + # The line intentionally doesn't use iterators. Other + # threads can close dispatchers, causeing the socket + # map to shrink. + for obj in map.values(): + if isinstance(obj, ManagedClientConnection): + # Send a heartbeat message as a reply to a + # non-existent message id. 
+ try: + obj.send_reply(-1, None) + except DisconnectedError: + pass + continue + + for fd in r: + obj = map.get(fd) + if obj is None: + continue + read(obj) + + for fd in w: + obj = map.get(fd) + if obj is None: + continue + write(obj) + + for fd in e: + obj = map.get(fd) + if obj is None: + continue + _exception(obj) + + except: + if map: + try: + logging.getLogger(__name__+'.client_loop').critical( + 'A ZEO client loop failed.', + exc_info=sys.exc_info()) + except: + + pass + + for fd, obj in map.items(): + if not hasattr(obj, 'mgr'): + continue + try: + obj.mgr.client.close() + except: + map.pop(fd, None) + try: + logging.getLogger(__name__+'.client_loop' + ).critical( + "Couldn't close a dispatcher.", + exc_info=sys.exc_info()) + except: + pass + + +class ConnectionManager(object): + """Keeps a connection up over time""" + + sync_wait = 30 + + def __init__(self, addrs, client, tmin=1, tmax=180): + self.client = client + self._start_asyncore_loop() + self.addrlist = self._parse_addrs(addrs) + self.tmin = min(tmin, tmax) + self.tmax = tmax + self.cond = threading.Condition(threading.Lock()) + self.connection = None # Protected by self.cond + self.closed = 0 + # If thread is not None, then there is a helper thread + # attempting to connect. + self.thread = None # Protected by self.cond + + def new_addrs(self, addrs): + self.addrlist = self._parse_addrs(addrs) + + def _start_asyncore_loop(self): + self.map = {} + self.trigger = trigger.trigger(self.map) + self.loop_thread = threading.Thread( + name="%s zeo client networking thread" % self.client.__name__, + target=client_loop, args=(self.map,)) + self.loop_thread.setDaemon(True) + self.loop_thread.start() + + def __repr__(self): + return "<%s for %s>" % (self.__class__.__name__, self.addrlist) + + def _parse_addrs(self, addrs): + # Return a list of (addr_type, addr) pairs. + + # For backwards compatibility (and simplicity?) 
the + # constructor accepts a single address in the addrs argument -- + # a string for a Unix domain socket or a 2-tuple with a + # hostname and port. It can also accept a list of such addresses. + + addr_type = self._guess_type(addrs) + if addr_type is not None: + return [(addr_type, addrs)] + else: + addrlist = [] + for addr in addrs: + addr_type = self._guess_type(addr) + if addr_type is None: + raise ValueError("unknown address in list: %s" % repr(addr)) + addrlist.append((addr_type, addr)) + return addrlist + + def _guess_type(self, addr): + if isinstance(addr, str): + return socket.AF_UNIX + + if (len(addr) == 2 + and isinstance(addr[0], str) + and isinstance(addr[1], int)): + return socket.AF_INET # also denotes IPv6 + + # not anything I know about + return None + + def close(self): + """Prevent ConnectionManager from opening new connections""" + self.closed = 1 + self.cond.acquire() + try: + t = self.thread + self.thread = None + finally: + self.cond.release() + if t is not None: + log("CM.close(): stopping and joining thread") + t.stop() + t.join(30) + if t.isAlive(): + log("CM.close(): self.thread.join() timed out", + level=logging.WARNING) + + for fd, obj in list(self.map.items()): + if obj is not self.trigger: + try: + obj.close() + except: + logging.getLogger(__name__+'.'+self.__class__.__name__ + ).critical( + "Couldn't close a dispatcher.", + exc_info=sys.exc_info()) + + self.map.clear() + self.trigger.pull_trigger() + try: + self.loop_thread.join(9) + except RuntimeError: + pass # we are the thread :) + self.trigger.close() + + def attempt_connect(self): + """Attempt a connection to the server without blocking too long. + + There isn't a crisp definition for too long. When a + ClientStorage is created, it attempts to connect to the + server. If the server isn't immediately available, it can + operate from the cache. This method will start the background + connection thread and wait a little while to see if it + finishes quickly. 
+ """ + + # Will a single attempt take too long? + # Answer: it depends -- normally, you'll connect or get a + # connection refused error very quickly. Packet-eating + # firewalls and other mishaps may cause the connect to take a + # long time to time out though. It's also possible that you + # connect quickly to a slow server, and the attempt includes + # at least one roundtrip to the server (the register() call). + # But that's as fast as you can expect it to be. + self.connect() + self.cond.acquire() + try: + t = self.thread + conn = self.connection + finally: + self.cond.release() + if t is not None and conn is None: + event = t.one_attempt + event.wait() + self.cond.acquire() + try: + conn = self.connection + finally: + self.cond.release() + return conn is not None + + def connect(self, sync=0): + self.cond.acquire() + try: + if self.connection is not None: + return + t = self.thread + if t is None: + log("CM.connect(): starting ConnectThread") + self.thread = t = ConnectThread(self, self.client) + t.setDaemon(1) + t.start() + if sync: + while self.connection is None and t.isAlive(): + self.cond.wait(self.sync_wait) + if self.connection is None: + log("CM.connect(sync=1): still waiting...") + assert self.connection is not None + finally: + self.cond.release() + + def connect_done(self, conn, preferred): + # Called by ConnectWrapper.notify_client() after notifying the client + log("CM.connect_done(preferred=%s)" % preferred) + self.cond.acquire() + try: + self.connection = conn + if preferred: + self.thread = None + self.cond.notifyAll() # Wake up connect(sync=1) + finally: + self.cond.release() + + def close_conn(self, conn): + # Called by the connection when it is closed + self.cond.acquire() + try: + if conn is not self.connection: + # Closing a non-current connection + log("CM.close_conn() non-current", level=BLATHER) + return + log("CM.close_conn()") + self.connection = None + finally: + self.cond.release() + self.client.notifyDisconnected() + if not 
self.closed: + self.connect() + + def is_connected(self): + self.cond.acquire() + try: + return self.connection is not None + finally: + self.cond.release() + +# When trying to do a connect on a non-blocking socket, some outcomes +# are expected. Set _CONNECT_IN_PROGRESS to the errno value(s) expected +# when an initial connect can't complete immediately. Set _CONNECT_OK +# to the errno value(s) expected if the connect succeeds *or* if it's +# already connected (our code can attempt redundant connects). +if hasattr(errno, "WSAEWOULDBLOCK"): # Windows + # Caution: The official Winsock docs claim that WSAEALREADY should be + # treated as yet another "in progress" indicator, but we've never + # seen this. + _CONNECT_IN_PROGRESS = (errno.WSAEWOULDBLOCK,) + # Win98: WSAEISCONN; Win2K: WSAEINVAL + _CONNECT_OK = (0, errno.WSAEISCONN, errno.WSAEINVAL) +else: # Unix + _CONNECT_IN_PROGRESS = (errno.EINPROGRESS,) + _CONNECT_OK = (0, errno.EISCONN) + +class ConnectThread(threading.Thread): + """Thread that tries to connect to server given one or more addresses. + + The thread is passed a ConnectionManager and the manager's client + as arguments. It calls testConnection() on the client when a + socket connects; that should return 1 or 0 indicating whether this + is a preferred or a fallback connection. It may also raise an + exception, in which case the connection is abandoned. + + The thread will continue to run, attempting connections, until a + preferred connection is seen and successfully handed over to the + manager and client. + + As soon as testConnection() finds a preferred connection, or after + all sockets have been tried and at least one fallback connection + has been seen, notifyConnected(connection) is called on the client + and connect_done() on the manager. If this was a preferred + connection, the thread then exits; otherwise, it keeps trying + until it gets a preferred connection, and then reconnects the + client using that connection. 
+ + """ + + __super_init = threading.Thread.__init__ + + # We don't expect clients to call any methods of this Thread other + # than close() and those defined by the Thread API. + + def __init__(self, mgr, client): + self.__super_init(name="Connect(%s)" % mgr.addrlist) + self.mgr = mgr + self.client = client + self.stopped = 0 + self.one_attempt = threading.Event() + # A ConnectThread keeps track of whether it has finished a + # call to try_connecting(). This allows the ConnectionManager + # to make an attempt to connect right away, but not block for + # too long if the server isn't immediately available. + + def stop(self): + self.stopped = 1 + + def run(self): + delay = self.mgr.tmin + success = 0 + # Don't wait too long the first time. + # TODO: make timeout configurable? + attempt_timeout = 5 + while not self.stopped: + success = self.try_connecting(attempt_timeout) + if not self.one_attempt.isSet(): + self.one_attempt.set() + attempt_timeout = 75 + if success > 0: + break + time.sleep(delay) + if self.mgr.is_connected(): + log("CT: still trying to replace fallback connection", + level=logging.INFO) + delay = min(delay*2, self.mgr.tmax) + log("CT: exiting thread: %s" % self.getName()) + + def try_connecting(self, timeout): + """Try connecting to all self.mgr.addrlist addresses. + + Return 1 if a preferred connection was found; 0 if no + connection was found; and -1 if a fallback connection was + found. + + If no connection is found within timeout seconds, return 0. + """ + log("CT: attempting to connect on %d sockets" % len(self.mgr.addrlist)) + deadline = time.time() + timeout + wrappers = self._create_wrappers() + for wrap in wrappers.keys(): + if wrap.state == "notified": + return 1 + try: + if time.time() > deadline: + return 0 + r = self._connect_wrappers(wrappers, deadline) + if r is not None: + return r + if time.time() > deadline: + return 0 + r = self._fallback_wrappers(wrappers, deadline) + if r is not None: + return r + # Alas, no luck. 
+ assert not wrappers + finally: + for wrap in wrappers.keys(): + wrap.close() + del wrappers + return 0 + + def _expand_addrlist(self): + for domain, addr in self.mgr.addrlist: + # AF_INET really means either IPv4 or IPv6, possibly + # indirected by DNS. By design, DNS lookup is deferred + # until connections get established, so that DNS + # reconfiguration can affect failover + if domain == socket.AF_INET: + host, port = addr + for (family, socktype, proto, cannoname, sockaddr + ) in socket.getaddrinfo(host or 'localhost', port, + socket.AF_INET, + socket.SOCK_STREAM + ): # prune non-TCP results + # for IPv6, drop flowinfo, and restrict addresses + # to [host]:port + yield family, sockaddr[:2] + else: + yield domain, addr + + def _create_wrappers(self): + # Create socket wrappers + wrappers = {} # keys are active wrappers + for domain, addr in self._expand_addrlist(): + wrap = ConnectWrapper(domain, addr, self.mgr, self.client) + wrap.connect_procedure() + if wrap.state == "notified": + for w in wrappers.keys(): + w.close() + return {wrap: wrap} + if wrap.state != "closed": + wrappers[wrap] = wrap + return wrappers + + def _connect_wrappers(self, wrappers, deadline): + # Next wait until they all actually connect (or fail) + # The deadline is necessary, because we'd wait forever if a + # sockets never connects or fails. 
+ while wrappers: + if self.stopped: + for wrap in wrappers.keys(): + wrap.close() + return 0 + # Select connecting wrappers + connecting = [wrap + for wrap in wrappers.keys() + if wrap.state == "connecting"] + if not connecting: + break + if time.time() > deadline: + break + try: + r, w, x = select.select([], connecting, connecting, 1.0) + log("CT: select() %d, %d, %d" % tuple(map(len, (r,w,x)))) + except select.error as msg: + log("CT: select failed; msg=%s" % str(msg), + level=logging.WARNING) + continue + # Exceptable wrappers are in trouble; close these suckers + for wrap in x: + log("CT: closing troubled socket %s" % str(wrap.addr)) + del wrappers[wrap] + wrap.close() + # Writable sockets are connected + for wrap in w: + wrap.connect_procedure() + if wrap.state == "notified": + del wrappers[wrap] # Don't close this one + for wrap in wrappers.keys(): + wrap.close() + return 1 + if wrap.state == "closed": + del wrappers[wrap] + + def _fallback_wrappers(self, wrappers, deadline): + # If we've got wrappers left at this point, they're fallback + # connections. Try notifying them until one succeeds. + for wrap in list(wrappers.keys()): + assert wrap.state == "tested" and wrap.preferred == 0 + if self.mgr.is_connected(): + wrap.close() + else: + wrap.notify_client() + if wrap.state == "notified": + del wrappers[wrap] # Don't close this one + for wrap in wrappers.keys(): + wrap.close() + return -1 + assert wrap.state == "closed" + del wrappers[wrap] + + # TODO: should check deadline + + +class ConnectWrapper(object): + """An object that handles the connection procedure for one socket. 
+ + This is a little state machine with states: + closed + opened + connecting + connected + tested + notified + """ + + def __init__(self, domain, addr, mgr, client): + """Store arguments and create non-blocking socket.""" + self.domain = domain + self.addr = addr + self.mgr = mgr + self.client = client + # These attributes are part of the interface + self.state = "closed" + self.sock = None + self.conn = None + self.preferred = 0 + log("CW: attempt to connect to %s" % repr(addr)) + try: + self.sock = socket.socket(domain, socket.SOCK_STREAM) + except socket.error as err: + log("CW: can't create socket, domain=%s: %s" % (domain, err), + level=logging.ERROR) + self.close() + return + self.sock.setblocking(0) + self.state = "opened" + + def connect_procedure(self): + """Call sock.connect_ex(addr) and interpret result.""" + if self.state in ("opened", "connecting"): + try: + err = self.sock.connect_ex(self.addr) + except socket.error as msg: + log("CW: connect_ex(%r) failed: %s" % (self.addr, msg), + level=logging.ERROR) + self.close() + return + log("CW: connect_ex(%s) returned %s" % + (self.addr, errno.errorcode.get(err) or str(err))) + if err in _CONNECT_IN_PROGRESS: + self.state = "connecting" + return + if err not in _CONNECT_OK: + log("CW: error connecting to %s: %s" % + (self.addr, errno.errorcode.get(err) or str(err)), + level=logging.WARNING) + self.close() + return + self.state = "connected" + if self.state == "connected": + self.test_connection() + + def test_connection(self): + """Establish and test a connection at the zrpc level. + + Call the client's testConnection(), giving the client a chance + to do app-level check of the connection. 
+ """ + self.conn = ManagedClientConnection(self.sock, self.addr, self.mgr) + self.sock = None # The socket is now owned by the connection + try: + self.preferred = self.client.testConnection(self.conn) + self.state = "tested" + except ReadOnlyError: + log("CW: ReadOnlyError in testConnection (%s)" % repr(self.addr)) + self.close() + return + except: + log("CW: error in testConnection (%s)" % repr(self.addr), + level=logging.ERROR, exc_info=True) + self.close() + return + if self.preferred: + self.notify_client() + + def notify_client(self): + """Call the client's notifyConnected(). + + If this succeeds, call the manager's connect_done(). + + If the client is already connected, we assume it's a fallback + connection, and the new connection must be a preferred + connection. The client will close the old connection. + """ + try: + self.client.notifyConnected(self.conn) + except: + log("CW: error in notifyConnected (%s)" % repr(self.addr), + level=logging.ERROR, exc_info=True) + self.close() + return + self.state = "notified" + self.mgr.connect_done(self.conn, self.preferred) + + def close(self): + """Close the socket and reset everything.""" + self.state = "closed" + self.mgr = self.client = None + self.preferred = 0 + if self.conn is not None: + # Closing the ZRPC connection will eventually close the + # socket, somewhere in asyncore. Guido asks: Why do we care? + self.conn.close() + self.conn = None + if self.sock is not None: + self.sock.close() + self.sock = None + + def fileno(self): + return self.sock.fileno() diff --git a/thesisenv/lib/python3.6/site-packages/ZEO/tests/ZEO4/zrpc/connection.py b/thesisenv/lib/python3.6/site-packages/ZEO/tests/ZEO4/zrpc/connection.py new file mode 100644 index 0000000..05bf12e --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO/tests/ZEO4/zrpc/connection.py @@ -0,0 +1,846 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. 
+# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## +import asyncore +import errno +import json +import sys +import threading +import logging +from . import marshal +from . import trigger + +from . import smac +from .error import ZRPCError, DisconnectedError +from .log import short_repr, log +from ZODB.loglevels import BLATHER, TRACE +import ZODB.POSException + +REPLY = ".reply" # message name used for replies + +exception_type_type = type(Exception) + +debug_zrpc = False + +class Delay(object): + """Used to delay response to client for synchronous calls. + + When a synchronous call is made and the original handler returns + without handling the call, it returns a Delay object that prevents + the mainloop from sending a response. 
+ """ + + msgid = conn = sent = None + + def set_sender(self, msgid, conn): + self.msgid = msgid + self.conn = conn + + def reply(self, obj): + self.sent = 'reply' + self.conn.send_reply(self.msgid, obj) + + def error(self, exc_info): + self.sent = 'error' + log("Error raised in delayed method", logging.ERROR, exc_info=exc_info) + self.conn.return_error(self.msgid, *exc_info[:2]) + + def __repr__(self): + return "%s[%s, %r, %r, %r]" % ( + self.__class__.__name__, id(self), self.msgid, self.conn, self.sent) + +class Result(Delay): + + def __init__(self, *args): + self.args = args + + def set_sender(self, msgid, conn): + reply, callback = self.args + conn.send_reply(msgid, reply, False) + callback() + +class MTDelay(Delay): + + def __init__(self): + self.ready = threading.Event() + + def set_sender(self, *args): + Delay.set_sender(self, *args) + self.ready.set() + + def reply(self, obj): + self.ready.wait() + self.conn.call_from_thread(self.conn.send_reply, self.msgid, obj) + + def error(self, exc_info): + self.ready.wait() + log("Error raised in delayed method", logging.ERROR, exc_info=exc_info) + self.conn.call_from_thread(Delay.error, self, exc_info) + +# PROTOCOL NEGOTIATION +# +# The code implementing protocol version 2.0.0 (which is deployed +# in the field and cannot be changed) *only* talks to peers that +# send a handshake indicating protocol version 2.0.0. In that +# version, both the client and the server immediately send out +# their protocol handshake when a connection is established, +# without waiting for their peer, and disconnect when a different +# handshake is receive. +# +# The new protocol uses this to enable new clients to talk to +# 2.0.0 servers. In the new protocol: +# +# The server sends its protocol handshake to the client at once. +# +# The client waits until it receives the server's protocol handshake +# before sending its own handshake. 
The client sends the lower of its +# own protocol version and the server protocol version, allowing it to +# talk to servers using later protocol versions (2.0.2 and higher) as +# well: the effective protocol used will be the lower of the client +# and server protocol. However, this changed in ZODB 3.3.1 (and +# should have changed in ZODB 3.3) because an older server doesn't +# support MVCC methods required by 3.3 clients. +# +# [Ugly details: In order to treat the first received message (protocol +# handshake) differently than all later messages, both client and server +# start by patching their message_input() method to refer to their +# recv_handshake() method instead. In addition, the client has to arrange +# to queue (delay) outgoing messages until it receives the server's +# handshake, so that the first message the client sends to the server is +# the client's handshake. This multiply-special treatment of the first +# message is delicate, and several asyncore and thread subtleties were +# handled unsafely before ZODB 3.2.6. +# ] +# +# The ZEO modules ClientStorage and ServerStub have backwards +# compatibility code for dealing with the previous version of the +# protocol. The client accepts the old version of some messages, +# and will not send new messages when talking to an old server. +# +# As long as the client hasn't sent its handshake, it can't send +# anything else; output messages are queued during this time. +# (Output can happen because the connection testing machinery can +# start sending requests before the handshake is received.) +# +# UPGRADING FROM ZEO 2.0.0 TO NEWER VERSIONS: +# +# Because a new client can talk to an old server, but not vice +# versa, all clients should be upgraded before upgrading any +# servers. Protocol upgrades beyond 2.0.1 will not have this +# restriction, because clients using protocol 2.0.1 or later can +# talk to both older and newer servers. +# +# No compatibility with protocol version 1 is provided. 
+ +# Connection is abstract (it must be derived from). ManagedServerConnection +# and ManagedClientConnection are the concrete subclasses. They need to +# supply a handshake() method appropriate for their role in protocol +# negotiation. + +class Connection(smac.SizedMessageAsyncConnection, object): + """Dispatcher for RPC on object on both sides of socket. + + The connection supports synchronous calls, which expect a return, + and asynchronous calls, which do not. + + It uses the Marshaller class to handle encoding and decoding of + method calls and arguments. Marshaller uses pickle to encode + arbitrary Python objects. The code here doesn't ever see the wire + format. + + A Connection is designed for use in a multithreaded application, + where a synchronous call must block until a response is ready. + + A socket connection between a client and a server allows either + side to invoke methods on the other side. The processes on each + end of the socket use a Connection object to manage communication. + + The Connection deals with decoded RPC messages. They are + represented as four-tuples containing: msgid, flags, method name, + and a tuple of method arguments. + + The msgid starts at zero and is incremented by one each time a + method call message is sent. Each side of the connection has a + separate msgid state. + + When one side of the connection (the client) calls a method, it + sends a message with a new msgid. The other side (the server), + replies with a message that has the same msgid, the string + ".reply" (the global variable REPLY) as the method name, and the + actual return value in the args position. Note that each side of + the Connection can initiate a call, in which case it will be the + client for that particular call. + + The protocol also supports asynchronous calls. The client does + not wait for a return value for an asynchronous call. 
+ + If a method call raises an Exception, the exception is propagated + back to the client via the REPLY message. The client side will + raise any exception it receives instead of returning the value to + the caller. + """ + + __super_init = smac.SizedMessageAsyncConnection.__init__ + __super_close = smac.SizedMessageAsyncConnection.close + __super_setSessionKey = smac.SizedMessageAsyncConnection.setSessionKey + + # Protocol history: + # + # Z200 -- Original ZEO 2.0 protocol + # + # Z201 -- Added invalidateTransaction() to client. + # Renamed several client methods. + # Added several sever methods: + # lastTransaction() + # getAuthProtocol() and scheme-specific authentication methods + # getExtensionMethods(). + # getInvalidations(). + # + # Z303 -- named after the ZODB release 3.3 + # Added methods for MVCC: + # loadBefore() + # A Z303 client cannot talk to a Z201 server, because the latter + # doesn't support MVCC. A Z201 client can talk to a Z303 server, + # but because (at least) the type of the root object changed + # from ZODB.PersistentMapping to persistent.mapping, the older + # client can't actually make progress if a Z303 client created, + # or ever modified, the root. + # + # Z308 -- named after the ZODB release 3.8 + # Added blob-support server methods: + # sendBlob + # storeBlobStart + # storeBlobChunk + # storeBlobEnd + # storeBlobShared + # Added blob-support client methods: + # receiveBlobStart + # receiveBlobChunk + # receiveBlobStop + # + # Z309 -- named after the ZODB release 3.9 + # New server methods: + # restorea, iterator_start, iterator_next, + # iterator_record_start, iterator_record_next, + # iterator_gc + # + # Z310 -- named after the ZODB release 3.10 + # New server methods: + # undoa + # Doesn't support undo for older clients. + # Undone oid info returned by vote. + # + # Z3101 -- checkCurrentSerialInTransaction + # + # Z4 -- checkCurrentSerialInTransaction + # No-longer call load. + + # Protocol variables: + # Our preferred protocol. 
+ current_protocol = b"Z4" + + # If we're a client, an exhaustive list of the server protocols we + # can accept. + servers_we_can_talk_to = [b"Z308", b"Z309", b"Z310", b"Z3101", + current_protocol] + + # If we're a server, an exhaustive list of the client protocols we + # can accept. + clients_we_can_talk_to = [ + b"Z200", b"Z201", b"Z303", b"Z308", b"Z309", b"Z310", b"Z3101", + current_protocol] + + # This is pretty excruciating. Details: + # + # 3.3 server 3.2 client + # server sends Z303 to client + # client computes min(Z303, Z201) == Z201 as the protocol to use + # client sends Z201 to server + # OK, because Z201 is in the server's clients_we_can_talk_to + # + # 3.2 server 3.3 client + # server sends Z201 to client + # client computes min(Z303, Z201) == Z201 as the protocol to use + # Z201 isn't in the client's servers_we_can_talk_to, so client + # raises exception + # + # 3.3 server 3.3 client + # server sends Z303 to client + # client computes min(Z303, Z303) == Z303 as the protocol to use + # Z303 is in the client's servers_we_can_talk_to, so client + # sends Z303 to server + # OK, because Z303 is in the server's clients_we_can_talk_to + + # Exception types that should not be logged: + unlogged_exception_types = () + + # Client constructor passes b'C' for tag, server constructor b'S'. This + # is used in log messages, and to determine whether we can speak with + # our peer. + def __init__(self, sock, addr, obj, tag, map=None): + self.obj = None + self.decode = marshal.decode + self.encode = marshal.encode + self.fast_encode = marshal.fast_encode + + self.closed = False + self.peer_protocol_version = None # set in recv_handshake() + + assert tag in b"CS" + self.tag = tag + self.logger = logging.getLogger('ZEO.zrpc.Connection(%r)' % tag) + if isinstance(addr, tuple): + self.log_label = "(%s:%d) " % addr + else: + self.log_label = "(%s) " % addr + + # Supply our own socket map, so that we don't get registered with + # the asyncore socket map just yet. 
The initial protocol messages + # are treated very specially, and we dare not get invoked by asyncore + # before that special-case setup is complete. Some of that setup + # occurs near the end of this constructor, and the rest is done by + # a concrete subclass's handshake() method. Unfortunately, because + # we ultimately derive from asyncore.dispatcher, it's not possible + # to invoke the superclass constructor without asyncore stuffing + # us into _some_ socket map. + ourmap = {} + self.__super_init(sock, addr, map=ourmap) + + # The singleton dict is used in synchronous mode when a method + # needs to call into asyncore to try to force some I/O to occur. + # The singleton dict is a socket map containing only this object. + self._singleton = {self._fileno: self} + + # waiting_for_reply is used internally to indicate whether + # a call is in progress. setting a session key is deferred + # until after the call returns. + self.waiting_for_reply = False + self.delay_sesskey = None + self.register_object(obj) + + # The first message we see is a protocol handshake. message_input() + # is temporarily replaced by recv_handshake() to treat that message + # specially. revc_handshake() does "del self.message_input", which + # uncovers the normal message_input() method thereafter. + self.message_input = self.recv_handshake + + # Server and client need to do different things for protocol + # negotiation, and handshake() is implemented differently in each. + self.handshake() + + # Now it's safe to register with asyncore's socket map; it was not + # safe before message_input was replaced, or before handshake() was + # invoked. + # Obscure: in Python 2.4, the base asyncore.dispatcher class grew + # a ._map attribute, which is used instead of asyncore's global + # socket map when ._map isn't None. Because we passed `ourmap` to + # the base class constructor above, in 2.4 asyncore believes we want + # to use `ourmap` instead of the global socket map -- but we don't. 
+ # So we have to replace our ._map with the global socket map, and + # update the global socket map with `ourmap`. Replacing our ._map + # isn't necessary before Python 2.4, but doesn't hurt then (it just + # gives us an unused attribute in 2.3); updating the global socket + # map is necessary regardless of Python version. + if map is None: + map = asyncore.socket_map + self._map = map + map.update(ourmap) + + def __repr__(self): + return "<%s %s>" % (self.__class__.__name__, self.addr) + + __str__ = __repr__ # Defeat asyncore's dreaded __getattr__ + + def log(self, message, level=BLATHER, exc_info=False): + self.logger.log(level, self.log_label + message, exc_info=exc_info) + + def close(self): + self.mgr.close_conn(self) + if self.closed: + return + self._singleton.clear() + self.closed = True + self.__super_close() + self.trigger.pull_trigger() + + def register_object(self, obj): + """Register obj as the true object to invoke methods on.""" + self.obj = obj + + # Subclass must implement. handshake() is called by the constructor, + # near its end, but before self is added to asyncore's socket map. + # When a connection is created the first message sent is a 4-byte + # protocol version. This allows the protocol to evolve over time, and + # lets servers handle clients using multiple versions of the protocol. + # In general, the server's handshake() just needs to send the server's + # preferred protocol; the client's also needs to queue (delay) outgoing + # messages until it sees the handshake from the server. + def handshake(self): + raise NotImplementedError + + # Replaces message_input() for the first message received. Records the + # protocol sent by the peer in `peer_protocol_version`, restores the + # normal message_input() method, and raises an exception if the peer's + # protocol is unacceptable. That's all the server needs to do. The + # client needs to do additional work in response to the server's + # handshake, and extends this method. 
+ def recv_handshake(self, proto): + # Extended by ManagedClientConnection. + del self.message_input # uncover normal-case message_input() + self.peer_protocol_version = proto + + if self.tag == b'C': + good_protos = self.servers_we_can_talk_to + else: + assert self.tag == b'S' + good_protos = self.clients_we_can_talk_to + + if proto in good_protos: + self.log("received handshake %r" % proto, level=logging.INFO) + else: + self.log("bad handshake %s" % short_repr(proto), + level=logging.ERROR) + raise ZRPCError("bad handshake %r" % proto) + + def message_input(self, message): + """Decode an incoming message and dispatch it""" + # If something goes wrong during decoding, the marshaller + # will raise an exception. The exception will ultimately + # result in asycnore calling handle_error(), which will + # close the connection. + msgid, async_, name, args = self.decode(message) + + if debug_zrpc: + self.log("recv msg: %s, %s, %s, %s" % (msgid, async_, name, + short_repr(args)), + level=TRACE) + + if name == 'loadEx': + + # Special case and inline the heck out of load case: + try: + ret = self.obj.loadEx(*args) + except (SystemExit, KeyboardInterrupt): + raise + except Exception as msg: + if not isinstance(msg, self.unlogged_exception_types): + self.log("%s() raised exception: %s" % (name, msg), + logging.ERROR, exc_info=True) + self.return_error(msgid, *sys.exc_info()[:2]) + else: + try: + self.message_output(self.fast_encode(msgid, 0, REPLY, ret)) + self.poll() + except: + # Fall back to normal version for better error handling + self.send_reply(msgid, ret) + + elif name == REPLY: + assert not async_ + self.handle_reply(msgid, args) + else: + self.handle_request(msgid, async_, name, args) + + def handle_request(self, msgid, async_, name, args): + obj = self.obj + + if name.startswith('_') or not hasattr(obj, name): + if obj is None: + if debug_zrpc: + self.log("no object calling %s%s" + % (name, short_repr(args)), + level=logging.DEBUG) + return + + msg = "Invalid 
method name: %s on %s" % (name, repr(obj)) + raise ZRPCError(msg) + if debug_zrpc: + self.log("calling %s%s" % (name, short_repr(args)), + level=logging.DEBUG) + + meth = getattr(obj, name) + try: + self.waiting_for_reply = True + try: + ret = meth(*args) + finally: + self.waiting_for_reply = False + except (SystemExit, KeyboardInterrupt): + raise + except Exception as msg: + if not isinstance(msg, self.unlogged_exception_types): + self.log("%s() raised exception: %s" % (name, msg), + logging.ERROR, exc_info=True) + error = sys.exc_info()[:2] + if async_: + self.log("Asynchronous call raised exception: %s" % self, + level=logging.ERROR, exc_info=True) + else: + self.return_error(msgid, *error) + return + + if async_: + if ret is not None: + raise ZRPCError("async method %s returned value %s" % + (name, short_repr(ret))) + else: + if debug_zrpc: + self.log("%s returns %s" % (name, short_repr(ret)), + logging.DEBUG) + if isinstance(ret, Delay): + ret.set_sender(msgid, self) + else: + self.send_reply(msgid, ret, not self.delay_sesskey) + + if self.delay_sesskey: + self.__super_setSessionKey(self.delay_sesskey) + self.delay_sesskey = None + + def return_error(self, msgid, err_type, err_value): + # Note that, ideally, this should be defined soley for + # servers, but a test arranges to get it called on + # a client. Too much trouble to fix it now. :/ + + if not isinstance(err_value, Exception): + err_value = err_type, err_value + + # encode() can pass on a wide variety of exceptions from cPickle. + # While a bare `except` is generally poor practice, in this case + # it's acceptable -- we really do want to catch every exception + # cPickle may raise. 
+ try: + msg = self.encode(msgid, 0, REPLY, (err_type, err_value)) + except: # see above + try: + r = short_repr(err_value) + except: + r = "" + err = ZRPCError("Couldn't pickle error %.100s" % r) + msg = self.encode(msgid, 0, REPLY, (ZRPCError, err)) + self.message_output(msg) + self.poll() + + def handle_error(self): + if sys.exc_info()[0] == SystemExit: + raise sys.exc_info() + self.log("Error caught in asyncore", + level=logging.ERROR, exc_info=True) + self.close() + + def setSessionKey(self, key): + if self.waiting_for_reply: + self.delay_sesskey = key + else: + self.__super_setSessionKey(key) + + def send_call(self, method, args, async_=False): + # send a message and return its msgid + if async_: + msgid = 0 + else: + msgid = self._new_msgid() + + if debug_zrpc: + self.log("send msg: %d, %d, %s, ..." % (msgid, async_, method), + level=TRACE) + buf = self.encode(msgid, async_, method, args) + self.message_output(buf) + return msgid + + def callAsync(self, method, *args): + if self.closed: + raise DisconnectedError() + self.send_call(method, args, 1) + self.poll() + + def callAsyncNoPoll(self, method, *args): + # Like CallAsync but doesn't poll. This exists so that we can + # send invalidations atomically to all clients without + # allowing any client to sneak in a load request. + if self.closed: + raise DisconnectedError() + self.send_call(method, args, 1) + + def callAsyncNoSend(self, method, *args): + # Like CallAsync but doesn't poll. This exists so that we can + # send invalidations atomically to all clients without + # allowing any client to sneak in a load request. + if self.closed: + raise DisconnectedError() + self.send_call(method, args, 1) + self.call_from_thread() + + def callAsyncIterator(self, iterator): + """Queue a sequence of calls using an iterator + + The calls will not be interleaved with other calls from the same + client. 
+ """ + self.message_output(self.encode(0, 1, method, args) + for method, args in iterator) + + def handle_reply(self, msgid, ret): + assert msgid == -1 and ret is None + + def poll(self): + """Invoke asyncore mainloop to get pending message out.""" + if debug_zrpc: + self.log("poll()", level=TRACE) + self.trigger.pull_trigger() + + +# import cProfile, time + +class ManagedServerConnection(Connection): + """Server-side Connection subclass.""" + + # Exception types that should not be logged: + unlogged_exception_types = (ZODB.POSException.POSKeyError, ) + + def __init__(self, sock, addr, obj, mgr): + self.mgr = mgr + map = {} + Connection.__init__(self, sock, addr, obj, b'S', map=map) + + self.decode = marshal.server_decode + + self.trigger = trigger.trigger(map) + self.call_from_thread = self.trigger.pull_trigger + + t = threading.Thread(target=server_loop, args=(map,)) + t.setName("ManagedServerConnection thread") + t.setDaemon(True) + t.start() + + # self.profile = cProfile.Profile() + + # def message_input(self, message): + # self.profile.enable() + # try: + # Connection.message_input(self, message) + # finally: + # self.profile.disable() + + def handshake(self): + # Send the server's preferred protocol to the client. + self.message_output(self.current_protocol) + + def recv_handshake(self, proto): + if proto == b'ruok': + self.message_output(json.dumps(self.mgr.ruok()).encode("ascii")) + self.poll() + Connection.close(self) + else: + Connection.recv_handshake(self, proto) + self.obj.notifyConnected(self) + + def close(self): + self.obj.notifyDisconnected() + Connection.close(self) + # self.profile.dump_stats(str(time.time())+'.stats') + + def send_reply(self, msgid, ret, immediately=True): + # encode() can pass on a wide variety of exceptions from cPickle. + # While a bare `except` is generally poor practice, in this case + # it's acceptable -- we really do want to catch every exception + # cPickle may raise. 
+ try: + msg = self.encode(msgid, 0, REPLY, ret) + except: # see above + try: + r = short_repr(ret) + except: + r = "" + err = ZRPCError("Couldn't pickle return %.100s" % r) + msg = self.encode(msgid, 0, REPLY, (ZRPCError, err)) + self.message_output(msg) + if immediately: + self.poll() + + poll = smac.SizedMessageAsyncConnection.handle_write + +def server_loop(map): + while len(map) > 1: + try: + asyncore.poll(30.0, map) + except Exception as v: + if v.args[0] != errno.EBADF: + raise + + for o in tuple(map.values()): + o.close() + +class ManagedClientConnection(Connection): + """Client-side Connection subclass.""" + __super_init = Connection.__init__ + base_message_output = Connection.message_output + + def __init__(self, sock, addr, mgr): + self.mgr = mgr + + # We can't use the base smac's message_output directly because the + # client needs to queue outgoing messages until it's seen the + # initial protocol handshake from the server. So we have our own + # message_ouput() method, and support for initial queueing. This is + # a delicate design, requiring an output mutex to be wholly + # thread-safe. + # Caution: we must set this up before calling the base class + # constructor, because the latter registers us with asyncore; + # we need to guarantee that we'll queue outgoing messages before + # asyncore learns about us. 
+ self.output_lock = threading.Lock() + self.queue_output = True + self.queued_messages = [] + + # msgid_lock guards access to msgid + self.msgid = 0 + self.msgid_lock = threading.Lock() + + # replies_cond is used to block when a synchronous call is + # waiting for a response + self.replies_cond = threading.Condition() + self.replies = {} + + self.__super_init(sock, addr, None, tag=b'C', map=mgr.map) + self.trigger = mgr.trigger + self.call_from_thread = self.trigger.pull_trigger + self.call_from_thread() + + def close(self): + Connection.close(self) + self.replies_cond.acquire() + self.replies_cond.notifyAll() + self.replies_cond.release() + + # Our message_ouput() queues messages until recv_handshake() gets the + # protocol handshake from the server. + def message_output(self, message): + self.output_lock.acquire() + try: + if self.queue_output: + self.queued_messages.append(message) + else: + assert not self.queued_messages + self.base_message_output(message) + finally: + self.output_lock.release() + + def handshake(self): + # The client waits to see the server's handshake. Outgoing messages + # are queued for the duration. The client will send its own + # handshake after the server's handshake is seen, in recv_handshake() + # below. It will then send any messages queued while waiting. + assert self.queue_output # the constructor already set this + + def recv_handshake(self, proto): + # The protocol to use is the older of our and the server's preferred + # protocols. + proto = min(proto, self.current_protocol) + + # Restore the normal message_input method, and raise an exception + # if the protocol version is too old. + Connection.recv_handshake(self, proto) + + # Tell the server the protocol in use, then send any messages that + # were queued while waiting to hear the server's protocol, and stop + # queueing messages. 
+ self.output_lock.acquire() + try: + self.base_message_output(proto) + for message in self.queued_messages: + self.base_message_output(message) + self.queued_messages = [] + self.queue_output = False + finally: + self.output_lock.release() + + def _new_msgid(self): + self.msgid_lock.acquire() + try: + msgid = self.msgid + self.msgid = self.msgid + 1 + return msgid + finally: + self.msgid_lock.release() + + def call(self, method, *args): + if self.closed: + raise DisconnectedError() + msgid = self.send_call(method, args) + r_args = self.wait(msgid) + if (isinstance(r_args, tuple) and len(r_args) > 1 + and type(r_args[0]) == exception_type_type + and issubclass(r_args[0], Exception)): + inst = r_args[1] + raise inst # error raised by server + else: + return r_args + + def wait(self, msgid): + """Invoke asyncore mainloop and wait for reply.""" + if debug_zrpc: + self.log("wait(%d)" % msgid, level=TRACE) + + self.trigger.pull_trigger() + + self.replies_cond.acquire() + try: + while 1: + if self.closed: + raise DisconnectedError() + reply = self.replies.get(msgid, self) + if reply is not self: + del self.replies[msgid] + if debug_zrpc: + self.log("wait(%d): reply=%s" % + (msgid, short_repr(reply)), level=TRACE) + return reply + self.replies_cond.wait() + finally: + self.replies_cond.release() + + # For testing purposes, it is useful to begin a synchronous call + # but not block waiting for its response. 
+ + def _deferred_call(self, method, *args): + if self.closed: + raise DisconnectedError() + msgid = self.send_call(method, args) + self.trigger.pull_trigger() + return msgid + + def _deferred_wait(self, msgid): + r_args = self.wait(msgid) + if (isinstance(r_args, tuple) + and type(r_args[0]) == exception_type_type + and issubclass(r_args[0], Exception)): + inst = r_args[1] + raise inst # error raised by server + else: + return r_args + + def handle_reply(self, msgid, args): + if debug_zrpc: + self.log("recv reply: %s, %s" + % (msgid, short_repr(args)), level=TRACE) + self.replies_cond.acquire() + try: + self.replies[msgid] = args + self.replies_cond.notifyAll() + finally: + self.replies_cond.release() + + def send_reply(self, msgid, ret): + # Whimper. Used to send heartbeat + assert msgid == -1 and ret is None + self.message_output(b'(J\xff\xff\xff\xffK\x00U\x06.replyNt.') diff --git a/thesisenv/lib/python3.6/site-packages/ZEO/tests/ZEO4/zrpc/error.py b/thesisenv/lib/python3.6/site-packages/ZEO/tests/ZEO4/zrpc/error.py new file mode 100644 index 0000000..35fda34 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO/tests/ZEO4/zrpc/error.py @@ -0,0 +1,27 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## +from ZODB import POSException +from ZEO.Exceptions import ClientDisconnected + +class ZRPCError(POSException.StorageError): + pass + +class DisconnectedError(ZRPCError, ClientDisconnected): + """The database storage is disconnected from the storage server. + + The error occurred because a problem in the low-level RPC connection, + or because the connection was closed. + """ + + # This subclass is raised when zrpc catches the error. diff --git a/thesisenv/lib/python3.6/site-packages/ZEO/tests/ZEO4/zrpc/log.py b/thesisenv/lib/python3.6/site-packages/ZEO/tests/ZEO4/zrpc/log.py new file mode 100644 index 0000000..36ee8e3 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO/tests/ZEO4/zrpc/log.py @@ -0,0 +1,77 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## +import os +import threading +import logging + +from ZODB.loglevels import BLATHER + +LOG_THREAD_ID = 0 # Set this to 1 during heavy debugging + +logger = logging.getLogger('ZEO.zrpc') + +_label = "%s" % os.getpid() + +def new_label(): + global _label + _label = str(os.getpid()) + +def log(message, level=BLATHER, label=None, exc_info=False): + label = label or _label + if LOG_THREAD_ID: + label = label + ':' + threading.currentThread().getName() + logger.log(level, '(%s) %s' % (label, message), exc_info=exc_info) + +REPR_LIMIT = 60 + +def short_repr(obj): + "Return an object repr limited to REPR_LIMIT bytes." + + # Some of the objects being repr'd are large strings. A lot of memory + # would be wasted to repr them and then truncate, so they are treated + # specially in this function. + # Also handle short repr of a tuple containing a long string. + + # This strategy works well for arguments to StorageServer methods. + # The oid is usually first and will get included in its entirety. + # The pickle is near the beginning, too, and you can often fit the + # module name in the pickle. + + if isinstance(obj, str): + if len(obj) > REPR_LIMIT: + r = repr(obj[:REPR_LIMIT]) + else: + r = repr(obj) + if len(r) > REPR_LIMIT: + r = r[:REPR_LIMIT-4] + '...' + r[-1] + return r + elif isinstance(obj, (list, tuple)): + elts = [] + size = 0 + for elt in obj: + r = short_repr(elt) + elts.append(r) + size += len(r) + if size > REPR_LIMIT: + break + if isinstance(obj, tuple): + r = "(%s)" % (", ".join(elts)) + else: + r = "[%s]" % (", ".join(elts)) + else: + r = repr(obj) + if len(r) > REPR_LIMIT: + return r[:REPR_LIMIT] + '...' 
+ else: + return r diff --git a/thesisenv/lib/python3.6/site-packages/ZEO/tests/ZEO4/zrpc/marshal.py b/thesisenv/lib/python3.6/site-packages/ZEO/tests/ZEO4/zrpc/marshal.py new file mode 100644 index 0000000..11b3885 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO/tests/ZEO4/zrpc/marshal.py @@ -0,0 +1,139 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## +import logging + +from ZEO._compat import Unpickler, Pickler, BytesIO, PY3, PYPY +from .error import ZRPCError +from .log import log, short_repr + +PY2 = not PY3 + +def encode(*args): # args: (msgid, flags, name, args) + # (We used to have a global pickler, but that's not thread-safe. :-( ) + + # It's not thread safe if, in the couse of pickling, we call the + # Python interpeter, which releases the GIL. + + # Note that args may contain very large binary pickles already; for + # this reason, it's important to use proto 1 (or higher) pickles here + # too. For a long time, this used proto 0 pickles, and that can + # bloat our pickle to 4x the size (due to high-bit and control bytes + # being represented by \xij escapes in proto 0). + # Undocumented: cPickle.Pickler accepts a lone protocol argument; + # pickle.py does not. + # XXX: Py3: Needs optimization. 
+ f = BytesIO() + pickler = Pickler(f, 3) + pickler.fast = 1 + pickler.dump(args) + res = f.getvalue() + return res + + + +if PY3: + # XXX: Py3: Needs optimization. + fast_encode = encode +elif PYPY: + # can't use the python-2 branch, need a new pickler + # every time, getvalue() only works once + fast_encode = encode +else: + def fast_encode(): + # Only use in cases where you *know* the data contains only basic + # Python objects + pickler = Pickler(1) + pickler.fast = 1 + dump = pickler.dump + def fast_encode(*args): + return dump(args, 1) + return fast_encode + fast_encode = fast_encode() + +def decode(msg): + """Decodes msg and returns its parts""" + unpickler = Unpickler(BytesIO(msg)) + unpickler.find_global = find_global + try: + unpickler.find_class = find_global # PyPy, zodbpickle, the non-c-accelerated version + except AttributeError: + pass + try: + return unpickler.load() # msgid, flags, name, args + except: + log("can't decode message: %s" % short_repr(msg), + level=logging.ERROR) + raise + +def server_decode(msg): + """Decodes msg and returns its parts""" + unpickler = Unpickler(BytesIO(msg)) + unpickler.find_global = server_find_global + try: + unpickler.find_class = server_find_global # PyPy, zodbpickle, the non-c-accelerated version + except AttributeError: + pass + + try: + return unpickler.load() # msgid, flags, name, args + except: + log("can't decode message: %s" % short_repr(msg), + level=logging.ERROR) + raise + +_globals = globals() +_silly = ('__doc__',) + +exception_type_type = type(Exception) + +_SAFE_MODULE_NAMES = ('ZopeUndo.Prefix', 'copy_reg', '__builtin__', 'zodbpickle') + +def find_global(module, name): + """Helper for message unpickler""" + try: + m = __import__(module, _globals, _globals, _silly) + except ImportError as msg: + raise ZRPCError("import error %s: %s" % (module, msg)) + + try: + r = getattr(m, name) + except AttributeError: + raise ZRPCError("module %s has no global %s" % (module, name)) + + safe = getattr(r, 
'__no_side_effects__', 0) or (PY2 and module in _SAFE_MODULE_NAMES) + if safe: + return r + + # TODO: is there a better way to do this? + if type(r) == exception_type_type and issubclass(r, Exception): + return r + + raise ZRPCError("Unsafe global: %s.%s" % (module, name)) + +def server_find_global(module, name): + """Helper for message unpickler""" + if module not in _SAFE_MODULE_NAMES: + raise ImportError("Module not allowed: %s" % (module,)) + + try: + m = __import__(module, _globals, _globals, _silly) + except ImportError as msg: + raise ZRPCError("import error %s: %s" % (module, msg)) + + try: + r = getattr(m, name) + except AttributeError: + raise ZRPCError("module %s has no global %s" % (module, name)) + + return r diff --git a/thesisenv/lib/python3.6/site-packages/ZEO/tests/ZEO4/zrpc/server.py b/thesisenv/lib/python3.6/site-packages/ZEO/tests/ZEO4/zrpc/server.py new file mode 100644 index 0000000..b83cc00 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO/tests/ZEO4/zrpc/server.py @@ -0,0 +1,124 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## +import asyncore +import socket + +# _has_dualstack: True if the dual-stack sockets are supported +try: + # Check whether IPv6 sockets can be created + s = socket.socket(socket.AF_INET6, socket.SOCK_STREAM) +except (socket.error, AttributeError): + _has_dualstack = False +else: + # Check whether enabling dualstack (disabling v6only) works + try: + s.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_V6ONLY, False) + except (socket.error, AttributeError): + _has_dualstack = False + else: + _has_dualstack = True + s.close() + del s + +from .connection import Connection +from .log import log +from .log import logger +import logging + +# Export the main asyncore loop +loop = asyncore.loop + +class Dispatcher(asyncore.dispatcher): + """A server that accepts incoming RPC connections""" + __super_init = asyncore.dispatcher.__init__ + + def __init__(self, addr, factory=Connection, map=None): + self.__super_init(map=map) + self.addr = addr + self.factory = factory + self._open_socket() + + def _open_socket(self): + if type(self.addr) == tuple: + if self.addr[0] == '' and _has_dualstack: + # Wildcard listen on all interfaces, both IPv4 and + # IPv6 if possible + self.create_socket(socket.AF_INET6, socket.SOCK_STREAM) + self.socket.setsockopt( + socket.IPPROTO_IPV6, socket.IPV6_V6ONLY, False) + elif ':' in self.addr[0]: + self.create_socket(socket.AF_INET6, socket.SOCK_STREAM) + if _has_dualstack: + # On Linux, IPV6_V6ONLY is off by default. 
+ # If the user explicitly asked for IPv6, don't bind to IPv4 + self.socket.setsockopt( + socket.IPPROTO_IPV6, socket.IPV6_V6ONLY, True) + else: + self.create_socket(socket.AF_INET, socket.SOCK_STREAM) + else: + self.create_socket(socket.AF_UNIX, socket.SOCK_STREAM) + self.set_reuse_addr() + log("listening on %s" % str(self.addr), logging.INFO) + + for i in range(25): + try: + self.bind(self.addr) + except Exception as exc: + log("bind failed %s waiting", i) + if i == 24: + raise + else: + time.sleep(5) + else: + break + + self.listen(5) + + def writable(self): + return 0 + + def readable(self): + return 1 + + def handle_accept(self): + try: + sock, addr = self.accept() + except socket.error as msg: + log("accepted failed: %s" % msg) + return + + + # We could short-circuit the attempt below in some edge cases + # and avoid a log message by checking for addr being None. + # Unfortunately, our test for the code below, + # quick_close_doesnt_kill_server, causes addr to be None and + # we'd have to write a test for the non-None case, which is + # *even* harder to provoke. :/ So we'll leave things as they + # are for now. + + # It might be better to check whether the socket has been + # closed, but I don't see a way to do that. :( + + # Drop flow-info from IPv6 addresses + if addr: # Sometimes None on Mac. See above. 
+ addr = addr[:2] + + try: + c = self.factory(sock, addr) + except: + if sock.fileno() in asyncore.socket_map: + del asyncore.socket_map[sock.fileno()] + logger.exception("Error in handle_accept") + else: + log("connect from %s: %s" % (repr(addr), c)) diff --git a/thesisenv/lib/python3.6/site-packages/ZEO/tests/ZEO4/zrpc/smac.py b/thesisenv/lib/python3.6/site-packages/ZEO/tests/ZEO4/zrpc/smac.py new file mode 100644 index 0000000..c7077dd --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO/tests/ZEO4/zrpc/smac.py @@ -0,0 +1,342 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## +"""Sized Message Async Connections. + +This class extends the basic asyncore layer with a record-marking +layer. The message_output() method accepts an arbitrary sized string +as its argument. It sends over the wire the length of the string +encoded using struct.pack('>I') and the string itself. The receiver +passes the original string to message_input(). + +This layer also supports an optional message authentication code +(MAC). If a session key is present, it uses HMAC-SHA-1 to generate a +20-byte MAC. If a MAC is present, the high-order bit of the length +is set to 1 and the MAC immediately follows the length. +""" + +import asyncore +import errno +import six +try: + import hmac +except ImportError: + from . 
import _hmac as hmac +import socket +import struct +import threading + +from .log import log +from .error import DisconnectedError +from .. import hash as ZEO_hash + + +# Use the dictionary to make sure we get the minimum number of errno +# entries. We expect that EWOULDBLOCK == EAGAIN on most systems -- +# or that only one is actually used. + +tmp_dict = {errno.EWOULDBLOCK: 0, + errno.EAGAIN: 0, + errno.EINTR: 0, + } +expected_socket_read_errors = tuple(tmp_dict.keys()) + +tmp_dict = {errno.EAGAIN: 0, + errno.EWOULDBLOCK: 0, + errno.ENOBUFS: 0, + errno.EINTR: 0, + } +expected_socket_write_errors = tuple(tmp_dict.keys()) +del tmp_dict + +# We chose 60000 as the socket limit by looking at the largest strings +# that we could pass to send() without blocking. +SEND_SIZE = 60000 + +MAC_BIT = 0x80000000 + +_close_marker = object() + +class SizedMessageAsyncConnection(asyncore.dispatcher): + __super_init = asyncore.dispatcher.__init__ + __super_close = asyncore.dispatcher.close + + __closed = True # Marker indicating that we're closed + + socket = None # to outwit Sam's getattr + + def __init__(self, sock, addr, map=None): + self.addr = addr + # __input_lock protects __inp, __input_len, __state, __msg_size + self.__input_lock = threading.Lock() + self.__inp = None # None, a single String, or a list + self.__input_len = 0 + # Instance variables __state, __msg_size and __has_mac work together: + # when __state == 0: + # __msg_size == 4, and the next thing read is a message size; + # __has_mac is set according to the MAC_BIT in the header + # when __state == 1: + # __msg_size is variable, and the next thing read is a message. + # __has_mac indicates if we're in MAC mode or not (and + # therefore, if we need to check the mac header) + # The next thing read is always of length __msg_size. + # The state alternates between 0 and 1. 
+ self.__state = 0 + self.__has_mac = 0 + self.__msg_size = 4 + self.__output_messages = [] + self.__output = [] + self.__closed = False + # Each side of the connection sends and receives messages. A + # MAC is generated for each message and depends on each + # previous MAC; the state of the MAC generator depends on the + # history of operations it has performed. So the MACs must be + # generated in the same order they are verified. + + # Each side is guaranteed to receive messages in the order + # they are sent, but there is no ordering constraint between + # message sends and receives. If the two sides are A and B + # and message An indicates the nth message sent by A, then + # A1 A2 B1 B2 and A1 B1 B2 A2 are both legitimate total + # orderings of the messages. + + # As a result, there must be seperate MAC generators for each + # side of the connection. If not, the generator state would + # be different after A1 A2 B1 B2 than it would be after + # A1 B1 B2 A2; if the generator state was different, the MAC + # could not be verified. + self.__hmac_send = None + self.__hmac_recv = None + + self.__super_init(sock, map) + + # asyncore overwrites addr with the getpeername result + # restore our value + self.addr = addr + + def setSessionKey(self, sesskey): + log("set session key %r" % sesskey) + + # Low-level construction is now delayed until data are sent. + # This is to allow use of iterators that generate messages + # only when we're ready to do I/O so that we can effeciently + # transmit large files. Because we delay messages, we also + # have to delay setting the session key to retain proper + # ordering. + + # The low-level output queue supports strings, a special close + # marker, and iterators. It doesn't support callbacks. We + # can create a allback by providing an iterator that doesn't + # yield anything. + + # The hack fucntion below is a callback in iterator's + # clothing. 
:) It never yields anything, but is a generator + # and thus iterator, because it contains a yield statement. + + def hack(): + self.__hmac_send = hmac.HMAC(sesskey, digestmod=ZEO_hash) + self.__hmac_recv = hmac.HMAC(sesskey, digestmod=ZEO_hash) + if False: + yield b'' + + self.message_output(hack()) + + def get_addr(self): + return self.addr + + # TODO: avoid expensive getattr calls? Can't remember exactly what + # this comment was supposed to mean, but it has something to do + # with the way asyncore uses getattr and uses if sock: + def __nonzero__(self): + return 1 + + def handle_read(self): + self.__input_lock.acquire() + try: + # Use a single __inp buffer and integer indexes to make this fast. + try: + d = self.recv(8192) + except socket.error as err: + # Python >= 3.3 makes select.error an alias of OSError, + # which is not subscriptable but does have the 'errno' attribute + err_errno = getattr(err, 'errno', None) or err[0] + if err_errno in expected_socket_read_errors: + return + raise + if not d: + return + + input_len = self.__input_len + len(d) + msg_size = self.__msg_size + state = self.__state + has_mac = self.__has_mac + + inp = self.__inp + if msg_size > input_len: + if inp is None: + self.__inp = d + elif isinstance(self.__inp, six.binary_type): + self.__inp = [self.__inp, d] + else: + self.__inp.append(d) + self.__input_len = input_len + return # keep waiting for more input + + # load all previous input and d into single string inp + if isinstance(inp, six.binary_type): + inp = inp + d + elif inp is None: + inp = d + else: + inp.append(d) + inp = b"".join(inp) + + offset = 0 + while (offset + msg_size) <= input_len: + msg = inp[offset:offset + msg_size] + offset = offset + msg_size + if not state: + msg_size = struct.unpack(">I", msg)[0] + has_mac = msg_size & MAC_BIT + if has_mac: + msg_size ^= MAC_BIT + msg_size += 20 + elif self.__hmac_send: + raise ValueError("Received message without MAC") + state = 1 + else: + msg_size = 4 + state = 0 + # 
Obscure: We call message_input() with __input_lock + # held!!! And message_input() may end up calling + # message_output(), which has its own lock. But + # message_output() cannot call message_input(), so + # the locking order is always consistent, which + # prevents deadlock. Also, message_input() may + # take a long time, because it can cause an + # incoming call to be handled. During all this + # time, the __input_lock is held. That's a good + # thing, because it serializes incoming calls. + if has_mac: + mac = msg[:20] + msg = msg[20:] + if self.__hmac_recv: + self.__hmac_recv.update(msg) + _mac = self.__hmac_recv.digest() + if mac != _mac: + raise ValueError("MAC failed: %r != %r" + % (_mac, mac)) + else: + log("Received MAC but no session key set") + elif self.__hmac_send: + raise ValueError("Received message without MAC") + self.message_input(msg) + + self.__state = state + self.__has_mac = has_mac + self.__msg_size = msg_size + self.__inp = inp[offset:] + self.__input_len = input_len - offset + finally: + self.__input_lock.release() + + def readable(self): + return True + + def writable(self): + return bool(self.__output_messages or self.__output) + + def should_close(self): + self.__output_messages.append(_close_marker) + + def handle_write(self): + output = self.__output + messages = self.__output_messages + while output or messages: + + # Process queued messages until we have enough output + size = sum((len(s) for s in output)) + while (size <= SEND_SIZE) and messages: + message = messages[0] + if isinstance(message, six.binary_type): + size += self.__message_output(messages.pop(0), output) + elif isinstance(message, six.text_type): + # XXX This can silently lead to data loss and client hangs + # if asserts aren't enabled. 
Encountered this under Python3 + # and 'ruok' protocol + assert False, "Got a unicode message: %s" % repr(message) + elif message is _close_marker: + del messages[:] + del output[:] + return self.close() + else: + try: + message = six.advance_iterator(message) + except StopIteration: + messages.pop(0) + else: + assert(isinstance(message, six.binary_type)) + size += self.__message_output(message, output) + + v = b"".join(output) + del output[:] + + try: + n = self.send(v) + except socket.error as err: + # Fix for https://bugs.launchpad.net/zodb/+bug/182833 + # ensure the above mentioned "output" invariant + output.insert(0, v) + # Python >= 3.3 makes select.error an alias of OSError, + # which is not subscriptable but does have the 'errno' attribute + err_errno = getattr(err, 'errno', None) or err[0] + if err_errno in expected_socket_write_errors: + break # we couldn't write anything + raise + + if n < len(v): + output.append(v[n:]) + break # we can't write any more + + def handle_close(self): + self.close() + + def message_output(self, message): + if self.__closed: + raise DisconnectedError( + "This action is temporarily unavailable.

") + self.__output_messages.append(message) + + def __message_output(self, message, output): + # do two separate appends to avoid copying the message string + size = 4 + if self.__hmac_send: + output.append(struct.pack(">I", len(message) | MAC_BIT)) + self.__hmac_send.update(message) + output.append(self.__hmac_send.digest()) + size += 20 + else: + output.append(struct.pack(">I", len(message))) + + if len(message) <= SEND_SIZE: + output.append(message) + else: + for i in range(0, len(message), SEND_SIZE): + output.append(message[i:i+SEND_SIZE]) + + return size + len(message) + + def close(self): + if not self.__closed: + self.__closed = True + self.__super_close() diff --git a/thesisenv/lib/python3.6/site-packages/ZEO/tests/ZEO4/zrpc/trigger.py b/thesisenv/lib/python3.6/site-packages/ZEO/tests/ZEO4/zrpc/trigger.py new file mode 100644 index 0000000..70797e4 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO/tests/ZEO4/zrpc/trigger.py @@ -0,0 +1,235 @@ +from __future__ import print_function +############################################################################## +# +# Copyright (c) 2001-2005 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## + +from __future__ import with_statement + +import asyncore +import os +import socket +import errno + +from ZODB.utils import positive_id +from ZEO._compat import thread, get_ident + +# Original comments follow; they're hard to follow in the context of +# ZEO's use of triggers. TODO: rewrite from a ZEO perspective. 
+ +# Wake up a call to select() running in the main thread. +# +# This is useful in a context where you are using Medusa's I/O +# subsystem to deliver data, but the data is generated by another +# thread. Normally, if Medusa is in the middle of a call to +# select(), new output data generated by another thread will have +# to sit until the call to select() either times out or returns. +# If the trigger is 'pulled' by another thread, it should immediately +# generate a READ event on the trigger object, which will force the +# select() invocation to return. +# +# A common use for this facility: letting Medusa manage I/O for a +# large number of connections; but routing each request through a +# thread chosen from a fixed-size thread pool. When a thread is +# acquired, a transaction is performed, but output data is +# accumulated into buffers that will be emptied more efficiently +# by Medusa. [picture a server that can process database queries +# rapidly, but doesn't want to tie up threads waiting to send data +# to low-bandwidth connections] +# +# The other major feature provided by this class is the ability to +# move work back into the main thread: if you call pull_trigger() +# with a thunk argument, when select() wakes up and receives the +# event it will call your thunk from within that thread. The main +# purpose of this is to remove the need to wrap thread locks around +# Medusa's data structures, which normally do not need them. [To see +# why this is true, imagine this scenario: A thread tries to push some +# new data onto a channel's outgoing data queue at the same time that +# the main thread is trying to remove some] + +class _triggerbase(object): + """OS-independent base class for OS-dependent trigger class.""" + + kind = None # subclass must set to "pipe" or "loopback"; used by repr + + def __init__(self): + self._closed = False + + # `lock` protects the `thunks` list from being traversed and + # appended to simultaneously. 
+ self.lock = thread.allocate_lock() + + # List of no-argument callbacks to invoke when the trigger is + # pulled. These run in the thread running the asyncore mainloop, + # regardless of which thread pulls the trigger. + self.thunks = [] + + def readable(self): + return 1 + + def writable(self): + return 0 + + def handle_connect(self): + pass + + def handle_close(self): + self.close() + + # Override the asyncore close() method, because it doesn't know about + # (so can't close) all the gimmicks we have open. Subclass must + # supply a _close() method to do platform-specific closing work. _close() + # will be called iff we're not already closed. + def close(self): + if not self._closed: + self._closed = True + self.del_channel() + self._close() # subclass does OS-specific stuff + + def _close(self): # see close() above; subclass must supply + raise NotImplementedError + + def pull_trigger(self, *thunk): + if thunk: + with self.lock: + self.thunks.append(thunk) + try: + self._physical_pull() + except Exception: + if not self._closed: + raise + + # Subclass must supply _physical_pull, which does whatever the OS + # needs to do to provoke the "write" end of the trigger. 
+ def _physical_pull(self): + raise NotImplementedError + + def handle_read(self): + try: + self.recv(8192) + except socket.error: + return + + while 1: + with self.lock: + if self.thunks: + thunk = self.thunks.pop(0) + else: + return + try: + thunk[0](*thunk[1:]) + except: + nil, t, v, tbinfo = asyncore.compact_traceback() + print(('exception in trigger thunk:' + ' (%s:%s %s)' % (t, v, tbinfo))) + + def __repr__(self): + return '' % (self.kind, positive_id(self)) + +if os.name == 'posix': + + class trigger(_triggerbase, asyncore.file_dispatcher): + kind = "pipe" + + def __init__(self, map=None): + _triggerbase.__init__(self) + r, self.trigger = os.pipe() + asyncore.file_dispatcher.__init__(self, r, map) + + if self.socket.fd != r: + # Starting in Python 2.6, the descriptor passed to + # file_dispatcher gets duped and assigned to + # self.socket.fd. This breals the instantiation semantics and + # is a bug imo. I dount it will get fixed, but maybe + # it will. Who knows. For that reason, we test for the + # fd changing rather than just checking the Python version. + os.close(r) + + def _close(self): + os.close(self.trigger) + asyncore.file_dispatcher.close(self) + + def _physical_pull(self): + os.write(self.trigger, b'x') + +else: + # Windows version; uses just sockets, because a pipe isn't select'able + # on Windows. + + class BindError(Exception): + pass + + class trigger(_triggerbase, asyncore.dispatcher): + kind = "loopback" + + def __init__(self, map=None): + _triggerbase.__init__(self) + + # Get a pair of connected sockets. The trigger is the 'w' + # end of the pair, which is connected to 'r'. 'r' is put + # in the asyncore socket map. "pulling the trigger" then + # means writing something on w, which will wake up r. + + w = socket.socket() + # Disable buffering -- pulling the trigger sends 1 byte, + # and we want that sent immediately, to wake up asyncore's + # select() ASAP. 
+ w.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) + + count = 0 + while 1: + count += 1 + # Bind to a local port; for efficiency, let the OS pick + # a free port for us. + # Unfortunately, stress tests showed that we may not + # be able to connect to that port ("Address already in + # use") despite that the OS picked it. This appears + # to be a race bug in the Windows socket implementation. + # So we loop until a connect() succeeds (almost always + # on the first try). See the long thread at + # http://mail.zope.org/pipermail/zope/2005-July/160433.html + # for hideous details. + a = socket.socket() + a.bind(("127.0.0.1", 0)) + connect_address = a.getsockname() # assigned (host, port) pair + a.listen(1) + try: + w.connect(connect_address) + break # success + except socket.error as detail: + if detail[0] != errno.WSAEADDRINUSE: + # "Address already in use" is the only error + # I've seen on two WinXP Pro SP2 boxes, under + # Pythons 2.3.5 and 2.4.1. + raise + # (10048, 'Address already in use') + # assert count <= 2 # never triggered in Tim's tests + if count >= 10: # I've never seen it go above 2 + a.close() + w.close() + raise BindError("Cannot bind trigger!") + # Close `a` and try again. Note: I originally put a short + # sleep() here, but it didn't appear to help or hurt. 
+ a.close() + + r, addr = a.accept() # r becomes asyncore's (self.)socket + a.close() + self.trigger = w + asyncore.dispatcher.__init__(self, r, map) + + def _close(self): + # self.socket is r, and self.trigger is w, from __init__ + self.socket.close() + self.trigger.close() + + def _physical_pull(self): + self.trigger.send('x') diff --git a/thesisenv/lib/python3.6/site-packages/ZEO/tests/__init__.py b/thesisenv/lib/python3.6/site-packages/ZEO/tests/__init__.py new file mode 100644 index 0000000..be2b45b --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO/tests/__init__.py @@ -0,0 +1,13 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## diff --git a/thesisenv/lib/python3.6/site-packages/ZEO/tests/client-config.test b/thesisenv/lib/python3.6/site-packages/ZEO/tests/client-config.test new file mode 100644 index 0000000..afeef58 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO/tests/client-config.test @@ -0,0 +1,77 @@ +ZEO Client Configuration +======================== + +Here we'll describe (and test) the various ZEO Client configuration +options. To facilitate this, we'l start a server that our client can +connect to: + + >>> addr, _ = start_server(blob_dir='server-blobs') + +The simplest client configuration specified a server address: + + >>> import ZODB.config + >>> storage = ZODB.config.storageFromString(""" + ... + ... server %s:%s + ... + ... 
""" % addr) + + >>> storage.getName(), storage.__class__.__name__ + ... # doctest: +ELLIPSIS + ("[('127.0.0.1', ...)] (connected)", 'ClientStorage') + + >>> storage.blob_dir + >>> storage._storage + '1' + >>> storage._cache.maxsize + 20971520 + >>> storage._cache.path + >>> storage._is_read_only + False + >>> storage._read_only_fallback + False + >>> storage._blob_cache_size + + >>> storage.close() + + >>> storage = ZODB.config.storageFromString(""" + ... + ... server %s:%s + ... blob-dir blobs + ... storage 2 + ... cache-size 100 + ... name bob + ... client cache + ... read-only true + ... drop-cache-rather-verify true + ... blob-cache-size 1000MB + ... blob-cache-size-check 10 + ... wait false + ... + ... """ % addr) + + + >>> storage.getName(), storage.__class__.__name__ + ('bob (disconnected)', 'ClientStorage') + + >>> storage.blob_dir + 'blobs' + >>> storage._storage + '2' + >>> storage._cache.maxsize + 100 + >>> import os + >>> storage._cache.path == os.path.abspath('cache-2.zec') + True + + >>> storage._is_read_only + True + >>> storage._read_only_fallback + False + >>> storage._blob_cache_size + 1048576000 + + >>> print(storage._blob_cache_size_check) + 104857600 + + >>> storage.close() diff --git a/thesisenv/lib/python3.6/site-packages/ZEO/tests/client.pem b/thesisenv/lib/python3.6/site-packages/ZEO/tests/client.pem new file mode 100644 index 0000000..2293c5d --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO/tests/client.pem @@ -0,0 +1,19 @@ +-----BEGIN CERTIFICATE----- +MIIDCDCCAfACCQCbN0hYgirJXTANBgkqhkiG9w0BAQsFADBFMQswCQYDVQQGEwJB +VTETMBEGA1UECAwKU29tZS1TdGF0ZTEhMB8GA1UECgwYSW50ZXJuZXQgV2lkZ2l0 +cyBQdHkgTHRkMCAXDTE3MDcxMTEzNTM0OFoYDzQ3NTUwNjA3MTM1MzQ4WjBFMQsw +CQYDVQQGEwJBVTETMBEGA1UECAwKU29tZS1TdGF0ZTEhMB8GA1UECgwYSW50ZXJu +ZXQgV2lkZ2l0cyBQdHkgTHRkMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC +AQEA9K5RplDXJpZcrkP+2AjaMxXOZ1ANRDY77BdUuAxtKoZIT0Mc9kUNr4TvLab4 +CLVc5iwwEfir7xPSPEexkZl88IJuqjWS62d11JFlbU62Vq54ZcEYiimnuR1h5zeU 
+zKGqNcMExbTo7jcgSj3FnH4wCUeWWOUEVasnpafL1O4ViT9BuaxWXKD7gnoFSzg6 +QblzCO2fxknqadLdowTmnyF5EUi2ufaMyY47akZFC8Bf08GnrZtAsENFRHTkZf76 +YNkivvN+Gnfr60ktiL5HsCarAkjXYqpfi8YpwrlFkqnucqv3VzQwOQsvu94UJScu +iD5V9MEc2n3lpk+IrQeaDNmC6QIDAQABMA0GCSqGSIb3DQEBCwUAA4IBAQDMBjeH +tsFkr5GgM3wczmZrCZxq/UEK++L0KGm1k6zGAB54bf01J9QgvBBmlKK7sPC00I/h +9MWD4bs4IfQWhyr28mYui+rJl7C9V4mmwY78DqG/gRi/qx+YxhIuuvZ+wyqtAOl9 +5e5Zn8puT6mJEI23EsBpmRKTyqE8acMZdkjFhVq4Ytxe5foyqNVYoK8PS/RgdUIr +ZGHXUhhiCI1W+OrB2/GpeTyTUV4itBBLaW6+DuWYnWL4kljJDx0BsK2UOHvS1mc3 +mBPJEEqIaWxnaAARe7RPkfOvcWknhfz2oNfDIAhMe22/mY9ka3dwtB9gx+Krpf0M +rvtOwosJMZDMBlRC +-----END CERTIFICATE----- diff --git a/thesisenv/lib/python3.6/site-packages/ZEO/tests/client_key.pem b/thesisenv/lib/python3.6/site-packages/ZEO/tests/client_key.pem new file mode 100644 index 0000000..30c1fc8 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO/tests/client_key.pem @@ -0,0 +1,27 @@ +-----BEGIN RSA PRIVATE KEY----- +MIIEowIBAAKCAQEA9K5RplDXJpZcrkP+2AjaMxXOZ1ANRDY77BdUuAxtKoZIT0Mc +9kUNr4TvLab4CLVc5iwwEfir7xPSPEexkZl88IJuqjWS62d11JFlbU62Vq54ZcEY +iimnuR1h5zeUzKGqNcMExbTo7jcgSj3FnH4wCUeWWOUEVasnpafL1O4ViT9BuaxW +XKD7gnoFSzg6QblzCO2fxknqadLdowTmnyF5EUi2ufaMyY47akZFC8Bf08GnrZtA +sENFRHTkZf76YNkivvN+Gnfr60ktiL5HsCarAkjXYqpfi8YpwrlFkqnucqv3VzQw +OQsvu94UJScuiD5V9MEc2n3lpk+IrQeaDNmC6QIDAQABAoIBAErM27MvdYabYvv3 +V3otwp7pZK8avuOCfPEg9MpLKjhc0tBAYSM8WwG0bvYS3DK1VxAapBtqXQ16jsPU +2wj61kIkbbZlKGQEvfXc+Rfgf0eikLXywRDDyT2DKQHpcPjZ11IWK2hRdQAWJC3u +EnJT9VVw6BqG8LtL1pQC5wJSQo0xC1sJ/MTr/szLvKRjuYZE7YStpUfV6RYq2KQF +7Oa9nPKtxlIbDCa7z4S6y5yiusYrSSFilK0pVSU+9789kGNZMLzKbnGu+YSVB/Bx +MLXWRAD8DV9zign255pIU/xI5VKjOwID38JfgdcebV/KeCPu8W6jKKbfUsUCqcjL +YjDtHYECgYEA/SaxUoejMOasHppnsAewy/I+DzMuX+KYztqAnzjsuGwRxmxjYyQe +w7EidinM3WuloJIBZzA9aULmWjSKOfTsuGm+Mokucbbw9jaWVT6Co3kWrHySInhZ +sfTwHKz5ojGBcQD4l06xaVM9utNi6r8wvJijFl5xIsMzc5szEkWs9vkCgYEA9285 +bGSAAwzUFHVk1pyLKozM2gOtF5rrAUQlWtNVU6K2tw+MKEGara0f+HFZrJZC9Rh2 
+HBm2U9PPt/kJ73HErQG+E6n0jfol8TQ3ZKz3tlSxImh0CiaKLnh4ahf7o8zU16nT +XDfu3+Rf11EhORXYfZLmdubfCOD4ZaB2/405N3ECgYEA7b4k0gkoLYi1JJiFwD+4 +vhBmUAgVCV/ZeoqiOOZRCnITz3GDdVw6uDXm02o2R8wM5Fu6jZo0UmLNyvGEzyFC +H37PbM6Am7LfYZuqW6w1LClQLfVfmJfGROZvib65QqWTlvj+fbsdyniuhIJ5Z1Tf +BH+kyiEvxyHjdDLRJ9vfsKECgYA8P9MFt7sMAxWpHaS+NUQVyk8fTwHY25oZptRJ +t2fxg49mJ90C+GaHn75HKqKhSb1oHNq1oPUqmEreC0AGE/fGAMSd2SZ5Y83VW9eZ +JhzzQtAXBsQqrJO9GQyJGOnnSrsRAIM800nRLrS/ozupwM4EVb7UeQcaDF2vsVEI +jQS/oQKBgHj26xn7AunX5GS8EYe4GPj4VZehmlnEKONGrPrr25aWkaY4kDJgMLUb +AxwIQHbCMm5TMqIxi5l39/O9dxuuGCkOs37j7C3f3VVFuQW1KKyHem9OClgFDZj3 +tEEk1N3NevrH06NlmAHweHMuJXL8mBvM375zH9tSw5mgG0OMRbnG +-----END RSA PRIVATE KEY----- diff --git a/thesisenv/lib/python3.6/site-packages/ZEO/tests/drop_cache_rather_than_verify.txt b/thesisenv/lib/python3.6/site-packages/ZEO/tests/drop_cache_rather_than_verify.txt new file mode 100644 index 0000000..72ed7de --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO/tests/drop_cache_rather_than_verify.txt @@ -0,0 +1,170 @@ +Avoiding cache verifification +============================= + +For large databases it is common to also use very large ZEO cache +files. If a client has beed disconnected for too long, the server +can't play back missing invalidations. In this case, the cache is +cleared. When this happens, a ZEO.interfaces.StaleCache event is +published, largely for backward compatibility. + +ClientStorage used to provide an option to drop it's cache rather than +doing verification. This is now the only behavior. Cache +verification is no longer supported. + +- Invalidates all object caches + +- Drops or clears it's client cache. (The end result is that the cache + is working but empty.) + +- Logs a CRITICAL message. + +Here's an example that shows that this is actually what happens. 
+ +Start a server, create a client to it and commit some data + + >>> addr, admin = start_server(keep=1) + >>> import ZEO, transaction + >>> db = ZEO.DB(addr, client='cache', name='test') + >>> wait_connected(db.storage) + >>> conn = db.open() + >>> conn.root()[1] = conn.root().__class__() + >>> conn.root()[1].x = 1 + >>> transaction.commit() + >>> len(db.storage._cache) + 3 + +Now, we'll stop the server and restart with a different address: + + >>> stop_server(admin) + >>> addr2, admin = start_server(keep=1) + +And create another client and write some data to it: + + >>> db2 = ZEO.DB(addr2) + >>> wait_connected(db2.storage) + >>> conn2 = db2.open() + >>> for i in range(5): + ... conn2.root()[1].x += 1 + ... transaction.commit() + >>> db2.close() + >>> stop_server(admin) + +Now, we'll restart the server. Before we do that, we'll capture +logging and event data: + + >>> import logging, zope.testing.loggingsupport, ZODB.event + >>> handler = zope.testing.loggingsupport.InstalledHandler( + ... 'ZEO', level=logging.ERROR) + >>> events = [] + >>> def event_handler(e): + ... if hasattr(e, 'storage'): + ... events.append(( + ... len(e.storage._server.client.cache), str(handler), e.__class__.__name__)) + + >>> old_notify = ZODB.event.notify + >>> ZODB.event.notify = event_handler + +Note that the event handler is saving away the length of the cache and +the state of the log handler. We'll use this to show that the event +is generated before the cache is dropped or the message is logged. + +Now, we'll restart the server on the original address: + + >>> _, admin = start_server(zeo_conf=dict(invalidation_queue_size=1), + ... addr=addr, keep=1) + + >>> wait_connected(db.storage) + +Now, let's verify our assertions above: + +- Publishes a stale-cache event. + + >>> for e in events: + ... print(e) + (3, '', 'StaleCache') + + Note that the length of the cache when the event handler was + called waa non-zero. This is because the cache wasn't cleared + yet. 
Similarly, the dropping-cache message hasn't been logged + yet. + + >>> del events[:] + +- Drops or clears it's client cache. (The end result is that the cache + is working but empty.) + + >>> len(db.storage._cache) + 0 + +- Invalidates all object caches + + >>> transaction.abort() + >>> conn.root()._p_changed + +- Logs a CRITICAL message. + + >>> print(handler) # doctest: +ELLIPSIS + ZEO... CRITICAL + test dropping stale cache + + >>> handler.clear() + +If we access the root object, it'll be loaded from the server: + + >>> conn.root()[1].x + 6 + +Similarly, if we simply disconnect the client, and write data from +another client: + + >>> db.close() + + >>> db2 = ZEO.DB(addr) + >>> wait_connected(db2.storage) + >>> conn2 = db2.open() + >>> for i in range(5): + ... conn2.root()[1].x += 1 + ... transaction.commit() + >>> db2.close() + + >>> db = ZEO.DB(addr, drop_cache_rather_verify=True, client='cache', + ... name='test') + >>> wait_connected(db.storage) + + +- Drops or clears it's client cache. (The end result is that the cache + is working but empty.) + + >>> len(db.storage._cache) <= 1 + True + +(When a database is created, it checks to make sure the root object is +in the database, which is why we get 1, rather than 0 objects in the cache.) + +- Publishes a stake-cache event. + + >>> for e in events: + ... print(e) + (2, '', 'StaleCache') + + >>> del events[:] + +- Logs a CRITICAL message. + + >>> print(handler) # doctest: +ELLIPSIS + ZEO... CRITICAL + test dropping stale cache + + >>> handler.clear() + +If we access the root object, it'll be loaded from the server: + + >>> conn = db.open() + >>> conn.root()[1].x + 11 + +.. 
Cleanup + + >>> db.close() + >>> handler.uninstall() + >>> ZODB.event.notify = old_notify diff --git a/thesisenv/lib/python3.6/site-packages/ZEO/tests/dynamic_server_ports.test b/thesisenv/lib/python3.6/site-packages/ZEO/tests/dynamic_server_ports.test new file mode 100644 index 0000000..02c0f0c --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO/tests/dynamic_server_ports.test @@ -0,0 +1,85 @@ +The storage server can be told to bind to port 0, allowing the OS to +pick a port dynamically. For this to be useful, there needs to be a +way to tell someone. For this reason, the server posts events to +ZODB.notify. + + >>> import ZODB.event + >>> old_notify = ZODB.event.notify + + >>> last_event = None + >>> def notify(event): + ... global last_event + ... last_event = event + >>> ZODB.event.notify = notify + +Now, let's start a server and verify that we get a serving event: + + >>> import ZEO + >>> addr, stop = ZEO.server() + + >>> isinstance(last_event, ZEO.StorageServer.Serving) + True + + >>> last_event.address == addr + True + + >>> server = last_event.server + >>> server.addr == addr + True + +Let's make sure we can connect. + + >>> client = ZEO.client(last_event.address).close() + +If we close the server, we'll get a closed event: + + >>> stop() + >>> isinstance(last_event, ZEO.StorageServer.Closed) + True + >>> last_event.server is server + True + +If we pass an empty string as the host part of the server address, we +can't really assign a single address, so the server addr attribute is +left alone: + + >>> addr, stop = ZEO.server(port=('', 0)) + >>> isinstance(last_event, ZEO.StorageServer.Serving) + True + + >>> last_event.address[1] > 0 + True + + >>> last_event.server.addr + ('', 0) + + >>> stop() + +The runzeo module provides some process support, including getting the +server configuration via a ZConfig configuration file. To spell a +dynamic port using ZConfig, you'd use a hostname by itself. In this +case, ZConfig passes None as the port. 
+ + >>> import ZEO.runzeo + >>> with open('conf', 'w') as f: + ... _ = f.write(""" + ... + ... address 127.0.0.1 + ... + ... + ... + ... """) + >>> options = ZEO.runzeo.ZEOOptions() + >>> options.realize('-C conf'.split()) + >>> options.address + ('127.0.0.1', None) + + >>> rs = ZEO.runzeo.ZEOServer(options) + >>> rs.check_socket() + >>> options.address + ('127.0.0.1', 0) + + +.. cleanup + + >>> ZODB.event.notify = old_notify diff --git a/thesisenv/lib/python3.6/site-packages/ZEO/tests/forker.py b/thesisenv/lib/python3.6/site-packages/ZEO/tests/forker.py new file mode 100644 index 0000000..df0f06a --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO/tests/forker.py @@ -0,0 +1,207 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## +"""Library for forking storage server and connecting client storage""" +from __future__ import print_function + + +import random +import sys +import time + + +import socket + +import logging + +import six + +import ZODB.tests.util +import zope.testing.setupstack + +from ZEO._compat import WIN +from ZEO import _forker + +logger = logging.getLogger('ZEO.tests.forker') + +DEBUG = _forker.DEBUG + +ZEO4_SERVER = _forker.ZEO4_SERVER + +skip_if_testing_client_against_zeo4 = ( + (lambda func: None) + if ZEO4_SERVER else + (lambda func: func) + ) + + +ZEOConfig = _forker.ZEOConfig + + +def encode_format(fmt): + # The list of replacements mirrors + # ZConfig.components.logger.handlers._control_char_rewrites + for xform in (("\n", r"\n"), ("\t", r"\t"), ("\b", r"\b"), + ("\f", r"\f"), ("\r", r"\r")): + fmt = fmt.replace(*xform) + return fmt + +runner = _forker.runner + +stop_runner = _forker.stop_runner +start_zeo_server = _forker.start_zeo_server + +if WIN: + def _quote_arg(s): + return '"%s"' % s +else: + def _quote_arg(s): + return s + +shutdown_zeo_server = _forker.shutdown_zeo_server + +def get_port(ignored=None): + """Return a port that is not in use. + + Checks if a port is in use by trying to connect to it. Assumes it + is not in use if connect raises an exception. We actually look for + 2 consective free ports because most of the clients of this + function will use the returned port and the next one. + + Raises RuntimeError after 10 tries. 
+ """ + + for _i in range(10): + port = random.randrange(20000, 30000) + s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + s1 = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + try: + try: + s.connect(('127.0.0.1', port)) + except socket.error: + pass # Perhaps we should check value of error too. + else: + continue + + try: + s1.connect(('127.0.0.1', port+1)) + except socket.error: + pass # Perhaps we should check value of error too. + else: + continue + + return port + + finally: + s.close() + s1.close() + raise RuntimeError("Can't find port") + +def can_connect(port): + c = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + try: + try: + c.connect(('127.0.0.1', port)) + except socket.error: + return False # Perhaps we should check value of error too. + else: + return True + finally: + c.close() + +def setUp(test): + ZODB.tests.util.setUp(test) + + servers = [] + + def start_server(storage_conf=None, zeo_conf=None, port=None, keep=False, + addr=None, path='Data.fs', protocol=None, blob_dir=None, + suicide=True, debug=False, **kw): + """Start a ZEO server. + + Return the server and admin addresses. 
+ """ + if port is None: + if addr is None: + port = 0 + else: + port = addr[1] + elif addr is not None: + raise TypeError("Can't specify port and addr") + addr, stop = start_zeo_server( + storage_conf=storage_conf, + zeo_conf=zeo_conf, + port=port, + keep=keep, + path=path, + protocol=protocol, + blob_dir=blob_dir, + suicide=suicide, + debug=debug, + **kw) + servers.append(stop) + return addr, stop + + test.globs['start_server'] = start_server + + def stop_server(stop): + stop() + servers.remove(stop) + + test.globs['stop_server'] = stop_server + + def cleanup_servers(): + for stop in list(servers): + stop() + + zope.testing.setupstack.register(test, cleanup_servers) + + test.globs['wait_until'] = wait_until + test.globs['wait_connected'] = wait_connected + test.globs['wait_disconnected'] = wait_disconnected + + +def wait_until(label=None, func=None, timeout=30, onfail=None): + if label is None: + if func is not None: + label = func.__name__ + elif not isinstance(label, six.string_types) and func is None: + func = label + label = func.__name__ + + if func is None: + def wait_decorator(f): + wait_until(label, f, timeout, onfail) + + return wait_decorator + + giveup = time.time() + timeout + while not func(): + if time.time() > giveup: + if onfail is None: + raise AssertionError("Timed out waiting for: ", label) + else: + return onfail() + time.sleep(0.01) + +def wait_connected(storage): + wait_until("storage is connected", storage.is_connected) + +def wait_disconnected(storage): + wait_until("storage is disconnected", + lambda: not storage.is_connected()) + + +debug_logging = _forker.debug_logging +whine = _forker.whine +ThreadlessQueue = _forker.ThreadlessQueue diff --git a/thesisenv/lib/python3.6/site-packages/ZEO/tests/invalidation-age.txt b/thesisenv/lib/python3.6/site-packages/ZEO/tests/invalidation-age.txt new file mode 100644 index 0000000..62a87eb --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO/tests/invalidation-age.txt @@ -0,0 +1,124 @@ 
+Invalidation age +================ + +When a ZEO client with a non-empty cache connects to the server, it +needs to verify whether the data in its cache is current. It does +this in one of 2 ways: + +quick verification + It gets a list of invalidations from the server since the last + transaction the client has seen and applies those to it's disk and + in-memory caches. This is only possible if there haven't been too + many transactions since the client was last connected. + +full verification + If quick verification isn't possible, the client iterates through + it's disk cache asking the server to verify whether each current + entry is valid. + +Unfortunately, for large caches, full verification is soooooo not +quick that it is impractical. Quick verificatioin is highly +desireable. + +To support quick verification, the server keeps a list of recent +invalidations. The size of this list is controlled by the +invalidation_queue_size parameter. If there is a lot of database +activity, the size might need to be quite large to support having +clients be disconnected for more than a few minutes. A very large +invalidation queue size can use a lot of memory. + +To suppliment the invalidation queue, you can also specify an +invalidation_age parameter. When a client connects and presents the +last transaction id it has seen, we first check to see if the +invalidation queue has that transaction id. It it does, then we send +all transactions since that id. Otherwise, we check to see if the +difference between storage's last transaction id and the given id is +less than or equal to the invalidation age. If it is, then we iterate +over the storage, starting with the given id, to get the invalidations +since the given id. + +NOTE: This assumes that iterating from a point near the "end" of a +database is inexpensive. Don't use this option for a storage for which +that is not the case. + +Here's an example. 
We set up a server, using an +invalidation-queue-size of 5: + + >>> addr, admin = start_server(zeo_conf=dict(invalidation_queue_size=5), + ... keep=True) + +Now, we'll open a client with a persistent cache, set up some data, +and then close client: + + >>> import ZEO, transaction + >>> db = ZEO.DB(addr, client='test') + >>> conn = db.open() + >>> for i in range(9): + ... conn.root()[i] = conn.root().__class__() + ... conn.root()[i].x = 0 + >>> transaction.commit() + >>> db.close() + +We'll open another client, and commit some transactions: + + >>> db = ZEO.DB(addr) + >>> conn = db.open() + >>> import transaction + >>> for i in range(2): + ... conn.root()[i].x = 1 + ... transaction.commit() + >>> db.close() + +If we reopen the first client, we'll do quick verification. + + >>> db = ZEO.DB(addr, client='test') # doctest: +ELLIPSIS + >>> db._storage._server.client.verify_result + 'quick verification' + + >>> [v.x for v in db.open().root().values()] + [1, 1, 0, 0, 0, 0, 0, 0, 0] + +Now, if we disconnect and commit more than 5 transactions, we'll see +that we had to clear the cache: + + >>> db.close() + >>> db = ZEO.DB(addr) + >>> conn = db.open() + >>> import transaction + >>> for i in range(9): + ... conn.root()[i].x = 2 + ... transaction.commit() + >>> db.close() + + >>> db = ZEO.DB(addr, client='test') + >>> db._storage._server.client.verify_result + 'cache too old, clearing' + + >>> [v.x for v in db.open().root().values()] + [2, 2, 2, 2, 2, 2, 2, 2, 2] + + >>> db.close() + +But if we restart the server with invalidation-age set, we can +do quick verification: + + >>> stop_server(admin) + >>> addr, admin = start_server(zeo_conf=dict(invalidation_queue_size=5, + ... invalidation_age=100)) + >>> db = ZEO.DB(addr) + >>> conn = db.open() + >>> import transaction + >>> for i in range(9): + ... conn.root()[i].x = 3 + ... 
transaction.commit() + >>> db.close() + + + >>> db = ZEO.DB(addr, client='test') # doctest: +ELLIPSIS + >>> db._storage._server.client.verify_result + 'quick verification' + + >>> [v.x for v in db.open().root().values()] + [3, 3, 3, 3, 3, 3, 3, 3, 3] + + >>> db.close() diff --git a/thesisenv/lib/python3.6/site-packages/ZEO/tests/new_addr.test b/thesisenv/lib/python3.6/site-packages/ZEO/tests/new_addr.test new file mode 100644 index 0000000..18692be --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO/tests/new_addr.test @@ -0,0 +1,47 @@ +You can change the address(es) of a client storaage. + +We'll start by setting up a server and connecting to it: + + >>> import ZEO, transaction + + >>> addr, stop = ZEO.server(path='test.fs', threaded=False) + >>> conn = ZEO.connection(addr) + >>> client = conn.db().storage + >>> client.is_connected() + True + >>> conn.root() + {} + >>> conn.root.x = 1 + >>> transaction.commit() + +Now we'll close the server: + + >>> stop() + +And wait for the connectin to notice it's disconnected: + + >>> wait_until(lambda : not client.is_connected()) + +Now, we'll restart the server: + + >>> addr, stop = ZEO.server(path='test.fs', threaded=False) + +Update with another client: + + >>> conn2 = ZEO.connection(addr) + >>> conn2.root.x += 1 + >>> transaction.commit() + +Update the connection and wait for connect: + + >>> client.new_addr(addr) + >>> wait_until(lambda : client.is_connected()) + >>> _ = transaction.begin() + >>> conn.root() + {'x': 2} + +.. cleanup + + >>> conn.close() + >>> conn2.close() + >>> stop() diff --git a/thesisenv/lib/python3.6/site-packages/ZEO/tests/protocols.test b/thesisenv/lib/python3.6/site-packages/ZEO/tests/protocols.test new file mode 100644 index 0000000..83ffefc --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO/tests/protocols.test @@ -0,0 +1,184 @@ +Test that multiple protocols are supported +========================================== + +A full test of all protocols isn't practical. 
But we'll do a limited +test that at least the current and previous protocols are supported in +both directions. + +Let's start a Z4 server + + >>> storage_conf = ''' + ... + ... blob-dir server-blobs + ... + ... path Data.fs + ... + ... + ... ''' + + >>> addr, stop = start_server( + ... storage_conf, dict(invalidation_queue_size=5), protocol=b'4') + +A current client should be able to connect to a old server: + + >>> import ZEO, ZODB.blob, transaction + >>> db = ZEO.DB(addr, client='client', blob_dir='blobs') + >>> wait_connected(db.storage) + >>> str(db.storage.protocol_version.decode('ascii'))[1:] + '4' + + >>> conn = db.open() + >>> conn.root().x = 0 + >>> transaction.commit() + >>> len(db.history(conn.root()._p_oid, 99)) + 2 + + >>> conn.root()['blob1'] = ZODB.blob.Blob() + >>> with conn.root()['blob1'].open('w') as f: + ... r = f.write(b'blob data 1') + >>> transaction.commit() + + >>> db2 = ZEO.DB(addr, blob_dir='server-blobs', shared_blob_dir=True) + >>> wait_connected(db2.storage) + >>> conn2 = db2.open() + >>> for i in range(5): + ... conn2.root().x += 1 + ... transaction.commit() + >>> conn2.root()['blob2'] = ZODB.blob.Blob() + >>> with conn2.root()['blob2'].open('w') as f: + ... r = f.write(b'blob data 2') + >>> transaction.commit() + + >>> @wait_until("Get the new data") + ... def f(): + ... conn.sync() + ... return conn.root().x == 5 + + >>> db.close() + + >>> for i in range(2): + ... conn2.root().x += 1 + ... transaction.commit() + + >>> db = ZEO.DB(addr, client='client', blob_dir='blobs') + >>> wait_connected(db.storage) + >>> conn = db.open() + >>> conn.root().x + 7 + + >>> db.close() + + >>> for i in range(10): + ... conn2.root().x += 1 + ... transaction.commit() + + >>> db = ZEO.DB(addr, client='client', blob_dir='blobs') + >>> wait_connected(db.storage) + >>> conn = db.open() + >>> conn.root().x + 17 + + >>> with conn.root()['blob1'].open() as f: + ... f.read() + b'blob data 1' + >>> with conn.root()['blob2'].open() as f: + ... 
f.read() + b'blob data 2' + + >>> db2.close() + >>> db.close() + >>> stop_server(stop) + + >>> import os, zope.testing.setupstack + >>> os.remove('client-1.zec') + >>> zope.testing.setupstack.rmtree('blobs') + >>> zope.testing.setupstack.rmtree('server-blobs') + +############################################################################# +# Note that the ZEO 5.0 server only supports clients that use the Z5 protocol + +# And the other way around: + +# >>> addr, _ = start_server(storage_conf, dict(invalidation_queue_size=5)) + +# Note that we'll have to pull some hijinks: + +# >>> db = ZEO.DB(addr, client='client', blob_dir='blobs') +# >>> str(db.storage.protocol_version.decode('ascii')) +# 'Z4' +# >>> wait_connected(db.storage) +# >>> conn = db.open() +# >>> conn.root().x = 0 +# >>> transaction.commit() +# >>> len(db.history(conn.root()._p_oid, 99)) +# 2 + +# >>> db = ZEO.DB(addr, client='client', blob_dir='blobs') +# >>> db.storage.protocol_version +# b'Z4' +# >>> wait_connected(db.storage) +# >>> conn = db.open() +# >>> conn.root().x = 0 +# >>> transaction.commit() +# >>> len(db.history(conn.root()._p_oid, 99)) +# 2 + +# >>> conn.root()['blob1'] = ZODB.blob.Blob() +# >>> with conn.root()['blob1'].open('w') as f: +# ... r = f.write(b'blob data 1') +# >>> transaction.commit() + +# >>> db2 = ZEO.DB(addr, blob_dir='server-blobs', shared_blob_dir=True) +# >>> wait_connected(db2.storage) +# >>> conn2 = db2.open() +# >>> for i in range(5): +# ... conn2.root().x += 1 +# ... transaction.commit() +# >>> conn2.root()['blob2'] = ZODB.blob.Blob() +# >>> with conn2.root()['blob2'].open('w') as f: +# ... r = f.write(b'blob data 2') +# >>> transaction.commit() + + +# >>> @wait_until() +# ... def x_to_be_5(): +# ... conn.sync() +# ... return conn.root().x == 5 + +# >>> db.close() + +# >>> for i in range(2): +# ... conn2.root().x += 1 +# ... 
transaction.commit() + +# >>> db = ZEO.DB(addr, client='client', blob_dir='blobs') +# >>> wait_connected(db.storage) +# >>> conn = db.open() +# >>> conn.root().x +# 7 + +# >>> db.close() + +# >>> for i in range(10): +# ... conn2.root().x += 1 +# ... transaction.commit() + +# >>> db = ZEO.DB(addr, client='client', blob_dir='blobs') +# >>> wait_connected(db.storage) +# >>> conn = db.open() +# >>> conn.root().x +# 17 + +# >>> with conn.root()['blob1'].open() as f: +# ... f.read() +# b'blob data 1' +# >>> with conn.root()['blob2'].open() as f: +# ... f.read() +# b'blob data 2' + +# >>> db2.close() +# >>> db.close() + +# Undo the hijinks: + +# >>> ZEO.asyncio.client.Protocol.protocols = old_protocols diff --git a/thesisenv/lib/python3.6/site-packages/ZEO/tests/server.pem b/thesisenv/lib/python3.6/site-packages/ZEO/tests/server.pem new file mode 100644 index 0000000..dcf8169 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO/tests/server.pem @@ -0,0 +1,20 @@ +-----BEGIN CERTIFICATE----- +MIIDWDCCAkACCQCI2YETV+CDIzANBgkqhkiG9w0BAQsFADBtMQswCQYDVQQGEwJV +UzELMAkGA1UECAwCVkExDTALBgNVBAoMBFpPREIxETAPBgNVBAsMCHpvZGIub3Jn +MREwDwYDVQQDDAh6b2RiLm9yZzEcMBoGCSqGSIb3DQEJARYNem9kYkB6b2RiLm9y +ZzAgFw0xNzA3MTExMzU5NDBaGA80NzU1MDYwNzEzNTk0MFowbTELMAkGA1UEBhMC +VVMxCzAJBgNVBAgMAlZBMQ0wCwYDVQQKDARaT0RCMREwDwYDVQQLDAh6b2RiLm9y +ZzERMA8GA1UEAwwIem9kYi5vcmcxHDAaBgkqhkiG9w0BCQEWDXpvZGJAem9kYi5v +cmcwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQD0rlGmUNcmllyuQ/7Y +CNozFc5nUA1ENjvsF1S4DG0qhkhPQxz2RQ2vhO8tpvgItVzmLDAR+KvvE9I8R7GR +mXzwgm6qNZLrZ3XUkWVtTrZWrnhlwRiKKae5HWHnN5TMoao1wwTFtOjuNyBKPcWc +fjAJR5ZY5QRVqyelp8vU7hWJP0G5rFZcoPuCegVLODpBuXMI7Z/GSepp0t2jBOaf +IXkRSLa59ozJjjtqRkULwF/Twaetm0CwQ0VEdORl/vpg2SK+834ad+vrSS2Ivkew +JqsCSNdiql+LxinCuUWSqe5yq/dXNDA5Cy+73hQlJy6IPlX0wRzafeWmT4itB5oM +2YLpAgMBAAEwDQYJKoZIhvcNAQELBQADggEBAGKHiLnFViSExRc2b3NBWBZkKIYb +gw14xW+PT4BLjL2etoiETGcdSo4lgUnFKw4FB8zF4BiCnSMYpi1yyLA/1ZgphXnS +J25ZAbIXFLxMNkC32K/f1WeVLYOaa/u65dzW2bHOXmFwmCYNq45pNC61rch5umuA 
+6kikl+EoNWpouTtkkY/JnfUCeYrLUbzD8mLxgyNFPKpSEbAo7Q0n2bjTtC+Y5GMW +8IJNC3i+2tsglVAjUWg3JwD0O0ql73qMh6rzWIdNcVjMv6KSKpqlzOYvqjfaIAQv +cvcXPW3RTMd3e245LPMsGwmv3FQ2dgJxhKkf/P3RtNEPrqOhPUCSxonUa+o= +-----END CERTIFICATE----- diff --git a/thesisenv/lib/python3.6/site-packages/ZEO/tests/server.pem.csr b/thesisenv/lib/python3.6/site-packages/ZEO/tests/server.pem.csr new file mode 100644 index 0000000..2101eaa --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO/tests/server.pem.csr @@ -0,0 +1,17 @@ +-----BEGIN CERTIFICATE REQUEST----- +MIICsjCCAZoCAQAwbTELMAkGA1UEBhMCVVMxCzAJBgNVBAgMAlZBMQ0wCwYDVQQK +DARaT0RCMREwDwYDVQQLDAh6b2RiLm9yZzERMA8GA1UEAwwIem9kYi5vcmcxHDAa +BgkqhkiG9w0BCQEWDXpvZGJAem9kYi5vcmcwggEiMA0GCSqGSIb3DQEBAQUAA4IB +DwAwggEKAoIBAQD0rlGmUNcmllyuQ/7YCNozFc5nUA1ENjvsF1S4DG0qhkhPQxz2 +RQ2vhO8tpvgItVzmLDAR+KvvE9I8R7GRmXzwgm6qNZLrZ3XUkWVtTrZWrnhlwRiK +Kae5HWHnN5TMoao1wwTFtOjuNyBKPcWcfjAJR5ZY5QRVqyelp8vU7hWJP0G5rFZc +oPuCegVLODpBuXMI7Z/GSepp0t2jBOafIXkRSLa59ozJjjtqRkULwF/Twaetm0Cw +Q0VEdORl/vpg2SK+834ad+vrSS2IvkewJqsCSNdiql+LxinCuUWSqe5yq/dXNDA5 +Cy+73hQlJy6IPlX0wRzafeWmT4itB5oM2YLpAgMBAAGgADANBgkqhkiG9w0BAQsF +AAOCAQEAVzxIqDiv3evn3LsKrE0HcSkWKnValZz0e4iF96qmstLs2NJa+WsV7p/J +Tg8DgUbQ72G9wN10OQld1k06KUd1SEWhOonBX60lGOkqyn6LHassItbwgCmHC5hk +qs7h0b56s/gSnxYvN3tAWiRzNxdudFQBB7Ughy2SRN3ChsNDBuRIsJQN2yZtYjXM +lZb2J7hZChFGD+L/9Cq6oPhUD+l1aFUv8PvU3jInf/IYyvNQJ3qeYRpOcNR4cnyf +6oRJn2b3ypFF/4F4ZiOb6Qocpcg7qBRRqztr4C2MZuDST4/zIBAHfKlUwD1/uo7A +BdXUUeM1J1Gaf8GRLSvB8AeZg6/ztA== +-----END CERTIFICATE REQUEST----- diff --git a/thesisenv/lib/python3.6/site-packages/ZEO/tests/server_key.pem b/thesisenv/lib/python3.6/site-packages/ZEO/tests/server_key.pem new file mode 100644 index 0000000..30c1fc8 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO/tests/server_key.pem @@ -0,0 +1,27 @@ +-----BEGIN RSA PRIVATE KEY----- +MIIEowIBAAKCAQEA9K5RplDXJpZcrkP+2AjaMxXOZ1ANRDY77BdUuAxtKoZIT0Mc +9kUNr4TvLab4CLVc5iwwEfir7xPSPEexkZl88IJuqjWS62d11JFlbU62Vq54ZcEY 
+iimnuR1h5zeUzKGqNcMExbTo7jcgSj3FnH4wCUeWWOUEVasnpafL1O4ViT9BuaxW +XKD7gnoFSzg6QblzCO2fxknqadLdowTmnyF5EUi2ufaMyY47akZFC8Bf08GnrZtA +sENFRHTkZf76YNkivvN+Gnfr60ktiL5HsCarAkjXYqpfi8YpwrlFkqnucqv3VzQw +OQsvu94UJScuiD5V9MEc2n3lpk+IrQeaDNmC6QIDAQABAoIBAErM27MvdYabYvv3 +V3otwp7pZK8avuOCfPEg9MpLKjhc0tBAYSM8WwG0bvYS3DK1VxAapBtqXQ16jsPU +2wj61kIkbbZlKGQEvfXc+Rfgf0eikLXywRDDyT2DKQHpcPjZ11IWK2hRdQAWJC3u +EnJT9VVw6BqG8LtL1pQC5wJSQo0xC1sJ/MTr/szLvKRjuYZE7YStpUfV6RYq2KQF +7Oa9nPKtxlIbDCa7z4S6y5yiusYrSSFilK0pVSU+9789kGNZMLzKbnGu+YSVB/Bx +MLXWRAD8DV9zign255pIU/xI5VKjOwID38JfgdcebV/KeCPu8W6jKKbfUsUCqcjL +YjDtHYECgYEA/SaxUoejMOasHppnsAewy/I+DzMuX+KYztqAnzjsuGwRxmxjYyQe +w7EidinM3WuloJIBZzA9aULmWjSKOfTsuGm+Mokucbbw9jaWVT6Co3kWrHySInhZ +sfTwHKz5ojGBcQD4l06xaVM9utNi6r8wvJijFl5xIsMzc5szEkWs9vkCgYEA9285 +bGSAAwzUFHVk1pyLKozM2gOtF5rrAUQlWtNVU6K2tw+MKEGara0f+HFZrJZC9Rh2 +HBm2U9PPt/kJ73HErQG+E6n0jfol8TQ3ZKz3tlSxImh0CiaKLnh4ahf7o8zU16nT +XDfu3+Rf11EhORXYfZLmdubfCOD4ZaB2/405N3ECgYEA7b4k0gkoLYi1JJiFwD+4 +vhBmUAgVCV/ZeoqiOOZRCnITz3GDdVw6uDXm02o2R8wM5Fu6jZo0UmLNyvGEzyFC +H37PbM6Am7LfYZuqW6w1LClQLfVfmJfGROZvib65QqWTlvj+fbsdyniuhIJ5Z1Tf +BH+kyiEvxyHjdDLRJ9vfsKECgYA8P9MFt7sMAxWpHaS+NUQVyk8fTwHY25oZptRJ +t2fxg49mJ90C+GaHn75HKqKhSb1oHNq1oPUqmEreC0AGE/fGAMSd2SZ5Y83VW9eZ +JhzzQtAXBsQqrJO9GQyJGOnnSrsRAIM800nRLrS/ozupwM4EVb7UeQcaDF2vsVEI +jQS/oQKBgHj26xn7AunX5GS8EYe4GPj4VZehmlnEKONGrPrr25aWkaY4kDJgMLUb +AxwIQHbCMm5TMqIxi5l39/O9dxuuGCkOs37j7C3f3VVFuQW1KKyHem9OClgFDZj3 +tEEk1N3NevrH06NlmAHweHMuJXL8mBvM375zH9tSw5mgG0OMRbnG +-----END RSA PRIVATE KEY----- diff --git a/thesisenv/lib/python3.6/site-packages/ZEO/tests/serverpw.pem b/thesisenv/lib/python3.6/site-packages/ZEO/tests/serverpw.pem new file mode 100644 index 0000000..513faf2 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO/tests/serverpw.pem @@ -0,0 +1,24 @@ +-----BEGIN CERTIFICATE----- +MIID8DCCAtigAwIBAgIJALop9P9MBfzLMA0GCSqGSIb3DQEBBQUAMFgxCzAJBgNV +BAYTAlVTMQswCQYDVQQIEwJWQTENMAsGA1UEChMEWk9EQjERMA8GA1UEAxMIem9k 
+Yi5vcmcxGjAYBgkqhkiG9w0BCQEWC3B3QHpvZGIub3JnMB4XDTE2MDYyMzE1MTAz +MVoXDTE3MDYyMzE1MTAzMVowWDELMAkGA1UEBhMCVVMxCzAJBgNVBAgTAlZBMQ0w +CwYDVQQKEwRaT0RCMREwDwYDVQQDEwh6b2RiLm9yZzEaMBgGCSqGSIb3DQEJARYL +cHdAem9kYi5vcmcwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDKw/iw +N1EPddU9QQQ+OnCJv9G3rbTOPt4zEbpfTROIHTME3krFKPALrGF2aK+oBpHx3/TZ +HN5UvWK/jmGtDL9jekKCAaeAaVIKlESUS6DIxZY+FaO3re/1fbmBNRz8Cnn1raAw +/4YZRDPvblooH4Nt5m7uooGAIIDPft3fInhmGboOoIpXc7nMGVGOWXlDN5I9oFmm +4vby4CUMy3A/0wnHgTuMNy7Tpjgz2E/1MRAOyWQ7PZYiASs4ycZfas8058O8DI+o +rSYyum/czecIz52P6jbx5LWvcKDWac8QbJoHPelthYtxcMHee2+Nh6MWW688CBzq +HSeFAdNO3d9kMiFpAgMBAAGjgbwwgbkwHQYDVR0OBBYEFDui1OC2+2z2rHADglk5 +tGOndxhoMIGJBgNVHSMEgYEwf4AUO6LU4Lb7bPascAOCWTm0Y6d3GGihXKRaMFgx +CzAJBgNVBAYTAlVTMQswCQYDVQQIEwJWQTENMAsGA1UEChMEWk9EQjERMA8GA1UE +AxMIem9kYi5vcmcxGjAYBgkqhkiG9w0BCQEWC3B3QHpvZGIub3JnggkAuin0/0wF +/MswDAYDVR0TBAUwAwEB/zANBgkqhkiG9w0BAQUFAAOCAQEAiEYO8MZ3OG8sqy9t +AtUZbv0aTsIUzy/QTUKKDUo8qwKNOylqyqGAZV0tZ5eCoqIGFAwRJBBymIizU3zH +U1k2MnYZMVi7uYwSy+qwg52+X7GLl/kaAfx8kNvpr274CuZQnLojJS+K8HtH5Pom +YD3gTO3OxGS4IS6uf6DD+mf+C9OBnTl47P0HA0/eHBEXVSc2vsv30H/UoW5VbZ6z +6TWkoPwSMVhCNRRRif4/eqCLh24/h5b4uvAC+tsrIPQ9If7EsqVTNMCbAkv3ib6g +OmaCdbrGkqvD3UVn7i5ci96UZoF80EWNZiwhMdvQtMfOAR4jHQ1pTepJni6JwzZP +UMNDpQ== +-----END CERTIFICATE----- diff --git a/thesisenv/lib/python3.6/site-packages/ZEO/tests/serverpw_key.pem b/thesisenv/lib/python3.6/site-packages/ZEO/tests/serverpw_key.pem new file mode 100644 index 0000000..44f8340 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO/tests/serverpw_key.pem @@ -0,0 +1,30 @@ +-----BEGIN RSA PRIVATE KEY----- +Proc-Type: 4,ENCRYPTED +DEK-Info: DES-EDE3-CBC,769B900D03925712 + +z5M/XkqEC1+PxJ1T3QrUhGG9fPTBsPxKy8WlwIytbMg0RXS0oJBiFgNYp1ktqzGo +yT+AdTCRR1hNVX8M5HbV3ksUjKxXKCL3+yaaB6JtGbNRr2qTNwosvxD92nKT/hvN +R6rHF6LcO05s8ubs9b9ON/ja7HCx69N5CjBuCbCFHUTlAXkwD9w0ScrxrtfP50EY +FOw6LAqhhzq6/KO7c1SJ7k9LYzakhL+nbw5KM9QgBk4WHlmKLbCZIZ5RWvu0F4s5 
+n4qk/BcuXIkbYuEv2kH0nDk5eDfA/dj7xZcMMgL5VFymQzaZLYyj4WuQYXu/7JW/ +nM/ZWBkZOMaI3vnPTG1hJ9pgjLjQnjfNA/bGWwbLxjCsPmR8yvZS4v2iqdB6X3Vl +yJ9aV9r8KoU0PJk3x4v2Zp+RQxgrKSaQw59sXptaXAY3NCRR6ohvq4P5X6UB8r5S +vYdoMeVXhX1hzXeMguln7zQInwJhPZqk4wMIV3lTsCqh1eJ7NC2TGCwble+B/ClR +KtzuJBzoYPLw44ltnXPEMmVH1Fkh17+QZFgZRJrKGD9PGOAXmnzudsZ1xX9kNnOM +JLIT/mzKcqkd8S1n1Xi1x7zGA0Ng5xmKGFe8oDokPJucJO1Ou+hbLDmC0DZUGzr1 +qqPJ3F/DzZZDTmD/rZF6doPJgFAZvgpVeiWS8/v1qbz/nz13uwXDLjRPgLfcKpmQ +4R3V4QlgviDilW61VTZnzV9qAOx4fG6+IwWIGBlrJnfsH/fSCDNlAStc6k12zdun +PIIRJBfbEprGig3vRWUoBASReqow1JCN9DaVCX1P27pDKY5oDe+7/HOrQpwhPoya +2HEwbKeyY0nCcCXbkWGL1bwEUs/PrJv+61rik4KxOWhKpHWkZLzbozELb44jXrJx +e8K8XKz4La2DEjsUYHc31u6T69GBQO9JDEvih15phUWq8ITvDnkHpAg+wYb1JAHD +QcqDtAulMvT/ZGN0h7qdwbHMggEsLgCCVPG4iZ5K4cXsMbePFvQqq+o4FTMF+cM5 +2Dq0wir92U9cH+ooy80LIt5Kp5zqgQZzr73o9MEgwqJocCrx9ZrofKRUmTV+ZU0r +w5mfUM47Ctnqia0UNGx6SUs3CHFDPWPbzrAaqGzSvFhzR1MMoL1/rJzP1VSm3Fk3 +ESWkPrg0J8dcQP/ch9MhH8eoQYyA+2q1vClUbeZLAs5KoHxgi6pSkGYqFhshrA+t +2AIrUPDPPDf0PgRoXJrzdVOiNNY1rzyql+0JqDH6DjCVcAADWY+48p9U2YFTd7Je +DvnZWihwe0qYGn1AKIkvJ4SR3bQg36etrxhMrMl/8lUn2dnT7GFrhjr9HwCpJwa7 +8tv150SrQXt3FXZCHb+RMUgoWZDeksDohPiGzXkPU6kaSviZVnRMslyU4ahWp6vC +8tYUhb7K6N+is1hYkICNt6zLl2vBDuCDWmiIwopHtnH1kz8bYlp4/GBVaMIgZiCM +gM/7+p4YCc++s2sJiQ9+BqPo0zKm3bbSP+fPpeWefQVte9Jx4S36YXU52HsJxBTN +WUdHABC+aS2A45I12xMNzOJR6VfxnG6f3JLpt3MkUCEg+898vJGope+TJUhD+aJC +-----END RSA PRIVATE KEY----- diff --git a/thesisenv/lib/python3.6/site-packages/ZEO/tests/servertesting.py b/thesisenv/lib/python3.6/site-packages/ZEO/tests/servertesting.py new file mode 100644 index 0000000..c47afda --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO/tests/servertesting.py @@ -0,0 +1,50 @@ +from __future__ import print_function +from __future__ import print_function +############################################################################## +# +# Copyright Zope Foundation and Contributors. +# All Rights Reserved. 
+# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## + +# Testing the current ZEO implementation is rather hard due to the +# architecture, which mixes concerns, especially between application +# and networking. Still, it's not as bad as it could be. + +# The 2 most important classes in the architecture are ZEOStorage and +# StorageServer. A ZEOStorage is created for each client connection. +# The StorageServer maintains data shared or needed for coordination +# among clients. + +# The other important part of the architecture is connections. +# Connections are used by ZEOStorages to send messages or return data +# to clients. + +# Here, we'll try to provide some testing infrastructure to isolate +# servers from the network. 
+ +import ZEO.asyncio.tests +import ZEO.StorageServer +import ZODB.MappingStorage + +class StorageServer(ZEO.StorageServer.StorageServer): + + def __init__(self, addr='test_addr', storages=None, **kw): + if storages is None: + storages = {'1': ZODB.MappingStorage.MappingStorage()} + ZEO.StorageServer.StorageServer.__init__(self, addr, storages, **kw) + +def client(server, name='client'): + zs = ZEO.StorageServer.ZEOStorage(server) + protocol = ZEO.asyncio.tests.server_protocol( + False, zs, protocol_version=b'Z5', addr='test-addr-%s' % name) + zs.notify_connected(protocol) + zs.register('1', 0) + return zs diff --git a/thesisenv/lib/python3.6/site-packages/ZEO/tests/speed.py b/thesisenv/lib/python3.6/site-packages/ZEO/tests/speed.py new file mode 100644 index 0000000..d93193c --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO/tests/speed.py @@ -0,0 +1,222 @@ +from __future__ import print_function +from __future__ import print_function +from __future__ import print_function +from __future__ import print_function +from __future__ import print_function +from __future__ import print_function +from __future__ import print_function +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## +usage="""Test speed of a ZODB storage + +Options: + + -d file The data file to use as input. + The default is this script. 
+ + -n n The number of repititions + + -s module A module that defines a 'Storage' + attribute, which is an open storage. + If not specified, a FileStorage will ne + used. + + -z Test compressing data + + -D Run in debug mode + + -L Test loads as well as stores by minimizing + the cache after eachrun + + -M Output means only + + -C Run with a persistent client cache + + -U Run ZEO using a Unix domain socket + + -t n Number of concurrent threads to run. +""" + +import asyncore +import sys, os, getopt, time +##sys.path.insert(0, os.getcwd()) + +import persistent +import transaction +import ZODB +from ZODB.POSException import ConflictError +from ZEO.tests import forker + +class P(persistent.Persistent): + pass + +fs_name = "zeo-speed.fs" + +class ZEOExit(asyncore.file_dispatcher): + """Used to exit ZEO.StorageServer when run is done""" + def writable(self): + return 0 + def readable(self): + return 1 + def handle_read(self): + buf = self.recv(4) + assert buf == "done" + self.delete_fs() + os._exit(0) + def handle_close(self): + print("Parent process exited unexpectedly") + self.delete_fs() + os._exit(0) + def delete_fs(self): + os.unlink(fs_name) + os.unlink(fs_name + ".lock") + os.unlink(fs_name + ".tmp") + +def work(db, results, nrep, compress, data, detailed, minimize, threadno=None): + for j in range(nrep): + for r in 1, 10, 100, 1000: + t = time.time() + conflicts = 0 + + jar = db.open() + while 1: + try: + transaction.begin() + rt = jar.root() + key = 's%s' % r + if key in rt: + p = rt[key] + else: + rt[key] = p =P() + for i in range(r): + v = getattr(p, str(i), P()) + if compress is not None: + v.d = compress(data) + else: + v.d = data + setattr(p, str(i), v) + transaction.commit() + except ConflictError: + conflicts = conflicts + 1 + else: + break + jar.close() + + t = time.time() - t + if detailed: + if threadno is None: + print("%s\t%s\t%.4f\t%d" % (j, r, t, conflicts)) + else: + print("%s\t%s\t%.4f\t%d\t%d" % (j, r, t, conflicts, + threadno)) + 
results[r].append((t, conflicts)) + rt=d=p=v=None # release all references + if minimize: + time.sleep(3) + jar.cacheMinimize() + +def main(args): + opts, args = getopt.getopt(args, 'zd:n:Ds:LMt:U') + s = None + compress = None + data=sys.argv[0] + nrep=5 + minimize=0 + detailed=1 + cache = None + domain = 'AF_INET' + threads = 1 + for o, v in opts: + if o=='-n': nrep = int(v) + elif o=='-d': data = v + elif o=='-s': s = v + elif o=='-z': + import zlib + compress = zlib.compress + elif o=='-L': + minimize=1 + elif o=='-M': + detailed=0 + elif o=='-D': + global debug + os.environ['STUPID_LOG_FILE']='' + os.environ['STUPID_LOG_SEVERITY']='-999' + debug = 1 + elif o == '-C': + cache = 'speed' + elif o == '-U': + domain = 'AF_UNIX' + elif o == '-t': + threads = int(v) + + zeo_pipe = None + if s: + s = __import__(s, globals(), globals(), ('__doc__',)) + s = s.Storage + server = None + else: + s, server, pid = forker.start_zeo("FileStorage", + (fs_name, 1), domain=domain) + + data=open(data).read() + db=ZODB.DB(s, + # disable cache deactivation + cache_size=4000, + cache_deactivate_after=6000,) + + print("Beginning work...") + results={1:[], 10:[], 100:[], 1000:[]} + if threads > 1: + import threading + l = [] + for i in range(threads): + t = threading.Thread(target=work, + args=(db, results, nrep, compress, data, + detailed, minimize, i)) + l.append(t) + for t in l: + t.start() + for t in l: + t.join() + + else: + work(db, results, nrep, compress, data, detailed, minimize) + + if server is not None: + server.close() + os.waitpid(pid, 0) + + if detailed: + print('-'*24) + print("num\tmean\tmin\tmax") + for r in 1, 10, 100, 1000: + times = [] + for time, conf in results[r]: + times.append(time) + t = mean(times) + print("%d\t%.4f\t%.4f\t%.4f" % (r, t, min(times), max(times))) + +def mean(l): + tot = 0 + for v in l: + tot = tot + v + return tot / len(l) + +##def compress(s): +## c = zlib.compressobj() +## o = c.compress(s) +## return o + c.flush() + +if 
__name__=='__main__': + main(sys.argv[1:]) diff --git a/thesisenv/lib/python3.6/site-packages/ZEO/tests/stress.py b/thesisenv/lib/python3.6/site-packages/ZEO/tests/stress.py new file mode 100644 index 0000000..bcec84d --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO/tests/stress.py @@ -0,0 +1,137 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## +"""A ZEO client-server stress test to look for leaks. + +The stress test should run in an infinite loop and should involve +multiple connections. +""" +from __future__ import print_function +# TODO: This code is currently broken. 
+ +import transaction +import ZODB +from ZODB.MappingStorage import MappingStorage +from ZODB.tests import MinPO +from ZEO.ClientStorage import ClientStorage +from ZEO.tests import forker + +import os +import random + +NUM_TRANSACTIONS_PER_CONN = 10 +NUM_CONNECTIONS = 10 +NUM_ROOTS = 20 +MAX_DEPTH = 20 +MIN_OBJSIZE = 128 +MAX_OBJSIZE = 2048 + +def an_object(): + """Return an object suitable for a PersistentMapping key""" + size = random.randrange(MIN_OBJSIZE, MAX_OBJSIZE) + if os.path.exists("/dev/urandom"): + f = open("/dev/urandom") + buf = f.read(size) + f.close() + return buf + else: + f = open(MinPO.__file__) + l = list(f.read(size)) + f.close() + random.shuffle(l) + return "".join(l) + +def setup(cn): + """Initialize the database with some objects""" + root = cn.root() + for i in range(NUM_ROOTS): + prev = an_object() + for j in range(random.randrange(1, MAX_DEPTH)): + o = MinPO.MinPO(prev) + prev = o + root[an_object()] = o + transaction.commit() + cn.close() + +def work(cn): + """Do some work with a transaction""" + cn.sync() + root = cn.root() + obj = random.choice(root.values()) + # walk down to the bottom + while not isinstance(obj.value, str): + obj = obj.value + obj.value = an_object() + transaction.commit() + +def main(): + # Yuck! Need to cleanup forker so that the API is consistent + # across Unix and Windows, at least if that's possible. 
+ if os.name == "nt": + zaddr, tport, pid = forker.start_zeo_server('MappingStorage', ()) + def exitserver(): + import socket + s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + s.connect(tport) + s.close() + else: + zaddr = '', random.randrange(20000, 30000) + pid, exitobj = forker.start_zeo_server(MappingStorage(), zaddr) + def exitserver(): + exitobj.close() + + while 1: + pid = start_child(zaddr) + print("started", pid) + os.waitpid(pid, 0) + + exitserver() + +def start_child(zaddr): + + pid = os.fork() + if pid != 0: + return pid + try: + _start_child(zaddr) + finally: + os._exit(0) + +def _start_child(zaddr): + storage = ClientStorage(zaddr, debug=1, min_disconnect_poll=0.5, wait=1) + db = ZODB.DB(storage, pool_size=NUM_CONNECTIONS) + setup(db.open()) + conns = [] + conn_count = 0 + + for i in range(NUM_CONNECTIONS): + c = db.open() + c.__count = 0 + conns.append(c) + conn_count += 1 + + while conn_count < 25: + c = random.choice(conns) + if c.__count > NUM_TRANSACTIONS_PER_CONN: + conns.remove(c) + c.close() + conn_count += 1 + c = db.open() + c.__count = 0 + conns.append(c) + else: + c.__count += 1 + work(c) + +if __name__ == "__main__": + main() diff --git a/thesisenv/lib/python3.6/site-packages/ZEO/tests/testConfig.py b/thesisenv/lib/python3.6/site-packages/ZEO/tests/testConfig.py new file mode 100644 index 0000000..566065e --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO/tests/testConfig.py @@ -0,0 +1,126 @@ +############################################################################## +# +# Copyright (c) 2003 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## + +import unittest + + +from zope.testing import setupstack +from ZODB.config import storageFromString + +from .forker import start_zeo_server +from .threaded import threaded_server_tests + +class ZEOConfigTestBase(setupstack.TestCase): + + setUp = setupstack.setUpDirectory + + def start_server(self, settings='', **kw): + + for name, value in kw.items(): + settings += '\n%s %s\n' % (name.replace('_', '-'), value) + + zeo_conf = """ + + address 127.0.0.1:0 + %s + + """ % settings + return start_zeo_server("\n\n", + zeo_conf, threaded=True) + + def start_client(self, addr, settings='', **kw): + settings += '\nserver %s:%s\n' % addr + for name, value in kw.items(): + settings += '\n%s %s\n' % (name.replace('_', '-'), value) + return storageFromString( + """ + %import ZEO + + + {} + + """.format(settings)) + + def _client_assertions( + self, client, addr, + connected=True, + cache_size=20 * (1<<20), + cache_path=None, + blob_dir=None, + shared_blob_dir=False, + blob_cache_size=None, + blob_cache_size_check=10, + read_only=False, + read_only_fallback=False, + server_sync=False, + wait_timeout=30, + client_label=None, + storage='1', + name=None, + ): + self.assertEqual(client.is_connected(), connected) + self.assertEqual(client._addr, [addr]) + self.assertEqual(client._cache.maxsize, cache_size) + + self.assertEqual(client._cache.path, cache_path) + self.assertEqual(client.blob_dir, blob_dir) + self.assertEqual(client.shared_blob_dir, shared_blob_dir) + self.assertEqual(client._blob_cache_size, blob_cache_size) + if blob_cache_size: + self.assertEqual(client._blob_cache_size_check, + blob_cache_size * blob_cache_size_check // 100) + 
self.assertEqual(client._is_read_only, read_only) + self.assertEqual(client._read_only_fallback, read_only_fallback) + self.assertEqual(client._server.timeout, wait_timeout) + self.assertEqual(client._client_label, client_label) + self.assertEqual(client._storage, storage) + self.assertEqual(client.__name__, + name if name is not None else str(client._addr)) + +class ZEOConfigTest(ZEOConfigTestBase): + + def test_default_zeo_config(self, **client_settings): + addr, stop = self.start_server() + + client = self.start_client(addr, **client_settings) + self._client_assertions(client, addr, **client_settings) + + client.close() + stop() + + def test_client_variations(self): + + for name, value in dict( + cache_size=4200, + cache_path='test', + blob_dir='blobs', + blob_cache_size=424242, + read_only=True, + read_only_fallback=True, + server_sync=True, + wait_timeout=33, + client_label='test_client', + name='Test' + ).items(): + params = {name: value} + self.test_default_zeo_config(**params) + + def test_blob_cache_size_check(self): + self.test_default_zeo_config(blob_cache_size=424242, + blob_cache_size_check=50) + +def test_suite(): + suite = unittest.makeSuite(ZEOConfigTest) + suite.layer = threaded_server_tests + return suite diff --git a/thesisenv/lib/python3.6/site-packages/ZEO/tests/testConnection.py b/thesisenv/lib/python3.6/site-packages/ZEO/tests/testConnection.py new file mode 100644 index 0000000..c9c1f2b --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO/tests/testConnection.py @@ -0,0 +1,246 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## +"""Test setup for ZEO connection logic. + +The actual tests are in ConnectionTests.py; this file provides the +platform-dependent scaffolding. +""" + +from __future__ import with_statement, print_function + +from ZEO.tests import ConnectionTests, InvalidationTests +from zope.testing import setupstack +import os +if os.environ.get('USE_ZOPE_TESTING_DOCTEST'): + from zope.testing import doctest +else: + import doctest +import unittest +import ZODB.tests.util + +import ZEO + +from . import forker + +class FileStorageConfig(object): + def getConfig(self, path, create, read_only): + return """\ + + path %s + create %s + read-only %s + """ % (path, + create and 'yes' or 'no', + read_only and 'yes' or 'no') + +class MappingStorageConfig(object): + def getConfig(self, path, create, read_only): + return """""" + + +class FileStorageConnectionTests( + FileStorageConfig, + ConnectionTests.ConnectionTests, + InvalidationTests.InvalidationTests + ): + """FileStorage-specific connection tests.""" + +class FileStorageReconnectionTests( + FileStorageConfig, + ConnectionTests.ReconnectionTests, + ): + """FileStorage-specific re-connection tests.""" + # Run this at level 1 because MappingStorage can't do reconnection tests + +class FileStorageInvqTests( + FileStorageConfig, + ConnectionTests.InvqTests + ): + """FileStorage-specific invalidation queue tests.""" + +class FileStorageTimeoutTests( + FileStorageConfig, + ConnectionTests.TimeoutTests + ): + pass + + +class MappingStorageConnectionTests( + MappingStorageConfig, + ConnectionTests.ConnectionTests + ): + """Mapping storage connection tests.""" + +# The ReconnectionTests can't work with MappingStorage 
because it's only an +# in-memory storage and has no persistent state. + +class MappingStorageTimeoutTests( + MappingStorageConfig, + ConnectionTests.TimeoutTests + ): + pass + +class SSLConnectionTests( + MappingStorageConfig, + ConnectionTests.SSLConnectionTests, + ): + pass + + +test_classes = [FileStorageConnectionTests, + FileStorageReconnectionTests, + FileStorageInvqTests, + FileStorageTimeoutTests, + MappingStorageConnectionTests, + MappingStorageTimeoutTests, + ] +if not forker.ZEO4_SERVER: + test_classes.append(SSLConnectionTests) + +def invalidations_while_connecting(): + r""" +As soon as a client registers with a server, it will recieve +invalidations from the server. The client must be careful to queue +these invalidations until it is ready to deal with them. At the time +of the writing of this test, clients weren't careful enough about +queing invalidations. This led to cache corruption in the form of +both low-level file corruption as well as out-of-date records marked +as current. + +This tests tries to provoke this bug by: + +- starting a server + + >>> addr, _ = start_server() + +- opening a client to the server that writes some objects, filling + it's cache at the same time, + + >>> import ZEO, ZODB.tests.MinPO, transaction + >>> db = ZEO.DB(addr, client='x') + >>> conn = db.open() + >>> nobs = 1000 + >>> for i in range(nobs): + ... conn.root()[i] = ZODB.tests.MinPO.MinPO(0) + >>> transaction.commit() + + >>> import zope.testing.loggingsupport, logging + >>> handler = zope.testing.loggingsupport.InstalledHandler( + ... 'ZEO', level=logging.INFO) + + # >>> logging.getLogger('ZEO').debug( + # ... 
'Initial tid %r' % conn.root()._p_serial) + +- disconnecting the first client (closing it with a persistent cache), + + >>> db.close() + +- starting a second client that writes objects more or less + constantly, + + >>> import random, threading, time + >>> stop = False + >>> db2 = ZEO.DB(addr) + >>> tm = transaction.TransactionManager() + >>> conn2 = db2.open(transaction_manager=tm) + >>> random = random.Random(0) + >>> lock = threading.Lock() + >>> def run(): + ... while 1: + ... i = random.randint(0, nobs-1) + ... if stop: + ... return + ... with lock: + ... conn2.root()[i].value += 1 + ... tm.commit() + ... #logging.getLogger('ZEO').debug( + ... # 'COMMIT %s %s %r' % ( + ... # i, conn2.root()[i].value, conn2.root()[i]._p_serial)) + ... time.sleep(0) + >>> thread = threading.Thread(target=run) + >>> thread.setDaemon(True) + >>> thread.start() + +- restarting the first client, and +- testing for cache validity. + + >>> bad = False + >>> try: + ... for c in range(10): + ... time.sleep(.1) + ... db = ZODB.DB(ZEO.ClientStorage.ClientStorage(addr, client='x')) + ... with lock: + ... #logging.getLogger('ZEO').debug('Locked %s' % c) + ... @wait_until("connected and we have caught up", timeout=199) + ... def _(): + ... if (db.storage.is_connected() + ... and db.storage.lastTransaction() + ... == db.storage._call('lastTransaction') + ... ): + ... #logging.getLogger('ZEO').debug( + ... # 'Connected %r' % db.storage.lastTransaction()) + ... return True + ... + ... conn = db.open() + ... for i in range(1000): + ... if conn.root()[i].value != conn2.root()[i].value: + ... print('bad', c, i, conn.root()[i].value, end=" ") + ... print(conn2.root()[i].value) + ... bad = True + ... print('client debug log with lock held') + ... while handler.records: + ... record = handler.records.pop(0) + ... print(record.name, record.levelname, end=' ') + ... print(handler.format(record)) + ... #if bad: + ... # with open('server.log') as f: + ... # print(f.read()) + ... #else: + ... 
# logging.getLogger('ZEO').debug('GOOD %s' % c) + ... db.close() + ... finally: + ... stop = True + ... thread.join(10) + + >>> thread.isAlive() + False + + >>> for record in handler.records: + ... if record.levelno < logging.ERROR: + ... continue + ... print(record.name, record.levelname) + ... print(handler.format(record)) + + >>> handler.uninstall() + + >>> db.close() + >>> db2.close() + """ + +def test_suite(): + suite = unittest.TestSuite() + + for klass in test_classes: + sub = unittest.makeSuite(klass, 'check') + sub.layer = ZODB.tests.util.MininalTestLayer( + klass.__name__ + ' ZEO Connection Tests') + suite.addTest(sub) + + sub = doctest.DocTestSuite( + setUp=forker.setUp, tearDown=setupstack.tearDown, + ) + sub.layer = ZODB.tests.util.MininalTestLayer('ZEO Connection DocTests') + suite.addTest(sub) + + return suite diff --git a/thesisenv/lib/python3.6/site-packages/ZEO/tests/testConversionSupport.py b/thesisenv/lib/python3.6/site-packages/ZEO/tests/testConversionSupport.py new file mode 100644 index 0000000..e40d148 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO/tests/testConversionSupport.py @@ -0,0 +1,150 @@ +############################################################################## +# +# Copyright (c) 2006 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. 
+# +############################################################################## +import doctest +import unittest + +import ZEO.asyncio.testing + +class FakeStorageBase(object): + + def __getattr__(self, name): + if name in ('getTid', 'history', 'load', 'loadSerial', + 'lastTransaction', 'getSize', 'getName', 'supportsUndo', + 'tpc_transaction'): + return lambda *a, **k: None + raise AttributeError(name) + + def isReadOnly(self): + return False + + def __len__(self): + return 4 + +class FakeStorage(FakeStorageBase): + + def record_iternext(self, next=None): + if next == None: + next = '0' + next = str(int(next) + 1) + oid = next + if next == '4': + next = None + + return oid, oid*8, 'data ' + oid, next + +class FakeServer(object): + storages = { + '1': FakeStorage(), + '2': FakeStorageBase(), + } + lock_managers = storages + + def register_connection(*args): + return None, None + + client_conflict_resolution = False + +class FakeConnection(object): + protocol_version = b'Z4' + addr = 'test' + + call_soon_threadsafe = lambda f, *a: f(*a) + async_ = async_threadsafe = None + +def test_server_record_iternext(): + """ + +On the server, record_iternext calls are simply delegated to the +underlying storage. + + >>> import ZEO.StorageServer + + >>> zeo = ZEO.StorageServer.ZEOStorage(FakeServer(), False) + >>> zeo.notify_connected(FakeConnection()) + >>> zeo.register('1', False) + + >>> next = None + >>> while 1: + ... oid, serial, data, next = zeo.record_iternext(next) + ... print(oid) + ... if next is None: + ... break + 1 + 2 + 3 + 4 + +The storage info also reflects the fact that record_iternext is supported. 
+ + >>> zeo.get_info()['supports_record_iternext'] + True + + >>> zeo = ZEO.StorageServer.ZEOStorage(FakeServer(), False) + >>> zeo.notify_connected(FakeConnection()) + >>> zeo.register('2', False) + + >>> zeo.get_info()['supports_record_iternext'] + False + +""" + +def test_client_record_iternext(): + """Test client storage delegation to the network client + +The client simply delegates record_iternext calls to it's server stub. + +There's really no decent way to test ZEO without running too much crazy +stuff. I'd rather do a lame test than a really lame test, so here goes. + +First, fake out the connection manager so we can make a connection: + + >>> import ZEO + + >>> class Client(ZEO.asyncio.testing.ClientRunner): + ... + ... def record_iternext(self, next=None): + ... if next == None: + ... next = '0' + ... next = str(int(next) + 1) + ... oid = next + ... if next == '4': + ... next = None + ... + ... return oid, oid*8, 'data ' + oid, next + ... + + >>> client = ZEO.client( + ... '', wait=False, _client_factory=Client) + +Now we'll have our way with it's private _server attr: + + >>> next = None + >>> while 1: + ... oid, serial, data, next = client.record_iternext(next) + ... print(oid) + ... if next is None: + ... break + 1 + 2 + 3 + 4 + >>> client.close() + +""" + +def test_suite(): + return doctest.DocTestSuite() + +if __name__ == '__main__': + unittest.main(defaultTest='test_suite') diff --git a/thesisenv/lib/python3.6/site-packages/ZEO/tests/testTransactionBuffer.py b/thesisenv/lib/python3.6/site-packages/ZEO/tests/testTransactionBuffer.py new file mode 100644 index 0000000..620a1ef --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO/tests/testTransactionBuffer.py @@ -0,0 +1,58 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). 
A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## +import random +import unittest + +from ZEO.TransactionBuffer import TransactionBuffer + +def random_string(size): + """Return a random string of size size.""" + l = [chr(random.randrange(256)) for i in range(size)] + return "".join(l) + +def new_store_data(): + """Return arbitrary data to use as argument to store() method.""" + return random_string(8), random_string(random.randrange(1000)) + +def store(tbuf, resolved=False): + data = new_store_data() + tbuf.store(*data) + if resolved: + tbuf.server_resolve(data[0]) + return data + +class TransBufTests(unittest.TestCase): + + def checkTypicalUsage(self): + tbuf = TransactionBuffer(0) + store(tbuf) + store(tbuf) + for o in tbuf: + pass + tbuf.close() + + def checkOrderPreserved(self): + tbuf = TransactionBuffer(0) + data = [] + for i in range(10): + data.append((store(tbuf), False)) + data.append((store(tbuf, True), True)) + + for i, (oid, d, resolved) in enumerate(tbuf): + self.assertEqual((oid, d), data[i][0]) + self.assertEqual(resolved, data[i][1]) + tbuf.close() + +def test_suite(): + return unittest.makeSuite(TransBufTests, 'check') diff --git a/thesisenv/lib/python3.6/site-packages/ZEO/tests/testZEO.py b/thesisenv/lib/python3.6/site-packages/ZEO/tests/testZEO.py new file mode 100644 index 0000000..c752366 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO/tests/testZEO.py @@ -0,0 +1,1835 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. 
+# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## +"""Test suite for ZEO based on ZODB.tests.""" +from __future__ import print_function +import multiprocessing +import re + +from ZEO.ClientStorage import ClientStorage +from ZEO.tests import forker, Cache, CommitLockTests, ThreadTests +from ZEO.tests import IterationTests +from ZEO._compat import PY3 +from ZEO._compat import WIN + +from ZODB.Connection import TransactionMetaData +from ZODB.tests import StorageTestBase, BasicStorage, \ + TransactionalUndoStorage, \ + PackableStorage, Synchronization, ConflictResolution, RevisionStorage, \ + MTStorage, ReadOnlyStorage, IteratorStorage, RecoveryStorage +from ZODB.tests.MinPO import MinPO +from ZODB.tests.StorageTestBase import zodb_unpickle +from ZODB.utils import maxtid, p64, u64, z64 +from zope.testing import renormalizing + +import doctest +import logging +import os +import persistent +import pprint +import re +import shutil +import signal +import stat +import ssl +import sys +import tempfile +import threading +import time +import transaction +import unittest +import ZEO.StorageServer +import ZEO.tests.ConnectionTests +import ZODB +import ZODB.blob +import ZODB.tests.hexstorage +import ZODB.tests.testblob +import ZODB.tests.util +import ZODB.utils +import zope.testing.setupstack + +from . 
import testssl + +logger = logging.getLogger('ZEO.tests.testZEO') + +class DummyDB(object): + def invalidate(self, *args): + pass + def invalidateCache(*unused): + pass + transform_record_data = untransform_record_data = lambda self, v: v + + +class CreativeGetState(persistent.Persistent): + def __getstate__(self): + self.name = 'me' + return super(CreativeGetState, self).__getstate__() + + + +class Test_convenience_functions(unittest.TestCase): + + def test_ZEO_client_convenience(self): + import mock + import ZEO + + client_thread = mock.Mock( + spec=['call', 'async', 'async_iter', 'wait']) + client = ZEO.client( + 8001, wait=False, _client_factory=client_thread) + self.assertIsInstance(client, ClientStorage) + + def test_ZEO_DB_convenience_ok(self): + import mock + import ZEO + + client_mock = mock.Mock(spec=['close']) + client_patch = mock.patch('ZEO.client', return_value=client_mock) + DB_patch = mock.patch('ZODB.DB') + + dummy = object() + + with client_patch as client: + with DB_patch as patched: + db = ZEO.DB(dummy) + + self.assertIs(db, patched()) + client.assert_called_once_with(dummy) + client_mock.close.assert_not_called() + + def test_ZEO_DB_convenience_error(self): + import mock + import ZEO + + client_mock = mock.Mock(spec=['close']) + client_patch = mock.patch('ZEO.client', return_value=client_mock) + DB_patch = mock.patch('ZODB.DB', side_effect=ValueError) + + dummy = object() + + with client_patch as client: + with DB_patch: + with self.assertRaises(ValueError): + ZEO.DB(dummy) + + client.assert_called_once_with(dummy) + client_mock.close.assert_called_once() + + def test_ZEO_connection_convenience_ok(self): + import mock + import ZEO + + ret = object() + DB_mock = mock.Mock(spec=[ + 'close', 'open_then_close_db_when_connection_closes']) + DB_mock.open_then_close_db_when_connection_closes.return_value = ret + DB_patch = mock.patch('ZEO.DB', return_value=DB_mock) + + dummy = object() + + with DB_patch as patched: + conn = ZEO.connection(dummy) + + 
self.assertIs(conn, ret) + patched.assert_called_once_with(dummy) + DB_mock.close.assert_not_called() + + def test_ZEO_connection_convenience_value(self): + import mock + import ZEO + + DB_mock = mock.Mock(spec=[ + 'close', 'open_then_close_db_when_connection_closes']) + otc = DB_mock.open_then_close_db_when_connection_closes + otc.side_effect = ValueError + DB_patch = mock.patch('ZEO.DB', return_value=DB_mock) + + dummy = object() + + with DB_patch as patched: + with self.assertRaises(ValueError): + ZEO.connection(dummy) + + patched.assert_called_once_with(dummy) + DB_mock.close.assert_called_once() + + +class MiscZEOTests(object): + """ZEO tests that don't fit in elsewhere.""" + + def checkCreativeGetState(self): + # This test covers persistent objects that provide their own + # __getstate__ which modifies the state of the object. + # For details see bug #98275 + + db = ZODB.DB(self._storage) + cn = db.open() + rt = cn.root() + m = CreativeGetState() + m.attr = 'hi' + rt['a'] = m + + # This commit used to fail because of the `Mine` object being put back + # into `changed` state although it was already stored causing the ZEO + # cache to bail out. + transaction.commit() + cn.close() + + def checkLargeUpdate(self): + obj = MinPO("X" * (10 * 128 * 1024)) + self._dostore(data=obj) + + def checkZEOInvalidation(self): + addr = self._storage._addr + storage2 = self._wrap_client( + ClientStorage(addr, wait=1, **self._client_options())) + try: + oid = self._storage.new_oid() + ob = MinPO('first') + revid1 = self._dostore(oid, data=ob) + data, serial = storage2.load(oid, '') + self.assertEqual(zodb_unpickle(data), MinPO('first')) + self.assertEqual(serial, revid1) + revid2 = self._dostore(oid, data=MinPO('second'), revid=revid1) + + # Now, storage 2 should eventually get the new data. It + # will take some time, although hopefully not much. 
+ # We'll poll till we get it and whine if we time out: + for n in range(30): + time.sleep(.1) + data, serial = storage2.load(oid, '') + if (serial == revid2 and + zodb_unpickle(data) == MinPO('second') + ): + break + else: + raise AssertionError('Invalidation message was not sent!') + finally: + storage2.close() + + def checkVolatileCacheWithImmediateLastTransaction(self): + # Earlier, a ClientStorage would not have the last transaction id + # available right after successful connection, this is required now. + addr = self._storage._addr + storage2 = ClientStorage(addr, **self._client_options()) + self.assertTrue(storage2.is_connected()) + self.assertEqual(ZODB.utils.z64, storage2.lastTransaction()) + storage2.close() + + self._dostore() + storage3 = ClientStorage(addr, **self._client_options()) + self.assertTrue(storage3.is_connected()) + self.assertEqual(8, len(storage3.lastTransaction())) + self.assertNotEqual(ZODB.utils.z64, storage3.lastTransaction()) + storage3.close() + +class GenericTestBase( + # Base class for all ZODB tests + StorageTestBase.StorageTestBase): + + shared_blob_dir = False + blob_cache_dir = None + server_debug = False + + def setUp(self): + StorageTestBase.StorageTestBase.setUp(self) + logger.info("setUp() %s", self.id()) + zport, stop = forker.start_zeo_server( + self.getConfig(), self.getZEOConfig(), debug=self.server_debug) + self._servers = [stop] + if not self.blob_cache_dir: + # This is the blob cache for ClientStorage + self.blob_cache_dir = tempfile.mkdtemp( + 'blob_cache', + dir=os.path.abspath(os.getcwd())) + self._storage = self._wrap_client( + ClientStorage( + zport, '1', cache_size=20000000, + min_disconnect_poll=0.5, wait=1, + wait_timeout=60, blob_dir=self.blob_cache_dir, + shared_blob_dir=self.shared_blob_dir, + **self._client_options()), + ) + self._storage.registerDB(DummyDB()) + + def getZEOConfig(self): + return forker.ZEOConfig(('127.0.0.1', 0)) + + def _wrap_client(self, client): + return client + + def 
_client_options(self): + return {} + + def tearDown(self): + self._storage.close() + for stop in self._servers: + stop() + StorageTestBase.StorageTestBase.tearDown(self) + +class GenericTests( + GenericTestBase, + + # ZODB test mixin classes (in the same order as imported) + BasicStorage.BasicStorage, + PackableStorage.PackableStorage, + Synchronization.SynchronizedStorage, + MTStorage.MTStorage, + ReadOnlyStorage.ReadOnlyStorage, + # ZEO test mixin classes (in the same order as imported) + CommitLockTests.CommitLockVoteTests, + ThreadTests.ThreadTests, + # Locally defined (see above) + MiscZEOTests, + ): + """Combine tests from various origins in one class. + """ + + def open(self, read_only=0): + # Needed to support ReadOnlyStorage tests. Ought to be a + # cleaner way. + addr = self._storage._addr + self._storage.close() + self._storage = ClientStorage( + addr, read_only=read_only, wait=1, **self._client_options()) + + def checkWriteMethods(self): + # ReadOnlyStorage defines checkWriteMethods. The decision + # about where to raise the read-only error was changed after + # Zope 2.5 was released. So this test needs to detect Zope + # of the 2.5 vintage and skip the test. + + # The __version__ attribute was not present in Zope 2.5. 
+ if hasattr(ZODB, "__version__"): + ReadOnlyStorage.ReadOnlyStorage.checkWriteMethods(self) + + def checkSortKey(self): + key = '%s:%s' % (self._storage._storage, self._storage._server_addr) + self.assertEqual(self._storage.sortKey(), key) + + def _do_store_in_separate_thread(self, oid, revid, voted): + + def do_store(): + store = ZEO.ClientStorage.ClientStorage( + self._storage._addr, **self._client_options()) + try: + t = transaction.get() + store.tpc_begin(t) + store.store(oid, revid, b'x', '', t) + store.tpc_vote(t) + store.tpc_finish(t) + except Exception as v: + import traceback + print('E'*70) + print(v) + traceback.print_exception(*sys.exc_info()) + finally: + store.close() + + thread = threading.Thread(name='T2', target=do_store) + thread.setDaemon(True) + thread.start() + thread.join(voted and .1 or 9) + return thread + +class FullGenericTests( + GenericTests, + Cache.TransUndoStorageWithCache, + ConflictResolution.ConflictResolvingStorage, + ConflictResolution.ConflictResolvingTransUndoStorage, + PackableStorage.PackableUndoStorage, + RevisionStorage.RevisionStorage, + TransactionalUndoStorage.TransactionalUndoStorage, + IteratorStorage.IteratorStorage, + IterationTests.IterationTests, + ): + """Extend GenericTests with tests that MappingStorage can't pass.""" + +class FileStorageRecoveryTests(StorageTestBase.StorageTestBase, + RecoveryStorage.RecoveryStorage): + + def getConfig(self): + return """\ + + path %s + + """ % tempfile.mktemp(dir='.') + + def _new_storage(self): + zconf = forker.ZEOConfig(('127.0.0.1', 0)) + zport, stop = forker.start_zeo_server(self.getConfig(), + zconf) + self._servers.append(stop) + + blob_cache_dir = tempfile.mkdtemp(dir='.') + + storage = ClientStorage( + zport, '1', cache_size=20000000, + min_disconnect_poll=0.5, wait=1, + wait_timeout=60, blob_dir=blob_cache_dir) + storage.registerDB(DummyDB()) + return storage + + def setUp(self): + StorageTestBase.StorageTestBase.setUp(self) + self._servers = [] + + self._storage = 
self._new_storage() + self._dst = self._new_storage() + + def tearDown(self): + self._storage.close() + self._dst.close() + + for stop in self._servers: + stop() + StorageTestBase.StorageTestBase.tearDown(self) + + def new_dest(self): + return self._new_storage() + + +class FileStorageTests(FullGenericTests): + """Test ZEO backed by a FileStorage.""" + + def getConfig(self): + return """\ + + path Data.fs + + """ + + _expected_interfaces = ( + ('ZODB.interfaces', 'IStorageRestoreable'), + ('ZODB.interfaces', 'IStorageIteration'), + ('ZODB.interfaces', 'IStorageUndoable'), + ('ZODB.interfaces', 'IStorageCurrentRecordIteration'), + ('ZODB.interfaces', 'IExternalGC'), + ('ZODB.interfaces', 'IStorage'), + ('zope.interface', 'Interface'), + ) + + def checkInterfaceFromRemoteStorage(self): + # ClientStorage itself doesn't implement IStorageIteration, but the + # FileStorage on the other end does, and thus the ClientStorage + # instance that is connected to it reflects this. + self.assertFalse(ZODB.interfaces.IStorageIteration.implementedBy( + ZEO.ClientStorage.ClientStorage)) + self.assertTrue(ZODB.interfaces.IStorageIteration.providedBy( + self._storage)) + # This is communicated using ClientStorage's _info object: + self.assertEqual(self._expected_interfaces, + self._storage._info['interfaces'] + ) + +class FileStorageSSLTests(FileStorageTests): + + def getZEOConfig(self): + return testssl.server_config + + def _client_options(self): + return {'ssl': testssl.client_ssl()} + + +class FileStorageHexTests(FileStorageTests): + _expected_interfaces = ( + ('ZODB.interfaces', 'IStorageRestoreable'), + ('ZODB.interfaces', 'IStorageIteration'), + ('ZODB.interfaces', 'IStorageUndoable'), + ('ZODB.interfaces', 'IStorageCurrentRecordIteration'), + ('ZODB.interfaces', 'IExternalGC'), + ('ZODB.interfaces', 'IStorage'), + ('ZODB.interfaces', 'IStorageWrapper'), + ('zope.interface', 'Interface'), + ) + + def getConfig(self): + return """\ + %import ZODB.tests + + + path Data.fs + + + 
""" + +class FileStorageClientHexTests(FileStorageHexTests): + + def getConfig(self): + return """\ + %import ZODB.tests + + + path Data.fs + + + """ + + def _wrap_client(self, client): + return ZODB.tests.hexstorage.HexStorage(client) + +class ClientConflictResolutionTests( + GenericTestBase, + ConflictResolution.ConflictResolvingStorage, + ): + + def getConfig(self): + return '\n\n' + + def getZEOConfig(self): + # Using '' can result in binding to :: and cause problems + # connecting to the MTAcceptor on Travis CI + return forker.ZEOConfig(('127.0.0.1', 0), client_conflict_resolution=True) + +class MappingStorageTests(GenericTests): + """ZEO backed by a Mapping storage.""" + + def getConfig(self): + return """""" + + def checkSimpleIteration(self): + # The test base class IteratorStorage assumes that we keep undo data + # to construct our iterator, which we don't, so we disable this test. + pass + + def checkUndoZombie(self): + # The test base class IteratorStorage assumes that we keep undo data + # to construct our iterator, which we don't, so we disable this test. + pass + +class DemoStorageTests( + GenericTests, + ): + + def getConfig(self): + return """ + + + path Data.fs + + + """ + + def checkUndoZombie(self): + # The test base class IteratorStorage assumes that we keep undo data + # to construct our iterator, which we don't, so we disable this test. 
+ pass + + def checkPackWithMultiDatabaseReferences(self): + pass # DemoStorage pack doesn't do gc + checkPackAllRevisions = checkPackWithMultiDatabaseReferences + +class ZRPCConnectionTests(ZEO.tests.ConnectionTests.CommonSetupTearDown): + + def getConfig(self, path, create, read_only): + return """""" + + def checkCatastrophicClientLoopFailure(self): + # Test what happens when the client loop falls over + self._storage = self.openClientStorage() + + import zope.testing.loggingsupport + handler = zope.testing.loggingsupport.InstalledHandler( + 'ZEO.asyncio.client') + + + # We no longer implement the event loop, we we no longer know + # how to break it. We'll just stop it instead for now. + self._storage._server.loop.call_soon_threadsafe( + self._storage._server.loop.stop) + + forker.wait_until( + 'disconnected', + lambda : not self._storage.is_connected() + ) + + log = str(handler) + handler.uninstall() + self.assertTrue("Client loop stopped unexpectedly" in log) + + def checkExceptionLogsAtError(self): + # Test the exceptions are logged at error + self._storage = self.openClientStorage() + self._dostore(z64, data=MinPO("X" * (10 * 128 * 1024))) + + from zope.testing.loggingsupport import InstalledHandler + handler = InstalledHandler('ZEO.asyncio.client') + import ZODB.POSException + self.assertRaises(TypeError, self._storage.history, z64, None) + self.assertTrue(re.search(" from server: .*TypeError", str(handler))) + + # POSKeyErrors and ConflictErrors aren't logged: + handler.clear() + self.assertRaises(ZODB.POSException.POSKeyError, + self._storage.history, None, None) + handler.uninstall() + self.assertEqual(str(handler), '') + + def checkConnectionInvalidationOnReconnect(self): + + storage = ClientStorage(self.addr, min_disconnect_poll=0.1) + self._storage = storage + assert storage.is_connected() + + class DummyDB(object): + _invalidatedCache = 0 + def invalidateCache(self): + self._invalidatedCache += 1 + def invalidate(*a, **k): + pass + 
transform_record_data = untransform_record_data = \ + lambda self, data: data + + db = DummyDB() + storage.registerDB(db) + + base = db._invalidatedCache + + # Now we'll force a disconnection and reconnection + storage._server.loop.call_soon_threadsafe( + storage._server.client.protocol.connection_lost, + ValueError('test')) + + # and we'll wait for the storage to be reconnected: + for i in range(100): + if storage.is_connected(): + if db._invalidatedCache > base: + break + time.sleep(0.1) + else: + raise AssertionError("Couldn't connect to server") + + # Now, the root object in the connection should have been invalidated: + self.assertEqual(db._invalidatedCache, base+1) + + +class CommonBlobTests(object): + + def getConfig(self): + return """ + + blob-dir blobs + + path Data.fs + + + """ + + blobdir = 'blobs' + blob_cache_dir = 'blob_cache' + + def checkStoreBlob(self): + import transaction + from ZODB.blob import Blob + from ZODB.tests.StorageTestBase import ZERO + from ZODB.tests.StorageTestBase import zodb_pickle + + somedata = b'a' * 10 + + blob = Blob() + with blob.open('w') as bd_fh: + bd_fh.write(somedata) + tfname = bd_fh.name + oid = self._storage.new_oid() + data = zodb_pickle(blob) + self.assertTrue(os.path.exists(tfname)) + + t = TransactionMetaData() + try: + self._storage.tpc_begin(t) + self._storage.storeBlob(oid, ZERO, data, tfname, '', t) + self._storage.tpc_vote(t) + revid = self._storage.tpc_finish(t) + except: + self._storage.tpc_abort(t) + raise + self.assertTrue(not os.path.exists(tfname)) + filename = self._storage.fshelper.getBlobFilename(oid, revid) + self.assertTrue(os.path.exists(filename)) + with open(filename, 'rb') as f: + self.assertEqual(somedata, f.read()) + + def checkStoreBlob_wrong_partition(self): + os_rename = os.rename + try: + def fail(*a): + raise OSError + os.rename = fail + self.checkStoreBlob() + finally: + os.rename = os_rename + + def checkLoadBlob(self): + from ZODB.blob import Blob + from ZODB.tests.StorageTestBase 
import zodb_pickle, ZERO + import transaction + + somedata = b'a' * 10 + + blob = Blob() + with blob.open('w') as bd_fh: + bd_fh.write(somedata) + tfname = bd_fh.name + oid = self._storage.new_oid() + data = zodb_pickle(blob) + + t = TransactionMetaData() + try: + self._storage.tpc_begin(t) + self._storage.storeBlob(oid, ZERO, data, tfname, '', t) + self._storage.tpc_vote(t) + serial = self._storage.tpc_finish(t) + except: + self._storage.tpc_abort(t) + raise + + filename = self._storage.loadBlob(oid, serial) + with open(filename, 'rb') as f: + self.assertEqual(somedata, f.read()) + self.assertTrue(not(os.stat(filename).st_mode & stat.S_IWRITE)) + self.assertTrue((os.stat(filename).st_mode & stat.S_IREAD)) + + def checkTemporaryDirectory(self): + self.assertEqual(os.path.join(self.blob_cache_dir, 'tmp'), + self._storage.temporaryDirectory()) + + def checkTransactionBufferCleanup(self): + oid = self._storage.new_oid() + with open('blob_file', 'wb') as f: + f.write(b'I am a happy blob.') + t = TransactionMetaData() + self._storage.tpc_begin(t) + self._storage.storeBlob( + oid, ZODB.utils.z64, 'foo', 'blob_file', '', t) + self._storage.close() + + +class BlobAdaptedFileStorageTests(FullGenericTests, CommonBlobTests): + """ZEO backed by a BlobStorage-adapted FileStorage.""" + + def checkStoreAndLoadBlob(self): + import transaction + from ZODB.blob import Blob + from ZODB.tests.StorageTestBase import ZERO + from ZODB.tests.StorageTestBase import zodb_pickle + + somedata_path = os.path.join(self.blob_cache_dir, 'somedata') + with open(somedata_path, 'w+b') as somedata: + for i in range(1000000): + somedata.write(("%s\n" % i).encode('ascii')) + + def check_data(path): + self.assertTrue(os.path.exists(path)) + somedata.seek(0) + d1 = d2 = 1 + with open(path, 'rb') as f: + while d1 or d2: + d1 = f.read(8096) + d2 = somedata.read(8096) + self.assertEqual(d1, d2) + somedata.seek(0) + + blob = Blob() + with blob.open('w') as bd_fh: + ZODB.utils.cp(somedata, bd_fh) + 
bd_fh.close() + tfname = bd_fh.name + oid = self._storage.new_oid() + data = zodb_pickle(blob) + self.assertTrue(os.path.exists(tfname)) + + t = TransactionMetaData() + try: + self._storage.tpc_begin(t) + self._storage.storeBlob(oid, ZERO, data, tfname, '', t) + self._storage.tpc_vote(t) + revid = self._storage.tpc_finish(t) + except: + self._storage.tpc_abort(t) + raise + + # The uncommitted data file should have been removed + self.assertTrue(not os.path.exists(tfname)) + + # The file should be in the cache ... + filename = self._storage.fshelper.getBlobFilename(oid, revid) + check_data(filename) + + # ... and on the server + server_filename = os.path.join( + self.blobdir, + ZODB.blob.BushyLayout().getBlobFilePath(oid, revid), + ) + + self.assertTrue(server_filename.startswith(self.blobdir)) + check_data(server_filename) + + # If we remove it from the cache and call loadBlob, it should + # come back. We can do this in many threads. + + ZODB.blob.remove_committed(filename) + returns = [] + threads = [ + threading.Thread( + target=lambda : + returns.append(self._storage.loadBlob(oid, revid)) + ) + for i in range(10) + ] + [thread.start() for thread in threads] + [thread.join() for thread in threads] + [self.assertEqual(r, filename) for r in returns] + check_data(filename) + + +class BlobWritableCacheTests(FullGenericTests, CommonBlobTests): + + blob_cache_dir = 'blobs' + shared_blob_dir = True + +class FauxConn(object): + addr = 'x' + protocol_version = ZEO.asyncio.server.best_protocol_version + peer_protocol_version = protocol_version + + serials = [] + def async_(self, method, *args): + if method == 'serialnos': + self.serials.extend(args[0]) + + call_soon_threadsafe = async_threadsafe = async_ + +class StorageServerWrapper(object): + + def __init__(self, server, storage_id): + self.storage_id = storage_id + self.server = ZEO.StorageServer.ZEOStorage(server, server.read_only) + self.server.notify_connected(FauxConn()) + self.server.register(storage_id, False) + + 
def sortKey(self): + return self.storage_id + + def __getattr__(self, name): + return getattr(self.server, name) + + def registerDB(self, *args): + pass + + def supportsUndo(self): + return False + + def new_oid(self): + return self.server.new_oids(1)[0] + + def tpc_begin(self, transaction): + self.server.tpc_begin(id(transaction), '', '', {}, None, ' ') + + def tpc_vote(self, transaction): + result = self.server.vote(id(transaction)) + assert result == self.server.connection.serials[:] + del self.server.connection.serials[:] + return result + + def store(self, oid, serial, data, version_ignored, transaction): + self.server.storea(oid, serial, data, id(transaction)) + + def send_reply(self, _, result): # Masquerade as conn + self._result = result + + def tpc_abort(self, transaction): + self.server.tpc_abort(id(transaction)) + + def tpc_finish(self, transaction, func = lambda: None): + self.server.tpc_finish(id(transaction)).set_sender(0, self) + return self._result + +def multiple_storages_invalidation_queue_is_not_insane(): + """ + >>> from ZEO.StorageServer import StorageServer, ZEOStorage + >>> from ZODB.FileStorage import FileStorage + >>> from ZODB.DB import DB + >>> from persistent.mapping import PersistentMapping + >>> from transaction import commit + >>> fs1 = FileStorage('t1.fs') + >>> fs2 = FileStorage('t2.fs') + >>> server = StorageServer(None, storages=dict(fs1=fs1, fs2=fs2)) + + >>> s1 = StorageServerWrapper(server, 'fs1') + >>> s2 = StorageServerWrapper(server, 'fs2') + + >>> db1 = DB(s1); conn1 = db1.open() + >>> db2 = DB(s2); conn2 = db2.open() + + >>> commit() + >>> o1 = conn1.root() + >>> for i in range(10): + ... o1.x = PersistentMapping(); o1 = o1.x + ... commit() + + >>> last = fs1.lastTransaction() + >>> for i in range(5): + ... o1.x = PersistentMapping(); o1 = o1.x + ... commit() + + >>> o2 = conn2.root() + >>> for i in range(20): + ... o2.x = PersistentMapping(); o2 = o2.x + ... 
commit() + + >>> trans, oids = s1.getInvalidations(last) + >>> from ZODB.utils import u64 + >>> sorted([int(u64(oid)) for oid in oids]) + [10, 11, 12, 13, 14] + + >>> fs1.close(); fs2.close() + """ + +def getInvalidationsAfterServerRestart(): + """ + +Clients were often forced to verify their caches after a server +restart even if there weren't many transactions between the server +restart and the client connect. + +Let's create a file storage and stuff some data into it: + + >>> from ZEO.StorageServer import StorageServer, ZEOStorage + >>> from ZODB.FileStorage import FileStorage + >>> from ZODB.DB import DB + >>> from persistent.mapping import PersistentMapping + >>> fs = FileStorage('t.fs') + >>> db = DB(fs) + >>> conn = db.open() + >>> from transaction import commit + >>> last = [] + >>> for i in range(100): + ... conn.root()[i] = PersistentMapping() + ... commit() + ... last.append(fs.lastTransaction()) + >>> db.close() + +Now we'll open a storage server on the data, simulating a restart: + + >>> fs = FileStorage('t.fs') + >>> sv = StorageServer(None, dict(fs=fs)) + >>> s = ZEOStorage(sv, sv.read_only) + >>> s.notify_connected(FauxConn()) + >>> s.register('fs', False) == fs.lastTransaction() + True + +If we ask for the last transaction, we should get the last transaction +we saved: + + >>> s.lastTransaction() == last[-1] + True + +If a storage implements the method lastInvalidations, as FileStorage +does, then the storage server will populate its invalidation data +structure using lastTransactions. + + + >>> tid, oids = s.getInvalidations(last[-10]) + >>> tid == last[-1] + True + + + >>> from ZODB.utils import u64 + >>> sorted([int(u64(oid)) for oid in oids]) + [0, 92, 93, 94, 95, 96, 97, 98, 99, 100] + +(Note that the fact that we get oids for 92-100 is actually an +artifact of the fact that the FileStorage lastInvalidations method +returns all OIDs written by transactions, even if the OIDs were +created and not modified. 
FileStorages don't record whether objects +were created rather than modified. Objects that are just created don't +need to be invalidated. This means we'll invalidate objects that +dont' need to be invalidated, however, that's better than verifying +caches.) + + >>> fs.close() + +If a storage doesn't implement lastInvalidations, a client can still +avoid verifying its cache if it was up to date when the server +restarted. To illustrate this, we'll create a subclass of FileStorage +without this method: + + >>> class FS(FileStorage): + ... lastInvalidations = property() + + >>> fs = FS('t.fs') + >>> sv = StorageServer(None, dict(fs=fs)) + >>> st = StorageServerWrapper(sv, 'fs') + >>> s = st.server + +Now, if we ask for the invalidations since the last committed +transaction, we'll get a result: + + >>> tid, oids = s.getInvalidations(last[-1]) + >>> tid == last[-1] + True + >>> oids + [] + + >>> db = DB(st); conn = db.open() + >>> ob = conn.root() + >>> for i in range(5): + ... ob.x = PersistentMapping(); ob = ob.x + ... commit() + ... last.append(fs.lastTransaction()) + + >>> ntid, oids = s.getInvalidations(tid) + >>> ntid == last[-1] + True + + >>> sorted([int(u64(oid)) for oid in oids]) + [0, 101, 102, 103, 104] + + >>> fs.close() + """ + +def tpc_finish_error(): + r"""Server errors in tpc_finish weren't handled properly. + + If there are errors applying changes to the client cache, don't + leave the cache in an inconsistent state. 
+ + >>> addr, admin = start_server() + + >>> client = ZEO.client(addr) + >>> db = ZODB.DB(client) + >>> conn = db.open() + >>> conn.root.x = 1 + >>> t = conn.transaction_manager.get() + >>> conn.tpc_begin(t) + >>> conn.commit(t) + >>> transaction_meta_data = t.data(conn) + >>> _ = client.tpc_vote(transaction_meta_data) + + Cause some breakage by messing with the clients transaction + buffer, sadly, using implementation details: + + >>> tbuf = client._check_trans(transaction_meta_data, 'test') + >>> tbuf.client_resolved = None + + tpc_finish will fail: + + >>> client.tpc_finish(transaction_meta_data) # doctest: +ELLIPSIS + Traceback (most recent call last): + ... + AttributeError: ... + + >>> client.tpc_abort(transaction_meta_data) + >>> t.abort() + + But we can still load the saved data: + + >>> conn2 = db.open() + >>> conn2.root.x + 1 + + And we can save new data: + + >>> conn2.root.x += 1 + >>> conn2.transaction_manager.commit() + + >>> db.close() + + >>> stop_server(admin) + """ + +def test_prefetch(self): + """The client storage prefetch method pre-fetches from the server + + >>> count = 999 + + >>> import ZEO + >>> addr, stop = start_server() + >>> conn = ZEO.connection(addr) + >>> root = conn.root() + >>> cls = root.__class__ + >>> for i in range(count): + ... root[i] = cls() + >>> conn.transaction_manager.commit() + >>> oids = [root[i]._p_oid for i in range(count)] + >>> conn.close() + >>> conn = ZEO.connection(addr) + >>> storage = conn.db().storage + >>> len(storage._cache) <= 1 + True + >>> storage.prefetch(oids, conn._storage._start) + + The prefetch returns before the cache is filled: + + >>> len(storage._cache) < count + True + + But it is filled eventually: + + >>> from zope.testing.wait import wait + >>> wait(lambda : len(storage._cache) > count) + + >>> loads = storage.server_status()['loads'] + + Now if we reload the data, it will be satisfied from the cache: + + >>> for oid in oids: + ... 
_ = conn._storage.load(oid) + + >>> storage.server_status()['loads'] == loads + True + + >>> conn.close() + """ + +def client_has_newer_data_than_server(): + """It is bad if a client has newer data than the server. + + >>> db = ZODB.DB('Data.fs') + >>> db.close() + >>> r = shutil.copyfile('Data.fs', 'Data.save') + >>> addr, admin = start_server(keep=1) + >>> db = ZEO.DB(addr, name='client', max_disconnect_poll=.01) + >>> wait_connected(db.storage) + >>> conn = db.open() + >>> conn.root().x = 1 + >>> transaction.commit() + + OK, we've added some data to the storage and the client cache has + the new data. Now, we'll stop the server, put back the old data, and + see what happens. :) + + >>> stop_server(admin) + >>> r = shutil.copyfile('Data.save', 'Data.fs') + + >>> import zope.testing.loggingsupport + >>> handler = zope.testing.loggingsupport.InstalledHandler( + ... 'ZEO', level=logging.ERROR) + >>> formatter = logging.Formatter('%(name)s %(levelname)s %(message)s') + + >>> _, admin = start_server(addr=addr) + + >>> wait_until('got enough errors', lambda: + ... len([x for x in handler.records + ... if x.levelname == 'CRITICAL' and + ... 'Client has seen newer transactions than server!' in x.msg + ... ]) >= 2) + + Note that the errors repeat because the client keeps on trying to connect. + + >>> db.close() + >>> handler.uninstall() + >>> stop_server(admin) + + """ + +def history_over_zeo(): + """ + >>> addr, _ = start_server() + >>> db = ZEO.DB(addr) + >>> wait_connected(db.storage) + >>> conn = db.open() + >>> conn.root().x = 0 + >>> transaction.commit() + >>> len(db.history(conn.root()._p_oid, 99)) + 2 + + >>> db.close() + """ + +def dont_log_poskeyerrors_on_server(): + """ + >>> addr, admin = start_server(log='server.log') + >>> cs = ClientStorage(addr) + >>> cs.load(ZODB.utils.p64(1)) + Traceback (most recent call last): + ... + POSKeyError: 0x01 + + >>> cs.close() + >>> stop_server(admin) + >>> with open('server.log') as f: + ... 
'POSKeyError' in f.read() + False + """ + +def open_convenience(): + """Often, we just want to open a single connection. + + >>> addr, _ = start_server(path='data.fs') + >>> conn = ZEO.connection(addr) + >>> conn.root() + {} + + >>> conn.root()['x'] = 1 + >>> transaction.commit() + >>> conn.close() + + Let's make sure the database was cloased when we closed the + connection, and that the data is there. + + >>> db = ZEO.DB(addr) + >>> conn = db.open() + >>> conn.root() + {'x': 1} + >>> db.close() + """ + +def client_asyncore_thread_has_name(): + """ + >>> addr, _ = start_server() + >>> db = ZEO.DB(addr) + >>> any(t for t in threading.enumerate() + ... if ' zeo client networking thread' in t.getName()) + True + >>> db.close() + """ + +def runzeo_without_configfile(): + """ + >>> with open('runzeo', 'w') as r: + ... _ = r.write(''' + ... import sys + ... sys.path[:] = %r + ... import ZEO.runzeo + ... ZEO.runzeo.main(sys.argv[1:]) + ... ''' % sys.path) + + >>> import subprocess, re + >>> print(re.sub(br'\d\d+|[:]', b'', subprocess.Popen( + ... [sys.executable, 'runzeo', '-a:0', '-ft', '--test'], + ... stdout=subprocess.PIPE, stderr=subprocess.STDOUT, + ... ).stdout.read()).decode('ascii')) + ... # doctest: +ELLIPSIS +NORMALIZE_WHITESPACE + ------ + --T INFO ZEO.runzeo () opening storage '1' using FileStorage + ------ + --T INFO ZEO.StorageServer StorageServer created RW with storages 1RWt + ------ + --T INFO ZEO.asyncio... listening on ... + ------ + --T INFO ZEO.StorageServer closing storage '1' + testing exit immediately + """ + +def close_client_storage_w_invalidations(): + r""" +Invalidations could cause errors when closing client storages, + + >>> addr, _ = start_server() + >>> writing = threading.Event() + >>> def mad_write_thread(): + ... global writing + ... conn = ZEO.connection(addr) + ... writing.set() + ... while writing.isSet(): + ... conn.root.x = 1 + ... transaction.commit() + ... 
conn.close() + + >>> thread = threading.Thread(target=mad_write_thread) + >>> thread.setDaemon(True) + >>> thread.start() + >>> _ = writing.wait() + >>> time.sleep(.01) + >>> for i in range(10): + ... conn = ZEO.connection(addr) + ... _ = conn._storage.load(b'\0'*8) + ... conn.close() + + >>> writing.clear() + >>> thread.join(1) + """ + +def convenient_to_pass_port_to_client_and_ZEO_dot_client(): + """Jim hates typing + + >>> addr, _ = start_server() + >>> client = ZEO.client(addr[1]) + >>> client.__name__ == "('127.0.0.1', %s)" % addr[1] + True + + >>> client.close() + """ + +@forker.skip_if_testing_client_against_zeo4 +def test_server_status(): + """ + You can get server status using the server_status method. + + >>> addr, _ = start_server(zeo_conf=dict(transaction_timeout=1)) + >>> db = ZEO.DB(addr) + >>> pprint.pprint(db.storage.server_status(), width=40) + {'aborts': 0, + 'active_txns': 0, + 'commits': 1, + 'conflicts': 0, + 'conflicts_resolved': 0, + 'connections': 1, + 'last-transaction': '03ac11b771fa1c00', + 'loads': 1, + 'lock_time': None, + 'start': 'Tue May 4 10:55:20 2010', + 'stores': 1, + 'timeout-thread-is-alive': True, + 'waiting': 0} + + >>> db.close() + """ + +@forker.skip_if_testing_client_against_zeo4 +def test_ruok(): + """ + You can also get server status using the ruok protocol. + + >>> addr, _ = start_server(zeo_conf=dict(transaction_timeout=1)) + >>> db = ZEO.DB(addr) # force a transaction :) + >>> import json, socket, struct + >>> s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + >>> s.connect(addr) + >>> writer = s.makefile(mode='wb') + >>> _ = writer.write(struct.pack(">I", 4)+b"ruok") + >>> writer.close() + >>> proto = s.recv(struct.unpack(">I", s.recv(4))[0]) + >>> data = json.loads( + ... 
s.recv(struct.unpack(">I", s.recv(4))[0]).decode("ascii")) + >>> pprint.pprint(data['1']) + {u'aborts': 0, + u'active_txns': 0, + u'commits': 1, + u'conflicts': 0, + u'conflicts_resolved': 0, + u'connections': 1, + u'last-transaction': u'03ac11cd11372499', + u'loads': 1, + u'lock_time': None, + u'start': u'Sun Jan 4 09:37:03 2015', + u'stores': 1, + u'timeout-thread-is-alive': True, + u'waiting': 0} + >>> db.close(); s.close() + """ + +def client_labels(): + """ +When looking at server logs, for servers with lots of clients coming +from the same machine, it can be very difficult to correlate server +log entries with actual clients. It's possible, sort of, but tedious. + +You can make this easier by passing a label to the ClientStorage +constructor. + + >>> addr, _ = start_server(log='server.log') + >>> db = ZEO.DB(addr, client_label='test-label-1') + >>> db.close() + >>> @wait_until + ... def check_for_test_label_1(): + ... with open('server.log') as f: + ... for line in f: + ... if 'test-label-1' in line: + ... print(line.split()[1:4]) + ... return True + ['INFO', 'ZEO.StorageServer', '(test-label-1'] + +You can specify the client label via a configuration file as well: + + >>> import ZODB.config + >>> db = ZODB.config.databaseFromString(''' + ... + ... + ... server :%s + ... client-label test-label-2 + ... + ... + ... ''' % addr[1]) + >>> db.close() + >>> @wait_until + ... def check_for_test_label_2(): + ... with open('server.log') as f: + ... for line in f: + ... if 'test-label-2' in line: + ... print(line.split()[1:4]) + ... return True + ['INFO', 'ZEO.StorageServer', '(test-label-2'] + + """ + +def invalidate_client_cache_entry_on_server_commit_error(): + """ + +When the serials returned during commit includes an error, typically a +conflict error, invalidate the cache entry. This is important when +the cache is messed up. 
+ + >>> addr, _ = start_server() + >>> conn1 = ZEO.connection(addr) + >>> conn1.root.x = conn1.root().__class__() + >>> transaction.commit() + >>> conn1.root.x + {} + + >>> cs = ZEO.ClientStorage.ClientStorage(addr, client='cache') + >>> conn2 = ZODB.connection(cs) + >>> conn2.root.x + {} + + >>> conn2.close() + >>> cs.close() + + >>> conn1.root.x['x'] = 1 + >>> transaction.commit() + >>> conn1.root.x + {'x': 1} + +Now, let's screw up the cache by making it have a last tid that is later than +the root serial. + + >>> import ZEO.cache + >>> cache = ZEO.cache.ClientCache('cache-1.zec') + >>> cache.setLastTid(p64(u64(conn1.root.x._p_serial)+1)) + >>> cache.close() + +We'll also update the server so that it's last tid is newer than the cache's: + + >>> conn1.root.y = 1 + >>> transaction.commit() + >>> conn1.root.y = 2 + >>> transaction.commit() + +Now, if we reopen the client storage, we'll get the wrong root: + + >>> cs = ZEO.ClientStorage.ClientStorage(addr, client='cache') + >>> conn2 = ZODB.connection(cs) + >>> conn2.root.x + {} + +And, we'll get a conflict error if we try to modify it: + + >>> conn2.root.x['y'] = 1 + >>> transaction.commit() # doctest: +ELLIPSIS + Traceback (most recent call last): + ... + ConflictError: ... + +But, if we abort, we'll get up to date data and we'll see the changes. 
+ + >>> transaction.abort() + >>> conn2.root.x + {'x': 1} + >>> conn2.root.x['y'] = 1 + >>> transaction.commit() + >>> sorted(conn2.root.x.items()) + [('x', 1), ('y', 1)] + + >>> conn2.close() + >>> cs.close() + >>> conn1.close() + """ + + +script_template = """ +import sys +sys.path[:] = %(path)r + +%(src)s + +""" +def generate_script(name, src): + with open(name, 'w') as f: + f.write(script_template % dict( + exe=sys.executable, + path=sys.path, + src=src, + )) + +def read(filename): + with open(filename) as f: + return f.read() + +def runzeo_logrotate_on_sigusr2(): + """ + >>> from ZEO.tests.forker import get_port + >>> port = get_port() + >>> with open('c', 'w') as r: + ... _ = r.write(''' + ... + ... address %s + ... + ... + ... + ... + ... + ... path l + ... + ... + ... ''' % port) + >>> generate_script('s', ''' + ... import ZEO.runzeo + ... ZEO.runzeo.main() + ... ''') + >>> import subprocess, signal + >>> p = subprocess.Popen([sys.executable, 's', '-Cc'], close_fds=True) + >>> wait_until('started', + ... lambda : os.path.exists('l') and ('listening on' in read('l')) + ... 
) + + >>> oldlog = read('l') + >>> os.rename('l', 'o') + >>> os.kill(p.pid, signal.SIGUSR2) + + >>> s = ClientStorage(port) + >>> s.close() + >>> wait_until('See logging', lambda : ('Log files ' in read('l'))) + >>> read('o') == oldlog # No new data in old log + True + + # Cleanup: + + >>> os.kill(p.pid, signal.SIGKILL) + >>> _ = p.wait() + """ + +def unix_domain_sockets(): + """Make sure unix domain sockets work + + >>> addr, _ = start_server(port='./sock') + + >>> c = ZEO.connection(addr) + >>> c.root.x = 1 + >>> transaction.commit() + >>> c.close() + """ + +def gracefully_handle_abort_while_storing_many_blobs(): + r""" + + >>> import logging, sys + >>> old_level = logging.getLogger().getEffectiveLevel() + >>> logging.getLogger().setLevel(logging.ERROR) + >>> handler = logging.StreamHandler(sys.stdout) + >>> logging.getLogger().addHandler(handler) + + >>> addr, _ = start_server(blob_dir='blobs') + >>> client = ZEO.client(addr, blob_dir='cblobs') + >>> c = ZODB.connection(client) + >>> c.root.x = ZODB.blob.Blob(b'z'*(1<<20)) + >>> c.root.y = ZODB.blob.Blob(b'z'*(1<<2)) + >>> t = c.transaction_manager.get() + >>> c.tpc_begin(t) + >>> c.commit(t) + +We've called commit, but the blob sends are queued. We'll call abort +right away, which will delete the temporary blob files. The queued +iterators will try to open these files. + + >>> c.tpc_abort(t) + +Now we'll try to use the connection, mainly to wait for everything to +get processed. Before we fixed this by making tpc_finish a synchronous +call to the server. we'd get some sort of error here. 
+ + >>> _ = client._call('loadBefore', b'\0'*8, maxtid) + + >>> c.close() + + >>> logging.getLogger().removeHandler(handler) + >>> logging.getLogger().setLevel(old_level) + + + + """ + +def ClientDisconnected_errors_are_TransientErrors(): + """ + >>> from ZEO.Exceptions import ClientDisconnected + >>> from transaction.interfaces import TransientError + >>> issubclass(ClientDisconnected, TransientError) + True + """ + +if not os.environ.get('ZEO4_SERVER'): + if os.environ.get('ZEO_MSGPACK'): + def test_runzeo_msgpack_support(): + """ + >>> import ZEO + + >>> a, s = ZEO.server(threaded=False) + >>> conn = ZEO.connection(a) + >>> str(conn.db().storage.protocol_version.decode('ascii')) + 'M5' + >>> conn.close(); s() + """ + else: + def test_runzeo_msgpack_support(): + """ + >>> import ZEO + + >>> a, s = ZEO.server(threaded=False) + >>> conn = ZEO.connection(a) + >>> str(conn.db().storage.protocol_version.decode('ascii')) + 'Z5' + >>> conn.close(); s() + + >>> a, s = ZEO.server(zeo_conf=dict(msgpack=True), threaded=False) + >>> conn = ZEO.connection(a) + >>> str(conn.db().storage.protocol_version.decode('ascii')) + 'M5' + >>> conn.close(); s() + """ + +if WIN: + del runzeo_logrotate_on_sigusr2 + del unix_domain_sockets + +def work_with_multiprocessing_process(name, addr, q): + conn = ZEO.connection(addr) + q.put((name, conn.root.x)) + conn.close() + +class MultiprocessingTests(unittest.TestCase): + + layer = ZODB.tests.util.MininalTestLayer('work_with_multiprocessing') + + def test_work_with_multiprocessing(self): + "Client storage should work with multi-processing." 
+ + # Gaaa, zope.testing.runner.FakeInputContinueGenerator has no close + if not hasattr(sys.stdin, 'close'): + sys.stdin.close = lambda : None + if not hasattr(sys.stdin, 'fileno'): + sys.stdin.fileno = lambda : -1 + + self.globs = {} + forker.setUp(self) + addr, adminaddr = self.globs['start_server']() + conn = ZEO.connection(addr) + conn.root.x = 1 + transaction.commit() + q = multiprocessing.Queue() + processes = [multiprocessing.Process( + target=work_with_multiprocessing_process, + args=(i, addr, q)) + for i in range(3)] + _ = [p.start() for p in processes] + self.assertEqual(sorted(q.get(timeout=300) for p in processes), + [(0, 1), (1, 1), (2, 1)]) + + _ = [p.join(30) for p in processes] + conn.close() + zope.testing.setupstack.tearDown(self) + +@forker.skip_if_testing_client_against_zeo4 +def quick_close_doesnt_kill_server(): + r""" + + Start a server: + + >>> from .testssl import server_config, client_ssl + >>> addr, _ = start_server(zeo_conf=server_config) + + Now connect and immediately disconnect. This caused the server to + die in the past: + + >>> import socket, struct + >>> for i in range(5): + ... s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + ... s.setsockopt(socket.SOL_SOCKET, socket.SO_LINGER, + ... struct.pack('ii', 1, 0)) + ... s.connect(addr) + ... s.close() + + + >>> print("\n\nXXX WARNING: running quick_close_doesnt_kill_server with ssl as hack pending http://bugs.python.org/issue27386\n", file=sys.stderr) # Intentional long line to be annoying till this is fixed + + Now we should be able to connect as normal: + + >>> db = ZEO.DB(addr, ssl=client_ssl()) + >>> db.storage.is_connected() + True + + >>> db.close() + """ + +def can_use_empty_string_for_local_host_on_client(): + """We should be able to spell localhost with ''. 
+ + >>> (_, port), _ = start_server() + >>> conn = ZEO.connection(('', port)) + >>> conn.root() + {} + >>> conn.root.x = 1 + >>> transaction.commit() + + >>> conn.close() + """ + +slow_test_classes = [ + BlobAdaptedFileStorageTests, BlobWritableCacheTests, + MappingStorageTests, DemoStorageTests, + FileStorageTests, + FileStorageHexTests, FileStorageClientHexTests, + ] +if not forker.ZEO4_SERVER: + slow_test_classes.append(FileStorageSSLTests) + +quick_test_classes = [FileStorageRecoveryTests, ZRPCConnectionTests] + +class ServerManagingClientStorage(ClientStorage): + + def __init__(self, name, blob_dir, shared=False, extrafsoptions=''): + if shared: + server_blob_dir = blob_dir + else: + server_blob_dir = 'server-'+blob_dir + self.globs = {} + addr, stop = forker.start_zeo_server( + """ + + blob-dir %s + + path %s + %s + + + """ % (server_blob_dir, name+'.fs', extrafsoptions), + ) + zope.testing.setupstack.register(self, stop) + if shared: + ClientStorage.__init__(self, addr, blob_dir=blob_dir, + shared_blob_dir=True) + else: + ClientStorage.__init__(self, addr, blob_dir=blob_dir) + + def close(self): + ClientStorage.close(self) + zope.testing.setupstack.tearDown(self) + +def create_storage_shared(name, blob_dir): + return ServerManagingClientStorage(name, blob_dir, True) + +class ServerManagingClientStorageForIExternalGCTest( + ServerManagingClientStorage): + + def pack(self, t=None, referencesf=None): + ServerManagingClientStorage.pack(self, t, referencesf, wait=True) + # Packing doesn't clear old versions out of zeo client caches, + # so we'll clear the caches. 
+ self._cache.clear() + ZEO.ClientStorage._check_blob_cache_size(self.blob_dir, 0) + +def test_suite(): + suite = unittest.TestSuite(( + unittest.makeSuite(Test_convenience_functions), + )) + + zeo = unittest.TestSuite() + zeo.addTest(unittest.makeSuite(ZODB.tests.util.AAAA_Test_Runner_Hack)) + patterns = [ + (re.compile(r"u?'start': u?'[^\n]+'"), 'start'), + (re.compile(r"u?'last-transaction': u?'[0-9a-f]+'"), + 'last-transaction'), + (re.compile("ZODB.POSException.ConflictError"), "ConflictError"), + (re.compile("ZODB.POSException.POSKeyError"), "POSKeyError"), + (re.compile("ZEO.Exceptions.ClientStorageError"), "ClientStorageError"), + (re.compile(r"\[Errno \d+\]"), '[Errno N]'), + (re.compile(r"loads=\d+\.\d+"), 'loads=42.42'), + # Python 3 drops the u prefix + (re.compile("u('.*?')"), r"\1"), + (re.compile('u(".*?")'), r"\1") + ] + if not PY3: + patterns.append((re.compile("^'(blob[^']*)'"), r"b'\1'")) + patterns.append((re.compile("^'Z308'"), "b'Z308'")) + zeo.addTest(doctest.DocTestSuite( + setUp=forker.setUp, tearDown=zope.testing.setupstack.tearDown, + checker=renormalizing.RENormalizing(patterns), + )) + zeo.addTest(doctest.DocTestSuite( + ZEO.tests.IterationTests, + setUp=forker.setUp, tearDown=zope.testing.setupstack.tearDown, + checker=renormalizing.RENormalizing(( + (re.compile("ZEO.Exceptions.ClientDisconnected"), + "ClientDisconnected"), + )), + )) + if not forker.ZEO4_SERVER: + # ZEO 4 doesn't support client-side conflict resolution + zeo.addTest(unittest.makeSuite(ClientConflictResolutionTests, 'check')) + zeo.layer = ZODB.tests.util.MininalTestLayer('testZeo-misc') + suite.addTest(zeo) + + zeo = unittest.TestSuite() + zeo.addTest( + doctest.DocFileSuite( + 'zdoptions.test', + 'drop_cache_rather_than_verify.txt', 'client-config.test', + 'protocols.test', 'zeo_blob_cache.test', 'invalidation-age.txt', + '../nagios.rst', + setUp=forker.setUp, tearDown=zope.testing.setupstack.tearDown, + checker=renormalizing.RENormalizing(patterns), + 
globs={'print_function': print_function}, + ), + ) + zeo.addTest(PackableStorage.IExternalGC_suite( + lambda : + ServerManagingClientStorageForIExternalGCTest( + 'data.fs', 'blobs', extrafsoptions='pack-gc false') + )) + for klass in quick_test_classes: + zeo.addTest(unittest.makeSuite(klass, "check")) + zeo.layer = ZODB.tests.util.MininalTestLayer('testZeo-misc2') + suite.addTest(zeo) + + # tests that often fail, maybe if they have their own layers + for name in 'zeo-fan-out.test', 'new_addr.test': + zeo = unittest.TestSuite() + zeo.addTest( + doctest.DocFileSuite( + name, + setUp=forker.setUp, tearDown=zope.testing.setupstack.tearDown, + checker=renormalizing.RENormalizing(patterns), + globs={'print_function': print_function}, + ), + ) + zeo.layer = ZODB.tests.util.MininalTestLayer('testZeo-' + name) + suite.addTest(zeo) + + suite.addTest(unittest.makeSuite(MultiprocessingTests)) + + # Put the heavyweights in their own layers + for klass in slow_test_classes: + sub = unittest.makeSuite(klass, "check") + sub.layer = ZODB.tests.util.MininalTestLayer(klass.__name__) + suite.addTest(sub) + + suite.addTest(ZODB.tests.testblob.storage_reusable_suite( + 'ClientStorageNonSharedBlobs', ServerManagingClientStorage)) + suite.addTest(ZODB.tests.testblob.storage_reusable_suite( + 'ClientStorageSharedBlobs', create_storage_shared)) + + if not forker.ZEO4_SERVER: + from .threaded import threaded_server_tests + dynamic_server_ports_suite = doctest.DocFileSuite( + 'dynamic_server_ports.test', + setUp=forker.setUp, tearDown=zope.testing.setupstack.tearDown, + checker=renormalizing.RENormalizing(patterns), + globs={'print_function': print_function}, + ) + dynamic_server_ports_suite.layer = threaded_server_tests + suite.addTest(dynamic_server_ports_suite) + + return suite + + +if __name__ == "__main__": + unittest.main(defaultTest="test_suite") diff --git a/thesisenv/lib/python3.6/site-packages/ZEO/tests/testZEO2.py b/thesisenv/lib/python3.6/site-packages/ZEO/tests/testZEO2.py new 
file mode 100644 index 0000000..4fe90f2 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO/tests/testZEO2.py @@ -0,0 +1,513 @@ +############################################################################## +# +# Copyright Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +from __future__ import print_function +from zope.testing import setupstack, renormalizing +import doctest +import logging +import pprint +import re +import sys +import transaction +import unittest +import ZEO.StorageServer +import ZEO.tests.servertesting +import ZODB.blob +import ZODB.FileStorage +import ZODB.tests.util +import ZODB.utils + +def proper_handling_of_blob_conflicts(): + r""" + +Conflict errors weren't properly handled when storing blobs, the +result being that the storage was left in a transaction. + +We originally saw this when restarting a blob transaction, although +it doesn't really matter. + +Set up the storage with some initial blob data. + + >>> fs = ZODB.FileStorage.FileStorage('t.fs', blob_dir='t.blobs') + >>> db = ZODB.DB(fs) + >>> conn = db.open() + >>> conn.root.b = ZODB.blob.Blob(b'x') + >>> transaction.commit() + +Get the oid and first serial. We'll use the serial later to provide +out-of-date data. + + >>> oid = conn.root.b._p_oid + >>> serial = conn.root.b._p_serial + >>> with conn.root.b.open('w') as file: + ... 
_ = file.write(b'y') + >>> transaction.commit() + >>> data = fs.load(oid)[0] + +Create the server: + + >>> server = ZEO.tests.servertesting.StorageServer('x', {'1': fs}) + +And an initial client. + + >>> zs1 = ZEO.tests.servertesting.client(server, 1) + >>> zs1.tpc_begin('0', '', '', {}) + >>> zs1.storea(ZODB.utils.p64(99), ZODB.utils.z64, b'x', '0') + >>> _ = zs1.vote('0') # doctest: +ELLIPSIS + +In a second client, we'll try to commit using the old serial. This +will conflict. It will be blocked at the vote call. + + >>> zs2 = ZEO.tests.servertesting.client(server, 2) + >>> zs2.tpc_begin('1', '', '', {}) + >>> zs2.storeBlobStart() + >>> zs2.storeBlobChunk(b'z') + >>> zs2.storeBlobEnd(oid, serial, data, '1') + >>> delay = zs2.vote('1') + + >>> class Sender(object): + ... def send_reply(self, id, reply): + ... print('reply', id, reply) + ... def send_error(self, id, err): + ... print('error', id, err) + >>> delay.set_sender(1, Sender()) + + >>> logger = logging.getLogger('ZEO') + >>> handler = logging.StreamHandler(sys.stdout) + >>> logger.setLevel(logging.INFO) + >>> logger.addHandler(handler) + +Now, when we abort the transaction for the first client. The second +client will be restarted. It will get a conflict error, that is +raised to the client: + + >>> zs1.tpc_abort('0') # doctest: +ELLIPSIS + Error raised in delayed method + Traceback (most recent call last): + ...ConflictError: ... + error 1 database conflict error ... + +The transaction is aborted by the server: + + >>> fs.tpc_transaction() is None + True + + >>> zs2.connected + True + + >>> logger.setLevel(logging.NOTSET) + >>> logger.removeHandler(handler) + >>> zs2.tpc_abort('1') + >>> fs.close() + """ + +def proper_handling_of_errors_in_restart(): + r""" + +It's critical that if there is an error in vote that the +storage isn't left in tpc. + + >>> fs = ZODB.FileStorage.FileStorage('t.fs', blob_dir='t.blobs') + >>> server = ZEO.tests.servertesting.StorageServer('x', {'1': fs}) + +And an initial client. 
+ + >>> zs1 = ZEO.tests.servertesting.client(server, 1) + >>> zs1.tpc_begin('0', '', '', {}) + >>> zs1.storea(ZODB.utils.p64(99), ZODB.utils.z64, b'x', '0') + +Intentionally break zs1: + + >>> zs1._store = lambda : None + >>> _ = zs1.vote('0') # doctest: +ELLIPSIS +IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + ... + TypeError: () takes no arguments (3 given) + +We're not in a transaction: + + >>> fs.tpc_transaction() is None + True + +We can start another client and get the storage lock. + + >>> zs1 = ZEO.tests.servertesting.client(server, 1) + >>> zs1.tpc_begin('1', '', '', {}) + >>> zs1.storea(ZODB.utils.p64(99), ZODB.utils.z64, b'x', '1') + >>> _ = zs1.vote('1') # doctest: +ELLIPSIS + + >>> zs1.tpc_finish('1').set_sender(0, zs1.connection) + + >>> fs.close() + >>> server.close() + """ + +def errors_in_vote_should_clear_lock(): + """ + +So, we arrange to get an error in vote: + + >>> import ZODB.MappingStorage + >>> vote_should_fail = True + >>> class MappingStorage(ZODB.MappingStorage.MappingStorage): + ... def tpc_vote(*args): + ... if vote_should_fail: + ... raise ValueError + ... return ZODB.MappingStorage.MappingStorage.tpc_vote(*args) + + >>> server = ZEO.tests.servertesting.StorageServer( + ... 'x', {'1': MappingStorage()}) + >>> zs = ZEO.tests.servertesting.client(server, 1) + >>> zs.tpc_begin('0', '', '', {}) + >>> zs.storea(ZODB.utils.p64(99), ZODB.utils.z64, 'x', '0') + + >>> zs.vote('0') + Traceback (most recent call last): + ... 
+ ValueError + +When we do, the storage server's transaction lock shouldn't be held: + + >>> zs.lock_manager.locked is not None + False + +Of course, if vote suceeds, the lock will be held: + + >>> vote_should_fail = False + >>> zs.tpc_begin('1', '', '', {}) + >>> zs.storea(ZODB.utils.p64(99), ZODB.utils.z64, 'x', '1') + >>> _ = zs.vote('1') # doctest: +ELLIPSIS + + >>> zs.lock_manager.locked is not None + True + + >>> zs.tpc_abort('1') + """ + + +def some_basic_locking_tests(): + r""" + + >>> itid = 0 + >>> def start_trans(zs): + ... global itid + ... itid += 1 + ... tid = str(itid) + ... zs.tpc_begin(tid, '', '', {}) + ... zs.storea(ZODB.utils.p64(99), ZODB.utils.z64, 'x', tid) + ... return tid + + >>> server = ZEO.tests.servertesting.StorageServer() + + >>> handler = logging.StreamHandler(sys.stdout) + >>> handler.setFormatter(logging.Formatter( + ... '%(name)s %(levelname)s\n%(message)s')) + >>> logging.getLogger('ZEO').addHandler(handler) + >>> logging.getLogger('ZEO').setLevel(logging.DEBUG) + +Work around the fact that ZODB registers level names backwards, which +quit working in Python 3.4: + + >>> import logging + >>> from ZODB.loglevels import BLATHER + >>> logging.addLevelName(BLATHER, "BLATHER") + +We start a transaction and vote, this leads to getting the lock. + + >>> zs1 = ZEO.tests.servertesting.client(server, '1') + ZEO.asyncio.base INFO + Connected server protocol + ZEO.asyncio.server INFO + received handshake 'Z5' + >>> tid1 = start_trans(zs1) + >>> resolved1 = zs1.vote(tid1) # doctest: +ELLIPSIS + ZEO.StorageServer DEBUG + (test-addr-1) ('1') lock: transactions waiting: 0 + ZEO.StorageServer BLATHER + (test-addr-1) Preparing to commit transaction: 1 objects, ... 
bytes + +If another client tried to vote, it's lock request will be queued and +a delay will be returned: + + >>> zs2 = ZEO.tests.servertesting.client(server, '2') + ZEO.asyncio.base INFO + Connected server protocol + ZEO.asyncio.server INFO + received handshake 'Z5' + >>> tid2 = start_trans(zs2) + >>> delay = zs2.vote(tid2) + ZEO.StorageServer DEBUG + (test-addr-2) ('1') queue lock: transactions waiting: 1 + + >>> delay.set_sender(0, zs2.connection) + +When we end the first transaction, the queued vote gets the lock. + + >>> zs1.tpc_abort(tid1) # doctest: +ELLIPSIS + ZEO.StorageServer DEBUG + (test-addr-1) ('1') unlock: transactions waiting: 1 + ZEO.StorageServer DEBUG + (test-addr-2) ('1') lock: transactions waiting: 0 + ZEO.StorageServer BLATHER + (test-addr-2) Preparing to commit transaction: 1 objects, ... bytes + +Let's try again with the first client. The vote will be queued: + + >>> tid1 = start_trans(zs1) + >>> delay = zs1.vote(tid1) + ZEO.StorageServer DEBUG + (test-addr-1) ('1') queue lock: transactions waiting: 1 + +If the queued transaction is aborted, it will be dequeued: + + >>> zs1.tpc_abort(tid1) # doctest: +ELLIPSIS + ZEO.StorageServer DEBUG + (test-addr-1) ('1') dequeue lock: transactions waiting: 0 + +BTW, voting multiple times will error: + + >>> zs2.vote(tid2) + Traceback (most recent call last): + ... + StorageTransactionError: Already voting (locked) + + >>> tid1 = start_trans(zs1) + >>> delay = zs1.vote(tid1) + ZEO.StorageServer DEBUG + (test-addr-1) ('1') queue lock: transactions waiting: 1 + + >>> delay.set_sender(0, zs1.connection) + + >>> zs1.vote(tid1) + Traceback (most recent call last): + ... + StorageTransactionError: Already voting (waiting) + +Note that the locking activity is logged at debug level to avoid +cluttering log files, however, as the number of waiting votes +increased, so does the logging level: + + >>> clients = [] + >>> for i in range(9): + ... client = ZEO.tests.servertesting.client(server, str(i+10)) + ... 
tid = start_trans(client) + ... delay = client.vote(tid) + ... clients.append(client) + ZEO.asyncio.base INFO + Connected server protocol + ZEO.asyncio.server INFO + received handshake 'Z5' + ZEO.StorageServer DEBUG + (test-addr-10) ('1') queue lock: transactions waiting: 2 + ZEO.asyncio.base INFO + Connected server protocol + ZEO.asyncio.server INFO + received handshake 'Z5' + ZEO.StorageServer DEBUG + (test-addr-11) ('1') queue lock: transactions waiting: 3 + ZEO.asyncio.base INFO + Connected server protocol + ZEO.asyncio.server INFO + received handshake 'Z5' + ZEO.StorageServer WARNING + (test-addr-12) ('1') queue lock: transactions waiting: 4 + ZEO.asyncio.base INFO + Connected server protocol + ZEO.asyncio.server INFO + received handshake 'Z5' + ZEO.StorageServer WARNING + (test-addr-13) ('1') queue lock: transactions waiting: 5 + ZEO.asyncio.base INFO + Connected server protocol + ZEO.asyncio.server INFO + received handshake 'Z5' + ZEO.StorageServer WARNING + (test-addr-14) ('1') queue lock: transactions waiting: 6 + ZEO.asyncio.base INFO + Connected server protocol + ZEO.asyncio.server INFO + received handshake 'Z5' + ZEO.StorageServer WARNING + (test-addr-15) ('1') queue lock: transactions waiting: 7 + ZEO.asyncio.base INFO + Connected server protocol + ZEO.asyncio.server INFO + received handshake 'Z5' + ZEO.StorageServer WARNING + (test-addr-16) ('1') queue lock: transactions waiting: 8 + ZEO.asyncio.base INFO + Connected server protocol + ZEO.asyncio.server INFO + received handshake 'Z5' + ZEO.StorageServer WARNING + (test-addr-17) ('1') queue lock: transactions waiting: 9 + ZEO.asyncio.base INFO + Connected server protocol + ZEO.asyncio.server INFO + received handshake 'Z5' + ZEO.StorageServer CRITICAL + (test-addr-18) ('1') queue lock: transactions waiting: 10 + +If a client with the transaction lock disconnects, it will abort and +release the lock and one of the waiting clients will get the lock. 
+ + >>> zs2.notify_disconnected() # doctest: +ELLIPSIS + ZEO.StorageServer INFO + (test-addr-...) disconnected during locked transaction + ZEO.StorageServer CRITICAL + (test-addr-...) ('1') unlock: transactions waiting: 10 + ZEO.StorageServer WARNING + (test-addr-...) ('1') lock: transactions waiting: 9 + ZEO.StorageServer BLATHER + (test-addr-...) Preparing to commit transaction: 1 objects, ... bytes + +(In practice, waiting clients won't necessarily get the lock in order.) + +We can find out about the current lock state, and get other server +statistics using the server_status method: + + >>> pprint.pprint(zs1.server_status(), width=40) + {'aborts': 3, + 'active_txns': 10, + 'commits': 0, + 'conflicts': 0, + 'conflicts_resolved': 0, + 'connections': 10, + 'last-transaction': '0000000000000000', + 'loads': 0, + 'lock_time': 1272653598.693882, + 'start': 'Fri Apr 30 14:53:18 2010', + 'stores': 13, + 'timeout-thread-is-alive': 'stub', + 'waiting': 9} + +If clients disconnect while waiting, they will be dequeued: + + >>> for client in clients: + ... client.notify_disconnected() # doctest: +ELLIPSIS + ZEO.StorageServer INFO + (test-addr-10) disconnected during...locked transaction + ZEO.StorageServer WARNING + (test-addr-10) ('1') ... lock: transactions waiting: ... + + >>> zs1.server_status()['waiting'] + 0 + + >>> zs1.tpc_abort(tid1) + ZEO.StorageServer DEBUG + (test-addr-1) ('1') unlock: transactions waiting: 0 + + >>> logging.getLogger('ZEO').setLevel(logging.NOTSET) + >>> logging.getLogger('ZEO').removeHandler(handler) + >>> server.close() + """ + +def lock_sanity_check(): + r""" +On one occasion with 3.10.0a1 in production, we had a case where a +transaction lock wasn't released properly. One possibility, fron +scant log information, is that the server and ZEOStorage had different +ideas about whether the ZEOStorage was locked. 
The timeout thread +properly closed the ZEOStorage's connection, but the ZEOStorage didn't +release it's lock, presumably because it thought it wasn't locked. I'm +not sure why this happened. I've refactored the logic quite a bit to +try to deal with this, but the consequences of this failure are so +severe, I'm adding some sanity checking when queueing lock requests. + +Helper to manage transactions: + + >>> itid = 0 + >>> def start_trans(zs): + ... global itid + ... itid += 1 + ... tid = str(itid) + ... zs.tpc_begin(tid, '', '', {}) + ... zs.storea(ZODB.utils.p64(99), ZODB.utils.z64, 'x', tid) + ... return tid + +Set up server and logging: + + >>> server = ZEO.tests.servertesting.StorageServer() + + >>> handler = logging.StreamHandler(sys.stdout) + >>> handler.setFormatter(logging.Formatter( + ... '%(name)s %(levelname)s\n%(message)s')) + >>> logging.getLogger('ZEO').addHandler(handler) + >>> logging.getLogger('ZEO').setLevel(logging.DEBUG) + +Work around the fact that ZODB registers level names backwards, which +quit working in Python 3.4: + + >>> import logging + >>> from ZODB.loglevels import BLATHER + >>> logging.addLevelName(BLATHER, "BLATHER") + +Now, we'll start a transaction, get the lock and then mark the +ZEOStorage as closed and see if trying to get a lock cleans it up: + + >>> zs1 = ZEO.tests.servertesting.client(server, '1') + ZEO.asyncio.base INFO + Connected server protocol + ZEO.asyncio.server INFO + received handshake 'Z5' + >>> tid1 = start_trans(zs1) + >>> resolved1 = zs1.vote(tid1) # doctest: +ELLIPSIS + ZEO.StorageServer DEBUG + (test-addr-1) ('1') lock: transactions waiting: 0 + ZEO.StorageServer BLATHER + (test-addr-1) Preparing to commit transaction: 1 objects, ... 
bytes + + >>> zs1.connection.connection_lost(None) + ZEO.StorageServer INFO + (test-addr-1) disconnected during locked transaction + ZEO.StorageServer DEBUG + (test-addr-1) ('1') unlock: transactions waiting: 0 + + >>> zs2 = ZEO.tests.servertesting.client(server, '2') + ZEO.asyncio.base INFO + Connected server protocol + ZEO.asyncio.server INFO + received handshake 'Z5' + >>> tid2 = start_trans(zs2) + >>> resolved2 = zs2.vote(tid2) # doctest: +ELLIPSIS + ZEO.StorageServer DEBUG + (test-addr-2) ('1') lock: transactions waiting: 0 + ZEO.StorageServer BLATHER + (test-addr-2) Preparing to commit transaction: 1 objects, ... bytes + + >>> zs2.tpc_abort(tid2) + ZEO.StorageServer DEBUG + (test-addr-2) ('1') unlock: transactions waiting: 0 + + >>> logging.getLogger('ZEO').setLevel(logging.NOTSET) + >>> logging.getLogger('ZEO').removeHandler(handler) + + >>> server.close() + """ + +def test_suite(): + return unittest.TestSuite(( + doctest.DocTestSuite( + setUp=ZODB.tests.util.setUp, tearDown=setupstack.tearDown, + checker=renormalizing.RENormalizing([ + (re.compile(r'\d+/test-addr'), ''), + (re.compile(r"'lock_time': \d+.\d+"), 'lock_time'), + (re.compile(r"'start': '[^\n]+'"), 'start'), + (re.compile('ZODB.POSException.StorageTransactionError'), + 'StorageTransactionError'), + ]), + ), + )) + +if __name__ == '__main__': + unittest.main(defaultTest='test_suite') diff --git a/thesisenv/lib/python3.6/site-packages/ZEO/tests/testZEOOptions.py b/thesisenv/lib/python3.6/site-packages/ZEO/tests/testZEOOptions.py new file mode 100644 index 0000000..f0848a6 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO/tests/testZEOOptions.py @@ -0,0 +1,110 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## + +"""Test suite for ZEO.runzeo.ZEOOptions.""" + +import os +import tempfile +import unittest + +import ZODB.config + +from ZEO.runzeo import ZEOOptions +from zdaemon.tests.testzdoptions import TestZDOptions + +# When a hostname isn't specified in a socket binding address, ZConfig +# supplies the empty string. +DEFAULT_BINDING_HOST = "" + +class TestZEOOptions(TestZDOptions): + + OptionsClass = ZEOOptions + + input_args = ["-f", "Data.fs", "-a", "5555"] + output_opts = [("-f", "Data.fs"), ("-a", "5555")] + output_args = [] + + configdata = """ + + address 5555 + + + path Data.fs + + """ + + def setUp(self): + self.tempfilename = tempfile.mktemp() + with open(self.tempfilename, "w") as f: + f.write(self.configdata) + + def tearDown(self): + try: + os.remove(self.tempfilename) + except os.error: + pass + + def test_configure(self): + # Hide the base class test_configure + pass + + def test_default_help(self): pass # disable silly test w spurious failures + + def test_defaults_with_schema(self): + options = self.OptionsClass() + options.realize(["-C", self.tempfilename]) + self.assertEqual(options.address, (DEFAULT_BINDING_HOST, 5555)) + self.assertEqual(len(options.storages), 1) + opener = options.storages[0] + self.assertEqual(opener.name, "fs") + self.assertEqual(opener.__class__, ZODB.config.FileStorage) + self.assertEqual(options.read_only, 0) + self.assertEqual(options.transaction_timeout, None) + self.assertEqual(options.invalidation_queue_size, 100) + + def test_defaults_without_schema(self): + options = self.OptionsClass() + options.realize(["-a", "5555", "-f", "Data.fs"]) + self.assertEqual(options.address, (DEFAULT_BINDING_HOST, 
5555)) + self.assertEqual(len(options.storages), 1) + opener = options.storages[0] + self.assertEqual(opener.name, "1") + self.assertEqual(opener.__class__, ZODB.config.FileStorage) + self.assertEqual(opener.config.path, "Data.fs") + self.assertEqual(options.read_only, 0) + self.assertEqual(options.transaction_timeout, None) + self.assertEqual(options.invalidation_queue_size, 100) + + def test_commandline_overrides(self): + options = self.OptionsClass() + options.realize(["-C", self.tempfilename, + "-a", "6666", "-f", "Wisdom.fs"]) + self.assertEqual(options.address, (DEFAULT_BINDING_HOST, 6666)) + self.assertEqual(len(options.storages), 1) + opener = options.storages[0] + self.assertEqual(opener.__class__, ZODB.config.FileStorage) + self.assertEqual(opener.config.path, "Wisdom.fs") + self.assertEqual(options.read_only, 0) + self.assertEqual(options.transaction_timeout, None) + self.assertEqual(options.invalidation_queue_size, 100) + + +def test_suite(): + suite = unittest.TestSuite() + for cls in [TestZEOOptions]: + suite.addTest(unittest.makeSuite(cls)) + return suite + +if __name__ == "__main__": + unittest.main(defaultTest='test_suite') diff --git a/thesisenv/lib/python3.6/site-packages/ZEO/tests/testZEOServer.py b/thesisenv/lib/python3.6/site-packages/ZEO/tests/testZEOServer.py new file mode 100644 index 0000000..93d86fb --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO/tests/testZEOServer.py @@ -0,0 +1,179 @@ +import unittest + +import mock +import os + +from ZEO._compat import PY3 +from ZEO.runzeo import ZEOServer + + +class TestStorageServer(object): + + def __init__(self, fail_create_server): + self.called = [] + if fail_create_server: raise RuntimeError() + + def close(self): + self.called.append("close") + + +class TestZEOServer(ZEOServer): + + def __init__(self, fail_create_server=False, fail_loop_forever=False): + ZEOServer.__init__(self, None) + self.called = [] + self.fail_create_server = fail_create_server + self.fail_loop_forever = 
fail_loop_forever + + def setup_default_logging(self): + self.called.append("setup_default_logging") + + def check_socket(self): + self.called.append("check_socket") + + def clear_socket(self): + self.called.append("clear_socket") + + def make_pidfile(self): + self.called.append("make_pidfile") + + def open_storages(self): + self.called.append("open_storages") + + def setup_signals(self): + self.called.append("setup_signals") + + def create_server(self): + self.called.append("create_server") + self.server = TestStorageServer(self.fail_create_server) + + def loop_forever(self): + self.called.append("loop_forever") + if self.fail_loop_forever: raise RuntimeError() + + def close_server(self): + self.called.append("close_server") + ZEOServer.close_server(self) + + def remove_pidfile(self): + self.called.append("remove_pidfile") + + +class AttributeErrorTests(unittest.TestCase): + + def testFailCreateServer(self): + # + # Fix AttributeError: 'ZEOServer' object has no attribute + # 'server' in ZEOServer.main + # + # Demonstrate the AttributeError + zeo = TestZEOServer(fail_create_server=True) + self.assertRaises(RuntimeError, zeo.main) + + +class CloseServerTests(unittest.TestCase): + + def testCallSequence(self): + # The close_server hook is called after loop_forever + # has returned + zeo = TestZEOServer() + zeo.main() + self.assertEqual(zeo.called, [ + "setup_default_logging", + "check_socket", + "clear_socket", + "make_pidfile", + "open_storages", + "setup_signals", + "create_server", + "loop_forever", + "close_server", # New + "clear_socket", + "remove_pidfile", + ]) + # The default implementation closes the storage server + self.assertEqual(hasattr(zeo, "server"), True) + self.assertEqual(zeo.server.called, ["close"]) + + def testFailLoopForever(self): + # The close_server hook is called if loop_forever exits + # with an exception + zeo = TestZEOServer(fail_loop_forever=True) + self.assertRaises(RuntimeError, zeo.main) + self.assertEqual(zeo.called, [ + 
"setup_default_logging", + "check_socket", + "clear_socket", + "make_pidfile", + "open_storages", + "setup_signals", + "create_server", + "loop_forever", + "close_server", + "clear_socket", + "remove_pidfile", + ]) + # The storage server has been closed + self.assertEqual(hasattr(zeo, "server"), True) + self.assertEqual(zeo.server.called, ["close"]) + + def testFailCreateServer(self): + # The close_server hook is called if create_server exits + # with an exception + zeo = TestZEOServer(fail_create_server=True) + self.assertRaises(RuntimeError, zeo.main) + self.assertEqual(zeo.called, [ + "setup_default_logging", + "check_socket", + "clear_socket", + "make_pidfile", + "open_storages", + "setup_signals", + "create_server", + "close_server", + "clear_socket", + "remove_pidfile", + ]) + # The server attribute is present but None + self.assertEqual(hasattr(zeo, "server"), True) + self.assertEqual(zeo.server, None) + +@mock.patch('os.unlink') +class TestZEOServerSocket(unittest.TestCase): + + def _unlinked(self, unlink, options): + server = ZEOServer(options) + server.clear_socket() + unlink.assert_called_once() + + def _not_unlinked(self, unlink, options): + server = ZEOServer(options) + server.clear_socket() + unlink.assert_not_called() + + def test_clear_with_native_str(self, unlink): + class Options(object): + address = "a str that does not exist" + self._unlinked(unlink, Options) + + def test_clear_with_unicode_str(self, unlink): + class Options(object): + address = u"a str that does not exist" + self._unlinked(unlink, Options) + + def test_clear_with_bytes(self, unlink): + class Options(object): + address = b'a byte str that does not exist' + + if PY3: + # bytes are not a string type under Py3 + assertion = self._not_unlinked + else: + assertion = self._unlinked + + assertion(unlink, Options) + + def test_clear_with_tuple(self, unlink): + class Options(object): + address = ('abc', 1) + self._not_unlinked(unlink, Options) diff --git 
a/thesisenv/lib/python3.6/site-packages/ZEO/tests/test_cache.py b/thesisenv/lib/python3.6/site-packages/ZEO/tests/test_cache.py new file mode 100644 index 0000000..922f214 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO/tests/test_cache.py @@ -0,0 +1,1114 @@ +############################################################################## +# +# Copyright (c) 2003 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Basic unit tests for a client cache.""" +from __future__ import print_function + +from ZODB.utils import p64, repr_to_oid +import doctest +import os +import re +import string +import struct +import sys +import tempfile +import unittest +import ZEO.cache +import ZODB.tests.util +import zope.testing.setupstack +import zope.testing.renormalizing + +import ZEO.cache +from ZODB.utils import p64, u64, z64 + +n1 = p64(1) +n2 = p64(2) +n3 = p64(3) +n4 = p64(4) +n5 = p64(5) + + +def hexprint(file): + file.seek(0) + data = file.read() + offset = 0 + while data: + line, data = data[:16], data[16:] + printable = "" + hex = "" + for character in line: + if (character in string.printable + and not ord(character) in [12,13,9]): + printable += character + else: + printable += '.' 
+ hex += character.encode('hex') + ' ' + hex = hex[:24] + ' ' + hex[24:] + hex = hex.ljust(49) + printable = printable.ljust(16) + print('%08x %s |%s|' % (offset, hex, printable)) + offset += 16 + + +def oid(o): + repr = '%016x' % o + return repr_to_oid(repr) +tid = oid + +class CacheTests(ZODB.tests.util.TestCase): + + def setUp(self): + # The default cache size is much larger than we need here. Since + # testSerialization reads the entire file into a string, it's not + # good to leave it that big. + ZODB.tests.util.TestCase.setUp(self) + self.cache = ZEO.cache.ClientCache(size=1024**2) + + def tearDown(self): + self.cache.close() + if self.cache.path: + os.remove(self.cache.path) + ZODB.tests.util.TestCase.tearDown(self) + + def testLastTid(self): + self.assertEqual(self.cache.getLastTid(), z64) + self.cache.setLastTid(n2) + self.assertEqual(self.cache.getLastTid(), n2) + self.assertEqual(self.cache.getLastTid(), n2) + self.cache.setLastTid(n3) + self.assertEqual(self.cache.getLastTid(), n3) + + # Check that setting tids out of order gives an error: + + # the cache complains only when it's non-empty + self.cache.store(n1, n3, None, b'x') + self.assertRaises(ValueError, self.cache.setLastTid, n2) + + def testLoad(self): + data1 = b"data for n1" + self.assertEqual(self.cache.load(n1), None) + self.cache.store(n1, n3, None, data1) + self.assertEqual(self.cache.load(n1), (data1, n3)) + + def testInvalidate(self): + data1 = b"data for n1" + self.cache.store(n1, n3, None, data1) + self.cache.invalidate(n2, n2) + self.cache.invalidate(n1, n4) + self.assertEqual(self.cache.load(n1), None) + self.assertEqual(self.cache.loadBefore(n1, n4), + (data1, n3, n4)) + + def testNonCurrent(self): + data1 = b"data for n1" + data2 = b"data for n2" + self.cache.store(n1, n4, None, data1) + self.cache.store(n1, n2, n3, data2) + # can't say anything about state before n2 + self.assertEqual(self.cache.loadBefore(n1, n2), None) + # n3 is the upper bound of non-current record n2 + 
self.assertEqual(self.cache.loadBefore(n1, n3), (data2, n2, n3)) + # no data for between n2 and n3 + self.assertEqual(self.cache.loadBefore(n1, n4), None) + self.cache.invalidate(n1, n5) + self.assertEqual(self.cache.loadBefore(n1, n5), (data1, n4, n5)) + self.assertEqual(self.cache.loadBefore(n2, n4), None) + + def testException(self): + self.cache.store(n1, n2, None, b"data") + self.cache.store(n1, n2, None, b"data") + self.assertRaises(ValueError, + self.cache.store, + n1, n3, None, b"data") + + def testEviction(self): + # Manually override the current maxsize + cache = ZEO.cache.ClientCache(None, 3395) + + # Trivial test of eviction code. Doesn't test non-current + # eviction. + data = [b"z" * i for i in range(100)] + for i in range(50): + n = p64(i) + cache.store(n, n, None, data[i]) + self.assertEqual(len(cache), i + 1) + # The cache is now almost full. The next insert + # should delete some objects. + n = p64(50) + cache.store(n, n, None, data[51]) + self.assertTrue(len(cache) < 51) + + # TODO: Need to make sure eviction of non-current data + # are handled correctly. + + def testSerialization(self): + self.cache.store(n1, n2, None, b"data for n1") + self.cache.store(n3, n3, n4, b"non-current data for n3") + self.cache.store(n3, n4, n5, b"more non-current data for n3") + + path = tempfile.mktemp() + # Copy data from self.cache into path, reaching into the cache + # guts to make the copy. + with open(path, "wb+") as dst: + src = self.cache.f + src.seek(0) + dst.write(src.read(self.cache.maxsize)) + copy = ZEO.cache.ClientCache(path) + + # Verify that internals of both objects are the same. + # Could also test that external API produces the same results. 
+ eq = self.assertEqual + eq(copy.getLastTid(), self.cache.getLastTid()) + eq(len(copy), len(self.cache)) + eq(dict(copy.current), dict(self.cache.current)) + eq(dict([(k, dict(v)) for (k, v) in copy.noncurrent.items()]), + dict([(k, dict(v)) for (k, v) in self.cache.noncurrent.items()]), + ) + copy.close() + + def testCurrentObjectLargerThanCache(self): + if self.cache.path: + os.remove(self.cache.path) + self.cache.close() + self.cache = ZEO.cache.ClientCache(size=50) + + # We store an object that is a bit larger than the cache can handle. + self.cache.store(n1, n2, None, "x"*64) + # We can see that it was not stored. + self.assertEqual(None, self.cache.load(n1)) + # If an object cannot be stored in the cache, it must not be + # recorded as current. + self.assertTrue(n1 not in self.cache.current) + # Regression test: invalidation must still work. + self.cache.invalidate(n1, n2) + + def testOldObjectLargerThanCache(self): + if self.cache.path: + os.remove(self.cache.path) + self.cache.close() + cache = ZEO.cache.ClientCache(size=50) + + # We store an object that is a bit larger than the cache can handle. + cache.store(n1, n2, n3, "x"*64) + # We can see that it was not stored. + self.assertEqual(None, cache.load(n1)) + # If an object cannot be stored in the cache, it must not be + # recorded as non-current. 
+ self.assertTrue(1 not in cache.noncurrent) + + def testVeryLargeCaches(self): + cache = ZEO.cache.ClientCache('cache', size=(1<<32)+(1<<20)) + cache.store(n1, n2, None, b"x") + cache.close() + cache = ZEO.cache.ClientCache('cache', size=(1<<33)+(1<<20)) + self.assertEqual(cache.load(n1), (b'x', n2)) + cache.close() + + def testConversionOfLargeFreeBlocks(self): + with open('cache', 'wb') as f: + f.write(ZEO.cache.magic+ + b'\0'*8 + + b'f'+struct.pack(">I", (1<<32)-12) + ) + f.seek((1<<32)-1) + f.write(b'x') + cache = ZEO.cache.ClientCache('cache', size=1<<32) + cache.close() + cache = ZEO.cache.ClientCache('cache', size=1<<32) + cache.close() + with open('cache', 'rb') as f: + f.seek(12) + self.assertEqual(f.read(1), b'f') + self.assertEqual(struct.unpack(">I", f.read(4))[0], + ZEO.cache.max_block_size) + + if not sys.platform.startswith('linux'): + # On platforms without sparse files, these tests are just way + # too hard on the disk and take too long (especially in a windows + # VM). 
+ del testVeryLargeCaches + del testConversionOfLargeFreeBlocks + + def test_clear_zeo_cache(self): + cache = self.cache + for i in range(10): + cache.store(p64(i), n2, None, str(i).encode()) + cache.store(p64(i), n1, n2, str(i).encode()+b'old') + self.assertEqual(len(cache), 20) + self.assertEqual(cache.load(n3), (b'3', n2)) + self.assertEqual(cache.loadBefore(n3, n2), (b'3old', n1, n2)) + + cache.clear() + self.assertEqual(len(cache), 0) + self.assertEqual(cache.load(n3), None) + self.assertEqual(cache.loadBefore(n3, n2), None) + + def testChangingCacheSize(self): + # start with a small cache + data = b'x' + recsize = ZEO.cache.allocated_record_overhead+len(data) + + for extra in (2, recsize-2): + + cache = ZEO.cache.ClientCache( + 'cache', size=ZEO.cache.ZEC_HEADER_SIZE+100*recsize+extra) + for i in range(100): + cache.store(p64(i), n1, None, data) + self.assertEqual(len(cache), 100) + self.assertEqual(os.path.getsize( + 'cache'), ZEO.cache.ZEC_HEADER_SIZE+100*recsize+extra) + + # Now make it smaller + cache.close() + small = 50 + cache = ZEO.cache.ClientCache( + 'cache', size=ZEO.cache.ZEC_HEADER_SIZE+small*recsize+extra) + self.assertEqual(len(cache), small) + self.assertEqual(os.path.getsize( + 'cache'), ZEO.cache.ZEC_HEADER_SIZE+small*recsize+extra) + self.assertEqual(set(u64(oid) for (oid, tid) in cache.contents()), + set(range(small))) + for i in range(100, 110): + cache.store(p64(i), n1, None, data) + + # We use small-1 below because an extra object gets + # evicted because of the optimization to assure that we + # always get a free block after a new allocated block. 
+ expected_len = small - 1 + self.assertEqual(len(cache), expected_len) + expected_oids = set(list(range(11, 50))+list(range(100, 110))) + self.assertEqual( + set(u64(oid) for (oid, tid) in cache.contents()), + expected_oids) + + # Make sure we can reopen with same size + cache.close() + cache = ZEO.cache.ClientCache( + 'cache', size=ZEO.cache.ZEC_HEADER_SIZE+small*recsize+extra) + self.assertEqual(len(cache), expected_len) + self.assertEqual(set(u64(oid) for (oid, tid) in cache.contents()), + expected_oids) + + # Now make it bigger + cache.close() + large = 150 + cache = ZEO.cache.ClientCache( + 'cache', size=ZEO.cache.ZEC_HEADER_SIZE+large*recsize+extra) + self.assertEqual(len(cache), expected_len) + self.assertEqual(os.path.getsize( + 'cache'), ZEO.cache.ZEC_HEADER_SIZE+large*recsize+extra) + self.assertEqual(set(u64(oid) for (oid, tid) in cache.contents()), + expected_oids) + + + for i in range(200, 305): + cache.store(p64(i), n1, None, data) + + # We use large-2 for the same reason we used small-1 above. 
+ expected_len = large-2 + self.assertEqual(len(cache), expected_len) + expected_oids = set(list(range(11, 50)) + + list(range(106, 110)) + + list(range(200, 305))) + self.assertEqual(set(u64(oid) for (oid, tid) in cache.contents()), + expected_oids) + + # Make sure we can reopen with same size + cache.close() + cache = ZEO.cache.ClientCache( + 'cache', size=ZEO.cache.ZEC_HEADER_SIZE+large*recsize+extra) + self.assertEqual(len(cache), expected_len) + self.assertEqual(set(u64(oid) for (oid, tid) in cache.contents()), + expected_oids) + + # Cleanup + cache.close() + os.remove('cache') + + def testSetAnyLastTidOnEmptyCache(self): + self.cache.setLastTid(p64(5)) + self.cache.setLastTid(p64(5)) + self.cache.setLastTid(p64(3)) + self.cache.setLastTid(p64(4)) + + def test_loadBefore_doesnt_miss_current(self): + # Make sure that loadBefore get's current data if there + # isn't non-current data + + cache = self.cache + oid = n1 + cache.store(oid, n1, None, b'first') + self.assertEqual(cache.loadBefore(oid, n1), None) + self.assertEqual(cache.loadBefore(oid, n2), (b'first', n1, None)) + self.cache.invalidate(oid, n2) + cache.store(oid, n2, None, b'second') + self.assertEqual(cache.loadBefore(oid, n1), None) + self.assertEqual(cache.loadBefore(oid, n2), (b'first', n1, n2)) + self.assertEqual(cache.loadBefore(oid, n3), (b'second', n2, None)) + +def kill_does_not_cause_cache_corruption(): + r""" + +If we kill a process while a cache is being written to, the cache +isn't corrupted. To see this, we'll write a little script that +writes records to a cache file repeatedly. + +>>> import os, random, sys, time +>>> with open('t', 'w') as f: +... _ = f.write(''' +... import os, random, sys, time +... try: +... import thread +... except ImportError: +... import _thread as thread +... sys.path = %r +... +... def suicide(): +... time.sleep(random.random()/10) +... os._exit(0) +... +... import ZEO.cache +... from ZODB.utils import p64 +... cache = ZEO.cache.ClientCache('cache') +... 
oid = 0 +... t = 0 +... thread.start_new_thread(suicide, ()) +... while 1: +... oid += 1 +... t += 1 +... data = b'X' * random.randint(5000,25000) +... cache.store(p64(oid), p64(t), None, data) +... +... ''' % sys.path) + +>>> for i in range(10): +... _ = os.spawnl(os.P_WAIT, sys.executable, sys.executable, 't') +... if os.path.exists('cache'): +... cache = ZEO.cache.ClientCache('cache') +... cache.close() +... os.remove('cache') +... os.remove('cache.lock') + + +""" + +def full_cache_is_valid(): + r""" + +If we fill up the cache without any free space, the cache can +still be used. + +>>> import ZEO.cache +>>> cache = ZEO.cache.ClientCache('cache', 1000) +>>> data = b'X' * (1000 - ZEO.cache.ZEC_HEADER_SIZE - 41) +>>> cache.store(p64(1), p64(1), None, data) +>>> cache.close() +>>> cache = ZEO.cache.ClientCache('cache', 1000) +>>> cache.store(p64(2), p64(2), None, b'XXX') + +>>> cache.close() +""" + +def cannot_open_same_cache_file_twice(): + r""" +>>> import ZEO.cache +>>> cache = ZEO.cache.ClientCache('cache', 1000) +>>> cache2 = ZEO.cache.ClientCache('cache', 1000) \ +... # doctest: +IGNORE_EXCEPTION_DETAIL +Traceback (most recent call last): +... +LockError: Couldn't lock 'cache.lock' + +>>> cache.close() +""" + +def broken_non_current(): + r""" + +In production, we saw a situation where an _del_noncurrent raused +a key error when trying to free space, causing the cache to become +unusable. I can't see why this would occur, but added a logging +exception handler so, in the future, we'll still see cases in the +log, but will ignore the error and keep going. 
+ +>>> import ZEO.cache, ZODB.utils, logging, sys +>>> logger = logging.getLogger('ZEO.cache') +>>> logger.setLevel(logging.ERROR) +>>> handler = logging.StreamHandler(sys.stdout) +>>> logger.addHandler(handler) +>>> cache = ZEO.cache.ClientCache('cache', 1000) +>>> cache.store(ZODB.utils.p64(1), ZODB.utils.p64(1), None, b'0') +>>> cache.invalidate(ZODB.utils.p64(1), ZODB.utils.p64(2)) +>>> cache._del_noncurrent(ZODB.utils.p64(1), ZODB.utils.p64(2)) +... # doctest: +NORMALIZE_WHITESPACE +Couldn't find non-current +('\x00\x00\x00\x00\x00\x00\x00\x01', '\x00\x00\x00\x00\x00\x00\x00\x02') +>>> cache._del_noncurrent(ZODB.utils.p64(1), ZODB.utils.p64(1)) +>>> cache._del_noncurrent(ZODB.utils.p64(1), ZODB.utils.p64(1)) # +... # doctest: +NORMALIZE_WHITESPACE +Couldn't find non-current +('\x00\x00\x00\x00\x00\x00\x00\x01', '\x00\x00\x00\x00\x00\x00\x00\x01') + +>>> logger.setLevel(logging.NOTSET) +>>> logger.removeHandler(handler) + +>>> cache.close() +""" + +# def bad_magic_number(): See rename_bad_cache_file + +def cache_trace_analysis(): + r""" +Check to make sure the cache analysis scripts work. + + >>> import time + >>> timetime = time.time + >>> now = 1278864701.5 + >>> time.time = lambda : now + + >>> os.environ["ZEO_CACHE_TRACE"] = 'yes' + >>> import random2 as random + >>> random = random.Random(42) + >>> history = [] + >>> serial = 1 + >>> for i in range(1000): + ... serial += 1 + ... oid = random.randint(i+1000, i+6000) + ... history.append((b's', p64(oid), p64(serial), + ... b'x'*random.randint(200,2000))) + ... for j in range(10): + ... oid = random.randint(i+1000, i+6000) + ... history.append((b'l', p64(oid), p64(serial), + ... b'x'*random.randint(200,2000))) + + >>> def cache_run(name, size): + ... serial = 1 + ... random.seed(42) + ... global now + ... now = 1278864701.5 + ... cache = ZEO.cache.ClientCache(name, size*(1<<20)) + ... for action, oid, serial, data in history: + ... now += 1 + ... if action == b's': + ... cache.invalidate(oid, serial) + ... 
cache.store(oid, serial, None, data) + ... else: + ... v = cache.load(oid) + ... if v is None: + ... cache.store(oid, serial, None, data) + ... cache.close() + + >>> cache_run('cache', 2) + + >>> import ZEO.scripts.cache_stats, ZEO.scripts.cache_simul + + >>> def ctime(t): + ... return time.asctime(time.gmtime(t-3600*4)) + >>> ZEO.scripts.cache_stats.ctime = ctime + >>> ZEO.scripts.cache_simul.ctime = ctime + + ############################################################ + Stats + + >>> ZEO.scripts.cache_stats.main(['cache.trace']) + loads hits inv(h) writes hitrate + Jul 11 12:11-11 0 0 0 0 n/a + Jul 11 12:11:41 ==================== Restart ==================== + Jul 11 12:11-14 180 1 2 197 0.6% + Jul 11 12:15-29 818 107 9 793 13.1% + Jul 11 12:30-44 818 213 22 687 26.0% + Jul 11 12:45-59 818 291 19 609 35.6% + Jul 11 13:00-14 818 295 36 605 36.1% + Jul 11 13:15-29 818 277 31 623 33.9% + Jul 11 13:30-44 819 276 29 624 33.7% + Jul 11 13:45-59 818 251 25 649 30.7% + Jul 11 14:00-14 818 295 27 605 36.1% + Jul 11 14:15-29 818 262 33 638 32.0% + Jul 11 14:30-44 818 297 32 603 36.3% + Jul 11 14:45-59 819 268 23 632 32.7% + Jul 11 15:00-14 818 291 30 609 35.6% + Jul 11 15:15-15 2 1 0 1 50.0% + + Read 18,876 trace records (641,776 bytes) in 0.0 seconds + Versions: 0 records used a version + First time: Sun Jul 11 12:11:41 2010 + Last time: Sun Jul 11 15:15:01 2010 + Duration: 11,000 seconds + Data recs: 11,000 (58.3%), average size 1108 bytes + Hit rate: 31.2% (load hits / loads) + + Count Code Function (action) + 1 00 _setup_trace (initialization) + 682 10 invalidate (miss) + 318 1c invalidate (hit, saving non-current) + 6,875 20 load (miss) + 3,125 22 load (hit) + 7,875 52 store (current, non-version) + + >>> ZEO.scripts.cache_stats.main('-q cache.trace'.split()) + loads hits inv(h) writes hitrate + + Read 18,876 trace records (641,776 bytes) in 0.0 seconds + Versions: 0 records used a version + First time: Sun Jul 11 12:11:41 2010 + Last time: Sun Jul 11 15:15:01 2010 
+ Duration: 11,000 seconds + Data recs: 11,000 (58.3%), average size 1108 bytes + Hit rate: 31.2% (load hits / loads) + + Count Code Function (action) + 1 00 _setup_trace (initialization) + 682 10 invalidate (miss) + 318 1c invalidate (hit, saving non-current) + 6,875 20 load (miss) + 3,125 22 load (hit) + 7,875 52 store (current, non-version) + + >>> ZEO.scripts.cache_stats.main('-v cache.trace'.split()) + ... # doctest: +ELLIPSIS + loads hits inv(h) writes hitrate + Jul 11 12:11:41 00 '' 0000000000000000 0000000000000000 - + Jul 11 12:11-11 0 0 0 0 n/a + Jul 11 12:11:41 ==================== Restart ==================== + Jul 11 12:11:42 10 1065 0000000000000002 0000000000000000 - + Jul 11 12:11:42 52 1065 0000000000000002 0000000000000000 - 245 + Jul 11 12:11:43 20 947 0000000000000000 0000000000000000 - + Jul 11 12:11:43 52 947 0000000000000002 0000000000000000 - 602 + Jul 11 12:11:44 20 124b 0000000000000000 0000000000000000 - + Jul 11 12:11:44 52 124b 0000000000000002 0000000000000000 - 1418 + ... 
+ Jul 11 15:14:55 52 10cc 00000000000003e9 0000000000000000 - 1306 + Jul 11 15:14:56 20 18a7 0000000000000000 0000000000000000 - + Jul 11 15:14:56 52 18a7 00000000000003e9 0000000000000000 - 1610 + Jul 11 15:14:57 22 18b5 000000000000031d 0000000000000000 - 1636 + Jul 11 15:14:58 20 b8a 0000000000000000 0000000000000000 - + Jul 11 15:14:58 52 b8a 00000000000003e9 0000000000000000 - 838 + Jul 11 15:14:59 22 1085 0000000000000357 0000000000000000 - 217 + Jul 11 15:00-14 818 291 30 609 35.6% + Jul 11 15:15:00 22 1072 000000000000037e 0000000000000000 - 204 + Jul 11 15:15:01 20 16c5 0000000000000000 0000000000000000 - + Jul 11 15:15:01 52 16c5 00000000000003e9 0000000000000000 - 1712 + Jul 11 15:15-15 2 1 0 1 50.0% + + Read 18,876 trace records (641,776 bytes) in 0.0 seconds + Versions: 0 records used a version + First time: Sun Jul 11 12:11:41 2010 + Last time: Sun Jul 11 15:15:01 2010 + Duration: 11,000 seconds + Data recs: 11,000 (58.3%), average size 1108 bytes + Hit rate: 31.2% (load hits / loads) + + Count Code Function (action) + 1 00 _setup_trace (initialization) + 682 10 invalidate (miss) + 318 1c invalidate (hit, saving non-current) + 6,875 20 load (miss) + 3,125 22 load (hit) + 7,875 52 store (current, non-version) + + >>> ZEO.scripts.cache_stats.main('-h cache.trace'.split()) + loads hits inv(h) writes hitrate + Jul 11 12:11-11 0 0 0 0 n/a + Jul 11 12:11:41 ==================== Restart ==================== + Jul 11 12:11-14 180 1 2 197 0.6% + Jul 11 12:15-29 818 107 9 793 13.1% + Jul 11 12:30-44 818 213 22 687 26.0% + Jul 11 12:45-59 818 291 19 609 35.6% + Jul 11 13:00-14 818 295 36 605 36.1% + Jul 11 13:15-29 818 277 31 623 33.9% + Jul 11 13:30-44 819 276 29 624 33.7% + Jul 11 13:45-59 818 251 25 649 30.7% + Jul 11 14:00-14 818 295 27 605 36.1% + Jul 11 14:15-29 818 262 33 638 32.0% + Jul 11 14:30-44 818 297 32 603 36.3% + Jul 11 14:45-59 819 268 23 632 32.7% + Jul 11 15:00-14 818 291 30 609 35.6% + Jul 11 15:15-15 2 1 0 1 50.0% + + Read 18,876 trace 
records (641,776 bytes) in 0.0 seconds + Versions: 0 records used a version + First time: Sun Jul 11 12:11:41 2010 + Last time: Sun Jul 11 15:15:01 2010 + Duration: 11,000 seconds + Data recs: 11,000 (58.3%), average size 1108 bytes + Hit rate: 31.2% (load hits / loads) + + Count Code Function (action) + 1 00 _setup_trace (initialization) + 682 10 invalidate (miss) + 318 1c invalidate (hit, saving non-current) + 6,875 20 load (miss) + 3,125 22 load (hit) + 7,875 52 store (current, non-version) + + Histogram of object load frequency + Unique oids: 4,585 + Total loads: 10,000 + loads objects %obj %load %cum + 1 1,645 35.9% 16.4% 16.4% + 2 1,465 32.0% 29.3% 45.8% + 3 809 17.6% 24.3% 70.0% + 4 430 9.4% 17.2% 87.2% + 5 167 3.6% 8.3% 95.6% + 6 49 1.1% 2.9% 98.5% + 7 12 0.3% 0.8% 99.3% + 8 7 0.2% 0.6% 99.9% + 9 1 0.0% 0.1% 100.0% + + >>> ZEO.scripts.cache_stats.main('-s cache.trace'.split()) + ... # doctest: +ELLIPSIS + loads hits inv(h) writes hitrate + Jul 11 12:11-11 0 0 0 0 n/a + Jul 11 12:11:41 ==================== Restart ==================== + Jul 11 12:11-14 180 1 2 197 0.6% + Jul 11 12:15-29 818 107 9 793 13.1% + Jul 11 12:30-44 818 213 22 687 26.0% + Jul 11 12:45-59 818 291 19 609 35.6% + Jul 11 13:00-14 818 295 36 605 36.1% + Jul 11 13:15-29 818 277 31 623 33.9% + Jul 11 13:30-44 819 276 29 624 33.7% + Jul 11 13:45-59 818 251 25 649 30.7% + Jul 11 14:00-14 818 295 27 605 36.1% + Jul 11 14:15-29 818 262 33 638 32.0% + Jul 11 14:30-44 818 297 32 603 36.3% + Jul 11 14:45-59 819 268 23 632 32.7% + Jul 11 15:00-14 818 291 30 609 35.6% + Jul 11 15:15-15 2 1 0 1 50.0% + + Read 18,876 trace records (641,776 bytes) in 0.0 seconds + Versions: 0 records used a version + First time: Sun Jul 11 12:11:41 2010 + Last time: Sun Jul 11 15:15:01 2010 + Duration: 11,000 seconds + Data recs: 11,000 (58.3%), average size 1108 bytes + Hit rate: 31.2% (load hits / loads) + + Count Code Function (action) + 1 00 _setup_trace (initialization) + 682 10 invalidate (miss) + 318 1c 
invalidate (hit, saving non-current) + 6,875 20 load (miss) + 3,125 22 load (hit) + 7,875 52 store (current, non-version) + + Histograms of object sizes + + + Unique sizes written: 1,782 + size objs writes + 200 5 5 + 201 4 4 + 202 4 4 + 203 1 1 + 204 1 1 + 205 6 6 + 206 8 8 + ... + 1,995 1 2 + 1,996 2 2 + 1,997 1 1 + 1,998 2 2 + 1,999 2 4 + 2,000 1 1 + + >>> ZEO.scripts.cache_stats.main('-S cache.trace'.split()) + loads hits inv(h) writes hitrate + Jul 11 12:11-11 0 0 0 0 n/a + Jul 11 12:11:41 ==================== Restart ==================== + Jul 11 12:11-14 180 1 2 197 0.6% + Jul 11 12:15-29 818 107 9 793 13.1% + Jul 11 12:30-44 818 213 22 687 26.0% + Jul 11 12:45-59 818 291 19 609 35.6% + Jul 11 13:00-14 818 295 36 605 36.1% + Jul 11 13:15-29 818 277 31 623 33.9% + Jul 11 13:30-44 819 276 29 624 33.7% + Jul 11 13:45-59 818 251 25 649 30.7% + Jul 11 14:00-14 818 295 27 605 36.1% + Jul 11 14:15-29 818 262 33 638 32.0% + Jul 11 14:30-44 818 297 32 603 36.3% + Jul 11 14:45-59 819 268 23 632 32.7% + Jul 11 15:00-14 818 291 30 609 35.6% + Jul 11 15:15-15 2 1 0 1 50.0% + + >>> ZEO.scripts.cache_stats.main('-X cache.trace'.split()) + loads hits inv(h) writes hitrate + Jul 11 12:11-11 0 0 0 0 n/a + Jul 11 12:11:41 ==================== Restart ==================== + Jul 11 12:11-14 180 1 2 197 0.6% + Jul 11 12:15-29 818 107 9 793 13.1% + Jul 11 12:30-44 818 213 22 687 26.0% + Jul 11 12:45-59 818 291 19 609 35.6% + Jul 11 13:00-14 818 295 36 605 36.1% + Jul 11 13:15-29 818 277 31 623 33.9% + Jul 11 13:30-44 819 276 29 624 33.7% + Jul 11 13:45-59 818 251 25 649 30.7% + Jul 11 14:00-14 818 295 27 605 36.1% + Jul 11 14:15-29 818 262 33 638 32.0% + Jul 11 14:30-44 818 297 32 603 36.3% + Jul 11 14:45-59 819 268 23 632 32.7% + Jul 11 15:00-14 818 291 30 609 35.6% + Jul 11 15:15-15 2 1 0 1 50.0% + + Read 18,876 trace records (641,776 bytes) in 0.0 seconds + Versions: 0 records used a version + First time: Sun Jul 11 12:11:41 2010 + Last time: Sun Jul 11 15:15:01 2010 + 
Duration: 11,000 seconds + Data recs: 11,000 (58.3%), average size 1108 bytes + Hit rate: 31.2% (load hits / loads) + + Count Code Function (action) + 1 00 _setup_trace (initialization) + 682 10 invalidate (miss) + 318 1c invalidate (hit, saving non-current) + 6,875 20 load (miss) + 3,125 22 load (hit) + 7,875 52 store (current, non-version) + + >>> ZEO.scripts.cache_stats.main('-i 5 cache.trace'.split()) + loads hits inv(h) writes hitrate + Jul 11 12:11-11 0 0 0 0 n/a + Jul 11 12:11:41 ==================== Restart ==================== + Jul 11 12:11-14 180 1 2 197 0.6% + Jul 11 12:15-19 272 19 2 281 7.0% + Jul 11 12:20-24 273 35 5 265 12.8% + Jul 11 12:25-29 273 53 2 247 19.4% + Jul 11 12:30-34 272 60 8 240 22.1% + Jul 11 12:35-39 273 68 6 232 24.9% + Jul 11 12:40-44 273 85 8 215 31.1% + Jul 11 12:45-49 273 84 6 216 30.8% + Jul 11 12:50-54 272 104 9 196 38.2% + Jul 11 12:55-59 273 103 4 197 37.7% + Jul 11 13:00-04 273 92 12 208 33.7% + Jul 11 13:05-09 273 103 8 197 37.7% + Jul 11 13:10-14 272 100 16 200 36.8% + Jul 11 13:15-19 273 91 11 209 33.3% + Jul 11 13:20-24 273 96 9 204 35.2% + Jul 11 13:25-29 272 90 11 210 33.1% + Jul 11 13:30-34 273 82 14 218 30.0% + Jul 11 13:35-39 273 102 9 198 37.4% + Jul 11 13:40-44 273 92 6 208 33.7% + Jul 11 13:45-49 272 82 6 218 30.1% + Jul 11 13:50-54 273 83 8 217 30.4% + Jul 11 13:55-59 273 86 11 214 31.5% + Jul 11 14:00-04 273 95 11 205 34.8% + Jul 11 14:05-09 272 91 10 209 33.5% + Jul 11 14:10-14 273 109 6 191 39.9% + Jul 11 14:15-19 273 89 9 211 32.6% + Jul 11 14:20-24 272 84 16 216 30.9% + Jul 11 14:25-29 273 89 8 211 32.6% + Jul 11 14:30-34 273 97 12 203 35.5% + Jul 11 14:35-39 273 93 10 207 34.1% + Jul 11 14:40-44 272 107 10 193 39.3% + Jul 11 14:45-49 273 80 8 220 29.3% + Jul 11 14:50-54 273 100 8 200 36.6% + Jul 11 14:55-59 273 88 7 212 32.2% + Jul 11 15:00-04 272 99 8 201 36.4% + Jul 11 15:05-09 273 95 11 205 34.8% + Jul 11 15:10-14 273 97 11 203 35.5% + Jul 11 15:15-15 2 1 0 1 50.0% + + Read 18,876 trace records 
(641,776 bytes) in 0.0 seconds + Versions: 0 records used a version + First time: Sun Jul 11 12:11:41 2010 + Last time: Sun Jul 11 15:15:01 2010 + Duration: 11,000 seconds + Data recs: 11,000 (58.3%), average size 1108 bytes + Hit rate: 31.2% (load hits / loads) + + Count Code Function (action) + 1 00 _setup_trace (initialization) + 682 10 invalidate (miss) + 318 1c invalidate (hit, saving non-current) + 6,875 20 load (miss) + 3,125 22 load (hit) + 7,875 52 store (current, non-version) + + >>> ZEO.scripts.cache_simul.main('-s 2 -i 5 cache.trace'.split()) + CircularCacheSimulation, cache size 2,097,152 bytes + START TIME DUR. LOADS HITS INVALS WRITES HITRATE EVICTS INUSE + Jul 11 12:11 3:17 180 1 2 197 0.6% 0 10.7 + Jul 11 12:15 4:59 272 19 2 281 7.0% 0 26.4 + Jul 11 12:20 4:59 273 35 5 265 12.8% 0 40.4 + Jul 11 12:25 4:59 273 53 2 247 19.4% 0 54.8 + Jul 11 12:30 4:59 272 60 8 240 22.1% 0 67.1 + Jul 11 12:35 4:59 273 68 6 232 24.9% 0 79.8 + Jul 11 12:40 4:59 273 85 8 215 31.1% 0 91.4 + Jul 11 12:45 4:59 273 84 6 216 30.8% 77 99.1 + Jul 11 12:50 4:59 272 104 9 196 38.2% 196 98.9 + Jul 11 12:55 4:59 273 104 4 196 38.1% 188 99.1 + Jul 11 13:00 4:59 273 92 12 208 33.7% 213 99.3 + Jul 11 13:05 4:59 273 103 8 197 37.7% 190 99.0 + Jul 11 13:10 4:59 272 100 16 200 36.8% 203 99.2 + Jul 11 13:15 4:59 273 91 11 209 33.3% 222 98.7 + Jul 11 13:20 4:59 273 96 9 204 35.2% 210 99.2 + Jul 11 13:25 4:59 272 89 11 211 32.7% 212 99.1 + Jul 11 13:30 4:59 273 82 14 218 30.0% 220 99.1 + Jul 11 13:35 4:59 273 101 9 199 37.0% 191 99.5 + Jul 11 13:40 4:59 273 92 6 208 33.7% 214 99.4 + Jul 11 13:45 4:59 272 80 6 220 29.4% 217 99.3 + Jul 11 13:50 4:59 273 81 8 219 29.7% 214 99.2 + Jul 11 13:55 4:59 273 86 11 214 31.5% 208 98.8 + Jul 11 14:00 4:59 273 95 11 205 34.8% 188 99.3 + Jul 11 14:05 4:59 272 93 10 207 34.2% 207 99.3 + Jul 11 14:10 4:59 273 110 6 190 40.3% 198 98.8 + Jul 11 14:15 4:59 273 91 9 209 33.3% 209 99.1 + Jul 11 14:20 4:59 272 85 16 215 31.2% 210 99.3 + Jul 11 14:25 4:59 273 89 
8 211 32.6% 226 99.3 + Jul 11 14:30 4:59 273 96 12 204 35.2% 214 99.3 + Jul 11 14:35 4:59 273 90 10 210 33.0% 213 99.3 + Jul 11 14:40 4:59 272 106 10 194 39.0% 196 98.8 + Jul 11 14:45 4:59 273 80 8 220 29.3% 230 99.0 + Jul 11 14:50 4:59 273 99 8 201 36.3% 202 99.0 + Jul 11 14:55 4:59 273 87 8 213 31.9% 205 99.4 + Jul 11 15:00 4:59 272 98 8 202 36.0% 211 99.3 + Jul 11 15:05 4:59 273 93 11 207 34.1% 198 99.2 + Jul 11 15:10 4:59 273 96 11 204 35.2% 184 99.2 + Jul 11 15:15 1 2 1 0 1 50.0% 1 99.2 + -------------------------------------------------------------------------- + Jul 11 12:45 2:30:01 8184 2794 286 6208 34.1% 6067 99.2 + + >>> cache_run('cache4', 4) + + >>> ZEO.scripts.cache_stats.main('cache4.trace'.split()) + loads hits inv(h) writes hitrate + Jul 11 12:11-11 0 0 0 0 n/a + Jul 11 12:11:41 ==================== Restart ==================== + Jul 11 12:11-14 180 1 2 197 0.6% + Jul 11 12:15-29 818 107 9 793 13.1% + Jul 11 12:30-44 818 213 22 687 26.0% + Jul 11 12:45-59 818 322 23 578 39.4% + Jul 11 13:00-14 818 381 43 519 46.6% + Jul 11 13:15-29 818 450 44 450 55.0% + Jul 11 13:30-44 819 503 47 397 61.4% + Jul 11 13:45-59 818 496 49 404 60.6% + Jul 11 14:00-14 818 516 48 384 63.1% + Jul 11 14:15-29 818 532 59 368 65.0% + Jul 11 14:30-44 818 516 51 384 63.1% + Jul 11 14:45-59 819 529 53 371 64.6% + Jul 11 15:00-14 818 515 49 385 63.0% + Jul 11 15:15-15 2 2 0 0 100.0% + + Read 16,918 trace records (575,204 bytes) in 0.0 seconds + Versions: 0 records used a version + First time: Sun Jul 11 12:11:41 2010 + Last time: Sun Jul 11 15:15:01 2010 + Duration: 11,000 seconds + Data recs: 11,000 (65.0%), average size 1104 bytes + Hit rate: 50.8% (load hits / loads) + + Count Code Function (action) + 1 00 _setup_trace (initialization) + 501 10 invalidate (miss) + 499 1c invalidate (hit, saving non-current) + 4,917 20 load (miss) + 5,083 22 load (hit) + 5,917 52 store (current, non-version) + + >>> ZEO.scripts.cache_simul.main('-s 4 cache.trace'.split()) + 
CircularCacheSimulation, cache size 4,194,304 bytes + START TIME DUR. LOADS HITS INVALS WRITES HITRATE EVICTS INUSE + Jul 11 12:11 3:17 180 1 2 197 0.6% 0 5.4 + Jul 11 12:15 14:59 818 107 9 793 13.1% 0 27.4 + Jul 11 12:30 14:59 818 213 22 687 26.0% 0 45.7 + Jul 11 12:45 14:59 818 322 23 578 39.4% 0 61.4 + Jul 11 13:00 14:59 818 381 43 519 46.6% 0 75.8 + Jul 11 13:15 14:59 818 450 44 450 55.0% 0 88.2 + Jul 11 13:30 14:59 819 503 47 397 61.4% 36 98.2 + Jul 11 13:45 14:59 818 496 49 404 60.6% 388 98.5 + Jul 11 14:00 14:59 818 515 48 385 63.0% 376 98.3 + Jul 11 14:15 14:59 818 529 58 371 64.7% 391 98.1 + Jul 11 14:30 14:59 818 511 51 389 62.5% 376 98.5 + Jul 11 14:45 14:59 819 529 53 371 64.6% 410 97.9 + Jul 11 15:00 14:59 818 512 49 388 62.6% 379 97.7 + Jul 11 15:15 1 2 2 0 0 100.0% 0 97.7 + -------------------------------------------------------------------------- + Jul 11 13:30 1:45:01 5730 3597 355 2705 62.8% 2356 97.7 + + >>> cache_run('cache1', 1) + + >>> ZEO.scripts.cache_stats.main('cache1.trace'.split()) + loads hits inv(h) writes hitrate + Jul 11 12:11-11 0 0 0 0 n/a + Jul 11 12:11:41 ==================== Restart ==================== + Jul 11 12:11-14 180 1 2 197 0.6% + Jul 11 12:15-29 818 107 9 793 13.1% + Jul 11 12:30-44 818 160 16 740 19.6% + Jul 11 12:45-59 818 158 8 742 19.3% + Jul 11 13:00-14 818 141 21 759 17.2% + Jul 11 13:15-29 818 128 17 772 15.6% + Jul 11 13:30-44 819 151 13 749 18.4% + Jul 11 13:45-59 818 120 17 780 14.7% + Jul 11 14:00-14 818 159 17 741 19.4% + Jul 11 14:15-29 818 141 13 759 17.2% + Jul 11 14:30-44 818 157 16 743 19.2% + Jul 11 14:45-59 819 133 13 767 16.2% + Jul 11 15:00-14 818 158 10 742 19.3% + Jul 11 15:15-15 2 1 0 1 50.0% + + Read 20,286 trace records (689,716 bytes) in 0.0 seconds + Versions: 0 records used a version + First time: Sun Jul 11 12:11:41 2010 + Last time: Sun Jul 11 15:15:01 2010 + Duration: 11,000 seconds + Data recs: 11,000 (54.2%), average size 1105 bytes + Hit rate: 17.1% (load hits / loads) + + Count Code 
Function (action) + 1 00 _setup_trace (initialization) + 828 10 invalidate (miss) + 172 1c invalidate (hit, saving non-current) + 8,285 20 load (miss) + 1,715 22 load (hit) + 9,285 52 store (current, non-version) + + >>> ZEO.scripts.cache_simul.main('-s 1 cache.trace'.split()) + CircularCacheSimulation, cache size 1,048,576 bytes + START TIME DUR. LOADS HITS INVALS WRITES HITRATE EVICTS INUSE + Jul 11 12:11 3:17 180 1 2 197 0.6% 0 21.5 + Jul 11 12:15 14:59 818 107 9 793 13.1% 96 99.6 + Jul 11 12:30 14:59 818 160 16 740 19.6% 724 99.6 + Jul 11 12:45 14:59 818 158 8 742 19.3% 741 99.2 + Jul 11 13:00 14:59 818 140 21 760 17.1% 771 99.5 + Jul 11 13:15 14:59 818 125 17 775 15.3% 781 99.6 + Jul 11 13:30 14:59 819 147 13 753 17.9% 748 99.5 + Jul 11 13:45 14:59 818 120 17 780 14.7% 763 99.5 + Jul 11 14:00 14:59 818 159 17 741 19.4% 728 99.4 + Jul 11 14:15 14:59 818 141 13 759 17.2% 787 99.6 + Jul 11 14:30 14:59 818 150 15 750 18.3% 755 99.2 + Jul 11 14:45 14:59 819 132 13 768 16.1% 771 99.5 + Jul 11 15:00 14:59 818 154 10 746 18.8% 723 99.2 + Jul 11 15:15 1 2 1 0 1 50.0% 0 99.3 + -------------------------------------------------------------------------- + Jul 11 12:15 3:00:01 9820 1694 169 9108 17.3% 8388 99.3 + +Cleanup: + + >>> del os.environ["ZEO_CACHE_TRACE"] + >>> time.time = timetime + >>> ZEO.scripts.cache_stats.ctime = time.ctime + >>> ZEO.scripts.cache_simul.ctime = time.ctime + +""" + +def cache_simul_properly_handles_load_miss_after_eviction_and_inval(): + r""" + +Set up evicted and then invalidated oid + + >>> os.environ["ZEO_CACHE_TRACE"] = 'yes' + >>> cache = ZEO.cache.ClientCache('cache', 1<<21) + >>> cache.store(p64(1), p64(1), None, b'x') + >>> for i in range(10): + ... 
cache.store(p64(2+i), p64(1), None, b'x'*(1<<19)) # Evict 1 + >>> cache.store(p64(1), p64(1), None, b'x') + >>> cache.invalidate(p64(1), p64(2)) + >>> cache.load(p64(1)) + >>> cache.close() + +Now try to do simulation: + + >>> import ZEO.scripts.cache_simul + >>> ZEO.scripts.cache_simul.main('-s 1 cache.trace'.split()) + ... # doctest: +ELLIPSIS +NORMALIZE_WHITESPACE + CircularCacheSimulation, cache size 1,048,576 bytes + START TIME DUR. LOADS HITS INVALS WRITES HITRATE EVICTS INUSE + ... 1 0 1 12 0.0% 10 50.0 + -------------------------------------------------------------------------- + ... 1 0 1 12 0.0% 10 50.0 + + >>> del os.environ["ZEO_CACHE_TRACE"] + + """ + +def invalidations_with_current_tid_dont_wreck_cache(): + """ + >>> cache = ZEO.cache.ClientCache('cache', 1000) + >>> cache.store(p64(1), p64(1), None, b'data') + >>> import logging, sys + >>> handler = logging.StreamHandler(sys.stdout) + >>> logging.getLogger().addHandler(handler) + >>> old_level = logging.getLogger().getEffectiveLevel() + >>> logging.getLogger().setLevel(logging.WARNING) + >>> cache.invalidate(p64(1), p64(1)) + Ignoring invalidation with same tid as current + >>> cache.close() + >>> cache = ZEO.cache.ClientCache('cache', 1000) + >>> cache.close() + >>> logging.getLogger().removeHandler(handler) + >>> logging.getLogger().setLevel(old_level) + """ + +def rename_bad_cache_file(): + """ +An attempt to open a bad cache file will cause it to be dropped and recreated. + + >>> with open('cache', 'w') as f: + ... _ = f.write('x'*100) + >>> import logging, sys + >>> handler = logging.StreamHandler(sys.stdout) + >>> logging.getLogger().addHandler(handler) + >>> old_level = logging.getLogger().getEffectiveLevel() + >>> logging.getLogger().setLevel(logging.WARNING) + + >>> cache = ZEO.cache.ClientCache('cache', 1000) # doctest: +ELLIPSIS + Moving bad cache file to 'cache.bad'. + Traceback (most recent call last): + ... 
+ ValueError: unexpected magic number: 'xxxx' + + >>> cache.store(p64(1), p64(1), None, b'data') + >>> cache.close() + >>> with open('cache') as f: + ... _ = f.seek(0, 2) + ... print(f.tell()) + 1000 + + >>> with open('cache', 'w') as f: + ... _ = f.write('x'*200) + >>> cache = ZEO.cache.ClientCache('cache', 1000) # doctest: +ELLIPSIS + Removing bad cache file: 'cache' (prev bad exists). + Traceback (most recent call last): + ... + ValueError: unexpected magic number: 'xxxx' + + >>> cache.store(p64(1), p64(1), None, b'data') + >>> cache.close() + >>> with open('cache') as f: + ... _ = f.seek(0, 2) + ... print(f.tell()) + 1000 + + >>> with open('cache.bad') as f: + ... _ = f.seek(0, 2) + ... print(f.tell()) + 100 + + >>> logging.getLogger().removeHandler(handler) + >>> logging.getLogger().setLevel(old_level) + """ + +def test_suite(): + suite = unittest.TestSuite() + suite.addTest(unittest.makeSuite(CacheTests)) + suite.addTest( + doctest.DocTestSuite( + setUp=zope.testing.setupstack.setUpDirectory, + tearDown=zope.testing.setupstack.tearDown, + checker=ZODB.tests.util.checker + \ + zope.testing.renormalizing.RENormalizing([ + (re.compile(r'31\.3%'), '31.2%'), + ]), + ) + ) + return suite diff --git a/thesisenv/lib/python3.6/site-packages/ZEO/tests/test_client_credentials.py b/thesisenv/lib/python3.6/site-packages/ZEO/tests/test_client_credentials.py new file mode 100644 index 0000000..2a9d00a --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO/tests/test_client_credentials.py @@ -0,0 +1,61 @@ +"""Clients can pass credentials to a server. + +This is an experimental feature to enable server authentication and +authorization. +""" +from zope.testing import setupstack +import unittest + +import ZEO.StorageServer + +from . 
import forker +from .threaded import threaded_server_tests + +@unittest.skipIf(forker.ZEO4_SERVER, "ZEO4 servers don't support SSL") +class ClientAuthTests(setupstack.TestCase): + + def setUp(self): + self.setUpDirectory() + self.__register = ZEO.StorageServer.ZEOStorage.register + + def tearDown(self): + ZEO.StorageServer.ZEOStorage.register = self.__register + + def test_passing_credentials(self): + + # First, we'll temporarily swap the storage server register + # method with one that let's is see credentials that were passed: + + creds_log = [] + + def register(zs, storage_id, read_only, credentials=self): + creds_log.append(credentials) + return self.__register(zs, storage_id, read_only) + + ZEO.StorageServer.ZEOStorage.register = register + + # Now start an in process server + addr, stop = ZEO.server() + + # If we connect, without providing credentials, then no + # credentials will be passed to register: + + client = ZEO.client(addr) + + self.assertEqual(creds_log, [self]) + client.close() + creds_log.pop() + + # But if we pass credentials, they'll be passed to register: + creds = dict(user='me', password='123') + client = ZEO.client(addr, credentials=creds) + self.assertEqual(creds_log, [creds]) + client.close() + + stop() + +def test_suite(): + suite = unittest.makeSuite(ClientAuthTests) + suite.layer = threaded_server_tests + return suite + diff --git a/thesisenv/lib/python3.6/site-packages/ZEO/tests/test_client_side_conflict_resolution.py b/thesisenv/lib/python3.6/site-packages/ZEO/tests/test_client_side_conflict_resolution.py new file mode 100644 index 0000000..d48ccc4 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO/tests/test_client_side_conflict_resolution.py @@ -0,0 +1,152 @@ +import unittest +import zope.testing.setupstack + +from BTrees.Length import Length +from ZODB import serialize +from ZODB.DemoStorage import DemoStorage +from ZODB.utils import p64, z64, maxtid +from ZODB.broken import find_global + +import ZEO + +from . 
import forker +from .utils import StorageServer + +class Var(object): + def __eq__(self, other): + self.value = other + return True + +@unittest.skipIf(forker.ZEO4_SERVER, "ZEO4 servers don't support SSL") +class ClientSideConflictResolutionTests(zope.testing.setupstack.TestCase): + + def test_server_side(self): + # First, verify default conflict resolution. + server = StorageServer(self, DemoStorage()) + zs = server.zs + + reader = serialize.ObjectReader( + factory=lambda conn, *args: find_global(*args)) + writer = serialize.ObjectWriter() + ob = Length(0) + ob._p_oid = z64 + + # 2 non-conflicting transactions: + + zs.tpc_begin(1, '', '', {}) + zs.storea(ob._p_oid, z64, writer.serialize(ob), 1) + self.assertEqual(zs.vote(1), []) + tid1 = server.unpack_result(zs.tpc_finish(1)) + server.assert_calls(self, ('info', {'length': 1, 'size': Var()})) + + ob.change(1) + zs.tpc_begin(2, '', '', {}) + zs.storea(ob._p_oid, tid1, writer.serialize(ob), 2) + self.assertEqual(zs.vote(2), []) + tid2 = server.unpack_result(zs.tpc_finish(2)) + server.assert_calls(self, ('info', {'size': Var(), 'length': 1})) + + # Now, a cnflicting one: + zs.tpc_begin(3, '', '', {}) + zs.storea(ob._p_oid, tid1, writer.serialize(ob), 3) + + # Vote returns the object id, indicating that a conflict was resolved. 
+ self.assertEqual(zs.vote(3), [ob._p_oid]) + tid3 = server.unpack_result(zs.tpc_finish(3)) + + p, serial, next_serial = zs.loadBefore(ob._p_oid, maxtid) + self.assertEqual((serial, next_serial), (tid3, None)) + self.assertEqual(reader.getClassName(p), 'BTrees.Length.Length') + self.assertEqual(reader.getState(p), 2) + + + # Now, we'll create a server that expects the client to + # resolve conflicts: + + server = StorageServer( + self, DemoStorage(), client_conflict_resolution=True) + zs = server.zs + + # 2 non-conflicting transactions: + + zs.tpc_begin(1, '', '', {}) + zs.storea(ob._p_oid, z64, writer.serialize(ob), 1) + self.assertEqual(zs.vote(1), []) + tid1 = server.unpack_result(zs.tpc_finish(1)) + server.assert_calls(self, ('info', {'size': Var(), 'length': 1})) + + ob.change(1) + zs.tpc_begin(2, '', '', {}) + zs.storea(ob._p_oid, tid1, writer.serialize(ob), 2) + self.assertEqual(zs.vote(2), []) + tid2 = server.unpack_result(zs.tpc_finish(2)) + server.assert_calls(self, ('info', {'length': 1, 'size': Var()})) + + # Now, a conflicting one: + zs.tpc_begin(3, '', '', {}) + zs.storea(ob._p_oid, tid1, writer.serialize(ob), 3) + + # Vote returns an object, indicating that a conflict was not resolved. + self.assertEqual( + zs.vote(3), + [dict(oid=ob._p_oid, + serials=(tid2, tid1), + data=writer.serialize(ob), + )], + ) + + # Now, it's up to the client to resolve the conflict. It can + # do this by making another store call. 
In this call, we use + # tid2 as the starting tid: + ob.change(1) + zs.storea(ob._p_oid, tid2, writer.serialize(ob), 3) + self.assertEqual(zs.vote(3), []) + tid3 = server.unpack_result(zs.tpc_finish(3)) + server.assert_calls(self, ('info', {'size': Var(), 'length': 1})) + + p, serial, next_serial = zs.loadBefore(ob._p_oid, maxtid) + self.assertEqual((serial, next_serial), (tid3, None)) + self.assertEqual(reader.getClassName(p), 'BTrees.Length.Length') + self.assertEqual(reader.getState(p), 3) + + def test_client_side(self): + # First, traditional: + addr, stop = ZEO.server('data.fs', threaded=False) + db = ZEO.DB(addr) + with db.transaction() as conn: + conn.root.l = Length(0) + conn2 = db.open() + conn2.root.l.change(1) + with db.transaction() as conn: + conn.root.l.change(1) + + conn2.transaction_manager.commit() + + self.assertEqual(conn2.root.l.value, 2) + + db.close(); stop() + + # Now, do conflict resolution on the client. + addr2, stop = ZEO.server( + storage_conf='\n\n', + zeo_conf=dict(client_conflict_resolution=True), + threaded=False, + ) + + db = ZEO.DB(addr2) + with db.transaction() as conn: + conn.root.l = Length(0) + conn2 = db.open() + conn2.root.l.change(1) + with db.transaction() as conn: + conn.root.l.change(1) + + self.assertEqual(conn2.root.l.value, 1) + conn2.transaction_manager.commit() + + self.assertEqual(conn2.root.l.value, 2) + + db.close(); stop() + +def test_suite(): + return unittest.makeSuite(ClientSideConflictResolutionTests) diff --git a/thesisenv/lib/python3.6/site-packages/ZEO/tests/test_marshal.py b/thesisenv/lib/python3.6/site-packages/ZEO/tests/test_marshal.py new file mode 100644 index 0000000..25f5c81 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO/tests/test_marshal.py @@ -0,0 +1,34 @@ +import unittest +from ZEO.asyncio.marshal import encode +from ZEO.asyncio.marshal import pickle_server_decode + +try: + from ZopeUndo.Prefix import Prefix +except ImportError: + _HAVE_ZOPE_UNDO = False +else: + _HAVE_ZOPE_UNDO = 
True + + +class MarshalTests(unittest.TestCase): + + @unittest.skipUnless(_HAVE_ZOPE_UNDO, 'ZopeUndo is not installed') + def testServerDecodeZopeUndoFilter(self): + # this is an example (1) of Zope2's arguments for + # undoInfo call. Arguments are encoded by ZEO client + # and decoded by server. The operation must be idempotent. + # (1) https://github.com/zopefoundation/Zope/blob/2.13/src/App/Undo.py#L111 + args = (0, 20, {'user_name': Prefix('test')}) + # test against repr because Prefix __eq__ operator + # doesn't compare Prefix with Prefix but only + # Prefix with strings. see Prefix.__doc__ + self.assertEqual( + repr(pickle_server_decode(encode(*args))), + repr(args) + ) + + +def test_suite(): + suite = unittest.TestSuite() + suite.addTest(unittest.makeSuite(MarshalTests)) + return suite diff --git a/thesisenv/lib/python3.6/site-packages/ZEO/tests/test_sync.py b/thesisenv/lib/python3.6/site-packages/ZEO/tests/test_sync.py new file mode 100644 index 0000000..18e949f --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO/tests/test_sync.py @@ -0,0 +1,49 @@ +import unittest + +from zope.testing import setupstack + +from .. 
import server, client + +from ZEO import _forker as forker + +if forker.ZEO4_SERVER: + server_ping_method = 'lastTransaction' + server_zss = 'connections' +else: + server_ping_method = 'ping' + server_zss = 'zeo_storages_by_storage_id' + +class SyncTests(setupstack.TestCase): + + def instrument(self): + self.__ping_calls = 0 + + server = getattr(forker, self.__name + '_server') + + [zs] = getattr(server.server, server_zss)['1'] + orig_ping = getattr(zs, server_ping_method) + def ping(): + self.__ping_calls += 1 + return orig_ping() + + setattr(zs, server_ping_method, ping) + + def test_server_sync(self): + self.__name = 's%s' % id(self) + addr, stop = server(name=self.__name) + + # By default the client sync method is a noop: + c = client(addr) + self.instrument() + c.sync() + self.assertEqual(self.__ping_calls, 0) + c.close() + + # But if we pass server_sync: + c = client(addr, server_sync=True) + self.instrument() + c.sync() + self.assertEqual(self.__ping_calls, 1) + c.close() + + stop() diff --git a/thesisenv/lib/python3.6/site-packages/ZEO/tests/testssl.py b/thesisenv/lib/python3.6/site-packages/ZEO/tests/testssl.py new file mode 100644 index 0000000..b71af11 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO/tests/testssl.py @@ -0,0 +1,380 @@ +from .._compat import PY3 + +import mock +import os +import ssl +import unittest +from ZODB.config import storageFromString + +from ..Exceptions import ClientDisconnected +from .. import runzeo + +from .testConfig import ZEOConfigTestBase +from . 
import forker +from .threaded import threaded_server_tests + +here = os.path.dirname(__file__) +server_cert = os.path.join(here, 'server.pem') +server_key = os.path.join(here, 'server_key.pem') +serverpw_cert = os.path.join(here, 'serverpw.pem') +serverpw_key = os.path.join(here, 'serverpw_key.pem') +client_cert = os.path.join(here, 'client.pem') +client_key = os.path.join(here, 'client_key.pem') + +@unittest.skipIf(forker.ZEO4_SERVER, "ZEO4 servers don't support SSL") +class SSLConfigTest(ZEOConfigTestBase): + + def test_ssl_basic(self): + # This shows that configuring ssl has an actual effect on connections. + # Other SSL configuration tests will be Mockiavellian. + + # Also test that an SSL connection mismatch doesn't kill + # the server loop. + + # An SSL client can't talk to a non-SSL server: + addr, stop = self.start_server() + with self.assertRaises(ClientDisconnected): + self.start_client( + addr, + """ + certificate {} + key {} + """.format(client_cert, client_key), wait_timeout=1) + + # But a non-ssl one can: + client = self.start_client(addr) + self._client_assertions(client, addr) + client.close() + stop() + + # A non-SSL client can't talk to an SSL server: + addr, stop = self.start_server( + """ + certificate {} + key {} + authenticate {} + """.format(server_cert, server_key, client_cert) + ) + with self.assertRaises(ClientDisconnected): + self.start_client(addr, wait_timeout=1) + + # But an SSL one can: + client = self.start_client( + addr, + """ + certificate {} + key {} + authenticate {} + server-hostname zodb.org + """.format(client_cert, client_key, server_cert)) + self._client_assertions(client, addr) + client.close() + stop() + + def test_ssl_hostname_check(self): + addr, stop = self.start_server( + """ + certificate {} + key {} + authenticate {} + """.format(server_cert, server_key, client_cert) + ) + + # Connext with bad hostname fails: + + with self.assertRaises(ClientDisconnected): + client = self.start_client( + addr, + """ + certificate {} 
+ key {} + authenticate {} + server-hostname example.org + """.format(client_cert, client_key, server_cert), + wait_timeout=1) + + # Connext with good hostname succeeds: + client = self.start_client( + addr, + """ + certificate {} + key {} + authenticate {} + server-hostname zodb.org + """.format(client_cert, client_key, server_cert)) + self._client_assertions(client, addr) + client.close() + stop() + + def test_ssl_pw(self): + addr, stop = self.start_server( + """ + certificate {} + key {} + authenticate {} + password-function ZEO.tests.testssl.pwfunc + """.format(serverpw_cert, serverpw_key, client_cert) + ) + stop() + +@unittest.skipIf(forker.ZEO4_SERVER, "ZEO4 servers don't support SSL") +@mock.patch(('asyncio' if PY3 else 'trollius') + '.ensure_future') +@mock.patch(('asyncio' if PY3 else 'trollius') + '.set_event_loop') +@mock.patch(('asyncio' if PY3 else 'trollius') + '.new_event_loop') +@mock.patch('ZEO.asyncio.client.new_event_loop') +@mock.patch('ZEO.asyncio.server.new_event_loop') +class SSLConfigTestMockiavellian(ZEOConfigTestBase): + + @mock.patch('ssl.create_default_context') + def test_ssl_mockiavellian_server_no_ssl(self, factory, *_): + server = create_server() + self.assertFalse(factory.called) + self.assertEqual(server.acceptor.ssl_context, None) + server.close() + + def assert_context( + self, factory, context, + cert=(server_cert, server_key, None), + verify_mode=ssl.CERT_REQUIRED, + check_hostname=False, + cafile=None, capath=None, + ): + factory.assert_called_with( + ssl.Purpose.CLIENT_AUTH, cafile=cafile, capath=capath) + context.load_cert_chain.assert_called_with(*cert) + self.assertEqual(context, factory.return_value) + self.assertEqual(context.verify_mode, verify_mode) + self.assertEqual(context.check_hostname, check_hostname) + + @mock.patch('ssl.create_default_context') + def test_ssl_mockiavellian_server_ssl_no_auth(self, factory, *_): + with self.assertRaises(SystemExit): + # auth is required + create_server(certificate=server_cert, 
key=server_key) + + @mock.patch('ssl.create_default_context') + def test_ssl_mockiavellian_server_ssl_auth_file(self, factory, *_): + server = create_server( + certificate=server_cert, key=server_key, authenticate=__file__) + context = server.acceptor.ssl_context + self.assert_context(factory, context, cafile=__file__) + server.close() + + @mock.patch('ssl.create_default_context') + def test_ssl_mockiavellian_server_ssl_auth_dir(self, factory, *_): + server = create_server( + certificate=server_cert, key=server_key, authenticate=here) + context = server.acceptor.ssl_context + self.assert_context(factory, context, capath=here) + server.close() + + @mock.patch('ssl.create_default_context') + def test_ssl_mockiavellian_server_ssl_pw(self, factory, *_): + server = create_server( + certificate=server_cert, + key=server_key, + password_function='ZEO.tests.testssl.pwfunc', + authenticate=here, + ) + context = server.acceptor.ssl_context + self.assert_context( + factory, context, (server_cert, server_key, pwfunc), capath=here) + server.close() + + @mock.patch('ssl.create_default_context') + @mock.patch('ZEO.ClientStorage.ClientStorage') + def test_ssl_mockiavellian_client_no_ssl(self, ClientStorage, factory, *_): + client = ssl_client() + self.assertFalse('ssl' in ClientStorage.call_args[1]) + self.assertFalse('ssl_server_hostname' in ClientStorage.call_args[1]) + + @mock.patch('ssl.create_default_context') + @mock.patch('ZEO.ClientStorage.ClientStorage') + def test_ssl_mockiavellian_client_server_signed( + self, ClientStorage, factory, *_ + ): + client = ssl_client(certificate=client_cert, key=client_key) + context = ClientStorage.call_args[1]['ssl'] + self.assertEqual(ClientStorage.call_args[1]['ssl_server_hostname'], + None) + self.assert_context( + factory, context, (client_cert, client_key, None), + check_hostname=True) + + context.load_default_certs.assert_called_with() + + @mock.patch('ssl.create_default_context') + @mock.patch('ZEO.ClientStorage.ClientStorage') + 
def test_ssl_mockiavellian_client_auth_dir( + self, ClientStorage, factory, *_ + ): + client = ssl_client( + certificate=client_cert, key=client_key, authenticate=here) + context = ClientStorage.call_args[1]['ssl'] + self.assertEqual(ClientStorage.call_args[1]['ssl_server_hostname'], + None) + self.assert_context( + factory, context, (client_cert, client_key, None), + capath=here, + check_hostname=True, + ) + context.load_default_certs.assert_not_called() + + @mock.patch('ssl.create_default_context') + @mock.patch('ZEO.ClientStorage.ClientStorage') + def test_ssl_mockiavellian_client_auth_file( + self, ClientStorage, factory, *_ + ): + client = ssl_client( + certificate=client_cert, key=client_key, authenticate=server_cert) + context = ClientStorage.call_args[1]['ssl'] + self.assertEqual(ClientStorage.call_args[1]['ssl_server_hostname'], + None) + self.assert_context( + factory, context, (client_cert, client_key, None), + cafile=server_cert, + check_hostname=True, + ) + context.load_default_certs.assert_not_called() + + @mock.patch('ssl.create_default_context') + @mock.patch('ZEO.ClientStorage.ClientStorage') + def test_ssl_mockiavellian_client_pw( + self, ClientStorage, factory, *_ + ): + client = ssl_client( + certificate=client_cert, key=client_key, + password_function='ZEO.tests.testssl.pwfunc', + authenticate=server_cert) + context = ClientStorage.call_args[1]['ssl'] + self.assertEqual(ClientStorage.call_args[1]['ssl_server_hostname'], + None) + self.assert_context( + factory, context, (client_cert, client_key, pwfunc), + cafile=server_cert, + check_hostname=True, + ) + + @mock.patch('ssl.create_default_context') + @mock.patch('ZEO.ClientStorage.ClientStorage') + def test_ssl_mockiavellian_client_server_hostname( + self, ClientStorage, factory, *_ + ): + client = ssl_client( + certificate=client_cert, key=client_key, authenticate=server_cert, + server_hostname='example.com') + context = ClientStorage.call_args[1]['ssl'] + 
self.assertEqual(ClientStorage.call_args[1]['ssl_server_hostname'], + 'example.com') + self.assert_context( + factory, context, (client_cert, client_key, None), + cafile=server_cert, + check_hostname=True, + ) + + @mock.patch('ssl.create_default_context') + @mock.patch('ZEO.ClientStorage.ClientStorage') + def test_ssl_mockiavellian_client_check_hostname( + self, ClientStorage, factory, *_ + ): + client = ssl_client( + certificate=client_cert, key=client_key, authenticate=server_cert, + check_hostname=False) + context = ClientStorage.call_args[1]['ssl'] + self.assertEqual(ClientStorage.call_args[1]['ssl_server_hostname'], + None) + self.assert_context( + factory, context, (client_cert, client_key, None), + cafile=server_cert, + check_hostname=False, + ) + +def args(*a, **kw): + return a, kw + +def ssl_conf(**ssl_settings): + if ssl_settings: + ssl_conf = '\n' + '\n'.join( + '{} {}'.format(name.replace('_', '-'), value) + for name, value in ssl_settings.items() + ) + '\n\n' + else: + ssl_conf = '' + + return ssl_conf + +def ssl_client(**ssl_settings): + return storageFromString( + """%import ZEO + + + server 127.0.0.1:0 + {} + + """.format(ssl_conf(**ssl_settings)) + ) + +def create_server(**ssl_settings): + with open('conf', 'w') as f: + f.write( + """ + + address 127.0.0.1:0 + {} + + + + """.format(ssl_conf(**ssl_settings))) + + options = runzeo.ZEOOptions() + options.realize(['-C', 'conf']) + s = runzeo.ZEOServer(options) + s.open_storages() + s.create_server() + return s.server + +pwfunc = lambda : '1234' + + +def test_suite(): + suite = unittest.TestSuite(( + unittest.makeSuite(SSLConfigTest), + unittest.makeSuite(SSLConfigTestMockiavellian), + )) + suite.layer = threaded_server_tests + return suite + +# Helpers for other tests: + +server_config = """ + + address 127.0.0.1:0 + + certificate {} + key {} + authenticate {} + + + """.format(server_cert, server_key, client_cert) + +def client_ssl(cafile=server_key, + client_cert=client_cert, + client_key=client_key, 
+ ): + context = ssl.create_default_context( + ssl.Purpose.CLIENT_AUTH, cafile=server_cert) + + context.load_cert_chain(client_cert, client_key) + context.verify_mode = ssl.CERT_REQUIRED + context.check_hostname = False + return context + +# See +# https://discuss.pivotal.io/hc/en-us/articles/202653388-How-to-renew-an-expired-Apache-Web-Server-self-signed-certificate-using-the-OpenSSL-tool +# for instructions on updating the server.pem (the certificate) if +# needed. server.pem.csr is the request. +# This should do it: +# openssl x509 -req -days 999999 -in src/ZEO/tests/server.pem.csr -signkey src/ZEO/tests/server_key.pem -out src/ZEO/tests/server.pem +# If you need to create a new key first: +# openssl genrsa -out server_key.pem 2048 +# These two files should then be copied to client_key.pem and client.pem. diff --git a/thesisenv/lib/python3.6/site-packages/ZEO/tests/threaded.py b/thesisenv/lib/python3.6/site-packages/ZEO/tests/threaded.py new file mode 100644 index 0000000..03ace39 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO/tests/threaded.py @@ -0,0 +1,12 @@ +"""Test layer for threaded-server tests + +uvloop currently has a bug, +https://github.com/MagicStack/uvloop/issues/39, that causes failure if +multiprocessing and threaded servers are mixed in the same +application, so we isolate the few threaded tests in their own layer. 
+""" +import ZODB.tests.util + +threaded_server_tests = ZODB.tests.util.MininalTestLayer( + 'threaded_server_tests') + diff --git a/thesisenv/lib/python3.6/site-packages/ZEO/tests/utils.py b/thesisenv/lib/python3.6/site-packages/ZEO/tests/utils.py new file mode 100644 index 0000000..4ec4024 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO/tests/utils.py @@ -0,0 +1,64 @@ +"""Testing helpers +""" +import ZEO.StorageServer +from ..asyncio.server import best_protocol_version + +class ServerProtocol(object): + + method = ('register', ) + + def __init__(self, zs, + protocol_version=best_protocol_version, + addr='test-address'): + self.calls = [] + self.addr = addr + self.zs = zs + self.protocol_version = protocol_version + zs.notify_connected(self) + + closed = False + def close(self): + if not self.closed: + self.closed = True + self.zs.notify_disconnected() + + def call_soon_threadsafe(self, func, *args): + func(*args) + + def async_(self, *args): + self.calls.append(args) + + async_threadsafe = async_ + +class StorageServer(object): + """Create a client interface to a StorageServer. + + This is for testing StorageServer. It interacts with the storgr + server through its network interface, but without creating a + network connection. 
+ """ + + def __init__(self, test, storage, + protocol_version=b'Z' + best_protocol_version, + **kw): + self.test = test + self.storage_server = ZEO.StorageServer.StorageServer( + None, {'1': storage}, **kw) + self.zs = self.storage_server.create_client_handler() + self.protocol = ServerProtocol(self.zs, + protocol_version=protocol_version) + self.zs.register('1', kw.get('read_only', False)) + + def assert_calls(self, test, *argss): + if argss: + for args in argss: + test.assertEqual(self.protocol.calls.pop(0), args) + else: + test.assertEqual(self.protocol.calls, ()) + + def unpack_result(self, result): + """For methods that return Result objects, unwrap the results + """ + result, callback = result.args + callback() + return result diff --git a/thesisenv/lib/python3.6/site-packages/ZEO/tests/zdoptions.test b/thesisenv/lib/python3.6/site-packages/ZEO/tests/zdoptions.test new file mode 100644 index 0000000..9ddf2de --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO/tests/zdoptions.test @@ -0,0 +1,142 @@ +Minimal test of Server Options Handling +======================================= + +This is initially motivated by a desire to remove the requirement of +specifying a storage name when there is only one storage. + +Storage Names +------------- + +It is an error not to specify any storages: + + >>> import sys, ZEO.runzeo + >>> try: + ... from StringIO import StringIO + ... except ImportError: + ... from io import StringIO + >>> stderr = sys.stderr + + >>> with open('config', 'w') as f: + ... _ = f.write(""" + ... + ... address 8100 + ... + ... """) + + >>> sys.stderr = StringIO() + >>> options = ZEO.runzeo.ZEOOptions() + >>> options.realize('-C config'.split()) + Traceback (most recent call last): + ... + SystemExit: 2 + + >>> print(sys.stderr.getvalue()) # doctest: +ELLIPSIS +NORMALIZE_WHITESPACE + Error: not enough values for section type 'zodb.storage'; + 0 found, 1 required + ... 
+ + +But we can specify a storage without a name: + + >>> with open('config', 'w') as f: + ... _ = f.write(""" + ... + ... address 8100 + ... + ... + ... + ... """) + >>> options = ZEO.runzeo.ZEOOptions() + >>> options.realize('-C config'.split()) + >>> [storage.name for storage in options.storages] + ['1'] + +We can't have multiple unnamed storages: + + >>> sys.stderr = StringIO() + >>> with open('config', 'w') as f: + ... _ = f.write(""" + ... + ... address 8100 + ... + ... + ... + ... + ... + ... """) + >>> options = ZEO.runzeo.ZEOOptions() + >>> options.realize('-C config'.split()) + Traceback (most recent call last): + ... + SystemExit: 2 + + >>> print(sys.stderr.getvalue()) # doctest: +ELLIPSIS +NORMALIZE_WHITESPACE + Error: No more than one storage may be unnamed. + ... + +Or an unnamed storage and one named '1': + + >>> sys.stderr = StringIO() + >>> with open('config', 'w') as f: + ... _ = f.write(""" + ... + ... address 8100 + ... + ... + ... + ... + ... + ... """) + >>> options = ZEO.runzeo.ZEOOptions() + >>> options.realize('-C config'.split()) + Traceback (most recent call last): + ... + SystemExit: 2 + + >>> print(sys.stderr.getvalue()) # doctest: +ELLIPSIS +NORMALIZE_WHITESPACE + Error: Can't have an unnamed storage and a storage named 1. + ... + +But we can have multiple storages: + + >>> with open('config', 'w') as f: + ... _ = f.write(""" + ... + ... address 8100 + ... + ... + ... + ... + ... + ... """) + >>> options = ZEO.runzeo.ZEOOptions() + >>> options.realize('-C config'.split()) + >>> [storage.name for storage in options.storages] + ['x', 'y'] + +As long as the names are unique: + + >>> sys.stderr = StringIO() + >>> with open('config', 'w') as f: + ... _ = f.write(""" + ... + ... address 8100 + ... + ... + ... + ... + ... + ... """) + >>> options = ZEO.runzeo.ZEOOptions() + >>> options.realize('-C config'.split()) + Traceback (most recent call last): + ... 
+ SystemExit: 2 + + >>> print(sys.stderr.getvalue()) # doctest: +ELLIPSIS +NORMALIZE_WHITESPACE + Error: section names must not be re-used within the same container:'1' + ... + +.. Cleanup ===================================================== + + >>> sys.stderr = stderr diff --git a/thesisenv/lib/python3.6/site-packages/ZEO/tests/zeo-fan-out.test b/thesisenv/lib/python3.6/site-packages/ZEO/tests/zeo-fan-out.test new file mode 100644 index 0000000..efe7408 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO/tests/zeo-fan-out.test @@ -0,0 +1,165 @@ +ZEO Fan Out +=========== + +We should be able to set up ZEO servers with ZEO clients. Let's see +if we can make it work. + +We'll use some helper functions. The first is a helper that starts +ZEO servers for us and another one that picks ports. + +We'll start the first server: + + >>> (_, port0), adminaddr0 = start_server( + ... '\npath fs\nblob-dir blobs\n', keep=1) + +Then we'll start 2 others that use this one: + + >>> addr1, _ = start_server( + ... '\nserver %s\nblob-dir b1\n' % port0) + >>> addr2, _ = start_server( + ... '\nserver %s\nblob-dir b2\n' % port0) + + +Now, let's create some client storages that connect to these: + + >>> import os, ZEO, ZODB.blob, ZODB.POSException, transaction + + >>> db0 = ZEO.DB(port0, blob_dir='cb0') + +XXX Work around a ZEO4 server bug (that was fixed in the ZEO5 server) +which prevents sending invalidations, and thus tids for transactions +that only add objects. This doesn't matter for ZODB4/ZEO4 but does +for ZODB5/ZEO5, with it's greater reliance on MVCC. This mainly +affects creation of the database root object, which is typically the +only transaction that only adds objects. Exacerbating this further is +that previously, databases didn't really use MVCC when checking for +the root object, but now they do because they go through a connection. + + >>> with db0.transaction() as conn: + ... 
conn.root.work_around_zeo4_server_bug = 1 + >>> with db0.transaction() as conn: + ... del conn.root.work_around_zeo4_server_bug + + + >>> db1 = ZEO.DB(addr1, blob_dir='cb1') + >>> tm1 = transaction.TransactionManager() + >>> c1 = db1.open(transaction_manager=tm1) + >>> r1 = c1.root() + >>> r1 + {} + + >>> db2 = ZEO.DB(addr2, blob_dir='cb2') + >>> tm2 = transaction.TransactionManager() + >>> c2 = db2.open(transaction_manager=tm2) + >>> r2 = c2.root() + >>> r2 + {} + +If we update c1, we'll eventually see the change in c2: + + >>> import persistent.mapping + + >>> r1[1] = persistent.mapping.PersistentMapping() + >>> r1[1].v = 1000 + >>> r1[2] = persistent.mapping.PersistentMapping() + >>> r1[2].v = -1000 + >>> r1[3] = ZODB.blob.Blob(b'x'*4111222) + >>> for i in range(1000, 2000): + ... r1[i] = persistent.mapping.PersistentMapping() + ... r1[i].v = 0 + >>> tm1.commit() + >>> blob_id = r1[3]._p_oid, r1[1]._p_serial + + >>> import time + >>> for i in range(100): + ... t = tm2.begin() + ... if 1 in r2: + ... break + ... time.sleep(0.01) + >>> tm2.abort() + + + >>> r2[1].v + 1000 + + >>> r2[2].v + -1000 + +Now, let's see if we can break it. :) + + >>> def f(): + ... c = db1.open(transaction.TransactionManager()) + ... r = c.root() + ... i = 0 + ... while i < 100: + ... r[1].v -= 1 + ... r[2].v += 1 + ... try: + ... c.transaction_manager.commit() + ... i += 1 + ... except ZODB.POSException.ConflictError: + ... c.transaction_manager.abort() + ... c.close() + + >>> import threading + >>> threadf = threading.Thread(target=f) + >>> threadg = threading.Thread(target=f) + >>> threadf.start() + + >>> threadg.start() + + >>> s2 = db2.storage + >>> start_time = time.time() + >>> import os + >>> from ZEO.ClientStorage import _lock_blob + >>> while time.time() - start_time < 999: + ... t = tm2.begin() + ... if r2[1].v + r2[2].v: + ... print('oops', r2[1], r2[2]) + ... if r2[1].v == 800: + ... break # we caught up + ... path = s2.fshelper.getBlobFilename(*blob_id) + ... 
if os.path.exists(path): + ... ZODB.blob.remove_committed(path) + ... _ = s2.fshelper.createPathForOID(blob_id[0]) + ... blob_lock = _lock_blob(path) + ... s2._call('sendBlob', *blob_id, timeout=9999) + ... blob_lock.close() + ... else: print('Dang') + + >>> threadf.join() + + >>> threadg.join() + +If we shutdown and restart the source server, the variables will be +invalidated: + + >>> stop_server(adminaddr0) + >>> _ = start_server('\npath fs\n\n', + ... port=port0) + >>> time.sleep(10) # get past startup / verification + + >>> for i in range(1000): + ... c1.sync() + ... c2.sync() + ... if ( + ... (r1[1]._p_changed is None) + ... and + ... (r1[2]._p_changed is None) + ... and + ... (r2[1]._p_changed is None) + ... and + ... (r2[2]._p_changed is None) + ... ): + ... print('Cool') + ... break + ... time.sleep(0.01) + ... else: + ... print('Dang') + Cool + +Cleanup: + + >>> db0.close() + >>> db1.close() + >>> db2.close() diff --git a/thesisenv/lib/python3.6/site-packages/ZEO/tests/zeo_blob_cache.test b/thesisenv/lib/python3.6/site-packages/ZEO/tests/zeo_blob_cache.test new file mode 100644 index 0000000..38a4a94 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO/tests/zeo_blob_cache.test @@ -0,0 +1,151 @@ +ZEO caching of blob data +======================== + +ZEO supports 2 modes for providing clients access to blob data: + +shared + Blob data are shared via a network file system. The client shares + a common blob directory with the server. + +non-shared + Blob data are loaded from the storage server and cached locally. + A maximum size for the blob data can be set and data are removed + when the size is exceeded. + +In this test, we'll demonstrate that blobs data are removed from a ZEO +cache when the amount of data stored exceeds a given limit. + +Let's start by setting up some data: + + >>> addr, _ = start_server(blob_dir='server-blobs') + +We'll also create a client. 
+ + >>> import ZEO + >>> db = ZEO.DB(addr, blob_dir='blobs', blob_cache_size=3000) + +Here, we passed a blob_cache_size parameter, which specifies a target +blob cache size. This is not a hard limit, but rather a target. It +defaults to a very large value. We also passed a blob_cache_size_check +option. The blob_cache_size_check option specifies the number of +bytes, as a percent of the target that can be written or downloaded +from the server before the cache size is checked. The +blob_cache_size_check option defaults to 100. We passed 10, to check +after writing 10% of the target size. + +.. We're going to wait for any threads we started to finish, so... + + >>> import threading + >>> old_threads = list(threading.enumerate()) + +We want to check for name collections in the blob cache dir. We'll try +to provoke name collections by reducing the number of cache directory +subdirectories. + + >>> import ZEO.ClientStorage + >>> orig_blob_cache_layout_size = ZEO.ClientStorage.BlobCacheLayout.size + >>> ZEO.ClientStorage.BlobCacheLayout.size = 11 + +Now, let's write some data: + + >>> import ZODB.blob, transaction, time + >>> conn = db.open() + >>> for i in range(1, 101): + ... conn.root()[i] = ZODB.blob.Blob() + ... with conn.root()[i].open('w') as f: + ... w = f.write((chr(i)*100).encode('ascii')) + >>> transaction.commit() + +We've committed 10000 bytes of data, but our target size is 3000. We +expect to have not much more than the target size in the cache blob +directory. + + >>> import os + >>> def cache_size(d): + ... size = 0 + ... for base, dirs, files in os.walk(d): + ... for f in files: + ... if f.endswith('.blob'): + ... try: + ... size += os.stat(os.path.join(base, f)).st_size + ... except OSError: + ... if os.path.exists(os.path.join(base, f)): + ... raise + ... return size + + >>> def check(): + ... return cache_size('blobs') < 5000 + >>> def onfail(): + ... 
return cache_size('blobs') + + >>> from ZEO.tests.forker import wait_until + >>> wait_until("size is reduced", check, 99, onfail) + +If we read all of the blobs, data will be downloaded again, as +necessary, but the cache size will remain not much bigger than the +target: + + >>> for i in range(1, 101): + ... with conn.root()[i].open() as f: + ... data = f.read() + ... if data != (chr(i)*100).encode('ascii'): + ... print('bad data', repr(chr(i)), repr(data)) + + >>> wait_until("size is reduced", check, 99, onfail) + + >>> for i in range(1, 101): + ... with conn.root()[i].open() as f: + ... data = f.read() + ... if data != (chr(i)*100).encode('ascii'): + ... print('bad data', repr(chr(i)), repr(data)) + + >>> for i in range(1, 101): + ... with conn.root()[i].open('c') as f: + ... data = f.read() + ... if data != (chr(i)*100).encode('ascii'): + ... print('bad data', repr(chr(i)), repr(data)) + + >>> wait_until("size is reduced", check, 99, onfail) + +Now let see if we can stress things a bit. We'll create many clients +and get them to pound on the blobs all at once to see if we can +provoke problems: + + >>> import threading, random + >>> def run(): + ... db = ZEO.DB(addr, blob_dir='blobs', blob_cache_size=4000) + ... conn = db.open() + ... for i in range(300): + ... time.sleep(0) + ... i = random.randint(1, 100) + ... with conn.root()[i].open() as f: + ... data = f.read() + ... if data != (chr(i)*100).encode('ascii'): + ... print('bad data', repr(chr(i)), repr(data)) + ... i = random.randint(1, 100) + ... with conn.root()[i].open('c') as f: + ... data = f.read() + ... if data != (chr(i)*100).encode('ascii'): + ... print('bad data', repr(chr(i)), repr(data)) + ... db.close() + + >>> threads = [threading.Thread(target=run) for i in range(10)] + >>> for thread in threads: + ... thread.setDaemon(True) + >>> for thread in threads: + ... thread.start() + >>> for thread in threads: + ... thread.join(99) + ... if thread.isAlive(): + ... 
print("Can't join thread.") + + >>> wait_until("size is reduced", check, 99, onfail) + +.. cleanup + + >>> for thread in threading.enumerate(): + ... if thread not in old_threads: + ... thread.join(33) + + >>> db.close() + >>> ZEO.ClientStorage.BlobCacheLayout.size = orig_blob_cache_layout_size diff --git a/thesisenv/lib/python3.6/site-packages/ZEO/util.py b/thesisenv/lib/python3.6/site-packages/ZEO/util.py new file mode 100644 index 0000000..05948a7 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO/util.py @@ -0,0 +1,56 @@ +############################################################################## +# +# Copyright (c) 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Utilities for setting up the server environment.""" + +import os + +def parentdir(p, n=1): + """Return the ancestor of p from n levels up.""" + d = p + while n: + d = os.path.dirname(d) + if not d or d == '.': + d = os.getcwd() + n -= 1 + return d + +class Environment(object): + """Determine location of the Data.fs & ZEO_SERVER.pid files. + + Pass the argv[0] used to start ZEO to the constructor. + + Use the zeo_pid and fs attributes to get the filenames. 
+ """ + + def __init__(self, argv0): + v = os.environ.get("INSTANCE_HOME") + if v is None: + # looking for a Zope/var directory assuming that this code + # is installed in Zope/lib/python/ZEO + p = parentdir(argv0, 4) + if os.path.isdir(os.path.join(p, "var")): + v = p + else: + v = os.getcwd() + self.home = v + self.var = os.path.join(v, "var") + if not os.path.isdir(self.var): + self.var = self.home + + pid = os.environ.get("ZEO_SERVER_PID") + if pid is None: + pid = os.path.join(self.var, "ZEO_SERVER.pid") + + self.zeo_pid = pid + self.fs = os.path.join(self.var, "Data.fs") diff --git a/thesisenv/lib/python3.6/site-packages/ZEO/version.txt b/thesisenv/lib/python3.6/site-packages/ZEO/version.txt new file mode 100644 index 0000000..a3b098b --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO/version.txt @@ -0,0 +1 @@ +3.7.0b3 diff --git a/thesisenv/lib/python3.6/site-packages/ZEO/zconfig.py b/thesisenv/lib/python3.6/site-packages/ZEO/zconfig.py new file mode 100644 index 0000000..c40b303 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO/zconfig.py @@ -0,0 +1,90 @@ +"""SSL configuration support +""" +import os +import sys + +def ssl_config(section, server): + import ssl + + cafile = capath = None + auth = section.authenticate + if auth: + if os.path.isdir(auth): + capath=auth + elif auth != 'DYNAMIC': + cafile=auth + + context = ssl.create_default_context( + ssl.Purpose.CLIENT_AUTH, cafile=cafile, capath=capath) + + if not auth: + assert not server + context.load_default_certs() + + if section.certificate: + password = section.password_function + if password: + module, name = password.rsplit('.', 1) + module = __import__(module, globals(), locals(), ['*'], 0) + password = getattr(module, name) + context.load_cert_chain(section.certificate, section.key, password) + + context.verify_mode = ssl.CERT_REQUIRED + + if sys.version_info >= (3, 4): + context.verify_flags |= ssl.VERIFY_X509_STRICT | ( + context.cert_store_stats()['crl'] and 
ssl.VERIFY_CRL_CHECK_LEAF) + + if server: + context.check_hostname = False + return context + + context.check_hostname = section.check_hostname + + return context, section.server_hostname + +def server_ssl(section): + return ssl_config(section, True) + +def client_ssl(section): + return ssl_config(section, False) + +class ClientStorageConfig(object): + + def __init__(self, config): + self.config = config + self.name = config.getSectionName() + + def open(self): + from ZEO.ClientStorage import ClientStorage + # config.server is a multikey of socket-connection-address values + # where the value is a socket family, address tuple. + config = self.config + + addresses = [server.address for server in config.server] + options = {} + if config.blob_cache_size is not None: + options['blob_cache_size'] = config.blob_cache_size + if config.blob_cache_size_check is not None: + options['blob_cache_size_check'] = config.blob_cache_size_check + if config.client_label is not None: + options['client_label'] = config.client_label + + ssl = config.ssl + if ssl: + options['ssl'] = ssl[0] + options['ssl_server_hostname'] = ssl[1] + + return ClientStorage( + addresses, + blob_dir=config.blob_dir, + shared_blob_dir=config.shared_blob_dir, + storage=config.storage, + cache_size=config.cache_size, + cache=config.cache_path, + name=config.name, + read_only=config.read_only, + read_only_fallback=config.read_only_fallback, + server_sync = config.server_sync, + wait_timeout=config.wait_timeout, + **options) diff --git a/thesisenv/lib/python3.6/site-packages/ZEO/zeoctl.py b/thesisenv/lib/python3.6/site-packages/ZEO/zeoctl.py new file mode 100644 index 0000000..ec12c9a --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO/zeoctl.py @@ -0,0 +1,31 @@ +#!/usr/bin/env python2.3 +############################################################################## +# +# Copyright (c) 2005 Zope Foundation and Contributors. +# All Rights Reserved. 
+# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## + +"""Wrapper script for zdctl.py that causes it to use the ZEO schema.""" + +import os + +import ZEO +import zdaemon.zdctl + +# Main program +def main(args=None): + options = zdaemon.zdctl.ZDCtlOptions() + options.schemadir = os.path.dirname(ZEO.__file__) + options.schemafile = "zeoctl.xml" + zdaemon.zdctl.main(args, options) + +if __name__ == "__main__": + main() diff --git a/thesisenv/lib/python3.6/site-packages/ZEO/zeoctl.xml b/thesisenv/lib/python3.6/site-packages/ZEO/zeoctl.xml new file mode 100644 index 0000000..36c0b8c --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZEO/zeoctl.xml @@ -0,0 +1,31 @@ + + + + This schema describes the configuration of the ZEO storage server + controller. It differs from the schema for the storage server + only in that the "runner" section is required. + + + + + + + + + + + + + + +

+ +
+ + + +
+ + diff --git a/thesisenv/lib/python3.6/site-packages/ZODB-5.5.0.dist-info/DESCRIPTION.rst b/thesisenv/lib/python3.6/site-packages/ZODB-5.5.0.dist-info/DESCRIPTION.rst new file mode 100644 index 0000000..46fdc17 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB-5.5.0.dist-info/DESCRIPTION.rst @@ -0,0 +1,546 @@ +======================================= +ZODB, a Python object-oriented database +======================================= + +.. image:: https://img.shields.io/pypi/v/ZODB.svg + :target: https://pypi.python.org/pypi/ZODB/ + :alt: Latest release + +.. image:: https://img.shields.io/pypi/pyversions/ZODB.svg + :target: https://pypi.org/project/ZODB/ + :alt: Supported Python versions + +.. image:: https://travis-ci.org/zopefoundation/ZODB.svg?branch=master + :target: https://travis-ci.org/zopefoundation/ZODB + :alt: Build status + +.. image:: https://coveralls.io/repos/github/zopefoundation/ZODB/badge.svg + :target: https://coveralls.io/github/zopefoundation/ZODB + :alt: Coverage status + +.. image:: https://readthedocs.org/projects/zodb/badge/?version=latest + :target: https://zodb.readthedocs.io/en/latest/ + :alt: Documentation status + +ZODB provides an object-oriented database for Python that provides a +high-degree of transparency. ZODB runs on Python 2.7 or Python 3.4 and +above. It also runs on PyPy. + +- no separate language for database operations + +- very little impact on your code to make objects persistent + +- no database mapper that partially hides the database. + + Using an object-relational mapping **is not** like using an + object-oriented database. + +- almost no seam between code and database. + +ZODB is an ACID Transactional database. + +To learn more, visit: http://www.zodb.org + +The github repository is: at https://github.com/zopefoundation/zodb + +If you're interested in contributing to ZODB itself, see the +`developer notes +`_. 
+ + +================ + Change History +================ + +5.5.0 (2018-10-13) +================== + +- Add support for Python 3.7. + +- Bump the dependency on zodbpickle to at least 1.0.1. This is + required to avoid a memory leak on Python 2.7. See `issue 203 + `_. + +- Bump the dependency on persistent to at least 4.4.0. + +- Make the internal support functions for dealing with OIDs (``p64`` + and ``u64``) somewhat faster and raise more informative + exceptions on certain types of bad input. See `issue 216 + `_. + +- Remove support for ``python setup.py test``. It hadn't been working + for some time. See `issue #218 + `_. + +- Make the tests run faster by avoiding calls to ``time.sleep()``. + +5.4.0 (2018-03-26) +================== + +- ZODB now uses pickle protocol 3 for both Python 2 and Python 3. + + (Previously, protocol 2 was used for Python 2.) + + The zodbpickle package provides a `zodbpickle.binary` string type + that should be used in Python 2 to cause binary strings to be saved + in a pickle binary format, so they can be loaded correctly in + Python 3. Pickle protocol 3 is needed for this to work correctly. + +- Object identifiers in persistent references are saved as + `zodbpickle.binary` strings in Python 2, so that they are loaded + correctly in Python 3. + +- If an object is missing from the index while packing a ``FileStorage``, + report its full ``oid``. + +- Storage imports are a bit faster. + +- Storages can be important from non-seekable sources, like + file-wrapped pipes. + +5.3.0 (2017-08-30) +================== + +- Add support for Python 3.6. + +- Drop support for Python 3.3. + +- Ensure that the ``HistoricalStorageAdapter`` forwards the ``release`` method to + its base instance. See `issue 78 `_. + +- Use a higher pickle protocol (2) for serializing objects on Python + 2; previously protocol 1 was used. 
This is *much* more efficient for + new-style classes (all persistent objects are new-style), at the + cost of being very slightly less efficient for old-style classes. + + .. note:: On Python 2, this will now allow open ``file`` objects + (but **not** open blobs or sockets) to be pickled (loading + the object will result in a closed file); previously this + would result in a ``TypeError``. Doing so is not + recommended as they cannot be loaded in Python 3. + + See `issue 179 `_. + +5.2.4 (2017-05-17) +================== + +- ``DB.close`` now explicitly frees internal resources. This is + helpful to avoid false positives in tests that check for leaks. + +- Optimize getting the path to a blob file. See + `issue 161 `_. + +- All classes are new-style classes on Python 2 (they were already + new-style on Python 3). This improves performance on PyPy. See + `issue 160 `_. + +5.2.3 (2017-04-11) +================== + +- Fix an import error. See `issue 158 `_. + +5.2.2 (2017-04-11) +================== + +- Fixed: A blob misfeature set blob permissions so that blobs and blob + directories were only readable by the database process owner, rather + than honoring user-controlled permissions (e.g. ``umask``). + See `issue 155 `_. + +5.2.1 (2017-04-08) +================== + +- Fixed: When opening FileStorages in read-only mode, non-existent + files were silently created. Creating a read-only file-storage + against a non-existent file errors. + +5.2.0 (2017-02-09) +================== + +- Call new afterCompletion API on storages to allow them to free + resources after transaction complete. + See `issue 147 `__. +- Take advantage of the new transaction-manager explicit mode to avoid + starting transactions unnecessarily when transactions end. + +- ``Connection.new_oid`` delegates to its storage, not the DB. This is + helpful for improving concurrency in MVCC storages like RelStorage. + See `issue 139 `_. + +- ``persistent`` is no longer required at setup time. + See `issue 119 `_. 
+ +- ``Connection.close`` and ``Connection.open`` no longer race on + ``self.transaction_manager``, which could lead to + ``AttributeError``. This was a bug introduced in 5.0.1. See `issue + 142 `_. + + +5.1.1 (2016-11-18) +================== + +- Fixed: ``ZODB.Connection.TransactionMetaData`` didn't support custom data + storage that some storages rely on. + +5.1.0 (2016-11-17) +================== + +- ZODB now translates transaction meta data, ``user`` and + ``description`` from text to bytes before passing them to storages, + and converts them back to text when retrieving them from storages in + the ``history``, ``undoLog`` and ``undoInfo`` methods. + + The ``IDatabase`` interface was updated to reflect that ``history``, + ``undoLog`` and ``undoInfo`` are available on database objects. + (They were always available, but not documented in the interface.) + +5.0.1 (2016-11-17) +================== + +- Fix an AttributeError that DemoStorage could raise if it was asked + to store a blob into a temporary changes before reading a blob. See + `issue 103 `_. + +- Call _p_resolveConflict() even if a conflicting change doesn't change the + state. This reverts to the behaviour of 3.10.3 and older. + +- Closing a Connection now reverts its ``transaction_manager`` to + None. This helps prevent errors and release resources when the + ``transaction_manager`` was the (default) thread-local manager. See + `issue 114 `_. + +- Many docstrings have been improved. + +5.0.0 (2016-09-06) +================== + +Major internal improvements and cleanups plus: + +- Added a connection ``prefetch`` method that can be used to request + that a storage prefetch data an application will need:: + + conn.prefetch(obj, ...) + + Where arguments can be objects, object ids, or iterables of objects + or object ids. + + Added optional ``prefetch`` methods to the storage APIs. If a + storage doesn't support prefetch, then the connection prefetch + method is a noop. 
+ +- fstail: print the txn offset and header size, instead of only the data offset. + fstail can now be used to truncate a DB at the right offset. + +- Drop support for old commit protocol. All of the build-in storages + implement the new protocol. This new protocol allows storages to + provide better write performance by allowing multiple commits to + execute in parallel. + +5.0.0b1 (2016-08-04) +==================== + +- fstail: print the txn offset and header size, instead of only the data offset. + fstail can now be used to truncate a DB at the right offset. + +Numerous internal cleanups, including: + +- Changed the way the root object was created. Now the root object is + created using a database connection, rather than by making low-level + storage calls. + +- Drop support for the old commit protocol. + +- Internal FileStorage-undo fixes that should allow undo in some cases + where it didn't work before. + +- Drop the ``version`` argument to some methods where it was the last + argument and optional. + +5.0.0a6 (2016-07-21) +==================== + +- Added a connection ``prefetch`` method that can be used to request + that a storage prefect data an application will need:: + + conn.prefetch(obj, ...) + + Where arguments can be objects, object ids, or iterables of objects + or object ids. + + Added optional ``prefetch`` methods to the storage APIs. If a + storage doesn't support prefetch, then the connection prefetch + method is a noop. + +5.0.0a5 (2016-07-06) +==================== + +Drop support for old commit protocol. All of the build-in storages +implement the new protocol. This new protocol allows storages to +provide better write performance by allowing multiple commits to +execute in parallel. + +5.0.0a4 (2016-07-05) +==================== + +See 4.4.2. + +5.0.0a3 (2016-07-01) +==================== + +See 4.4.1. + +5.0.0a2 (2016-07-01) +==================== + +See 4.4.0. 
+ +5.0.0a1 (2016-06-20) +==================== + +Major **internal** implementation changes to the Multi Version +Concurrency Control (MVCC) implementation: + +- For storages that implement IMVCCStorage (RelStorage), no longer + implement MVCC in ZODB. + +- For other storages, MVCC is implemented using an additional storage + layer. This underlying layer works by calling ``loadBefore``. The + low-level storage ``load`` method isn't used any more. + + This change allows server-based storages like ZEO and NEO to be + implemented more simply and cleanly. + +4.4.3 (2016-08-04) +================== + +- Internal FileStorage-undo fixes that should allow undo in some cases + where it didn't work before. + +- fstail: print the txn offset and header size, instead of only the data offset. + fstail can now be used to truncate a DB at the right offset. + +4.4.2 (2016-07-08) +================== + +Better support of the new commit protocol. This fixes issues with blobs and +undo. See pull requests #77, #80, #83 + +4.4.1 (2016-07-01) +================== + +Added IMultiCommitStorage to directly represent the changes in the 4.4.0 +release and to make complient storages introspectable. + +4.4.0 (2016-06-30) +================== + +This release begins evolution to a more effcient commit protocol that +allows storage implementations, like `NEO `_, +to support multiple transactions committing at the same time, for +greater write parallelism. + +This release updates IStorage: + +- The committed transaction's ID is returned by ``tpc_finish``, rather + than being returned in response store and tpc_vote results. + +- ``tpc_vote`` is now expected to return ``None`` or a list of object + ids for objects for which conflicts were resolved. + +This release works with storages that implemented the older version of +the storage interface, but also supports storages that implement the +updated interface. 
+ +4.3.1 (2016-06-06) +================== + +- Fixed: FileStorage loadBefore didn't handle deleted/undone data correctly. + +4.3.0 (2016-05-31) +================== + +- Drop support for Python 2.6 and 3.2. + +- Make the ``zodbpickle`` dependency required and not conditional. + This fixes various packaging issues involving pip and its wheel + cache. zodbpickle was only optional under Python 2.6 so this change + only impacts users of that version. See + https://github.com/zopefoundation/ZODB/pull/42. + +- Add support for Python 3.5. + +- Avoid failure during cleanup of nested databases that provide MVCC + on storage level (Relstorage). + https://github.com/zopefoundation/ZODB/issues/45 + +- Remove useless dependency to `zdaemon` in setup.py. Remove ZEO documentation. + Both were leftovers from the time where ZEO was part of this repository. + +- Fix possible data corruption after FileStorage is truncated to roll back a + transaction. + https://github.com/zopefoundation/ZODB/pull/52 + +- DemoStorage: add support for conflict resolution and fix history() + https://github.com/zopefoundation/ZODB/pull/58 + +- Fixed a test that depended on implementation-specific behavior in tpc_finish + +4.2.0 (2015-06-02) +================== + +- Declare conditional dependencies using PEP-426 environment markers + (fixing interation between pip 7's wheel cache and tox). See + https://github.com/zopefoundation/ZODB/issues/36. + +4.2.0b1 (2015-05-22) +==================== + +- Log failed conflict resolution attempts at ``DEBUG`` level. See: + https://github.com/zopefoundation/ZODB/pull/29. + +- Fix command-line parsing of ``--verbose`` and ``--verify`` arguments. + (The short versions, ``-v`` and ``-V``, were parsed correctly.) + +- Add support for PyPy. + +- Fix the methods in ``ZODB.serialize`` that find object references + under Python 2.7 (used in scripts like ``referrers``, ``netspace``, + and ``fsrecover`` among others). This requires the addition of the + ``zodbpickle`` dependency. 
+ +- FileStorage: fix an edge case when disk space runs out while packing, + do not leave the ``.pack`` file around. That would block any write to the + to-be-packed ``Data.fs``, because the disk would stay at 0 bytes free. + See https://github.com/zopefoundation/ZODB/pull/21. + +4.1.0 (2015-01-11) +================== + +- Fix registration of custom logging level names ("BLATHER", "TRACE"). + + We have been registering them in the wrong order since 2004. Before + Python 3.4, the stdlib ``logging`` module masked the error by registering + them in *both* directions. + +- Add support for Python 3.4. + +4.0.1 (2014-07-13) +================== + +- Fix ``POSKeyError`` during ``transaction.commit`` when after + ``savepoint.rollback``. See + https://github.com/zopefoundation/ZODB/issues/16 + +- Ensure that the pickler used in PyPy always has a ``persistent_id`` + attribute (``inst_persistent_id`` is not present on the pure-Python + pickler). (PR #17) + +- Provide better error reporting when trying to load an object on a + closed connection. + +4.0.0 (2013-08-18) +================== + +Finally released. + +4.0.0b3 (2013-06-11) +==================== + +- Switch to using non-backward-compatible pickles (protocol 3, without + storing bytes as strings) under Python 3. Updated the magic number + for file-storage files under Python3 to indicate the incompatibility. + +- Fixed: A ``UnicodeDecodeError`` could happen for non-ASCII OIDs + when using bushy blob layout. + +4.0.0b2 (2013-05-14) +==================== + +- Extended the filename renormalizer used for blob doctests to support + the filenames used by ZEO in non-shared mode. + +- Added ``url`` parameter to ``setup()`` (PyPI says it is required). + +4.0.0b1 (2013-05-10) +===================== + +- Skipped non-unit tests in ``setup.py test``. Use the buildout to run tests + requiring "layer" support. + +- Included the filename in the exception message to support debugging in case + ``loadBlob`` does not find the file. 
+ +- Added support for Python 3.2 / 3.3. + +.. note:: + + ZODB 4.0.x is supported on Python 3.x for *new* applications only. + Due to changes in the standard library's pickle support, the Python3 + support does **not** provide forward- or backward-compatibility + at the data level with Python2. A future version of ZODB may add + such support. + + Applications which need migrate data from Python2 to Python3 should + plan to script this migration using separte databases, e.g. via a + "dump-and-reload" approach, or by providing explicit fix-ups of the + pickled values as transactions are copied between storages. + + +4.0.0a4 (2012-12-17) +===================== + +- Enforced usage of bytes for ``_p_serial`` of persistent objects (fixes + compatibility with recent persistent releases). + +4.0.0a3 (2012-12-01) +===================== + +- Fixed: An elaborate test for trvial logic corrupted module state in a + way that made other tests fail spuriously. + +4.0.0a2 (2012-11-13) +===================== + +Bugs Fixed +---------- + +- An unneeded left-over setting in setup.py caused installation with + pip to fail. + +4.0.0a1 (2012-11-07) +===================== + +New Features +------------ + +- The ``persistent`` and ``BTrees`` packages are now released as separate + distributions, on which ZODB now depends. + +- ZODB no longer depends on zope.event. It now uses ZODB.event, which + uses zope.event if it is installed. You can override + ZODB.event.notify to provide your own event handling, although + zope.event is recommended. + +- BTrees allowed object keys with insane comparison. (Comparison + inherited from object, which compares based on in-process address.) + Now BTrees raise TypeError if an attempt is made to save a key with + comparison inherited from object. (This doesn't apply to old-style + class instances.) + +Bugs Fixed +---------- + +- Ensured that the export file and index file created by ``repozo`` share + the same timestamp. 
+ + https://bugs.launchpad.net/zodb/+bug/993350 + +- Pinned the ``transaction`` and ``manuel`` dependencies to Python 2.5- + compatible versions when installing under Python 2.5. + + +.. note:: + Please see https://github.com/zopefoundation/ZODB/blob/master/HISTORY.rst + for older versions of ZODB. + + diff --git a/thesisenv/lib/python3.6/site-packages/ZODB-5.5.0.dist-info/INSTALLER b/thesisenv/lib/python3.6/site-packages/ZODB-5.5.0.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB-5.5.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/thesisenv/lib/python3.6/site-packages/ZODB-5.5.0.dist-info/METADATA b/thesisenv/lib/python3.6/site-packages/ZODB-5.5.0.dist-info/METADATA new file mode 100644 index 0000000..2ba32b9 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB-5.5.0.dist-info/METADATA @@ -0,0 +1,590 @@ +Metadata-Version: 2.0 +Name: ZODB +Version: 5.5.0 +Summary: ZODB, a Python object-oriented database +Home-page: http://www.zodb.org/ +Author: Zope Foundation and Contributors +Author-email: zodb-dev@zope.org +License: ZPL 2.1 +Description-Content-Type: UNKNOWN +Keywords: database nosql python zope +Platform: any +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: Zope Public License +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Topic :: Database +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Classifier: Operating 
System :: Microsoft :: Windows +Classifier: Operating System :: Unix +Classifier: Framework :: ZODB +Requires-Python: >=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.* +Requires-Dist: persistent (>=4.4.0) +Requires-Dist: BTrees (>=4.2.0) +Requires-Dist: ZConfig +Requires-Dist: transaction (>=2.0.3) +Requires-Dist: six +Requires-Dist: zc.lockfile +Requires-Dist: zope.interface +Requires-Dist: zodbpickle (>=1.0.1) +Provides-Extra: test +Requires-Dist: manuel; extra == 'test' +Requires-Dist: zope.testing; extra == 'test' +Requires-Dist: zope.testrunner (>=4.4.6); extra == 'test' +Provides-Extra: test +Requires-Dist: mock; python_version == "2.7" and extra == 'test' + +======================================= +ZODB, a Python object-oriented database +======================================= + +.. image:: https://img.shields.io/pypi/v/ZODB.svg + :target: https://pypi.python.org/pypi/ZODB/ + :alt: Latest release + +.. image:: https://img.shields.io/pypi/pyversions/ZODB.svg + :target: https://pypi.org/project/ZODB/ + :alt: Supported Python versions + +.. image:: https://travis-ci.org/zopefoundation/ZODB.svg?branch=master + :target: https://travis-ci.org/zopefoundation/ZODB + :alt: Build status + +.. image:: https://coveralls.io/repos/github/zopefoundation/ZODB/badge.svg + :target: https://coveralls.io/github/zopefoundation/ZODB + :alt: Coverage status + +.. image:: https://readthedocs.org/projects/zodb/badge/?version=latest + :target: https://zodb.readthedocs.io/en/latest/ + :alt: Documentation status + +ZODB provides an object-oriented database for Python that provides a +high-degree of transparency. ZODB runs on Python 2.7 or Python 3.4 and +above. It also runs on PyPy. + +- no separate language for database operations + +- very little impact on your code to make objects persistent + +- no database mapper that partially hides the database. + + Using an object-relational mapping **is not** like using an + object-oriented database. + +- almost no seam between code and database. 
+ +ZODB is an ACID Transactional database. + +To learn more, visit: http://www.zodb.org + +The github repository is: at https://github.com/zopefoundation/zodb + +If you're interested in contributing to ZODB itself, see the +`developer notes +`_. + + +================ + Change History +================ + +5.5.0 (2018-10-13) +================== + +- Add support for Python 3.7. + +- Bump the dependency on zodbpickle to at least 1.0.1. This is + required to avoid a memory leak on Python 2.7. See `issue 203 + `_. + +- Bump the dependency on persistent to at least 4.4.0. + +- Make the internal support functions for dealing with OIDs (``p64`` + and ``u64``) somewhat faster and raise more informative + exceptions on certain types of bad input. See `issue 216 + `_. + +- Remove support for ``python setup.py test``. It hadn't been working + for some time. See `issue #218 + `_. + +- Make the tests run faster by avoiding calls to ``time.sleep()``. + +5.4.0 (2018-03-26) +================== + +- ZODB now uses pickle protocol 3 for both Python 2 and Python 3. + + (Previously, protocol 2 was used for Python 2.) + + The zodbpickle package provides a `zodbpickle.binary` string type + that should be used in Python 2 to cause binary strings to be saved + in a pickle binary format, so they can be loaded correctly in + Python 3. Pickle protocol 3 is needed for this to work correctly. + +- Object identifiers in persistent references are saved as + `zodbpickle.binary` strings in Python 2, so that they are loaded + correctly in Python 3. + +- If an object is missing from the index while packing a ``FileStorage``, + report its full ``oid``. + +- Storage imports are a bit faster. + +- Storages can be important from non-seekable sources, like + file-wrapped pipes. + +5.3.0 (2017-08-30) +================== + +- Add support for Python 3.6. + +- Drop support for Python 3.3. + +- Ensure that the ``HistoricalStorageAdapter`` forwards the ``release`` method to + its base instance. See `issue 78 `_. 
+ +- Use a higher pickle protocol (2) for serializing objects on Python + 2; previously protocol 1 was used. This is *much* more efficient for + new-style classes (all persistent objects are new-style), at the + cost of being very slightly less efficient for old-style classes. + + .. note:: On Python 2, this will now allow open ``file`` objects + (but **not** open blobs or sockets) to be pickled (loading + the object will result in a closed file); previously this + would result in a ``TypeError``. Doing so is not + recommended as they cannot be loaded in Python 3. + + See `issue 179 `_. + +5.2.4 (2017-05-17) +================== + +- ``DB.close`` now explicitly frees internal resources. This is + helpful to avoid false positives in tests that check for leaks. + +- Optimize getting the path to a blob file. See + `issue 161 `_. + +- All classes are new-style classes on Python 2 (they were already + new-style on Python 3). This improves performance on PyPy. See + `issue 160 `_. + +5.2.3 (2017-04-11) +================== + +- Fix an import error. See `issue 158 `_. + +5.2.2 (2017-04-11) +================== + +- Fixed: A blob misfeature set blob permissions so that blobs and blob + directories were only readable by the database process owner, rather + than honoring user-controlled permissions (e.g. ``umask``). + See `issue 155 `_. + +5.2.1 (2017-04-08) +================== + +- Fixed: When opening FileStorages in read-only mode, non-existent + files were silently created. Creating a read-only file-storage + against a non-existent file errors. + +5.2.0 (2017-02-09) +================== + +- Call new afterCompletion API on storages to allow them to free + resources after transaction complete. + See `issue 147 `__. +- Take advantage of the new transaction-manager explicit mode to avoid + starting transactions unnecessarily when transactions end. + +- ``Connection.new_oid`` delegates to its storage, not the DB. 
This is + helpful for improving concurrency in MVCC storages like RelStorage. + See `issue 139 `_. + +- ``persistent`` is no longer required at setup time. + See `issue 119 `_. + +- ``Connection.close`` and ``Connection.open`` no longer race on + ``self.transaction_manager``, which could lead to + ``AttributeError``. This was a bug introduced in 5.0.1. See `issue + 142 `_. + + +5.1.1 (2016-11-18) +================== + +- Fixed: ``ZODB.Connection.TransactionMetaData`` didn't support custom data + storage that some storages rely on. + +5.1.0 (2016-11-17) +================== + +- ZODB now translates transaction meta data, ``user`` and + ``description`` from text to bytes before passing them to storages, + and converts them back to text when retrieving them from storages in + the ``history``, ``undoLog`` and ``undoInfo`` methods. + + The ``IDatabase`` interface was updated to reflect that ``history``, + ``undoLog`` and ``undoInfo`` are available on database objects. + (They were always available, but not documented in the interface.) + +5.0.1 (2016-11-17) +================== + +- Fix an AttributeError that DemoStorage could raise if it was asked + to store a blob into a temporary changes before reading a blob. See + `issue 103 `_. + +- Call _p_resolveConflict() even if a conflicting change doesn't change the + state. This reverts to the behaviour of 3.10.3 and older. + +- Closing a Connection now reverts its ``transaction_manager`` to + None. This helps prevent errors and release resources when the + ``transaction_manager`` was the (default) thread-local manager. See + `issue 114 `_. + +- Many docstrings have been improved. + +5.0.0 (2016-09-06) +================== + +Major internal improvements and cleanups plus: + +- Added a connection ``prefetch`` method that can be used to request + that a storage prefetch data an application will need:: + + conn.prefetch(obj, ...) + + Where arguments can be objects, object ids, or iterables of objects + or object ids. 
+ + Added optional ``prefetch`` methods to the storage APIs. If a + storage doesn't support prefetch, then the connection prefetch + method is a noop. + +- fstail: print the txn offset and header size, instead of only the data offset. + fstail can now be used to truncate a DB at the right offset. + +- Drop support for old commit protocol. All of the build-in storages + implement the new protocol. This new protocol allows storages to + provide better write performance by allowing multiple commits to + execute in parallel. + +5.0.0b1 (2016-08-04) +==================== + +- fstail: print the txn offset and header size, instead of only the data offset. + fstail can now be used to truncate a DB at the right offset. + +Numerous internal cleanups, including: + +- Changed the way the root object was created. Now the root object is + created using a database connection, rather than by making low-level + storage calls. + +- Drop support for the old commit protocol. + +- Internal FileStorage-undo fixes that should allow undo in some cases + where it didn't work before. + +- Drop the ``version`` argument to some methods where it was the last + argument and optional. + +5.0.0a6 (2016-07-21) +==================== + +- Added a connection ``prefetch`` method that can be used to request + that a storage prefect data an application will need:: + + conn.prefetch(obj, ...) + + Where arguments can be objects, object ids, or iterables of objects + or object ids. + + Added optional ``prefetch`` methods to the storage APIs. If a + storage doesn't support prefetch, then the connection prefetch + method is a noop. + +5.0.0a5 (2016-07-06) +==================== + +Drop support for old commit protocol. All of the build-in storages +implement the new protocol. This new protocol allows storages to +provide better write performance by allowing multiple commits to +execute in parallel. + +5.0.0a4 (2016-07-05) +==================== + +See 4.4.2. 
+ +5.0.0a3 (2016-07-01) +==================== + +See 4.4.1. + +5.0.0a2 (2016-07-01) +==================== + +See 4.4.0. + +5.0.0a1 (2016-06-20) +==================== + +Major **internal** implementation changes to the Multi Version +Concurrency Control (MVCC) implementation: + +- For storages that implement IMVCCStorage (RelStorage), no longer + implement MVCC in ZODB. + +- For other storages, MVCC is implemented using an additional storage + layer. This underlying layer works by calling ``loadBefore``. The + low-level storage ``load`` method isn't used any more. + + This change allows server-based storages like ZEO and NEO to be + implemented more simply and cleanly. + +4.4.3 (2016-08-04) +================== + +- Internal FileStorage-undo fixes that should allow undo in some cases + where it didn't work before. + +- fstail: print the txn offset and header size, instead of only the data offset. + fstail can now be used to truncate a DB at the right offset. + +4.4.2 (2016-07-08) +================== + +Better support of the new commit protocol. This fixes issues with blobs and +undo. See pull requests #77, #80, #83 + +4.4.1 (2016-07-01) +================== + +Added IMultiCommitStorage to directly represent the changes in the 4.4.0 +release and to make complient storages introspectable. + +4.4.0 (2016-06-30) +================== + +This release begins evolution to a more effcient commit protocol that +allows storage implementations, like `NEO `_, +to support multiple transactions committing at the same time, for +greater write parallelism. + +This release updates IStorage: + +- The committed transaction's ID is returned by ``tpc_finish``, rather + than being returned in response store and tpc_vote results. + +- ``tpc_vote`` is now expected to return ``None`` or a list of object + ids for objects for which conflicts were resolved. 
+ +This release works with storages that implemented the older version of +the storage interface, but also supports storages that implement the +updated interface. + +4.3.1 (2016-06-06) +================== + +- Fixed: FileStorage loadBefore didn't handle deleted/undone data correctly. + +4.3.0 (2016-05-31) +================== + +- Drop support for Python 2.6 and 3.2. + +- Make the ``zodbpickle`` dependency required and not conditional. + This fixes various packaging issues involving pip and its wheel + cache. zodbpickle was only optional under Python 2.6 so this change + only impacts users of that version. See + https://github.com/zopefoundation/ZODB/pull/42. + +- Add support for Python 3.5. + +- Avoid failure during cleanup of nested databases that provide MVCC + on storage level (Relstorage). + https://github.com/zopefoundation/ZODB/issues/45 + +- Remove useless dependency to `zdaemon` in setup.py. Remove ZEO documentation. + Both were leftovers from the time where ZEO was part of this repository. + +- Fix possible data corruption after FileStorage is truncated to roll back a + transaction. + https://github.com/zopefoundation/ZODB/pull/52 + +- DemoStorage: add support for conflict resolution and fix history() + https://github.com/zopefoundation/ZODB/pull/58 + +- Fixed a test that depended on implementation-specific behavior in tpc_finish + +4.2.0 (2015-06-02) +================== + +- Declare conditional dependencies using PEP-426 environment markers + (fixing interation between pip 7's wheel cache and tox). See + https://github.com/zopefoundation/ZODB/issues/36. + +4.2.0b1 (2015-05-22) +==================== + +- Log failed conflict resolution attempts at ``DEBUG`` level. See: + https://github.com/zopefoundation/ZODB/pull/29. + +- Fix command-line parsing of ``--verbose`` and ``--verify`` arguments. + (The short versions, ``-v`` and ``-V``, were parsed correctly.) + +- Add support for PyPy. 
+ +- Fix the methods in ``ZODB.serialize`` that find object references + under Python 2.7 (used in scripts like ``referrers``, ``netspace``, + and ``fsrecover`` among others). This requires the addition of the + ``zodbpickle`` dependency. + +- FileStorage: fix an edge case when disk space runs out while packing, + do not leave the ``.pack`` file around. That would block any write to the + to-be-packed ``Data.fs``, because the disk would stay at 0 bytes free. + See https://github.com/zopefoundation/ZODB/pull/21. + +4.1.0 (2015-01-11) +================== + +- Fix registration of custom logging level names ("BLATHER", "TRACE"). + + We have been registering them in the wrong order since 2004. Before + Python 3.4, the stdlib ``logging`` module masked the error by registering + them in *both* directions. + +- Add support for Python 3.4. + +4.0.1 (2014-07-13) +================== + +- Fix ``POSKeyError`` during ``transaction.commit`` when after + ``savepoint.rollback``. See + https://github.com/zopefoundation/ZODB/issues/16 + +- Ensure that the pickler used in PyPy always has a ``persistent_id`` + attribute (``inst_persistent_id`` is not present on the pure-Python + pickler). (PR #17) + +- Provide better error reporting when trying to load an object on a + closed connection. + +4.0.0 (2013-08-18) +================== + +Finally released. + +4.0.0b3 (2013-06-11) +==================== + +- Switch to using non-backward-compatible pickles (protocol 3, without + storing bytes as strings) under Python 3. Updated the magic number + for file-storage files under Python3 to indicate the incompatibility. + +- Fixed: A ``UnicodeDecodeError`` could happen for non-ASCII OIDs + when using bushy blob layout. + +4.0.0b2 (2013-05-14) +==================== + +- Extended the filename renormalizer used for blob doctests to support + the filenames used by ZEO in non-shared mode. + +- Added ``url`` parameter to ``setup()`` (PyPI says it is required). 
+ +4.0.0b1 (2013-05-10) +===================== + +- Skipped non-unit tests in ``setup.py test``. Use the buildout to run tests + requiring "layer" support. + +- Included the filename in the exception message to support debugging in case + ``loadBlob`` does not find the file. + +- Added support for Python 3.2 / 3.3. + +.. note:: + + ZODB 4.0.x is supported on Python 3.x for *new* applications only. + Due to changes in the standard library's pickle support, the Python3 + support does **not** provide forward- or backward-compatibility + at the data level with Python2. A future version of ZODB may add + such support. + + Applications which need migrate data from Python2 to Python3 should + plan to script this migration using separte databases, e.g. via a + "dump-and-reload" approach, or by providing explicit fix-ups of the + pickled values as transactions are copied between storages. + + +4.0.0a4 (2012-12-17) +===================== + +- Enforced usage of bytes for ``_p_serial`` of persistent objects (fixes + compatibility with recent persistent releases). + +4.0.0a3 (2012-12-01) +===================== + +- Fixed: An elaborate test for trvial logic corrupted module state in a + way that made other tests fail spuriously. + +4.0.0a2 (2012-11-13) +===================== + +Bugs Fixed +---------- + +- An unneeded left-over setting in setup.py caused installation with + pip to fail. + +4.0.0a1 (2012-11-07) +===================== + +New Features +------------ + +- The ``persistent`` and ``BTrees`` packages are now released as separate + distributions, on which ZODB now depends. + +- ZODB no longer depends on zope.event. It now uses ZODB.event, which + uses zope.event if it is installed. You can override + ZODB.event.notify to provide your own event handling, although + zope.event is recommended. + +- BTrees allowed object keys with insane comparison. (Comparison + inherited from object, which compares based on in-process address.) 
+ Now BTrees raise TypeError if an attempt is made to save a key with + comparison inherited from object. (This doesn't apply to old-style + class instances.) + +Bugs Fixed +---------- + +- Ensured that the export file and index file created by ``repozo`` share + the same timestamp. + + https://bugs.launchpad.net/zodb/+bug/993350 + +- Pinned the ``transaction`` and ``manuel`` dependencies to Python 2.5- + compatible versions when installing under Python 2.5. + + +.. note:: + Please see https://github.com/zopefoundation/ZODB/blob/master/HISTORY.rst + for older versions of ZODB. + + diff --git a/thesisenv/lib/python3.6/site-packages/ZODB-5.5.0.dist-info/RECORD b/thesisenv/lib/python3.6/site-packages/ZODB-5.5.0.dist-info/RECORD new file mode 100644 index 0000000..7c70e58 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB-5.5.0.dist-info/RECORD @@ -0,0 +1,277 @@ +../../../bin/fsdump,sha256=c48yKlWbgjSrHWheWevKlpQ_K1oDYM7nQ578SUrgnQ0,254 +../../../bin/fsoids,sha256=YdDoEykNeZ4nGcUvqvd4mjvBG0cATj-_CZsgrcctEKE,250 +../../../bin/fsrefs,sha256=3wY5UwB0ZRAyBdCLVeMTtFcK2gX76CCjJWod7umbwrw,250 +../../../bin/fstail,sha256=PX3ENZ8A9In3m3YJuID7Z8Vg8JD9upKJINdFi3nvJGw,250 +../../../bin/repozo,sha256=4TLBJCat9hy0uUODmerVP1zVFcRCbxq1z0aBk1223bw,250 +ZODB-5.5.0.dist-info/DESCRIPTION.rst,sha256=qblfOeT02CHbJX3TadRP_OU9Co85WSOiQ18zwHSNjFU,18020 +ZODB-5.5.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +ZODB-5.5.0.dist-info/METADATA,sha256=W3bs6yKiCL0ai3xm3sNRPe2OPv9idXPHyVaSbE92i9A,19725 +ZODB-5.5.0.dist-info/RECORD,, +ZODB-5.5.0.dist-info/WHEEL,sha256=kdsN-5OJAZIiHN-iO4Rhl82KyS0bDWf4uBwMbkNafr8,110 +ZODB-5.5.0.dist-info/entry_points.txt,sha256=DfURVrlE-hp96z0SUIhMO81waW8vW2sN38l-a_hV4PY,233 +ZODB-5.5.0.dist-info/metadata.json,sha256=6RoiljEM4GFbg0e1xmJF-eSE6L9t_vBinpFP4W3Iqic,2252 +ZODB-5.5.0.dist-info/top_level.txt,sha256=o9jRNss1In57AUSkvNt0qA8rDVFASxILkoqe1uM5iPY,5 
+ZODB/ActivityMonitor.py,sha256=zVOPfwg5skIp8cR2_Aobkayivt8UnBdI8r6NrQ02FXg,3614 +ZODB/BaseStorage.py,sha256=j9nUA4iWzH4o5yFH0mKirx5DCijpJln5wAN7IzfeGU8,12996 +ZODB/ConflictResolution.py,sha256=fLV_j4a1iLxzNZL4a1_hcRfOQmqJnHO6LdznJr6d9NM,10946 +ZODB/ConflictResolution.txt,sha256=k8WiU8vLONpHKP-7LtH6scHQJlHvmns1FBTtd_bEZIc,20243 +ZODB/Connection.py,sha256=BIxNeUOdYGTQVsDWBZww2pLpwssTGjALBAnt_NrFxfg,50263 +ZODB/DB.py,sha256=06c41-wPsK8wggAdsdX4WNgi-0h-2HbKJ1nr_k3wtm0,39273 +ZODB/DemoStorage.py,sha256=XCVpoAnYFAR6lzb-paWNnH7yIB6r2WCoH3WcpLJk8lg,16304 +ZODB/DemoStorage.test,sha256=ez27oD8tXW-FtRxLdfbEmTUhLzL6uh7oJ1jvmEnsVlM,12421 +ZODB/ExportImport.py,sha256=qyZOGTyFS9iMAVp7kqA_BiShx5nnPKyL-mfbsYhMJUA,6985 +ZODB/FileStorage/FileStorage.py,sha256=YBfT091Frela-4bLx3EaAQoTZsCZ-mI2qrUlvNueY4M,77760 +ZODB/FileStorage/__init__.py,sha256=IDVAb0xyDp1bYtInsvl7b3P-4HbM9G_ONbyPQbcbQ0Q,238 +ZODB/FileStorage/__pycache__/FileStorage.cpython-36.pyc,, +ZODB/FileStorage/__pycache__/__init__.cpython-36.pyc,, +ZODB/FileStorage/__pycache__/format.cpython-36.pyc,, +ZODB/FileStorage/__pycache__/fsdump.cpython-36.pyc,, +ZODB/FileStorage/__pycache__/fsoids.cpython-36.pyc,, +ZODB/FileStorage/__pycache__/fspack.cpython-36.pyc,, +ZODB/FileStorage/__pycache__/interfaces.cpython-36.pyc,, +ZODB/FileStorage/__pycache__/tests.cpython-36.pyc,, +ZODB/FileStorage/format.py,sha256=mUMdirN4uDRDz4YzZwt9Y3Hqq2h1ttgZHLx2lvKmcVg,9449 +ZODB/FileStorage/fsdump.py,sha256=sRi2Wsf6tlNfT17j1FRnfZ18MnoKJiPmaKb4RvPuvGg,4735 +ZODB/FileStorage/fsoids.py,sha256=AF42VX29ppg2oFzgI3PHjao7zjgzQSHFYJmEsfhX_Tw,8115 +ZODB/FileStorage/fspack.py,sha256=XS4wQtA9sf4IboeZbAJTTprEyIeMQknOKrFtc8C-41c,24484 +ZODB/FileStorage/interfaces.py,sha256=VZ_8W1MFlYk6bKc_27l1i1JMXo65pqFPICLBTwoyDs8,2766 +ZODB/FileStorage/iterator.test,sha256=IAbsXEJX0es2W48T5ts-1QinKBoNH1Hm93AQZ47eDcQ,4856 +ZODB/FileStorage/tests.py,sha256=7zHwicYE8txsoOxAaN5xZiYE7uE6lFat4ypy-jL8AE4,9211 
+ZODB/FileStorage/zconfig.txt,sha256=ScCPDBH7UQpZzy3oVh4BCwtYfcnlFcZXP-gsZK6HK5E,5561 +ZODB/MappingStorage.py,sha256=b45jaLUgoH8xKXogtI8gOuCONTS03MK6tOaG7_Nm0KY,11732 +ZODB/POSException.py,sha256=P1AKpmaGxSpbCUXATSSQYhWHR9jPTWDQMO-n7OwEdsE,11650 +ZODB/UndoLogCompatible.py,sha256=Jhv-VhYO-wAYqSyxnekZYHvKM4cglzvCTEcqq6bSc-8,1548 +ZODB/__init__.py,sha256=kFYv_AeZAl9HBciOvVUJadL3nCZhD3CxthRSF2SiHno,1052 +ZODB/__pycache__/ActivityMonitor.cpython-36.pyc,, +ZODB/__pycache__/BaseStorage.cpython-36.pyc,, +ZODB/__pycache__/ConflictResolution.cpython-36.pyc,, +ZODB/__pycache__/Connection.cpython-36.pyc,, +ZODB/__pycache__/DB.cpython-36.pyc,, +ZODB/__pycache__/DemoStorage.cpython-36.pyc,, +ZODB/__pycache__/ExportImport.cpython-36.pyc,, +ZODB/__pycache__/MappingStorage.cpython-36.pyc,, +ZODB/__pycache__/POSException.cpython-36.pyc,, +ZODB/__pycache__/UndoLogCompatible.cpython-36.pyc,, +ZODB/__pycache__/__init__.cpython-36.pyc,, +ZODB/__pycache__/_compat.cpython-36.pyc,, +ZODB/__pycache__/blob.cpython-36.pyc,, +ZODB/__pycache__/broken.cpython-36.pyc,, +ZODB/__pycache__/config.cpython-36.pyc,, +ZODB/__pycache__/conversionhack.cpython-36.pyc,, +ZODB/__pycache__/event.cpython-36.pyc,, +ZODB/__pycache__/fsIndex.cpython-36.pyc,, +ZODB/__pycache__/fsrecover.cpython-36.pyc,, +ZODB/__pycache__/fstools.cpython-36.pyc,, +ZODB/__pycache__/interfaces.cpython-36.pyc,, +ZODB/__pycache__/loglevels.cpython-36.pyc,, +ZODB/__pycache__/mvccadapter.cpython-36.pyc,, +ZODB/__pycache__/persistentclass.cpython-36.pyc,, +ZODB/__pycache__/serialize.cpython-36.pyc,, +ZODB/__pycache__/transact.cpython-36.pyc,, +ZODB/__pycache__/utils.cpython-36.pyc,, +ZODB/__pycache__/valuedoc.cpython-36.pyc,, +ZODB/_compat.py,sha256=ClvbWhWM4APyub53Og9fnCK33hZKKKWwVnCFhlbcdMQ,4836 +ZODB/blob.py,sha256=Ec6jsPH6FIhfCRLrOYJKjxUqPVMHS0aqF0zWprZJMYw,36303 +ZODB/broken.py,sha256=Xaz9H1g8pXf9zB9_Gdkf7CMaJWl97ag_wdJiR-Gb80Y,10343 +ZODB/collaborations.txt,sha256=23wRocUfgFGUpXUhxgzGpBWBPbbHsOqklQzlKzPtS3s,5952 
+ZODB/component.xml,sha256=BFdewxwIJYGqEbELbZmivozYrFUJB39Nhgzz31vcSe0,13074 +ZODB/config.py,sha256=XNSm64S6nKFzIdK7_Yj2kPU9zlk7PywGc2xxiyl2v6Q,8903 +ZODB/config.xml,sha256=NwnnhqBLaMTtRj9Hp4lMNckuHu7Bj86ugYDhLoLAj-g,171 +ZODB/conversionhack.py,sha256=nNowcKI4zw0tVSGoqGUn5Aw8gDA1AXjRHs_RPXFXWTg,1051 +ZODB/cross-database-references.txt,sha256=HT_GQKFLbLqteep_shPOhFDFULIZUd082FaHVIAUP3A,6277 +ZODB/event.py,sha256=JrhsdLuV8A9vPS3bAL0IE0I7QoRZWQjEiChKkMEWqYo,719 +ZODB/event.txt,sha256=_QIeNrG9kDSf09ZvNKZhZEWyZ9Fw-m-RZb_zDnXo5LE,384 +ZODB/fsIndex.py,sha256=0jfDQBY4MPqlLIEnlriSQ6TFhyrKuQzUl7BYyObAbck,8772 +ZODB/fsrecover.py,sha256=ybtEQhIWZ6jt-JIrjW0TXXp3wdSZz9s1w0zQX45PWcs,10489 +ZODB/fstools.py,sha256=QzL5WsR2g-tW4-sqU-GYdC9iZSycb8hCTi8uaPheaBU,4857 +ZODB/historical_connections.txt,sha256=uQTQQRthLtRMANV8S_2Cnd2r3wlU-90p1Zpnf2kgU54,10449 +ZODB/interfaces.py,sha256=iot5MyJKlWNeX7NLeCO9HIGCMNwIXN0brl0mNHs9R0s,53770 +ZODB/loglevels.py,sha256=xqQ8wfrZ0BtoF8gpymIjlMi3SBHcvpp8fkJwW5g6yBo,1776 +ZODB/mvccadapter.py,sha256=nRjxnOljV7rdt_BDy6l3BOPbEqKAeAYysKuOFdQytK8,7709 +ZODB/persistentclass.py,sha256=CMCRa-btVFau5TAn5D7SBf-Ma1VWCruPmCxIG8opvec,6695 +ZODB/persistentclass.txt,sha256=IQatQwG4dPBchrHjCMzVVj3qR_9n2KFvAT1KDjWZcWE,7635 +ZODB/scripts/README.txt,sha256=u77Pfrs8j6KCwWdMPpDFGVCekVc0-De98_jDb1oCMHk,3540 +ZODB/scripts/__init__.py,sha256=MsSFjiLMLJZ7QhUPpVBWKiyDnCzryquRyr329NoCACI,2 +ZODB/scripts/__pycache__/__init__.cpython-36.pyc,, +ZODB/scripts/__pycache__/analyze.cpython-36.pyc,, +ZODB/scripts/__pycache__/checkbtrees.cpython-36.pyc,, +ZODB/scripts/__pycache__/fsoids.cpython-36.pyc,, +ZODB/scripts/__pycache__/fsrefs.cpython-36.pyc,, +ZODB/scripts/__pycache__/fsstats.cpython-36.pyc,, +ZODB/scripts/__pycache__/fstail.cpython-36.pyc,, +ZODB/scripts/__pycache__/fstest.cpython-36.pyc,, +ZODB/scripts/__pycache__/migrate.cpython-36.pyc,, +ZODB/scripts/__pycache__/migrateblobs.cpython-36.pyc,, +ZODB/scripts/__pycache__/netspace.cpython-36.pyc,, 
+ZODB/scripts/__pycache__/referrers.cpython-36.pyc,, +ZODB/scripts/__pycache__/repozo.cpython-36.pyc,, +ZODB/scripts/__pycache__/space.cpython-36.pyc,, +ZODB/scripts/__pycache__/zodbload.cpython-36.pyc,, +ZODB/scripts/analyze.py,sha256=wDv0bgadI8_JSrbeYSEMLKZtgnMF-h_awpYhH3_YPZ0,4447 +ZODB/scripts/checkbtrees.py,sha256=qFJspJDmprD0iHh6dZQmYay8ygR02q4PSWB-ijMa6SY,3143 +ZODB/scripts/fsoids.py,sha256=AbiG-dh1Fr1zV6-eGI-SQ4BRyeW3tPCgwRrNiLK_MlI,2386 +ZODB/scripts/fsrefs.py,sha256=tDh0iFjygDZAS6fEjcG-QFA4yj3jIew2RaHD3X1ME88,5979 +ZODB/scripts/fsstats.py,sha256=LCdFPeq5HTc78KP5YuUv8TbKDNRNbE7_CsKhICy8Jvc,5870 +ZODB/scripts/fstail.py,sha256=cc-98o6pJOWt5QG87jXQ1egcbpqX9nbexUQAKWt948E,1772 +ZODB/scripts/fstest.py,sha256=vBHK4j8alFC-iTdWjcW8kSuqpkTbaElOJhvTZxc63hY,6855 +ZODB/scripts/manual_tests/__pycache__/testfstest.cpython-36.pyc,, +ZODB/scripts/manual_tests/test-checker.fs,sha256=lVSVmB6r85jF2Cvx694IARX1NqKUvL0QFJGX6-QnT5c,802 +ZODB/scripts/manual_tests/testfstest.py,sha256=DyHOBJSedFLZydxfJLy6xFGo5YLn41SXxkXcQs9MoP4,5494 +ZODB/scripts/migrate.py,sha256=WZrjDXYJUADga26_rQEjEACZCFCNccQ1qwag6Go9SWU,11224 +ZODB/scripts/migrateblobs.py,sha256=AejRHXQgAvH5LEBX7Wypyw82ZhJ9YY1sx7BfPWKKCpE,2758 +ZODB/scripts/netspace.py,sha256=wh9s17zLzn-v0oUqrLWm2JzPZpxL9THzBBXEl0SDrrU,3429 +ZODB/scripts/referrers.py,sha256=w9ccX5l9BppVvUgVzVPXl2puq_Sbi4dOT6aunhxiqm0,990 +ZODB/scripts/repozo.py,sha256=zXEXqkxpwz_ZXkxo53-33jYOlRh0EVrTyrHCKarIu_k,24832 +ZODB/scripts/space.py,sha256=ksx_5nUIZaIYsixS047A9brcT0vWh1Fl1MaEP6LTEIs,1641 +ZODB/scripts/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +ZODB/scripts/tests/__pycache__/__init__.cpython-36.pyc,, +ZODB/scripts/tests/__pycache__/test_doc.cpython-36.pyc,, +ZODB/scripts/tests/__pycache__/test_fstest.cpython-36.pyc,, +ZODB/scripts/tests/__pycache__/test_repozo.cpython-36.pyc,, +ZODB/scripts/tests/fstail.txt,sha256=vr91-Z3xkJPg5cZqhYljB5ioKE7-ARCclonjIN6cXuI,1260 
+ZODB/scripts/tests/referrers.txt,sha256=wsO4QWm7iE_BioIeo4OrK4G9NhxsKw1Z614LiuxAKPo,1348 +ZODB/scripts/tests/test_doc.py,sha256=2YRFHNKcpJjS-_7fonp9JtVB7uJkqmiPcCvFDqhxaEQ,1895 +ZODB/scripts/tests/test_fstest.py,sha256=8RhATUXkmOIff2w2J8-lUtFh_H_CYgyF-pI4-Fc_7IM,1779 +ZODB/scripts/tests/test_repozo.py,sha256=23ycBGWJL8yQGaTTT_lJsCGaNFrIbvYxR74QQGXlM64,47523 +ZODB/scripts/zodbload.py,sha256=wHnALLJhSuwrClDvQeKM9NHaUvx7Px4lYsEyeXBp7YA,30889 +ZODB/serialize.py,sha256=nnn4AFZJS_ltN87_CpxgvLju4Ks-KlgdoHeza7C8W68,24200 +ZODB/storage.xml,sha256=SdtD4K_JJS2UleuMJZF-UntJbfxlfVblngJMtxwXOsU,104 +ZODB/subtransactions.txt,sha256=fTDHzN9OgiHXCy_A1vzpnOw000LTkfTpOZ4WcWPkkAc,1486 +ZODB/tests/BasicStorage.py,sha256=5fcQskXFiK5aeFVcoI0YVMKEiRDygT89e8dNefAcOso,14612 +ZODB/tests/ConflictResolution.py,sha256=xSteVS4yiHrrpKx-xbniGGyXSXVTdAk7PwjQuVrcsSY,5918 +ZODB/tests/Corruption.py,sha256=81ns9DNxcps3ZWA5lKvWsDgdMvjHRRPQiQt2cqNqIEI,2345 +ZODB/tests/HistoryStorage.py,sha256=w2z6wURDs2rf5jDMH3YWvA4D6BhlbTwGiQL4eSLDpGg,2112 +ZODB/tests/IExternalGC.test,sha256=fax3Uy0UUfPcaiqVEP1duflZAGCxCOWerqqtZotfOIc,3691 +ZODB/tests/IteratorStorage.py,sha256=-aozfL2tfa1KawnMelCFs80trH_fyyULs7RaK8Jn9dw,9976 +ZODB/tests/MTStorage.py,sha256=RhqgFOa3sFwen6UeO0X_642Fodt9qkdTY97glVw71bw,7325 +ZODB/tests/MVCCMappingStorage.py,sha256=o_c7wHfev4k4b8yhP7AwhQdr8bp5BJze3DW20Ao_4Ak,4674 +ZODB/tests/MinPO.py,sha256=Q4lXLgDUrdsGFtp8EBA8WRN1ccxQ0R4tk7pxJBabvsU,1582 +ZODB/tests/PackableStorage.py,sha256=5abCkd2aCFHdEqiDzSaUlxddztYTeHMQfablWp9ptvI,29282 +ZODB/tests/PersistentStorage.py,sha256=waZwrWAcwrItbLXKobWPtHPXZsy_HyXm9MPx44SNqLk,1741 +ZODB/tests/ReadOnlyStorage.py,sha256=53UrUz551xcgRtR8l7xSlq0JY2JUVE0Ghz9wrcX9OIQ,2219 +ZODB/tests/RecoveryStorage.py,sha256=BA_aGKhbi9B7dj5v6m7wCCAQNdSgBryaBqLMVUqpmVE,7899 +ZODB/tests/RevisionStorage.py,sha256=z3nJ1_IL7q40hfR8Rhsra5BbDn0Ho65WPVsWz_CuM94,6734 +ZODB/tests/StorageTestBase.py,sha256=LQB77HHm2wjgbx0c4sBWmfer9__6vyl2md0lZTWvYm0,6124 
+ZODB/tests/Synchronization.py,sha256=K11z2sadixu7Au-YyYchK8QyPs--BQkrm9Ik172RS3Q,4377 +ZODB/tests/TransactionalUndoStorage.py,sha256=uQE7tKwQvL3LqQiW6v466Js6NN9kzAWVu2tX4VVR6LQ,28041 +ZODB/tests/__init__.py,sha256=i54so5PgcCHmet8ZG90HheVvx-lx5v9aVywAIcLI6D0,38 +ZODB/tests/__pycache__/BasicStorage.cpython-36.pyc,, +ZODB/tests/__pycache__/ConflictResolution.cpython-36.pyc,, +ZODB/tests/__pycache__/Corruption.cpython-36.pyc,, +ZODB/tests/__pycache__/HistoryStorage.cpython-36.pyc,, +ZODB/tests/__pycache__/IteratorStorage.cpython-36.pyc,, +ZODB/tests/__pycache__/MTStorage.cpython-36.pyc,, +ZODB/tests/__pycache__/MVCCMappingStorage.cpython-36.pyc,, +ZODB/tests/__pycache__/MinPO.cpython-36.pyc,, +ZODB/tests/__pycache__/PackableStorage.cpython-36.pyc,, +ZODB/tests/__pycache__/PersistentStorage.cpython-36.pyc,, +ZODB/tests/__pycache__/ReadOnlyStorage.cpython-36.pyc,, +ZODB/tests/__pycache__/RecoveryStorage.cpython-36.pyc,, +ZODB/tests/__pycache__/RevisionStorage.cpython-36.pyc,, +ZODB/tests/__pycache__/StorageTestBase.cpython-36.pyc,, +ZODB/tests/__pycache__/Synchronization.cpython-36.pyc,, +ZODB/tests/__pycache__/TransactionalUndoStorage.cpython-36.pyc,, +ZODB/tests/__pycache__/__init__.cpython-36.pyc,, +ZODB/tests/__pycache__/dangle.cpython-36.pyc,, +ZODB/tests/__pycache__/hexstorage.cpython-36.pyc,, +ZODB/tests/__pycache__/loggingsupport.cpython-36.pyc,, +ZODB/tests/__pycache__/sampledm.cpython-36.pyc,, +ZODB/tests/__pycache__/speed.cpython-36.pyc,, +ZODB/tests/__pycache__/testActivityMonitor.cpython-36.pyc,, +ZODB/tests/__pycache__/testBroken.cpython-36.pyc,, +ZODB/tests/__pycache__/testCache.cpython-36.pyc,, +ZODB/tests/__pycache__/testConfig.cpython-36.pyc,, +ZODB/tests/__pycache__/testConnection.cpython-36.pyc,, +ZODB/tests/__pycache__/testConnectionSavepoint.cpython-36.pyc,, +ZODB/tests/__pycache__/testDB.cpython-36.pyc,, +ZODB/tests/__pycache__/testDemoStorage.cpython-36.pyc,, +ZODB/tests/__pycache__/testFileStorage.cpython-36.pyc,, 
+ZODB/tests/__pycache__/testMVCCMappingStorage.cpython-36.pyc,, +ZODB/tests/__pycache__/testMappingStorage.cpython-36.pyc,, +ZODB/tests/__pycache__/testPersistentList.cpython-36.pyc,, +ZODB/tests/__pycache__/testPersistentMapping.cpython-36.pyc,, +ZODB/tests/__pycache__/testPersistentWeakref.cpython-36.pyc,, +ZODB/tests/__pycache__/testRecover.cpython-36.pyc,, +ZODB/tests/__pycache__/testSerialize.cpython-36.pyc,, +ZODB/tests/__pycache__/testUtils.cpython-36.pyc,, +ZODB/tests/__pycache__/testZODB.cpython-36.pyc,, +ZODB/tests/__pycache__/test_TransactionMetaData.cpython-36.pyc,, +ZODB/tests/__pycache__/test_cache.cpython-36.pyc,, +ZODB/tests/__pycache__/test_datamanageradapter.cpython-36.pyc,, +ZODB/tests/__pycache__/test_doctest_files.cpython-36.pyc,, +ZODB/tests/__pycache__/test_fsdump.cpython-36.pyc,, +ZODB/tests/__pycache__/test_mvccadapter.cpython-36.pyc,, +ZODB/tests/__pycache__/test_prefetch.cpython-36.pyc,, +ZODB/tests/__pycache__/test_storage.cpython-36.pyc,, +ZODB/tests/__pycache__/testblob.cpython-36.pyc,, +ZODB/tests/__pycache__/testconflictresolution.cpython-36.pyc,, +ZODB/tests/__pycache__/testcrossdatabasereferences.cpython-36.pyc,, +ZODB/tests/__pycache__/testdocumentation.cpython-36.pyc,, +ZODB/tests/__pycache__/testfsIndex.cpython-36.pyc,, +ZODB/tests/__pycache__/testfsoids.cpython-36.pyc,, +ZODB/tests/__pycache__/testhistoricalconnections.cpython-36.pyc,, +ZODB/tests/__pycache__/testmvcc.cpython-36.pyc,, +ZODB/tests/__pycache__/testpersistentclass.cpython-36.pyc,, +ZODB/tests/__pycache__/util.cpython-36.pyc,, +ZODB/tests/__pycache__/warnhook.cpython-36.pyc,, +ZODB/tests/blob_basic.txt,sha256=jnHy3M06TLvBjMcdrgW0Tp0JtvnSf_h1SmwfQmzPjOs,4814 +ZODB/tests/blob_connection.txt,sha256=VOOJ-X7kkJlyCn49sA62artSpXfuVa_GRokzSK9ZpNg,2777 +ZODB/tests/blob_consume.txt,sha256=OPURjaQ1bcJJUuRRe6dV8JeG7RhzpXnq21M6o3fYcqY,3797 +ZODB/tests/blob_importexport.txt,sha256=8_WPJWijsXuolDtFxhLbJsCcF1oXSUrJS8M-MBmZQNA,1986 
+ZODB/tests/blob_layout.txt,sha256=bsljiF1LKt_mZSaWr224kPowEHGhwdPx_6WdrQFp3Lw,10572 +ZODB/tests/blob_packing.txt,sha256=dmfDafadrV5gbEWEO9Xi88jwIiGQXjlKOXA8XQXZNro,3195 +ZODB/tests/blob_tempdir.txt,sha256=blzd5PXlXqbqC1I6rWn7qDEcFeX4Fou5yNkbA1CkDSA,1566 +ZODB/tests/blob_transaction.txt,sha256=Vh2stDZjZ1VQ_djVnihYjXzgonOhuZuThWEDY-BVKVE,11898 +ZODB/tests/blobstorage_packing.txt,sha256=2Lc9o_RhCTiXKg5kY64cZ1p83fKy7OQF1boOtvkt3vo,5292 +ZODB/tests/component.xml,sha256=A_nQOWBTCo-jyWaTtlbNvrfH-IY1n25_eQFwvN9Ii1c,475 +ZODB/tests/dangle.py,sha256=6UgsMH6SkZH0h8PR69-2kiopoKRXFlK5yp4CfD4khNE,1807 +ZODB/tests/dbopen.txt,sha256=JsITmwoYuFJSdxjgDLV0bFdtsvok2a2eYzqOqixELHE,10571 +ZODB/tests/fix84.rst,sha256=SiKIXI8oGufFN8S0hcfKI4qTSvbffCDgvvplcNrdXzg,797 +ZODB/tests/hexstorage.py,sha256=etsuRs0f1dFJw9LNL5oPQcx4AMZ1NJI22OU_n7dWGiA,5445 +ZODB/tests/loggingsupport.py,sha256=d6C0nDMl5nT7LhplqiELIMckuyR7U0oqACEgIyDEqNg,3389 +ZODB/tests/multidb.txt,sha256=AzVBkyaPZ3m-dlaciCgi15DrQ1-4c7jA0S8E-84wywg,5913 +ZODB/tests/sampledm.py,sha256=vywl-oqkQVU8EmlcP3AOK50uxVLyvcX9c8pmBSKH0mA,10692 +ZODB/tests/speed.py,sha256=WGLC2HKbxCDLdFsKwIruAtxSQC4P3uqiwFYVp30za1s,3573 +ZODB/tests/synchronizers.txt,sha256=JsY8EJxPna5oa73VVRp1sdAvFLrwbBJ-pmyQTl9zaM0,2621 +ZODB/tests/testActivityMonitor.py,sha256=VFwHtHFCWFZLHc4dOtfu2XLMsqTasuUbuBZ1fT60QsM,3247 +ZODB/tests/testBroken.py,sha256=VkPMIXbq_fB8pXoHYbld3BXF0_jaYUitaVgVOiVay4k,2675 +ZODB/tests/testCache.py,sha256=OdV_-oB_0N9fdQwC3qkvc_2V5tWExQVueoDbwh2gQrA,18490 +ZODB/tests/testConfig.py,sha256=5n1GAA0UbucUzBnSOGylBcAk4-ag4ae5sB03kOUCeQI,5466 +ZODB/tests/testConnection.py,sha256=jjuaseuYifMKyN13tFwW3aoie4WhtXkr54sshECl88Y,43580 +ZODB/tests/testConnectionSavepoint.py,sha256=XndyQQTUjglcoSM5zSPv_4z9U8LUPEbdxq2CtgSRyAw,6650 +ZODB/tests/testConnectionSavepoint.txt,sha256=RNJC5hZNdElunbWLAx77SZkKrxw7jDWyOl58tkHP_js,5272 +ZODB/tests/testDB.py,sha256=wftDiIAKXhcq30P-CaZliifijH1OLRzipAo4vuTw9bI,11585 
+ZODB/tests/testDemoStorage.py,sha256=diK4oDHtCke4_6AsMlsHr3x5feWJBdi0kBZCYECQN30,8925 +ZODB/tests/testFileStorage.py,sha256=5siSfWJg98trtOsFSTGtQsjtU-NLnUjkAppKrgZ2xHU,25314 +ZODB/tests/testMVCCMappingStorage.py,sha256=ghgy3CRqNlybBEkLN7CU7kiVqTRbhyWogAxCLaAOezc,6860 +ZODB/tests/testMappingStorage.py,sha256=20UOChHCMxPH1Mfe8yr1X-vd-Yw7QjcvZJC-5hG7KPc,3172 +ZODB/tests/testPersistentList.py,sha256=k4am83pfO36SwA-3G_rs7F1ADZL-_wK-m399BNzxoN0,6086 +ZODB/tests/testPersistentMapping.py,sha256=yJx8elL6U0O6KzvoEKcJtisg9qYTG6F-uATESM09hNs,5093 +ZODB/tests/testPersistentWeakref.py,sha256=2dfukkypsAHga6C6d_dEw8i6XtS8j_X43ylGdHHXY2Q,7457 +ZODB/tests/testRecover.py,sha256=iLS03NyktNBsH8By98svA2IfA6nPpiHbbtS0wFa-tQU,7882 +ZODB/tests/testSerialize.py,sha256=Wld1xE__VSHHyCummNKAf-bH5D_B_i1Bx9pCukBN1bk,8741 +ZODB/tests/testUtils.py,sha256=c4YqY75_TN642Bh5mZP2OHd_P0kjY-STZJg_9SGowNw,6199 +ZODB/tests/testZODB.py,sha256=PVkhxraA8XnARynPfPxTOUIhXxRuc1kcBrUaRWafpYI,22274 +ZODB/tests/test_TransactionMetaData.py,sha256=yEQghADGjF2l6WCbyloJRMydcaw_UlXA21QL79fKHzk,4502 +ZODB/tests/test_cache.py,sha256=bDFJdPel2Uenk_v0M6yZcGht-H7RlZNjcVqYrQW334M,7128 +ZODB/tests/test_datamanageradapter.py,sha256=UX5ToCspuxON3Qp2CCoipp-AR_rtPCvDYmsW6UQkifM,5245 +ZODB/tests/test_doctest_files.py,sha256=c0ruQk4LjDxrMVDT-7lE0EB76_aV2h2AV83lMV_cD3c,1814 +ZODB/tests/test_fsdump.py,sha256=jKG09S5tFAVH2kyDfoTQ9r1W2AWe-PpmCak10SO7aKA,2980 +ZODB/tests/test_mvccadapter.py,sha256=Q2KS-KS2w9Dkgws_3k2ty7Gszrw3g2cNTxR1wsriwbA,1718 +ZODB/tests/test_prefetch.py,sha256=vMDgx76KIYa4IClHa8yguClefBMvDNAUxZJ-Uhc6ayU,1629 +ZODB/tests/test_storage.py,sha256=vrwCRaFYZqhJMlcOH4U-tC9ql61sTVKHRqZ6E0b9hs0,4895 +ZODB/tests/testblob.py,sha256=1yWgBCLggmNZGihR5B2TC43ZCuc3tNl7wVi0FGQO_Pw,25400 +ZODB/tests/testconflictresolution.py,sha256=uc6VSplnP0N_3p4kqboyWRfAQSl-DmnsS6IBmrs-YSQ,12029 +ZODB/tests/testcrossdatabasereferences.py,sha256=DKpAGIYQmXoAWis08NH742qv-QmtCTHeaUkMankxWk4,5984 
+ZODB/tests/testdocumentation.py,sha256=UfnggH7vMQKvG1pn2NNmYXDTFtzteeTKSlVXdIHfYCI,1799 +ZODB/tests/testfsIndex.py,sha256=WCIngHitrC9X4QZCk6KXk6aAoKsVSfxpFvoMGRVdvZw,6911 +ZODB/tests/testfsoids.py,sha256=Zh5JBZbQtoeasKY2-V4cFdzFmcwWwcbyAOewlj5sbSg,7059 +ZODB/tests/testhistoricalconnections.py,sha256=tcA2yh7Eyg_9SkfgWRp9IivQ404umpa0-UF9Lab83s4,1004 +ZODB/tests/testmvcc.py,sha256=oi4Guo7442ci1Pel2rxBc-FlOJgw985xn5-yLiLvGBo,13224 +ZODB/tests/testpersistentclass.py,sha256=S2wCuPdv0pvGD-IiF5UkpPOvW__W-RlwkK9tCjrDSFk,2656 +ZODB/tests/util.py,sha256=gVxllESudiBs-EpbcHLuf5pKHvO_Zh4TkFSCeQo-yKA,10193 +ZODB/tests/warnhook.py,sha256=_akvqWZecvkL4nkQocgfYZI5pI6xOvy_47PQGZQnMQo,2122 +ZODB/transact.py,sha256=9yk1aDNeRyz6SX_EPIW-9rM4mX3YIMhXIJ8qKKfdH-M,1995 +ZODB/utils.py,sha256=8oj6wcSQ6x532qq-Vv0uEzdXVknHYBKytG6nNPVrWcw,11547 +ZODB/utils.txt,sha256=9v4O-TKoTpSdj5GlswLNGKo5w5y1Hpl7Euj28r_NkN0,5947 +ZODB/valuedoc.py,sha256=ipb-nNxMnGVifVVyIgXhvJTGxcfcYgaq2yCF0QA2W9M,308 diff --git a/thesisenv/lib/python3.6/site-packages/ZODB-5.5.0.dist-info/WHEEL b/thesisenv/lib/python3.6/site-packages/ZODB-5.5.0.dist-info/WHEEL new file mode 100644 index 0000000..7332a41 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB-5.5.0.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.30.0) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/thesisenv/lib/python3.6/site-packages/ZODB-5.5.0.dist-info/entry_points.txt b/thesisenv/lib/python3.6/site-packages/ZODB-5.5.0.dist-info/entry_points.txt new file mode 100644 index 0000000..a698837 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB-5.5.0.dist-info/entry_points.txt @@ -0,0 +1,8 @@ + + [console_scripts] + fsdump = ZODB.FileStorage.fsdump:main + fsoids = ZODB.scripts.fsoids:main + fsrefs = ZODB.scripts.fsrefs:main + fstail = ZODB.scripts.fstail:Main + repozo = ZODB.scripts.repozo:main + \ No newline at end of file diff --git 
a/thesisenv/lib/python3.6/site-packages/ZODB-5.5.0.dist-info/metadata.json b/thesisenv/lib/python3.6/site-packages/ZODB-5.5.0.dist-info/metadata.json new file mode 100644 index 0000000..72800f3 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB-5.5.0.dist-info/metadata.json @@ -0,0 +1 @@ +{"classifiers": ["Intended Audience :: Developers", "License :: OSI Approved :: Zope Public License", "Programming Language :: Python", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", "Programming Language :: Python :: Implementation :: CPython", "Programming Language :: Python :: Implementation :: PyPy", "Topic :: Database", "Topic :: Software Development :: Libraries :: Python Modules", "Operating System :: Microsoft :: Windows", "Operating System :: Unix", "Framework :: ZODB"], "description_content_type": "UNKNOWN", "extensions": {"python.commands": {"wrap_console": {"fsdump": "ZODB.FileStorage.fsdump:main", "fsoids": "ZODB.scripts.fsoids:main", "fsrefs": "ZODB.scripts.fsrefs:main", "fstail": "ZODB.scripts.fstail:Main", "repozo": "ZODB.scripts.repozo:main"}}, "python.details": {"contacts": [{"email": "zodb-dev@zope.org", "name": "Zope Foundation and Contributors", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst"}, "project_urls": {"Home": "http://www.zodb.org/"}}, "python.exports": {"console_scripts": {"fsdump": "ZODB.FileStorage.fsdump:main", "fsoids": "ZODB.scripts.fsoids:main", "fsrefs": "ZODB.scripts.fsrefs:main", "fstail": "ZODB.scripts.fstail:Main", "repozo": "ZODB.scripts.repozo:main"}}}, "extras": ["test"], "generator": "bdist_wheel (0.30.0)", "keywords": ["database", "nosql", "python", "zope"], "license": "ZPL 2.1", "metadata_version": "2.0", "name": "ZODB", "platform": "any", "requires_python": 
">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*", "run_requires": [{"requires": ["BTrees (>=4.2.0)", "ZConfig", "persistent (>=4.4.0)", "six", "transaction (>=2.0.3)", "zc.lockfile", "zodbpickle (>=1.0.1)", "zope.interface"]}, {"extra": "test", "requires": ["manuel", "zope.testing", "zope.testrunner (>=4.4.6)"]}, {"environment": "python_version == \"2.7\"", "extra": "test", "requires": ["mock"]}], "summary": "ZODB, a Python object-oriented database", "test_requires": [{"requires": ["manuel", "mock", "zope.testing", "zope.testrunner (>=4.4.6)"]}], "version": "5.5.0"} \ No newline at end of file diff --git a/thesisenv/lib/python3.6/site-packages/ZODB-5.5.0.dist-info/top_level.txt b/thesisenv/lib/python3.6/site-packages/ZODB-5.5.0.dist-info/top_level.txt new file mode 100644 index 0000000..4247b32 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB-5.5.0.dist-info/top_level.txt @@ -0,0 +1 @@ +ZODB diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/ActivityMonitor.py b/thesisenv/lib/python3.6/site-packages/ZODB/ActivityMonitor.py new file mode 100644 index 0000000..a4140e9 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/ActivityMonitor.py @@ -0,0 +1,109 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## +"""ZODB transfer activity monitoring +""" + +import time + +from . 
import utils + + +class ActivityMonitor(object): + """ZODB load/store activity monitor + + This simple implementation just keeps a small log in memory + and iterates over the log when getActivityAnalysis() is called. + + It assumes that log entries are added in chronological sequence. + """ + + def __init__(self, history_length=3600): + self.history_length = history_length # Number of seconds + self.log = [] # [(time, loads, stores)] + self.trim_lock = utils.Lock() + + def closedConnection(self, conn): + log = self.log + now = time.time() + loads, stores = conn.getTransferCounts(1) + log.append((now, loads, stores)) + self.trim(now) + + def trim(self, now): + with self.trim_lock: + log = self.log + cutoff = now - self.history_length + n = 0 + loglen = len(log) + while n < loglen and log[n][0] < cutoff: + n = n + 1 + if n: + del log[:n] + + def setHistoryLength(self, history_length): + self.history_length = history_length + self.trim(time.time()) + + def getHistoryLength(self): + return self.history_length + + def getActivityAnalysis(self, start=0, end=0, divisions=10): + res = [] + now = time.time() + if start == 0: + start = now - self.history_length + if end == 0: + end = now + for n in range(divisions): + res.append({ + 'start': start + (end - start) * n / divisions, + 'end': start + (end - start) * (n + 1) / divisions, + 'loads': 0, + 'stores': 0, + 'connections': 0, + }) + + div = res[0] + div_end = div['end'] + div_index = 0 + connections = 0 + total_loads = 0 + total_stores = 0 + for t, loads, stores in self.log: + if t < start: + # We could use a binary search to find the start. + continue + elif t > end: + # We could use a binary search to find the end also. 
+ break + while t > div_end: + div['loads'] = total_loads + div['stores'] = total_stores + div['connections'] = connections + total_loads = 0 + total_stores = 0 + connections = 0 + div_index = div_index + 1 + if div_index < divisions: + div = res[div_index] + div_end = div['end'] + connections = connections + 1 + total_loads = total_loads + loads + total_stores = total_stores + stores + + div['stores'] = div['stores'] + total_stores + div['loads'] = div['loads'] + total_loads + div['connections'] = div['connections'] + connections + + return res diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/BaseStorage.py b/thesisenv/lib/python3.6/site-packages/ZODB/BaseStorage.py new file mode 100644 index 0000000..b3041ba --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/BaseStorage.py @@ -0,0 +1,381 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## +"""Storage base class that is mostly a mistake + +The base class here is tightly coupled with its subclasses and +its use is not recommended. It's still here for historical reasons. +""" +from __future__ import print_function + +import time +import logging +import sys +from struct import pack as _structpack, unpack as _structunpack + +import zope.interface +from persistent.TimeStamp import TimeStamp + +import ZODB.interfaces +from . 
import POSException, utils +from .Connection import TransactionMetaData +from .utils import z64, oid_repr, byte_ord, byte_chr, load_current +from .UndoLogCompatible import UndoLogCompatible +from ._compat import dumps, _protocol, py2_hasattr + +log = logging.getLogger("ZODB.BaseStorage") + +class BaseStorage(UndoLogCompatible): + """Base class that supports storage implementations. + + XXX Base classes like this are an attractive nuisance. They often + introduce more complexity than they save. While important logic + is implemented here, we should consider exposing it as utility + functions or as objects that can be used through composition. + + A subclass must define the following methods: + load() + store() + close() + cleanup() + lastTransaction() + + It must override these hooks: + _begin() + _vote() + _abort() + _finish() + _clear_temp() + + If it stores multiple revisions, it should implement + loadSerial() + loadBefore() + + Each storage will have two locks that are accessed via lock + acquire and release methods bound to the instance. (Yuck.) + _lock_acquire / _lock_release (reentrant) + _commit_lock_acquire / _commit_lock_release + + The commit lock is acquired in tpc_begin() and released in + tpc_abort() and tpc_finish(). It is never acquired with the other + lock held. + + The other lock appears to protect _oid and _transaction and + perhaps other things. It is always held when load() is called, so + presumably the load() implementation should also acquire the lock. 
+ """ + _transaction=None # Transaction that is being committed + _tstatus=' ' # Transaction status, used for copying data + _is_read_only = False + + def __init__(self, name, base=None): + self.__name__= name + log.debug("create storage %s", self.__name__) + + # Allocate locks: + self._lock = utils.RLock() + self._commit_lock = utils.Lock() + + # Needed by external storages that use this dumb api :( + self._lock_acquire = self._lock.acquire + self._lock_release = self._lock.release + self._commit_lock_acquire = self._commit_lock.acquire + self._commit_lock_release = self._commit_lock.release + + t = time.time() + t = self._ts = TimeStamp(*(time.gmtime(t)[:5] + (t%60,))) + self._tid = t.raw() + + # ._oid is the highest oid in use (0 is always in use -- it's + # a reserved oid for the root object). Our new_oid() method + # increments it by 1, and returns the result. It's really a + # 64-bit integer stored as an 8-byte big-endian string. + oid = getattr(base, '_oid', None) + if oid is None: + self._oid = z64 + else: + self._oid = oid + # In case that conflicts are resolved during store, + # this collects oids to be returned by tpc_vote. + self._resolved = [] + + def sortKey(self): + """Return a string that can be used to sort storage instances. + + The key must uniquely identify a storage and must be the same + across multiple instantiations of the same storage. + """ + # name may not be sufficient, e.g. ZEO has a user-definable name. + return self.__name__ + + def getName(self): + return self.__name__ + + def getSize(self): + return len(self)*300 # WAG! 
+ + def history(self, oid, version, length=1, filter=None): + return () + + def new_oid(self): + if self._is_read_only: + raise POSException.ReadOnlyError() + + with self._lock: + last = self._oid + d = byte_ord(last[-1]) + if d < 255: # fast path for the usual case + last = last[:-1] + byte_chr(d+1) + else: # there's a carry out of the last byte + last_as_long, = _structunpack(">Q", last) + last = _structpack(">Q", last_as_long + 1) + self._oid = last + return last + + # Update the maximum oid in use, under protection of a lock. The + # maximum-in-use attribute is changed only if possible_new_max_oid is + # larger than its current value. + def set_max_oid(self, possible_new_max_oid): + with self._lock: + if possible_new_max_oid > self._oid: + self._oid = possible_new_max_oid + + def registerDB(self, db): + pass # we don't care + + def isReadOnly(self): + return self._is_read_only + + def tpc_abort(self, transaction): + with self._lock: + + if transaction is not self._transaction: + return + + try: + self._abort() + self._clear_temp() + self._transaction = None + finally: + self._commit_lock_release() + + def _abort(self): + """Subclasses should redefine this to supply abort actions""" + pass + + def tpc_begin(self, transaction, tid=None, status=' '): + if self._is_read_only: + raise POSException.ReadOnlyError() + + with self._lock: + if self._transaction is transaction: + raise POSException.StorageTransactionError( + "Duplicate tpc_begin calls for same transaction") + + self._commit_lock.acquire() + + with self._lock: + self._transaction = transaction + self._clear_temp() + + user = transaction.user + desc = transaction.description + ext = transaction.extension + if ext: + ext = dumps(ext, _protocol) + else: + ext = "" + + self._ude = user, desc, ext + + if tid is None: + now = time.time() + t = TimeStamp(*(time.gmtime(now)[:5] + (now % 60,))) + self._ts = t = t.laterThan(self._ts) + self._tid = t.raw() + else: + self._ts = TimeStamp(tid) + self._tid = tid + + del 
self._resolved[:] + self._tstatus = status + self._begin(self._tid, user, desc, ext) + + def tpc_transaction(self): + return self._transaction + + def _begin(self, tid, u, d, e): + """Subclasses should redefine this to supply transaction start actions. + """ + pass + + def tpc_vote(self, transaction): + with self._lock: + if transaction is not self._transaction: + raise POSException.StorageTransactionError( + "tpc_vote called with wrong transaction") + return self._vote() + + def _vote(self): + """Subclasses should redefine this to supply transaction vote actions. + """ + return self._resolved + + def tpc_finish(self, transaction, f=None): + # It's important that the storage calls the function we pass + # while it still has its lock. We don't want another thread + # to be able to read any updated data until we've had a chance + # to send an invalidation message to all of the other + # connections! + + with self._lock: + if transaction is not self._transaction: + raise POSException.StorageTransactionError( + "tpc_finish called with wrong transaction") + try: + if f is not None: + f(self._tid) + u, d, e = self._ude + self._finish(self._tid, u, d, e) + self._clear_temp() + finally: + self._ude = None + self._transaction = None + self._commit_lock.release() + return self._tid + + def _finish(self, tid, u, d, e): + """Subclasses should redefine this to supply transaction finish actions + """ + pass + + def lastTransaction(self): + with self._lock: + return self._ltid + + def getTid(self, oid): + with self._lock: + return load_current(self, oid)[1] + + def loadSerial(self, oid, serial): + raise POSException.Unsupported( + "Retrieval of historical revisions is not supported") + + def loadBefore(self, oid, tid): + """Return most recent revision of oid before tid committed.""" + return None + + def copyTransactionsFrom(self, other, verbose=0): + """Copy transactions from another storage. + + This is typically used for converting data from one storage to + another. 
`other` must have an .iterator() method. + """ + copy(other, self, verbose) + +def copy(source, dest, verbose=0): + """Copy transactions from a source to a destination storage + + This is typically used for converting data from one storage to + another. `source` must have an .iterator() method. + """ + _ts = None + ok = 1 + preindex = {}; + preget = preindex.get + # restore() is a new storage API method which has an identical + # signature to store() except that it does not return anything. + # Semantically, restore() is also identical to store() except that it + # doesn't do the ConflictError or VersionLockError consistency + # checks. The reason to use restore() over store() in this method is + # that store() cannot be used to copy transactions spanning a version + # commit or abort, or over transactional undos. + # + # We'll use restore() if it's available, otherwise we'll fall back to + # using store(). However, if we use store, then + # copyTransactionsFrom() may fail with VersionLockError or + # ConflictError. + restoring = py2_hasattr(dest, 'restore') + fiter = source.iterator() + for transaction in fiter: + tid = transaction.tid + if _ts is None: + _ts = TimeStamp(tid) + else: + t = TimeStamp(tid) + if t <= _ts: + if ok: print(('Time stamps out of order %s, %s' % (_ts, t))) + ok = 0 + _ts = t.laterThan(_ts) + tid = _ts.raw() + else: + _ts = t + if not ok: + print(('Time stamps back in order %s' % (t))) + ok = 1 + + if verbose: + print(_ts) + + dest.tpc_begin(transaction, tid, transaction.status) + for r in transaction: + oid = r.oid + if verbose: + print(oid_repr(oid), r.version, len(r.data)) + if restoring: + dest.restore(oid, r.tid, r.data, r.version, + r.data_txn, transaction) + else: + pre = preget(oid, None) + dest.store(oid, pre, r.data, r.version, transaction) + preindex[oid] = tid + + dest.tpc_vote(transaction) + dest.tpc_finish(transaction) + + +# defined outside of BaseStorage to facilitate independent reuse. 
+# just depends on _transaction attr and getTid method. +def checkCurrentSerialInTransaction(self, oid, serial, transaction): + if transaction is not self._transaction: + raise POSException.StorageTransactionError(self, transaction) + + committed_tid = self.getTid(oid) + if committed_tid != serial: + raise POSException.ReadConflictError( + oid=oid, serials=(committed_tid, serial)) + +BaseStorage.checkCurrentSerialInTransaction = checkCurrentSerialInTransaction + +@zope.interface.implementer(ZODB.interfaces.IStorageTransactionInformation) +class TransactionRecord(TransactionMetaData): + """Abstract base class for iterator protocol""" + + + def __init__(self, tid, status, user, description, extension): + self.tid = tid + self.status = status + TransactionMetaData.__init__(self, user, description, extension) + +@zope.interface.implementer(ZODB.interfaces.IStorageRecordInformation) +class DataRecord(object): + """Abstract base class for iterator protocol""" + + + version = '' + + def __init__(self, oid, tid, data, prev): + self.oid = oid + self.tid = tid + self.data = data + self.data_txn = prev diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/ConflictResolution.py b/thesisenv/lib/python3.6/site-packages/ZODB/ConflictResolution.py new file mode 100644 index 0000000..c75f482 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/ConflictResolution.py @@ -0,0 +1,315 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## + +import logging + +import six +import zope.interface +from ZODB.POSException import ConflictError +from ZODB.loglevels import BLATHER +from ZODB._compat import ( + BytesIO, PersistentUnpickler, PersistentPickler, _protocol) + +# Subtle: Python 2.x has pickle.PicklingError and cPickle.PicklingError, +# and these are unrelated classes! So we shouldn't use pickle.PicklingError, +# since on Python 2, ZODB._compat.pickle is cPickle. +from pickle import PicklingError + + +logger = logging.getLogger('ZODB.ConflictResolution') + +class BadClassName(Exception): + pass + +class BadClass(object): + + def __init__(self, *args): + self.args = args + + def __reduce__(self): + raise BadClassName(*self.args) + +_class_cache = {} +_class_cache_get = _class_cache.get +def find_global(*args): + cls = _class_cache_get(args, 0) + if cls == 0: + # Not cached. 
Try to import + try: + module = __import__(args[0], {}, {}, ['cluck']) + except ImportError: + cls = 1 + else: + cls = getattr(module, args[1], 1) + _class_cache[args] = cls + + if cls == 1: + logger.log(BLATHER, "Unable to load class", exc_info=True) + + if cls == 1: + # Not importable + if (isinstance(args, tuple) and len(args) == 2 and + isinstance(args[0], six.string_types) and + isinstance(args[1], six.string_types) + ): + return BadClass(*args) + else: + raise BadClassName(*args) + return cls + +def state(self, oid, serial, prfactory, p=''): + p = p or self.loadSerial(oid, serial) + p = self._crs_untransform_record_data(p) + file = BytesIO(p) + unpickler = PersistentUnpickler( + find_global, prfactory.persistent_load, file) + unpickler.load() # skip the class tuple + return unpickler.load() + +class IPersistentReference(zope.interface.Interface): + '''public contract for references to persistent objects from an object + with conflicts.''' + + oid = zope.interface.Attribute( + 'The oid of the persistent object that this reference represents') + + database_name = zope.interface.Attribute( + '''The name of the database of the reference, *if* different. + + If not different, None.''') + + klass = zope.interface.Attribute( + '''class meta data. Presence is not reliable.''') + + weak = zope.interface.Attribute( + '''bool: whether this reference is weak''') + + def __cmp__(other): + '''if other is equivalent reference, return 0; else raise ValueError. + + Equivalent in this case means that oid and database_name are the same. + + If either is a weak reference, we only support `is` equivalence, and + otherwise raise a ValueError even if the datbase_names and oids are + the same, rather than guess at the correct semantics. + + It is impossible to sort reliably, since the actual persistent + class may have its own comparison, and we have no idea what it is. 
+ We assert that it is reasonably safe to assume that an object is + equivalent to itself, but that's as much as we can say. + + We don't compare on 'is other', despite the + PersistentReferenceFactory.data cache, because it is possible to + have two references to the same object that are spelled with different + data (for instance, one with a class and one without).''' + +@zope.interface.implementer(IPersistentReference) +class PersistentReference(object): + + + weak = False + oid = database_name = klass = None + + def __init__(self, data): + self.data = data + # see serialize.py, ObjectReader._persistent_load + if isinstance(data, tuple): + self.oid, klass = data + if isinstance(klass, BadClass): + # We can't use the BadClass directly because, if + # resolution succeeds, there's no good way to pickle + # it. Fortunately, a class reference in a persistent + # reference is allowed to be a module+name tuple. + self.data = self.oid, klass.args + elif isinstance(data, (bytes, str)): + self.oid = data + else: # a list + reference_type = data[0] + # 'm' = multi_persistent: (database_name, oid, klass) + # 'n' = multi_oid: (database_name, oid) + # 'w' = persistent weakref: (oid) + # or persistent weakref: (oid, database_name) + # else it is a weakref: reference_type + if reference_type == 'm': + self.database_name, self.oid, klass = data[1] + if isinstance(klass, BadClass): + # see above wrt BadClass + data[1] = self.database_name, self.oid, klass.args + elif reference_type == 'n': + self.database_name, self.oid = data[1] + elif reference_type == 'w': + try: + self.oid, = data[1] + except ValueError: + self.oid, self.database_name = data[1] + self.weak = True + else: + assert len(data) == 1, 'unknown reference format' + self.oid = data[0] + self.weak = True + if not isinstance(self.oid, (bytes, type(None))): + assert isinstance(self.oid, str) + # this happens on Python 3 when all bytes in the oid are < 0x80 + self.oid = self.oid.encode('ascii') + + def __cmp__(self, 
other): + if self is other or ( + isinstance(other, PersistentReference) and + self.oid == other.oid and + self.database_name == other.database_name and + not self.weak and + not other.weak): + return 0 + else: + raise ValueError( + "can't reliably compare against different " + "PersistentReferences") + + # Python 3 dropped __cmp__ + + def __eq__(self, other): + return self.__cmp__(other) == 0 + + def __ne__(self, other): + return self.__cmp__(other) != 0 + + def __lt__(self, other): + return self.__cmp__(other) < 0 + + def __gt__(self, other): + return self.__cmp__(other) > 0 + + def __le__(self, other): + return self.__cmp__(other) <= 0 + + def __ge__(self, other): + return self.__cmp__(other) >= 0 + + def __repr__(self): + return "PR(%s %s)" % (id(self), self.data) + + def __getstate__(self): + raise PicklingError("Can't pickle PersistentReference") + + @property + def klass(self): + # for tests + data = self.data + if isinstance(data, tuple): + return data[1] + elif isinstance(data, list) and data[0] == 'm': + return data[1][2] + +class PersistentReferenceFactory(object): + + data = None + + def persistent_load(self, ref): + if self.data is None: + self.data = {} + key = tuple(ref) # lists are not hashable; formats are different enough + # even after eliminating list/tuple distinction + r = self.data.get(key, None) + if r is None: + r = PersistentReference(ref) + self.data[key] = r + + return r + +def persistent_id(object): + if getattr(object, '__class__', 0) is not PersistentReference: + return None + return object.data + +_unresolvable = {} +def tryToResolveConflict(self, oid, committedSerial, oldSerial, newpickle, + committedData=b''): + # class_tuple, old, committed, newstate = ('',''), 0, 0, 0 + klass = 'n/a' + try: + prfactory = PersistentReferenceFactory() + newpickle = self._crs_untransform_record_data(newpickle) + file = BytesIO(newpickle) + unpickler = PersistentUnpickler( + find_global, prfactory.persistent_load, file) + meta = unpickler.load() + if 
isinstance(meta, tuple): + klass = meta[0] + newargs = meta[1] or () + if isinstance(klass, tuple): + klass = find_global(*klass) + else: + klass = meta + newargs = () + + if klass in _unresolvable: + raise ConflictError + + inst = klass.__new__(klass, *newargs) + + try: + resolve = inst._p_resolveConflict + except AttributeError: + _unresolvable[klass] = 1 + raise ConflictError + + + oldData = self.loadSerial(oid, oldSerial) + if not committedData: + committedData = self.loadSerial(oid, committedSerial) + + newstate = unpickler.load() + old = state(self, oid, oldSerial, prfactory, oldData) + committed = state(self, oid, committedSerial, prfactory, committedData) + + resolved = resolve(old, committed, newstate) + + file = BytesIO() + pickler = PersistentPickler(persistent_id, file, _protocol) + pickler.dump(meta) + pickler.dump(resolved) + return self._crs_transform_record_data(file.getvalue()) + except (ConflictError, BadClassName) as e: + logger.debug( + "Conflict resolution on %s failed with %s: %s", + klass, e.__class__.__name__, str(e)) + except: + # If anything else went wrong, catch it here and avoid passing an + # arbitrary exception back to the client. The error here will mask + # the original ConflictError. A client can recover from a + # ConflictError, but not necessarily from other errors. But log + # the error so that any problems can be fixed. 
+ logger.exception( + "Unexpected error while trying to resolve conflict on %s", klass) + + raise ConflictError(oid=oid, serials=(committedSerial, oldSerial), + data=newpickle) + +class ConflictResolvingStorage(object): + "Mix-in class that provides conflict resolution handling for storages" + + tryToResolveConflict = tryToResolveConflict + + _crs_transform_record_data = _crs_untransform_record_data = ( + lambda self, o: o) + + def registerDB(self, wrapper): + self._crs_untransform_record_data = wrapper.untransform_record_data + self._crs_transform_record_data = wrapper.transform_record_data + try: + m = super(ConflictResolvingStorage, self).registerDB + except AttributeError: + pass + else: + m(wrapper) diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/ConflictResolution.txt b/thesisenv/lib/python3.6/site-packages/ZODB/ConflictResolution.txt new file mode 100644 index 0000000..1f5e69e --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/ConflictResolution.txt @@ -0,0 +1,579 @@ +=================== +Conflict Resolution +=================== + +Overview +======== + +Conflict resolution is a way to resolve transaction conflicts that would +otherwise abort a transaction. As such, it risks data integrity in order to +try to avoid throwing away potentially computationally expensive transactions. + +The risk of harming data integrity should not be underestimated. Writing +conflict resolution code takes some responsibility for transactional +integrity away from the ZODB, and puts it in the hands of the developer +writing the conflict resolution code. + +The current conflict resolution code is implemented with a storage mix-in +found in ZODB/ConflictResolution.py. 
The idea's proposal, and an explanation +of the interface, can be found here: +http://www.zope.org/Members/jim/ZODB/ApplicationLevelConflictResolution + +Here is the most pertinent section, somewhat modified for this document's +use: + + A new interface is proposed to allow object authors to provide a method + for resolving conflicts. When a conflict is detected, then the database + checks to see if the class of the object being saved defines the method, + _p_resolveConflict. If the method is defined, then the method is called + on the object. If the method succeeds, then the object change can be + committed, otherwise a ConflictError is raised as usual. + + def _p_resolveConflict(oldState, savedState, newState): + Return the state of the object after resolving different changes. + + Arguments: + + oldState + The state of the object that the changes made by the current + transaction were based on. + + The method is permitted to modify this value. + + savedState + The state of the object that is currently stored in the + database. This state was written after oldState and reflects + changes made by a transaction that committed before the + current transaction. + + The method is permitted to modify this value. + + newState + The state after changes made by the current transaction. + + The method is not permitted to modify this value. + + This method should compute a new state by merging changes + reflected in savedState and newState, relative to oldState. + + If the method cannot resolve the changes, then it should raise + ZODB.POSException.ConflictError. + + + Consider an extremely simple example, a counter:: + + from persistent import Persistent + class PCounter(Persistent): + '`value` is readonly; increment it with `inc`.' 
+ + # Fool BTree checks for sane comparison :/ + def __cmp__(self, other): + return object.__cmp__(self, other) + def __lt__(self, other): + return object.__lt__(self, other) + + _val = 0 + def inc(self): + self._val += 1 + @property + def value(self): + return self._val + def _p_resolveConflict(self, oldState, savedState, newState): + oldState['_val'] = ( + savedState.get('_val', 0) + + newState.get('_val', 0) - + oldState.get('_val', 0)) + return oldState + + .. -> src + + >>> import ConflictResolution_txt + >>> exec(src, ConflictResolution_txt.__dict__) + >>> PCounter = ConflictResolution_txt.PCounter + >>> PCounter.__module__ = 'ConflictResolution_txt' + + +By "state", the excerpt above means the value used by __getstate__ and +__setstate__: a dictionary, in most cases. We'll look at more details below, +but let's continue the example above with a simple successful resolution +story. + +First we create a storage and a database, and put a PCounter in the database. + + >>> import ZODB + >>> db = ZODB.DB('Data.fs') + >>> import transaction + >>> tm_A = transaction.TransactionManager() + >>> conn_A = db.open(transaction_manager=tm_A) + >>> p_A = conn_A.root()['p'] = PCounter() + >>> p_A.value + 0 + >>> tm_A.commit() + +Now get another copy of 'p' so we can make a conflict. Think of `conn_A` +(connection A) as one thread, and `conn_B` (connection B) as a concurrent +thread. `p_A` is a view on the object in the first connection, and `p_B` +is a view on *the same persistent object* in the second connection. + + >>> tm_B = transaction.TransactionManager() + >>> conn_B = db.open(transaction_manager=tm_B) + >>> p_B = conn_B.root()['p'] + >>> p_B.value + 0 + >>> p_A._p_oid == p_B._p_oid + True + +Now we can make a conflict, and see it resolved. 
+ + >>> p_A.inc() + >>> p_A.value + 1 + >>> p_B.inc() + >>> p_B.value + 1 + >>> tm_B.commit() + >>> p_B.value + 1 + >>> tm_A.commit() + >>> p_A.value + 2 + +We need to synchronize connection B, in any of a variety of ways, to see the +change from connection A. + + >>> p_B.value + 1 + >>> trans = tm_B.begin() + >>> p_B.value + 2 + +A very similar class found in real world use is BTrees.Length.Length. + +This conflict resolution approach is simple, yet powerful. However, it +has a few caveats and rough edges in practice. The simplicity, then, is +a bit of a disguise. Again, be warned, writing conflict resolution code +means that you claim significant responsibilty for your data integrity. + +Because of the rough edges, the current conflict resolution approach is slated +for change (as of this writing, according to Jim Fulton, the ZODB +primary author and maintainer). Others have talked about different approaches +as well (see, for instance, http://www.python.org/~jeremy/weblog/031031c.html). +But for now, the _p_resolveConflict method is what we have. + +Caveats and Dangers +=================== + +Here are caveats for working with this conflict resolution approach. +Each sub-section has a "DANGERS" section that outlines what might happen +if you ignore the warning. We work from the least danger to the most. + +Conflict Resolution Is on the Server +------------------------------------ + +If you are using ZEO or ZRS, be aware that the classes for which you have +conflict resolution code *and* the classes of the non-persistent objects +they reference must be available to import by the *server* (or ZRS +primary). + +DANGERS: You think you are going to get conflict resolution, but you won't. + +Ignore `self` +------------- + +Even though the _p_resolveConflict method has a "self", ignore it. +Don't change it. You make changes by returning the state. This is +effectively a class method. + +DANGERS: The changes you make to the instance will be discarded. 
The +instance is not initialized, so other methods that depend on instance +attributes will not work. + +Here's an example of a broken _p_resolveConflict method:: + + class PCounter2(PCounter): + def __init__(self): + self.data = [] + def _p_resolveConflict(self, oldState, savedState, newState): + self.data.append('bad idea') + return super(PCounter2, self)._p_resolveConflict( + oldState, savedState, newState) + +.. -> src + + >>> exec(src, ConflictResolution_txt.__dict__) + >>> PCounter2 = ConflictResolution_txt.PCounter2 + >>> PCounter2.__module__ = 'ConflictResolution_txt' + +Now we'll prepare for the conflict again. + + >>> p2_A = conn_A.root()['p2'] = PCounter2() + >>> p2_A.value + 0 + >>> tm_A.commit() + >>> trans = tm_B.begin() # sync + >>> p2_B = conn_B.root()['p2'] + >>> p2_B.value + 0 + >>> p2_A._p_oid == p2_B._p_oid + True + +And now we will make a conflict. + + >>> p2_A.inc() + >>> p2_A.value + 1 + >>> p2_B.inc() + >>> p2_B.value + 1 + >>> tm_B.commit() + >>> p2_B.value + 1 + >>> tm_A.commit() # doctest: +ELLIPSIS + Traceback (most recent call last): + ... + ConflictError: database conflict error... + +oops! + + >>> tm_A.abort() + >>> p2_A.value + 1 + >>> trans = tm_B.begin() + >>> p2_B.value + 1 + +Watch Out for Persistent Objects in the State +--------------------------------------------- + +If the object state has a reference to Persistent objects (instances +of classes that inherit from persistent.Persistent) then these references +*will not be loaded and are inaccessible*. Instead, persistent objects +in the state dictionary are ZODB.ConflictResolution.PersistentReference +instances. 
These objects have the following interface:: + + class IPersistentReference(zope.interface.Interface): + '''public contract for references to persistent objects from an object + with conflicts.''' + + oid = zope.interface.Attribute( + 'The oid of the persistent object that this reference represents') + + database_name = zope.interface.Attribute( + '''The name of the database of the reference, *if* different. + + If not different, None.''') + + klass = zope.interface.Attribute( + '''class meta data. Presence is not reliable.''') + + weak = zope.interface.Attribute( + '''bool: whether this reference is weak''') + + def __cmp__(other): + '''if other is equivalent reference, return 0; else raise ValueError. + + Equivalent in this case means that oid and database_name are the same. + + If either is a weak reference, we only support `is` equivalence, and + otherwise raise a ValueError even if the datbase_names and oids are + the same, rather than guess at the correct semantics. + + It is impossible to sort reliably, since the actual persistent + class may have its own comparison, and we have no idea what it is. + We assert that it is reasonably safe to assume that an object is + equivalent to itself, but that's as much as we can say. + + We don't compare on 'is other', despite the + PersistentReferenceFactory.data cache, because it is possible to + have two references to the same object that are spelled with different + data (for instance, one with a class and one without).''' + +So let's look at one of these. Let's assume we have three, `old`, +`saved`, and `new`, each representing a persistent reference to the same +object within a _p_resolveConflict call from the oldState, savedState, +and newState [#get_persistent_reference]_. They have an oid, `weak` is +False, and `database_name` is None. `klass` happens to be set but this is +not always the case. 
+ + >>> isinstance(new.oid, bytes) + True + >>> new.weak + False + >>> print(new.database_name) + None + >>> new.klass is PCounter + True + +There are a few subtleties to highlight here. First, notice that the +database_name is only present if this is a cross-database reference +(see cross-database-references.txt in this directory, and examples +below). The database name and oid is sometimes a reasonable way to +reliably sort Persistent objects (see zope.app.keyreference, for +instance) but if your code compares one PersistentReference with a +database_name and another without, you need to refuse to give an answer +and raise an exception, because you can't know how the unknown +database_name sorts. + +We already saw a persistent reference with a database_name of None. Now +let's suppose `new` is an example of a cross-database reference from a +database named '2' [#cross-database]_. + + >>> new.database_name + '2' + +As seen, the database_name is available for this cross-database reference, +and not for others. References to persistent objects, as defined in +seialize.py, have other variations, such as weak references, which are +handled but not discussed here [#instantiation_test]_ + +Second, notice the __cmp__ behavior [#cmp_test]_. This is new behavior +after ZODB 3.8 and addresses a serious problem for when persistent +objects are compared in an _p_resolveConflict, such as that in the ZODB +BTrees code. Prior to this change, it was not safe to use Persistent +objects as keys in a BTree. You needed to define a __cmp__ for them to +be sorted reliably out of the context of conflict resolution, but then +during conflict resolution the sorting would be arbitrary, on the basis +of the persistent reference's memory location. This could have lead to +inconsistent state for BTrees (or BTree module buckets or tree sets or sets). + +Here's an example of how the new behavior stops potentially incorrect +resolution. 
+ + >>> import BTrees + >>> treeset_A = conn_A.root()['treeset'] = BTrees.family32.OI.TreeSet() + >>> tm_A.commit() + >>> trans = tm_B.begin() # sync + >>> treeset_B = conn_B.root()['treeset'] + >>> treeset_A.insert(PCounter()) + 1 + >>> treeset_B.insert(PCounter()) + 1 + >>> tm_B.commit() + >>> tm_A.commit() # doctest: +ELLIPSIS + Traceback (most recent call last): + ... + ConflictError: database conflict error... + >>> tm_A.abort() + +Third, note that, even if the persistent object to which the reference refers +changes in the same transaction, the reference is still the same. + +DANGERS: subtle and potentially serious. Beyond the two subtleties above, +which should now be addressed, there is a general problem for objects that +are composites of smaller persistent objects--for instance, a BTree, in +which the BTree and each bucket is a persistent object; or a +zc.queue.CompositePersistentQueue, which is a persistent queue of +persistent queues. Consider the following situation. It is actually solved, +but it is a concrete example of what might go wrong. + +A BTree (persistent object) has a two buckets (persistent objects). The +second bucket has one persistent object in it. Concurrently, one thread +deletes the one object in the second bucket, which causes the BTree to dump +the bucket; and another thread puts an object in the second bucket. What +happens during conflict resolution? Remember, each persistent object cannot +see the other. From the perspective of the BTree object, it has no +conflicts: one transaction modified it, causing it to lose a bucket; and the +other transaction did not change it. From the perspective of the bucket, +one transaction deleted an object and the other added it: it will resolve +conflicts and say that the bucket has the new object and not the old one. +However, it will be garbage collected, and effectively the addition of the +new object will be lost. + +As mentioned, this story is actually solved for BTrees. 
As +BTrees/MergeTemplate.c explains, whenever savedState or newState for a bucket +shows an empty bucket, the code refuses to resolve the conflict: this avoids +the situation above. + + >>> bucket_A = conn_A.root()['bucket'] = BTrees.family32.II.Bucket() + >>> bucket_A[0] = 255 + >>> tm_A.commit() + >>> trans = tm_B.begin() # sync + >>> bucket_B = conn_B.root()['bucket'] + >>> bucket_B[1] = 254 + >>> del bucket_A[0] + >>> tm_B.commit() + >>> tm_A.commit() # doctest: +ELLIPSIS + Traceback (most recent call last): + ... + ConflictError: database conflict error... + >>> tm_A.abort() + +However, the story highlights the kinds of subtle problems that units +made up of multiple composite Persistent objects need to contemplate. +Any structure made up of objects that contain persistent objects with +conflict resolution code, as a catalog index is made up of multiple +BTree Buckets and Sets, each with conflict resolution, needs to think +through these kinds of problems or be faced with potential data +integrity issues. + +.. cleanup + + >>> db.close() + >>> db1.close() + >>> db2.close() + +.. ......... .. +.. FOOTNOTES .. +.. ......... .. + +.. [#get_persistent_reference] We'll catch persistent references with a class + mutable. + + :: + + class PCounter3(PCounter): + data = [] + def _p_resolveConflict(self, oldState, savedState, newState): + PCounter3.data.append( + (oldState.get('other'), + savedState.get('other'), + newState.get('other'))) + return super(PCounter3, self)._p_resolveConflict( + oldState, savedState, newState) + + .. 
-> src + + >>> exec(src, ConflictResolution_txt.__dict__) + >>> PCounter3 = ConflictResolution_txt.PCounter3 + >>> PCounter3.__module__ = 'ConflictResolution_txt' + + >>> p3_A = conn_A.root()['p3'] = PCounter3() + >>> p3_A.other = conn_A.root()['p'] + >>> tm_A.commit() + >>> trans = tm_B.begin() # sync + >>> p3_B = conn_B.root()['p3'] + >>> p3_A.inc() + >>> p3_B.inc() + >>> tm_B.commit() + >>> tm_A.commit() + >>> old, saved, new = PCounter3.data[-1] + +.. [#cross-database] We need a whole different set of databases for this. + See cross-database-references.txt in this directory for a discussion of + what is going on here. + + >>> databases = {} + >>> db1 = ZODB.DB('1', databases=databases, database_name='1') + >>> db2 = ZODB.DB('2', databases=databases, database_name='2') + >>> tm_multi_A = transaction.TransactionManager() + >>> conn_1A = db1.open(transaction_manager=tm_multi_A) + >>> conn_2A = conn_1A.get_connection('2') + >>> p4_1A = conn_1A.root()['p4'] = PCounter3() + >>> p5_2A = conn_2A.root()['p5'] = PCounter3() + >>> conn_2A.add(p5_2A) + >>> p4_1A.other = p5_2A + >>> tm_multi_A.commit() + >>> tm_multi_B = transaction.TransactionManager() + >>> conn_1B = db1.open(transaction_manager=tm_multi_B) + >>> p4_1B = conn_1B.root()['p4'] + >>> p4_1A.inc() + >>> p4_1B.inc() + >>> tm_multi_B.commit() + >>> tm_multi_A.commit() + >>> old, saved, new = PCounter3.data[-1] + +.. [#instantiation_test] We'll simply instantiate PersistentReferences + with examples of types described in ZODB/serialize.py. 
+ + >>> from ZODB.ConflictResolution import PersistentReference + + >>> ref1 = PersistentReference(b'my_oid') + >>> ref1.oid + 'my_oid' + >>> print(ref1.klass) + None + >>> print(ref1.database_name) + None + >>> ref1.weak + False + + >>> ref2 = PersistentReference((b'my_oid', 'my_class')) + >>> ref2.oid + 'my_oid' + >>> ref2.klass + 'my_class' + >>> print(ref2.database_name) + None + >>> ref2.weak + False + + >>> ref3 = PersistentReference(['w', (b'my_oid',)]) + >>> ref3.oid + 'my_oid' + >>> print(ref3.klass) + None + >>> print(ref3.database_name) + None + >>> ref3.weak + True + + >>> ref3a = PersistentReference(['w', (b'my_oid', 'other_db')]) + >>> ref3a.oid + 'my_oid' + >>> print(ref3a.klass) + None + >>> ref3a.database_name + 'other_db' + >>> ref3a.weak + True + + >>> ref4 = PersistentReference(['m', ('other_db', b'my_oid', 'my_class')]) + >>> ref4.oid + 'my_oid' + >>> ref4.klass + 'my_class' + >>> ref4.database_name + 'other_db' + >>> ref4.weak + False + + >>> ref5 = PersistentReference(['n', ('other_db', b'my_oid')]) + >>> ref5.oid + 'my_oid' + >>> print(ref5.klass) + None + >>> ref5.database_name + 'other_db' + >>> ref5.weak + False + + >>> ref6 = PersistentReference([b'my_oid']) # legacy + >>> ref6.oid + 'my_oid' + >>> print(ref6.klass) + None + >>> print(ref6.database_name) + None + >>> ref6.weak + True + +.. [#cmp_test] All references are equal to themselves. + + >>> ref1 == ref1 and ref2 == ref2 and ref4 == ref4 and ref5 == ref5 + True + >>> ref3 == ref3 and ref3a == ref3a and ref6 == ref6 # weak references + True + + Non-weak references with the same oid and database_name are equal. + + >>> ref1 == ref2 and ref4 == ref5 + True + + Everything else raises a ValueError: weak references with the same oid and + database, and references with a different database_name or oid. + + >>> ref3 == ref6 + Traceback (most recent call last): + ... 
+ ValueError: can't reliably compare against different PersistentReferences + + >>> ref1 == PersistentReference(('another_oid', 'my_class')) + Traceback (most recent call last): + ... + ValueError: can't reliably compare against different PersistentReferences + + >>> ref4 == PersistentReference( + ... ['m', ('another_db', 'my_oid', 'my_class')]) + Traceback (most recent call last): + ... + ValueError: can't reliably compare against different PersistentReferences diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/Connection.py b/thesisenv/lib/python3.6/site-packages/ZODB/Connection.py new file mode 100644 index 0000000..10821b3 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/Connection.py @@ -0,0 +1,1350 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## +"""Database connection support +""" +from __future__ import print_function +import logging +import sys +import tempfile +import warnings +import os +import time + +from persistent import PickleCache + +# interfaces +from persistent.interfaces import IPersistentDataManager +from ZODB.interfaces import IConnection +from ZODB.interfaces import IBlobStorage +from ZODB.interfaces import IStorageTransactionMetaData +from ZODB.blob import Blob, rename_or_copy_blob, remove_committed_dir +from transaction.interfaces import ISavepointDataManager +from transaction.interfaces import IDataManagerSavepoint +from transaction.interfaces import ISynchronizer +from zope.interface import implementer + +import transaction + +import ZODB +from ZODB.blob import SAVEPOINT_SUFFIX +from ZODB.ExportImport import ExportImport +from ZODB import POSException +from ZODB.POSException import InvalidObjectReference, ConnectionStateError +from ZODB.POSException import ConflictError, ReadConflictError +from ZODB.POSException import Unsupported, ReadOnlyHistoryError +from ZODB.POSException import POSKeyError +from ZODB.serialize import ObjectWriter, ObjectReader +from ZODB.utils import p64, u64, z64, oid_repr, positive_id +from ZODB import utils +import six + +from .mvccadapter import HistoricalStorageAdapter + +from . import valuedoc +from . import _compat + +global_reset_counter = 0 + +noop = lambda : None + +def resetCaches(): + """Causes all connection caches to be reset as connections are reopened. + + Zope's refresh feature uses this. When you reload Python modules, + instances of classes continue to use the old class definitions. 
+ To use the new code immediately, the refresh feature asks ZODB to + clear caches by calling resetCaches(). When the instances are + loaded by subsequent connections, they will use the new class + definitions. + """ + global global_reset_counter + global_reset_counter += 1 + + +def className(obj): + cls = type(obj) + return "%s.%s" % (cls.__module__, cls.__name__) + + +@implementer(IConnection, + ISavepointDataManager, + IPersistentDataManager, + ISynchronizer) +class Connection(ExportImport, object): + """Connection to ZODB for loading and storing objects. + + Connections manage object state in collaboration with transaction + managers. They're created by calling the + :meth:`~ZODB.DB.open` method on :py:class:`database + ` objects. + """ + + _code_timestamp = 0 + + #: Transaction manager associated with the connection when it was opened. + transaction_manager = valuedoc.ValueDoc('current transaction manager') + + ########################################################################## + # Connection methods, ZODB.IConnection + + def __init__(self, db, cache_size=400, before=None, cache_size_bytes=0): + """Create a new Connection.""" + + self._log = logging.getLogger('ZODB.Connection') + self._debug_info = () + + self._db = db + self.large_record_size = db.large_record_size + + # historical connection + self.before = before + + # Multi-database support + self.connections = {self._db.database_name: self} + + storage = db._mvcc_storage + if before: + try: + before_instance = storage.before_instance + except AttributeError: + def before_instance(before): + return HistoricalStorageAdapter( + storage.new_instance(), before) + storage = before_instance(before) + else: + storage = storage.new_instance() + + self._normal_storage = self._storage = storage + self._savepoint_storage = None + + # Do we need to join a txn manager? 
+ self._needs_to_join = True + self.transaction_manager = None + self.opened = None # time.time() when DB.open() opened us + + self._reset_counter = global_reset_counter + self._load_count = 0 # Number of objects unghosted + self._store_count = 0 # Number of objects stored + + # Cache which can ghostify (forget the state of) objects not + # recently used. Its API is roughly that of a dict, with + # additional gc-related and invalidation-related methods. + self._cache = PickleCache(self, cache_size, cache_size_bytes) + + # The pre-cache is used by get to avoid infinite loops when + # objects immediately load their state whern they get their + # persistent data set. + self._pre_cache = {} + + # List of all objects (not oids) registered as modified by the + # persistence machinery, or by add(), or whose access caused a + # ReadConflictError (just to be able to clean them up from the + # cache on abort with the other modified objects). All objects + # of this list are either in _cache or in _added. + self._registered_objects = [] # [object] + + # ids and serials of objects for which readCurrent was called + # in a transaction. + self._readCurrent = {} # {oid ->serial} + + # Dict of oid->obj added explicitly through add(). Used as a + # preliminary cache until commit time when objects are all moved + # to the real _cache. The objects are moved to _creating at + # commit time. + self._added = {} # {oid -> object} + + # During commit this is turned into a list, which receives + # objects added as a side-effect of storing a modified object. + self._added_during_commit = None + + # During commit, all objects go to either _modified or _creating: + + # Dict of oid->flag of new objects (without serial), either + # added by add() or implicitly added (discovered by the + # serializer during commit). The flag is True for implicit + # adding. 
Used during abort to remove created objects from the + # _cache, and by persistent_id to check that a new object isn't + # reachable from multiple databases. + self._creating = {} # {oid -> implicitly_added_flag} + + # List of oids of modified objects, which have to be invalidated + # in the cache on abort and in other connections on finish. + self._modified = [] # [oid] + + # We intend to prevent committing a transaction in which + # ReadConflictError occurs. _conflicts is the set of oids that + # experienced ReadConflictError. Any time we raise ReadConflictError, + # the oid should be added to this set, and we should be sure that the + # object is registered. Because it's registered, Connection.commit() + # will raise ReadConflictError again (because the oid is in + # _conflicts). + self._conflicts = {} + + # To support importFile(), implemented in the ExportImport base + # class, we need to run _importDuringCommit() from our commit() + # method. If _import is not None, it is a two-tuple of arguments + # to pass to _importDuringCommit(). 
+ self._import = None + + self._reader = ObjectReader(self, self._cache, self._db.classFactory) + + def new_oid(self): + return self._storage.new_oid() + + def add(self, obj): + """Add a new object 'obj' to the database and assign it an oid.""" + if self.opened is None: + raise ConnectionStateError("The database connection is closed") + + marker = object() + oid = getattr(obj, "_p_oid", marker) + if oid is marker: + raise TypeError("Only first-class persistent objects may be" + " added to a Connection.", obj) + elif obj._p_jar is None: + self._add(obj, self.new_oid()) + elif obj._p_jar is not self: + raise InvalidObjectReference(obj, obj._p_jar) + + def _add(self, obj, oid): + assert obj._p_oid is None + oid = obj._p_oid = oid + obj._p_jar = self + if self._added_during_commit is not None: + self._added_during_commit.append(obj) + self._register(obj) + # Add to _added after calling register(), so that _added + # can be used as a test for whether the object has been + # registered with the transaction. + self._added[oid] = obj + + def get(self, oid): + """Return the persistent object with oid 'oid'.""" + if self.opened is None: + raise ConnectionStateError("The database connection is closed") + + obj = self._cache.get(oid, None) + if obj is not None: + return obj + obj = self._added.get(oid, None) + if obj is not None: + return obj + obj = self._pre_cache.get(oid, None) + if obj is not None: + return obj + + p, _ = self._storage.load(oid) + obj = self._reader.getGhost(p) + + # Avoid infiniate loop if obj tries to load its state before + # it is added to the cache and it's state refers to it. + # (This will typically be the case for non-ghostifyable objects, + # like persistent caches.) + self._pre_cache[oid] = obj + self._cache.new_ghost(oid, obj) + self._pre_cache.pop(oid) + return obj + + def cacheMinimize(self): + """Deactivate all unmodified objects in the cache. 
+ """ + for connection in six.itervalues(self.connections): + connection._cache.minimize() + + # TODO: we should test what happens when cacheGC is called mid-transaction. + def cacheGC(self): + """Reduce cache size to target size. + """ + for connection in six.itervalues(self.connections): + connection._cache.incrgc() + + __onCloseCallbacks = None + def onCloseCallback(self, f): + """Register a callable, f, to be called by close().""" + if self.__onCloseCallbacks is None: + self.__onCloseCallbacks = [] + self.__onCloseCallbacks.append(f) + + def close(self, primary=True): + """Close the Connection.""" + if not self._needs_to_join: + # We're currently joined to a transaction. + raise ConnectionStateError("Cannot close a connection joined to " + "a transaction") + + self._cache.incrgc() # This is a good time to do some GC + + # Call the close callbacks. + if self.__onCloseCallbacks is not None: + callbacks = self.__onCloseCallbacks + self.__onCloseCallbacks = None + for f in callbacks: + try: + f() + except: # except what? + f = getattr(f, 'im_self', f) + self._log.exception("Close callback failed for %s", f) + + self._debug_info = () + + if self.opened and self.transaction_manager is not None: + # transaction_manager could be None if one of the __onCloseCallbacks + # closed the DB already, .e.g, ZODB.connection() does this. + self.transaction_manager.unregisterSynch(self) + + + am = self._db._activity_monitor + if am is not None: + am.closedConnection(self) + + # Drop transaction manager to release resources and help prevent errors + self.transaction_manager = None + + if hasattr(self._storage, 'afterCompletion'): + self._storage.afterCompletion() + + if primary: + for connection in self.connections.values(): + if connection is not self: + connection.close(False) + + # Return the connection to the pool. + if self.opened is not None: + self._db._returnToPool(self) + + # _returnToPool() set self.opened to None. 
+ # However, we can't assert that here, because self may + # have been reused (by another thread) by the time we + # get back here. + else: + self.opened = None + + # We may have been reused by another thread at this point so + # we can't manipulate or check the state of `self` any more. + + + def db(self): + """Returns a handle to the database this connection belongs to.""" + return self._db + + def isReadOnly(self): + """Returns True if this connection is read only.""" + if self.opened is None: + raise ConnectionStateError("The database connection is closed") + return self._storage.isReadOnly() + + @property + def root(self): + """Return the database root object.""" + return RootConvenience(self.get(z64)) + + def get_connection(self, database_name): + """Return a Connection for the named database.""" + connection = self.connections.get(database_name) + if connection is None: + new_con = self._db.databases[database_name].open( + transaction_manager=self.transaction_manager, + before=self.before, + ) + self.connections.update(new_con.connections) + new_con.connections = self.connections + connection = new_con + return connection + + def _implicitlyAdding(self, oid): + """Are we implicitly adding an object within the current transaction + + This is used in a check to avoid implicitly adding an object + to a database in a multi-database situation. + See serialize.ObjectWriter.persistent_id. + + """ + return (self._creating.get(oid, 0) + or + ((self._savepoint_storage is not None) + and + self._savepoint_storage.creating.get(oid, 0) + ) + ) + + def sync(self): + """Manually update the view on the database.""" + self.transaction_manager.begin() + + def getDebugInfo(self): + """Returns a tuple with different items for debugging the + connection. 
+ """ + return self._debug_info + + def setDebugInfo(self, *args): + """Add the given items to the debug information of this connection.""" + self._debug_info = self._debug_info + args + + def getTransferCounts(self, clear=False): + """Returns the number of objects loaded and stored.""" + res = self._load_count, self._store_count + if clear: + self._load_count = 0 + self._store_count = 0 + return res + + # Connection methods + ########################################################################## + + ########################################################################## + # Data manager (ISavepointDataManager) methods + + def abort(self, transaction): + """Abort a transaction and forget all changes.""" + # The order is important here. We want to abort registered + # objects before we process the cache. Otherwise, we may un-add + # objects added in savepoints. If they've been modified since + # the savepoint, then they won't have _p_oid or _p_jar after + # they've been unadded. This will make the code in _abort + # confused. + self._abort() + + if self._savepoint_storage is not None: + self._abort_savepoint() + + self._invalidate_creating() + self._tpc_cleanup() + + def _abort(self): + """Abort a transaction and forget all changes.""" + + for obj in self._registered_objects: + oid = obj._p_oid + assert oid is not None + if oid in self._added: + del self._added[oid] + if self._cache.get(oid) is not None: + del self._cache[oid] + del obj._p_jar + del obj._p_oid + if obj._p_changed: + obj._p_changed = False + else: + # Note: If we invalidate a non-ghostifiable object + # (i.e. a persistent class), the object will + # immediately reread its state. That means that the + # following call could result in a call to + # self.setstate, which, of course, must succeed. + # In general, it would be better if the read could be + # delayed until the start of the next transaction. 
If + # we read at the end of a transaction and if the + # object was invalidated during this transaction, then + # we'll read non-current data, which we'll discard + # later in transaction finalization. Unfortnately, we + # can only delay the read if this abort corresponds to + # a top-level-transaction abort. We can't tell if + # this is a top-level-transaction abort, so we have to + # go ahead and invalidate now. Fortunately, it's + # pretty unlikely that the object we are invalidating + # was invalidated by another thread, so the risk of a + # reread is pretty low. + + self._cache.invalidate(oid) + + def _tpc_cleanup(self): + """Performs cleanup operations to support tpc_finish and tpc_abort.""" + self._conflicts.clear() + self._needs_to_join = True + self._registered_objects = [] + self._creating.clear() + + def tpc_begin(self, transaction): + """Begin commit of a transaction, starting the two-phase commit.""" + self._modified = [] + meta_data = TransactionMetaData( + transaction.user, + transaction.description, + transaction.extension) + transaction.set_data(self, meta_data) + + # _creating is a list of oids of new objects, which is used to + # remove them from the cache if a transaction aborts. + self._creating.clear() + self._normal_storage.tpc_begin(meta_data) + + def commit(self, transaction): + """Commit changes to an object""" + transaction = transaction.data(self) + + if self._savepoint_storage is not None: + + # We first checkpoint the current changes to the savepoint + self.savepoint() + + # then commit all of the savepoint changes at once + self._commit_savepoint(transaction) + + # No need to call _commit since savepoint did. 
+ + else: + self._commit(transaction) + + for oid, serial in six.iteritems(self._readCurrent): + try: + self._storage.checkCurrentSerialInTransaction( + oid, serial, transaction) + except ConflictError: + self._cache.invalidate(oid) + raise + + def _commit(self, transaction): + """Commit changes to an object""" + + if self.before is not None: + raise ReadOnlyHistoryError() + + if self._import: + # We are importing an export file. We alsways do this + # while making a savepoint so we can copy export data + # directly to our storage, typically a TmpStore. + self._importDuringCommit(transaction, *self._import) + self._import = None + + # Just in case an object is added as a side-effect of storing + # a modified object. If, for example, a __getstate__() method + # calls add(), the newly added objects will show up in + # _added_during_commit. This sounds insane, but has actually + # happened. + + self._added_during_commit = [] + + for obj in self._registered_objects: + oid = obj._p_oid + assert oid + if oid in self._conflicts: + raise ReadConflictError(object=obj) + + if obj._p_jar is not self: + raise InvalidObjectReference(obj, obj._p_jar) + elif oid in self._added: + assert obj._p_serial == z64 + elif oid in self._creating or not obj._p_changed: + # Nothing to do. It's been said that it's legal, e.g., for + # an object to set _p_changed to false after it's been + # changed and registered. + # And new objects that are registered after any referrer are + # already processed. 
+ continue + + self._store_objects(ObjectWriter(obj), transaction) + + for obj in self._added_during_commit: + self._store_objects(ObjectWriter(obj), transaction) + self._added_during_commit = None + + def _store_objects(self, writer, transaction): + for obj in writer: + oid = obj._p_oid + serial = getattr(obj, "_p_serial", z64) + + if ((serial == z64) + and + ((self._savepoint_storage is None) + or (oid not in self._savepoint_storage.creating) + or self._savepoint_storage.creating[oid] + ) + ): + + # obj is a new object + + # Because obj was added, it is now in _creating, so it + # can be removed from _added. If oid wasn't in + # adding, then we are adding it implicitly. + + implicitly_adding = self._added.pop(oid, None) is None + + self._creating[oid] = implicitly_adding + + else: + self._modified.append(oid) + + p = writer.serialize(obj) # This calls __getstate__ of obj + if len(p) >= self.large_record_size: + warnings.warn(large_object_message % (obj.__class__, len(p))) + + if isinstance(obj, Blob): + if not IBlobStorage.providedBy(self._storage): + raise Unsupported( + "Storing Blobs in %s is not supported." 
% + repr(self._storage)) + if obj.opened(): + raise ValueError("Can't commit with opened blobs.") + blobfilename = obj._uncommitted() + if blobfilename is None: + assert serial is not None # See _uncommitted + self._modified.pop() # not modified + continue + s = self._storage.storeBlob(oid, serial, p, blobfilename, + '', transaction) + # we invalidate the object here in order to ensure + # that that the next attribute access of its name + # unghostify it, which will cause its blob data + # to be reattached "cleanly" + obj._p_invalidate() + else: + s = self._storage.store(oid, serial, p, '', transaction) + + self._store_count += 1 + # Put the object in the cache before handling the + # response, just in case the response contains the + # serial number for a newly created object + try: + self._cache[oid] = obj + except: + # Dang, I bet it's wrapped: + # TODO: Deprecate, then remove, this. + if hasattr(obj, 'aq_base'): + self._cache[oid] = obj.aq_base + else: + raise + + self._cache.update_object_size_estimation(oid, len(p)) + obj._p_estimated_size = len(p) + + # if we write an object, we don't want to check if it was read + # while current. This is a convenient choke point to do this. + self._readCurrent.pop(oid, None) + if s: + # savepoint + obj._p_changed = 0 # transition from changed to up-to-date + obj._p_serial = s + + def tpc_abort(self, transaction): + transaction = transaction.data(self) + + if self._import: + self._import = None + + if self._savepoint_storage is not None: + self._abort_savepoint() + + self._storage.tpc_abort(transaction) + + # Note: If we invalidate a non-ghostifiable object (i.e. a + # persistent class), the object will immediately reread its + # state. That means that the following call could result in a + # call to self.setstate, which, of course, must succeed. In + # general, it would be better if the read could be delayed + # until the start of the next transaction. 
If we read at the + # end of a transaction and if the object was invalidated + # during this transaction, then we'll read non-current data, + # which we'll discard later in transaction finalization. We + # could, theoretically queue this invalidation by calling + # self.invalidate. Unfortunately, attempts to make that + # change resulted in mysterious test failures. It's pretty + # unlikely that the object we are invalidating was invalidated + # by another thread, so the risk of a reread is pretty low. + # It's really not worth the effort to pursue this. + + self._cache.invalidate(self._modified) + self._invalidate_creating() + while self._added: + oid, obj = self._added.popitem() + if obj._p_changed: + obj._p_changed = False + del obj._p_oid + del obj._p_jar + self._tpc_cleanup() + + def _invalidate_creating(self, creating=None): + """Disown any objects newly saved in an uncommitted transaction.""" + if creating is None: + creating = self._creating + self._creating = {} + + for oid in creating: + o = self._cache.get(oid) + if o is not None: + del self._cache[oid] + if o._p_changed: + o._p_changed = False + del o._p_jar + del o._p_oid + + + def tpc_vote(self, transaction): + """Verify that a data manager can commit the transaction.""" + try: + vote = self._storage.tpc_vote + except AttributeError: + return + + transaction = transaction.data(self) + + try: + s = vote(transaction) + except ReadConflictError as v: + if v.oid: + self._cache.invalidate(v.oid) + raise + if s: + # Resolved conflicts. + for oid in s: + obj = self._cache.get(oid) + if obj is not None: + del obj._p_changed # transition from changed to ghost + + def tpc_finish(self, transaction): + """Indicate confirmation that the transaction is done. 
+ """ + transaction = transaction.data(self) + + serial = self._storage.tpc_finish(transaction) + assert type(serial) is bytes, repr(serial) + for oid_iterator in self._modified, self._creating: + for oid in oid_iterator: + obj = self._cache.get(oid) + # Ignore missing objects and don't update ghosts. + if obj is not None and obj._p_changed is not None: + obj._p_changed = 0 + obj._p_serial = serial + self._tpc_cleanup() + + def sortKey(self): + """Return a consistent sort key for this connection.""" + return "%s:%s" % (self._storage.sortKey(), id(self)) + + # Data manager (ISavepointDataManager) methods + ########################################################################## + + ########################################################################## + # Transaction-manager synchronization -- ISynchronizer + + def beforeCompletion(self, txn): + # We don't do anything before a commit starts. + pass + + def newTransaction(self, transaction, sync=True): + self._readCurrent.clear() + self._storage.sync(sync) + invalidated = self._storage.poll_invalidations() + if invalidated is None: + # special value: the transaction is so old that + # we need to flush the whole cache. + invalidated = self._cache.cache_data.copy() + self._cache.invalidate(invalidated) + + def afterCompletion(self, transaction): + # Note that we we call newTransaction here for 2 reasons: + # a) Applying invalidations early frees up resources + # early. This is especially useful if the connection isn't + # going to be used in a while. + # b) Non-hygienic applications might start new transactions by + # finalizing previous ones without calling begin. We pass + # False to avoid possiblyt expensive sync calls to not + # penalize well-behaved applications that call begin. + if hasattr(self._storage, 'afterCompletion'): + self._storage.afterCompletion() + + if not self.explicit_transactions: + self.newTransaction(transaction, False) + + # Now is a good time to collect some garbage. 
+ self._cache.incrgc() + + # Transaction-manager synchronization -- ISynchronizer + ########################################################################## + + ########################################################################## + # persistent.interfaces.IPersistentDatamanager + + def oldstate(self, obj, tid): + """Return copy of 'obj' that was written by transaction 'tid'.""" + assert obj._p_jar is self + p = self._storage.loadSerial(obj._p_oid, tid) + return self._reader.getState(p) + + def setstate(self, obj): + """Load the state for an (ghost) object + """ + + oid = obj._p_oid + + if self.opened is None: + msg = ("Shouldn't load state for %s %s " + "when the connection is closed" + % (className(obj), oid_repr(oid))) + try: + raise ConnectionStateError(msg) + except: + self._log.exception(msg) + raise + + try: + p, serial = self._storage.load(oid) + + self._load_count += 1 + + self._reader.setGhostState(obj, p) + obj._p_serial = serial + self._cache.update_object_size_estimation(oid, len(p)) + obj._p_estimated_size = len(p) + + # Blob support + if isinstance(obj, Blob): + obj._p_blob_uncommitted = None + obj._p_blob_committed = self._storage.loadBlob(oid, serial) + + except ConflictError: + raise + except: + self._log.exception("Couldn't load state for %s %s", + className(obj), oid_repr(oid)) + raise + + def register(self, obj): + """Register obj with the current transaction manager. + + A subclass could override this method to customize the default + policy of one transaction manager for each thread. + + obj must be an object loaded from this Connection. + """ + assert obj._p_jar is self + if obj._p_oid is None: + # The actual complaint here is that an object without + # an oid is being registered. I can't think of any way to + # achieve that without assignment to _p_jar. If there is + # a way, this will be a very confusing exception. 
+ raise ValueError("assigning to _p_jar is not supported") + elif obj._p_oid in self._added: + # It was registered before it was added to _added. + return + self._register(obj) + + def _register(self, obj=None): + + # The order here is important. We need to join before + # registering the object, because joining may take a + # savepoint, and the savepoint should not reflect the change + # to the object. + + if self._needs_to_join: + self.transaction_manager.get().join(self) + self._needs_to_join = False + + if obj is not None: + self._registered_objects.append(obj) + + def readCurrent(self, ob): + assert ob._p_jar is self + assert ob._p_oid is not None and ob._p_serial is not None + if ob._p_serial != z64: + self._readCurrent[ob._p_oid] = ob._p_serial + + # persistent.interfaces.IPersistentDatamanager + ########################################################################## + + ########################################################################## + # PROTECTED stuff (used by e.g. ZODB.DB.DB) + + def _cache_items(self): + # find all items on the lru list + items = self._cache.lru_items() + # fine everything. some on the lru list, some not + everything = self._cache.cache_data + # remove those items that are on the lru list + for k,v in items: + del everything[k] + # return a list of [ghosts....not recently used.....recently used] + return list(everything.items()) + items + + def open(self, transaction_manager=None, delegate=True): + """Register odb, the DB that this Connection uses. + + This method is called by the DB every time a Connection + is opened. Any invalidations received while the Connection + was closed will be processed. + + If the global module function resetCaches() was called, the + cache will be cleared. + + Parameters: + odb: database that owns the Connection + transaction_manager: transaction manager to use. None means + use the default transaction manager. + register for afterCompletion() calls. 
+ """ + + if transaction_manager is None: + transaction_manager = transaction.manager + + self.transaction_manager = transaction_manager + + self.explicit_transactions = getattr(transaction_manager, + 'explicit', False) + + self.opened = time.time() + + if self._reset_counter != global_reset_counter: + # New code is in place. Start a new cache. + self._resetCache() + + if not self.explicit_transactions: + # This newTransaction is to deal with some pathalogical cases: + # + # a) Someone opens a connection when a transaction isn't + # active and proceeeds without calling begin on a + # transaction manager. We initialize the transaction for + # the connection, but we don't do a storage sync, since + # this will be done if a well-nehaved application calls + # begin, and we don't want to penalize well-behaved + # transactions by syncing twice, as storage syncs might be + # expensive. + # b) Lots of tests assume that connection transaction + # information is set on open. + # + # Fortunately, this is a cheap operation. It doesn't + # really cost much, if anything. Well, except for + # RelStorage, in which case it adds a server round + # trip. + self.newTransaction(None, False) + + transaction_manager.registerSynch(self) + + self._cache.incrgc() # This is a good time to do some GC + + if delegate: + # delegate open to secondary connections + for connection in self.connections.values(): + if connection is not self: + connection.open(transaction_manager, False) + + def _resetCache(self): + """Creates a new cache, discarding the old one. + + See the docstring for the resetCaches() function. 
+ """ + self._reset_counter = global_reset_counter + cache_size = self._cache.cache_size + cache_size_bytes = self._cache.cache_size_bytes + self._cache = cache = PickleCache(self, cache_size, cache_size_bytes) + if getattr(self, '_reader', None) is not None: + self._reader._cache = cache + + def _release_resources(self): + for c in six.itervalues(self.connections): + if c._storage is not None: + c._storage.release() + c._storage = c._normal_storage = None + c._cache = PickleCache(self, 0, 0) + c.close(False) + + ########################################################################## + # Python protocol + + def __repr__(self): + return '' % (positive_id(self),) + + # Python protocol + ########################################################################## + + ########################################################################## + # DEPRECATION candidates + + __getitem__ = get + + def exchange(self, old, new): + # called by a ZClasses method that isn't executed by the test suite + oid = old._p_oid + new._p_oid = oid + new._p_jar = self + new._p_changed = 1 + self._register(new) + self._cache[oid] = new + + # DEPRECATION candidates + ########################################################################## + + ########################################################################## + # DEPRECATED methods + + # None at present. 
+ + # DEPRECATED methods + ########################################################################## + + ##################################################################### + # Savepoint support + + def savepoint(self): + if self._savepoint_storage is None: + tmpstore = TmpStore(self._normal_storage) + self._savepoint_storage = tmpstore + self._storage = self._savepoint_storage + + self._creating.clear() + self._commit(None) + self._storage.creating.update(self._creating) + self._creating.clear() + self._registered_objects = [] + + state = (self._storage.position, + self._storage.index.copy(), + self._storage.creating.copy(), + ) + result = Savepoint(self, state) + # While the interface doesn't guarantee this, savepoints are + # sometimes used just to "break up" very long transactions, and as + # a pragmatic matter this is a good time to reduce the cache + # memory burden. + self.cacheGC() + return result + + def _rollback_savepoint(self, state): + self._abort() + self._registered_objects = [] + src = self._storage + + # Invalidate objects created *after* the savepoint. 
+ self._invalidate_creating((oid for oid in src.creating + if oid not in state[2])) + index = src.index + src.reset(*state) + self._cache.invalidate(index) + + def _commit_savepoint(self, transaction): + """Commit all changes made in savepoints and begin 2-phase commit + """ + src = self._savepoint_storage + self._storage = self._normal_storage + self._savepoint_storage = None + try: + self._log.debug("Committing savepoints of size %s", src.getSize()) + oids = sorted(src.index.keys()) + + # Copy invalidating and creating info from temporary storage: + self._modified.extend(oids) + self._creating.update(src.creating) + + for oid in oids: + data, serial = src.load(oid) + obj = self._cache.get(oid, None) + if obj is not None: + self._cache.update_object_size_estimation( + obj._p_oid, len(data)) + obj._p_estimated_size = len(data) + if isinstance(self._reader.getGhost(data), Blob): + blobfilename = src.loadBlob(oid, serial) + self._storage.storeBlob( + oid, serial, data, blobfilename, + '', transaction) + # we invalidate the object here in order to ensure + # that that the next attribute access of its name + # unghostify it, which will cause its blob data + # to be reattached "cleanly" + self._cache.invalidate(oid) + else: + self._storage.store(oid, serial, data, '', transaction) + + self._readCurrent.pop(oid, None) # same as in _store_objects() + finally: + src.close() + + def _abort_savepoint(self): + """Discard all savepoint data.""" + src = self._savepoint_storage + self._invalidate_creating(src.creating) + self._storage = self._normal_storage + self._savepoint_storage = None + + # Note: If we invalidate a non-ghostifiable object (i.e. a + # persistent class), the object will immediately reread it's + # state. That means that the following call could result in a + # call to self.setstate, which, of course, must succeed. In + # general, it would be better if the read could be delayed + # until the start of the next transaction. 
If we read at the + # end of a transaction and if the object was invalidated + # during this transaction, then we'll read non-current data, + # which we'll discard later in transaction finalization. We + # could, theoretically queue this invalidation by calling + # self.invalidate. Unfortunately, attempts to make that + # change resulted in mysterious test failures. It's pretty + # unlikely that the object we are invalidating was invalidated + # by another thread, so the risk of a reread is pretty low. + # It's really not worth the effort to pursue this. + + # Note that we do this *after* reseting the storage so that, if + # data are read, we read it from the reset storage! + + self._cache.invalidate(src.index) + + src.close() + + # Savepoint support + ##################################################################### + + def prefetch(self, *args): + try: + self._storage.prefetch(self._prefetch_flatten(args)) + except AttributeError: + if not hasattr(self._storage, 'prefetch'): + self.prefetch = lambda *a: None + else: + raise + + def _prefetch_flatten(self, args): + for arg in args: + if isinstance(arg, bytes): + yield arg + elif hasattr(arg, '_p_oid'): + yield arg._p_oid + else: + for ob in arg: + if isinstance(ob, bytes): + yield ob + else: + yield ob._p_oid + +@implementer(IDataManagerSavepoint) +class Savepoint(object): + + def __init__(self, datamanager, state): + self.datamanager = datamanager + self.state = state + + def rollback(self): + self.datamanager._rollback_savepoint(self.state) + + +@implementer(IBlobStorage) +class TmpStore(object): + """A storage-like thing to support savepoints.""" + + + def __init__(self, storage): + self._storage = storage + for method in ( + 'getName', 'new_oid', 'getSize', 'sortKey', + 'isReadOnly' + ): + setattr(self, method, getattr(storage, method)) + + self._file = tempfile.TemporaryFile(prefix='TmpStore') + # position: current file position + # _tpos: file position at last commit point + self.position = 0 + # index: 
map oid to pos of last committed version + self.index = {} + self.creating = {} + + self._blob_dir = None + + def __len__(self): + return len(self.index) + + def close(self): + self._file.close() + if self._blob_dir is not None: + remove_committed_dir(self._blob_dir) + self._blob_dir = None + + def load(self, oid, version=''): + pos = self.index.get(oid) + if pos is None: + return self._storage.load(oid) + self._file.seek(pos) + h = self._file.read(8) + oidlen = u64(h) + read_oid = self._file.read(oidlen) + if read_oid != oid: + raise POSException.StorageSystemError('Bad temporary storage') + h = self._file.read(16) + size = u64(h[8:]) + serial = h[:8] + return self._file.read(size), serial + + def store(self, oid, serial, data, version, transaction): + # we have this funny signature so we can reuse the normal non-commit + # commit logic + assert version == '' + self._file.seek(self.position) + l = len(data) + if serial is None: + serial = z64 + header = p64(len(oid)) + oid + serial + p64(l) + self._file.write(header) + self._file.write(data) + self.index[oid] = self.position + self.position += l + len(header) + return serial + + def storeBlob(self, oid, serial, data, blobfilename, version, + transaction): + assert version == '' + serial = self.store(oid, serial, data, '', transaction) + + targetpath = self._getBlobPath() + if not os.path.exists(targetpath): + os.makedirs(targetpath) + + targetname = self._getCleanFilename(oid, serial) + rename_or_copy_blob(blobfilename, targetname, chmod=False) + + def loadBlob(self, oid, serial): + """Return the filename where the blob file can be found. + """ + if not IBlobStorage.providedBy(self._storage): + raise Unsupported( + "Blobs are not supported by the underlying storage %r." 
% + self._storage) + filename = self._getCleanFilename(oid, serial) + if not os.path.exists(filename): + return self._storage.loadBlob(oid, serial) + return filename + + def openCommittedBlobFile(self, oid, serial, blob=None): + blob_filename = self.loadBlob(oid, serial) + if blob is None: + return open(blob_filename, 'rb') + else: + return ZODB.blob.BlobFile(blob_filename, 'r', blob) + + def _getBlobPath(self): + blob_dir = self._blob_dir + if blob_dir is None: + blob_dir = tempfile.mkdtemp(dir=self.temporaryDirectory(), + prefix='savepoints') + self._blob_dir = blob_dir + return blob_dir + + def _getCleanFilename(self, oid, tid): + return os.path.join( + self._getBlobPath(), + "%s-%s%s" % (utils.oid_repr(oid), utils.tid_repr(tid), + SAVEPOINT_SUFFIX,) + ) + + def temporaryDirectory(self): + return self._storage.temporaryDirectory() + + def reset(self, position, index, creating): + self._file.truncate(position) + self.position = position + # Caution: We're typically called as part of a savepoint rollback. + # Other machinery remembers the index to restore, and passes it to + # us. If we simply bind self.index to `index`, then if the caller + # didn't pass a copy of the index, the caller's index will mutate + # when self.index mutates. This can be a disaster if the caller is a + # savepoint to which the user rolls back again later (the savepoint + # loses the original index it passed). Therefore, to be safe, we make + # a copy of the index here. An alternative would be to ensure that + # all callers pass copies. As is, our callers do not make copies. 
+ self.index = index.copy() + self.creating = creating + + +class RootConvenience(object): + + def __init__(self, root): + self.__dict__['_root'] = root + + def __getattr__(self, name): + try: + return self._root[name] + except KeyError: + raise AttributeError(name) + + def __setattr__(self, name, v): + self._root[name] = v + + def __delattr__(self, name): + try: + del self._root[name] + except KeyError: + raise AttributeError(name) + + def __call__(self): + return self._root + + def __repr__(self): + names = " ".join(sorted(self._root)) + if len(names) > 60: + names = names[:57].rsplit(' ', 1)[0] + ' ...' + return "" % names + +large_object_message = """The %s +object you're saving is large. (%s bytes.) + +Perhaps you're storing media which should be stored in blobs. + +Perhaps you're using a non-scalable data structure, such as a +PersistentMapping or PersistentList. + +Perhaps you're storing data in objects that aren't persistent at +all. In cases like that, the data is stored in the record of the +containing persistent object. + +In any case, storing records this big is probably a bad idea. + +If you insist and want to get rid of this warning, use the +large_record_size option of the ZODB.DB constructor (or the +large-record-size option in a configuration file) to specify a larger +size. 
+""" + +@implementer(IStorageTransactionMetaData) +class TransactionMetaData(object): + + def __init__(self, user=u'', description=u'', extension=b''): + if not isinstance(user, bytes): + user = user.encode('utf-8') + self.user = user + + if not isinstance(description, bytes): + description = description.encode('utf-8') + self.description = description + + if not isinstance(extension, dict): + extension = _compat.loads(extension) if extension else {} + self.extension = extension + + def note(self, text): # for tests + text = text.strip() + if not isinstance(text, bytes): + text = text.encode('utf-8') + if self.description: + self.description = self.description.strip() + b' ' + text + else: + self.description = text + + @property + def _extension(self): + warnings.warn("_extension is deprecated, use extension", + DeprecationWarning, stacklevel=2) + return self.extension + + @_extension.setter + def _extension(self, v): + self.extension = v + + def data(self, ob): + try: + return self._data[id(ob)] + except (AttributeError, KeyError): + raise KeyError(ob) + + def set_data(self, ob, ob_data): + try: + data = self._data + except AttributeError: + data = self._data = {} + + data[id(ob)] = ob_data diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/DB.py b/thesisenv/lib/python3.6/site-packages/ZODB/DB.py new file mode 100644 index 0000000..6a5af7c --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/DB.py @@ -0,0 +1,1111 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## +"""Database objects +""" +from __future__ import print_function +import sys +import logging +import datetime +import time +import warnings + +from . import utils + +from ZODB.broken import find_global +from ZODB.utils import z64 +from ZODB.Connection import Connection, TransactionMetaData, noop +from ZODB._compat import Pickler, _protocol, BytesIO +import ZODB.serialize + +import transaction.weakset + +from zope.interface import implementer +from ZODB.interfaces import IDatabase +from ZODB.interfaces import IMVCCStorage + +import transaction + +from persistent.TimeStamp import TimeStamp +import six + +from . import POSException, valuedoc + +logger = logging.getLogger('ZODB.DB') + +class AbstractConnectionPool(object): + """Manage a pool of connections. + + CAUTION: Methods should be called under the protection of a lock. + This class does no locking of its own. + + There's no limit on the number of connections this can keep track of, + but a warning is logged if there are more than pool_size active + connections, and a critical problem if more than twice pool_size. + + New connections are registered via push(). This will log a message if + "too many" connections are active. + + When a connection is explicitly closed, tell the pool via repush(). + That adds the connection to a stack of connections available for + reuse, and throws away the oldest stack entries if the stack is too large. + pop() pops this stack. + + When a connection is obtained via pop(), the pool holds only a weak + reference to it thereafter. It's not necessary to inform the pool + if the connection goes away. 
A connection handed out by pop() counts + against pool_size only so long as it exists, and provided it isn't + repush()'ed. A weak reference is retained so that DB methods like + connectionDebugInfo() can still gather statistics. + """ + + def __init__(self, size, timeout): + # The largest # of connections we expect to see alive simultaneously. + self._size = size + + # The minimum number of seconds that an available connection should + # be kept, or None. + self._timeout = timeout + + # A weak set of all connections we've seen. A connection vanishes + # from this set if pop() hands it out, it's not reregistered via + # repush(), and it becomes unreachable. + self.all = transaction.weakset.WeakSet() + + def setSize(self, size): + """Change our belief about the expected maximum # of live connections. + + If the pool_size is smaller than the current value, this may discard + the oldest available connections. + """ + self._size = size + self._reduce_size() + + def setTimeout(self, timeout): + old = self._timeout + self._timeout = timeout + if timeout < old: + self._reduce_size() + + def getSize(self): + return self._size + + def getTimeout(self): + return self._timeout + + timeout = property(getTimeout, lambda self, v: self.setTimeout(v)) + + size = property(getSize, lambda self, v: self.setSize(v)) + + def clear(self): + pass + + +class ConnectionPool(AbstractConnectionPool): + + def __init__(self, size, timeout=1<<31): + super(ConnectionPool, self).__init__(size, timeout) + + # A stack of connections available to hand out. This is a subset + # of self.all. push() and repush() add to this, and may remove + # the oldest available connections if the pool is too large. + # pop() pops this stack. There are never more than size entries + # in this stack. 
+ self.available = [] + + def _append(self, c): + available = self.available + cactive = c._cache.cache_non_ghost_count + if (available and + (available[-1][1]._cache.cache_non_ghost_count > cactive) + ): + i = len(available) - 1 + while (i and + (available[i-1][1]._cache.cache_non_ghost_count > cactive) + ): + i -= 1 + available.insert(i, (time.time(), c)) + else: + available.append((time.time(), c)) + + def push(self, c): + """Register a new available connection. + + We must not know about c already. c will be pushed onto the available + stack even if we're over the pool size limit. + """ + assert c not in self.all + assert c not in self.available + self._reduce_size(strictly_less=True) + self.all.add(c) + self._append(c) + n = len(self.all) + limit = self.size + if n > limit: + reporter = logger.warning + if n > 2 * limit: + reporter = logger.critical + reporter("DB.open() has %s open connections with a pool_size " + "of %s", n, limit) + + def repush(self, c): + """Reregister an available connection formerly obtained via pop(). + + This pushes it on the stack of available connections, and may discard + older available connections. + """ + assert c in self.all + assert c not in self.available + self._reduce_size(strictly_less=True) + self._append(c) + + def _reduce_size(self, strictly_less=False): + """Throw away the oldest available connections until we're under our + target size (strictly_less=False, the default) or no more than that + (strictly_less=True). + """ + threshhold = time.time() - self.timeout + target = self.size + if strictly_less: + target -= 1 + + available = self.available + while ( + (len(available) > target) + or + (available and available[0][0] < threshhold) + ): + t, c = available.pop(0) + assert not c.opened + self.all.remove(c) + c._release_resources() + + def reduce_size(self): + self._reduce_size() + + def pop(self): + """Pop an available connection and return it. 
+ + Return None if none are available - in this case, the caller should + create a new connection, register it via push(), and call pop() again. + The caller is responsible for serializing this sequence. + """ + result = None + if self.available: + _, result = self.available.pop() + # Leave it in self.all, so we can still get at it for statistics + # while it's alive. + assert result in self.all + return result + + def map(self, f): + """For every live connection c, invoke f(c).""" + self.all.map(f) + + def availableGC(self): + """Perform garbage collection on available connections. + + If a connection is no longer viable because it has timed out, it is + garbage collected. + """ + threshhold = time.time() - self.timeout + + to_remove = () + for (t, c) in self.available: + assert not c.opened + if t < threshhold: + to_remove += (c,) + self.all.remove(c) + c._release_resources() + else: + c.cacheGC() + + if to_remove: + self.available[:] = [i for i in self.available + if i[1] not in to_remove] + + def clear(self): + while self.pop(): + pass + + +class KeyedConnectionPool(AbstractConnectionPool): + # this pool keeps track of keyed connections all together. It makes + # it possible to make assertions about total numbers of keyed connections. + # The keys in this case are "before" TIDs, but this is used by other + # packages as well. + + # see the comments in ConnectionPool for method descriptions. 
+ + def __init__(self, size, timeout=1<<31): + super(KeyedConnectionPool, self).__init__(size, timeout) + self.pools = {} + + def setSize(self, v): + self._size = v + for pool in self.pools.values(): + pool.setSize(v) + + def setTimeout(self, v): + self._timeout = v + for pool in self.pools.values(): + pool.setTimeout(v) + + def push(self, c, key): + pool = self.pools.get(key) + if pool is None: + pool = self.pools[key] = ConnectionPool(self.size, self.timeout) + pool.push(c) + + def repush(self, c, key): + self.pools[key].repush(c) + + def _reduce_size(self, strictly_less=False): + for key, pool in list(self.pools.items()): + pool._reduce_size(strictly_less) + if not pool.all: + del self.pools[key] + + def reduce_size(self): + self._reduce_size() + + def pop(self, key): + pool = self.pools.get(key) + if pool is not None: + return pool.pop() + + def map(self, f): + for pool in six.itervalues(self.pools): + pool.map(f) + + def availableGC(self): + for key, pool in list(self.pools.items()): + pool.availableGC() + if not pool.all: + del self.pools[key] + + def clear(self): + for pool in self.pools.values(): + pool.clear() + self.pools.clear() + + @property + def test_all(self): + result = set() + for pool in six.itervalues(self.pools): + result.update(pool.all) + return frozenset(result) + + @property + def test_available(self): + result = [] + for pool in six.itervalues(self.pools): + result.extend(pool.available) + return tuple(result) + + +def toTimeStamp(dt): + utc_struct = dt.utctimetuple() + # if this is a leapsecond, this will probably fail. That may be a good + # thing: leapseconds are not really accounted for with serials. 
+ args = utc_struct[:5]+(utc_struct[5] + dt.microsecond/1000000.0,) + return TimeStamp(*args) + +def getTID(at, before): + if at is not None: + if before is not None: + raise ValueError('can only pass zero or one of `at` and `before`') + if isinstance(at, datetime.datetime): + at = toTimeStamp(at) + else: + at = TimeStamp(at) + before = at.laterThan(at).raw() + elif before is not None: + if isinstance(before, datetime.datetime): + before = toTimeStamp(before).raw() + else: + before = TimeStamp(before).raw() + return before + +@implementer(IDatabase) +class DB(object): + """The Object Database + + The DB class coordinates the activities of multiple database + Connection instances. Most of the work is done by the + Connections created via the open method. + + The DB instance manages a pool of connections. If a connection is + closed, it is returned to the pool and its object cache is + preserved. A subsequent call to open() will reuse the connection. + There is no hard limit on the pool size. If more than `pool_size` + connections are opened, a warning is logged, and if more than twice + that many, a critical problem is logged. + + The database provides a few methods intended for application code + -- open, close, undo, and pack -- and a large collection of + methods for inspecting the database and its connections' caches. + """ + + klass = Connection # Class to use for connections + _activity_monitor = next = previous = None + + #: Database storage, implementing :interface:`~ZODB.interfaces.IStorage` + storage = valuedoc.ValueDoc('storage object') + + def __init__(self, + storage, + pool_size=7, + pool_timeout=1<<31, + cache_size=400, + cache_size_bytes=0, + historical_pool_size=3, + historical_cache_size=1000, + historical_cache_size_bytes=0, + historical_timeout=300, + database_name='unnamed', + databases=None, + xrefs=True, + large_record_size=1<<24, + **storage_args): + """Create an object database. 
+ + :param storage: the storage used by the database, such as a + :class:`~ZODB.FileStorage.FileStorage.FileStorage`. + This can be a string path name to use a constructed + :class:`~ZODB.FileStorage.FileStorage.FileStorage` + storage or ``None`` to use a constructed + :class:`~ZODB.MappingStorage.MappingStorage`. + :param int pool_size: expected maximum number of open connections. + Warnings are logged when this is exceeded and critical + messages are logged if twice the pool size is exceeded. + :param seconds pool_timeout: Maximum age of inactive connections + When a connection has remained unused in a connection + pool for more than pool_timeout seconds, it will be + discarded and it's resources released. + :param objects cache_size: target maximum number of non-ghost + objects in each connection object cache. + :param int cache_size_bytes: target total memory usage of non-ghost + objects in each connection object cache. + :param int historical_pool_size: expected maximum number of total + historical connections + :param objects historical_cache_size: target maximum number + of non-ghost objects in each historical connection object + cache. + :param int historical_cache_size_bytes: target total memory + usage of non-ghost objects in each historical connection + object cache. + :param seconds historical_timeout: Maximum age of inactive + historical connections. When a connection has remained + unused in a historical connection pool for more than pool_timeout + seconds, it will be discarded and it's resources + released. + :param str database_name: The name of this database in a + multi-database configuration. The name is used when + constructing cross-database references ans when accessing + database connections fron other databases. + :param dict databases: dictionary of database name to + databases in a multi-database configuration. The new + database will add itself to this dictionary. The + dictionary is used when getting connections in other databases. 
+ :param boolean xrefs: Flag indicating whether cross-database + references are allowed from this database to other + databases in a multi-database configuration. + :param int large_record_size: When object records are saved + that are larger than this, a warning is issued, + suggesting that blobs should be used instead. + :param storage_args: Extra keywork arguments passed to a + storage constructor if a path name or None is passed as + the storage argument. + """ + + # Allocate lock. + self._lock = utils.RLock() + + # pools and cache sizes + self.pool = ConnectionPool(pool_size, pool_timeout) + self.historical_pool = KeyedConnectionPool(historical_pool_size, + historical_timeout) + self._cache_size = cache_size + self._cache_size_bytes = cache_size_bytes + self._historical_cache_size = historical_cache_size + self._historical_cache_size_bytes = historical_cache_size_bytes + + # Setup storage + if isinstance(storage, six.string_types): + from ZODB import FileStorage + storage = ZODB.FileStorage.FileStorage(storage, **storage_args) + elif storage is None: + from ZODB import MappingStorage + storage = ZODB.MappingStorage.MappingStorage(**storage_args) + else: + assert not storage_args + + self.storage = storage + + if IMVCCStorage.providedBy(storage): + self._mvcc_storage = storage + else: + from .mvccadapter import MVCCAdapter + self._mvcc_storage = MVCCAdapter(storage) + + self.references = ZODB.serialize.referencesf + + if (not hasattr(storage, 'tpc_vote')) and not storage.isReadOnly(): + warnings.warn( + "Storage doesn't have a tpc_vote and this violates " + "the storage API. Violently monkeypatching in a do-nothing " + "tpc_vote.", + DeprecationWarning, 2) + storage.tpc_vote = lambda *args: None + + # Multi-database setup. 
+ if databases is None: + databases = {} + self.databases = databases + self.database_name = database_name + if database_name in databases: + raise ValueError("database_name %r already in databases" % + database_name) + databases[database_name] = self + self.xrefs = xrefs + + self.large_record_size = large_record_size + + # Make sure we have a root: + with self.transaction(u'initial database creation') as conn: + try: + conn.get(z64) + except KeyError: + from persistent.mapping import PersistentMapping + root = PersistentMapping() + conn._add(root, z64) + + @property + def _storage(self): # Backward compatibility + return self.storage + + # This is called by Connection.close(). + def _returnToPool(self, connection): + """Return a connection to the pool. + + connection._db must be self on entry. + """ + + with self._lock: + assert connection._db is self + connection.opened = None + + if connection.before: + self.historical_pool.repush(connection, connection.before) + else: + self.pool.repush(connection) + + def _connectionMap(self, f): + """Call f(c) for all connections c in all pools, live and historical. + """ + with self._lock: + self.pool.map(f) + self.historical_pool.map(f) + + def cacheDetail(self): + """Return object counts by class accross all connections. + """ + + detail = {} + def f(con, detail=detail): + for oid, ob in con._cache.items(): + module = getattr(ob.__class__, '__module__', '') + module = module and '%s.' % module or '' + c = "%s%s" % (module, ob.__class__.__name__) + if c in detail: + detail[c] += 1 + else: + detail[c] = 1 + + self._connectionMap(f) + return sorted(detail.items()) + + def cacheExtremeDetail(self): + """Return information about all of the objects in the object caches. + + Information includes a connection number, class, object id, + reference count and state. The reference count returned + excludes references help by ZODB itself. 
+ """ + detail = [] + conn_no = [0] # A mutable reference to a counter + # sys.getrefcount is a CPython implementation detail + # not required to exist on, e.g., PyPy. + rc = getattr(sys, 'getrefcount', None) + + def f(con, detail=detail, rc=rc, conn_no=conn_no): + conn_no[0] += 1 + cn = conn_no[0] + for oid, ob in con._cache_items(): + id = '' + if hasattr(ob, '__dict__'): + d = ob.__dict__ + if 'id' in d: + id = d['id'] + elif '__name__' in d: + id = d['__name__'] + + module = getattr(ob.__class__, '__module__', '') + module = module and ('%s.' % module) or '' + + # What refcount ('rc') should we return? The intent is + # that we return the true Python refcount, but as if the + # cache didn't exist. This routine adds 3 to the true + # refcount: 1 for binding to name 'ob', another because + # ob lives in the con._cache_items() list we're iterating + # over, and calling sys.getrefcount(ob) boosts ob's + # count by 1 too. So the true refcount is 3 less than + # sys.getrefcount(ob) returns. But, in addition to that, + # the cache holds an extra reference on non-ghost objects, + # and we also want to pretend that doesn't exist. + # If we have no way to get a refcount, we return False + # to symbolize that. As opposed to None, this has the + # advantage of being usable as a number (0) in case + # clients depended on that. 
+ detail.append({ + 'conn_no': cn, + 'oid': oid, + 'id': id, + 'klass': "%s%s" % (module, ob.__class__.__name__), + 'rc': (rc(ob) - 3 - (ob._p_changed is not None) + if rc else False), + 'state': ob._p_changed, + #'references': con.references(oid), + }) + + self._connectionMap(f) + return detail + + def cacheFullSweep(self): # XXX this is the same as cacheMinimize + self._connectionMap(lambda c: c._cache.full_sweep()) + + def cacheLastGCTime(self): + m = [0] + def f(con, m=m): + t = con._cache.cache_last_gc_time + if t > m[0]: + m[0] = t + + self._connectionMap(f) + return m[0] + + def cacheMinimize(self): + """Minimize cache sizes for all connections + """ + self._connectionMap(lambda c: c._cache.minimize()) + + def cacheSize(self): + """Return the total count of non-ghost objects in all object caches + """ + m = [0] + def f(con, m=m): + m[0] += con._cache.cache_non_ghost_count + + self._connectionMap(f) + return m[0] + + def cacheDetailSize(self): + """Return non-ghost counts sizes for all connections. + """ + m = [] + def f(con, m=m): + m.append({'connection': repr(con), + 'ngsize': con._cache.cache_non_ghost_count, + 'size': len(con._cache)}) + self._connectionMap(f) + # Py3: Simulate Python 2 m.sort() functionality. + return sorted( + m, key=lambda x: (x['connection'], x['ngsize'], x['size'])) + + def close(self): + """Close the database and its underlying storage. + + It is important to close the database, because the storage may + flush in-memory data structures to disk when it is closed. + Leaving the storage open with the process exits can cause the + next open to be slow. + + What effect does closing the database have on existing + connections? Technically, they remain open, but their storage + is closed, so they stop behaving usefully. Perhaps close() + should also close all the Connections. 
+ """ + self.close = noop + + @self._connectionMap + def _(conn): + if conn.transaction_manager is not None: + for c in six.itervalues(conn.connections): + # Prevent connections from implicitly starting new + # transactions. + c.explicit_transactions = True + conn.transaction_manager.abort() + conn._release_resources() + + self._mvcc_storage.close() + del self.storage + del self._mvcc_storage + # clean up references to other DBs + self.databases = {} + # clean up the connection pool + self.pool.clear() + self.historical_pool.clear() + + def getCacheSize(self): + """Get the configured cache size (objects). + """ + return self._cache_size + + def getCacheSizeBytes(self): + """Get the configured cache size in bytes. + """ + return self._cache_size_bytes + + def lastTransaction(self): + """Get the storage last transaction id. + """ + return self.storage.lastTransaction() + + def getName(self): + """Get the storage name + """ + return self.storage.getName() + + def getPoolSize(self): + """Get the configured pool size + """ + return self.pool.size + + def getSize(self): + """Get the approximate database size, in bytes + """ + return self.storage.getSize() + + def getHistoricalCacheSize(self): + """Get the configured historical cache size (objects). + """ + return self._historical_cache_size + + def getHistoricalCacheSizeBytes(self): + """Get the configured historical cache size in bytes. 
+ """ + return self._historical_cache_size_bytes + + def getHistoricalPoolSize(self): + """Get the configured historical pool size + """ + return self.historical_pool.size + + def getHistoricalTimeout(self): + """Get the configured historical pool timeout + """ + return self.historical_pool.timeout + + transform_record_data = untransform_record_data = lambda self, data: data + + def objectCount(self): + """Get the approximate object count + """ + return len(self.storage) + + def open(self, transaction_manager=None, at=None, before=None): + """Return a database Connection for use by application code. + + Note that the connection pool is managed as a stack, to + increase the likelihood that the connection's stack will + include useful objects. + + :Parameters: + - `transaction_manager`: transaction manager to use. None means + use the default transaction manager. + - `at`: a datetime.datetime or 8 character transaction id of the + time to open the database with a read-only connection. Passing + both `at` and `before` raises a ValueError, and passing neither + opens a standard writable transaction of the newest state. + A timezone-naive datetime.datetime is treated as a UTC value. + - `before`: like `at`, but opens the readonly state before the + tid or datetime. + """ + # `at` is normalized to `before`, since we use storage.loadBefore + # as the underlying implementation of both. 
+ before = getTID(at, before) + if (before is not None and + before > self.lastTransaction() and + before > getTID(self.lastTransaction(), None)): + raise ValueError( + 'cannot open an historical connection in the future.') + + if isinstance(transaction_manager, six.string_types): + if transaction_manager: + raise TypeError("Versions aren't supported.") + warnings.warn( + "A version string was passed to open.\n" + "The first argument is a transaction manager.", + DeprecationWarning, 2) + transaction_manager = None + + with self._lock: + # result <- a connection + if before is not None: + result = self.historical_pool.pop(before) + if result is None: + c = self.klass(self, + self._historical_cache_size, + before, + self._historical_cache_size_bytes, + ) + self.historical_pool.push(c, before) + result = self.historical_pool.pop(before) + else: + result = self.pool.pop() + if result is None: + c = self.klass(self, + self._cache_size, + None, + self._cache_size_bytes, + ) + self.pool.push(c) + result = self.pool.pop() + assert result is not None + + # A good time to do some cache cleanup. + # (note we already have the lock) + self.pool.availableGC() + self.historical_pool.availableGC() + + + result.open(transaction_manager) + return result + + def connectionDebugInfo(self): + """Get debugging information about connections + + This is especially useful to debug connections that seem to be + leaking or open too long. Information includes connection + info, the connection before setting, and, if a connection is + open, the time it was opened. The info is the result of + calling :meth:`~ZODB.Connection.Connection.getDebugInfo` on + the connection, and the connection's cache size. + """ + result = [] + t = time.time() + + def get_info(c): + # `result`, `time` and `before` are lexically inherited. 
+ o = c.opened
+ d = c.getDebugInfo()
+ if d:
+ if len(d) == 1:
+ d = d[0]
+ else:
+ d = ''
+ d = "%s (%s)" % (d, len(c._cache))
+
+ # output UTC time with the standard Z time zone indicator
+ result.append({
+ 'opened': o and ("%s (%.2fs)" % (
+ time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime(o)),
+ t-o)),
+ 'info': d,
+ 'before': c.before,
+ })
+
+ self._connectionMap(get_info)
+ return result
+
+ def getActivityMonitor(self):
+ return self._activity_monitor
+
+ def pack(self, t=None, days=0):
+ """Pack the storage, deleting unused object revisions.
+
+ A pack is always performed relative to a particular time, by
+ default the current time. All object revisions that are not
+ reachable as of the pack time are deleted from the storage.
+
+ The cost of this operation varies by storage, but it is
+ usually an expensive operation.
+
+ There are two optional arguments that can be used to set the
+ pack time: t, pack time in seconds since the epoch, and days,
+ the number of days to subtract from t or from the current
+ time if t is not specified.
+ """
+ if t is None:
+ t = time.time()
+ t -= days * 86400
+ try:
+ self.storage.pack(t, self.references)
+ except:
+ logger.exception("packing")
+ raise
+
+ def setActivityMonitor(self, am):
+ self._activity_monitor = am
+
+ def classFactory(self, connection, modulename, globalname):
+ # Zope will rebind this method to arbitrary user code at runtime.
+ return find_global(modulename, globalname) + + def setCacheSize(self, size): + """Reconfigure the cache size (non-ghost object count) + """ + with self._lock: + self._cache_size = size + def setsize(c): + c._cache.cache_size = size + self.pool.map(setsize) + + def setCacheSizeBytes(self, size): + """Reconfigure the cache total size in bytes + """ + with self._lock: + self._cache_size_bytes = size + def setsize(c): + c._cache.cache_size_bytes = size + self.pool.map(setsize) + + def setHistoricalCacheSize(self, size): + """Reconfigure the historical cache size (non-ghost object count) + """ + with self._lock: + self._historical_cache_size = size + def setsize(c): + c._cache.cache_size = size + self.historical_pool.map(setsize) + + def setHistoricalCacheSizeBytes(self, size): + """Reconfigure the historical cache total size in bytes + """ + with self._lock: + self._historical_cache_size_bytes = size + def setsize(c): + c._cache.cache_size_bytes = size + self.historical_pool.map(setsize) + + def setPoolSize(self, size): + """Reconfigure the connection pool size + """ + with self._lock: + self.pool.size = size + + def setHistoricalPoolSize(self, size): + """Reconfigure the connection historical pool size + """ + with self._lock: + self.historical_pool.size = size + + def setHistoricalTimeout(self, timeout): + """Reconfigure the connection historical pool timeout + """ + with self._lock: + self.historical_pool.timeout = timeout + + def history(self, oid, size=1): + """Get revision history information for an object. + + See :meth:`ZODB.interfaces.IStorage.history`. + """ + return _text_transaction_info(self.storage.history(oid, size)) + + def supportsUndo(self): + """Return whether the database supports undo. + """ + try: + f = self.storage.supportsUndo + except AttributeError: + return False + return f() + + def undoLog(self, *args, **kw): + """Return a sequence of descriptions for transactions. + + See :meth:`ZODB.interfaces.IStorageUndoable.undoLog`. 
+ """ + + if not self.supportsUndo(): + return () + return _text_transaction_info(self.storage.undoLog(*args, **kw)) + + def undoInfo(self, *args, **kw): + """Return a sequence of descriptions for transactions. + + See :meth:`ZODB.interfaces.IStorageUndoable.undoInfo`. + """ + if not self.supportsUndo(): + return () + return _text_transaction_info(self.storage.undoInfo(*args, **kw)) + + def undoMultiple(self, ids, txn=None): + """Undo multiple transactions identified by ids. + + A transaction can be undone if all of the objects involved in + the transaction were not modified subsequently, if any + modifications can be resolved by conflict resolution, or if + subsequent changes resulted in the same object state. + + The values in ids should be generated by calling undoLog() + or undoInfo(). The value of ids are not the same as a + transaction ids used by other methods; they are unique to undo(). + + :Parameters: + - `ids`: a sequence of storage-specific transaction identifiers + - `txn`: transaction context to use for undo(). + By default, uses the current transaction. + """ + if not self.supportsUndo(): + raise NotImplementedError + if txn is None: + txn = transaction.get() + if isinstance(ids, six.string_types): + ids = [ids] + txn.join(TransactionalUndo(self, ids)) + + def undo(self, id, txn=None): + """Undo a transaction identified by id. + + A transaction can be undone if all of the objects involved in + the transaction were not modified subsequently, if any + modifications can be resolved by conflict resolution, or if + subsequent changes resulted in the same object state. + + The value of id should be generated by calling undoLog() + or undoInfo(). The value of id is not the same as a + transaction id used by other methods; it is unique to undo(). + + :Parameters: + - `id`: a transaction identifier + - `txn`: transaction context to use for undo(). + By default, uses the current transaction. 
+ """ + self.undoMultiple([id], txn) + + def transaction(self, note=None): + """Execute a block of code as a transaction. + + If a note is given, it will be added to the transaction's + description. + + The ``transaction`` method returns a context manager that can + be used with the ``with`` statement. + """ + return ContextManager(self, note) + + def new_oid(self): + """ + Return a new oid from the storage. + + Kept for backwards compatibility only. New oids should be + allocated in a transaction using an open Connection. + """ + return self.storage.new_oid() # pragma: no cover + + def open_then_close_db_when_connection_closes(self): + """Create and return a connection. + + When the connection closes, the database will close too. + """ + conn = self.open() + conn.onCloseCallback(self.close) + return conn + + +class ContextManager(object): + """PEP 343 context manager + """ + + def __init__(self, db, note=None): + self.db = db + self.note = note + + def __enter__(self): + self.tm = tm = transaction.TransactionManager() + self.conn = self.db.open(self.tm) + t = tm.begin() + if self.note: + t.note(self.note) + return self.conn + + def __exit__(self, t, v, tb): + if t is None: + self.tm.commit() + else: + self.tm.abort() + self.conn.close() + +resource_counter_lock = utils.Lock() +resource_counter = 0 + +class TransactionalUndo(object): + + def __init__(self, db, tids): + self._db = db + self._storage = getattr( + db._mvcc_storage, 'undo_instance', db._mvcc_storage.new_instance)() + self._tids = tids + + def abort(self, transaction): + pass + + def tpc_begin(self, transaction): + tdata = TransactionMetaData( + transaction.user, + transaction.description, + transaction.extension) + transaction.set_data(self, tdata) + self._storage.tpc_begin(tdata) + + def commit(self, transaction): + transaction = transaction.data(self) + for tid in self._tids: + self._storage.undo(tid, transaction) + + def tpc_vote(self, transaction): + transaction = transaction.data(self) + 
self._storage.tpc_vote(transaction) + + def tpc_finish(self, transaction): + transaction = transaction.data(self) + self._storage.tpc_finish(transaction) + + def tpc_abort(self, transaction): + transaction = transaction.data(self) + self._storage.tpc_abort(transaction) + + def sortKey(self): + return "%s:%s" % (self._storage.sortKey(), id(self)) + +def connection(*args, **kw): + """Create a database :class:`connection `. + + A database is created using the given arguments and opened to + create the returned connection. The database will be closed when + the connection is closed. This is a convenience function to avoid + managing a separate database object. + """ + return DB(*args, **kw).open_then_close_db_when_connection_closes() + +_transaction_meta_data_text_variables = 'user_name', 'description' +def _text_transaction_info(info): + for d in info: + for name in _transaction_meta_data_text_variables: + if name in d: + d[name] = d[name].decode('utf-8') + + return info diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/DemoStorage.py b/thesisenv/lib/python3.6/site-packages/ZODB/DemoStorage.py new file mode 100644 index 0000000..f2f56fa --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/DemoStorage.py @@ -0,0 +1,447 @@ +############################################################################## +# +# Copyright (c) Zope Corporation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
+# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
+# FOR A PARTICULAR PURPOSE
+#
+##############################################################################
+"""Demo ZODB storage
+
+A demo storage supports demos by allowing a volatile changed database
+to be layered over a base database.
+
+The base storage must not change.
+
+"""
+from __future__ import print_function
+import os
+import random
+import weakref
+import tempfile
+import ZODB.BaseStorage
+import ZODB.blob
+import ZODB.interfaces
+import ZODB.MappingStorage
+import ZODB.POSException
+import ZODB.utils
+import zope.interface
+
+from .ConflictResolution import ConflictResolvingStorage
+from .utils import load_current, maxtid
+
+@zope.interface.implementer(
+ ZODB.interfaces.IStorage,
+ ZODB.interfaces.IStorageIteration,
+ )
+class DemoStorage(ConflictResolvingStorage):
+ """A storage that stores changes against a read-only base database
+
+ This storage was originally meant to support distribution of
+ application demonstrations with populated read-only databases (on
+ CDROM) and writable in-memory databases.
+
+ Demo storages are extremely convenient for testing where setup of a
+ base database can be shared by many tests.
+
+ Demo storages are also handy for staging applications where a
+ read-only snapshot of a production database (often accomplished
+ using a `beforestorage
+ `_) is combined
+ with a changes database implemented with a
+ :class:`~ZODB.FileStorage.FileStorage.FileStorage`.
+ """
+
+ def __init__(self, name=None, base=None, changes=None,
+ close_base_on_close=None, close_changes_on_close=None):
+ """Create a demo storage
+
+ :param str name: The storage name used by the
+ :meth:`~ZODB.interfaces.IStorage.getName` and
+ :meth:`~ZODB.interfaces.IStorage.sortKey` methods.
+ :param object base: base storage + :param object changes: changes storage + :param bool close_base_on_close: A Flag indicating whether the base + database should be closed when the demo storage is closed. + :param bool close_changes_on_close: A Flag indicating whether the + changes database should be closed when the demo storage is closed. + + If a base database isn't provided, a + :class:`~ZODB.MappingStorage.MappingStorage` will be + constructed and used. + + If ``close_base_on_close`` isn't specified, it will be ``True`` if + a base database was provided and ``False`` otherwise. + + If a changes database isn't provided, a + :class:`~ZODB.MappingStorage.MappingStorage` will be + constructed and used and blob support will be provided using a + temporary blob directory. + + If ``close_changes_on_close`` isn't specified, it will be ``True`` if + a changes database was provided and ``False`` otherwise. + """ + + if close_base_on_close is None: + if base is None: + base = ZODB.MappingStorage.MappingStorage() + close_base_on_close = False + else: + close_base_on_close = True + elif base is None: + base = ZODB.MappingStorage.MappingStorage() + + self.base = base + self.close_base_on_close = close_base_on_close + + + if changes is None: + self._temporary_changes = True + changes = ZODB.MappingStorage.MappingStorage() + zope.interface.alsoProvides(self, ZODB.interfaces.IBlobStorage) + if close_changes_on_close is None: + close_changes_on_close = False + else: + if ZODB.interfaces.IBlobStorage.providedBy(changes): + zope.interface.alsoProvides(self, ZODB.interfaces.IBlobStorage) + if close_changes_on_close is None: + close_changes_on_close = True + + self.changes = changes + self.close_changes_on_close = close_changes_on_close + + self._issued_oids = set() + self._stored_oids = set() + self._resolved = [] + + self._commit_lock = ZODB.utils.Lock() + self._transaction = None + + if name is None: + name = 'DemoStorage(%r, %r)' % (base.getName(), changes.getName()) + 
self.__name__ = name + + self._copy_methods_from_changes(changes) + + self._next_oid = random.randint(1, 1<<62) + + def _blobify(self): + if (self._temporary_changes and + isinstance(self.changes, ZODB.MappingStorage.MappingStorage) + ): + blob_dir = tempfile.mkdtemp('.demoblobs') + _temporary_blobdirs[ + weakref.ref(self, cleanup_temporary_blobdir) + ] = blob_dir + self.changes = ZODB.blob.BlobStorage(blob_dir, self.changes) + self._copy_methods_from_changes(self.changes) + return True + + def cleanup(self): + self.base.cleanup() + self.changes.cleanup() + + __opened = True + def opened(self): + return self.__opened + + def close(self): + self.__opened = False + if self.close_base_on_close: + self.base.close() + if self.close_changes_on_close: + self.changes.close() + + def _copy_methods_from_changes(self, changes): + for meth in ( + '_lock', + 'getSize', 'isReadOnly', + 'sortKey', 'tpc_transaction', + ): + setattr(self, meth, getattr(changes, meth)) + + supportsUndo = getattr(changes, 'supportsUndo', None) + if supportsUndo is not None and supportsUndo(): + for meth in ('supportsUndo', 'undo', 'undoLog', 'undoInfo'): + setattr(self, meth, getattr(changes, meth)) + zope.interface.alsoProvides(self, ZODB.interfaces.IStorageUndoable) + + lastInvalidations = getattr(changes, 'lastInvalidations', None) + if lastInvalidations is not None: + self.lastInvalidations = lastInvalidations + + def getName(self): + return self.__name__ + __repr__ = getName + + def getTid(self, oid): + try: + return self.changes.getTid(oid) + except ZODB.POSException.POSKeyError: + return self.base.getTid(oid) + + def history(self, oid, size=1): + try: + r = self.changes.history(oid, size) + except ZODB.POSException.POSKeyError: + r = [] + size -= len(r) + if size: + try: + r += self.base.history(oid, size) + except ZODB.POSException.POSKeyError: + if not r: + raise + return r + + def iterator(self, start=None, end=None): + for t in self.base.iterator(start, end): + yield t + for t in 
self.changes.iterator(start, end): + yield t + + def lastTransaction(self): + t = self.changes.lastTransaction() + if t == ZODB.utils.z64: + t = self.base.lastTransaction() + return t + + def __len__(self): + return len(self.changes) + + # still want load for old clients (e.g. zeo servers) + load = load_current + + def loadBefore(self, oid, tid): + try: + result = self.changes.loadBefore(oid, tid) + except ZODB.POSException.POSKeyError: + # The oid isn't in the changes, so defer to base + return self.base.loadBefore(oid, tid) + + if result is None: + # The oid *was* in the changes, but there aren't any + # earlier records. Maybe there are in the base. + try: + result = self.base.loadBefore(oid, tid) + except ZODB.POSException.POSKeyError: + # The oid isn't in the base, so None will be the right result + pass + else: + if result and not result[-1]: + # The oid is current in the base. We need to find + # the end tid in the base by fining the first tid + # in the changes. Unfortunately, there isn't an + # api for this, so we have to walk back using + # loadBefore. + + if tid == maxtid: + # Special case: we were looking for the + # current value. We won't find anything in + # changes, so we're done. 
+ return result + + end_tid = maxtid + t = self.changes.loadBefore(oid, end_tid) + while t: + end_tid = t[1] + t = self.changes.loadBefore(oid, end_tid) + result = result[:2] + ( + end_tid if end_tid != maxtid else None, + ) + + return result + + def loadBlob(self, oid, serial): + try: + return self.changes.loadBlob(oid, serial) + except ZODB.POSException.POSKeyError: + try: + return self.base.loadBlob(oid, serial) + except AttributeError: + if not ZODB.interfaces.IBlobStorage.providedBy(self.base): + raise ZODB.POSException.POSKeyError(oid, serial) + raise + except AttributeError: + if self._blobify(): + return self.loadBlob(oid, serial) + raise + + def openCommittedBlobFile(self, oid, serial, blob=None): + try: + return self.changes.openCommittedBlobFile(oid, serial, blob) + except ZODB.POSException.POSKeyError: + try: + return self.base.openCommittedBlobFile(oid, serial, blob) + except AttributeError: + if not ZODB.interfaces.IBlobStorage.providedBy(self.base): + raise ZODB.POSException.POSKeyError(oid, serial) + raise + except AttributeError: + if self._blobify(): + return self.openCommittedBlobFile(oid, serial, blob) + raise + + def loadSerial(self, oid, serial): + try: + return self.changes.loadSerial(oid, serial) + except ZODB.POSException.POSKeyError: + return self.base.loadSerial(oid, serial) + + def new_oid(self): + with self._lock: + while 1: + oid = ZODB.utils.p64(self._next_oid ) + if oid not in self._issued_oids: + try: + load_current(self.changes, oid) + except ZODB.POSException.POSKeyError: + try: + load_current(self.base, oid) + except ZODB.POSException.POSKeyError: + self._next_oid += 1 + self._issued_oids.add(oid) + return oid + + self._next_oid = random.randint(1, 1<<62) + + def pack(self, t, referencesf, gc=None): + if gc is None: + if self._temporary_changes: + return self.changes.pack(t, referencesf) + elif self._temporary_changes: + return self.changes.pack(t, referencesf, gc=gc) + elif gc: + raise TypeError( + "Garbage collection isn't 
supported" + " when there is a base storage.") + + try: + self.changes.pack(t, referencesf, gc=False) + except TypeError as v: + if 'gc' in str(v): + pass # The gc arg isn't supported. Don't pack + raise + + def pop(self): + """Close the changes database and return the base. + """ + self.changes.close() + return self.base + + def push(self, changes=None): + """Create a new demo storage using the storage as a base. + + The given changes are used as the changes for the returned + storage and ``False`` is passed as ``close_base_on_close``. + """ + return self.__class__(base=self, changes=changes, + close_base_on_close=False) + + def store(self, oid, serial, data, version, transaction): + assert version=='', "versions aren't supported" + if transaction is not self._transaction: + raise ZODB.POSException.StorageTransactionError(self, transaction) + + # Since the OID is being used, we don't have to keep up with it any + # more. Save it now so we can forget it later. :) + self._stored_oids.add(oid) + + # See if we already have changes for this oid + try: + old = load_current(self, oid)[1] + except ZODB.POSException.POSKeyError: + old = serial + + if old != serial: + rdata = self.tryToResolveConflict(oid, old, serial, data) + self.changes.store(oid, old, rdata, '', transaction) + self._resolved.append(oid) + else: + self.changes.store(oid, serial, data, '', transaction) + + def storeBlob(self, oid, oldserial, data, blobfilename, version, + transaction): + assert version=='', "versions aren't supported" + if transaction is not self._transaction: + raise ZODB.POSException.StorageTransactionError(self, transaction) + + # Since the OID is being used, we don't have to keep up with it any + # more. Save it now so we can forget it later. 
:) + self._stored_oids.add(oid) + + try: + self.changes.storeBlob( + oid, oldserial, data, blobfilename, '', transaction) + except AttributeError: + if not self._blobify(): + raise + self.changes.storeBlob( + oid, oldserial, data, blobfilename, '', transaction) + + checkCurrentSerialInTransaction = ( + ZODB.BaseStorage.checkCurrentSerialInTransaction) + + def temporaryDirectory(self): + try: + return self.changes.temporaryDirectory() + except AttributeError: + if self._blobify(): + return self.changes.temporaryDirectory() + raise + + def tpc_abort(self, transaction): + with self._lock: + if transaction is not self._transaction: + return + self._stored_oids = set() + self._transaction = None + self.changes.tpc_abort(transaction) + self._commit_lock.release() + + def tpc_begin(self, transaction, *a, **k): + with self._lock: + # The tid argument exists to support testing. + if transaction is self._transaction: + raise ZODB.POSException.StorageTransactionError( + "Duplicate tpc_begin calls for same transaction") + + self._commit_lock.acquire() + + with self._lock: + self.changes.tpc_begin(transaction, *a, **k) + self._transaction = transaction + self._stored_oids = set() + del self._resolved[:] + + def tpc_vote(self, *a, **k): + if self.changes.tpc_vote(*a, **k): + raise ZODB.POSException.StorageTransactionError( + "Unexpected resolved conflicts") + return self._resolved + + def tpc_finish(self, transaction, func = lambda tid: None): + with self._lock: + if (transaction is not self._transaction): + raise ZODB.POSException.StorageTransactionError( + "tpc_finish called with wrong transaction") + self._issued_oids.difference_update(self._stored_oids) + self._stored_oids = set() + self._transaction = None + tid = self.changes.tpc_finish(transaction, func) + self._commit_lock.release() + return tid + +_temporary_blobdirs = {} +def cleanup_temporary_blobdir( + ref, + _temporary_blobdirs=_temporary_blobdirs, # Make sure it stays around + ): + blob_dir = 
_temporary_blobdirs.pop(ref, None) + if blob_dir and os.path.exists(blob_dir): + ZODB.blob.remove_committed_dir(blob_dir) diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/DemoStorage.test b/thesisenv/lib/python3.6/site-packages/ZODB/DemoStorage.test new file mode 100644 index 0000000..ec2cd03 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/DemoStorage.test @@ -0,0 +1,468 @@ +========================== +DemoStorage demo (doctest) +========================== + +DemoStorages provide a way to provide incremental updates to an +existing, base, storage without updating the storage. + +.. We need to mess with time to prevent spurious test failures on windows + + >>> now = 1231019584.0 + >>> def faux_time_time(): + ... global now + ... now += .1 + ... return now + >>> import time + >>> real_time_time = time.time + >>> if isinstance(time,type): + ... time.time = staticmethod(faux_time_time) # Jython + ... else: + ... time.time = faux_time_time + +To see how this works, we'll start by creating a base storage and +puting an object (in addition to the root object) in it: + + >>> from ZODB.FileStorage import FileStorage + >>> base = FileStorage('base.fs') + >>> from ZODB.DB import DB + >>> db = DB(base) + >>> from persistent.mapping import PersistentMapping + >>> conn = db.open() + >>> conn.root()['1'] = PersistentMapping({'a': 1, 'b':2}) + >>> import transaction + >>> transaction.commit() + >>> db.close() + >>> import os + >>> original_size = os.path.getsize('base.fs') + +Now, lets reopen the base storage in read-only mode: + + >>> base = FileStorage('base.fs', read_only=True) + +And open a new storage to store changes: + + >>> changes = FileStorage('changes.fs') + +and combine the 2 in a demofilestorage: + + >>> from ZODB.DemoStorage import DemoStorage + >>> storage = DemoStorage(base=base, changes=changes) + +The storage will assign OIDs in a pseudo-random fashion, but for test +purposes we need to control where they start (since the random seeds +can 
be different on different platforms): + + >>> storage._next_oid = 3553260803050964942 + + +If there are no transactions, the storage reports the lastTransaction +of the base database: + + >>> storage.lastTransaction() == base.lastTransaction() + True + +Let's add some data: + + >>> db = DB(storage) + >>> conn = db.open() + >>> items = sorted(conn.root()['1'].items()) + >>> items + [('a', 1), ('b', 2)] + + >>> conn.root()['2'] = PersistentMapping({'a': 3, 'b':4}) + >>> transaction.commit() + + >>> conn.root()['2']['c'] = 5 + >>> transaction.commit() + +Here we can see that we haven't modified the base storage: + + >>> original_size == os.path.getsize('base.fs') + True + +But we have modified the changes database: + + >>> len(changes) + 2 + +Our lastTransaction reflects the lastTransaction of the changes: + + >>> storage.lastTransaction() > base.lastTransaction() + True + + >>> storage.lastTransaction() == changes.lastTransaction() + True + +Let's walk over some of the methods so we can see how we delegate to +the new underlying storages: + + >>> from ZODB.utils import p64, u64 + >>> storage.load(p64(0), '') == changes.load(p64(0), '') + True + >>> storage.load(p64(0), '') == base.load(p64(0), '') + False + >>> storage.load(p64(1), '') == base.load(p64(1), '') + True + + >>> serial = base.getTid(p64(0)) + >>> storage.loadSerial(p64(0), serial) == base.loadSerial(p64(0), serial) + True + + >>> serial = changes.getTid(p64(0)) + >>> storage.loadSerial(p64(0), serial) == changes.loadSerial(p64(0), + ... 
serial) + True + +The object id of the new object is quite random, and typically large: + + >>> print(u64(conn.root()['2']._p_oid)) + 3553260803050964942 + +Let's look at some other methods: + + >>> storage.getName() + "DemoStorage('base.fs', 'changes.fs')" + + >>> storage.sortKey() == changes.sortKey() + True + + >>> storage.getSize() == changes.getSize() + True + + >>> len(storage) == len(changes) + True + + +Undo methods are simply copied from the changes storage: + + >>> [getattr(storage, name) == getattr(changes, name) + ... for name in ('supportsUndo', 'undo', 'undoLog', 'undoInfo') + ... ] + [True, True, True, True] + + >>> db.close() + +Closing demo storages +===================== + +Normally, when a demo storage is closed, it's base and changes +storage are closed: + + >>> from ZODB.MappingStorage import MappingStorage + >>> demo = DemoStorage(base=MappingStorage(), changes=MappingStorage()) + >>> demo.close() + >>> demo.base.opened(), demo.changes.opened() + (False, False) + +You can pass constructor arguments to control whether the base and +changes storages should be closed when the demo storage is closed: + + >>> demo = DemoStorage( + ... base=MappingStorage(), changes=MappingStorage(), + ... close_base_on_close=False, close_changes_on_close=False, + ... ) + >>> demo.close() + >>> demo.base.opened(), demo.changes.opened() + (True, True) + + +Storage Stacking +================ + +A common use case is to stack demo storages. DemoStorage provides +some helper functions to help with this. 
The push method, just +creates a new demo storage who's base is the original demo storage: + + >>> demo = DemoStorage() + >>> demo2 = demo.push() + >>> demo2.base is demo + True + +We can also supply an explicit changes storage, if we wish: + + >>> changes = MappingStorage() + >>> demo3 = demo2.push(changes) + >>> demo3.changes is changes, demo3.base is demo2 + (True, True) + +The pop method closes the changes storage and returns the base +*without* closing it: + + >>> demo3.pop() is demo2 + True + + >>> changes.opened() + False + +If storage returned by push is closed, the original storage isn't: + + >>> demo3.push().close() + >>> demo2.opened() + True + +Blob Support +============ + +DemoStorage supports Blobs if the changes database supports blobs. + + >>> import ZODB.blob + >>> base = ZODB.blob.BlobStorage('base', FileStorage('base.fs')) + >>> db = DB(base) + >>> conn = db.open() + >>> conn.root()['blob'] = ZODB.blob.Blob() + >>> with conn.root()['blob'].open('w') as file: + ... _ = file.write(b'state 1') + >>> transaction.commit() + >>> db.close() + + >>> base = ZODB.blob.BlobStorage('base', + ... FileStorage('base.fs', read_only=True)) + >>> changes = ZODB.blob.BlobStorage('changes', + ... FileStorage('changes.fs', create=True)) + >>> storage = DemoStorage(base=base, changes=changes) + + >>> db = DB(storage) + >>> conn = db.open() + >>> with conn.root()['blob'].open() as fp: fp.read() + 'state 1' + >>> _ = transaction.begin() + >>> with conn.root()['blob'].open('w') as file: + ... _ = file.write(b'state 2') + >>> transaction.commit() + >>> with conn.root()['blob'].open() as fp: fp.read() + 'state 2' + + >>> storage.temporaryDirectory() == changes.temporaryDirectory() + True + + >>> db.close() + +It isn't necessary for the base database to support blobs. 
+ + >>> base = FileStorage('base.fs', read_only=True) + >>> changes = ZODB.blob.BlobStorage('changes', FileStorage('changes.fs')) + >>> storage = DemoStorage(base=base, changes=changes) + >>> db = DB(storage) + >>> conn = db.open() + >>> with conn.root()['blob'].open() as fp: fp.read() + 'state 2' + + >>> _ = transaction.begin() + >>> conn.root()['blob2'] = ZODB.blob.Blob() + >>> with conn.root()['blob2'].open('w') as file: + ... _ = file.write(b'state 1') + >>> with conn.root()['blob2'].open() as fp: fp.read() + 'state 1' + + >>> db.close() + +If the changes database is created implicitly, it will get a blob +storage wrapped around it when necessary: + + >>> base = ZODB.blob.BlobStorage('base', + ... FileStorage('base.fs', read_only=True)) + >>> storage = DemoStorage(base=base) + + >>> type(storage.changes).__name__ + 'MappingStorage' + + >>> db = DB(storage) + >>> conn = db.open() + >>> with conn.root()['blob'].open() as fp: fp.read() + 'state 1' + + >>> type(storage.changes).__name__ + 'BlobStorage' + + >>> _ = transaction.begin() + >>> with conn.root()['blob'].open('w') as file: + ... _ = file.write(b'state 2') + >>> transaction.commit() + >>> with conn.root()['blob'].open() as fp: fp.read() + 'state 2' + + >>> storage.temporaryDirectory() == storage.changes.temporaryDirectory() + True + + >>> db.close() + +This works even if we first write a blob rather than read a blob: + + >>> base = ZODB.blob.BlobStorage('base', + ... FileStorage('base.fs', read_only=True)) + >>> storage = DemoStorage(base=base) + + >>> type(storage.changes).__name__ + 'MappingStorage' + + >>> db = DB(storage) + >>> conn = db.open() + + >>> _ = transaction.begin() + >>> conn.root()['blob'] = ZODB.blob.Blob() + >>> with conn.root()['blob'].open('w') as file: + ... 
_ = file.write(b'state 2') + >>> transaction.commit() + + >>> type(storage.changes).__name__ + 'BlobStorage' + + >>> with conn.root()['blob'].open() as fp: fp.read() + 'state 2' + + >>> storage.temporaryDirectory() == storage.changes.temporaryDirectory() + True + + >>> db.close() + + +.. Check that the temporary directory is gone + + For now, it won't go until the storage does. + + >>> transaction.abort() + >>> blobdir = storage.temporaryDirectory() + >>> del storage, _ + + >>> import gc + >>> _ = gc.collect() + + >>> import os + >>> os.path.exists(blobdir) + False + +ZConfig support +=============== + +You can configure demo storages using ZConfig, using name, changes, +and base options: + + >>> import ZODB.config + >>> storage = ZODB.config.storageFromString(""" + ... + ... + ... """) + >>> storage.getName() + "DemoStorage('MappingStorage', 'MappingStorage')" + + >>> storage = ZODB.config.storageFromString(""" + ... + ... + ... path base.fs + ... + ... + ... + ... path changes.fs + ... + ... + ... """) + >>> storage.getName() + "DemoStorage('base.fs', 'changes.fs')" + + >>> storage.close() + + >>> storage = ZODB.config.storageFromString(""" + ... + ... name bob + ... + ... path base.fs + ... + ... + ... + ... path changes.fs + ... + ... + ... """) + >>> storage.getName() + 'bob' + >>> storage.base.getName() + 'base.fs' + + >>> storage.close() + +Generating OIDs +=============== + +When asked for a new OID DemoStorage chooses a value and then +verifies that neither the base or changes storages already contain +that OID. It chooses values sequentially from random starting +points, picking new starting points whenever a chosen value us already +in the changes or base. + +Under rare circumstances an OID can be chosen that has already been +handed out, but which hasn't yet been comitted. Lets verify that if +the same OID is chosen twice during a transaction that everything will +still work. + +To test this, we need to hack random.randint a bit. 
+ + >>> import random + >>> randint = random.randint + + >>> rv = 42 + >>> def faux_randint(min, max): + ... print('called randint') + ... global rv + ... rv += 1000 + ... return rv + + >>> random.randint = faux_randint + +Now, we create a demostorage. + + >>> storage = DemoStorage() + called randint + +If we ask for an oid, we'll get 1042. + + >>> print(u64(storage.new_oid())) + 1042 + +oids are allocated seuentially: + + >>> print(u64(storage.new_oid())) + 1043 + +Now, we'll save 1044 in changes so that it has to pick a new one randomly. + + >>> t = transaction.get() + >>> ZODB.tests.util.store(storage.changes, 1044) + + >>> print(u64(storage.new_oid())) + called randint + 2042 + +Now, we hack rv to 1042 is given out again and we'll save 2043 in base +to force another attempt: + + >>> rv -= 1000 + >>> ZODB.tests.util.store(storage.changes, 2043) + >>> oid = storage.new_oid() + called randint + called randint + >>> print(u64(oid)) + 3042 + +DemoStorage keeps up with the issued OIDs to know when not to reissue them... + + >>> oid in storage._issued_oids + True + +...but once data is stored with a given OID... + + >>> ZODB.tests.util.store(storage, oid) + +...there's no need to remember it any longer: + + >>> oid in storage._issued_oids + False + + >>> storage.close() + +.. restore randint + + >>> random.randint = randint + +.. restore time + + >>> time.time = real_time_time diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/ExportImport.py b/thesisenv/lib/python3.6/site-packages/ZODB/ExportImport.py new file mode 100644 index 0000000..6de4db8 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/ExportImport.py @@ -0,0 +1,208 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## +"""Support for database export and import.""" + +import logging +import os +from tempfile import TemporaryFile + +import six + +from ZODB.blob import Blob +from ZODB.interfaces import IBlobStorage +from ZODB.POSException import ExportError +from ZODB.serialize import referencesf +from ZODB.utils import p64, u64, cp, mktemp +from ZODB._compat import PersistentPickler, Unpickler, BytesIO, _protocol + + +logger = logging.getLogger('ZODB.ExportImport') + +class ExportImport(object): + + def exportFile(self, oid, f=None, bufsize=64 * 1024): + if f is None: + f = TemporaryFile(prefix="EXP") + elif isinstance(f, six.string_types): + f = open(f,'w+b') + f.write(b'ZEXP') + oids = [oid] + done_oids = {} + done = done_oids.__contains__ + load = self._storage.load + supports_blobs = IBlobStorage.providedBy(self._storage) + while oids: + oid = oids.pop(0) + if oid in done_oids: + continue + done_oids[oid] = True + try: + p, serial = load(oid) + except: + logger.debug("broken reference for oid %s", repr(oid), + exc_info=True) + else: + referencesf(p, oids) + f.writelines([oid, p64(len(p)), p]) + + if supports_blobs: + if not isinstance(self._reader.getGhost(p), Blob): + continue # not a blob + + blobfilename = self._storage.loadBlob(oid, serial) + f.write(blob_begin_marker) + f.write(p64(os.stat(blobfilename).st_size)) + blobdata = open(blobfilename, "rb") + cp(blobdata, f, bufsize=bufsize) + blobdata.close() + + f.write(export_end_marker) + return f + + def importFile(self, f, clue='', customImporters=None): + # This is tricky, because we need to work in a transaction! 
+ + if isinstance(f, six.string_types): + with open(f, 'rb') as fp: + return self.importFile(fp, clue=clue, + customImporters=customImporters) + + magic = f.read(4) + if magic != b'ZEXP': + if customImporters and magic in customImporters: + f.seek(0) + return customImporters[magic](self, f, clue) + raise ExportError("Invalid export header") + + t = self.transaction_manager.get() + if clue: + t.note(clue) + + return_oid_list = [] + self._import = f, return_oid_list + self._register() + t.savepoint(optimistic=True) + # Return the root imported object. + if return_oid_list: + return self.get(return_oid_list[0]) + else: + return None + + def _importDuringCommit(self, transaction, f, return_oid_list): + """Import data during two-phase commit. + + Invoked by the transaction manager mid commit. + Appends one item, the OID of the first object created, + to return_oid_list. + """ + oids = {} + + # IMPORTANT: This code should be consistent with the code in + # serialize.py. It is currently out of date and doesn't handle + # weak references. 
+ + def persistent_load(ooid): + """Remap a persistent id to a new ID and create a ghost for it.""" + + klass = None + if isinstance(ooid, tuple): + ooid, klass = ooid + + if not isinstance(ooid, bytes): + assert isinstance(ooid, str) + # this happens on Python 3 when all bytes in the oid are < 0x80 + ooid = ooid.encode('ascii') + + if ooid in oids: + oid = oids[ooid] + else: + if klass is None: + oid = self._storage.new_oid() + else: + oid = self._storage.new_oid(), klass + oids[ooid] = oid + + return Ghost(oid) + + while 1: + header = f.read(16) + if header == export_end_marker: + break + if len(header) != 16: + raise ExportError("Truncated export file") + + # Extract header information + ooid = header[:8] + length = u64(header[8:16]) + data = f.read(length) + + if len(data) != length: + raise ExportError("Truncated export file") + + if oids: + oid = oids[ooid] + if isinstance(oid, tuple): + oid = oid[0] + else: + oids[ooid] = oid = self._storage.new_oid() + return_oid_list.append(oid) + + if (b'blob' in data and + isinstance(self._reader.getGhost(data), Blob) + ): + # Blob support + + # Make sure we have a (redundant, overly) blob marker. 
+ if f.read(len(blob_begin_marker)) != blob_begin_marker: + raise ValueError("No data for blob object") + + # Copy the blob data to a temporary file + # and remember the name + blob_len = u64(f.read(8)) + blob_filename = mktemp(self._storage.temporaryDirectory()) + blob_file = open(blob_filename, "wb") + cp(f, blob_file, blob_len) + blob_file.close() + else: + blob_filename = None + + pfile = BytesIO(data) + unpickler = Unpickler(pfile) + unpickler.persistent_load = persistent_load + + newp = BytesIO() + pickler = PersistentPickler(persistent_id, newp, _protocol) + + pickler.dump(unpickler.load()) + pickler.dump(unpickler.load()) + data = newp.getvalue() + + if blob_filename is not None: + self._storage.storeBlob(oid, None, data, blob_filename, + '', transaction) + else: + self._storage.store(oid, None, data, '', transaction) + + +export_end_marker = b'\377'*16 +blob_begin_marker = b'\000BLOBSTART' + +class Ghost(object): + __slots__ = ("oid",) + def __init__(self, oid): + self.oid = oid + +def persistent_id(obj): + if isinstance(obj, Ghost): + return obj.oid diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/FileStorage/FileStorage.py b/thesisenv/lib/python3.6/site-packages/ZODB/FileStorage/FileStorage.py new file mode 100644 index 0000000..af3857e --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/FileStorage/FileStorage.py @@ -0,0 +1,2224 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## +"""Storage implementation using a log written to a single file. +""" +from __future__ import print_function + +import binascii +import contextlib +import errno +import logging +import os +import time +from struct import pack +from struct import unpack + +from persistent.TimeStamp import TimeStamp +from six import string_types as STRING_TYPES +from zc.lockfile import LockFile +from zope.interface import alsoProvides +from zope.interface import implementer + +from .. import utils + +from ZODB.blob import BlobStorageMixin +from ZODB.blob import link_or_copy +from ZODB.blob import remove_committed +from ZODB.blob import remove_committed_dir +from ZODB.BaseStorage import BaseStorage +from ZODB.BaseStorage import DataRecord as _DataRecord +from ZODB.BaseStorage import TransactionRecord as _TransactionRecord +from ZODB.ConflictResolution import ConflictResolvingStorage +from ZODB.FileStorage.format import CorruptedDataError +from ZODB.FileStorage.format import CorruptedError +from ZODB.FileStorage.format import DATA_HDR +from ZODB.FileStorage.format import DATA_HDR_LEN +from ZODB.FileStorage.format import DataHeader +from ZODB.FileStorage.format import FileStorageFormatter +from ZODB.FileStorage.format import TRANS_HDR +from ZODB.FileStorage.format import TRANS_HDR_LEN +from ZODB.FileStorage.format import TxnHeader +from ZODB.FileStorage.fspack import FileStoragePacker +from ZODB.interfaces import IBlobStorageRestoreable +from ZODB.interfaces import IExternalGC +from ZODB.interfaces import IStorage +from ZODB.interfaces import IStorageCurrentRecordIteration +from ZODB.interfaces import IStorageIteration +from ZODB.interfaces import 
IStorageRestoreable +from ZODB.interfaces import IStorageUndoable +from ZODB.POSException import ConflictError +from ZODB.POSException import MultipleUndoErrors +from ZODB.POSException import POSKeyError +from ZODB.POSException import ReadOnlyError +from ZODB.POSException import StorageError +from ZODB.POSException import StorageSystemError +from ZODB.POSException import StorageTransactionError +from ZODB.POSException import UndoError +from ZODB.fsIndex import fsIndex +from ZODB.utils import as_bytes +from ZODB.utils import as_text +from ZODB.utils import cp +from ZODB.utils import load_current +from ZODB.utils import mktemp +from ZODB.utils import p64 +from ZODB.utils import u64 +from ZODB.utils import z64 +from ZODB._compat import Pickler +from ZODB._compat import loads +from ZODB._compat import decodebytes +from ZODB._compat import encodebytes +from ZODB._compat import _protocol +from ZODB._compat import FILESTORAGE_MAGIC + + +# Not all platforms have fsync +fsync = getattr(os, "fsync", None) + +packed_version = FILESTORAGE_MAGIC + +logger = logging.getLogger('ZODB.FileStorage') + +def panic(message, *data): + logger.critical(message, *data) + raise CorruptedTransactionError(message % data) + +class FileStorageError(StorageError): + pass + +class PackError(FileStorageError): + pass + +class FileStorageFormatError(FileStorageError): + """Invalid file format + + The format of the given file is not valid. + """ + +class CorruptedFileStorageError(FileStorageError, + StorageSystemError): + """Corrupted file storage.""" + +class CorruptedTransactionError(CorruptedFileStorageError): + pass + +class FileStorageQuotaError(FileStorageError, + StorageSystemError): + """File storage quota exceeded.""" + +# Intended to be raised only in fspack.py, and ignored here. 
+class RedundantPackWarning(FileStorageError): + pass + +class TempFormatter(FileStorageFormatter): + """Helper class used to read formatted FileStorage data.""" + + def __init__(self, afile): + self._file = afile + +@implementer( + IStorage, + IStorageRestoreable, + IStorageIteration, + IStorageUndoable, + IStorageCurrentRecordIteration, + IExternalGC, + ) +class FileStorage( + FileStorageFormatter, + BlobStorageMixin, + ConflictResolvingStorage, + BaseStorage, + ): + """Storage that saves data in a file + """ + + # Set True while a pack is in progress; undo is blocked for the duration. + _pack_is_in_progress = False + + def __init__(self, file_name, create=False, read_only=False, stop=None, + quota=None, pack_gc=True, pack_keep_old=True, packer=None, + blob_dir=None): + """Create a file storage + + :param str file_name: Path to store data file + :param bool create: Flag indicating whether a file should be + created even if it already exists. + :param bool read_only: Flag indicating whether the file is + read only. Only one process is able to open the file + non-read-only. + :param bytes stop: Time-travel transaction id + When the file is opened, data will be read up to the given + transaction id. Transaction ids correspond to times and + you can compute transaction ids for a given time using + :class:`~ZODB.TimeStamp.TimeStamp`. + :param int quota: File-size quota + :param bool pack_gc: Flag indicating whether garbage + collection should be performed when packing. + :param bool pack_keep_old: flag indicating whether old data + files should be retained after packing as a ``.old`` file. + :param callable packer: An alternative + :interface:`packer `. + :param str blob_dir: A blob-directory path name. + Blobs will be supported if this option is provided. + + A file storage stores data in a single file that behaves like + a traditional transaction log. New data records are appended + to the end of the file. Periodically, the file is packed to + free up space. 
When this is done, current records as of the + pack time or later are copied to a new file, which replaces + the old file. + + FileStorages keep in-memory indexes mapping object oids to the + location of their current records in the file. Back pointers to + previous records allow access to non-current records from the + current records. + + In addition to the data file, some ancillary files are + created. These can be lost without affecting data + integrity, however losing the index file may cause extremely + slow startup. Each has a name that's a concatenation of the + original file and a suffix. The files are listed below by + suffix: + + .index + Snapshot of the in-memory index. This are created on + shutdown, packing, and after rebuilding an index when one + was not found. For large databases, creating a + file-storage object without an index file can take very + long because it's necessary to scan the data file to build + the index. + + .lock + A lock file preventing multiple processes from opening a + file storage on non-read-only mode. + + .tmp + A file used to store data being committed in the first phase + of 2-phase commit + + .index_tmp + A temporary file used when saving the in-memory index to + avoid overwriting an existing index until a new index has + been fully saved. + + .pack + A temporary file written while packing containing current + records as of and after the pack time. + + .old + The previous database file after a pack. + + When the database is packed, current records as of the pack + time and later are written to the ``.pack`` file. At the end + of packing, the ``.old`` file is removed, if it exists, and + the data file is renamed to the ``.old`` file and finally the + ``.pack`` file is rewritten to the data file. 
+ """ + + if read_only: + self._is_read_only = True + if create: + raise ValueError("can't create a read-only file") + elif stop is not None: + raise ValueError("time-travel only supported in read-only mode") + + if stop is None: + stop = b'\377'*8 + + # Lock the database and set up the temp file. + if not read_only: + # Create the lock file + self._lock_file = LockFile(file_name + '.lock') + self._tfile = open(file_name + '.tmp', 'w+b') + self._tfmt = TempFormatter(self._tfile) + else: + self._tfile = None + + self._file_name = os.path.abspath(file_name) + + self._pack_gc = pack_gc + self.pack_keep_old = pack_keep_old + if packer is not None: + self.packer = packer + + BaseStorage.__init__(self, file_name) + + index, tindex = self._newIndexes() + self._initIndex(index, tindex) + + # Now open the file + + self._file = None + if not create: + try: + self._file = open(file_name, read_only and 'rb' or 'r+b') + except IOError as exc: + if exc.errno == errno.EFBIG: + # The file is too big to open. Fail visibly. + raise + if read_only: + # When open request is read-only we do not want to create + # the file + raise + if exc.errno == errno.ENOENT: + # The file doesn't exist. Create it. + create = 1 + # If something else went wrong, it's hard to guess + # what the problem was. If the file does not exist, + # create it. Otherwise, fail. 
+ if os.path.exists(file_name): + raise + else: + create = 1 + + if self._file is None and create: + if os.path.exists(file_name): + os.remove(file_name) + self._file = open(file_name, 'w+b') + self._file.write(packed_version) + + self._files = FilePool(self._file_name) + r = self._restore_index() + if r is not None: + self._used_index = 1 # Marker for testing + index, start, ltid = r + + self._initIndex(index, tindex) + self._pos, self._oid, tid = read_index( + self._file, file_name, index, tindex, stop, + ltid=ltid, start=start, read_only=read_only, + ) + else: + self._used_index = 0 # Marker for testing + self._pos, self._oid, tid = read_index( + self._file, file_name, index, tindex, stop, + read_only=read_only, + ) + self._save_index() + + self._ltid = tid + + # self._pos should always point just past the last + # transaction. During 2PC, data is written after _pos. + # invariant is restored at tpc_abort() or tpc_finish(). + + self._ts = tid = TimeStamp(tid) + t = time.time() + t = TimeStamp(*time.gmtime(t)[:5] + (t % 60,)) + if tid > t: + seconds = tid.timeTime() - t.timeTime() + complainer = logger.warning + if seconds > 30 * 60: # 30 minutes -- way screwed up + complainer = logger.critical + complainer("%s Database records %d seconds in the future", + file_name, seconds) + + self._quota = quota + + if blob_dir: + self.blob_dir = os.path.abspath(blob_dir) + if create and os.path.exists(self.blob_dir): + remove_committed_dir(self.blob_dir) + + self._blob_init(blob_dir) + alsoProvides(self, IBlobStorageRestoreable) + else: + self.blob_dir = None + self._blob_init_no_blobs() + + def copyTransactionsFrom(self, other): + if self.blob_dir: + return BlobStorageMixin.copyTransactionsFrom(self, other) + else: + return BaseStorage.copyTransactionsFrom(self, other) + + def _initIndex(self, index, tindex): + self._index=index + self._tindex=tindex + self._index_get=index.get + + def __len__(self): + return len(self._index) + + def _newIndexes(self): + # hook to use 
something other than builtin dict + return fsIndex(), {} + + _saved = 0 + def _save_index(self): + """Write the database index to a file to support quick startup.""" + + if self._is_read_only: + return + + index_name = self.__name__ + '.index' + tmp_name = index_name + '.index_tmp' + + self._index.save(self._pos, tmp_name) + + try: + try: + os.remove(index_name) + except OSError: + pass + os.rename(tmp_name, index_name) + except: pass + + self._saved += 1 + + def _clear_index(self): + index_name = self.__name__ + '.index' + if os.path.exists(index_name): + try: + os.remove(index_name) + except OSError: + pass + + def _sane(self, index, pos): + """Sanity check saved index data by reading the last undone trans + + Basically, we read the last not undone transaction and + check to see that the included records are consistent + with the index. Any invalid record records or inconsistent + object positions cause zero to be returned. + """ + r = self._check_sanity(index, pos) + if not r: + logger.warning("Ignoring index for %s", self._file_name) + return r + + def _check_sanity(self, index, pos): + + if pos < 100: + return 0 # insane + self._file.seek(0, 2) + if self._file.tell() < pos: + return 0 # insane + ltid = None + + max_checked = 5 + checked = 0 + + while checked < max_checked: + self._file.seek(pos - 8) + rstl = self._file.read(8) + tl = u64(rstl) + pos = pos - tl - 8 + if pos < 4: + return 0 # insane + h = self._read_txn_header(pos) + if not ltid: + ltid = h.tid + if h.tlen != tl: + return 0 # inconsistent lengths + if h.status == 'u': + continue # undone trans, search back + if h.status not in ' p': + return 0 # insane + if tl < h.headerlen(): + return 0 # insane + tend = pos + tl + opos = pos + h.headerlen() + if opos == tend: + continue # empty trans + + while opos < tend and checked < max_checked: + # Read the data records for this transaction + h = self._read_data_header(opos) + + if opos + h.recordlen() > tend or h.tloc != pos: + return 0 + + if 
index.get(h.oid, 0) != opos: + return 0 # insane + + checked += 1 + + opos = opos + h.recordlen() + + return ltid + + def _restore_index(self): + """Load database index to support quick startup.""" + # Returns (index, pos, tid), or None in case of error. + # The index returned is always an instance of fsIndex. If the + # index cached in the file is a Python dict, it's converted to + # fsIndex here, and, if we're not in read-only mode, the .index + # file is rewritten with the converted fsIndex so we don't need to + # convert it again the next time. + file_name=self.__name__ + index_name=file_name+'.index' + + if os.path.exists(index_name): + try: + info = fsIndex.load(index_name) + except: + logger.exception('loading index') + return None + else: + return None + + index = info.get('index') + pos = info.get('pos') + if index is None or pos is None: + return None + pos = int(pos) + + if (isinstance(index, dict) or + (isinstance(index, fsIndex) and + isinstance(index._data, dict))): + # Convert dictionary indexes to fsIndexes *or* convert fsIndexes + # which have a dict `_data` attribute to a new fsIndex (newer + # fsIndexes have an OOBTree as `_data`). + newindex = fsIndex() + newindex.update(index) + index = newindex + if not self._is_read_only: + # Save the converted index. + f = open(index_name, 'wb') + p = Pickler(f, _protocol) + info['index'] = index + p.dump(info) + f.close() + # Now call this method again to get the new data. 
+ return self._restore_index() + + tid = self._sane(index, pos) + if not tid: + return None + + return index, pos, tid + + def close(self): + self._file.close() + self._files.close() + if hasattr(self,'_lock_file'): + self._lock_file.close() + if self._tfile: + self._tfile.close() + try: + self._save_index() + except: + # Log the error and continue + logger.exception("Error saving index on close()") + + def getSize(self): + return self._pos + + def _lookup_pos(self, oid): + try: + return self._index[oid] + except KeyError: + raise POSKeyError(oid) + except TypeError: + raise TypeError("invalid oid %r" % (oid,)) + + load = load_current # Keep load for now for old clients + + def load(self, oid, version=''): + """Return pickle data and serial number.""" + assert not version + + with self._files.get() as _file: + pos = self._lookup_pos(oid) + h = self._read_data_header(pos, oid, _file) + if h.plen: + data = _file.read(h.plen) + return data, h.tid + elif h.back: + # Get the data from the backpointer, but tid from + # current txn. 
+ data = self._loadBack_impl(oid, h.back, _file=_file)[0] + return data, h.tid + else: + raise POSKeyError(oid) + + def loadSerial(self, oid, serial): + with self._lock: + pos = self._lookup_pos(oid) + while 1: + h = self._read_data_header(pos, oid) + if h.tid == serial: + break + pos = h.prev + if h.tid < serial or not pos: + raise POSKeyError(oid) + if h.plen: + return self._file.read(h.plen) + else: + return self._loadBack_impl(oid, h.back)[0] + + def loadBefore(self, oid, tid): + with self._files.get() as _file: + pos = self._lookup_pos(oid) + end_tid = None + while True: + h = self._read_data_header(pos, oid, _file) + if h.tid < tid: + break + + pos = h.prev + end_tid = h.tid + if not pos: + return None + + if h.plen: + return _file.read(h.plen), h.tid, end_tid + elif h.back: + data, _, _, _ = self._loadBack_impl(oid, h.back, _file=_file) + return data, h.tid, end_tid + else: + raise POSKeyError(oid) + + def store(self, oid, oldserial, data, version, transaction): + if self._is_read_only: + raise ReadOnlyError() + if transaction is not self._transaction: + raise StorageTransactionError(self, transaction) + assert not version + + with self._lock: + if oid > self._oid: + self.set_max_oid(oid) + old = self._index_get(oid, 0) + committed_tid = None + pnv = None + if old: + h = self._read_data_header(old, oid) + committed_tid = h.tid + + if oldserial != committed_tid: + data = self.tryToResolveConflict(oid, committed_tid, + oldserial, data) + self._resolved.append(oid) + + pos = self._pos + here = pos + self._tfile.tell() + self._thl + self._tindex[oid] = here + new = DataHeader(oid, self._tid, old, pos, 0, len(data)) + + self._tfile.write(new.asString()) + self._tfile.write(data) + + # Check quota + if self._quota is not None and here > self._quota: + raise FileStorageQuotaError( + "The storage quota has been exceeded.") + + def deleteObject(self, oid, oldserial, transaction): + if self._is_read_only: + raise ReadOnlyError() + if transaction is not 
self._transaction: + raise StorageTransactionError(self, transaction) + + with self._lock: + old = self._index_get(oid, 0) + if not old: + raise POSKeyError(oid) + h = self._read_data_header(old, oid) + committed_tid = h.tid + + if oldserial != committed_tid: + raise ConflictError( + oid=oid, serials=(committed_tid, oldserial)) + + pos = self._pos + here = pos + self._tfile.tell() + self._thl + self._tindex[oid] = here + new = DataHeader(oid, self._tid, old, pos, 0, 0) + self._tfile.write(new.asString()) + self._tfile.write(z64) + + # Check quota + if self._quota is not None and here > self._quota: + raise FileStorageQuotaError( + "The storage quota has been exceeded.") + + def _data_find(self, tpos, oid, data): + # Return backpointer for oid. Must call with the lock held. + # This is a file offset to oid's data record if found, else 0. + # The data records in the transaction at tpos are searched for oid. + # If a data record for oid isn't found, returns 0. + # Else if oid's data record contains a backpointer, that + # backpointer is returned. + # Else oid's data record contains the data, and the file offset of + # oid's data record is returned. This data record should contain + # a pickle identical to the 'data' argument. + + # Unclear: If the length of the stored data doesn't match len(data), + # an exception is raised. If the lengths match but the data isn't + # the same, 0 is returned. Why the discrepancy? + self._file.seek(tpos) + h = self._file.read(TRANS_HDR_LEN) + tid, tl, status, ul, dl, el = unpack(TRANS_HDR, h) + status = as_text(status) + self._file.read(ul + dl + el) + tend = tpos + tl + 8 + pos = self._file.tell() + while pos < tend: + h = self._read_data_header(pos) + if h.oid == oid: + # Make sure this looks like the right data record + if h.plen == 0: + # This is also a backpointer. Gotta trust it. + return pos + if h.plen != len(data): + # The expected data doesn't match what's in the + # backpointer. Something is wrong. 
+ logger.error("Mismatch between data and" + " backpointer at %d", pos) + return 0 + _data = self._file.read(h.plen) + if data != _data: + return 0 + return pos + pos += h.recordlen() + self._file.seek(pos) + return 0 + + def restore(self, oid, serial, data, version, prev_txn, transaction): + # A lot like store() but without all the consistency checks. This + # should only be used when we /know/ the data is good, hence the + # method name. While the signature looks like store() there are some + # differences: + # + # - serial is the serial number of /this/ revision, not of the + # previous revision. It is used instead of self._tid, which is + # ignored. + # + # - Nothing is returned + # + # - data can be None, which indicates a George Bailey object + # (i.e. one who's creation has been transactionally undone). + # + # prev_txn is a backpointer. In the original database, it's possible + # that the data was actually living in a previous transaction. This + # can happen for transactional undo and other operations, and is used + # as a space saving optimization. Under some circumstances the + # prev_txn may not actually exist in the target database (i.e. self) + # for example, if it's been packed away. In that case, the prev_txn + # should be considered just a hint, and is ignored if the transaction + # doesn't exist. 
+ if self._is_read_only: + raise ReadOnlyError() + if transaction is not self._transaction: + raise StorageTransactionError(self, transaction) + if version: + raise TypeError("Versions are no-longer supported") + + with self._lock: + if oid > self._oid: + self.set_max_oid(oid) + prev_pos = 0 + if prev_txn is not None: + prev_txn_pos = self._txn_find(prev_txn, 0) + if prev_txn_pos: + prev_pos = self._data_find(prev_txn_pos, oid, data) + old = self._index_get(oid, 0) + # Calculate the file position in the temporary file + here = self._pos + self._tfile.tell() + self._thl + # And update the temp file index + self._tindex[oid] = here + if prev_pos: + # If there is a valid prev_pos, don't write data. + data = None + if data is None: + dlen = 0 + else: + dlen = len(data) + + # Write the recovery data record + new = DataHeader(oid, serial, old, self._pos, 0, dlen) + + self._tfile.write(new.asString()) + + # Finally, write the data or a backpointer. + if data is None: + if prev_pos: + self._tfile.write(p64(prev_pos)) + else: + # Write a zero backpointer, which indicates an + # un-creation transaction. + self._tfile.write(z64) + else: + self._tfile.write(data) + + def supportsUndo(self): + return 1 + + def _clear_temp(self): + self._tindex.clear() + if self._tfile is not None: + self._tfile.seek(0) + + def _begin(self, tid, u, d, e): + self._nextpos = 0 + self._thl = TRANS_HDR_LEN + len(u) + len(d) + len(e) + if self._thl > 65535: + # one of u, d, or e may be > 65535 + # We have to check lengths here because struct.pack + # doesn't raise an exception on overflow! 
+ if len(u) > 65535: + raise FileStorageError('user name too long') + if len(d) > 65535: + raise FileStorageError('description too long') + if len(e) > 65535: + raise FileStorageError('too much extension data') + + def tpc_vote(self, transaction): + with self._lock: + if transaction is not self._transaction: + raise StorageTransactionError( + "tpc_vote called with wrong transaction") + dlen = self._tfile.tell() + if not dlen: + return # No data in this trans + self._tfile.seek(0) + user, descr, ext = self._ude + + self._file.seek(self._pos) + tl = self._thl + dlen + + try: + h = TxnHeader(self._tid, tl, "c", len(user), + len(descr), len(ext)) + h.user = user + h.descr = descr + h.ext = ext + self._file.write(h.asString()) + cp(self._tfile, self._file, dlen) + self._file.write(p64(tl)) + self._file.flush() + except: + # Hm, an error occurred writing out the data. Maybe the + # disk is full. We don't want any turd at the end. + self._file.truncate(self._pos) + self._files.flush() + raise + self._nextpos = self._pos + (tl + 8) + return self._resolved + + def tpc_finish(self, transaction, f=None): + with self._files.write_lock(): + with self._lock: + if transaction is not self._transaction: + raise StorageTransactionError( + "tpc_finish called with wrong transaction") + try: + tid = self._tid + if f is not None: + f(tid) + self._finish(tid, *self._ude) + self._clear_temp() + finally: + self._ude = None + self._transaction = None + self._commit_lock.release() + return tid + + def _finish(self, tid, u, d, e): + # If self._nextpos is 0, then the transaction didn't write any + # data, so we don't bother writing anything to the file. + if self._nextpos: + # Clear the checkpoint flag + self._file.seek(self._pos+16) + self._file.write(as_bytes(self._tstatus)) + try: + # At this point, we may have committed the data to disk. + # If we fail from here, we're in bad shape. + self._finish_finish(tid) + except: + # Ouch. This is bad. 
Let's try to get back to where we were + # and then roll over and die + logger.critical("Failure in _finish. Closing.", exc_info=True) + self.close() + raise + + def _finish_finish(self, tid): + # This is a separate method to allow tests to replace it with + # something broken. :) + + self._file.flush() + if fsync is not None: + fsync(self._file.fileno()) + + self._pos = self._nextpos + self._index.update(self._tindex) + self._ltid = tid + self._blob_tpc_finish() + + def _abort(self): + if self._nextpos: + self._file.truncate(self._pos) + self._files.flush() + self._nextpos=0 + self._blob_tpc_abort() + + def _undoDataInfo(self, oid, pos, tpos): + """Return the tid, data pointer, and data for the oid record at pos + """ + if tpos: + itpos = tpos - self._pos - self._thl + pos = tpos + tpos = self._tfile.tell() + h = self._tfmt._read_data_header(itpos, oid) + afile = self._tfile + else: + h = self._read_data_header(pos, oid) + afile = self._file + + if h.oid != oid: + raise UndoError("Invalid undo transaction id", oid) + + if h.plen: + data = afile.read(h.plen) + else: + data = '' + pos = h.back + + if tpos: + self._tfile.seek(tpos) # Restore temp file to end + + return h.tid, pos, data + + def getTid(self, oid): + with self._lock: + pos = self._lookup_pos(oid) + h = self._read_data_header(pos, oid) + if h.plen == 0 and h.back == 0: + # Undone creation + raise POSKeyError(oid) + return h.tid + + def _transactionalUndoRecord(self, oid, pos, tid, pre): + """Get the undo information for a data record + + 'pos' points to the data header for 'oid' in the transaction + being undone. 'tid' refers to the transaction being undone. + 'pre' is the 'prev' field of the same data header. + + Return a 3-tuple consisting of a pickle, data pointer, and + current position. If the pickle is true, then the data + pointer must be 0, but the pickle can be empty *and* the + pointer 0. 
+ """ + + copy = True # Can we just copy a data pointer + + # First check if it is possible to undo this record. + tpos = self._tindex.get(oid, 0) + ipos = self._index.get(oid, 0) + tipos = tpos or ipos + + if tipos != pos: + # The transaction being undone isn't current because: + # a) A later transaction was committed ipos != pos, or + # b) A change was made in the current transaction. This + # could only be a previous undo in a multi-undo. + # (We don't allow multiple data managers with the same + # storage to participate in the same transaction.) + assert tipos > pos + + # Get current data, as identified by tipos. We'll use + # it to decide if and how we can undo in this case. + ctid, cdataptr, current_data = self._undoDataInfo(oid, ipos, tpos) + + if cdataptr != pos: + + # if cdataptr was == pos, then we'd be cool, because + # we're dealing with the same data. + + # Because they aren't equal, we have to dig deeper + + # Let's see if data to be undone and current data + # are the same. If not, we'll have to decide whether + # we should try conflict resolution. + + try: + data_to_be_undone = self._loadBack_impl(oid, pos)[0] + if not current_data: + current_data = self._loadBack_impl(oid, cdataptr)[0] + + if data_to_be_undone != current_data: + # OK, so the current data is different from + # the data being undone. We can't just copy: + copy = False + + if not pre: + # The transaction we're undoing has no + # previous state to merge with, so we + # can't resolve a conflict. + raise UndoError( + "Can't undo an add transaction followed by" + " conflicting transactions.", oid) + except KeyError: + # LoadBack gave us a key error. Bail. + raise UndoError("_loadBack() failed", oid) + + # Return the data that should be written in the undo record. + if not pre: + # We're undoing object addition. We're doing this because + # subsequent transactions has no net effect on the state + # (possibly because some of them were undos). 
+ return "", 0, ipos + + if copy: + # we can just copy our previous-record pointer forward + return "", pre, ipos + + try: + pre_data = self._loadBack_impl(oid, pre)[0] + except KeyError: + # couldn't find oid; what's the real explanation for this? + raise UndoError("_loadBack() failed for %s", oid) + + try: + data = self.tryToResolveConflict( + oid, ctid, tid, pre_data, current_data) + return data, 0, ipos + except ConflictError: + pass + + raise UndoError("Some data were modified by a later transaction", oid) + + # undoLog() returns a description dict that includes an id entry. + # The id is opaque to the client, but contains the transaction id. + # The transactionalUndo() implementation does a simple linear + # search through the file (from the end) to find the transaction. + + def undoLog(self, first=0, last=-20, filter=None): + if last < 0: + # -last is supposed to be the max # of transactions. Convert to + # a positive index. Should have x - first = -last, which + # means x = first - last. This is spelled out here because + # the normalization code was incorrect for years (used +1 + # instead -- off by 1), until ZODB 3.4. + last = first - last + with self._lock: + if self._pack_is_in_progress: + raise UndoError( + 'Undo is currently disabled for database maintenance.

') + us = UndoSearch(self._file, self._pos, first, last, filter) + while not us.finished(): + # Hold lock for batches of 20 searches, so default search + # parameters will finish without letting another thread run. + for i in range(20): + if us.finished(): + break + us.search() + # Give another thread a chance, so that a long undoLog() + # operation doesn't block all other activity. + self._lock.release() + self._lock.acquire() + return us.results + + def undo(self, transaction_id, transaction): + """Undo a transaction, given by transaction_id. + + Do so by writing new data that reverses the action taken by + the transaction. + + Usually, we can get by with just copying a data pointer, by + writing a file position rather than a pickle. Sometimes, we + may do conflict resolution, in which case we actually copy + new data that results from resolution. + """ + + if self._is_read_only: + raise ReadOnlyError() + if transaction is not self._transaction: + raise StorageTransactionError(self, transaction) + + with self._lock: + # Find the right transaction to undo and call _txn_undo_write(). + tid = decodebytes(transaction_id + b'\n') + assert len(tid) == 8 + tpos = self._txn_find(tid, 1) + tindex = self._txn_undo_write(tpos) + self._tindex.update(tindex) + return self._tid, tindex.keys() + + def _txn_find(self, tid, stop_at_pack): + pos = self._pos + while pos > 39: + self._file.seek(pos - 8) + pos = pos - u64(self._file.read(8)) - 8 + self._file.seek(pos) + h = self._file.read(TRANS_HDR_LEN) + _tid = h[:8] + if _tid == tid: + return pos + if stop_at_pack: + # check the status field of the transaction header + if h[16] == b'p': + break + raise UndoError("Invalid transaction id") + + def _txn_undo_write(self, tpos): + # a helper function to write the data records for transactional undo + + otloc = self._pos + here = self._pos + self._tfile.tell() + self._thl + base = here - self._tfile.tell() + # Let's move the file pointer back to the start of the txn record. 
+ th = self._read_txn_header(tpos) + if th.status != " ": + raise UndoError('non-undoable transaction') + tend = tpos + th.tlen + pos = tpos + th.headerlen() + tindex = {} + + # keep track of failures, cause we may succeed later + failures = {} + # Read the data records for this transaction + while pos < tend: + h = self._read_data_header(pos) + if h.oid in failures: + del failures[h.oid] # second chance! + + assert base + self._tfile.tell() == here, (here, base, + self._tfile.tell()) + try: + p, prev, ipos = self._transactionalUndoRecord( + h.oid, pos, h.tid, h.prev) + except UndoError as v: + # Don't fail right away. We may be redeemed later! + failures[h.oid] = v + else: + + if self.blob_dir and not p and prev: + try: + up, userial = self._loadBackTxn(h.oid, prev) + except POSKeyError: + pass # It was removed, so no need to copy data + else: + if self.is_blob_record(up): + # We're undoing a blob modification operation. + # We have to copy the blob data + tmp = mktemp(dir=self.fshelper.temp_dir) + with self.openCommittedBlobFile( + h.oid, userial) as sfp: + with open(tmp, 'wb') as dfp: + cp(sfp, dfp) + self._blob_storeblob(h.oid, self._tid, tmp) + + new = DataHeader(h.oid, self._tid, ipos, otloc, 0, len(p)) + + # TODO: This seek shouldn't be necessary, but some other + # bit of code is messing with the file pointer. 
+ assert self._tfile.tell() == here - base, (here, base, + self._tfile.tell()) + self._tfile.write(new.asString()) + if p: + self._tfile.write(p) + else: + self._tfile.write(p64(prev)) + tindex[h.oid] = here + here += new.recordlen() + + pos += h.recordlen() + if pos > tend: + raise UndoError("non-undoable transaction") + + if failures: + raise MultipleUndoErrors(list(failures.items())) + + return tindex + + def history(self, oid, size=1, filter=None): + with self._lock: + r = [] + pos = self._lookup_pos(oid) + + while 1: + if len(r) >= size: return r + h = self._read_data_header(pos) + + th = self._read_txn_header(h.tloc) + if th.ext: + d = loads(th.ext) + else: + d = {} + + d.update({"time": TimeStamp(h.tid).timeTime(), + "user_name": th.user, + "description": th.descr, + "tid": h.tid, + "size": h.plen, + }) + + if filter is None or filter(d): + r.append(d) + + if h.prev: + pos = h.prev + else: + return r + + def _redundant_pack(self, file, pos): + assert pos > 8, pos + file.seek(pos - 8) + p = u64(file.read(8)) + file.seek(pos - p + 8) + return file.read(1) not in ' u' + + @staticmethod + def packer(storage, referencesf, stop, gc): + # Our default packer is built around the original packer. We + # simply adapt the old interface to the new. We don't really + # want to invest much in the old packer, at least for now. + assert referencesf is not None + p = FileStoragePacker(storage, referencesf, stop, gc) + try: + opos = p.pack() + if opos is None: + return None + return opos, p.index + finally: + p.close() + + def pack(self, t, referencesf, gc=None): + """Copy data from the current database file to a packed file + + Non-current records from transactions with time-stamp strings less + than packtss are ommitted. As are all undone records. + + Also, data back pointers that point before packtss are resolved and + the associated data are copied, since the old records are not copied. 
+ """ + if self._is_read_only: + raise ReadOnlyError() + + stop = TimeStamp(*time.gmtime(t)[:5]+(t%60,)).raw() + if stop == z64: + raise FileStorageError('Invalid pack time') + + # If the storage is empty, there's nothing to do. + if not self._index: + return + + with self._lock: + if self._pack_is_in_progress: + raise FileStorageError('Already packing') + self._pack_is_in_progress = True + + if gc is None: + gc = self._pack_gc + + oldpath = self._file_name + ".old" + if os.path.exists(oldpath): + os.remove(oldpath) + if self.blob_dir and os.path.exists(self.blob_dir + ".old"): + remove_committed_dir(self.blob_dir + ".old") + + cleanup = [] + + have_commit_lock = False + try: + pack_result = None + try: + pack_result = self.packer(self, referencesf, stop, gc) + except RedundantPackWarning as detail: + logger.info(str(detail)) + if pack_result is None: + return + have_commit_lock = True + opos, index = pack_result + with self._files.write_lock(): + with self._lock: + self._files.empty() + self._file.close() + try: + os.rename(self._file_name, oldpath) + except Exception: + self._file = open(self._file_name, 'r+b') + raise + + # OK, we're beyond the point of no return + os.rename(self._file_name + '.pack', self._file_name) + self._file = open(self._file_name, 'r+b') + self._initIndex(index, self._tindex) + self._pos = opos + + # We're basically done. Now we need to deal with removed + # blobs and removing the .old file (see further down). 
+ + if self.blob_dir: + self._commit_lock.release() + have_commit_lock = False + self._remove_blob_files_tagged_for_removal_during_pack() + + finally: + if have_commit_lock: + self._commit_lock.release() + with self._lock: + self._pack_is_in_progress = False + + if not self.pack_keep_old: + os.remove(oldpath) + + with self._lock: + self._save_index() + + def _remove_blob_files_tagged_for_removal_during_pack(self): + lblob_dir = len(self.blob_dir) + fshelper = self.fshelper + old = self.blob_dir+'.old' + + # Helper to clean up dirs left empty after moving things to old + def maybe_remove_empty_dir_containing(path, level=0): + path = os.path.dirname(path) + if len(path) <= lblob_dir or os.listdir(path): + return + + # Path points to an empty dir. There may be a race. We + # might have just removed the dir for an oid (or a parent + # dir) and while we're cleaning up it's parent, another + # thread is adding a new entry to it. + + # We don't have to worry about level 0, as this is just a + # directory containing an object's revisions. If it is + # enmpty, the object must have been garbage. + + # If the level is 1 or higher, we need to be more + # careful. We'll get the storage lock and double check + # that the dir is still empty before removing it. + + removed = False + if level: + self._lock.acquire() + try: + if not os.listdir(path): + os.rmdir(path) + removed = True + finally: + if level: + self._lock.release() + + if removed: + maybe_remove_empty_dir_containing(path, level+1) + + + if self.pack_keep_old: + # Helpers that move oid dir or revision file to the old dir. + os.mkdir(old) + link_or_copy(os.path.join(self.blob_dir, '.layout'), + os.path.join(old, '.layout')) + def handle_file(path): + newpath = old+path[lblob_dir:] + dest = os.path.dirname(newpath) + if not os.path.exists(dest): + os.makedirs(dest) + os.rename(path, newpath) + handle_dir = handle_file + else: + # Helpers that remove an oid dir or revision file. 
+ handle_file = remove_committed + handle_dir = remove_committed_dir + + # Fist step: move or remove oids or revisions + with open(os.path.join(self.blob_dir, '.removed'), 'rb') as fp: + for line in fp: + line = binascii.unhexlify(line.strip()) + + if len(line) == 8: + # oid is garbage, re/move dir + path = fshelper.getPathForOID(line) + if not os.path.exists(path): + # Hm, already gone. Odd. + continue + handle_dir(path) + maybe_remove_empty_dir_containing(path, 1) + continue + + if len(line) != 16: + raise ValueError( + "Bad record in ", self.blob_dir, '.removed') + + oid, tid = line[:8], line[8:] + path = fshelper.getBlobFilename(oid, tid) + if not os.path.exists(path): + # Hm, already gone. Odd. + continue + handle_file(path) + assert not os.path.exists(path) + maybe_remove_empty_dir_containing(path) + + os.remove(os.path.join(self.blob_dir, '.removed')) + + if not self.pack_keep_old: + return + + # Second step, copy remaining files. + for path, dir_names, file_names in os.walk(self.blob_dir): + for file_name in file_names: + if not file_name.endswith('.blob'): + continue + file_path = os.path.join(path, file_name) + dest = os.path.dirname(old+file_path[lblob_dir:]) + if not os.path.exists(dest): + os.makedirs(dest) + link_or_copy(file_path, old+file_path[lblob_dir:]) + + def iterator(self, start=None, stop=None): + return FileIterator(self._file_name, start, stop) + + def lastInvalidations(self, count): + file = self._file + seek = file.seek + read = file.read + with self._lock: + pos = self._pos + while count > 0 and pos > 4: + count -= 1 + seek(pos-8) + pos = pos - 8 - u64(read(8)) + + seek(0) + return [(trans.tid, [r.oid for r in trans]) + for trans in FileIterator(self._file_name, pos=pos)] + + def lastTid(self, oid): + """Return last serialno committed for object oid. + + If there is no serialno for this oid -- which can only occur + if it is a new object -- return None. 
+ """ + try: + return self.getTid(oid) + except KeyError: + return None + + def cleanup(self): + """Remove all files created by this storage.""" + for ext in '', '.old', '.tmp', '.lock', '.index', '.pack': + try: + os.remove(self._file_name + ext) + except OSError as e: + if e.errno != errno.ENOENT: + raise + + def record_iternext(self, next=None): + index = self._index + oid = index.minKey(next) + + oid_as_long, = unpack(">Q", oid) + next_oid = pack(">Q", oid_as_long + 1) + try: + next_oid = index.minKey(next_oid) + except ValueError: # "empty tree" error + next_oid = None + + data, tid = load_current(self, oid) + + return oid, tid, data, next_oid + + ###################################################################### + # The following 2 methods are for testing a ZEO extension mechanism + def getExtensionMethods(self): + return dict(answer_to_the_ultimate_question=None) + + def answer_to_the_ultimate_question(self): + return 42 + # + ###################################################################### + +def shift_transactions_forward(index, tindex, file, pos, opos): + """Copy transactions forward in the data file + + This might be done as part of a recovery effort + """ + + # Cache a bunch of methods + seek=file.seek + read=file.read + write=file.write + + index_get=index.get + + # Initialize, + pv=z64 + p1=opos + p2=pos + offset=p2-p1 + + # Copy the data in two stages. In the packing stage, + # we skip records that are non-current or that are for + # unreferenced objects. We also skip undone transactions. + # + # After the packing stage, we copy everything but undone + # transactions, however, we have to update various back pointers. + # We have to have the storage lock in the second phase to keep + # data from being changed while we're copying. 
+ pnv=None + while 1: + + # Read the transaction record + seek(pos) + h=read(TRANS_HDR_LEN) + if len(h) < TRANS_HDR_LEN: break + tid, stl, status, ul, dl, el = unpack(TRANS_HDR,h) + status = as_text(status) + if status=='c': break # Oops. we found a checkpoint flag. + tl=u64(stl) + tpos=pos + tend=tpos+tl + + otpos=opos # start pos of output trans + + thl=ul+dl+el + h2=read(thl) + if len(h2) != thl: + raise PackError(opos) + + # write out the transaction record + seek(opos) + write(h) + write(h2) + + thl=TRANS_HDR_LEN+thl + pos=tpos+thl + opos=otpos+thl + + while pos < tend: + # Read the data records for this transaction + seek(pos) + h=read(DATA_HDR_LEN) + oid,serial,sprev,stloc,vlen,splen = unpack(DATA_HDR, h) + assert not vlen + plen=u64(splen) + dlen=DATA_HDR_LEN+(plen or 8) + + tindex[oid]=opos + + if plen: p=read(plen) + else: + p=read(8) + p=u64(p) + if p >= p2: p=p-offset + elif p >= p1: + # Ick, we're in trouble. Let's bail + # to the index and hope for the best + p=index_get(oid, 0) + p=p64(p) + + # WRITE + seek(opos) + sprev=p64(index_get(oid, 0)) + write(pack(DATA_HDR, + oid, serial, sprev, p64(otpos), 0, splen)) + + write(p) + + opos=opos+dlen + pos=pos+dlen + + # skip the (intentionally redundant) transaction length + pos=pos+8 + + if status != 'u': + index.update(tindex) # Record the position + + tindex.clear() + + write(stl) + opos=opos+8 + + return opos + +def search_back(file, pos): + seek=file.seek + read=file.read + seek(0,2) + s=p=file.tell() + while p > pos: + seek(p-8) + l=u64(read(8)) + if l <= 0: break + p=p-l-8 + + return p, s + +def recover(file_name): + file=open(file_name, 'r+b') + index={} + tindex={} + + pos, oid, tid = read_index(file, file_name, index, tindex, recover=1) + if oid is not None: + print("Nothing to recover") + return + + opos=pos + pos, sz = search_back(file, pos) + if pos < sz: + npos = shift_transactions_forward(index, tindex, file, pos, opos) + + file.truncate(npos) + + print("Recovered file, lost %s, ended up with 
%s bytes" % ( + pos-opos, npos)) + + + +def read_index(file, name, index, tindex, stop=b'\377'*8, + ltid=z64, start=4, maxoid=z64, recover=0, read_only=0): + """Scan the file storage and update the index. + + Returns file position, max oid, and last transaction id. It also + stores index information in the three dictionary arguments. + + Arguments: + file -- a file object (the Data.fs) + name -- the name of the file (presumably file.name) + index -- fsIndex, oid -> data record file offset + tindex -- dictionary, oid -> data record offset + tindex is cleared before return + + There are several default arguments that affect the scan or the + return values. TODO: document them. + + start -- the file position at which to start scanning for oids added + beyond the ones the passed-in indices know about. The .index + file caches the highest ._pos FileStorage knew about when the + the .index file was last saved, and that's the intended value + to pass in for start; accept the default (and pass empty + indices) to recreate the index from scratch + maxoid -- ignored (it meant something prior to ZODB 3.2.6; the argument + still exists just so the signature of read_index() stayed the + same) + + The file position returned is the position just after the last + valid transaction record. The oid returned is the maximum object + id in `index`, or z64 if the index is empty. The transaction id is the + tid of the last transaction, or ltid if the index is empty. 
+ """ + + read = file.read + seek = file.seek + seek(0, 2) + file_size = file.tell() + fmt = TempFormatter(file) + + if file_size: + if file_size < start: + raise FileStorageFormatError(file.name) + seek(0) + if read(4) != packed_version: + raise FileStorageFormatError(name) + else: + if not read_only: + file.write(packed_version) + return 4, z64, ltid + + index_get = index.get + + pos = start + seek(start) + tid = b'\0' * 7 + b'\1' + + while 1: + # Read the transaction record + h = read(TRANS_HDR_LEN) + if not h: + break + if len(h) != TRANS_HDR_LEN: + if not read_only: + logger.warning('%s truncated at %s', name, pos) + seek(pos) + file.truncate() + break + + tid, tl, status, ul, dl, el = unpack(TRANS_HDR, h) + status = as_text(status) + + if tid <= ltid: + logger.warning("%s time-stamp reduction at %s", name, pos) + ltid = tid + + if pos+(tl+8) > file_size or status=='c': + # Hm, the data were truncated or the checkpoint flag wasn't + # cleared. They may also be corrupted, + # in which case, we don't want to totally lose the data. + if not read_only: + logger.warning("%s truncated, possibly due to damaged" + " records at %s", name, pos) + _truncate(file, name, pos) + break + + if status not in ' up': + logger.warning('%s has invalid status, %s, at %s', + name, status, pos) + + if tl < TRANS_HDR_LEN + ul + dl + el: + # We're in trouble. Find out if this is bad data in the + # middle of the file, or just a turd that Win 9x dropped + # at the end when the system crashed. + # Skip to the end and read what should be the transaction length + # of the last transaction. + seek(-8, 2) + rtl = u64(read(8)) + # Now check to see if the redundant transaction length is + # reasonable: + if file_size - rtl < pos or rtl < TRANS_HDR_LEN: + logger.critical('%s has invalid transaction header at %s', + name, pos) + if not read_only: + logger.warning( + "It appears that there is invalid data at the end of " + "the file, possibly due to a system crash. 
%s " + "truncated to recover from bad data at end." % name) + _truncate(file, name, pos) + break + else: + if recover: + return pos, None, None + panic('%s has invalid transaction header at %s', name, pos) + + if tid >= stop: + break + + tpos = pos + tend = tpos + tl + + if status == 'u': + # Undone transaction, skip it + seek(tend) + h = u64(read(8)) + if h != tl: + if recover: + return tpos, None, None + panic('%s has inconsistent transaction length at %s', + name, pos) + pos = tend + 8 + continue + + pos = tpos + TRANS_HDR_LEN + ul + dl + el + while pos < tend: + # Read the data records for this transaction + h = fmt._read_data_header(pos) + dlen = h.recordlen() + tindex[h.oid] = pos + + if pos + dlen > tend or h.tloc != tpos: + if recover: + return tpos, None, None + panic("%s data record exceeds transaction record at %s", + name, pos) + + if index_get(h.oid, 0) != h.prev: + if h.prev: + if recover: + return tpos, None, None + logger.error("%s incorrect previous pointer at %s", + name, pos) + else: + logger.warning("%s incorrect previous pointer at %s", + name, pos) + + pos += dlen + + if pos != tend: + if recover: + return tpos, None, None + panic("%s data records don't add up at %s",name,tpos) + + # Read the (intentionally redundant) transaction length + seek(pos) + h = u64(read(8)) + if h != tl: + if recover: + return tpos, None, None + panic("%s redundant transaction length check failed at %s", + name, pos) + pos += 8 + + index.update(tindex) + tindex.clear() + + # Caution: fsIndex doesn't have an efficient __nonzero__ or __len__. + # That's why we do try/except instead. fsIndex.maxKey() is fast. + try: + maxoid = index.maxKey() + except ValueError: + # The index is empty. 
+ pass # maxoid is already equal to z64 + + return pos, maxoid, ltid + + +def _truncate(file, name, pos): + file.seek(0, 2) + file_size = file.tell() + try: + i = 0 + while 1: + oname='%s.tr%s' % (name, i) + if os.path.exists(oname): + i += 1 + else: + logger.warning("Writing truncated data from %s to %s", + name, oname) + o = open(oname,'wb') + file.seek(pos) + cp(file, o, file_size-pos) + o.close() + break + except: + logger.exception("couldn\'t write truncated data for %s", name) + raise StorageSystemError("Couldn't save truncated data") + + file.seek(pos) + file.truncate() + + +class FileIterator(FileStorageFormatter): + """Iterate over the transactions in a FileStorage file. + """ + _ltid = z64 + _file = None + + def __init__(self, filename, start=None, stop=None, pos=4): + assert isinstance(filename, STRING_TYPES) + file = open(filename, 'rb') + self._file = file + self._file_name = filename + if file.read(4) != packed_version: + raise FileStorageFormatError(file.name) + file.seek(0,2) + self._file_size = file.tell() + if (pos < 4) or pos > self._file_size: + raise ValueError("Given position is greater than the file size", + pos, self._file_size) + self._pos = pos + assert start is None or isinstance(start, bytes) + assert stop is None or isinstance(stop, bytes) + self._start = start + self._stop = stop + if start: + if self._file_size <= 4: + return + self._skip_to_start(start) + + def __len__(self): + # Define a bogus __len__() to make the iterator work + # with code like builtin list() and tuple() in Python 2.1. + # There's a lot of C code that expects a sequence to have + # an __len__() but can cope with any sort of mistake in its + # implementation. So just return 0. + return 0 + + # This allows us to pass an iterator as the `other` argument to + # copyTransactionsFrom() in BaseStorage. The advantage here is that we + # can create the iterator manually, e.g. setting start and stop, and then + # just let copyTransactionsFrom() do its thing. 
+ def iterator(self): + return self + + def close(self): + file = self._file + if file is not None: + self._file = None + file.close() + + def _skip_to_start(self, start): + file = self._file + pos1 = self._pos + file.seek(pos1) + tid1 = file.read(8) # XXX bytes + if len(tid1) < 8: + raise CorruptedError("Couldn't read tid.") + if start < tid1: + pos2 = pos1 + tid2 = tid1 + file.seek(4) + tid1 = file.read(8) + if start <= tid1: + self._pos = 4 + return + pos1 = 4 + else: + if start == tid1: + return + + # Try to read the last transaction. We could be unlucky and + # opened the file while committing a transaction. In that + # case, we'll just scan from the beginning if the file is + # small enough, otherwise we'll fail. + file.seek(self._file_size-8) + l = u64(file.read(8)) + if not (l + 12 <= self._file_size and + self._read_num(self._file_size-l) == l): + if self._file_size < (1<<20): + return self._scan_foreward(start) + raise ValueError("Can't find last transaction in large file") + pos2 = self._file_size-l-8 + file.seek(pos2) + tid2 = file.read(8) + if tid2 < tid1: + raise CorruptedError("Tids out of order.") + if tid2 <= start: + if tid2 == start: + self._pos = pos2 + else: + self._pos = self._file_size + return + + t1 = TimeStamp(tid1).timeTime() + t2 = TimeStamp(tid2).timeTime() + ts = TimeStamp(start).timeTime() + if (ts - t1) < (t2 - ts): + return self._scan_forward(pos1, start) + else: + return self._scan_backward(pos2, start) + + def _scan_forward(self, pos, start): + logger.debug("Scan forward %s:%s looking for %r", + self._file_name, pos, start) + file = self._file + while 1: + # Read the transaction record + h = self._read_txn_header(pos) + if h.tid >= start: + self._pos = pos + return + + pos += h.tlen + 8 + + def _scan_backward(self, pos, start): + logger.debug("Scan backward %s:%s looking for %r", + self._file_name, pos, start) + file = self._file + seek = file.seek + read = file.read + while 1: + pos -= 8 + seek(pos) + tlen = u64(read(8)) + pos -= 
tlen + h = self._read_txn_header(pos) + if h.tid <= start: + if h.tid == start: + self._pos = pos + else: + self._pos = pos + tlen + 8 + return + + # Iterator protocol + def __iter__(self): + return self + + def __next__(self): + if self._file is None: + raise StopIteration() + + pos = self._pos + while True: + + # Read the transaction record + try: + h = self._read_txn_header(pos) + except CorruptedDataError as err: + # If buf is empty, we've reached EOF. + if not err.buf: + break + raise + + if h.tid <= self._ltid: + logger.warning("%s time-stamp reduction at %s", + self._file.name, pos) + self._ltid = h.tid + + if self._stop is not None and h.tid > self._stop: + break + + if h.status == "c": + # Assume we've hit the last, in-progress transaction + break + + if pos + h.tlen + 8 > self._file_size: + # Hm, the data were truncated or the checkpoint flag wasn't + # cleared. They may also be corrupted, + # in which case, we don't want to totally lose the data. + logger.warning("%s truncated, possibly due to" + " damaged records at %s", self._file.name, pos) + break + + if h.status not in " up": + logger.warning('%s has invalid status,' + ' %s, at %s', self._file.name, h.status, pos) + + if h.tlen < h.headerlen(): + # We're in trouble. Find out if this is bad data in + # the middle of the file, or just a turd that Win 9x + # dropped at the end when the system crashed. Skip to + # the end and read what should be the transaction + # length of the last transaction. + self._file.seek(-8, 2) + rtl = u64(self._file.read(8)) + # Now check to see if the redundant transaction length is + # reasonable: + if self._file_size - rtl < pos or rtl < TRANS_HDR_LEN: + logger.critical("%s has invalid transaction header at %s", + self._file.name, pos) + logger.warning( + "It appears that there is invalid data at the end of " + "the file, possibly due to a system crash. %s " + "truncated to recover from bad data at end." 
+ % self._file.name) + break + else: + logger.warning("%s has invalid transaction header at %s", + self._file.name, pos) + break + + tpos = pos + tend = tpos + h.tlen + + if h.status != "u": + pos = tpos + h.headerlen() + e = {} + if h.elen: + try: + e = loads(h.ext) + except: + pass + + result = TransactionRecord(h.tid, h.status, h.user, h.descr, + e, pos, tend, self._file, tpos) + + # Read the (intentionally redundant) transaction length + self._file.seek(tend) + rtl = u64(self._file.read(8)) + if rtl != h.tlen: + logger.warning("%s redundant transaction length check" + " failed at %s", self._file.name, tend) + break + self._pos = tend + 8 + + return result + + self.close() + raise StopIteration() + + next = __next__ + + +class TransactionRecord(_TransactionRecord): + + def __init__(self, tid, status, user, desc, ext, pos, tend, file, tpos): + _TransactionRecord.__init__( + self, tid, status, user, desc, ext) + self._pos = pos + self._tend = tend + self._file = file + self._tpos = tpos + + def __iter__(self): + return TransactionRecordIterator(self) + +class TransactionRecordIterator(FileStorageFormatter): + """Iterate over the transactions in a FileStorage file.""" + + def __init__(self, record): + self._file = record._file + self._pos = record._pos + self._tpos = record._tpos + self._tend = record._tend + + def __iter__(self): + return self + + def __next__(self): + pos = self._pos + while pos < self._tend: + # Read the data records for this transaction + h = self._read_data_header(pos) + dlen = h.recordlen() + + if pos + dlen > self._tend or h.tloc != self._tpos: + logger.warning("%s data record exceeds transaction" + " record at %s", file.name, pos) + break + + self._pos = pos + dlen + prev_txn = None + if h.plen: + data = self._file.read(h.plen) + else: + if h.back == 0: + # If the backpointer is 0, then this transaction + # undoes the object creation. It undid the + # transaction that created it. Return None + # instead of a pickle to indicate this. 
+ data = None + else: + data, tid = self._loadBackTxn(h.oid, h.back, False) + # Caution: :ooks like this only goes one link back. + # Should it go to the original data like BDBFullStorage? + prev_txn = self.getTxnFromData(h.oid, h.back) + + return Record(h.oid, h.tid, data, prev_txn, pos) + + raise StopIteration() + + next = __next__ + + +class Record(_DataRecord): + + def __init__(self, oid, tid, data, prev, pos): + super(Record, self).__init__(oid, tid, data, prev) + self.pos = pos + + +class UndoSearch(object): + + def __init__(self, file, pos, first, last, filter=None): + self.file = file + self.pos = pos + self.first = first + self.last = last + self.filter = filter + # self.i is the index of the transaction we're _going_ to find + # next. When it reaches self.first, we should start appending + # to self.results. When it reaches self.last, we're done + # (although we may finish earlier). + self.i = 0 + self.results = [] + self.stop = False + + def finished(self): + """Return True if UndoSearch has found enough records.""" + # BAW: Why 39 please? This makes no sense (see also below). 
+ return self.i >= self.last or self.pos < 39 or self.stop + + def search(self): + """Search for another record.""" + dict = self._readnext() + if dict is not None and (self.filter is None or self.filter(dict)): + if self.i >= self.first: + self.results.append(dict) + self.i += 1 + + def _readnext(self): + """Read the next record from the storage.""" + self.file.seek(self.pos - 8) + self.pos -= u64(self.file.read(8)) + 8 + self.file.seek(self.pos) + h = self.file.read(TRANS_HDR_LEN) + tid, tl, status, ul, dl, el = unpack(TRANS_HDR, h) + status = as_text(status) + if status == 'p': + self.stop = 1 + return None + if status != ' ': + return None + d = u = b'' + if ul: + u = self.file.read(ul) + if dl: + d = self.file.read(dl) + e = {} + if el: + try: + e = loads(self.file.read(el)) + except: + pass + d = {'id': encodebytes(tid).rstrip(), + 'time': TimeStamp(tid).timeTime(), + 'user_name': u, + 'size': tl, + 'description': d} + d.update(e) + return d + +class FilePool(object): + + closed = False + writing = False + writers = 0 + + def __init__(self, file_name): + self.name = file_name + self._files = [] + self._out = [] + self._cond = utils.Condition() + + @contextlib.contextmanager + def write_lock(self): + with self._cond: + self.writers += 1 + while self.writing or self._out: + self._cond.wait() + if self.closed: + raise ValueError('closed') + self.writing = True + + try: + yield None + finally: + with self._cond: + self.writing = False + if self.writers > 0: + self.writers -= 1 + self._cond.notifyAll() + + @contextlib.contextmanager + def get(self): + with self._cond: + while self.writers: + self._cond.wait() + assert not self.writing + if self.closed: + raise ValueError('closed') + + try: + f = self._files.pop() + except IndexError: + f = open(self.name, 'rb') + self._out.append(f) + + try: + yield f + finally: + self._out.remove(f) + self._files.append(f) + if not self._out: + with self._cond: + if self.writers and not self._out: + self._cond.notifyAll() + + def 
empty(self): + while self._files: + self._files.pop().close() + + + def flush(self): + """Empty read buffers. + + This is required if they contain data of rolled back transactions. + """ + # Unfortunately, Python 3.x has no API to flush read buffers, and + # the API is ineffective in Python 2 on Mac OS X. + with self.write_lock(): + self.empty() + + def close(self): + with self._cond: + self.closed = True + while self._out: + self._out.pop().close() + self.empty() + self.writing = self.writers = 0 diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/FileStorage/__init__.py b/thesisenv/lib/python3.6/site-packages/ZODB/FileStorage/__init__.py new file mode 100644 index 0000000..449f8f2 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/FileStorage/__init__.py @@ -0,0 +1,8 @@ +# this is a package + +from ZODB.FileStorage.FileStorage import FileStorage, TransactionRecord +from ZODB.FileStorage.FileStorage import FileIterator, Record, packed_version + + +# BBB Alias for compatibility +RecordIterator = TransactionRecord diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/FileStorage/format.py b/thesisenv/lib/python3.6/site-packages/ZODB/FileStorage/format.py new file mode 100644 index 0000000..8bd7723 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/FileStorage/format.py @@ -0,0 +1,289 @@ +############################################################################## +# +# Copyright (c) 2003 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## +# +# File-based ZODB storage +# +# Files are arranged as follows. +# +# - The first 4 bytes are a file identifier. +# +# - The rest of the file consists of a sequence of transaction +# "records". +# +# A transaction record consists of: +# +# - 8-byte transaction id, which is also a time stamp. +# +# - 8-byte transaction record length - 8. +# +# - 1-byte status code +# ' ' (a blank) completed transaction that hasn't been packed +# 'p' completed transaction that has been packed +# 'c' checkpoint -- a transaction in progress, at the end of the file; +# it's been thru vote() but not finish(); if finish() completes +# normally, it will be overwritten with a blank; if finish() dies +# (e.g., out of disk space), cleanup code will try to truncate +# the file to chop off this incomplete transaction +# 'u' uncertain; no longer used; was previously used to record something +# about non-transactional undo +# +# - 2-byte length of user name +# +# - 2-byte length of description +# +# - 2-byte length of extension attributes +# +# - user name +# +# - description +# +# - extension attributes +# +# * A sequence of data records +# +# - 8-byte redundant transaction length -8 +# +# A data record consists of +# +# - 8-byte oid. +# +# - 8-byte tid, which matches the transaction id in the transaction record. +# +# - 8-byte previous-record file-position. +# +# - 8-byte beginning of transaction record file position. +# +# - 2-bytes with zero values. (Was version length.) +# +# - 8-byte data length +# +# ? data +# (data length > 0) +# +# ? 
8-byte position of data record containing data +# (data length == 0) +# +# Note that the lengths and positions are all big-endian. +# Also, the object ids time stamps are big-endian, so comparisons +# are meaningful. +# +# Backpointers +# +# When we undo a record, we don't copy (or delete) +# data. Instead, we write records with back pointers. + +import logging +import struct + +from ZODB.POSException import POSKeyError +from ZODB.utils import u64, oid_repr, as_bytes +from ZODB._compat import PY3 + +class CorruptedError(Exception): + pass + +class CorruptedDataError(CorruptedError): + + def __init__(self, oid=None, buf=None, pos=None): + self.oid = oid + self.buf = buf + self.pos = pos + + def __str__(self): + if self.oid: + msg = "Error reading oid %s. Found %r" % (oid_repr(self.oid), + self.buf) + else: + msg = "Error reading unknown oid. Found %r" % self.buf + if self.pos: + msg += " at %d" % self.pos + return msg + +# the struct formats for the headers +TRANS_HDR = ">8sQcHHH" +DATA_HDR = ">8s8sQQHQ" +# constants to support various header sizes +TRANS_HDR_LEN = 23 +DATA_HDR_LEN = 42 +assert struct.calcsize(TRANS_HDR) == TRANS_HDR_LEN +assert struct.calcsize(DATA_HDR) == DATA_HDR_LEN + +logger = logging.getLogger('ZODB.FileStorage.format') + +class FileStorageFormatter(object): + """Mixin class that can read and write the low-level format.""" + + # subclasses must provide _file + + _metadata_size = 4 + _format_version = "21" + + def _read_num(self, pos): + """Read an 8-byte number.""" + self._file.seek(pos) + return u64(self._file.read(8)) + + def _read_data_header(self, pos, oid=None, _file=None): + """Return a DataHeader object for data record at pos. + + If ois is not None, raise CorruptedDataError if oid passed + does not match oid in file. 
+ """ + if _file is None: + _file = self._file + + _file.seek(pos) + s = _file.read(DATA_HDR_LEN) + if len(s) != DATA_HDR_LEN: + raise CorruptedDataError(oid, s, pos) + h = DataHeaderFromString(s) + if oid is not None and oid != h.oid: + raise CorruptedDataError(oid, s, pos) + if not h.plen: + h.back = u64(_file.read(8)) + return h + + def _read_txn_header(self, pos, tid=None): + self._file.seek(pos) + s = self._file.read(TRANS_HDR_LEN) + if len(s) != TRANS_HDR_LEN: + raise CorruptedDataError(tid, s, pos) + h = TxnHeaderFromString(s) + if tid is not None and tid != h.tid: + raise CorruptedDataError(tid, s, pos) + h.user = self._file.read(h.ulen) + h.descr = self._file.read(h.dlen) + h.ext = self._file.read(h.elen) + return h + + def _loadBack_impl(self, oid, back, fail=True, _file=None): + # shared implementation used by various _loadBack methods + # + # If the backpointer ultimately resolves to 0: + # If fail is True, raise KeyError for zero backpointer. + # If fail is False, return the empty data from the record + # with no backpointer. + if _file is None: + _file = self._file + while 1: + if not back: + # If backpointer is 0, object does not currently exist. 
+ raise POSKeyError(oid) + h = self._read_data_header(back, _file=_file) + if h.plen: + return _file.read(h.plen), h.tid, back, h.tloc + if h.back == 0 and not fail: + return None, h.tid, back, h.tloc + back = h.back + + def _loadBackTxn(self, oid, back, fail=True): + """Return data and txn id for backpointer.""" + return self._loadBack_impl(oid, back, fail)[:2] + + def _loadBackPOS(self, oid, back): + return self._loadBack_impl(oid, back)[2] + + def getTxnFromData(self, oid, back): + """Return transaction id for data at back.""" + h = self._read_data_header(back, oid) + return h.tid + + def fail(self, pos, msg, *args): + s = ("%s:%s:" + msg) % ((self._name, pos) + args) + logger.error(s) + raise CorruptedError(s) + + def checkTxn(self, th, pos): + if th.tid <= self.ltid: + self.fail(pos, "time-stamp reduction: %s <= %s", + oid_repr(th.tid), oid_repr(self.ltid)) + self.ltid = th.tid + if th.status == "c": + self.fail(pos, "transaction with checkpoint flag set") + if not th.status in " pu": # recognize " ", "p", and "u" as valid + self.fail(pos, "invalid transaction status: %r", th.status) + if th.tlen < th.headerlen(): + self.fail(pos, "invalid transaction header: " + "txnlen (%d) < headerlen(%d)", th.tlen, th.headerlen()) + + def checkData(self, th, tpos, dh, pos): + if dh.tloc != tpos: + self.fail(pos, "data record does not point to transaction header" + ": %d != %d", dh.tloc, tpos) + if pos + dh.recordlen() > tpos + th.tlen: + self.fail(pos, "data record size exceeds transaction size: " + "%d > %d", pos + dh.recordlen(), tpos + th.tlen) + if dh.prev >= pos: + self.fail(pos, "invalid previous pointer: %d", dh.prev) + if dh.back: + if dh.back >= pos: + self.fail(pos, "invalid back pointer: %d", dh.prev) + if dh.plen: + self.fail(pos, "data record has back pointer and data") + +def DataHeaderFromString(s): + return DataHeader(*struct.unpack(DATA_HDR, s)) + +class DataHeader(object): + """Header for a data record.""" + + __slots__ = ("oid", "tid", "prev", "tloc", 
"plen", "back") + + def __init__(self, oid, tid, prev, tloc, vlen, plen): + if vlen: + raise ValueError( + "Non-zero version length. Versions aren't supported.") + + self.oid = oid + self.tid = tid + self.prev = prev + self.tloc = tloc + self.plen = plen + self.back = 0 # default + + def asString(self): + return struct.pack(DATA_HDR, self.oid, self.tid, self.prev, + self.tloc, 0, self.plen) + + def recordlen(self): + return DATA_HDR_LEN + (self.plen or 8) + +def TxnHeaderFromString(s): + res = TxnHeader(*struct.unpack(TRANS_HDR, s)) + if PY3: + res.status = res.status.decode('ascii') + return res + +class TxnHeader(object): + """Header for a transaction record.""" + + __slots__ = ("tid", "tlen", "status", "user", "descr", "ext", + "ulen", "dlen", "elen") + + def __init__(self, tid, tlen, status, ulen, dlen, elen): + self.tid = tid + self.tlen = tlen + self.status = status + self.ulen = ulen + self.dlen = dlen + self.elen = elen + assert elen >= 0 + + def asString(self): + s = struct.pack(TRANS_HDR, self.tid, self.tlen, as_bytes(self.status), + self.ulen, self.dlen, self.elen) + return b"".join(map(as_bytes, [s, self.user, self.descr, self.ext])) + + def headerlen(self): + return TRANS_HDR_LEN + self.ulen + self.dlen + self.elen diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/FileStorage/fsdump.py b/thesisenv/lib/python3.6/site-packages/ZODB/FileStorage/fsdump.py new file mode 100644 index 0000000..fe7b786 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/FileStorage/fsdump.py @@ -0,0 +1,124 @@ +from __future__ import print_function +############################################################################## +# +# Copyright (c) 2003 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +import struct + +from ZODB.FileStorage import FileIterator +from ZODB.FileStorage.format import TRANS_HDR, TRANS_HDR_LEN +from ZODB.FileStorage.format import DATA_HDR, DATA_HDR_LEN +from ZODB.TimeStamp import TimeStamp +from ZODB.utils import u64, get_pickle_metadata + +def fsdump(path, file=None, with_offset=1): + iter = FileIterator(path) + for i, trans in enumerate(iter): + if with_offset: + print(("Trans #%05d tid=%016x time=%s offset=%d" % + (i, u64(trans.tid), TimeStamp(trans.tid), trans._pos)), file=file) + else: + print(("Trans #%05d tid=%016x time=%s" % + (i, u64(trans.tid), TimeStamp(trans.tid))), file=file) + print((" status=%r user=%r description=%r" % + (trans.status, trans.user, trans.description)), file=file) + + for j, rec in enumerate(trans): + if rec.data is None: + fullclass = "undo or abort of object creation" + size = "" + else: + modname, classname = get_pickle_metadata(rec.data) + size = " size=%d" % len(rec.data) + fullclass = "%s.%s" % (modname, classname) + + if rec.data_txn: + # It would be nice to print the transaction number + # (i) but it would be expensive to keep track of. + bp = " bp=%016x" % u64(rec.data_txn) + else: + bp = "" + + print((" data #%05d oid=%016x%s class=%s%s" % + (j, u64(rec.oid), size, fullclass, bp)), file=file) + iter.close() + +def fmt(p64): + # Return a nicely formatted string for a packaged 64-bit value + return "%016x" % u64(p64) + +class Dumper(object): + """A very verbose dumper for debuggin FileStorage problems.""" + + # TODO: Should revise this class to use FileStorageFormatter. 
+ + def __init__(self, path, dest=None): + self.file = open(path, "rb") + self.dest = dest + + def dump(self): + fid = self.file.read(4) + print("*" * 60, file=self.dest) + print("file identifier: %r" % fid, file=self.dest) + while self.dump_txn(): + pass + + def dump_txn(self): + pos = self.file.tell() + h = self.file.read(TRANS_HDR_LEN) + if not h: + return False + tid, tlen, status, ul, dl, el = struct.unpack(TRANS_HDR, h) + end = pos + tlen + print("=" * 60, file=self.dest) + print("offset: %d" % pos, file=self.dest) + print("end pos: %d" % end, file=self.dest) + print("transaction id: %s" % fmt(tid), file=self.dest) + print("trec len: %d" % tlen, file=self.dest) + print("status: %r" % status, file=self.dest) + user = descr = extra = "" + if ul: + user = self.file.read(ul) + if dl: + descr = self.file.read(dl) + if el: + extra = self.file.read(el) + print("user: %r" % user, file=self.dest) + print("description: %r" % descr, file=self.dest) + print("len(extra): %d" % el, file=self.dest) + while self.file.tell() < end: + self.dump_data(pos) + stlen = self.file.read(8) + print("redundant trec len: %d" % u64(stlen), file=self.dest) + return 1 + + def dump_data(self, tloc): + pos = self.file.tell() + h = self.file.read(DATA_HDR_LEN) + assert len(h) == DATA_HDR_LEN + oid, revid, prev, tloc, vlen, dlen = struct.unpack(DATA_HDR, h) + print("-" * 60, file=self.dest) + print("offset: %d" % pos, file=self.dest) + print("oid: %s" % fmt(oid), file=self.dest) + print("revid: %s" % fmt(revid), file=self.dest) + print("previous record offset: %d" % prev, file=self.dest) + print("transaction offset: %d" % tloc, file=self.dest) + assert not vlen + print("len(data): %d" % dlen, file=self.dest) + self.file.read(dlen) + if not dlen: + sbp = self.file.read(8) + print("backpointer: %d" % u64(sbp), file=self.dest) + +def main(): + import sys + fsdump(sys.argv[1]) diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/FileStorage/fsoids.py 
b/thesisenv/lib/python3.6/site-packages/ZODB/FileStorage/fsoids.py new file mode 100644 index 0000000..4498e6c --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/FileStorage/fsoids.py @@ -0,0 +1,200 @@ +############################################################################## +# +# Copyright (c) 2004 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## +from __future__ import print_function +import ZODB.FileStorage +from ZODB.utils import get_pickle_metadata, p64, oid_repr, tid_repr +from ZODB.serialize import get_refs +from ZODB.TimeStamp import TimeStamp + +# Extract module.class string from pickle. +def get_class(pickle): + return "%s.%s" % get_pickle_metadata(pickle) + +# Shorten a string for display. +def shorten(s, size=50): + if len(s) <= size: + return s + # Stick ... in the middle. + navail = size - 5 + nleading = navail // 2 + ntrailing = size - nleading + return s[:nleading] + " ... " + s[-ntrailing:] + +class Tracer(object): + """Trace all occurrences of a set of oids in a FileStorage. + + Create passing a path to an existing FileStorage. + Call register_oids(oid, ...) one or more times to specify which oids to + investigate. + Call run() to do the analysis. This isn't swift -- it has to read + every byte in the database, in order to find all references. + Call report() to display the results. 
+ """ + + def __init__(self, path): + import os + if not os.path.isfile(path): + raise ValueError("must specify an existing FileStorage") + self.path = path + # Map an interesting tid to (status, user, description, pos). + self.tid2info = {} + # List of messages. Each is a tuple of the form + # (oid, tid, string) + # The order in the tuple is important, because it defines the + # sort order for grouping. + self.msgs = [] + # The set of interesting oids, specified by register_oid() calls. + # Maps oid to # of revisions. + self.oids = {} + # Maps interesting oid to its module.class name. If a creation + # record for an interesting oid is never seen, it won't appear + # in this mapping. + self.oid2name = {} + + def register_oids(self, *oids): + """ + Declare that oids (0 or more) are "interesting". + + An oid can be given as a native 8-byte string, or as an + integer. + + Info will be gathered about all appearances of this oid in the + entire database, including references. + """ + for oid in oids: + if isinstance(oid, bytes): + assert len(oid) == 8 + else: + oid = p64(oid) + self.oids[oid] = 0 # 0 revisions seen so far + + def _msg(self, oid, tid, *args): + self.msgs.append( (oid, tid, ' '.join(map(str, args))) ) + self._produced_msg = True + + def report(self): + """Show all msgs, grouped by oid and sub-grouped by tid.""" + + msgs = self.msgs + oids = self.oids + oid2name = self.oid2name + # First determine which oids weren't seen at all, and synthesize msgs + # for them. 
+ NOT_SEEN = "this oid was not defined (no data record for it found)" + for oid in oids: + if oid not in oid2name: + msgs.append( (oid, None, NOT_SEEN) ) + + msgs.sort() # oids are primary key, tids secondary + current_oid = current_tid = None + for oid, tid, msg in msgs: + if oid != current_oid: + nrev = oids[oid] + revision = "revision" + (nrev != 1 and 's' or '') + name = oid2name.get(oid, "") + print("oid", oid_repr(oid), name, nrev, revision) + current_oid = oid + current_tid = None + if msg is NOT_SEEN: + assert tid is None + print(" ", msg) + continue + if tid != current_tid: + current_tid = tid + status, user, description, pos = self.tid2info[tid] + print(" tid %s offset=%d %s" % (tid_repr(tid), + pos, + TimeStamp(tid))) + print(" tid user=%r" % shorten(user)) + print(" tid description=%r" % shorten(description)) + print(" ", msg) + + # Do the analysis. + def run(self): + """Find all occurrences of the registered oids in the database.""" + + # Maps oid of a reference to its module.class name. + self._ref2name = {} + for txn in ZODB.FileStorage.FileIterator(self.path): + self._check_trec(txn) + + # Process next transaction record. + def _check_trec(self, txn): + # txn has members tid, status, user, description, + # _extension, _pos, _tend, _file, _tpos + self._produced_msg = False + # Map and list for save data records for current transaction. + self._records_map = {} + self._records = [] + for drec in txn: + self._save_references(drec) + for drec in self._records: + self._check_drec(drec) + if self._produced_msg: + # Copy txn info for later output. 
+ self.tid2info[txn.tid] = (txn.status, txn.user, txn.description, + txn._tpos) + + def _save_references(self, drec): + # drec has members oid, tid, data, data_txn + tid, oid, pick, pos = drec.tid, drec.oid, drec.data, drec.pos + if pick: + if oid in self.oids: + klass = get_class(pick) + self._msg(oid, tid, "new revision", klass, "at", pos) + self.oids[oid] += 1 + self.oid2name[oid] = self._ref2name[oid] = klass + self._records_map[oid] = drec + self._records.append(drec) + elif oid in self.oids: + self._msg(oid, tid, "creation undo at", pos) + + # Process next data record. If a message is produced, self._produced_msg + # will be set True. + def _check_drec(self, drec): + # drec has members oid, tid, data, data_txn + tid, oid, pick, pos = drec.tid, drec.oid, drec.data, drec.pos + ref2name = self._ref2name + ref2name_get = ref2name.get + records_map_get = self._records_map.get + if pick: + oid_in_oids = oid in self.oids + for ref, klass in get_refs(pick): + if ref in self.oids: + oidclass = ref2name_get(oid, None) + if oidclass is None: + ref2name[oid] = oidclass = get_class(pick) + self._msg(ref, tid, "referenced by", oid_repr(oid), + oidclass, "at", pos) + + if oid_in_oids: + if klass is None: + klass = ref2name_get(ref, None) + if klass is None: + r = records_map_get(ref, None) + # For save memory we only save references + # seen in one transaction with interesting + # objects changes. So in some circumstances + # we may still got "" class name. 
+ if r is None: + klass = "" + else: + ref2name[ref] = klass = get_class(r.data) + elif isinstance(klass, tuple): + ref2name[ref] = klass = "%s.%s" % klass + else: + klass = "%s.%s" % (klass.__module__, klass.__name__) + + self._msg(oid, tid, "references", oid_repr(ref), klass, + "at", pos) diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/FileStorage/fspack.py b/thesisenv/lib/python3.6/site-packages/ZODB/FileStorage/fspack.py new file mode 100644 index 0000000..83a5155 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/FileStorage/fspack.py @@ -0,0 +1,668 @@ +############################################################################## +# +# Copyright (c) 2003 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""FileStorage helper to perform pack. + +A storage contains an ordered set of object revisions. When a storage +is packed, object revisions that are not reachable as of the pack time +are deleted. The notion of reachability is complicated by +backpointers -- object revisions that point to earlier revisions of +the same object. + +An object revisions is reachable at a certain time if it is reachable +from the revision of the root at that time or if it is reachable from +a backpointer after that time. 
+""" + +from ZODB.FileStorage.format import DataHeader, TRANS_HDR_LEN +from ZODB.FileStorage.format import FileStorageFormatter, CorruptedDataError +from ZODB.utils import p64, u64, z64 + +import binascii +import logging +import os +import ZODB.fsIndex +import ZODB.POSException + +logger = logging.getLogger(__name__) + +class PackError(ZODB.POSException.POSError): + pass + +class PackCopier(FileStorageFormatter): + + def __init__(self, f, index, tindex): + self._file = f + self._index = index + self._tindex = tindex + self._pos = None + + def _txn_find(self, tid, stop_at_pack): + # _pos always points just past the last transaction + pos = self._pos + while pos > 4: + self._file.seek(pos - 8) + pos = pos - u64(self._file.read(8)) - 8 + self._file.seek(pos) + h = self._file.read(TRANS_HDR_LEN) # XXX bytes + _tid = h[:8] + if _tid == tid: + return pos + if stop_at_pack: + if h[16] == 'p': + break + raise PackError("Invalid backpointer transaction id") + + def _data_find(self, tpos, oid, data): + # Return backpointer for oid. Must call with the lock held. + # This is a file offset to oid's data record if found, else 0. + # The data records in the transaction at tpos are searched for oid. + # If a data record for oid isn't found, returns 0. + # Else if oid's data record contains a backpointer, that + # backpointer is returned. + # Else oid's data record contains the data, and the file offset of + # oid's data record is returned. This data record should contain + # a pickle identical to the 'data' argument. + + # Unclear: If the length of the stored data doesn't match len(data), + # an exception is raised. If the lengths match but the data isn't + # the same, 0 is returned. Why the discrepancy? + h = self._read_txn_header(tpos) + tend = tpos + h.tlen + pos = self._file.tell() + while pos < tend: + h = self._read_data_header(pos) + if h.oid == oid: + # Make sure this looks like the right data record + if h.plen == 0: + # This is also a backpointer. Gotta trust it. 
+ return pos + if h.plen != len(data): + # The expected data doesn't match what's in the + # backpointer. Something is wrong. + logger.error("Mismatch between data and backpointer at %d", + pos) + return 0 + _data = self._file.read(h.plen) + if data != _data: + return 0 + return pos + pos += h.recordlen() + return 0 + + def copy(self, oid, serial, data, prev_txn, txnpos, datapos): + prev_pos = self._resolve_backpointer(prev_txn, oid, data) + old = self._index.get(oid, 0) + # Calculate the pos the record will have in the storage. + here = datapos + # And update the temp file index + self._tindex[oid] = here + if prev_pos: + # If there is a valid prev_pos, don't write data. + data = None + if data is None: + dlen = 0 + else: + dlen = len(data) + # Write the recovery data record + h = DataHeader(oid, serial, old, txnpos, 0, dlen) + + self._file.write(h.asString()) + # Write the data or a backpointer + if data is None: + if prev_pos: + self._file.write(p64(prev_pos)) + else: + # Write a zero backpointer, which indicates an + # un-creation transaction. + self._file.write(z64) + else: + self._file.write(data) + + def setTxnPos(self, pos): + self._pos = pos + + def _resolve_backpointer(self, prev_txn, oid, data): + pos = self._file.tell() + try: + prev_pos = 0 + if prev_txn is not None: + prev_txn_pos = self._txn_find(prev_txn, 0) + if prev_txn_pos: + prev_pos = self._data_find(prev_txn_pos, oid, data) + return prev_pos + finally: + self._file.seek(pos) + +class GC(FileStorageFormatter): + + def __init__(self, file, eof, packtime, gc, referencesf): + self._file = file + self._name = file.name + self.eof = eof + self.packtime = packtime + self.gc = gc + # packpos: position of first txn header after pack time + self.packpos = None + + # {oid -> current data record position}: + self.oid2curpos = ZODB.fsIndex.fsIndex() + + # The set of reachable revisions of each object. + # + # This set as managed using two data structures. 
The first is + # an fsIndex mapping oids to one data record pos. Since only + # a few objects will have more than one revision, we use this + # efficient data structure to handle the common case. The + # second is a dictionary mapping objects to lists of + # positions; it is used to handle the same number of objects + # for which we must keep multiple revisions. + self.reachable = ZODB.fsIndex.fsIndex() + self.reach_ex = {} + + # keep ltid for consistency checks during initial scan + self.ltid = z64 + + self.referencesf = referencesf + + def isReachable(self, oid, pos): + """Return 1 if revision of `oid` at `pos` is reachable.""" + + rpos = self.reachable.get(oid) + if rpos is None: + return 0 + if rpos == pos: + return 1 + return pos in self.reach_ex.get(oid, []) + + def findReachable(self): + self.buildPackIndex() + if self.gc: + self.findReachableAtPacktime([z64]) + self.findReachableFromFuture() + # These mappings are no longer needed and may consume a lot of + # space. + del self.oid2curpos + else: + self.reachable = self.oid2curpos + + def buildPackIndex(self): + pos = 4 + # We make the initial assumption that the database has been + # packed before and set unpacked to True only after seeing the + # first record with a status == " ". If we get to the packtime + # and unpacked is still False, we need to watch for a redundant + # pack. 
+ unpacked = False + while pos < self.eof: + th = self._read_txn_header(pos) + if th.tid > self.packtime: + break + self.checkTxn(th, pos) + if th.status != "p": + unpacked = True + + tpos = pos + end = pos + th.tlen + pos += th.headerlen() + + while pos < end: + dh = self._read_data_header(pos) + self.checkData(th, tpos, dh, pos) + if dh.plen or dh.back: + self.oid2curpos[dh.oid] = pos + else: + if dh.oid in self.oid2curpos: + del self.oid2curpos[dh.oid] + pos += dh.recordlen() + + tlen = self._read_num(pos) + if tlen != th.tlen: + self.fail(pos, "redundant transaction length does not " + "match initial transaction length: %d != %d", + tlen, th.tlen) + pos += 8 + + self.packpos = pos + + if unpacked: + return + # check for a redundant pack. If the first record following + # the newly computed packpos has status 'p', then it was + # packed earlier and the current pack is redudant. + try: + th = self._read_txn_header(pos) + except CorruptedDataError as err: + if err.buf != b"": + raise + if th.status == 'p': + # Delayed import to cope with circular imports. + # TODO: put exceptions in a separate module. + from ZODB.FileStorage.FileStorage import RedundantPackWarning + raise RedundantPackWarning( + "The database has already been packed to a later time" + " or no changes have been made since the last pack") + + def findReachableAtPacktime(self, roots): + """Mark all objects reachable from the oids in roots as reachable.""" + reachable = self.reachable + oid2curpos = self.oid2curpos + + todo = list(roots) + while todo: + oid = todo.pop() + if oid in reachable: + continue + + try: + pos = oid2curpos[oid] + except KeyError: + if oid == z64 and len(oid2curpos) == 0: + # special case, pack to before creation time + continue + raise KeyError(oid) + + reachable[oid] = pos + for oid in self.findrefs(pos): + if oid not in reachable: + todo.append(oid) + + def findReachableFromFuture(self): + # In this pass, the roots are positions of object revisions. 
+ # We add a pos to extra_roots when there is a backpointer to a + # revision that was not current at the packtime. The + # non-current revision could refer to objects that were + # otherwise unreachable at the packtime. + extra_roots = [] + + pos = self.packpos + while pos < self.eof: + th = self._read_txn_header(pos) + self.checkTxn(th, pos) + tpos = pos + end = pos + th.tlen + pos += th.headerlen() + + while pos < end: + dh = self._read_data_header(pos) + self.checkData(th, tpos, dh, pos) + + if dh.back and dh.back < self.packpos: + if dh.oid in self.reachable: + L = self.reach_ex.setdefault(dh.oid, []) + if dh.back not in L: + L.append(dh.back) + extra_roots.append(dh.back) + else: + self.reachable[dh.oid] = dh.back + + pos += dh.recordlen() + + tlen = self._read_num(pos) + if tlen != th.tlen: + self.fail(pos, "redundant transaction length does not " + "match initial transaction length: %d != %d", + tlen, th.tlen) + pos += 8 + + for pos in extra_roots: + refs = self.findrefs(pos) + self.findReachableAtPacktime(refs) + + def findrefs(self, pos): + """Return a list of oids referenced as of packtime.""" + dh = self._read_data_header(pos) + # Chase backpointers until we get to the record with the refs + while dh.back: + dh = self._read_data_header(dh.back) + if dh.plen: + return self.referencesf(self._file.read(dh.plen)) + else: + return [] + +class FileStoragePacker(FileStorageFormatter): + + # path is the storage file path. + # stop is the pack time, as a TimeStamp. + # current_size is the storage's _pos. All valid data at the start + # lives before that offset (there may be a checkpoint transaction in + # progress after it). 
+ + def __init__(self, storage, referencesf, stop, gc=True): + self._storage = storage + if storage.blob_dir: + self.pack_blobs = True + self.blob_removed = open( + os.path.join(storage.blob_dir, '.removed'), 'wb') + else: + self.pack_blobs = False + self.blob_removed = None + + path = storage._file.name + self._name = path + # We open our own handle on the storage so that much of pack can + # proceed in parallel. It's important to close this file at every + # return point, else on Windows the caller won't be able to rename + # or remove the storage file. + self._file = open(path, "rb") + self._path = path + self._stop = stop + self.locked = False + self.file_end = storage.getSize() + + self.gc = GC(self._file, self.file_end, self._stop, gc, referencesf) + + # The packer needs to acquire the parent's commit lock + # during the copying stage, so the two sets of lock acquire + # and release methods are passed to the constructor. + self._lock = storage._lock + self._commit_lock = storage._commit_lock + + # The packer will use several indexes. + # index: oid -> pos + # tindex: oid -> pos, for current txn + # oid2tid: not used by the packer + + self.index = ZODB.fsIndex.fsIndex() + self.tindex = {} + self.oid2tid = {} + self.toid2tid = {} + self.toid2tid_delete = {} + + self._tfile = None + + def close(self): + self._file.close() + if self._tfile is not None: + self._tfile.close() + if self.blob_removed is not None: + self.blob_removed.close() + + def pack(self): + # Pack copies all data reachable at the pack time or later. + # + # Copying occurs in two phases. In the first phase, txns + # before the pack time are copied if the contain any reachable + # data. In the second phase, all txns after the pack time + # are copied. + # + # Txn and data records contain pointers to previous records. + # Because these pointers are stored as file offsets, they + # must be updated when we copy data. + + # TODO: Should add sanity checking to pack. 
+ + self.gc.findReachable() + + def close_files_remove(): + # blank except: we might be in an IOError situation/handler + # try our best, but don't fail + try: + self._tfile.close() + except: + pass + try: + self._file.close() + except: + pass + try: + os.remove(self._name + ".pack") + except: + pass + if self.blob_removed is not None: + self.blob_removed.close() + + # Setup the destination file and copy the metadata. + # TODO: rename from _tfile to something clearer. + self._tfile = open(self._name + ".pack", "w+b") + try: + self._file.seek(0) + self._tfile.write(self._file.read(self._metadata_size)) + + self._copier = PackCopier(self._tfile, self.index, self.tindex) + + ipos, opos = self.copyToPacktime() + except (OSError, IOError): + # most probably ran out of disk space or some other IO error + close_files_remove() + raise # don't succeed silently + + assert ipos == self.gc.packpos + if ipos == opos: + # pack didn't free any data. there's no point in continuing. + close_files_remove() + return None + self._commit_lock.acquire() + self.locked = True + try: + with self._lock: + # Re-open the file in unbuffered mode. + + # The main thread may write new transactions to the + # file, which creates the possibility that we will + # read a status 'c' transaction into the pack thread's + # stdio buffer even though we're acquiring the commit + # lock. Transactions can still be in progress + # throughout much of packing, and are written to the + # same physical file but via a distinct Python file + # object. The code used to leave off the trailing 0 + # argument, and then on every platform except native + # Windows it was observed that we could read stale + # data from the tail end of the file. + self._file.close() # else self.gc keeps the original + # alive & open + self._file = open(self._path, "rb", 0) + self._file.seek(0, 2) + self.file_end = self._file.tell() + + if ipos < self.file_end: + self.copyRest(ipos) + + # OK, we've copied everything. 
Now we need to wrap things up. + pos = self._tfile.tell() + self._tfile.flush() + self._tfile.close() + self._file.close() + if self.blob_removed is not None: + self.blob_removed.close() + + return pos + except (OSError, IOError): + # most probably ran out of disk space or some other IO error + close_files_remove() + if self.locked: + self._commit_lock.release() + raise # don't succeed silently + except: + if self.locked: + self._commit_lock.release() + raise + + def copyToPacktime(self): + offset = 0 # the amount of space freed by packing + pos = self._metadata_size + new_pos = pos + + while pos < self.gc.packpos: + th = self._read_txn_header(pos) + new_tpos, pos = self.copyDataRecords(pos, th) + + if new_tpos: + new_pos = self._tfile.tell() + 8 + tlen = new_pos - new_tpos - 8 + # Update the transaction length + self._tfile.seek(new_tpos + 8) + self._tfile.write(p64(tlen)) + self._tfile.seek(new_pos - 8) + self._tfile.write(p64(tlen)) + + + tlen = self._read_num(pos) + if tlen != th.tlen: + self.fail(pos, "redundant transaction length does not " + "match initial transaction length: %d != %d", + tlen, th.tlen) + pos += 8 + + return pos, new_pos + + def copyDataRecords(self, pos, th): + """Copy any current data records between pos and tend. + + Returns position of txn header in output file and position + of next record in the input file. + + If any data records are copied, also write txn header (th). + """ + copy = 0 + new_tpos = 0 + tend = pos + th.tlen + pos += th.headerlen() + while pos < tend: + h = self._read_data_header(pos) + if not self.gc.isReachable(h.oid, pos): + if self.pack_blobs: + # We need to find out if this is a blob, so get the data: + if h.plen: + data = self._file.read(h.plen) + else: + data = self.fetchDataViaBackpointer(h.oid, h.back) + if data and self._storage.is_blob_record(data): + # We need to remove the blob record. 
Maybe we + # need to remove oid: + + # But first, we need to make sure the record + # we're looking at isn't a dup of the current + # record. There's a bug in ZEO blob support that causes + # duplicate data records. + rpos = self.gc.reachable.get(h.oid) + is_dup = (rpos + and self._read_data_header(rpos).tid == h.tid) + if not is_dup: + if h.oid not in self.gc.reachable: + self.blob_removed.write( + binascii.hexlify(h.oid)+b'\n') + else: + self.blob_removed.write( + binascii.hexlify(h.oid+h.tid)+b'\n') + + pos += h.recordlen() + continue + + pos += h.recordlen() + + # If we are going to copy any data, we need to copy + # the transaction header. Note that we will need to + # patch up the transaction length when we are done. + if not copy: + th.status = "p" + s = th.asString() + new_tpos = self._tfile.tell() + self._tfile.write(s) + new_pos = new_tpos + len(s) + copy = 1 + + if h.plen: + data = self._file.read(h.plen) + else: + data = self.fetchDataViaBackpointer(h.oid, h.back) + + self.writePackedDataRecord(h, data, new_tpos) + new_pos = self._tfile.tell() + + return new_tpos, pos + + def fetchDataViaBackpointer(self, oid, back): + """Return the data for oid via backpointer back + + If `back` is 0 or ultimately resolves to 0, return None. + In this case, the transaction undoes the object + creation. + """ + if back == 0: + return None + data, tid = self._loadBackTxn(oid, back, 0) + return data + + def writePackedDataRecord(self, h, data, new_tpos): + # Update the header to reflect current information, then write + # it to the output file. + if data is None: + data = b'' + h.prev = 0 + h.back = 0 + h.plen = len(data) + h.tloc = new_tpos + pos = self._tfile.tell() + self.index[h.oid] = pos + self._tfile.write(h.asString()) + self._tfile.write(data) + if not data: + # Packed records never have backpointers (?). + # If there is no data, write a z64 backpointer. + # This is a George Bailey event. 
+ self._tfile.write(z64) + + def copyRest(self, ipos): + # After the pack time, all data records are copied. + # Copy one txn at a time, using copy() for data. + + try: + while 1: + ipos = self.copyOne(ipos) + except CorruptedDataError as err: + # The last call to copyOne() will raise + # CorruptedDataError, because it will attempt to read past + # the end of the file. Double-check that the exception + # occurred for this reason. + self._file.seek(0, 2) + endpos = self._file.tell() + if endpos != err.pos: + raise + + def copyOne(self, ipos): + # The call below will raise CorruptedDataError at EOF. + th = self._read_txn_header(ipos) + # Release commit lock while writing to pack file + self._commit_lock.release() + self.locked = False + pos = self._tfile.tell() + self._copier.setTxnPos(pos) + self._tfile.write(th.asString()) + tend = ipos + th.tlen + ipos += th.headerlen() + + while ipos < tend: + h = self._read_data_header(ipos) + ipos += h.recordlen() + prev_txn = None + if h.plen: + data = self._file.read(h.plen) + else: + data = self.fetchDataViaBackpointer(h.oid, h.back) + if h.back: + prev_txn = self.getTxnFromData(h.oid, h.back) + + self._copier.copy(h.oid, h.tid, data, prev_txn, + pos, self._tfile.tell()) + + tlen = self._tfile.tell() - pos + assert tlen == th.tlen + self._tfile.write(p64(tlen)) + ipos += 8 + + self.index.update(self.tindex) + self.tindex.clear() + self._commit_lock.acquire() + self.locked = True + return ipos diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/FileStorage/interfaces.py b/thesisenv/lib/python3.6/site-packages/ZODB/FileStorage/interfaces.py new file mode 100644 index 0000000..ed3014d --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/FileStorage/interfaces.py @@ -0,0 +1,77 @@ +############################################################################## +# +# Copyright (c) Zope Corporation and Contributors. +# All Rights Reserved. 
+# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +import zope.interface + +class IFileStoragePacker(zope.interface.Interface): + + def __call__(storage, referencesf, stop, gc): + r"""Pack the file storage into a new file + + :param FileStorage storage: The storage object to be packed + :param callable referencesf: A function that extracts object + references from a pickle bytes string. This is usually + ``ZODB.serialize.referencesf``. + :param bytes stop: A transaction id representing the time at + which to stop packing. + :param bool gc: A flag indicating whether garbage collection + should be performed. + + The new file will have the same name as the old file with + ``.pack`` appended. (The packer can get the old file name via + storage._file.name.) If blobs are supported, if the storages + blob_dir attribute is not None or empty, then a .removed file + must be created in the blob directory. This file contains records of + the form:: + + (oid+serial).encode('hex')+'\n' + + or, of the form:: + + oid.encode('hex')+'\n' + + If packing is unnecessary, or would not change the file, then + no pack or removed files are created None is returned, + otherwise a tuple is returned with: + + - the size of the packed file, and + + - the packed index + + If and only if packing was necessary (non-None) and there was + no error, then the commit lock must be acquired. In addition, + it is up to FileStorage to: + + - Rename the .pack file, and + + - process the blob_dir/.removed file by removing the blobs + corresponding to the file records. 
+ """ + +class IFileStorage(zope.interface.Interface): + + packer = zope.interface.Attribute( + "The IFileStoragePacker to be used for packing." + ) + + _file = zope.interface.Attribute( + "The file object used to access the underlying data." + ) + + _lock = zope.interface.Attribute( + "The storage lock." + ) + + _commit_lock = zope.interface.Attribute( + "The storage commit lock." + ) diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/FileStorage/iterator.test b/thesisenv/lib/python3.6/site-packages/ZODB/FileStorage/iterator.test new file mode 100644 index 0000000..173ceb5 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/FileStorage/iterator.test @@ -0,0 +1,182 @@ +FileStorage-specific iterator tests +=================================== + +The FileStorage iterator has some special features that deserve some +special tests. + +We'll make some assertions about time, so we'll take it over: + + >>> now = 1229959248 + >>> def faux_time(): + ... global now + ... now += 0.1 + ... return now + >>> import time + >>> time_time = time.time + >>> if isinstance(time,type): + ... time.time = staticmethod(faux_time) # Jython + ... else: + ... time.time = faux_time + +Commit a bunch of transactions: + + >>> import ZODB.FileStorage, transaction + >>> db = ZODB.DB('data.fs') + >>> tids = [db.storage.lastTransaction()] + >>> poss = [db.storage._pos] + >>> conn = db.open() + >>> for i in range(100): + ... conn.root()[i] = conn.root().__class__() + ... transaction.commit() + ... tids.append(db.storage.lastTransaction()) + ... poss.append(db.storage._pos) + +Deciding where to start +----------------------- + +By default, we start at the beginning: + + >>> it = ZODB.FileStorage.FileIterator('data.fs') + >>> it.next().tid == tids[0] + True + + >>> it.close() + +The file iterator has an optimization to deal with large files. It +can serarch from either the front or the back of the file, depending +on the starting transaction given. 
To see this, we'll turn on debug +logging: + + >>> import logging, sys + >>> old_log_level = logging.getLogger().getEffectiveLevel() + >>> logging.getLogger().setLevel(logging.DEBUG) + >>> handler = logging.StreamHandler(sys.stdout) + >>> logging.getLogger().addHandler(handler) + +If we specify a start transaction, we'll scan forward or backward, as +seems best and set the next record to that: + + >>> it = ZODB.FileStorage.FileIterator('data.fs', tids[0]) + >>> it.next().tid == tids[0] + True + + >>> it.close() + + >>> it = ZODB.FileStorage.FileIterator('data.fs', tids[1]) + ... # doctest: +ELLIPSIS + Scan forward data.fs: looking for ... + >>> it.next().tid == tids[1] + True + + >>> it.close() + + >>> it = ZODB.FileStorage.FileIterator('data.fs', tids[30]) + ... # doctest: +ELLIPSIS + Scan forward data.fs: looking for ... + >>> it.next().tid == tids[30] + True + + >>> it.close() + + >>> it = ZODB.FileStorage.FileIterator('data.fs', tids[70]) + ... # doctest: +ELLIPSIS + Scan backward data.fs: looking for ... + >>> it.next().tid == tids[70] + True + + >>> it.close() + + >>> it = ZODB.FileStorage.FileIterator('data.fs', tids[-2]) + ... # doctest: +ELLIPSIS + Scan backward data.fs: looking for ... + >>> it.next().tid == tids[-2] + True + + >>> it.close() + + >>> it = ZODB.FileStorage.FileIterator('data.fs', tids[-1]) + >>> it.next().tid == tids[-1] + True + + >>> it.close() + +We can also supply a file position. 
This can speed up finding the +starting point, or just pick up where another iterator left off: + + >>> it = ZODB.FileStorage.FileIterator('data.fs', pos=poss[50]) + >>> it.next().tid == tids[51] + True + + >>> it.close() + + >>> it = ZODB.FileStorage.FileIterator('data.fs', tids[0], pos=4) + >>> it.next().tid == tids[0] + True + + >>> it.close() + + >>> it = ZODB.FileStorage.FileIterator('data.fs', tids[-1], pos=poss[-2]) + >>> it.next().tid == tids[-1] + True + + >>> it.close() + + >>> it = ZODB.FileStorage.FileIterator('data.fs', tids[50], pos=poss[50]) + ... # doctest: +ELLIPSIS + Scan backward data.fs: looking for ... + >>> it.next().tid == tids[50] + True + + >>> it.close() + + >>> it = ZODB.FileStorage.FileIterator('data.fs', tids[49], pos=poss[50]) + ... # doctest: +ELLIPSIS + Scan backward data.fs: looking for ... + >>> it.next().tid == tids[49] + True + + >>> it.close() + + >>> it = ZODB.FileStorage.FileIterator('data.fs', tids[51], pos=poss[50]) + >>> it.next().tid == tids[51] + True + + >>> it.close() + + >>> logging.getLogger().setLevel(old_log_level) + >>> logging.getLogger().removeHandler(handler) + + +If a starting transaction is before the first transaction in the file, +then the first transaction is returned. + + >>> from ZODB.utils import p64, u64 + >>> it = ZODB.FileStorage.FileIterator('data.fs', p64(u64(tids[0])-1)) + >>> it.next().tid == tids[0] + True + + >>> it.close() + +If it is after the last transaction, then iteration be empty: + + >>> it = ZODB.FileStorage.FileIterator('data.fs', p64(u64(tids[-1])+1)) + >>> list(it) + [] + + >>> it.close() + +Even if we write more transactions: + + >>> it = ZODB.FileStorage.FileIterator('data.fs', p64(u64(tids[-1])+1)) + >>> for i in range(10): + ... conn.root()[i] = conn.root().__class__() + ... transaction.commit() + >>> list(it) + [] + + >>> it.close() + +.. 
Cleanup + + >>> time.time = time_time + >>> db.close() diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/FileStorage/tests.py b/thesisenv/lib/python3.6/site-packages/ZODB/FileStorage/tests.py new file mode 100644 index 0000000..64d0828 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/FileStorage/tests.py @@ -0,0 +1,322 @@ +############################################################################## +# +# Copyright (c) Zope Corporation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +import doctest +import os +import re +import time +import transaction +import unittest +import ZODB.blob +import ZODB.FileStorage +import ZODB.tests.util +from ZODB.Connection import TransactionMetaData +from zope.testing import renormalizing + +checker = renormalizing.RENormalizing([ + # Python 3 bytes add a "b". + (re.compile("b('.*?')"), r"\1"), + # Python 3 adds module name to exceptions. + (re.compile("ZODB.POSException.POSKeyError"), r"POSKeyError"), + (re.compile("ZODB.FileStorage.FileStorage.FileStorageQuotaError"), + "FileStorageQuotaError"), + (re.compile('data.fs:[0-9]+'), 'data.fs:'), +]) + +def pack_keep_old(): + """Should a copy of the database be kept? + +The pack_keep_old constructor argument controls whether a .old file (and .old +directory for blobs is kept.) 
+ + >>> fs = ZODB.FileStorage.FileStorage('data.fs', blob_dir='blobs') + >>> db = ZODB.DB(fs) + >>> conn = db.open() + >>> import ZODB.blob + >>> conn.root()[1] = ZODB.blob.Blob() + >>> with conn.root()[1].open('w') as file: + ... _ = file.write(b'some data') + >>> conn.root()[2] = ZODB.blob.Blob() + >>> with conn.root()[2].open('w') as file: + ... _ = file.write(b'some data') + >>> transaction.commit() + >>> with conn.root()[1].open('w') as file: + ... _ = file.write(b'some other data') + >>> del conn.root()[2] + >>> transaction.commit() + >>> old_size = os.stat('data.fs').st_size + >>> def get_blob_size(d): + ... result = 0 + ... for path, dirs, file_names in os.walk(d): + ... for file_name in file_names: + ... result += os.stat(os.path.join(path, file_name)).st_size + ... return result + >>> blob_size = get_blob_size('blobs') + + >>> db.pack(time.time()+1) + >>> packed_size = os.stat('data.fs').st_size + >>> packed_size < old_size + True + >>> os.stat('data.fs.old').st_size == old_size + True + + >>> packed_blob_size = get_blob_size('blobs') + >>> packed_blob_size < blob_size + True + >>> get_blob_size('blobs.old') == blob_size + True + >>> db.close() + + + >>> fs = ZODB.FileStorage.FileStorage('data.fs', blob_dir='blobs', + ... create=True, pack_keep_old=False) + >>> db = ZODB.DB(fs) + >>> conn = db.open() + >>> conn.root()[1] = ZODB.blob.Blob() + >>> with conn.root()[1].open('w') as file: + ... _ = file.write(b'some data') + >>> conn.root()[2] = ZODB.blob.Blob() + >>> with conn.root()[2].open('w') as file: + ... _ = file.write(b'some data') + >>> transaction.commit() + >>> with conn.root()[1].open('w') as file: + ... 
_ = file.write(b'some other data') + >>> del conn.root()[2] + >>> transaction.commit() + + >>> db.pack(time.time()+1) + >>> os.stat('data.fs').st_size == packed_size + True + >>> os.path.exists('data.fs.old') + False + >>> get_blob_size('blobs') == packed_blob_size + True + >>> os.path.exists('blobs.old') + False + >>> db.close() + """ + +def pack_with_repeated_blob_records(): + """ + There is a bug in ZEO that causes duplicate bloc database records + to be written in a blob store operation. (Maybe this has been + fixed by the time you read this, but there might still be + transactions in the wild that have duplicate records. + + >>> fs = ZODB.FileStorage.FileStorage('t', blob_dir='bobs') + >>> db = ZODB.DB(fs) + >>> conn = db.open() + >>> conn.root()[1] = ZODB.blob.Blob() + >>> transaction.commit() + >>> tm = transaction.TransactionManager() + >>> oid = conn.root()[1]._p_oid + >>> from ZODB.utils import load_current + >>> blob_record, oldserial = load_current(fs, oid) + + Now, create a transaction with multiple saves: + + >>> trans = TransactionMetaData() + >>> fs.tpc_begin(trans) + >>> with open('ablob', 'w') as file: + ... _ = file.write('some data') + >>> fs.store(oid, oldserial, blob_record, '', trans) + >>> fs.storeBlob(oid, oldserial, blob_record, 'ablob', '', trans) + >>> _ = fs.tpc_vote(trans) + >>> _ = fs.tpc_finish(trans) + + >>> time.sleep(.01) + >>> db.pack() + + >>> conn.sync() + >>> with conn.root()[1].open() as fp: fp.read() + 'some data' + + >>> db.close() + """ + +def _save_index(): + """ + +_save_index can fail for large indexes. + + >>> import ZODB.utils + >>> fs = ZODB.FileStorage.FileStorage('data.fs') + + >>> t = TransactionMetaData() + >>> fs.tpc_begin(t) + >>> oid = 0 + >>> for i in range(5000): + ... oid += (1<<16) + ... 
fs.store(ZODB.utils.p64(oid), ZODB.utils.z64, b'x', '', t) + >>> _ = fs.tpc_vote(t) + >>> _ = fs.tpc_finish(t) + + >>> import sys + >>> old_limit = sys.getrecursionlimit() + >>> sys.setrecursionlimit(50) + >>> fs._save_index() + +Make sure we can restore: + + >>> import logging + >>> handler = logging.StreamHandler(sys.stdout) + >>> logger = logging.getLogger('ZODB.FileStorage') + >>> logger.setLevel(logging.DEBUG) + >>> logger.addHandler(handler) + >>> index, pos, tid = fs._restore_index() + >>> index.items() == fs._index.items() + True + >>> pos, tid = fs._pos, fs._tid + +cleanup + + >>> fs.close() + >>> logger.setLevel(logging.NOTSET) + >>> logger.removeHandler(handler) + >>> sys.setrecursionlimit(old_limit) + + """ + +def pack_disk_full_copyToPacktime(): + """Recover from a disk full situation by removing the `.pack` file + +`copyToPacktime` fails + +Add some data + + >>> fs = ZODB.FileStorage.FileStorage('data.fs') + >>> db = ZODB.DB(fs) + >>> conn = db.open() + >>> conn.root()[1] = 'foobar' + >>> transaction.commit() + +patch `copyToPacktime` to fail + + >>> from ZODB.FileStorage import fspack + >>> save_copyToPacktime = fspack.FileStoragePacker.copyToPacktime + + >>> def failing_copyToPacktime(self): + ... self._tfile.write(b'somejunkdata') + ... raise OSError("No space left on device") + + >>> fspack.FileStoragePacker.copyToPacktime = failing_copyToPacktime + +pack -- it still raises `OSError` + + >>> db.pack(time.time()+1) + Traceback (most recent call last): + ... 
+ OSError: No space left on device + +`data.fs.pack` must not exist + + >>> os.path.exists('data.fs.pack') + False + +undo patching + + >>> fspack.FileStoragePacker.copyToPacktime = save_copyToPacktime + + >>> db.close() + +check the data we added + + >>> fs = ZODB.FileStorage.FileStorage('data.fs') + >>> db = ZODB.DB(fs) + >>> conn = db.open() + >>> conn.root()[1] + 'foobar' + >>> db.close() + """ + +def pack_disk_full_copyRest(): + """Recover from a disk full situation by removing the `.pack` file + +`copyRest` fails + +Add some data + + >>> fs = ZODB.FileStorage.FileStorage('data.fs') + >>> db = ZODB.DB(fs) + >>> conn = db.open() + >>> conn.root()[1] = 'foobar' + >>> transaction.commit() + +patch `copyToPacktime` to add one more transaction + + >>> from ZODB.FileStorage import fspack + >>> save_copyToPacktime = fspack.FileStoragePacker.copyToPacktime + + >>> def patched_copyToPacktime(self): + ... res = save_copyToPacktime(self) + ... conn2 = db.open() + ... conn2.root()[2] = 'another bar' + ... transaction.commit() + ... return res + + >>> fspack.FileStoragePacker.copyToPacktime = patched_copyToPacktime + +patch `copyRest` to fail + + >>> save_copyRest = fspack.FileStoragePacker.copyRest + + >>> def failing_copyRest(self, ipos): + ... self._tfile.write(b'somejunkdata') + ... raise OSError("No space left on device") + + >>> fspack.FileStoragePacker.copyRest = failing_copyRest + +pack -- it still raises `OSError` + + >>> db.pack(time.time()+1) + Traceback (most recent call last): + ... 
+ OSError: No space left on device + +`data.fs.pack` must not exist + + >>> os.path.exists('data.fs.pack') + False + +undo patching + + >>> fspack.FileStoragePacker.copyToPacktime = save_copyToPacktime + >>> fspack.FileStoragePacker.copyRest = save_copyRest + + >>> db.close() + +check the data we added + + >>> fs = ZODB.FileStorage.FileStorage('data.fs') + >>> db = ZODB.DB(fs) + >>> conn = db.open() + >>> conn.root()[1] + 'foobar' + >>> conn.root()[2] + 'another bar' + >>> db.close() + """ + +def test_suite(): + return unittest.TestSuite(( + doctest.DocFileSuite( + 'zconfig.txt', + 'iterator.test', + setUp=ZODB.tests.util.setUp, + tearDown=ZODB.tests.util.tearDown, + checker=checker), + doctest.DocTestSuite( + setUp=ZODB.tests.util.setUp, + tearDown=ZODB.tests.util.tearDown, + checker=checker), + )) diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/FileStorage/zconfig.txt b/thesisenv/lib/python3.6/site-packages/ZODB/FileStorage/zconfig.txt new file mode 100644 index 0000000..bda7a83 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/FileStorage/zconfig.txt @@ -0,0 +1,209 @@ +Defining FileStorages using ZConfig +=================================== + +ZODB provides support for defining many storages, including +FileStorages, using ZConfig. To define a FileStorage, you use a +filestorage section, and define a path: + + >>> import ZODB.config + >>> fs = ZODB.config.storageFromString(""" + ... + ... path my.fs + ... + ... """) + + >>> fs._file.name + 'my.fs' + + >>> fs.close() + +There are a number of options we can provide: + +blob-dir + If supplied, the file storage will provide blob support and this + is the name of a directory to hold blob data. The directory will + be created if it doeesn't exist. If no value (or an empty value) + is provided, then no blob support will be provided. (You can still + use a BlobStorage to provide blob support.) + + >>> fs = ZODB.config.storageFromString(""" + ... + ... path my.fs + ... blob-dir blobs + ... + ... 
""") + + >>> fs._file.name + 'my.fs' + >>> import os + >>> os.path.basename(fs.blob_dir) + 'blobs' + +create + Flag that indicates whether the storage should be truncated if + it already exists. + + To demonstrate this, we'll first write some data: + + >>> db = ZODB.DB(fs) + >>> conn = db.open() + >>> import ZODB.blob, transaction + >>> conn.root()[1] = ZODB.blob.Blob() + >>> transaction.commit() + >>> db.close() + + Then reopen with the create option: + + >>> fs = ZODB.config.storageFromString(""" + ... + ... path my.fs + ... blob-dir blobs + ... create true + ... + ... """) + + Because the file was truncated, we no-longer have object 0: + + >>> fs.load(b'\0'*8) + Traceback (most recent call last): + ... + POSKeyError: 0x00 + + >>> sorted(os.listdir('blobs')) + ['.layout', 'tmp'] + + >>> fs.close() + +read-only + If true, only reads may be executed against the storage. Note + that the "pack" operation is not considered a write operation + and is still allowed on a read-only filestorage. + + >>> fs = ZODB.config.storageFromString(""" + ... + ... path my.fs + ... read-only true + ... + ... """) + >>> fs.isReadOnly() + True + >>> fs.close() + +quota + Maximum allowed size of the storage file. Operations which + would cause the size of the storage to exceed the quota will + result in a ZODB.FileStorage.FileStorageQuotaError being + raised. + + >>> fs = ZODB.config.storageFromString(""" + ... + ... path my.fs + ... quota 10 + ... + ... """) + >>> db = ZODB.DB(fs) # writes object 0 + Traceback (most recent call last): + ... + FileStorageQuotaError: The storage quota has been exceeded. + + >>> fs.close() + +packer + The dotten name (dotten module name and object name) of a + packer object. This is used to provide an alternative pack + implementation. + + To demonstrate this, we'll create a null packer that just prints + some information about it's arguments: + + >>> import six + >>> def packer(storage, referencesf, stop, gc): + ... 
six.print_(referencesf, storage is fs, gc, storage.pack_keep_old) + >>> ZODB.FileStorage.config_demo_printing_packer = packer + + >>> fs = ZODB.config.storageFromString(""" + ... + ... path my.fs + ... packer ZODB.FileStorage.config_demo_printing_packer + ... + ... """) + + >>> import time + >>> db = ZODB.DB(fs) # writes object 0 + >>> fs.pack(time.time(), 42) + 42 True True True + + >>> fs.close() + + If the packer contains a ':', then the text after the first ':' is + interpreted as an expression. This is handy to pass limited + configuration information to the packer: + + >>> def packer_factory(name): + ... def packer(storage, referencesf, stop, gc): + ... six.print_(repr(name), referencesf, storage is fs, + ... gc, storage.pack_keep_old) + ... return packer + >>> ZODB.FileStorage.config_demo_printing_packer_factory = packer_factory + + >>> fs = ZODB.config.storageFromString(""" + ... + ... path my.fs + ... packer ZODB.FileStorage:config_demo_printing_packer_factory('bob ') + ... + ... """) + + >>> import time + >>> db = ZODB.DB(fs) # writes object 0 + >>> fs.pack(time.time(), 42) + 'bob ' 42 True True True + + >>> fs.close() + + + +pack-gc + If false, then no garbage collection will be performed when + packing. This can make packing go much faster and can avoid + problems when objects are referenced only from other + databases. + + >>> fs = ZODB.config.storageFromString(""" + ... + ... path my.fs + ... packer ZODB.FileStorage.config_demo_printing_packer + ... pack-gc false + ... + ... """) + + >>> fs.pack(time.time(), 42) + 42 True False True + + Note that if we pass the gc option to pack, then this will + override the value set in the configuration: + + >>> fs.pack(time.time(), 42, gc=True) + 42 True True True + + >>> fs.close() + +pack-keep-old + If false, then old files aren't kept when packing + + >>> fs = ZODB.config.storageFromString(""" + ... + ... path my.fs + ... packer ZODB.FileStorage.config_demo_printing_packer + ... pack-keep-old false + ... + ... 
""") + + >>> fs.pack(time.time(), 42) + 42 True True False + + >>> fs.close() + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/MappingStorage.py b/thesisenv/lib/python3.6/site-packages/ZODB/MappingStorage.py new file mode 100644 index 0000000..8d74bb8 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/MappingStorage.py @@ -0,0 +1,374 @@ +############################################################################## +# +# Copyright (c) Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## +"""A simple in-memory mapping-based ZODB storage + +This storage provides an example implementation of a fairly full +storage without distracting storage details. +""" + +import BTrees +import time +import ZODB.BaseStorage +import ZODB.interfaces +import ZODB.POSException +import ZODB.TimeStamp +import ZODB.utils +import zope.interface + + +@zope.interface.implementer( + ZODB.interfaces.IStorage, + ZODB.interfaces.IStorageIteration, + ) +class MappingStorage(object): + """In-memory storage implementation + + Note that this implementation is somewhat naive and inefficient + with regard to locking. Its implementation is primarily meant to + be a simple illustration of storage implementation. It's also + useful for testing and exploration where scalability and efficiency + are unimportant. 
+ """ + + def __init__(self, name='MappingStorage'): + """Create a mapping storage + + The name parameter is used by the + :meth:`~ZODB.interfaces.IStorage.getName` and + :meth:`~ZODB.interfaces.IStorage.sortKey` methods. + """ + self.__name__ = name + self._data = {} # {oid->{tid->pickle}} + self._transactions = BTrees.OOBTree.OOBTree() # {tid->TransactionRecord} + self._ltid = ZODB.utils.z64 + self._last_pack = None + self._lock = ZODB.utils.RLock() + self._commit_lock = ZODB.utils.Lock() + self._opened = True + self._transaction = None + self._oid = 0 + + ###################################################################### + # Preconditions: + + def opened(self): + """The storage is open + """ + return self._opened + + def not_in_transaction(self): + """The storage is not committing a transaction + """ + return self._transaction is None + + # + ###################################################################### + + # testing framework (lame) + def cleanup(self): + pass + + # ZODB.interfaces.IStorage + @ZODB.utils.locked + def close(self): + self._opened = False + + # ZODB.interfaces.IStorage + def getName(self): + return self.__name__ + + # ZODB.interfaces.IStorage + @ZODB.utils.locked(opened) + def getSize(self): + size = 0 + for oid, tid_data in self._data.items(): + size += 50 + for tid, pickle in tid_data.items(): + size += 100+len(pickle) + return size + + # ZEO.interfaces.IServeable + @ZODB.utils.locked(opened) + def getTid(self, oid): + tid_data = self._data.get(oid) + if tid_data: + return tid_data.maxKey() + raise ZODB.POSException.POSKeyError(oid) + + # ZODB.interfaces.IStorage + @ZODB.utils.locked(opened) + def history(self, oid, size=1): + tid_data = self._data.get(oid) + if not tid_data: + raise ZODB.POSException.POSKeyError(oid) + + tids = tid_data.keys()[-size:] + tids.reverse() + return [ + dict( + time = ZODB.TimeStamp.TimeStamp(tid).timeTime(), + tid = tid, + serial = tid, + user_name = self._transactions[tid].user, + description = 
self._transactions[tid].description, + extension = self._transactions[tid].extension, + size = len(tid_data[tid]) + ) + for tid in tids] + + # ZODB.interfaces.IStorage + def isReadOnly(self): + return False + + # ZODB.interfaces.IStorageIteration + def iterator(self, start=None, end=None): + for transaction_record in self._transactions.values(start, end): + yield transaction_record + + # ZODB.interfaces.IStorage + @ZODB.utils.locked(opened) + def lastTransaction(self): + return self._ltid + + # ZODB.interfaces.IStorage + @ZODB.utils.locked(opened) + def __len__(self): + return len(self._data) + + load = ZODB.utils.load_current + + # ZODB.interfaces.IStorage + @ZODB.utils.locked(opened) + def loadBefore(self, oid, tid): + tid_data = self._data.get(oid) + if tid_data: + before = ZODB.utils.u64(tid) + if not before: + return None + before = ZODB.utils.p64(before-1) + tids_before = tid_data.keys(None, before) + if tids_before: + tids_after = tid_data.keys(tid, None) + tid = tids_before[-1] + return (tid_data[tid], tid, + (tids_after and tids_after[0] or None) + ) + else: + raise ZODB.POSException.POSKeyError(oid) + + + # ZODB.interfaces.IStorage + @ZODB.utils.locked(opened) + def loadSerial(self, oid, serial): + tid_data = self._data.get(oid) + if tid_data: + try: + return tid_data[serial] + except KeyError: + pass + + raise ZODB.POSException.POSKeyError(oid, serial) + + # ZODB.interfaces.IStorage + @ZODB.utils.locked(opened) + def new_oid(self): + self._oid += 1 + return ZODB.utils.p64(self._oid) + + # ZODB.interfaces.IStorage + @ZODB.utils.locked(opened) + def pack(self, t, referencesf, gc=True): + if not self._data: + return + + stop = ZODB.TimeStamp.TimeStamp(*time.gmtime(t)[:5]+(t%60,)).raw() + if self._last_pack is not None and self._last_pack >= stop: + if self._last_pack == stop: + return + raise ValueError("Already packed to a later time") + + self._last_pack = stop + transactions = self._transactions + + # Step 1, remove old non-current records + for oid, 
tid_data in self._data.items(): + tids_to_remove = tid_data.keys(None, stop) + if tids_to_remove: + tids_to_remove.pop() # Keep the last, if any + + if tids_to_remove: + for tid in tids_to_remove: + del tid_data[tid] + if transactions[tid].pack(oid): + del transactions[tid] + + if gc: + # Step 2, GC. A simple sweep+copy + new_data = BTrees.OOBTree.OOBTree() + to_copy = set([ZODB.utils.z64]) + while to_copy: + oid = to_copy.pop() + tid_data = self._data.pop(oid) + new_data[oid] = tid_data + for pickle in tid_data.values(): + for oid in referencesf(pickle): + if oid in new_data: + continue + to_copy.add(oid) + + # Remove left over data from transactions + for oid, tid_data in self._data.items(): + for tid in tid_data: + if transactions[tid].pack(oid): + del transactions[tid] + + self._data.clear() + self._data.update(new_data) + + # ZODB.interfaces.IStorage + def registerDB(self, db): + pass + + # ZODB.interfaces.IStorage + def sortKey(self): + return self.__name__ + + # ZODB.interfaces.IStorage + @ZODB.utils.locked(opened) + def store(self, oid, serial, data, version, transaction): + assert not version, "Versions are not supported" + if transaction is not self._transaction: + raise ZODB.POSException.StorageTransactionError(self, transaction) + + old_tid = None + tid_data = self._data.get(oid) + if tid_data: + old_tid = tid_data.maxKey() + if serial != old_tid: + raise ZODB.POSException.ConflictError( + oid=oid, serials=(old_tid, serial), data=data) + + self._tdata[oid] = data + + checkCurrentSerialInTransaction = ( + ZODB.BaseStorage.checkCurrentSerialInTransaction) + + # ZODB.interfaces.IStorage + @ZODB.utils.locked(opened) + def tpc_abort(self, transaction): + if transaction is not self._transaction: + return + self._transaction = None + self._commit_lock.release() + + # ZODB.interfaces.IStorage + def tpc_begin(self, transaction, tid=None): + with self._lock: + + ZODB.utils.check_precondition(self.opened) + + # The tid argument exists to support testing. 
+ if transaction is self._transaction: + raise ZODB.POSException.StorageTransactionError( + "Duplicate tpc_begin calls for same transaction") + + self._commit_lock.acquire() + + with self._lock: + self._transaction = transaction + self._tdata = {} + if tid is None: + if self._transactions: + old_tid = self._transactions.maxKey() + else: + old_tid = None + tid = ZODB.utils.newTid(old_tid) + self._tid = tid + + # ZODB.interfaces.IStorage + @ZODB.utils.locked(opened) + def tpc_finish(self, transaction, func = lambda tid: None): + if (transaction is not self._transaction): + raise ZODB.POSException.StorageTransactionError( + "tpc_finish called with wrong transaction") + + tid = self._tid + func(tid) + + tdata = self._tdata + for oid in tdata: + tid_data = self._data.get(oid) + if tid_data is None: + tid_data = BTrees.OOBTree.OOBucket() + self._data[oid] = tid_data + tid_data[tid] = tdata[oid] + + self._ltid = tid + self._transactions[tid] = TransactionRecord(tid, transaction, tdata) + self._transaction = None + del self._tdata + self._commit_lock.release() + return tid + + # ZEO.interfaces.IServeable + @ZODB.utils.locked(opened) + def tpc_transaction(self): + return self._transaction + + # ZODB.interfaces.IStorage + def tpc_vote(self, transaction): + if transaction is not self._transaction: + raise ZODB.POSException.StorageTransactionError( + "tpc_vote called with wrong transaction") + +class TransactionRecord(object): + + status = ' ' + + def __init__(self, tid, transaction, data): + self.tid = tid + self.user = transaction.user + self.description = transaction.description + extension = transaction.extension + self.extension = extension + self.data = data + + _extension = property(lambda self: self.extension, + lambda self, v: setattr(self, 'extension', v), + ) + + def __iter__(self): + for oid, data in self.data.items(): + yield DataRecord(oid, self.tid, data) + + def pack(self, oid): + self.status = 'p' + del self.data[oid] + return not self.data + 
+@zope.interface.implementer(ZODB.interfaces.IStorageRecordInformation) +class DataRecord(object): + """Abstract base class for iterator protocol""" + + + version = '' + data_txn = None + + def __init__(self, oid, tid, data): + self.oid = oid + self.tid = tid + self.data = data + +def DB(*args, **kw): + return ZODB.DB(MappingStorage(), *args, **kw) diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/POSException.py b/thesisenv/lib/python3.6/site-packages/ZODB/POSException.py new file mode 100644 index 0000000..edfa786 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/POSException.py @@ -0,0 +1,333 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## +"""ZODB-defined exceptions + +$Id$""" + +from ZODB.utils import oid_repr, readable_tid_repr + +# BBB: We moved the two transactions to the transaction package +from transaction.interfaces import TransactionError, TransactionFailedError + +import transaction.interfaces + +def _fmt_undo(oid, reason): + s = reason and (": %s" % reason) or "" + return "Undo error %s%s" % (oid_repr(oid), s) + +def _recon(class_, state): + err = class_.__new__(class_) + err.__setstate__(state) + return err +_recon.__no_side_effects__ = True + +class POSError(Exception): + """Persistent object system error.""" + + def __reduce__(self): + # Copy extra data from internal structures + state = self.__dict__.copy() + state['args'] = 
self.args + + return (_recon, (self.__class__, state)) + + def __setstate__(self, state): + # PyPy doesn't store the 'args' attribute in an instance's + # __dict__; instead, it uses what amounts to a slot. Because + # we customize the pickled representation to just be a dictionary, + # the args would then get lost, leading to unprintable exceptions + # and worse. Manually assign to args from the state to be sure + # this doesn't happen. + super(POSError,self).__setstate__(state) + self.args = state['args'] + +class POSKeyError(POSError, KeyError): + """Key not found in database.""" + + def __str__(self): + return oid_repr(self.args[0]) + + +class ConflictError(POSError, transaction.interfaces.TransientError): + """Two transactions tried to modify the same object at once. + + This transaction should be resubmitted. + + Instance attributes: + oid : string + the OID (8-byte packed string) of the object in conflict + class_name : string + the fully-qualified name of that object's class + message : string + a human-readable explanation of the error + serials : (string, string) + a pair of 8-byte packed strings; these are the serial numbers + related to conflict. The first is the revision of object that + is in conflict, the currently committed serial. The second is + the revision the current transaction read when it started. + data : string + The database record that failed to commit, used to put the + class name in the error message. + + The caller should pass either object or oid as a keyword argument, + but not both of them. If object is passed, it should be a + persistent object with an _p_oid attribute. + """ + + def __init__(self, message=None, object=None, oid=None, serials=None, + data=None): + if message is None: + self.message = "database conflict error" + else: + self.message = message + + if object is None: + self.oid = None + self.class_name = None + else: + self.oid = object._p_oid + klass = object.__class__ + self.class_name = klass.__module__ + "." 
+ klass.__name__ + + if oid is not None: + assert self.oid is None + self.oid = oid + + if data is not None: + # avoid circular import chain + from ZODB.utils import get_pickle_metadata + self.class_name = '.'.join(get_pickle_metadata(data)) + + self.serials = serials + + def __str__(self): + extras = [] + if self.oid: + extras.append("oid %s" % oid_repr(self.oid)) + if self.class_name: + extras.append("class %s" % self.class_name) + if self.serials: + current, old = self.serials + extras.append("serial this txn started with %s" % + readable_tid_repr(old)) + extras.append("serial currently committed %s" % + readable_tid_repr(current)) + if extras: + return "%s (%s)" % (self.message, ", ".join(extras)) + else: + return self.message + + def get_oid(self): + return self.oid + + def get_class_name(self): + return self.class_name + + def get_old_serial(self): + return self.serials[1] + + def get_new_serial(self): + return self.serials[0] + + def get_serials(self): + return self.serials + +class ReadConflictError(ConflictError): + """Conflict detected when object was loaded. + + An attempt was made to read an object that has changed in another + transaction (eg. another thread or process). 
+ """ + def __init__(self, message=None, object=None, serials=None, **kw): + if message is None: + message = "database read conflict error" + ConflictError.__init__(self, message=message, object=object, + serials=serials, **kw) + +class BTreesConflictError(ConflictError): + """A special subclass for BTrees conflict errors.""" + + msgs = [# 0; i2 or i3 bucket split; positions are all -1 + 'Conflicting bucket split', + + # 1; keys the same, but i2 and i3 values differ, and both values + # differ from i1's value + 'Conflicting changes', + + # 2; i1's value changed in i2, but key+value deleted in i3 + 'Conflicting delete and change', + + # 3; i1's value changed in i3, but key+value deleted in i2 + 'Conflicting delete and change', + + # 4; i1 and i2 both added the same key, or both deleted the + # same key + 'Conflicting inserts or deletes', + + # 5; i2 and i3 both deleted the same key + 'Conflicting deletes', + + # 6; i2 and i3 both added the same key + 'Conflicting inserts', + + # 7; i2 and i3 both deleted the same key, or i2 changed the value + # associated with a key and i3 deleted that key + 'Conflicting deletes, or delete and change', + + # 8; i2 and i3 both deleted the same key, or i3 changed the value + # associated with a key and i2 deleted that key + 'Conflicting deletes, or delete and change', + + # 9; i2 and i3 both deleted the same key + 'Conflicting deletes', + + # 10; i2 and i3 deleted all the keys, and didn't insert any, + # leaving an empty bucket; conflict resolution doesn't have + # enough info to unlink an empty bucket from its containing + # BTree correctly + 'Empty bucket from deleting all keys', + + # 11; conflicting changes in an internal BTree node + 'Conflicting changes in an internal BTree node', + + # 12; i2 or i3 was empty + 'Empty bucket in a transaction', + + # 13; delete of first key, which causes change to parent node + 'Delete of first key', + ] + + def __init__(self, p1, p2, p3, reason): + self.p1 = p1 + self.p2 = p2 + self.p3 = p3 + 
self.reason = reason
+
+    def __repr__(self):
+        return "BTreesConflictError(%d, %d, %d, %d)" % (self.p1,
+                                                        self.p2,
+                                                        self.p3,
+                                                        self.reason)
+    def __str__(self):
+        return "BTrees conflict error at %d/%d/%d: %s" % (
+            self.p1, self.p2, self.p3, self.msgs[self.reason])
+
+class DanglingReferenceError(POSError, transaction.interfaces.TransactionError):
+    """An object has a persistent reference to a missing object.
+
+    If an object is stored and it has a reference to another object
+    that does not exist (for example, it was deleted by pack), this
+    exception may be raised. Whether a storage supports this feature
+    is a quality of implementation issue.
+
+    Instance attributes:
+    referer: oid of the object being written
+    missing: referenced oid that does not have a corresponding object
+    """
+
+    def __init__(self, Aoid, Boid):
+        self.referer = Aoid
+        self.missing = Boid
+
+    def __str__(self):
+        return "from %s to %s" % (oid_repr(self.referer),
+                                  oid_repr(self.missing))
+
+
+############################################################################
+# Only used in storages; versions are no longer supported.
+
+class VersionError(POSError):
+    """An error in handling versions occurred."""
+
+class VersionCommitError(VersionError):
+    """An invalid combination of versions was used in a version commit."""
+
+class VersionLockError(VersionError, transaction.interfaces.TransactionError):
+    """Modification to an object modified in an unsaved version.
+
+    An attempt was made to modify an object that has been modified in an
+    unsaved version.
+ """ +############################################################################ + +class UndoError(POSError): + """An attempt was made to undo a non-undoable transaction.""" + + def __init__(self, reason, oid=None): + self._reason = reason + self._oid = oid + + def __str__(self): + return _fmt_undo(self._oid, self._reason) + +class MultipleUndoErrors(UndoError): + """Several undo errors occurred during a single transaction.""" + + def __init__(self, errs): + # provide a reason and oid for clients that only look at that + UndoError.__init__(self, *errs[0]) + self._errs = errs + + def __str__(self): + return "\n".join([_fmt_undo(*pair) for pair in self._errs]) + +class StorageError(POSError): + """Base class for storage based exceptions.""" + +class StorageTransactionError(StorageError): + """An operation was invoked for an invalid transaction or state.""" + +class StorageSystemError(StorageError): + """Panic! Internal storage error!""" + +class MountedStorageError(StorageError): + """Unable to access mounted storage.""" + +class ReadOnlyError(StorageError): + """Unable to modify objects in a read-only storage.""" + +class TransactionTooLargeError(StorageTransactionError): + """The transaction exhausted some finite storage resource.""" + +class ExportError(POSError): + """An export file doesn't have the right format.""" + +class Unsupported(POSError): + """A feature was used that is not supported by the storage.""" + +class ReadOnlyHistoryError(POSError): + """Unable to add or modify objects in an historical connection.""" + +class InvalidObjectReference(POSError): + """An object contains an invalid reference to another object. + + An invalid reference may be one of: + + o A reference to a wrapped persistent object. + + o A reference to an object in a different database connection. + + TODO: The exception ought to have a member that is the invalid object. 
+ """ + +class ConnectionStateError(POSError): + """A Connection isn't in the required state for an operation. + + o An operation such as a load is attempted on a closed connection. + + o An attempt to close a connection is made while the connection is + still joined to a transaction (for example, a transaction is in + progress, with uncommitted modifications in the connection). + """ diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/UndoLogCompatible.py b/thesisenv/lib/python3.6/site-packages/ZODB/UndoLogCompatible.py new file mode 100644 index 0000000..4f88209 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/UndoLogCompatible.py @@ -0,0 +1,37 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## +"""Provide backward compatibility with storages that only have undoLog().""" + + +class UndoLogCompatible(object): + + def undoInfo(self, first=0, last=-20, specification=None): + if specification: + # filter(desc) returns true iff `desc` is a "superdict" + # of `specification`, meaning that `desc` contains the same + # (key, value) pairs as `specification`, and possibly additional + # (key, value) pairs. 
Another way to do this might be + # d = desc.copy() + # d.update(specification) + # return d == desc + def filter(desc, spec=specification.items()): + get = desc.get + for k, v in spec: + if get(k, None) != v: + return 0 + return 1 + else: + filter = None + + return self.undoLog(first, last, filter) diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/__init__.py b/thesisenv/lib/python3.6/site-packages/ZODB/__init__.py new file mode 100644 index 0000000..2a5906d --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/__init__.py @@ -0,0 +1,28 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## + +import sys + +from persistent import TimeStamp +from persistent import list +from persistent import mapping + +# Backward compat for old imports. 
+sys.modules['ZODB.TimeStamp'] = sys.modules['persistent.TimeStamp'] +sys.modules['ZODB.PersistentMapping'] = sys.modules['persistent.mapping'] +sys.modules['ZODB.PersistentList'] = sys.modules['persistent.list'] + +del mapping, list, sys + +from ZODB.DB import DB, connection diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/_compat.py b/thesisenv/lib/python3.6/site-packages/ZODB/_compat.py new file mode 100644 index 0000000..64132ab --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/_compat.py @@ -0,0 +1,160 @@ +############################################################################## +# +# Copyright (c) 2013 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## +import sys +from six import PY3 + +IS_JYTHON = sys.platform.startswith('java') + +_protocol = 3 +from zodbpickle import binary + +if not PY3: + # Python 2.x + # PyPy's cPickle doesn't have noload, and noload is broken in Python 2.7, + # so we need zodbpickle. 
+ # Get the fastest working version we can (PyPy has no fastpickle) + try: + import zodbpickle.fastpickle as cPickle + except ImportError: + import zodbpickle.pickle as cPickle + Pickler = cPickle.Pickler + Unpickler = cPickle.Unpickler + dump = cPickle.dump + dumps = cPickle.dumps + loads = cPickle.loads + HIGHEST_PROTOCOL = cPickle.HIGHEST_PROTOCOL + IMPORT_MAPPING = {} + NAME_MAPPING = {} + FILESTORAGE_MAGIC = b"FS21" +else: + # Python 3.x: can't use stdlib's pickle because + # http://bugs.python.org/issue6784 + import zodbpickle.pickle + HIGHEST_PROTOCOL = 3 + from _compat_pickle import IMPORT_MAPPING, NAME_MAPPING + + class Pickler(zodbpickle.pickle.Pickler): + def __init__(self, f, protocol=None): + super(Pickler, self).__init__(f, protocol) + + class Unpickler(zodbpickle.pickle.Unpickler): + def __init__(self, f): + super(Unpickler, self).__init__(f) + + # Py3: Python 3 doesn't allow assignments to find_global, + # instead, find_class can be overridden + + find_global = None + + def find_class(self, modulename, name): + if self.find_global is None: + return super(Unpickler, self).find_class(modulename, name) + return self.find_global(modulename, name) + + def dump(o, f, protocol=None): + return zodbpickle.pickle.dump(o, f, protocol) + + def dumps(o, protocol=None): + return zodbpickle.pickle.dumps(o, protocol) + + def loads(s): + return zodbpickle.pickle.loads(s, encoding='ASCII', errors='bytes') + FILESTORAGE_MAGIC = b"FS30" + + +def PersistentPickler(persistent_id, *args, **kwargs): + """ + Returns a :class:`Pickler` that will use the given ``persistent_id`` + to get persistent IDs. The remainder of the arguments are passed to the + Pickler itself. + + This covers the differences between Python 2 and 3 and PyPy/zodbpickle. + """ + p = Pickler(*args, **kwargs) + if not PY3: + p.inst_persistent_id = persistent_id + + # PyPy uses a python implementation of cPickle/zodbpickle in both Python 2 + # and Python 3. 
We can't really detect inst_persistent_id as its + # a magic attribute that's not readable, but it doesn't hurt to + # simply always assign to persistent_id also + p.persistent_id = persistent_id + return p + +def PersistentUnpickler(find_global, load_persistent, *args, **kwargs): + """ + Returns a :class:`Unpickler` that will use the given `find_global` function + to locate classes, and the given `load_persistent` function to load + objects from a persistent id. + + This covers the differences between Python 2 and 3 and PyPy/zodbpickle. + """ + unpickler = Unpickler(*args, **kwargs) + if find_global is not None: + unpickler.find_global = find_global + try: + unpickler.find_class = find_global # PyPy, zodbpickle, the non-c-accelerated version + except AttributeError: + pass + if load_persistent is not None: + unpickler.persistent_load = load_persistent + + return unpickler + + +try: + # XXX: why not just import BytesIO from io? + from cStringIO import StringIO as BytesIO +except ImportError: + # Python 3.x + from io import BytesIO + + +try: + # Python 3.x + from base64 import decodebytes, encodebytes +except ImportError: + # Python 2.x + from base64 import decodestring as decodebytes, encodestring as encodebytes + + +# Python 3.x: ``hasattr()`` swallows only AttributeError. 
+def py2_hasattr(obj, name): + try: + getattr(obj, name) + except: + return False + return True + + +try: + # Py2: simply reexport the builtin + long = long +except NameError: + # Py3 + long = int + INT_TYPES = (int,) +else: + INT_TYPES = (int, long) + + +try: + TEXT = unicode +except NameError: #pragma NO COVER Py3k + TEXT = str + +def ascii_bytes(x): + if isinstance(x, TEXT): + x = x.encode('ascii') + return x diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/blob.py b/thesisenv/lib/python3.6/site-packages/ZODB/blob.py new file mode 100644 index 0000000..4886416 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/blob.py @@ -0,0 +1,1015 @@ +############################################################################## +# +# Copyright (c) 2005-2006 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## +"""Blobs +""" + +import binascii +import logging +import os +import re +import shutil +import stat +import sys +import tempfile +import weakref + +import zope.interface +import persistent + +import ZODB.interfaces +from ZODB.interfaces import BlobError +from ZODB import utils +from ZODB.POSException import POSKeyError +from ZODB._compat import BytesIO +from ZODB._compat import PersistentUnpickler +from ZODB._compat import decodebytes +from ZODB._compat import ascii_bytes +from ZODB._compat import INT_TYPES +from ZODB._compat import PY3 + + +if PY3: + from io import FileIO as file + + +logger = logging.getLogger('ZODB.blob') + +BLOB_SUFFIX = ".blob" +SAVEPOINT_SUFFIX = 
".spb" + +LAYOUT_MARKER = '.layout' +LAYOUTS = {} + +valid_modes = 'r', 'w', 'r+', 'a', 'c' + +# Threading issues: +# We want to support closing blob files when they are destroyed. +# This introduces a threading issue, since a blob file may be destroyed +# via GC in any thread. + +# PyPy 2.5 doesn't properly call the cleanup function +# of a weakref when the weakref object dies at the same time +# as the object it refers to. In other words, this doesn't work: +# self._ref = weakref.ref(self, lambda ref: ...) +# because the function never gets called (https://bitbucket.org/pypy/pypy/issue/2030). +# The Blob class used to use that pattern to clean up uncommitted +# files; now we use this module-level global (but still keep a +# reference in the Blob in case we need premature cleanup). +_blob_close_refs = [] + +@zope.interface.implementer(ZODB.interfaces.IBlob) +class Blob(persistent.Persistent): + """A BLOB supports efficient handling of large data within ZODB.""" + + + _p_blob_uncommitted = None # Filename of the uncommitted (dirty) data + _p_blob_committed = None # Filename of the committed data + _p_blob_ref = None # weakreference to self; also in _blob_close_refs + + readers = writers = None + + def __init__(self, data=None): + # Raise exception if Blobs are getting subclassed + # refer to ZODB-Bug No.127182 by Jim Fulton on 2007-07-20 + if (self.__class__ is not Blob): + raise TypeError('Blobs do not support subclassing.') + self.__setstate__() + if data is not None: + with self.open('w') as f: + f.write(data) + + def __setstate__(self, state=None): + # we use lists here because it will allow us to add and remove + # atomically + self.readers = [] + self.writers = [] + + def __getstate__(self): + return None + + def _p_deactivate(self): + # Only ghostify if we are unopened. + if self.readers or self.writers: + return + super(Blob, self)._p_deactivate() + + def _p_invalidate(self): + # Force-close any open readers or writers, + # XXX should we warn of this? 
Maybe? + if self._p_changed is None: + return + for ref in (self.readers or [])+(self.writers or []): + f = ref() + if f is not None: + f.close() + + if (self._p_blob_uncommitted): + os.remove(self._p_blob_uncommitted) + + super(Blob, self)._p_invalidate() + + def opened(self): + return bool(self.readers or self.writers) + + def closed(self, f): + # We use try/except below because another thread might remove + # the ref after we check it if the file is GCed. + for file_refs in (self.readers, self.writers): + for ref in file_refs: + if ref() is f: + try: + file_refs.remove(ref) + except ValueError: + pass + return + + def open(self, mode="r"): + if mode not in valid_modes: + raise ValueError("invalid mode", mode) + + if mode == 'c': + if (self._p_blob_uncommitted + or + not self._p_blob_committed + or + self._p_blob_committed.endswith(SAVEPOINT_SUFFIX) + ): + raise BlobError('Uncommitted changes') + return self._p_jar._storage.openCommittedBlobFile( + self._p_oid, self._p_serial) + + if self.writers: + raise BlobError("Already opened for writing.") + + if self.readers is None: + self.readers = [] + + if mode == 'r': + result = None + to_open = self._p_blob_uncommitted + if not to_open: + to_open = self._p_blob_committed + if to_open: + result = self._p_jar._storage.openCommittedBlobFile( + self._p_oid, self._p_serial, self) + else: + self._create_uncommitted_file() + to_open = self._p_blob_uncommitted + assert to_open + + if result is None: + result = BlobFile(to_open, mode, self) + + def destroyed(ref, readers=self.readers): + try: + readers.remove(ref) + except ValueError: + pass + + self.readers.append(weakref.ref(result, destroyed)) + else: + if self.readers: + raise BlobError("Already opened for reading.") + + if mode == 'w': + if self._p_blob_uncommitted is None: + self._create_uncommitted_file() + result = BlobFile(self._p_blob_uncommitted, mode, self) + else: # 'r+' and 'a' + if self._p_blob_uncommitted is None: + # Create a new working copy + 
self._create_uncommitted_file() + result = BlobFile(self._p_blob_uncommitted, mode, self) + if self._p_blob_committed: + with open(self._p_blob_committed, 'rb') as fp: + utils.cp(fp, result) + if mode == 'r+': + result.seek(0) + else: + # Re-use existing working copy + result = BlobFile(self._p_blob_uncommitted, mode, self) + + def destroyed(ref, writers=self.writers): + try: + writers.remove(ref) + except ValueError: + pass + + self.writers.append(weakref.ref(result, destroyed)) + + self._p_changed = True + + return result + + def committed(self): + if (self._p_blob_uncommitted + or + not self._p_blob_committed + or + self._p_blob_committed.endswith(SAVEPOINT_SUFFIX) + ): + raise BlobError('Uncommitted changes') + + result = self._p_blob_committed + + # We do this to make sure we have the file and to let the + # storage know we're accessing the file. + n = self._p_jar._storage.loadBlob(self._p_oid, self._p_serial) + assert result == n, (result, n) + + return result + + def consumeFile(self, filename): + """Will replace the current data of the blob with the file given under + filename. + """ + if self.writers: + raise BlobError("Already opened for writing.") + if self.readers: + raise BlobError("Already opened for reading.") + + previous_uncommitted = bool(self._p_blob_uncommitted) + if previous_uncommitted: + # If we have uncommitted data, we move it aside for now + # in case the consumption doesn't work. + target = self._p_blob_uncommitted + target_aside = target+".aside" + os.rename(target, target_aside) + else: + target = self._create_uncommitted_file() + # We need to unlink the freshly created target again + # to allow link() to do its job + os.remove(target) + + try: + rename_or_copy_blob(filename, target, chmod=False) + except: + # Recover from the failed consumption: First remove the file, it + # might exist and mark the pointer to the uncommitted file. 
+ self._p_blob_uncommitted = None + if os.path.exists(target): + os.remove(target) + + # If there was a file moved aside, bring it back including the + # pointer to the uncommitted file. + if previous_uncommitted: + os.rename(target_aside, target) + self._p_blob_uncommitted = target + + # Re-raise the exception to make the application aware of it. + raise + else: + if previous_uncommitted: + # The relinking worked so we can remove the data that we had + # set aside. + os.remove(target_aside) + + # We changed the blob state and have to make sure we join the + # transaction. + self._p_changed = True + + # utility methods + + def _create_uncommitted_file(self): + assert self._p_blob_uncommitted is None, ( + "Uncommitted file already exists.") + if self._p_jar: + tempdir = self._p_jar.db()._storage.temporaryDirectory() + else: + tempdir = tempfile.gettempdir() + + filename = utils.mktemp(dir=tempdir, prefix="BUC") + self._p_blob_uncommitted = filename + + def cleanup(ref): + if os.path.exists(filename): + os.remove(filename) + try: + _blob_close_refs.remove(ref) + except ValueError: + pass + self._p_blob_ref = weakref.ref(self, cleanup) + _blob_close_refs.append(self._p_blob_ref) + + return filename + + def _uncommitted(self): + # hand uncommitted data to connection, relinquishing responsibility + # for it. + filename = self._p_blob_uncommitted + if filename is None and self._p_blob_committed is None: + filename = self._create_uncommitted_file() + try: + _blob_close_refs.remove(self._p_blob_ref) + except ValueError: + pass + self._p_blob_uncommitted = self._p_blob_ref = None + return filename + +class BlobFile(file): + """A BlobFile that holds a file handle to actual blob data. + + It is a file that can be used within a transaction boundary; a BlobFile is + just a Python file object, we only override methods which cause a change to + blob data in order to call methods on our 'parent' persistent blob object + signifying that the change happened. 
+ + """ + + # XXX these files should be created in the same partition as + # the storage later puts them to avoid copying them ... + + def __init__(self, name, mode, blob): + super(BlobFile, self).__init__(name, mode+'b') + self.blob = blob + + def close(self): + self.blob.closed(self) + super(BlobFile, self).close() + + def __reduce__(self): + # Python 3 cannot pickle an open file with any pickle protocol + # because of the underlying _io.BufferedReader/Writer object. + # Python 2 cannot pickle a file with a protocol < 2, but + # protocol 2 *can* pickle an open file; the result of unpickling + # is a closed file object. + # It's pointless to do that with a blob, so we make sure to + # prohibit it on all versions. + raise TypeError("Pickling a BlobFile is not allowed") + +_pid = str(os.getpid()) + +def log(msg, level=logging.INFO, subsys=_pid, exc_info=False): + message = "(%s) %s" % (subsys, msg) + logger.log(level, message, exc_info=exc_info) + + +class FilesystemHelper(object): + # Storages that implement IBlobStorage can choose to use this + # helper class to generate and parse blob filenames. This is not + # a set-in-stone interface for all filesystem operations dealing + # with blobs and storages needn't indirect through this if they + # want to perform blob storage differently. + + def __init__(self, base_dir, layout_name='automatic'): + self.base_dir = os.path.abspath(base_dir) + os.path.sep + self.temp_dir = os.path.join(base_dir, 'tmp') + + if layout_name == 'automatic': + layout_name = auto_layout_select(base_dir) + if layout_name == 'lawn': + log('The `lawn` blob directory layout is deprecated due to ' + 'scalability issues on some file systems, please consider ' + 'migrating to the `bushy` layout.', level=logging.WARN) + self.layout_name = layout_name + self.layout = LAYOUTS[layout_name] + + def create(self): + if not os.path.exists(self.base_dir): + os.makedirs(self.base_dir) + log("Blob directory '%s' does not exist. " + "Created new directory." 
% self.base_dir) + if not os.path.exists(self.temp_dir): + os.makedirs(self.temp_dir) + log("Blob temporary directory '%s' does not exist. " + "Created new directory." % self.temp_dir) + + layout_marker_path = os.path.join(self.base_dir, LAYOUT_MARKER) + if not os.path.exists(layout_marker_path): + with open(layout_marker_path, 'w') as layout_marker: + layout_marker.write(self.layout_name) + else: + with open(layout_marker_path, 'r') as layout_marker: + layout = layout_marker.read().strip() + if layout != self.layout_name: + raise ValueError( + "Directory layout `%s` selected for blob directory %s, but " + "marker found for layout `%s`" % + (self.layout_name, self.base_dir, layout)) + + def isSecure(self, path): + import warnings + warnings.warn( + "isSecure is deprecated. Permissions are no longer set by ZODB", + DeprecationWarning, stacklevel=2) + + def checkSecure(self): + import warnings + warnings.warn( + "checkSecure is deprecated. Permissions are no longer set by ZODB", + DeprecationWarning, stacklevel=2) + + def getPathForOID(self, oid, create=False): + """Given an OID, return the path on the filesystem where + the blob data relating to that OID is stored. + + If the create flag is given, the path is also created if it didn't + exist already. + + """ + # OIDs are numbers and sometimes passed around as integers. For our + # computations we rely on the 64-bit packed string representation. + if isinstance(oid, int): + oid = utils.p64(oid) + + path = self.layout.oid_to_path(oid) + path = os.path.join(self.base_dir, path) + + if create and not os.path.exists(path): + try: + os.makedirs(path) + except OSError: + # We might have lost a race. If so, the directory + # must exist now + assert os.path.exists(path) + return path + + def getOIDForPath(self, path): + """Given a path, return an OID, if the path is a valid path for an + OID. The inverse function to `getPathForOID`. + + Raises ValueError if the path is not valid for an OID. 
+ + """ + path = path[len(self.base_dir):] + return self.layout.path_to_oid(path) + + def createPathForOID(self, oid): + """Given an OID, creates a directory on the filesystem where + the blob data relating to that OID is stored, if it doesn't exist. + """ + return self.getPathForOID(oid, create=True) + + def getBlobFilename(self, oid, tid): + """Given an oid and a tid, return the full filename of the + 'committed' blob file related to that oid and tid. + + """ + # TIDs are numbers and sometimes passed around as integers. For our + # computations we rely on the 64-bit packed string representation + if isinstance(oid, int): + oid = utils.p64(oid) + if isinstance(tid, int): + tid = utils.p64(tid) + return os.path.join(self.base_dir, + self.layout.getBlobFilePath(oid, tid), + ) + + def blob_mkstemp(self, oid, tid): + """Given an oid and a tid, return a temporary file descriptor + and a related filename. + + The file is guaranteed to exist on the same partition as committed + data, which is important for being able to rename the file without a + copy operation. The directory in which the file will be placed, which + is the return value of self.getPathForOID(oid), must exist before this + method may be called successfully. + + """ + oidpath = self.getPathForOID(oid) + fd, name = tempfile.mkstemp(suffix='.tmp', + prefix=utils.tid_repr(tid), + dir=oidpath) + return fd, name + + def splitBlobFilename(self, filename): + """Returns the oid and tid for a given blob filename. + + If the filename cannot be recognized as a blob filename, (None, None) + is returned. + + """ + if not filename.endswith(BLOB_SUFFIX): + return None, None + path, filename = os.path.split(filename) + oid = self.getOIDForPath(path) + + serial = filename[:-len(BLOB_SUFFIX)] + serial = utils.repr_to_oid(serial) + return oid, serial + + def getOIDsForSerial(self, search_serial): + """Return all oids related to a particular tid that exist in + blob data. 
+ + """ + oids = [] + for oid, oidpath in self.listOIDs(): + for filename in os.listdir(oidpath): + blob_path = os.path.join(oidpath, filename) + oid, serial = self.splitBlobFilename(blob_path) + if search_serial == serial: + oids.append(oid) + return oids + + def listOIDs(self): + """Iterates over all paths under the base directory that contain blob + files. + """ + for path, dirs, files in os.walk(self.base_dir): + # Make sure we traverse in a stable order. This is mainly to make + # testing predictable. + dirs.sort() + files.sort() + try: + oid = self.getOIDForPath(path) + except ValueError: + continue + yield oid, path + + +class NoBlobsFileSystemHelper(object): + + @property + def temp_dir(self): + raise TypeError("Blobs are not supported") + + getPathForOID = getBlobFilename = temp_dir + + +class BlobStorageError(Exception): + """The blob storage encountered an invalid state.""" + +def auto_layout_select(path): + # A heuristic to look at a path and determine which directory layout to + # use. + layout_marker = os.path.join(path, LAYOUT_MARKER) + if os.path.exists(layout_marker): + with open(layout_marker, 'r') as fp: + layout = fp.read().strip() + log('Blob directory `%s` has layout marker set. ' + 'Selected `%s` layout. ' % (path, layout), level=logging.DEBUG) + elif not os.path.exists(path): + log('Blob directory %s does not exist. ' + 'Selected `bushy` layout. ' % path) + layout = 'bushy' + else: + # look for a non-hidden file in the directory + has_files = False + for name in os.listdir(path): + if not name.startswith('.'): + has_files = True + break + if not has_files: + log('Blob directory `%s` is unused and has no layout marker set. ' + 'Selected `bushy` layout. ' % path) + layout = 'bushy' + else: + log('Blob directory `%s` is used but has no layout marker set. ' + 'Selected `lawn` layout. ' % path) + layout = 'lawn' + return layout + + +class BushyLayout(object): + """A bushy directory layout for blob directories. 
+ + Creates an 8-level directory structure (one level per byte) in + big-endian order from the OID of an object. + + """ + + blob_path_pattern = re.compile( + r'(0x[0-9a-f]{1,2}\%s){7,7}0x[0-9a-f]{1,2}$' % os.path.sep) + + def oid_to_path(self, oid): + # Create the bushy directory structure with the least significant byte + # first + oid_bytes = ascii_bytes(oid) + hex_bytes = binascii.hexlify(oid_bytes) + assert len(hex_bytes) == 16 + + directories = [b'0x' + hex_bytes[x:x+2] + for x in range(0, 16, 2)] + + if bytes is not str: # py3 + sep_bytes = os.path.sep.encode('ascii') + path_bytes = sep_bytes.join(directories) + return path_bytes.decode('ascii') + else: + return os.path.sep.join(directories) + + def path_to_oid(self, path): + if self.blob_path_pattern.match(path) is None: + raise ValueError("Not a valid OID path: `%s`" % path) + path = [ascii_bytes(x) for x in path.split(os.path.sep)] + # Each path segment stores a byte in hex representation. Turn it into + # an int and then get the character for our byte string. + oid = b''.join(binascii.unhexlify(byte[2:]) for byte in path) + return oid + + def getBlobFilePath(self, oid, tid): + """Given an oid and a tid, return the full filename of the + 'committed' blob file related to that oid and tid. + + """ + oid_path = self.oid_to_path(oid) + filename = "%s%s" % (utils.tid_repr(tid), BLOB_SUFFIX) + return os.path.join(oid_path, filename) + +LAYOUTS['bushy'] = BushyLayout() + +class LawnLayout(BushyLayout): + """A shallow directory layout for blob directories. + + Creates a single level of directories (one for each oid). + + """ + + def oid_to_path(self, oid): + return utils.oid_repr(oid) + + def path_to_oid(self, path): + try: + if path == '': + # This is a special case where repr_to_oid converts '' to the + # OID z64. 
+ raise TypeError() + return utils.repr_to_oid(path) + except (TypeError, binascii.Error): + raise ValueError('Not a valid OID path: `%s`' % path) + +LAYOUTS['lawn'] = LawnLayout() + +class BlobStorageMixin(object): + """A mix-in to help storages support blobs.""" + + def _blob_init(self, blob_dir, layout='automatic'): + # XXX Log warning if storage is ClientStorage + self.fshelper = FilesystemHelper(blob_dir, layout) + self.fshelper.create() + self.dirty_oids = [] + + def _blob_init_no_blobs(self): + self.fshelper = NoBlobsFileSystemHelper() + self.dirty_oids = [] + + def _blob_tpc_abort(self): + """Blob cleanup to be called from subclass tpc_abort + """ + while self.dirty_oids: + oid, serial = self.dirty_oids.pop() + clean = self.fshelper.getBlobFilename(oid, serial) + if os.path.exists(clean): + remove_committed(clean) + + def _blob_tpc_finish(self): + """Blob cleanup to be called from subclass tpc_finish + """ + self.dirty_oids = [] + + def registerDB(self, db): + self.__untransform_record_data = db.untransform_record_data + try: + m = super(BlobStorageMixin, self).registerDB + except AttributeError: + pass + else: + m(db) + + def __untransform_record_data(self, record): + return record + + def is_blob_record(self, record): + if record: + return is_blob_record(self.__untransform_record_data(record)) + + def copyTransactionsFrom(self, other): + copyTransactionsFromTo(other, self) + + def loadBlob(self, oid, serial): + """Return the filename where the blob file can be found. 
+ """ + filename = self.fshelper.getBlobFilename(oid, serial) + if not os.path.exists(filename): + raise POSKeyError("No blob file at %s" % filename, oid, serial) + return filename + + def openCommittedBlobFile(self, oid, serial, blob=None): + blob_filename = self.loadBlob(oid, serial) + if blob is None: + return open(blob_filename, 'rb') + else: + return BlobFile(blob_filename, 'r', blob) + + def restoreBlob(self, oid, serial, data, blobfilename, prev_txn, + transaction): + """Write blob data already committed in a separate database + """ + self.restore(oid, serial, data, '', prev_txn, transaction) + self._blob_storeblob(oid, serial, blobfilename) + + return self._tid + + def _blob_storeblob(self, oid, serial, blobfilename): + with self._lock: + self.fshelper.getPathForOID(oid, create=True) + targetname = self.fshelper.getBlobFilename(oid, serial) + rename_or_copy_blob(blobfilename, targetname) + + # if oid already in there, something is really hosed. + # The underlying storage should have complained anyway + self.dirty_oids.append((oid, serial)) + + def storeBlob(self, oid, oldserial, data, blobfilename, version, + transaction): + """Stores data that has a BLOB attached.""" + assert not version, "Versions aren't supported." + self.store(oid, oldserial, data, '', transaction) + self._blob_storeblob(oid, self._tid, blobfilename) + + def temporaryDirectory(self): + return self.fshelper.temp_dir + + +@zope.interface.implementer(ZODB.interfaces.IBlobStorage) +class BlobStorage(BlobStorageMixin): + """A wrapper/proxy storage to support blobs. 
+ """ + + + def __init__(self, base_directory, storage, layout='automatic'): + assert not ZODB.interfaces.IBlobStorage.providedBy(storage) + self.__storage = storage + + self._blob_init(base_directory, layout) + try: + supportsUndo = storage.supportsUndo + except AttributeError: + supportsUndo = False + else: + supportsUndo = supportsUndo() + self.__supportsUndo = supportsUndo + self._blobs_pack_is_in_progress = False + + if ZODB.interfaces.IStorageRestoreable.providedBy(storage): + iblob = ZODB.interfaces.IBlobStorageRestoreable + else: + iblob = ZODB.interfaces.IBlobStorage + + zope.interface.directlyProvides( + self, iblob, zope.interface.providedBy(storage)) + + def __getattr__(self, name): + return getattr(self.__storage, name) + + def __len__(self): + return len(self.__storage) + + def __repr__(self): + normal_storage = self.__storage + return '' % (normal_storage, + hex(id(self))) + + def tpc_finish(self, *arg, **kw): + # We need to override the base storage's tpc_finish instead of + # providing a _finish method because methods found on the proxied + # object aren't rebound to the proxy + tid = self.__storage.tpc_finish(*arg, **kw) + self._blob_tpc_finish() + return tid + + def tpc_abort(self, *arg, **kw): + # We need to override the base storage's abort instead of + # providing an _abort method because methods found on the proxied object + # aren't rebound to the proxy + self.__storage.tpc_abort(*arg, **kw) + self._blob_tpc_abort() + + def _packUndoing(self, packtime, referencesf): + # Walk over all existing revisions of all blob files and check + # if they are still needed by attempting to load the revision + # of that object from the database. This is maybe the slowest + # possible way to do this, but it's safe. 
+ for oid, oid_path in self.fshelper.listOIDs(): + files = os.listdir(oid_path) + for filename in files: + filepath = os.path.join(oid_path, filename) + whatever, serial = self.fshelper.splitBlobFilename(filepath) + try: + self.loadSerial(oid, serial) + except POSKeyError: + remove_committed(filepath) + + if not os.listdir(oid_path): + shutil.rmtree(oid_path) + + def _packNonUndoing(self, packtime, referencesf): + for oid, oid_path in self.fshelper.listOIDs(): + exists = True + try: + utils.load_current(self, oid) + except (POSKeyError, KeyError): + exists = False + + if exists: + files = os.listdir(oid_path) + files.sort() + latest = files[-1] # depends on ever-increasing tids + files.remove(latest) + for f in files: + remove_committed(os.path.join(oid_path, f)) + else: + remove_committed_dir(oid_path) + continue + + if not os.listdir(oid_path): + shutil.rmtree(oid_path) + + def pack(self, packtime, referencesf): + """Remove all unused OID/TID combinations.""" + with self._lock: + if self._blobs_pack_is_in_progress: + raise BlobStorageError('Already packing') + self._blobs_pack_is_in_progress = True + + try: + # Pack the underlying storage, which will allow us to determine + # which serials are current. + unproxied = self.__storage + result = unproxied.pack(packtime, referencesf) + + # Perform a pack on the blob data. + if self.__supportsUndo: + self._packUndoing(packtime, referencesf) + else: + self._packNonUndoing(packtime, referencesf) + finally: + with self._lock: + self._blobs_pack_is_in_progress = False + + return result + + def undo(self, serial_id, transaction): + undo_serial, keys = self.__storage.undo(serial_id, transaction) + # serial_id is the transaction id of the txn that we wish to undo. + # "undo_serial" is the transaction id of txn in which the undo is + # performed. "keys" is the list of oids that are involved in the + # undo transaction. 
+ + # The serial_id is assumed to be given to us base-64 encoded + # (belying the web UI legacy of the ZODB code :-() + serial_id = decodebytes(serial_id + b'\n') + + with self._lock: + # we get all the blob oids on the filesystem related to the + # transaction we want to undo. + for oid in self.fshelper.getOIDsForSerial(serial_id): + # we want to find the serial id of the previous revision + # of this blob object. + load_result = self.loadBefore(oid, serial_id) + + if load_result is None: + + # There was no previous revision of this blob + # object. The blob was created in the transaction + # represented by serial_id. We copy the blob data + # to a new file that references the undo + # transaction in case a user wishes to undo this + # undo. It would be nice if we had some way to + # link to old blobs. + orig_fn = self.fshelper.getBlobFilename(oid, serial_id) + new_fn = self.fshelper.getBlobFilename(oid, undo_serial) + else: + # A previous revision of this blob existed before the + # transaction implied by "serial_id". We copy the blob + # data to a new file that references the undo transaction + # in case a user wishes to undo this undo. + data, serial_before, serial_after = load_result + orig_fn = self.fshelper.getBlobFilename(oid, serial_before) + new_fn = self.fshelper.getBlobFilename(oid, undo_serial) + with open(orig_fn, "rb") as orig: + with open(new_fn, "wb") as new: + utils.cp(orig, new) + self.dirty_oids.append((oid, undo_serial)) + + return undo_serial, keys + + def new_instance(self): + """Implementation of IMVCCStorage.new_instance. + + This method causes all storage instances to be wrapped with + a blob storage wrapper. + """ + base_dir = self.fshelper.base_dir + s = self.__storage.new_instance() + res = BlobStorage(base_dir, s) + return res + +copied = logging.getLogger('ZODB.blob.copied').debug +def rename_or_copy_blob(f1, f2, chmod=True): + """Try to rename f1 to f2, fallback to copy. + + Under certain conditions a rename might not work, e.g. 
because the target + directory is on a different partition. In this case we try to copy the + data and remove the old file afterwards. + + """ + try: + os.rename(f1, f2) + except OSError: + copied("Copied blob file %r to %r.", f1, f2) + with open(f1, 'rb') as file1: + with open(f2, 'wb') as file2: + utils.cp(file1, file2) + remove_committed(f1) + + if chmod: + set_not_writable(f2) + +if sys.platform == 'win32': + # On Windows, you can't remove read-only files, so make the + # file writable first. + + def remove_committed(filename): + os.chmod(filename, stat.S_IWRITE) + os.remove(filename) + + def remove_committed_dir(path): + for (dirpath, dirnames, filenames) in os.walk(path): + for filename in filenames: + filename = os.path.join(dirpath, filename) + remove_committed(filename) + shutil.rmtree(path) + + link_or_copy = shutil.copy +else: + remove_committed = os.remove + remove_committed_dir = shutil.rmtree + link_or_copy = os.link + + +def find_global_Blob(module, class_): + if module == 'ZODB.blob' and class_ == 'Blob': + return Blob + +def is_blob_record(record): + """Check whether a database record is a blob record. + + This is primarily intended to be used when copying data from one + storage to another. 
+ + """ + if record and (b'ZODB.blob' in record): + unpickler = PersistentUnpickler(find_global_Blob, None, BytesIO(record)) + + try: + return unpickler.load() is Blob + except (MemoryError, KeyboardInterrupt, SystemExit): + raise + except Exception: + pass + + return False + +def copyTransactionsFromTo(source, destination): + for trans in source.iterator(): + destination.tpc_begin(trans, trans.tid, trans.status) + for record in trans: + blobfilename = None + if is_blob_record(record.data): + try: + blobfilename = source.loadBlob(record.oid, record.tid) + except POSKeyError: + pass + if blobfilename is not None: + fd, name = tempfile.mkstemp( + prefix='CTFT', + suffix='.tmp', dir=destination.fshelper.temp_dir) + os.close(fd) + with open(blobfilename, 'rb') as sf: + with open(name, 'wb') as df: + utils.cp(sf, df) + destination.restoreBlob(record.oid, record.tid, record.data, + name, record.data_txn, trans) + else: + destination.restore(record.oid, record.tid, record.data, + '', record.data_txn, trans) + + destination.tpc_vote(trans) + destination.tpc_finish(trans) + + +NO_WRITE = ~ (stat.S_IWUSR | stat.S_IWGRP | stat.S_IWOTH) +READ_PERMS = stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH +def set_not_writable(path): + perms = stat.S_IMODE(os.lstat(path).st_mode) + + # Not writable: + perms &= NO_WRITE + + # Read perms from folder: + perms |= stat.S_IMODE(os.lstat(os.path.dirname(path)).st_mode) & READ_PERMS + + os.chmod(path, perms) diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/broken.py b/thesisenv/lib/python3.6/site-packages/ZODB/broken.py new file mode 100644 index 0000000..477b1bb --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/broken.py @@ -0,0 +1,360 @@ +############################################################################## +# +# Copyright (c) 2004 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). 
A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Broken object support +""" + +import sys + +import persistent +import zope.interface + +import ZODB.interfaces +from ZODB._compat import IMPORT_MAPPING +from ZODB._compat import NAME_MAPPING + +broken_cache = {} + +@zope.interface.implementer(ZODB.interfaces.IBroken) +class Broken(object): + """Broken object base class + + Broken objects are placeholders for objects that can no longer be + created because their class has gone away. + + Broken objects don't really do much of anything, except hold their + state. The Broken class is used as a base class for creating + classes in lieu of missing classes:: + + >>> Atall = type('Atall', (Broken, ), {'__module__': 'not.there'}) + + The only thing the class can be used for is to create new objects:: + + >>> Atall() + + >>> Atall().__Broken_newargs__ + () + >>> Atall().__Broken_initargs__ + () + + >>> Atall(1, 2).__Broken_newargs__ + (1, 2) + >>> Atall(1, 2).__Broken_initargs__ + (1, 2) + + >>> a = Atall.__new__(Atall, 1, 2) + >>> a + + >>> a.__Broken_newargs__ + (1, 2) + >>> a.__Broken_initargs__ + + You can't modify broken objects:: + + >>> a.x = 1 + Traceback (most recent call last): + ... 
+ BrokenModified: Can't change broken objects + + But you can set their state:: + + >>> a.__setstate__({'x': 1, }) + + You can pickle broken objects:: + + >>> r = a.__reduce__() + >>> len(r) + 3 + >>> r[0] is rebuild + True + >>> r[1] + ('not.there', 'Atall', 1, 2) + >>> r[2] + {'x': 1} + + >>> from ZODB._compat import dumps + >>> from ZODB._compat import loads + >>> from ZODB._compat import _protocol + >>> a2 = loads(dumps(a, _protocol)) + >>> a2 + + >>> a2.__Broken_newargs__ + (1, 2) + >>> a2.__Broken_initargs__ + >>> a2.__Broken_state__ + {'x': 1} + + Cleanup:: + + >>> broken_cache.clear() + """ + + + __Broken_state__ = __Broken_initargs__ = None + + __name__ = 'broken object' + + def __new__(class_, *args): + result = object.__new__(class_) + result.__dict__['__Broken_newargs__'] = args + return result + + def __init__(self, *args): + self.__dict__['__Broken_initargs__'] = args + + def __reduce__(self): + """We pickle broken objects in hope of being able to fix them later + """ + return (rebuild, + ((self.__class__.__module__, self.__class__.__name__) + + self.__Broken_newargs__), + self.__Broken_state__, + ) + + def __setstate__(self, state): + self.__dict__['__Broken_state__'] = state + + def __repr__(self): + return "" % ( + self.__class__.__module__, self.__class__.__name__) + + def __setattr__(self, name, value): + raise BrokenModified("Can't change broken objects") + +def find_global(modulename, globalname, + # These are *not* optimizations. Callers can override these. + Broken=Broken, type=type, + ): + """Find a global object, returning a broken class if it can't be found. 
+ + This function looks up global variable in modules:: + + >>> import sys + >>> find_global('sys', 'path') is sys.path + True + + >>> find_global('__builtin__', 'object') is object + True + + If an object can't be found, a broken class is returned:: + + >>> broken = find_global('ZODB.not.there', 'atall') + >>> issubclass(broken, Broken) + True + >>> broken.__module__ + 'ZODB.not.there' + >>> broken.__name__ + 'atall' + + Broken classes are cached:: + + >>> find_global('ZODB.not.there', 'atall') is broken + True + + If we "repair" a missing global:: + + >>> class ZODBnotthere(object): + ... atall = [] + + >>> sys.modules['ZODB.not'] = ZODBnotthere + >>> sys.modules['ZODB.not.there'] = ZODBnotthere + + we can then get the repaired value:: + + >>> find_global('ZODB.not.there', 'atall') is ZODBnotthere.atall + True + + Of course, if we break it again:: + + >>> del sys.modules['ZODB.not'] + >>> del sys.modules['ZODB.not.there'] + + we get the broken value:: + + >>> find_global('ZODB.not.there', 'atall') is broken + True + + Cleanup:: + + >>> broken_cache.clear() + """ + + if (modulename, globalname) in NAME_MAPPING: + modulename, globalname = NAME_MAPPING[(modulename, globalname)] + if modulename in IMPORT_MAPPING: + modulename = IMPORT_MAPPING[modulename] + + # short circuit common case: + try: + return getattr(sys.modules[modulename], globalname) + except (AttributeError, KeyError): + pass + + try: + __import__(modulename) + except ImportError: + pass + else: + module = sys.modules[modulename] + try: + return getattr(module, globalname) + except AttributeError: + pass + + try: + return broken_cache[(modulename, globalname)] + except KeyError: + pass + + class_ = type(globalname, (Broken, ), {'__module__': modulename}) + broken_cache[(modulename, globalname)] = class_ + return class_ + +def rebuild(modulename, globalname, *args): + """Recreate a broken object, possibly recreating the missing class + + This functions unpickles broken objects:: + + >>> broken = 
rebuild('ZODB.notthere', 'atall', 1, 2) + >>> broken + + >>> broken.__Broken_newargs__ + (1, 2) + + If we "repair" the brokenness:: + + >>> class notthere(object): # fake notthere module + ... class atall(object): + ... def __new__(self, *args): + ... ob = object.__new__(self) + ... ob.args = args + ... return ob + ... def __repr__(self): + ... return 'atall %s %s' % self.args + + >>> sys.modules['ZODB.notthere'] = notthere + + >>> rebuild('ZODB.notthere', 'atall', 1, 2) + atall 1 2 + + >>> del sys.modules['ZODB.notthere'] + + Cleanup:: + + >>> broken_cache.clear() + + """ + class_ = find_global(modulename, globalname) + return class_.__new__(class_, *args) + +class BrokenModified(TypeError): + """Attempt to modify a broken object + """ + +class PersistentBroken(Broken, persistent.Persistent): + r"""Persistent broken objects + + Persistent broken objects are used for broken objects that are + also persistent. In addition to having to track the original + object data, they need to handle persistent meta data. + + Persistent broken classes are created from existing broken classes + using the persistentBroken, function:: + + >>> Atall = type('Atall', (Broken, ), {'__module__': 'not.there'}) + >>> PAtall = persistentBroken(Atall) + + (Note that we always get the *same* persistent broken class + for a given broken class:: + + >>> persistentBroken(Atall) is PAtall + True + + ) + + Persistent broken classes work a lot like broken classes:: + + >>> a = PAtall.__new__(PAtall, 1, 2) + >>> a + + >>> a.__Broken_newargs__ + (1, 2) + >>> a.__Broken_initargs__ + >>> a.x = 1 + Traceback (most recent call last): + ... + BrokenModified: Can't change broken objects + + Unlike regular broken objects, persistent broken objects keep + track of persistence meta data: + + >>> a._p_oid = '\0\0\0\0****' + >>> a + + + and persistent broken objects aren't directly picklable: + + >>> a.__reduce__() # doctest: +NORMALIZE_WHITESPACE + Traceback (most recent call last): + ... 
+ BrokenModified: + + + but you can get their state: + + >>> a.__setstate__({'y': 2}) + >>> a.__getstate__() + {'y': 2} + + Cleanup:: + + >>> broken_cache.clear() + + """ + + def __new__(class_, *args): + result = persistent.Persistent.__new__(class_) + result.__dict__['__Broken_newargs__'] = args + return result + + def __reduce__(self, *args): + raise BrokenModified(self) + + def __getstate__(self): + return self.__Broken_state__ + + def __setattr__(self, name, value): + if name.startswith('_p_'): + persistent.Persistent.__setattr__(self, name, value) + else: + raise BrokenModified("Can't change broken objects") + + def __repr__(self): + return "" % ( + self.__class__.__module__, self.__class__.__name__, + self._p_oid) + + def __getnewargs__(self): + return self.__Broken_newargs__ + +def persistentBroken(class_): + try: + return class_.__dict__['__Broken_Persistent__'] + except KeyError: + class_.__Broken_Persistent__ = ( + type(class_.__name__, + (PersistentBroken, class_), + {'__module__': class_.__module__}, + ) + ) + return class_.__dict__['__Broken_Persistent__'] diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/collaborations.txt b/thesisenv/lib/python3.6/site-packages/ZODB/collaborations.txt new file mode 100644 index 0000000..38859d5 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/collaborations.txt @@ -0,0 +1,199 @@ +======================= +Collabortation Diagrams +======================= + +This file contains several collaboration diagrams for the ZODB. 
+ +Simple fetch, modify, commit +============================ + +Participants +------------ + +- ``DB``: ``ZODB.DB.DB`` +- ``C``: ``ZODB.Connection.Connection`` +- ``S``: ``ZODB.FileStorage.FileStorage`` +- ``T``: ``transaction.interfaces.ITransaction`` +- ``TM``: ``transaction.interfaces.ITransactionManager`` +- ``o1``, ``o2``, ...: pre-existing persistent objects + +Scenario +-------- + +:: + + DB.open() + create C + TM.registerSynch(C) + TM.begin() + create T + C.get(1) # fetches o1 + C.get(2) # fetches o2 + C.get(3) # fetches o3 + o1.modify() # anything that modifies o1 + C.register(o1) + T.join(C) + o2.modify() + C.register(o2) + # T.join(C) does not happen again + o1.modify() + # C.register(o1) doesn't happen again, because o1 was already + # in the changed state. + T.commit() + C.beforeCompletion(T) + C.tpc_begin(T) + S.tpc_begin(T) + C.commit(T) + S.store(1, ..., T) + S.store(2, ..., T) + # o3 is not stored, because it wasn't modified + C.tpc_vote(T) + S.tpc_vote(T) + C.tpc_finish(T) + S.tpc_finish(T, f) # f is a callback function, which arranges + # to call DB.invalidate (next) + DB.invalidate(tid, {1: 1, 2: 1}, C) + C2.invalidate(tid, {1: 1, 2: 1}) # for all connections + # C2 to DB, where C2 + # is not C + TM.free(T) + C.afterCompletion(T) + C._flush_invalidations() + # Processes invalidations that may have come in from other + # transactions. 
+ + +Simple fetch, modify, abort +=========================== + +Participants +------------ + +- ``DB``: ``ZODB.DB.DB`` +- ``C``: ``ZODB.Connection.Connection`` +- ``S``: ``ZODB.FileStorage.FileStorage`` +- ``T``: ``transaction.interfaces.ITransaction`` +- ``TM``: ``transaction.interfaces.ITransactionManager`` +- ``o1``, ``o2``, ...: pre-existing persistent objects + +Scenario +-------- + +:: + + DB.open() + create C + TM.registerSynch(C) + TM.begin() + create T + C.get(1) # fetches o1 + C.get(2) # fetches o2 + C.get(3) # fetches o3 + o1.modify() # anything that modifies o1 + C.register(o1) + T.join(C) + o2.modify() + C.register(o2) + # T.join(C) does not happen again + o1.modify() + # C.register(o1) doesn't happen again, because o1 was already + # in the changed state. + T.abort() + C.beforeCompletion(T) + C.abort(T) + C._cache.invalidate(1) # toss changes to o1 + C._cache.invalidate(2) # toss changes to o2 + # o3 wasn't modified, and its cache entry isn't invalidated. + TM.free(T) + C.afterCompletion(T) + C._flush_invalidations() + # Processes invalidations that may have come in from other + # transactions. 
+ + +Rollback of a savepoint +======================= + +Participants +------------ + +- ``T``: ``transaction.interfaces.ITransaction`` +- ``o1``, ``o2``, ``o3``: some persistent objects +- ``C1``, ``C2``, ``C3``: resource managers +- ``S1``, ``S2``: Transaction savepoint objects +- ``s11``, ``s21``, ``s22``: resource-manager savepoints + +Scenario +-------- + +:: + + create T + o1.modify() + C1.regisiter(o1) + T.join(C1) + T.savepoint() + C1.savepoint() + return s11 + return S1 = Savepoint(T, [r11]) + o1.modify() + C1.regisiter(o1) + o2.modify() + C2.regisiter(o2) + T.join(C2) + T.savepoint() + C1.savepoint() + return s21 + C2.savepoint() + return s22 + return S2 = Savepoint(T, [r21, r22]) + o3.modify() + C3.regisiter(o3) + T.join(C3) + S1.rollback() + S2.rollback() + T.discard() + C1.discard() + C2.discard() + C3.discard() + o3.invalidate() + S2.discard() + s21.discard() # roll back changes since previous, which is r11 + C1.discard(s21) + o1.invalidate() + # truncates temporary storage to s21's position + s22.discard() # roll back changes since previous, which is r11 + C1.discard(s22) + o2.invalidate() + # truncates temporary storage to beginning, because + # s22 was the first savepoint. (Perhaps conection + # savepoints record the log position before the + # data were written, which is 0 in this case. 
+ T.commit() + C1.beforeCompletion(T) + C2.beforeCompletion(T) + C3.beforeCompletion(T) + C1.tpc_begin(T) + S1.tpc_begin(T) + C2.tpc_begin(T) + C3.tpc_begin(T) + C1.commit(T) + S1.store(1, ..., T) + C2.commit(T) + C3.commit(T) + C1.tpc_vote(T) + S1.tpc_vote(T) + C2.tpc_vote(T) + C3.tpc_vote(T) + C1.tpc_finish(T) + S1.tpc_finish(T, f) # f is a callback function, which arranges + c# to call DB.invalidate (next) + DB.invalidate(tid, {1: 1}, C) + TM.free(T) + C1.afterCompletion(T) + C1._flush_invalidations() + C2.afterCompletion(T) + C2._flush_invalidations() + C3.afterCompletion(T) + C3._flush_invalidations() + diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/component.xml b/thesisenv/lib/python3.6/site-packages/ZODB/component.xml new file mode 100644 index 0000000..0227ac9 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/component.xml @@ -0,0 +1,342 @@ + + + + + + + + + + + Path name to the main storage file. The names for + supplemental files, including index and lock files, will be + computed from this. + + + + + If supplied, the file storage will provide blob support and this + is the name of a directory to hold blob data. The directory will + be created if it doesn't exist. If no value (or an empty value) + is provided, then no blob support will be provided. (You can still + use a BlobStorage to provide blob support.) + + + + + Flag that indicates whether the storage should be truncated if + it already exists. + + + + + If true, only reads may be executed against the storage. Note + that the "pack" operation is not considered a write operation + and is still allowed on a read-only filestorage. + + + + + Maximum allowed size of the storage file. Operations which + would cause the size of the storage to exceed the quota will + result in a ZODB.FileStorage.FileStorageQuotaError being + raised. + + + + + The dotted name (dotted module name and object name) of a + packer object. This is used to provide an alternative pack + implementation. 
+ + + + + If false, then no garbage collection will be performed when + packing. This can make packing go much faster and can avoid + problems when objects are referenced only from other + databases. + + + + + If true, a copy of the database before packing is kept in a + ".old" file. + + + + + + + + The storage name, used by the + :meth:`~ZODB.interfaces.IStorage.getName` and + :meth:`~ZODB.interfaces.IStorage.sortKey` methods. + + + + + + + + + + + + + + + + + + + + + + + + + Path name to the blob cache directory. + + + + + Tells whether the cache is a shared writable directory + and that the ZEO protocol should not transfer the file + but only the filename when committing. + + + + + Maximum size of the ZEO blob cache, in bytes. If not set, then + the cache size isn't checked and the blob directory will + grow without bound. + + This option is ignored if shared_blob_dir is true. + + + + + ZEO check size as percent of blob_cache_size. The ZEO + cache size will be checked when this many bytes have been + loaded into the cache. Defaults to 10% of the blob cache + size. This option is ignored if shared_blob_dir is true. + + + + + The name of the storage that the client wants to use. If the + ZEO server serves more than one storage, the client selects + the storage it wants to use by name. The default name is '1', + which is also the default name for the ZEO server. + + + + + The maximum size of the client cache, in bytes, KB or MB. + + + + + The storage name. If unspecified, the address of the server + will be used as the name. + + + + + Enables persistent cache files. The string passed here is + used to construct the cache filenames. If it is not + specified, the client creates a temporary cache that will + only be used by the current object. + + + + + The directory where persistent cache files are stored. By + default cache files, if they are persistent, are stored in + the current directory. 
+ + + + + The minimum delay in seconds between attempts to connect to + the server, in seconds. Defaults to 5 seconds. + + + + + The maximum delay in seconds between attempts to connect to + the server, in seconds. Defaults to 300 seconds. + + + + + A boolean indicating whether the constructor should wait + for the client to connect to the server and verify the cache + before returning. The default is true. + + + + + A flag indicating whether this should be a read-only storage, + defaulting to false (i.e. writing is allowed by default). + + + + + A flag indicating whether a read-only remote storage should be + acceptable as a fall-back when no writable storages are + available. Defaults to false. At most one of read_only and + read_only_fallback should be true. + + + + + The authentication user name of the server. + + + + + The authentication password of the server. + + + + + The authentication realm of the server. Some authentication + schemes use a realm to identify the logic set of user names + that are accepted by this server. + + + + + + A flag indicating whether the client cache should be dropped + instead of an expensive verification. + + + + + A label for the client in server logs + + + + + + + + The storage name, used by the + :meth:`~ZODB.interfaces.IStorage.getName` and + :meth:`~ZODB.interfaces.IStorage.sortKey` methods. + + + + + + + +

+ + + Target size, in number of objects, of each connection's + object cache. + + + + + Target size, in total estimated size for objects, of each connection's + object cache. + "0" means no limit. + + + + + When object records are saved + that are larger than this, a warning is issued, + suggesting that blobs should be used instead. + + + + + The expected maximum number of simultaneously open connections. + There is no hard limit (as many connections as are requested + will be opened, until system resources are exhausted). Exceeding + pool-size connections causes a warning message to be logged, + and exceeding twice pool-size connections causes a critical + message to be logged. + + + + + The minimum interval that an unused (non-historical) + connection should be kept. + + + + + The expected maximum total number of historical connections + simultaneously open. + + + + + Target size, in number of objects, of each historical connection's + object cache. + + + + + Target size, in total estimated size of objects, of each historical connection's + object cache. + + + + + The minimum interval that an unused historical connection should be + kept. + + + + + When multi-databases are in use, this is the name given to this + database in the collection. The name must be unique across all + databases in the collection. The collection must also be given + a mapping from its databases' names to their databases, but that + cannot be specified in a ZODB config file. Applications using + multi-databases typical supply a way to configure the mapping in + their own config files, using the "databases" parameter of a DB + constructor. + + + + + If set to false, implicit cross references (the only kind + currently possible) are disallowed. + + + + + + + + + Path name to the blob storage directory. + + +
+ + + + + diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/config.py b/thesisenv/lib/python3.6/site-packages/ZODB/config.py new file mode 100644 index 0000000..70b854e --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/config.py @@ -0,0 +1,274 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## +"""Open database and storage from a configuration.""" +import os +import ZConfig +import ZODB + +try: + from cStringIO import StringIO +except ImportError: + # Py3 + from io import StringIO + + +db_schema_path = os.path.join(ZODB.__path__[0], "config.xml") +_db_schema = None + +s_schema_path = os.path.join(ZODB.__path__[0], "storage.xml") +_s_schema = None + +def getDbSchema(): + global _db_schema + if _db_schema is None: + _db_schema = ZConfig.loadSchema(db_schema_path) + return _db_schema + +def getStorageSchema(): + global _s_schema + if _s_schema is None: + _s_schema = ZConfig.loadSchema(s_schema_path) + return _s_schema + +def databaseFromString(s): + """Create a database from a database-configuration string. + + The string must contain one or more :ref:`zodb + ` sections. + + The database defined by the first section is returned. + + If :ref:`more than one zodb section is provided + `, a multi-database + configuration will be created and all of the databases will be + available in the returned database's ``databases`` attribute. 
+ """ + return databaseFromFile(StringIO(s)) + +def databaseFromFile(f): + """Create a database from a file object that provides configuration. + + See :func:`databaseFromString`. + """ + config, handle = ZConfig.loadConfigFile(getDbSchema(), f) + return databaseFromConfig(config.database) + +def databaseFromURL(url): + """Load a database from URL (or file name) that provides configuration. + + See :func:`databaseFromString`. + """ + config, handler = ZConfig.loadConfig(getDbSchema(), url) + return databaseFromConfig(config.database) + +def databaseFromConfig(database_factories): + databases = {} + first = None + for factory in database_factories: + db = factory.open(databases) + if first is None: + first = db + + return first + +def storageFromString(s): + """Create a storage from a storage-configuration string. + """ + return storageFromFile(StringIO(s)) + +def storageFromFile(f): + """Create a storage from a file object providing storage-configuration. + """ + config, handle = ZConfig.loadConfigFile(getStorageSchema(), f) + return storageFromConfig(config.storage) + +def storageFromURL(url): + """\ + Create a storage from a URL (or file name) providing storage-configuration. + """ + config, handler = ZConfig.loadConfig(getStorageSchema(), url) + return storageFromConfig(config.storage) + +def storageFromConfig(section): + return section.open() + +class BaseConfig(object): + """Object representing a configured storage or database. 
+ + Methods: + + open() -- open and return the configured object + + Attributes: + + name -- name of the storage + + """ + + def __init__(self, config): + self.config = config + self.name = config.getSectionName() + + def open(self, database_name='unnamed', databases=None): + """Open and return the storage object.""" + raise NotImplementedError + +class ZODBDatabase(BaseConfig): + + def open(self, databases=None): + section = self.config + storage = section.storage.open() + options = {} + + def _option(name, oname=None): + v = getattr(section, name) + if v is not None: + if oname is None: + oname = name + options[oname] = v + + _option('pool_timeout') + _option('allow_implicit_cross_references', 'xrefs') + _option('large_record_size') + + try: + return ZODB.DB( + storage, + pool_size=section.pool_size, + cache_size=section.cache_size, + cache_size_bytes=section.cache_size_bytes, + historical_pool_size=section.historical_pool_size, + historical_cache_size=section.historical_cache_size, + historical_cache_size_bytes=section.historical_cache_size_bytes, + historical_timeout=section.historical_timeout, + database_name=section.database_name or self.name or '', + databases=databases, + **options) + except: + storage.close() + raise + +class MappingStorage(BaseConfig): + + def open(self): + from ZODB.MappingStorage import MappingStorage + return MappingStorage(self.config.name) + +class DemoStorage(BaseConfig): + + def open(self): + base = changes = None + for factory in self.config.factories: + if factory.name == 'changes': + changes = factory.open() + else: + if base is None: + base = factory.open() + else: + raise ValueError("Too many base storages defined!") + + from ZODB.DemoStorage import DemoStorage + return DemoStorage(self.config.name, base=base, changes=changes) + +class FileStorage(BaseConfig): + + def open(self): + from ZODB.FileStorage import FileStorage + config = self.config + options = {} + if getattr(config, 'packer', None): + packer = config.packer + if 
':' in packer: + m, expr = packer.split(':', 1) + m = __import__(m, {}, {}, ['*']) + options['packer'] = eval(expr, m.__dict__) + else: + m, name = config.packer.rsplit('.', 1) + m = __import__(m, {}, {}, ['*']) + options['packer'] = getattr(m, name) + + for name in ('blob_dir', 'create', 'read_only', 'quota', 'pack_gc', + 'pack_keep_old'): + v = getattr(config, name, self) + if v is not self: + options[name] = v + + return FileStorage(config.path, **options) + +class BlobStorage(BaseConfig): + + def open(self): + from ZODB.blob import BlobStorage + base = self.config.base.open() + return BlobStorage(self.config.blob_dir, base) + + +class ZEOClient(BaseConfig): + + def open(self): + from ZEO.ClientStorage import ClientStorage + # config.server is a multikey of socket-connection-address values + # where the value is a socket family, address tuple. + L = [server.address for server in self.config.server] + options = {} + if self.config.blob_cache_size is not None: + options['blob_cache_size'] = self.config.blob_cache_size + if self.config.blob_cache_size_check is not None: + options['blob_cache_size_check'] = self.config.blob_cache_size_check + if self.config.client_label is not None: + options['client_label'] = self.config.client_label + + return ClientStorage( + L, + blob_dir=self.config.blob_dir, + shared_blob_dir=self.config.shared_blob_dir, + storage=self.config.storage, + cache_size=self.config.cache_size, + name=self.config.name, + client=self.config.client, + var=self.config.var, + min_disconnect_poll=self.config.min_disconnect_poll, + max_disconnect_poll=self.config.max_disconnect_poll, + wait=self.config.wait, + read_only=self.config.read_only, + read_only_fallback=self.config.read_only_fallback, + drop_cache_rather_verify=self.config.drop_cache_rather_verify, + username=self.config.username, + password=self.config.password, + realm=self.config.realm, + **options) + +class BDBStorage(BaseConfig): + + def open(self): + from BDBStorage.BerkeleyBase import 
BerkeleyConfig + storageclass = self.get_storageclass() + bconf = BerkeleyConfig() + for name in dir(BerkeleyConfig): + if name.startswith('_'): + continue + setattr(bconf, name, getattr(self.config, name)) + return storageclass(self.config.envdir, config=bconf) + +class BDBMinimalStorage(BDBStorage): + + def get_storageclass(self): + import BDBStorage.BDBMinimalStorage + return BDBStorage.BDBMinimalStorage.BDBMinimalStorage + +class BDBFullStorage(BDBStorage): + + def get_storageclass(self): + import BDBStorage.BDBFullStorage + return BDBStorage.BDBFullStorage.BDBFullStorage diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/config.xml b/thesisenv/lib/python3.6/site-packages/ZODB/config.xml new file mode 100644 index 0000000..6ab59c5 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/config.xml @@ -0,0 +1,8 @@ + + + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/conversionhack.py b/thesisenv/lib/python3.6/site-packages/ZODB/conversionhack.py new file mode 100644 index 0000000..9228739 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/conversionhack.py @@ -0,0 +1,34 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## + +import persistent.mapping + +class fixer(object): + def __of__(self, parent): + def __setstate__(state, self=parent): + self._container=state + del self.__setstate__ + return __setstate__ + +fixer=fixer() + +class hack(object): pass +hack=hack() + +def __basicnew__(): + r=persistent.mapping.PersistentMapping() + r.__setstate__=fixer + return r + +hack.__basicnew__=__basicnew__ diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/cross-database-references.txt b/thesisenv/lib/python3.6/site-packages/ZODB/cross-database-references.txt new file mode 100644 index 0000000..1ad6c0f --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/cross-database-references.txt @@ -0,0 +1,200 @@ +========================= +Cross-Database References +========================= + +Persistent references to objects in different databases within a +multi-database are allowed. + +Lets set up a multi-database with 2 databases: + + >>> import ZODB.tests.util, transaction, persistent + >>> databases = {} + >>> db1 = ZODB.tests.util.DB(databases=databases, database_name='1') + >>> db2 = ZODB.tests.util.DB(databases=databases, database_name='2') + +And create a persistent object in the first database: + + >>> tm = transaction.TransactionManager() + >>> conn1 = db1.open(transaction_manager=tm) + >>> p1 = MyClass() + >>> conn1.root()['p'] = p1 + >>> tm.commit() + +First, we get a connection to the second database. We get the second +connection using the first connection's `get_connection` method. This +is important. 
When using multiple databases, we need to make sure we +use a consistent set of connections so that the objects in the +connection caches are connected in a consistent manner. + + >>> conn2 = conn1.get_connection('2') + +Now, we'll create a second persistent object in the second database. +We'll have a reference to the first object: + + >>> p2 = MyClass() + >>> conn2.root()['p'] = p2 + >>> p2.p1 = p1 + >>> tm.commit() + +Now, let's open a separate connection to database 2. We use it to +read `p2`, use `p2` to get to `p1`, and verify that it is in database 1: + + >>> conn = db2.open() + >>> p2x = conn.root()['p'] + >>> p1x = p2x.p1 + + >>> p2x is p2, p2x._p_oid == p2._p_oid, p2x._p_jar.db() is db2 + (False, True, True) + + >>> p1x is p1, p1x._p_oid == p1._p_oid, p1x._p_jar.db() is db1 + (False, True, True) + +It isn't valid to create references outside a multi database: + + >>> db3 = ZODB.tests.util.DB() + >>> conn3 = db3.open(transaction_manager=tm) + >>> p3 = MyClass() + >>> conn3.root()['p'] = p3 + >>> tm.commit() + + >>> p2.p3 = p3 + >>> tm.commit() # doctest: +NORMALIZE_WHITESPACE +ELLIPSIS + Traceback (most recent call last): + ... + InvalidObjectReference: + ('Attempt to store an object from a foreign database connection', + , + ) + + >>> tm.abort() + +Databases for new objects +------------------------- + +Objects are normally added to a database by making them reachable from +an object already in the database. This is unambiguous when there is +only one database. With multiple databases, it is not so clear what +happens. Consider: + + >>> p4 = MyClass() + >>> p1.p4 = p4 + >>> p2.p4 = p4 + +In this example, the new object is reachable from both `p1` in database +1 and `p2` in database 2. If we commit, which database should `p4` end up +in? This sort of ambiguity could lead to subtle bugs. 
For that reason, +an error is generated if we commit changes when new objects are +reachable from multiple databases: + + >>> tm.commit() # doctest: +NORMALIZE_WHITESPACE +ELLIPSIS + Traceback (most recent call last): + ... + InvalidObjectReference: + ("A new object is reachable from multiple databases. Won't try to + guess which one was correct!", + , + ) + + >>> tm.abort() + +To resolve this ambiguity, we can commit before an object becomes +reachable from multiple databases. + + >>> p4 = MyClass() + >>> p1.p4 = p4 + >>> tm.commit() + >>> p2.p4 = p4 + >>> tm.commit() + >>> p4._p_jar.db().database_name + '1' + +This doesn't work with a savepoint: + + >>> p5 = MyClass() + >>> p1.p5 = p5 + >>> s = tm.savepoint() + >>> p2.p5 = p5 + >>> tm.commit() # doctest: +NORMALIZE_WHITESPACE +ELLIPSIS + Traceback (most recent call last): + ... + InvalidObjectReference: + ("A new object is reachable from multiple databases. Won't try to guess + which one was correct!", + , + ) + + >>> tm.abort() + +(Maybe it should.) + +We can disambiguate this situation by using the connection add method +to explicitly say what database an object belongs to: + + >>> p5 = MyClass() + >>> p1.p5 = p5 + >>> p2.p5 = p5 + >>> conn1.add(p5) + >>> tm.commit() + >>> p5._p_jar.db().database_name + '1' + +This the most explicit and thus the best way, when practical, to avoid +the ambiguity. + +Dissallowing implicit cross-database references +----------------------------------------------- + +The database contructor accepts a xrefs keyword argument that defaults +to True. If False is passed, the implicit cross database references +are disallowed. (Note that currently, implicit cross references are +the only kind of cross references allowed.) + + >>> databases = {} + >>> db1 = ZODB.tests.util.DB(databases=databases, database_name='1') + >>> db2 = ZODB.tests.util.DB(databases=databases, database_name='2', + ... 
xrefs=False) + +In this example, we allow cross-references from db1 to db2, but not +the other way around. + + >>> c1 = db1.open() + >>> c2 = c1.get_connection('2') + >>> c1.root.x = c2.root() + >>> transaction.commit() + >>> c2.root.x = c1.root() + >>> transaction.commit() # doctest: +NORMALIZE_WHITESPACE +ELLIPSIS + Traceback (most recent call last): + ... + InvalidObjectReference: + ("Database '2' doesn't allow implicit cross-database references", + , + {'x': {}}) + + >>> transaction.abort() + +NOTE +---- + +This implementation is incomplete. It allows creating and using +cross-database references, however, there are a number of facilities +missing: + +cross-database garbage collection + + Garbage collection is done on a database by database basis. + If an object on a database only has references to it from other + databases, then the object will be garbage collected when its + database is packed. The cross-database references to it will be + broken. + +cross-database undo + + Undo is only applied to a single database. Fixing this for + multiple databases is going to be extremely difficult. Undo + currently poses consistency problems, so it is not (or should not + be) widely used. + +Cross-database aware (tolerant) export/import + + The export/import facility needs to be aware, at least, of cross-database + references. diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/event.py b/thesisenv/lib/python3.6/site-packages/ZODB/event.py new file mode 100644 index 0000000..6815e5d --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/event.py @@ -0,0 +1,18 @@ +############################################################################## +# +# Copyright Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +try: + from zope.event import notify +except ImportError: + notify = lambda event: None + diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/event.txt b/thesisenv/lib/python3.6/site-packages/ZODB/event.txt new file mode 100644 index 0000000..7ecea36 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/event.txt @@ -0,0 +1,10 @@ +Event support + +Sometimes, you want to react when ZODB does certain things. In the +past, ZODB provided ad hoc hook functions for this. Going forward, +ZODB will use an event mechanism. ZODB.event.notify is called with +events of interest. + +If zope.event is installed, then ZODB.event.notify is simply an alias +for zope.event. If zope.event isn't installed, then ZODB.event is a +noop. diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/fsIndex.py b/thesisenv/lib/python3.6/site-packages/ZODB/fsIndex.py new file mode 100644 index 0000000..4f47b52 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/fsIndex.py @@ -0,0 +1,280 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. 
+# +############################################################################## +"""Implement an OID to File-position (long integer) mapping.""" + +# To save space, we do two things: +# +# 1. We split the keys (OIDS) into 6-byte prefixes and 2-byte suffixes. +# We use the prefixes as keys in a mapping from prefix to mappings +# of suffix to data: +# +# data is {prefix -> {suffix -> data}} +# +# 2. We limit the data size to 48 bits. This should allow databases +# as large as 256 terabytes. +# +# Most of the space is consumed by items in the mappings from 2-byte +# suffix to 6-byte data. This should reduce the overall memory usage to +# 8-16 bytes per OID. +# +# Because +# - the mapping from suffix to data contains at most 65535 entries, +# - this is an in-memory data structure +# - new keys are inserted sequentially, +# we use a BTree bucket instead of a full BTree to store the results. +# +# We use p64 to convert integers to 8-byte strings and lop off the two +# high-order bytes when saving. On loading data, we add the leading +# bytes back before using u64 to convert the data back to (long) +# integers. 
+import struct + +from BTrees.fsBTree import fsBucket +from BTrees.OOBTree import OOBTree +import six + +from ZODB._compat import INT_TYPES +from ZODB._compat import Pickler +from ZODB._compat import Unpickler +from ZODB._compat import _protocol + + +# convert between numbers and six-byte strings + +def num2str(n): + return struct.pack(">Q", n)[2:] + +def str2num(s): + return struct.unpack(">Q", b"\000\000" + s)[0] + +def prefix_plus_one(s): + num = str2num(s) + return num2str(num + 1) + +def prefix_minus_one(s): + num = str2num(s) + return num2str(num - 1) + +def ensure_bytes(s): + # on Python 3 we might pickle bytes and unpickle unicode strings + return s.encode('ascii') if not isinstance(s, bytes) else s + + +class fsIndex(object): + + def __init__(self, data=None): + self._data = OOBTree() + if data: + self.update(data) + + def __getstate__(self): + return dict( + state_version = 1, + _data = [(k, v.toString()) + for (k, v) in six.iteritems(self._data) + ] + ) + + def __setstate__(self, state): + version = state.pop('state_version', 0) + getattr(self, '_setstate_%s' % version)(state) + + def _setstate_0(self, state): + self.__dict__.clear() + self.__dict__.update(state) + self._data = OOBTree([ + (ensure_bytes(k), v) + for (k, v) in self._data.items() + ]) + + def _setstate_1(self, state): + self._data = OOBTree([ + (ensure_bytes(k), fsBucket().fromString(ensure_bytes(v))) + for (k, v) in state['_data'] + ]) + + def __getitem__(self, key): + assert isinstance(key, bytes) + return str2num(self._data[key[:6]][key[6:]]) + + def save(self, pos, fname): + with open(fname, 'wb') as f: + pickler = Pickler(f, _protocol) + pickler.fast = True + pickler.dump(pos) + for k, v in six.iteritems(self._data): + pickler.dump((k, v.toString())) + pickler.dump(None) + + @classmethod + def load(class_, fname): + with open(fname, 'rb') as f: + unpickler = Unpickler(f) + pos = unpickler.load() + if not isinstance(pos, INT_TYPES): + # NB: this might contain OIDs that got unpickled + 
# into Unicode strings on Python 3; hope the caller + # will pipe the result to fsIndex().update() to normalize + # the keys + return pos # Old format + index = class_() + data = index._data + while 1: + v = unpickler.load() + if not v: + break + k, v = v + data[ensure_bytes(k)] = fsBucket().fromString(ensure_bytes(v)) + return dict(pos=pos, index=index) + + def get(self, key, default=None): + assert isinstance(key, bytes) + tree = self._data.get(key[:6], default) + if tree is default: + return default + v = tree.get(key[6:], default) + if v is default: + return default + return str2num(v) + + def __setitem__(self, key, value): + assert isinstance(key, bytes) + value = num2str(value) + treekey = key[:6] + tree = self._data.get(treekey) + if tree is None: + tree = fsBucket() + self._data[treekey] = tree + tree[key[6:]] = value + + def __delitem__(self, key): + assert isinstance(key, bytes) + treekey = key[:6] + tree = self._data.get(treekey) + if tree is None: + raise KeyError(key) + del tree[key[6:]] + if not tree: + del self._data[treekey] + + def __len__(self): + r = 0 + for tree in six.itervalues(self._data): + r += len(tree) + return r + + def update(self, mapping): + for k, v in mapping.items(): + self[ensure_bytes(k)] = v + + def has_key(self, key): + v = self.get(key, self) + return v is not self + + def __contains__(self, key): + assert isinstance(key, bytes) + tree = self._data.get(key[:6]) + if tree is None: + return False + v = tree.get(key[6:], None) + if v is None: + return False + return True + + def clear(self): + self._data.clear() + + def __iter__(self): + for prefix, tree in six.iteritems(self._data): + for suffix in tree: + yield prefix + suffix + + iterkeys = __iter__ + + def keys(self): + return list(self.iterkeys()) + + def iteritems(self): + for prefix, tree in six.iteritems(self._data): + for suffix, value in six.iteritems(tree): + yield (prefix + suffix, str2num(value)) + + def items(self): + return list(self.iteritems()) + + def 
itervalues(self): + for tree in six.itervalues(self._data): + for value in six.itervalues(tree): + yield str2num(value) + + def values(self): + return list(self.itervalues()) + + # Comment below applies for the following minKey and maxKey methods + # + # Obscure: what if `tree` is actually empty? We're relying here on + # that this class doesn't implement __delitem__: once a key gets + # into an fsIndex, the only way it can go away is by invoking + # clear(). Therefore nothing in _data.values() is ever empty. + # + # Note that because `tree` is an fsBTree, its minKey()/maxKey() methods are + # very efficient. + + def minKey(self, key=None): + if key is None: + smallest_prefix = self._data.minKey() + else: + smallest_prefix = self._data.minKey(key[:6]) + + tree = self._data[smallest_prefix] + + assert tree + + if key is None: + smallest_suffix = tree.minKey() + else: + try: + smallest_suffix = tree.minKey(key[6:]) + except ValueError: # 'empty tree' (no suffix >= arg) + next_prefix = prefix_plus_one(smallest_prefix) + smallest_prefix = self._data.minKey(next_prefix) + tree = self._data[smallest_prefix] + assert tree + smallest_suffix = tree.minKey() + + return smallest_prefix + smallest_suffix + + def maxKey(self, key=None): + if key is None: + biggest_prefix = self._data.maxKey() + else: + biggest_prefix = self._data.maxKey(key[:6]) + + tree = self._data[biggest_prefix] + + assert tree + + if key is None: + biggest_suffix = tree.maxKey() + else: + try: + biggest_suffix = tree.maxKey(key[6:]) + except ValueError: # 'empty tree' (no suffix <= arg) + next_prefix = prefix_minus_one(biggest_prefix) + biggest_prefix = self._data.maxKey(next_prefix) + tree = self._data[biggest_prefix] + assert tree + biggest_suffix = tree.maxKey() + + return biggest_prefix + biggest_suffix diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/fsrecover.py b/thesisenv/lib/python3.6/site-packages/ZODB/fsrecover.py new file mode 100644 index 0000000..7bdc47f --- /dev/null +++ 
b/thesisenv/lib/python3.6/site-packages/ZODB/fsrecover.py @@ -0,0 +1,393 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## +"""Simple script for repairing damaged FileStorage files. + +Usage: %s [-f] [-v level] [-p] [-P seconds] input output + +Recover data from a FileStorage data file, skipping over damaged data. Any +damaged data will be lost. This could lead to useless output if critical +data is lost. + +Options: + + -f + Overwrite output file even if it exists. + + -v level + + Set the verbosity level: + + 0 -- show progress indicator (default) + + 1 -- show transaction times and sizes + + 2 -- show transaction times and sizes, and show object (record) + ids, versions, and sizes + + -p + + Copy partial transactions. If a data record in the middle of a + transaction is bad, the data up to the bad data are packed. The + output record is marked as packed. If this option is not used, + transactions with any bad data are skipped. + + -P t + + Pack data to t seconds in the past. Note that if the "-p" option is + used, then t should be 0. + + +Important: The ZODB package must be importable. You may need to adjust + PYTHONPATH accordingly. 
+""" +from __future__ import print_function + +# Algorithm: +# +# position to start of input +# while 1: +# if end of file: +# break +# try: +# copy_transaction +# except: +# scan for transaction +# continue + +import sys +import os +import getopt +import time +from struct import unpack + +try: + import ZODB +except ImportError: + if os.path.exists('ZODB'): + sys.path.append('.') + elif os.path.exists('FileStorage.py'): + sys.path.append('..') + import ZODB + +import ZODB.FileStorage +from ZODB.utils import u64, as_text +from ZODB.FileStorage import TransactionRecord +from ZODB._compat import loads + +from persistent.TimeStamp import TimeStamp + + +def die(mess='', show_docstring=False): + if mess: + print(mess + '\n', file=sys.stderr) + if show_docstring: + print(__doc__ % sys.argv[0], file=sys.stderr) + sys.exit(1) + +class ErrorFound(Exception): + pass + +def error(mess, *args): + raise ErrorFound(mess % args) + +def read_txn_header(f, pos, file_size, outp, ltid): + # Read the transaction record + f.seek(pos) + h = f.read(23) + if len(h) < 23: + raise EOFError + + tid, stl, status, ul, dl, el = unpack(">8s8scHHH",h) + status = as_text(status) + tl = u64(stl) + + if pos + (tl + 8) > file_size: + error("bad transaction length at %s", pos) + + if tl < (23 + ul + dl + el): + error("invalid transaction length, %s, at %s", tl, pos) + + if ltid and tid < ltid: + error("time-stamp reducation %s < %s, at %s", u64(tid), u64(ltid), pos) + + if status == "c": + truncate(f, pos, file_size, outp) + raise EOFError + + if status not in " up": + error("invalid status, %r, at %s", status, pos) + + tpos = pos + tend = tpos + tl + + if status == "u": + # Undone transaction, skip it + f.seek(tend) + h = f.read(8) + if h != stl: + error("inconsistent transaction length at %s", pos) + pos = tend + 8 + return pos, None, tid + + pos = tpos+(23+ul+dl+el) + user = f.read(ul) + description = f.read(dl) + if el: + try: e = loads(f.read(el)) + except: e = {} + else: e = {} + + result = 
TransactionRecord(tid, status, user, description, e, pos, tend, + f, tpos) + pos = tend + + # Read the (intentionally redundant) transaction length + f.seek(pos) + h = f.read(8) + if h != stl: + error("redundant transaction length check failed at %s", pos) + pos += 8 + + return pos, result, tid + +def truncate(f, pos, file_size, outp): + """Copy data from pos to end of f to a .trNNN file.""" + + # _trname is global so that the test suite can know the path too (in + # order to delete the file when the test ends). + global _trname + + i = 0 + while 1: + _trname = outp + ".tr%d" % i + if os.path.exists(_trname): + i += 1 + else: + break + tr = open(_trname, "wb") + copy(f, tr, file_size - pos) + f.seek(pos) + tr.close() + +def copy(src, dst, n): + while n: + buf = src.read(8096) + if not buf: + break + if len(buf) > n: + buf = buf[:n] + dst.write(buf) + n -= len(buf) + +def scan(f, pos): + """Return a potential transaction location following pos in f. + + This routine scans forward from pos looking for the last data + record in a transaction. A period '.' always occurs at the end of + a pickle, and an 8-byte transaction length follows the last + pickle. If a period is followed by a plausible 8-byte transaction + length, assume that we have found the end of a transaction. + + The caller should try to verify that the returned location is + actually a transaction header. + """ + while 1: + f.seek(pos) + data = f.read(8096) + if not data: + return 0 + + s = 0 + while 1: + l = data.find(b".", s) + if l < 0: + pos += len(data) + break + # If we are less than 8 bytes from the end of the + # string, we need to read more data. 
+ s = l + 1 + if s > len(data) - 8: + pos += l + break + tl = u64(data[s:s+8]) + if tl < pos: + return pos + s + 8 + +def iprogress(i): + if i % 2: + print(".", end=' ') + else: + print((i/2) % 10, end=' ') + sys.stdout.flush() + +def progress(p): + for i in range(p): + iprogress(i) + +def main(): + try: + opts, args = getopt.getopt(sys.argv[1:], "fv:pP:") + except getopt.error as msg: + die(str(msg), show_docstring=True) + + if len(args) != 2: + die("two positional arguments required", show_docstring=True) + inp, outp = args + + force = partial = False + verbose = 0 + pack = None + for opt, v in opts: + if opt == "-v": + verbose = int(v) + elif opt == "-p": + partial = True + elif opt == "-f": + force = True + elif opt == "-P": + pack = time.time() - float(v) + + recover(inp, outp, verbose, partial, force, pack) + +def recover(inp, outp, verbose=0, partial=False, force=False, pack=None): + print("Recovering", inp, "into", outp) + + if os.path.exists(outp) and not force: + die("%s exists" % outp) + + f = open(inp, "rb") + if f.read(4) != ZODB.FileStorage.packed_version: + die("input is not a file storage") + + f.seek(0,2) + file_size = f.tell() + + ofs = ZODB.FileStorage.FileStorage(outp, create=1) + _ts = None + ok = 1 + prog1 = 0 + undone = 0 + + pos = 4 + ltid = None + while pos: + try: + npos, txn, tid = read_txn_header(f, pos, file_size, outp, ltid) + except EOFError: + break + except (KeyboardInterrupt, SystemExit): + raise + except Exception as err: + print("error reading txn header:", err) + if not verbose: + progress(prog1) + pos = scan(f, pos) + if verbose > 1: + print("looking for valid txn header at", pos) + continue + ltid = tid + + if txn is None: + undone = undone + npos - pos + pos = npos + continue + else: + pos = npos + + tid = txn.tid + + if _ts is None: + _ts = TimeStamp(tid) + else: + t = TimeStamp(tid) + if t <= _ts: + if ok: + print(("Time stamps out of order %s, %s" % (_ts, t))) + ok = 0 + _ts = t.laterThan(_ts) + tid = _ts.raw() + else: + 
_ts = t + if not ok: + print(("Time stamps back in order %s" % (t))) + ok = 1 + + ofs.tpc_begin(txn, tid, txn.status) + + if verbose: + print("begin", pos, _ts, end=' ') + if verbose > 1: + print() + sys.stdout.flush() + + nrec = 0 + try: + for r in txn: + if verbose > 1: + if r.data is None: + l = "bp" + else: + l = len(r.data) + + print("%7d %s %s" % (u64(r.oid), l)) + ofs.restore(r.oid, r.tid, r.data, '', r.data_txn, + txn) + nrec += 1 + except (KeyboardInterrupt, SystemExit): + raise + except Exception as err: + if partial and nrec: + ofs._status = "p" + ofs.tpc_vote(txn) + ofs.tpc_finish(txn) + if verbose: + print("partial") + else: + ofs.tpc_abort(txn) + print("error copying transaction:", err) + if not verbose: + progress(prog1) + pos = scan(f, pos) + if verbose > 1: + print("looking for valid txn header at", pos) + else: + ofs.tpc_vote(txn) + ofs.tpc_finish(txn) + if verbose: + print("finish") + sys.stdout.flush() + + if not verbose: + prog = pos * 20 / file_size + while prog > prog1: + prog1 = prog1 + 1 + iprogress(prog1) + + + bad = file_size - undone - ofs._pos + + print("\n%s bytes removed during recovery" % bad) + if undone: + print("%s bytes of undone transaction data were skipped" % undone) + + if pack is not None: + print("Packing ...") + from ZODB.serialize import referencesf + ofs.pack(pack, referencesf) + + ofs.close() + f.close() + +if __name__ == "__main__": + main() diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/fstools.py b/thesisenv/lib/python3.6/site-packages/ZODB/fstools.py new file mode 100644 index 0000000..a0707cb --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/fstools.py @@ -0,0 +1,145 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## + +"""Tools for using FileStorage data files. + +TODO: This module needs tests. +Caution: This file needs to be kept in sync with FileStorage.py. +""" + +import struct + +from ZODB.FileStorage.format import TRANS_HDR, DATA_HDR, TRANS_HDR_LEN +from ZODB.FileStorage.format import DATA_HDR_LEN +from ZODB.utils import u64 +from ZODB._compat import loads +from persistent.TimeStamp import TimeStamp + + +class TxnHeader(object): + """Object representing a transaction record header. + + Attribute Position Value + --------- -------- ----- + tid 0- 8 transaction id + length 8-16 length of entire transaction record - 8 + status 16-17 status of transaction (' ', 'u', 'p'?) + user_len 17-19 length of user field (pack code H) + descr_len 19-21 length of description field (pack code H) + ext_len 21-23 length of extensions (pack code H) + """ + + def __init__(self, file, pos): + self._file = file + self._pos = pos + self._read_header() + + def _read_header(self): + self._file.seek(self._pos) + self._hdr = self._file.read(TRANS_HDR_LEN) + (self.tid, self.length, self.status, self.user_len, self.descr_len, + self.ext_len) = struct.unpack(TRANS_HDR, self._hdr) + + def read_meta(self): + """Load user, descr, and ext attributes.""" + self.user = "" + self.descr = "" + self.ext = {} + if not (self.user_len or self.descr_len or self.ext_len): + return + self._file.seek(self._pos + TRANS_HDR_LEN) + if self.user_len: + self.user = self._file.read(self.user_len) + if self.descr_len: + self.descr = self._file.read(self.descr_len) + if self.ext_len: + self._ext = self._file.read(self.ext_len) + self.ext = loads(self._ext) + + def get_offset(self): + return self._pos + 
+ def __len__(self): + return TRANS_HDR_LEN + self.user_len + self.descr_len + self.ext_len + + def get_data_offset(self): + return self._pos + len(self) + + def get_timestamp(self): + return TimeStamp(self.tid) + + def get_raw_data(self): + data_off = self.get_data_offset() + data_len = self.length - (data_off - self._pos) + self._file.seek(data_off) + return self._file.read(data_len) + + def next_txn(self): + off = self._pos + self.length + 8 + self._file.seek(off) + s = self._file.read(8) + if not s: + return None + return TxnHeader(self._file, off) + + def prev_txn(self): + if self._pos == 4: + return None + self._file.seek(self._pos - 8) + tlen = u64(self._file.read(8)) + return TxnHeader(self._file, self._pos - (tlen + 8)) + +class DataHeader(object): + """Object representing a data record header. + + Attribute Position Value + --------- -------- ----- + oid 0- 8 object id + serial 8-16 object serial numver + prev_rec_pos 16-24 position of previous data record for object + txn_pos 24-32 position of txn header + version_len 32-34 length of version (always 0) + data_len 34-42 length of data + + """ + + def __init__(self, file, pos): + self._file = file + self._pos = pos + self._read_header() + + def _read_header(self): + self._file.seek(self._pos) + self._hdr = self._file.read(DATA_HDR_LEN) + # always read the longer header, just in case + (self.oid, self.serial, prev_rec_pos, txn_pos, vlen, data_len + ) = struct.unpack(DATA_HDR, self._hdr[:DATA_HDR_LEN]) + assert not vlen + self.prev_rec_pos = u64(prev_rec_pos) + self.txn_pos = u64(txn_pos) + self.data_len = u64(data_len) + + def next_offset(self): + """Return offset of next record.""" + off = self._pos + self.data_len + off += DATA_HDR_LEN + if self.data_len == 0: + off += 8 # backpointer + return off + +def prev_txn(f): + """Return transaction located before current file position.""" + f.seek(-8, 1) + tlen = u64(f.read(8)) + 8 + return TxnHeader(f, f.tell() - tlen) diff --git 
a/thesisenv/lib/python3.6/site-packages/ZODB/historical_connections.txt b/thesisenv/lib/python3.6/site-packages/ZODB/historical_connections.txt new file mode 100644 index 0000000..14a6d4f --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/historical_connections.txt @@ -0,0 +1,332 @@ +====================== +Historical Connections +====================== + +.. We need to mess with time to prevent spurious test failures on windows + + >>> _now = 1231019584.0 + >>> def faux_time_time(): + ... global _now + ... _now += .001 # must be less than 0.01 + ... return _now + >>> import time + >>> real_time_time = time.time + >>> real_time_sleep = time.sleep + >>> def faux_time_sleep(amt): + ... global _now + ... _now += amt + >>> if isinstance(time,type): + ... time.time = staticmethod(faux_time_time) # Jython + ... time.sleep = faux_time_sleep + ... else: + ... time.time = faux_time_time + ... time.sleep = faux_time_sleep + >>> def utcnow(): + ... mus = (int(_now % 1 * 1000000), ) + ... return datetime.datetime(*time.gmtime(_now)[:6] + mus) + +Usage +===== + +A database can be opened with a read-only, historical connection when given +a specific transaction or datetime. This can enable full-context application +level conflict resolution, historical exploration and preparation for reverts, +or even the use of a historical database revision as "production" while +development continues on a "development" head. + +A database can be opened historically ``at`` or ``before`` a given transaction +serial or datetime. Here's a simple example. It should work with any storage +that supports ``loadBefore``. + +We'll begin our example with a fairly standard set up. We + +- make a storage and a database; +- open a normal connection; +- modify the database through the connection; +- commit a transaction, remembering the time in UTC; +- modify the database again; and +- commit a transaction. 
+ + >>> import ZODB.MappingStorage + >>> db = ZODB.MappingStorage.DB() + >>> conn = db.open() + + >>> import persistent.mapping + + >>> conn.root()['first'] = persistent.mapping.PersistentMapping(count=0) + + >>> import transaction + >>> transaction.commit() + +We wait for some time to pass, record he time, and then make some other changes. + + >>> import time + >>> time.sleep(.01) + + >>> import datetime + >>> now = utcnow() + >>> time.sleep(.01) + + >>> root = conn.root() + >>> root['second'] = persistent.mapping.PersistentMapping() + >>> root['first']['count'] += 1 + + >>> transaction.commit() + +Now we will show a historical connection. We'll open one using the ``now`` +value we generated above, and then demonstrate that the state of the original +connection, at the mutable head of the database, is different than the +historical state. + + >>> transaction1 = transaction.TransactionManager() + + >>> historical_conn = db.open(transaction_manager=transaction1, at=now) + + >>> sorted(conn.root().keys()) + ['first', 'second'] + >>> conn.root()['first']['count'] + 1 + + >>> sorted(historical_conn.root().keys()) + ['first'] + >>> historical_conn.root()['first']['count'] + 0 + +Moreover, the historical connection cannot commit changes. + + >>> historical_conn.root()['first']['count'] += 1 + >>> historical_conn.root()['first']['count'] + 1 + >>> transaction1.commit() + Traceback (most recent call last): + ... + ReadOnlyHistoryError + >>> transaction1.abort() + >>> historical_conn.root()['first']['count'] + 0 + +(It is because of the mutable behavior outside of transactional semantics that +we must have a separate connection, and associated object cache, per thread, +even though the semantics should be readonly.) + +As demonstrated, a timezone-naive datetime will be interpreted as UTC. You +can also pass a timezone-aware datetime or a serial (transaction id). +Here's opening with a serial--the serial of the root at the time of the first +commit. 
+ + >>> historical_serial = historical_conn.root()._p_serial + >>> historical_conn.close() + + >>> historical_conn = db.open(transaction_manager=transaction1, + ... at=historical_serial) + >>> sorted(historical_conn.root().keys()) + ['first'] + >>> historical_conn.root()['first']['count'] + 0 + >>> historical_conn.close() + +We've shown the ``at`` argument. You can also ask to look ``before`` a datetime +or serial. (It's an error to pass both [#not_both]_) In this example, we're +looking at the database immediately prior to the most recent change to the +root. + + >>> serial = conn.root()._p_serial + >>> historical_conn = db.open( + ... transaction_manager=transaction1, before=serial) + >>> sorted(historical_conn.root().keys()) + ['first'] + >>> historical_conn.root()['first']['count'] + 0 + +In fact, ``at`` arguments are translated into ``before`` values because the +underlying mechanism is a storage's loadBefore method. When you look at a +connection's ``before`` attribute, it is normalized into a ``before`` serial, +no matter what you pass into ``db.open``. + + >>> print(conn.before) + None + >>> historical_conn.before == serial + True + + >>> conn.close() + +Configuration +============= + +Like normal connections, the database lets you set how many total historical +connections can be active without generating a warning, and +how many objects should be kept in each historical connection's object cache. + + >>> db.getHistoricalPoolSize() + 3 + >>> db.setHistoricalPoolSize(4) + >>> db.getHistoricalPoolSize() + 4 + + >>> db.getHistoricalCacheSize() + 1000 + >>> db.setHistoricalCacheSize(2000) + >>> db.getHistoricalCacheSize() + 2000 + +In addition, you can specify the minimum number of seconds that an unused +historical connection should be kept. + + >>> db.getHistoricalTimeout() + 300 + >>> db.setHistoricalTimeout(400) + >>> db.getHistoricalTimeout() + 400 + +All three of these values can be specified in a ZConfig file. 
+
+ >>> import ZODB.config
+ >>> db2 = ZODB.config.databaseFromString('''
+ ...
+ ...
+ ... historical-pool-size 3
+ ... historical-cache-size 1500
+ ... historical-timeout 6m
+ ...
+ ... ''')
+ >>> db2.getHistoricalPoolSize()
+ 3
+ >>> db2.getHistoricalCacheSize()
+ 1500
+ >>> db2.getHistoricalTimeout()
+ 360
+
+
+The pool lets us reuse connections. To see this, we'll open some
+connections, close them, and then open them again:
+
+ >>> conns1 = [db2.open(before=serial) for i in range(4)]
+ >>> _ = [c.close() for c in conns1]
+ >>> conns2 = [db2.open(before=serial) for i in range(4)]
+
+Now let's look at what we got. The first connection in conns2 is the
+last connection in conns1, because it was the last connection closed.
+
+ >>> conns2[0] is conns1[-1]
+ True
+
+Also for the next two:
+
+ >>> (conns2[1] is conns1[-2]), (conns2[2] is conns1[-3])
+ (True, True)
+
+But not for the last:
+
+ >>> conns2[3] is conns1[-4]
+ False
+
+Because the pool size was set to 3.
+
+Connections are also discarded if they haven't been used in a while.
+To see this, let's close two of the connections:
+
+ >>> conns2[0].close(); conns2[1].close()
+
+We'll also set the historical timeout to be very low:
+
+ >>> db2.setHistoricalTimeout(.01)
+ >>> time.sleep(.1)
+ >>> conns2[2].close(); conns2[3].close()
+
+Now, when we open 4 connections:
+
+ >>> conns1 = [db2.open(before=serial) for i in range(4)]
+
+We'll see that only the last 2 connections from conns2 are in the
+result:
+
+ >>> [c in conns1 for c in conns2]
+ [False, False, True, True]
+
+
+If you change the historical cache size, that changes the size of the
+persistent cache on our connection.
+
+ >>> historical_conn._cache.cache_size
+ 2000
+ >>> db.setHistoricalCacheSize(1500)
+ >>> historical_conn._cache.cache_size
+ 1500
+
+Invalidations
+=============
+
+Invalidations are ignored for historical connections. This is another white box
+test.
+
+ >>> historical_conn = db.open(
+ ... 
transaction_manager=transaction1, at=serial) + >>> conn = db.open() + >>> sorted(conn.root().keys()) + ['first', 'second'] + >>> conn.root()['first']['count'] + 1 + >>> sorted(historical_conn.root().keys()) + ['first', 'second'] + >>> historical_conn.root()['first']['count'] + 1 + >>> conn.root()['first']['count'] += 1 + >>> conn.root()['third'] = persistent.mapping.PersistentMapping() + >>> transaction.commit() + >>> historical_conn.close() + +Note that if you try to open an historical connection to a time in the future, +you will get an error. + + >>> historical_conn = db.open( + ... at=utcnow()+datetime.timedelta(1)) + Traceback (most recent call last): + ... + ValueError: cannot open an historical connection in the future. + +Warnings +======== + +First, if you use datetimes to get a historical connection, be aware that the +conversion from datetime to transaction id has some pitfalls. Generally, the +transaction ids in the database are only as time-accurate as the system clock +was when the transaction id was created. Moreover, leap seconds are handled +somewhat naively in the ZODB (largely because they are handled naively in Unix/ +POSIX time) so any minute that contains a leap second may contain serials that +are a bit off. This is not generally a problem for the ZODB, because serials +are guaranteed to increase, but it does highlight the fact that serials are not +guaranteed to be accurately connected to time. Generally, they are about as +reliable as time.time. + +Second, historical connections currently introduce potentially wide variance in +memory requirements for the applications. Since you can open up many +connections to different serials, and each gets their own pool, you may collect +quite a few connections. For now, at least, if you use this feature you need to +be particularly careful of your memory usage. Get rid of pools when you know +you can, and reuse the exact same values for ``at`` or ``before`` when +possible. 
If historical connections are used for conflict resolution, these +connections will probably be temporary--not saved in a pool--so that the extra +memory usage would also be brief and unlikely to overlap. + + +.. cleanup + + >>> db.close() + >>> db2.close() + +.. restore time + + >>> time.time = real_time_time + >>> time.sleep = real_time_sleep + +.. ......... .. +.. Footnotes .. +.. ......... .. + +.. [#not_both] It is an error to try and pass both `at` and `before`. + + >>> historical_conn = db.open( + ... transaction_manager=transaction1, at=now, before=historical_serial) + Traceback (most recent call last): + ... + ValueError: can only pass zero or one of `at` and `before` diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/interfaces.py b/thesisenv/lib/python3.6/site-packages/ZODB/interfaces.py new file mode 100644 index 0000000..113e718 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/interfaces.py @@ -0,0 +1,1445 @@ +############################################################################## +# +# Copyright (c) Zope Corporation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## + +from zope.interface import Interface, Attribute + + +class IConnection(Interface): + """Connection to ZODB for loading and storing objects. + + The Connection object serves as a data manager. The root() method + on a Connection returns the root object for the database. This + object and all objects reachable from it are associated with the + Connection that loaded them. 
When a transaction commits, it uses + the Connection to store modified objects. + + Typical use of ZODB is for each thread to have its own + Connection and that no thread should have more than one Connection + to the same database. A thread is associated with a Connection by + loading objects from that Connection. Objects loaded by one + thread should not be used by another thread. + + A Connection can be frozen to a serial--a transaction id, a single point in + history-- when it is created. By default, a Connection is not associated + with a serial; it uses current data. A Connection frozen to a serial is + read-only. + + Each Connection provides an isolated, consistent view of the + database, by managing independent copies of objects in the + database. At transaction boundaries, these copies are updated to + reflect the current state of the database. + + You should not instantiate this class directly; instead call the + open() method of a DB instance. + + In many applications, root() is the only method of the Connection + that you will need to use. + + Synchronization + --------------- + + A Connection instance is not thread-safe. It is designed to + support a thread model where each thread has its own transaction. + If an application has more than one thread that uses the + connection or the transaction the connection is registered with, + the application should provide locking. + + The Connection manages movement of objects in and out of object + storage. + + TODO: We should document an intended API for using a Connection via + multiple threads. + + TODO: We should explain that the Connection has a cache and that + multiple calls to get() will return a reference to the same + object, provided that one of the earlier objects is still + referenced. Object identity is preserved within a connection, but + not across connections. + + TODO: Mention the database pool. 
+ + A database connection always presents a consistent view of the + objects in the database, although it may not always present the + most current revision of any particular object. Modifications + made by concurrent transactions are not visible until the next + transaction boundary (abort or commit). + + Two options affect consistency. By default, the mvcc and synch + options are enabled by default. + + If you pass mvcc=False to db.open(), the Connection will never read + non-current revisions of an object. Instead it will raise a + ReadConflictError to indicate that the current revision is + unavailable because it was written after the current transaction + began. + + The logic for handling modifications assumes that the thread that + opened a Connection (called db.open()) is the thread that will use + the Connection. If this is not true, you should pass synch=False + to db.open(). When the synch option is disabled, some transaction + boundaries will be missed by the Connection; in particular, if a + transaction does not involve any modifications to objects loaded + from the Connection and synch is disabled, the Connection will + miss the transaction boundary. Two examples of this behavior are + db.undo() and read-only transactions. + + Groups of methods: + + User Methods: + root, get, add, close, db, sync, isReadOnly, cacheGC, + cacheFullSweep, cacheMinimize + + Experimental Methods: + onCloseCallbacks + + Database Invalidation Methods: + invalidate + + Other Methods: exchange, getDebugInfo, setDebugInfo, + getTransferCounts + """ + + def add(ob): + """Add a new object 'obj' to the database and assign it an oid. + + A persistent object is normally added to the database and + assigned an oid when it becomes reachable to an object already in + the database. In some cases, it is useful to create a new + object and use its oid (_p_oid) in a single transaction. + + This method assigns a new oid regardless of whether the object + is reachable. 
+ + The object is added when the transaction commits. The object + must implement the IPersistent interface and must not + already be associated with a Connection. + + Parameters: + obj: a Persistent object + + Raises TypeError if obj is not a persistent object. + + Raises InvalidObjectReference if obj is already associated with another + connection. + + Raises ConnectionStateError if the connection is closed. + """ + + def get(oid): + """Return the persistent object with oid 'oid'. + + If the object was not in the cache and the object's class is + ghostable, then a ghost will be returned. If the object is + already in the cache, a reference to the cached object will be + returned. + + Applications seldom need to call this method, because objects + are loaded transparently during attribute lookup. + + Parameters: + oid: an object id + + Raises KeyError if oid does not exist. + + It is possible that an object does not exist as of the current + transaction, but existed in the past. It may even exist again in + the future, if the transaction that removed it is undone. + + Raises ConnectionStateError if the connection is closed. + """ + + def cacheMinimize(): + """Deactivate all unmodified objects in the cache. + + Call _p_deactivate() on each cached object, attempting to turn + it into a ghost. It is possible for individual objects to + remain active. + """ + + def cacheGC(): + """Reduce cache size to target size. + + Call _p_deactivate() on cached objects until the cache size + falls under the target size. + """ + + def onCloseCallback(f): + """Register a callable, f, to be called by close(). + + f will be called with no arguments before the Connection is closed. + + Parameters: + f: method that will be called on `close` + """ + + def close(): + """Close the Connection. + + When the Connection is closed, all callbacks registered by + onCloseCallback() are invoked and the cache is garbage collected. + + A closed Connection should not be used by client code. 
It can't load + or store objects. Objects in the cache are not freed, because + Connections are re-used and the cache is expected to be useful to the + next client. + """ + + def db(): + """Returns a handle to the database this connection belongs to.""" + + def isReadOnly(): + """Returns True if the storage for this connection is read only.""" + + def root(): + """Return the database root object. + + The root is a persistent.mapping.PersistentMapping. + """ + + # Multi-database support. + + connections = Attribute( + """A mapping from database name to a Connection to that database. + + In multi-database use, the Connections of all members of a database + collection share the same .connections object. + + In single-database use, of course this mapping contains a single + entry. + """) + + # TODO: should this accept all the arguments one may pass to DB.open()? + def get_connection(database_name): + """Return a Connection for the named database. + + This is intended to be called from an open Connection associated with + a multi-database. In that case, database_name must be the name of a + database within the database collection (probably the name of a + different database than is associated with the calling Connection + instance, but it's fine to use the name of the calling Connection + object's database). A Connection for the named database is + returned. If no connection to that database is already open, a new + Connection is opened. So long as the multi-database remains open, + passing the same name to get_connection() multiple times returns the + same Connection object each time. + """ + + def sync(): + """Manually update the view on the database. + + This includes aborting the current transaction, getting a fresh and + consistent view of the data (synchronizing with the storage if + possible) and calling cacheGC() for this connection. 
+ + This method was especially useful in ZODB 3.2 to better support + read-only connections that were affected by a couple of problems. + """ + + # Debug information + + def getDebugInfo(): + """Returns a tuple with different items for debugging the connection. + + Debug information can be added to a connection by using setDebugInfo. + """ + + def setDebugInfo(*items): + """Add the given items to the debug information of this connection.""" + + def getTransferCounts(clear=False): + """Returns the number of objects loaded and stored. + + If clear is True, reset the counters. + """ + + def readCurrent(obj): + """Make sure an object being read is current + + This is used when applications want to ensure a higher level + of consistency for some operations. This should be called when + an object is read and the information read is used to write a + separate object. + """ + +class IStorageWrapper(Interface): + """Storage wrapper interface + + This interface provides 3 facilities: + + - Out-of-band invalidation support + + A storage can notify it's wrapper of object invalidations that + don't occur due to direct operations on the storage. Currently + this is only used by ZEO client storages to pass invalidation + messages sent from a server. + + - Record-reference extraction + + The references method can be used to extract referenced object + IDs from a database record. This can be used by storages to + provide more advanced garbage collection. A wrapper storage + that transforms data will provide a references method that + untransforms data passed to it and then pass the data to the + layer above it. + + - Record transformation + + A storage wrapper may transform data, for example for + compression or encryption. Methods are provided to transform or + untransform data. + + This interface may be implemented by storage adapters or other + intermediaries. 
For example, a storage adapter that provides + encryption and/or compresssion will apply record transformations + in it's references method. + """ + + def invalidateCache(): + """Discard all cached data + + This can be necessary if there have been major changes to + stored data and it is either impractical to enumerate them or + there would be so many that it would be inefficient to do so. + """ + + def invalidate(transaction_id, oids): + """Invalidate object ids committed by the given transaction + + The oids argument is an iterable of object identifiers. + + The version argument is provided for backward + compatibility. If passed, it must be an empty string. + + """ + + def references(record, oids=None): + """Scan the given record for object ids + + A list of object ids is returned. If a list is passed in, + then it will be used and augmented. Otherwise, a new list will + be created and returned. + """ + + def transform_record_data(data): + """Return transformed data + """ + + def untransform_record_data(data): + """Return untransformed data + """ + +IStorageDB = IStorageWrapper # for backward compatibility + + +class IDatabase(IStorageDB): + """ZODB DB. + """ + + # TODO: This interface is incomplete. + # XXX how is it incomplete? + + databases = Attribute( + """A mapping from database name to DB (database) object. + + In multi-database use, all DB members of a database collection share + the same .databases object. + + In single-database use, of course this mapping contains a single + entry. + """) + + storage = Attribute( + """The object that provides storage for the database + + This attribute is useful primarily for tests. Normal + application code should rarely, if ever, have a need to use + this attribute. + """) + + + def open(transaction_manager=None, serial=''): + """Return an IConnection object for use by application code. + + transaction_manager: transaction manager to use. None means + use the default transaction manager. 
+ serial: the serial (transaction id) of the database to open. + An empty string (the default) means to open it to the newest + serial. Specifying a serial results in a read-only historical + connection. + + Note that the connection pool is managed as a stack, to + increase the likelihood that the connection's stack will + include useful objects. + """ + + def history(oid, size=1): + """Return a sequence of history information dictionaries. + + Up to size objects (including no objects) may be returned. + + The information provides a log of the changes made to the + object. Data are reported in reverse chronological order. + + Each dictionary has the following keys: + + time + UTC seconds since the epoch (as in time.time) that the + object revision was committed. + + tid + The transaction identifier of the transaction that + committed the version. + + user_name + The text (unicode) user identifier, if any (or an empty + string) of the user on whos behalf the revision was + committed. + + description + The text (unicode) transaction description for the + transaction that committed the revision. + + size + The size of the revision data record. + + If the transaction had extension items, then these items are + also included if they don't conflict with the keys above. + """ + + + def pack(t=None, days=0): + """Pack the storage, deleting unused object revisions. + + A pack is always performed relative to a particular time, by + default the current time. All object revisions that are not + reachable as of the pack time are deleted from the storage. + + The cost of this operation varies by storage, but it is + usually an expensive operation. + + There are two optional arguments that can be used to set the + pack time: t, pack time in seconds since the epcoh, and days, + the number of days to subtract from t or from the current + time if t is not specified. + """ + + def undoLog(first, last, filter=None): + """Return a sequence of descriptions for undoable transactions. 
+ + Application code should call undoLog() on a DB instance instead of on + the storage directly. + + A transaction description is a mapping with at least these keys: + + "time": The time, as float seconds since the epoch, when + the transaction committed. + "user_name": The text value of the `.user` attribute on that + transaction. + "description": The text value of the `.description` attribute on + that transaction. + "id`" A bytes uniquely identifying the transaction to the + storage. If it's desired to undo this transaction, + this is the `transaction_id` to pass to `undo()`. + + In addition, if any name+value pairs were added to the transaction + by `setExtendedInfo()`, those may be added to the transaction + description mapping too (for example, FileStorage's `undoLog()` does + this). + + `filter` is a callable, taking one argument. A transaction + description mapping is passed to `filter` for each potentially + undoable transaction. The sequence returned by `undoLog()` excludes + descriptions for which `filter` returns a false value. By default, + `filter` always returns a true value. + + ZEO note: Arbitrary callables cannot be passed from a ZEO client + to a ZEO server, and a ZEO client's implementation of `undoLog()` + ignores any `filter` argument that may be passed. ZEO clients + should use the related `undoInfo()` method instead (if they want + to do filtering). + + Now picture a list containing descriptions of all undoable + transactions that pass the filter, most recent transaction first (at + index 0). The `first` and `last` arguments specify the slice of this + (conceptual) list to be returned: + + `first`: This is the index of the first transaction description + in the slice. It must be >= 0. + `last`: If >= 0, first:last acts like a Python slice, selecting + the descriptions at indices `first`, first+1, ..., up to + but not including index `last`. 
At most last-first + descriptions are in the slice, and `last` should be at + least as large as `first` in this case. If `last` is + less than 0, then abs(last) is taken to be the maximum + number of descriptions in the slice (which still begins + at index `first`). When `last` < 0, the same effect + could be gotten by passing the positive first-last for + `last` instead. + """ + + def undoInfo(first=0, last=-20, specification=None): + """Return a sequence of descriptions for undoable transactions. + + This is like `undoLog()`, except for the `specification` argument. + If given, `specification` is a dictionary, and `undoInfo()` + synthesizes a `filter` function `f` for `undoLog()` such that + `f(desc)` returns true for a transaction description mapping + `desc` if and only if `desc` maps each key in `specification` to + the same value `specification` maps that key to. In other words, + only extensions (or supersets) of `specification` match. + + ZEO note: `undoInfo()` passes the `specification` argument from a + ZEO client to its ZEO server (while a ZEO client ignores any `filter` + argument passed to `undoLog()`). + """ + + def undo(id, txn=None): + """Undo a transaction identified by id. + + A transaction can be undone if all of the objects involved in + the transaction were not modified subsequently, if any + modifications can be resolved by conflict resolution, or if + subsequent changes resulted in the same object state. + + The value of id should be generated by calling undoLog() + or undoInfo(). The value of id is not the same as a + transaction id used by other methods; it is unique to undo(). + + id: a storage-specific transaction identifier + txn: transaction context to use for undo(). + By default, uses the current transaction. + """ + + def close(): + """Close the database and its underlying storage. + + It is important to close the database, because the storage may + flush in-memory data structures to disk when it is closed. 
+ Leaving the storage open with the process exits can cause the + next open to be slow. + + What effect does closing the database have on existing + connections? Technically, they remain open, but their storage + is closed, so they stop behaving usefully. Perhaps close() + should also close all the Connections. + """ + +class IStorageTransactionMetaData(Interface): + """Provide storage transaction meta data. + + Note that unlike transaction.interfaces.ITransaction, the ``user`` + and ``description`` attributes are bytes, not text. + """ + user = Attribute("Bytes transaction user") + description = Attribute("Bytes transaction Description") + extension = Attribute( + "A dictionary carrying a transaction's extended_info data") + + + def set_data(ob, data): + """Hold data on behalf of an object + + For objects such as storages that + work with multiple transactions, it's convenient to store + transaction-specific data on the transaction itself. The + transaction knows nothing about the data, but simply holds it + on behalf of the object. + + The object passed should be the object that needs the data, as + opposed to simple object like a string. (Internally, the id of + the object is used as the key.) + """ + + def data(ob): + """Retrieve data held on behalf of an object. + + See set_data. + """ + + +class IStorage(Interface): + """A storage is responsible for storing and retrieving data of objects. + + Consistency and locking + + When transactions are committed, a storage assigns monotonically + increasing transaction identifiers (tids) to the transactions and + to the object versions written by the transactions. ZODB relies + on this to decide if data in object caches are up to date and to + implement multi-version concurrency control. + + There are methods in IStorage and in derived interfaces that + provide information about the current revisions (tids) for objects + or for the database as a whole. 
It is critical for the proper
+ working of ZODB that the resulting tids are increasing with
+ respect to the object identifier given or to the databases. That
+ is, if there are 2 results for an object or for the database, R1
+ and R2, such that R1 is returned before R2, then the tid returned
+ by R2 must be greater than or equal to the tid returned by R1.
+ (When thinking about results for the database, think of these as
+ results for all objects in the database.)
+
+ This implies some sort of locking strategy. The key method is
+ tpc_finish, which causes new tids to be generated and also,
+ through the callback passed to it, returns new current tids for
+ the objects stored in a transaction and for the database as a whole.
+
+ The IStorage methods affected are lastTransaction, load, store,
+ and tpc_finish. Derived interfaces may introduce additional
+ methods.
+
+ """
+
+ def close():
+ """Close the storage.
+
+ Finalize the storage, releasing any external resources. The
+ storage should not be used after this method is called.
+
+ Note that databases close their storages when they're closed, so
+ this method isn't generally called from application code.
+ """
+
+ def getName():
+ """The name of the storage
+
+ The format and interpretation of this name is storage
+ dependent. It could be a file name, a database name, etc..
+
+ This is used solely for informational purposes.
+ """
+
+ def getSize():
+ """An approximate size of the database, in bytes.
+
+ This is used solely for informational purposes.
+ """
+
+ def history(oid, size=1):
+ """Return a sequence of history information dictionaries.
+
+ Up to size objects (including no objects) may be returned.
+
+ The information provides a log of the changes made to the
+ object. Data are reported in reverse chronological order.
+
+ Each dictionary has the following keys:
+
+ time
+ UTC seconds since the epoch (as in time.time) that the
+ object revision was committed. 
+
+ tid
+ The transaction identifier of the transaction that
+ committed the version.
+
+ serial
+ An alias for tid, which is expected by older clients.
+
+ user_name
+ The bytes user identifier, if any (or an empty string) of the
+ user on whose behalf the revision was committed.
+
+ description
+ The bytes transaction description for the transaction that
+ committed the revision.
+
+ size
+ The size of the revision data record.
+
+ If the transaction had extension items, then these items are
+ also included if they don't conflict with the keys above.
+
+ """
+
+ def isReadOnly():
+ """Test whether a storage allows committing new transactions
+
+ For a given storage instance, this method always returns the
+ same value. Read-only-ness is a static property of a storage.
+ """
+
+ # XXX Note that this method doesn't really buy us much,
+ # especially since we have to account for the fact that an
+ # ostensibly non-read-only storage may be read-only
+ # transiently. It would be better to just have read-only errors.
+
+ def lastTransaction():
+ """Return the id of the last committed transaction.
+
+ If no transactions have been committed, return a string of 8
+ null (0) characters.
+ """
+
+ def __len__():
+ """The approximate number of objects in the storage
+
+ This is used solely for informational purposes.
+ """
+
+ def loadBefore(oid, tid):
+ """Load the object data written before a transaction id
+
+ If there isn't data for the object before the given
+ transaction, then None is returned, otherwise three values are
+ returned:
+
+ - The data record
+
+ - The transaction id of the data record
+
+ - The transaction id of the following revision, if any, or None.
+
+ If the object id isn't in the storage, then POSKeyError is raised.
+ """
+
+ def loadSerial(oid, serial):
+ """Load the object record for the given transaction id
+
+ If a matching data record can be found, it is returned,
+ otherwise, POSKeyError is raised. 
+ """ + + def new_oid(): + """Allocate a new object id. + + The object id returned is reserved at least as long as the + storage is opened. + + The return value is a string. + """ + + def pack(pack_time, referencesf): + """Pack the storage + + It is up to the storage to interpret this call, however, the + general idea is that the storage free space by: + + - discarding object revisions that were old and not current as of the + given pack time. + + - garbage collecting objects that aren't reachable from the + root object via revisions remaining after discarding + revisions that were not current as of the pack time. + + The pack time is given as a UTC time in seconds since the + epoch. + + The second argument is a function that should be used to + extract object references from database records. This is + needed to determine which objects are referenced from object + revisions. + """ + + def registerDB(wrapper): + """Register a storage wrapper IStorageWrapper. + + The passed object is a wrapper object that provides an upcall + interface to support composition. + + Note that, for historical reasons, this is called registerDB rather + than register_wrapper. + """ + + def sortKey(): + """Sort key used to order distributed transactions + + When a transaction involved multiple storages, 2-phase commit + operations are applied in sort-key order. This must be unique + among storages used in a transaction. Obviously, the storage + can't assure this, but it should construct the sort key so it + has a reasonable chance of being unique. + + The result must be a string. + """ + + def store(oid, serial, data, version, transaction): + """Store data for the object id, oid. + + Arguments: + + oid + The object identifier. This is either a string + consisting of 8 nulls or a string previously returned by + new_oid. + + serial + The serial of the data that was read when the object was + loaded from the database. 
If the object was created in
+ the current transaction this will be a string consisting
+ of 8 nulls.
+
+ data
+ The data record. This is opaque to the storage.
+
+ version
+ This must be an empty string. It exists for backward compatibility.
+
+ transaction
+ The object passed to tpc_begin
+
+ Several different exceptions may be raised when an error occurs.
+
+ ConflictError
+ is raised when serial does not match the most recent serial
+ number for object oid and the conflict was not resolved by
+ the storage.
+
+ StorageTransactionError
+ is raised when transaction does not match the current
+ transaction.
+
+ StorageError or, more often, a subclass of it
+ is raised when an internal error occurs while the storage is
+ handling the store() call.
+
+ """
+
+ def tpc_abort(transaction):
+ """Abort the transaction.
+
+ The argument is the same object passed to tpc_begin.
+
+ Any changes made by the transaction are discarded.
+
+ This call is ignored if the storage is not participating in
+ two-phase commit or if the given transaction is not the same
+ as the transaction the storage is committing.
+ """
+
+ def tpc_begin(transaction):
+ """Begin the two-phase commit process.
+
+ The argument provides IStorageTransactionMetaData.
+
+ If storage is already participating in a two-phase commit
+ using the same transaction, a StorageTransactionError is raised.
+
+ If the storage is already participating in a two-phase commit
+ using a different transaction, the call blocks until the
+ current transaction ends (commits or aborts).
+ """
+
+ def tpc_finish(transaction, func = lambda tid: None):
+ """Finish the transaction, making any transaction changes permanent.
+
+ Changes must be made permanent at this point.
+
+ This call raises a StorageTransactionError if the storage
+ isn't participating in two-phase commit or if it is committing
+ a different transaction. Failure of this method is extremely
+ serious.
+
+ The first argument is the same object passed to tpc_begin. 
+ + The second argument is a call-back function that must be + called while the storage transaction lock is held. It takes + the new transaction id generated by the transaction. + + The return value may be None or the transaction id of the + committed transaction, as described in IMultiCommitStorage. + """ + + def tpc_vote(transaction): + """Provide a storage with an opportunity to veto a transaction + + The argument is the same object passed to tpc_begin. + + This call raises a StorageTransactionError if the storage + isn't participating in two-phase commit or if it is commiting + a different transaction. + + If a transaction can be committed by a storage, then the + method should return. If a transaction cannot be committed, + then an exception should be raised. If this method returns + without an error, then there must not be an error if + tpc_finish or tpc_abort is called subsequently. + + The return value can be None or a sequence of a sequence of object ids, + as described in IMultiCommitStorage.tpc_vote. + """ + + +class IPrefetchStorage(IStorage): + + def prefetch(oids, tid): + """Prefetch data for the given object ids before the given tid + + The oids argument is an iterable that should be iterated no + more than once. + """ + + +class IMultiCommitStorage(IStorage): + """A multi-commit storage can commit multiple transactions at once. + + It's likely that future versions of ZODB will require all storages + to provide this interface. + """ + + def store(oid, serial, data, version, transaction): + """Store data for the object id, oid. + + See IStorage.store. For objects implementing this interface, + the return value is always None. + """ + + def tpc_finish(transaction, func = lambda tid: None): + """Finish the transaction, making any transaction changes permanent. + + See IStorage.store. For objects implementing this interface, + the return value must be the committed tid. 
It is used to set the + serial for objects whose ids were passed to previous store calls + in the same transaction. + """ + + def tpc_vote(transaction): + """Provide a storage with an opportunity to veto a transaction + + See IStorage.tpc_vote. For objects implementing this interface, + the return value can be either None or a sequence of oids for which + a conflict was resolved. + """ + + +class IStorageRestoreable(IStorage): + """Copying Transactions + + The IStorageRestoreable interface supports copying + already-committed transactions from one storage to another. This + is typically done for replication or for moving data from one + storage implementation to another. + """ + + def tpc_begin(transaction, tid=None): + """Begin the two-phase commit process. + + If storage is already participating in a two-phase commit + using the same transaction, the call is ignored. + + If the storage is already participating in a two-phase commit + using a different transaction, the call blocks until the + current transaction ends (commits or aborts). + + The first argument provides IStorageTransactionMetaData. + + If a transaction id is given, then the transaction will use + the given id rather than generating a new id. This is used + when copying already committed transactions from another + storage. + """ + + # Note that the current implementation also accepts a status. + # This is an artifact of: + # - Earlier use of an undo status to undo revisions in place, + # and, + # - Incorrect pack garbage-collection algorithms (possibly + # including the existing FileStorage implementation), that + # failed to take into account records after the pack time. + + + def restore(oid, serial, data, version, prev_txn, transaction): + """Write data already committed in a separate database + + The restore method is used when copying data from one database + to a replica of the database. 
It differs from store in that + the data have already been committed, so there is no check for + conflicts and no new transaction is is used for the data. + + Arguments: + + oid + The object id for the record + + serial + The transaction identifier that originally committed this object. + + data + The record data. This will be None if the transaction + undid the creation of the object. + + prev_txn + The identifier of a previous transaction that held the + object data. The target storage can sometimes use this + as a hint to save space. + + transaction + The current transaction. + + Nothing is returned. + """ + + +class IStorageRecordInformation(Interface): + """Provide information about a single storage record + """ + + oid = Attribute("The object id, bytes") + tid = Attribute("The transaction id, bytes") + data = Attribute("The data record, bytes") + data_txn = Attribute("The previous transaction id, bytes") + +class IStorageTransactionInformation(IStorageTransactionMetaData): + """Provide information about a storage transaction. + + Can be iterated over to retrieve the records modified in the transaction. + + Note that this may contain a status field used by FileStorage to + support packing. At some point, this will go away when FileStorage + has a better pack algoritm. + """ + + tid = Attribute("Transaction id") + + def __iter__(): + """Iterate over the transaction's records given as + IStorageRecordInformation objects. + + """ + + +class IStorageIteration(Interface): + """API for iterating over the contents of a storage.""" + + def iterator(start=None, stop=None): + """Return an IStorageTransactionInformation iterator. + + If the start argument is not None, then iteration will start + with the first transaction whose identifier is greater than or + equal to start. + + If the stop argument is not None, then iteration will end with + the last transaction whose identifier is less than or equal to + stop. 
+ + The iterator provides access to the data as available at the time when + the iterator was retrieved. + + """ + +class IStorageUndoable(IStorage): + """A storage supporting transactional undo. + """ + + def supportsUndo(): + """Return True, indicating that the storage supports undo. + """ + + def undo(transaction_id, transaction): + """Undo the transaction corresponding to the given transaction id. + + The transaction id is a value returned from undoInfo or + undoLog, which may not be a stored transaction identifier as + used elsewhere in the storage APIs. + + This method must only be called in the first phase of + two-phase commit (after tpc_begin but before tpc_vote). It + returns a serial (transaction id) and a sequence of object ids + for objects affected by the transaction. The serial is ignored + and may be None. The return from this method may be None. + """ + # Used by DB (Actually, by TransactionalUndo) + + def undoLog(first, last, filter=None): + """Return a sequence of descriptions for undoable transactions. + + Application code should call undoLog() on a DB instance instead of on + the storage directly. + + A transaction description is a mapping with at least these keys: + + "time": The time, as float seconds since the epoch, when + the transaction committed. + "user_name": The bytes value of the `.user` attribute on that + transaction. + "description": The bytes value of the `.description` attribute on + that transaction. + "id`" A bytes uniquely identifying the transaction to the + storage. If it's desired to undo this transaction, + this is the `transaction_id` to pass to `undo()`. + + In addition, if any name+value pairs were added to the transaction + by `setExtendedInfo()`, those may be added to the transaction + description mapping too (for example, FileStorage's `undoLog()` does + this). + + `filter` is a callable, taking one argument. A transaction + description mapping is passed to `filter` for each potentially + undoable transaction. 
The sequence returned by `undoLog()` excludes + descriptions for which `filter` returns a false value. By default, + `filter` always returns a true value. + + ZEO note: Arbitrary callables cannot be passed from a ZEO client + to a ZEO server, and a ZEO client's implementation of `undoLog()` + ignores any `filter` argument that may be passed. ZEO clients + should use the related `undoInfo()` method instead (if they want + to do filtering). + + Now picture a list containing descriptions of all undoable + transactions that pass the filter, most recent transaction first (at + index 0). The `first` and `last` arguments specify the slice of this + (conceptual) list to be returned: + + `first`: This is the index of the first transaction description + in the slice. It must be >= 0. + `last`: If >= 0, first:last acts like a Python slice, selecting + the descriptions at indices `first`, first+1, ..., up to + but not including index `last`. At most last-first + descriptions are in the slice, and `last` should be at + least as large as `first` in this case. If `last` is + less than 0, then abs(last) is taken to be the maximum + number of descriptions in the slice (which still begins + at index `first`). When `last` < 0, the same effect + could be gotten by passing the positive first-last for + `last` instead. + """ + + def undoInfo(first=0, last=-20, specification=None): + """Return a sequence of descriptions for undoable transactions. + + This is like `undoLog()`, except for the `specification` argument. + If given, `specification` is a dictionary, and `undoInfo()` + synthesizes a `filter` function `f` for `undoLog()` such that + `f(desc)` returns true for a transaction description mapping + `desc` if and only if `desc` maps each key in `specification` to + the same value `specification` maps that key to. In other words, + only extensions (or supersets) of `specification` match. 
+ + ZEO note: `undoInfo()` passes the `specification` argument from a + ZEO client to its ZEO server (while a ZEO client ignores any `filter` + argument passed to `undoLog()`). + """ + + +class IMVCCStorage(IStorage): + """A storage that provides MVCC semantics internally. + + MVCC (multi-version concurrency control) means each user of a + database has a snapshot view of the database. The snapshot view + does not change, even if concurrent connections commit + transactions, until a transaction boundary. Relational databases + that support serializable transaction isolation provide MVCC. + + Storages that implement IMVCCStorage, such as RelStorage, provide + MVCC semantics at the ZODB storage layer. When ZODB.Connection uses + a storage that implements IMVCCStorage, each connection uses a + connection-specific storage instance, and that storage instance + provides a snapshot of the database. + + By contrast, storages that do not implement IMVCCStorage, such as + FileStorage, rely on ZODB.Connection to provide MVCC semantics, so + in that case, one storage instance is shared by many + ZODB.Connections. Applications that use ZODB.Connection always have + a snapshot view of the database; IMVCCStorage only modifies which + layer of ZODB provides MVCC. + + Furthermore, IMVCCStorage changes the way object invalidation + works. An essential feature of ZODB is the propagation of object + invalidation messages to keep in-memory caches up to date. Storages + like FileStorage and ZEO.ClientStorage send invalidation messages + to all other Connection instances at transaction commit time. + Storages that implement IMVCCStorage, on the other hand, expect the + ZODB.Connection to poll for a list of invalidated objects. 
+ + Certain methods of IMVCCStorage implementations open persistent + back end database sessions and retain the sessions even after the + method call finishes:: + + load + loadEx + loadSerial + loadBefore + store + restore + new_oid + history + tpc_begin + tpc_vote + tpc_abort + tpc_finish + + If you know that the storage instance will no longer be used after + calling any of these methods, you should call the release method to + release the persistent sessions. The persistent sessions will be + reopened as necessary if you call one of those methods again. + + Other storage methods open short lived back end sessions and close + the back end sessions before returning. These include:: + + __len__ + getSize + undoLog + undo + pack + iterator + + These methods do not provide MVCC semantics, so these methods + operate on the most current view of the database, rather than the + snapshot view that the other methods use. + """ + + def new_instance(): + """Creates and returns another storage instance. + + The returned instance provides IMVCCStorage and connects to the + same back-end database. The database state visible by the + instance will be a snapshot that varies independently of other + storage instances. + """ + + def release(): + """Release resources held by the storage instance. + + The storage instance won't be used again after this call. + """ + + def poll_invalidations(): + """Poll the storage for external changes. + + Returns either a sequence of OIDs that have changed, or None. When a + sequence is returned, the corresponding objects should be removed + from the ZODB in-memory cache. When None is returned, the storage is + indicating that so much time has elapsed since the last poll that it + is no longer possible to enumerate all of the changed OIDs, since the + previous transaction seen by the connection has already been packed. + In that case, the ZODB in-memory cache should be cleared. 
+ """ + + def sync(force=True): + """Updates the internal snapshot to the current state of the database. + + If the force parameter is False, the storage may choose to + ignore this call. By ignoring this call, a storage can reduce + the frequency of database polls, thus reducing database load. + """ + + def load(oid): + """Load current data for an object id + + A data record and serial are returned. The serial is a + transaction identifier of the transaction that wrote the data + record. + + A POSKeyError is raised if there is no record for the object id. + """ + +class IMVCCPrefetchStorage(IMVCCStorage): + + def prefetch(oids): + """Prefetch data for the given object ids + + The oids argument is an iterable that should be iterated no + more than once. + """ + +class IMVCCAfterCompletionStorage(IMVCCStorage): + + def afterCompletion(): + """Notify a storage that a transaction has ended. + + The storage may choose to use this opportunity to release resources. + + See ``transaction.interfaces.ISynchronizer.afterCompletion``. + """ + +class IStorageCurrentRecordIteration(IStorage): + + def record_iternext(next=None): + """Iterate over the records in a storage + + Use like this: + + >>> next = None + >>> while 1: + ... oid, tid, data, next = storage.record_iternext(next) + ... # do things with oid, tid, and data + ... if next is None: + ... break + + """ + +class IExternalGC(IStorage): + + def deleteObject(oid, serial, transaction): + """Mark an object as deleted + + This method marks an object as deleted via a new object + revision. Subsequent attempts to load current data for the + object will fail with a POSKeyError, but loads for + non-current data will suceed if there are previous + non-delete records. The object will be removed from the + storage when all not-delete records are removed. + + The serial argument must match the most recently committed + serial for the object. This is a seat belt. 
+ + This method can only be called in the first phase of 2-phase + commit. + """ + +class ReadVerifyingStorage(IStorage): + + def checkCurrentSerialInTransaction(oid, serial, transaction): + """Check whether the given serial number is current. + + The method is called during the first phase of 2-phase commit + to verify that data read in a transaction is current. + + The storage should raise a ReadConflictError if the serial is not + current, although it may raise the exception later, in a call + to store or in a call to tpc_vote. + + If no exception is raised, then the serial must remain current + through the end of the transaction. + """ + +class IBlob(Interface): + """A BLOB supports efficient handling of large data within ZODB.""" + + def open(mode): + """Open a blob + + Returns a file(-like) object for handling the blob data. + + mode: Mode to open the file with. Possible values: r,w,r+,a,c + + The mode 'c' is similar to 'r', except that an orinary file + object is returned and may be used in a separate transaction + and after the blob's database connection has been closed. + + """ + + def committed(): + """Return a file name for committed data. + + The returned file name may be opened for reading or handed to + other processes for reading. The file name isn't guarenteed + to be valid indefinately. The file may be removed in the + future as a result of garbage collection depending on system + configuration. + + A BlobError will be raised if the blob has any uncommitted data. + """ + + def consumeFile(filename): + """Consume a file. + + Replace the current data of the blob with the file given under + filename. + + The blob must not be opened for reading or writing when consuming a + file. + + The blob will take over ownership of the file and will either + rename or copy and remove it. The file must not be open. 
+ + """ + + +class IBlobStorage(Interface): + """A storage supporting BLOBs.""" + + def storeBlob(oid, oldserial, data, blobfilename, version, transaction): + """Stores data that has a BLOB attached. + + The blobfilename argument names a file containing blob data. + The storage will take ownership of the file and will rename it + (or copy and remove it) immediately, or at transaction-commit + time. The file must not be open. + + Several different exceptions may be raised when an error occurs. + + ConflictError + is raised when serial does not match the most recent serial + number for object oid and the conflict was not resolved by + the storage. + + StorageTransactionError + is raised when transaction does not match the current + transaction. + + StorageError or, more often, a subclass of it + is raised when an internal error occurs while the storage is + handling the store() call. + + """ + + def loadBlob(oid, serial): + """Return the filename of the Blob data for this OID and serial. + + Returns a filename. + + Raises POSKeyError if the blobfile cannot be found. + """ + + def openCommittedBlobFile(oid, serial, blob=None): + """Return a file for committed data for the given object id and serial + + If a blob is provided, then a BlobFile object is returned, + otherwise, an ordinary file is returned. In either case, the + file is opened for binary reading. + + This method is used to allow storages that cache blob data to + make sure that data are available at least long enough for the + file to be opened. + """ + + def temporaryDirectory(): + """Return a directory that should be used for uncommitted blob data. + + If Blobs use this, then commits can be performed with a simple rename. + """ + +class IBlobStorageRestoreable(IBlobStorage, IStorageRestoreable): + + def restoreBlob(oid, serial, data, blobfilename, prev_txn, transaction): + """Write blob data already committed in a separate database + + See the restore and storeBlob methods. 
+ """ + + +class IBroken(Interface): + """Broken objects are placeholders for objects that can no longer be + created because their class has gone away. + + They cannot be modified, but they retain their state. This allows them to + be rebuild should the missing class be found again. + + A broken object's __class__ can be used to determine the original + class' name (__name__) and module (__module__). + + The original object's state and initialization arguments are + available in broken object attributes to aid analysis and + reconstruction. + + """ + + def __setattr__(name, value): + """You cannot modify broken objects. This will raise a + ZODB.broken.BrokenModified exception. + """ + + __Broken_newargs__ = Attribute("Arguments passed to __new__.") + __Broken_initargs__ = Attribute("Arguments passed to __init__.") + __Broken_state__ = Attribute("Value passed to __setstate__.") + +class BlobError(Exception): + pass + + +class StorageStopIteration(IndexError, StopIteration): + """A combination of StopIteration and IndexError to provide a + backwards-compatible exception. + """ diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/loglevels.py b/thesisenv/lib/python3.6/site-packages/ZODB/loglevels.py new file mode 100644 index 0000000..3804ca9 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/loglevels.py @@ -0,0 +1,47 @@ +############################################################################## +# +# Copyright (c) 2004 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## +"""Supplies custom logging levels BLATHER and TRACE. + +$Revision: 1.1 $ +""" + +import logging + +__all__ = ["BLATHER", "TRACE"] + +# In the days of zLOG, there were 7 standard log levels, and ZODB/ZEO used +# all of them. Here's how they map to the logging package's 5 standard +# levels: +# +# zLOG logging +# ------------- --------------- +# PANIC (300) FATAL, CRITICAL (50) +# ERROR (200) ERROR (40) +# WARNING, PROBLEM (100) WARN (30) +# INFO (0) INFO (20) +# BLATHER (-100) none -- defined here as BLATHER (15) +# DEBUG (-200) DEBUG (10) +# TRACE (-300) none -- defined here as TRACE (5) +# +# TRACE is used by ZEO for extremely verbose trace output, enabled only +# when chasing bottom-level communications bugs. It really should be at +# a lower level than DEBUG. +# +# BLATHER is a harder call, and various instances could probably be folded +# into INFO or DEBUG without real harm. + +BLATHER = 15 +TRACE = 5 +logging.addLevelName(BLATHER, "BLATHER") +logging.addLevelName(TRACE, "TRACE") diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/mvccadapter.py b/thesisenv/lib/python3.6/site-packages/ZODB/mvccadapter.py new file mode 100644 index 0000000..121e579 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/mvccadapter.py @@ -0,0 +1,266 @@ +"""Adapt IStorage objects to IMVCCStorage + +This is a largely internal implementation of ZODB, especially DB and +Connection. It takes the MVCC implementation involving invalidations +and start time and moves it into a storage adapter. This allows ZODB +to treat Relstoage and other storages in pretty much the same way and +also simplifies the implementation of the DB and Connection classes. 
+""" +import zope.interface + +from . import interfaces, serialize, POSException +from .utils import p64, u64, Lock + +class Base(object): + + _copy_methods = ( + 'getName', 'getSize', 'history', 'lastTransaction', 'sortKey', + 'loadBlob', 'openCommittedBlobFile', + 'isReadOnly', 'supportsUndo', 'undoLog', 'undoInfo', + 'temporaryDirectory', + ) + + def __init__(self, storage): + self._storage = storage + if interfaces.IBlobStorage.providedBy(storage): + zope.interface.alsoProvides(self, interfaces.IBlobStorage) + + def __getattr__(self, name): + if name in self._copy_methods: + m = getattr(self._storage, name) + setattr(self, name, m) + return m + + raise AttributeError(name) + + def __len__(self): + return len(self._storage) + +class MVCCAdapter(Base): + + def __init__(self, storage): + Base.__init__(self, storage) + self._instances = set() + self._lock = Lock() + if hasattr(storage, 'registerDB'): + storage.registerDB(self) + + def new_instance(self): + instance = MVCCAdapterInstance(self) + with self._lock: + self._instances.add(instance) + return instance + + def before_instance(self, before=None): + return HistoricalStorageAdapter(self._storage, before) + + def undo_instance(self): + return UndoAdapterInstance(self) + + def _release(self, instance): + with self._lock: + self._instances.remove(instance) + + closed = False + def close(self): + if not self.closed: + self.closed = True + self._storage.close() + del self._instances + del self._storage + + def invalidateCache(self): + with self._lock: + for instance in self._instances: + instance._invalidateCache() + + def invalidate(self, transaction_id, oids): + with self._lock: + for instance in self._instances: + instance._invalidate(oids) + + def _invalidate_finish(self, oids, committing_instance): + with self._lock: + for instance in self._instances: + if instance is not committing_instance: + instance._invalidate(oids) + + references = serialize.referencesf + transform_record_data = untransform_record_data = 
lambda self, data: data + + def pack(self, pack_time, referencesf): + return self._storage.pack(pack_time, referencesf) + +class MVCCAdapterInstance(Base): + + _copy_methods = Base._copy_methods + ( + 'loadSerial', 'new_oid', 'tpc_vote', + 'checkCurrentSerialInTransaction', 'tpc_abort', + ) + + def __init__(self, base): + self._base = base + Base.__init__(self, base._storage) + self._lock = Lock() + self._invalidations = set() + self._start = None # Transaction start time + self._sync = getattr(self._storage, 'sync', lambda : None) + + def release(self): + self._base._release(self) + + close = release + + def _invalidateCache(self): + with self._lock: + self._invalidations = None + + def _invalidate(self, oids): + with self._lock: + try: + self._invalidations.update(oids) + except AttributeError: + if self._invalidations is not None: + raise + + def sync(self, force=True): + if force: + self._sync() + + def poll_invalidations(self): + self._start = p64(u64(self._storage.lastTransaction()) + 1) + with self._lock: + if self._invalidations is None: + self._invalidations = set() + return None + else: + result = list(self._invalidations) + self._invalidations.clear() + return result + + def load(self, oid): + assert self._start is not None + r = self._storage.loadBefore(oid, self._start) + if r is None: + raise POSException.ReadConflictError(repr(oid)) + return r[:2] + + def prefetch(self, oids): + try: + self._storage.prefetch(oids, self._start) + except AttributeError: + if not hasattr(self._storage, 'prefetch'): + self.prefetch = lambda *a: None + else: + raise + + _modified = None # Used to keep track of oids modified within a + # transaction, so we can invalidate them later. 
+ + def tpc_begin(self, transaction): + self._storage.tpc_begin(transaction) + self._modified = set() + + def store(self, oid, serial, data, version, transaction): + self._storage.store(oid, serial, data, version, transaction) + self._modified.add(oid) + + def storeBlob(self, oid, serial, data, blobfilename, version, transaction): + self._storage.storeBlob( + oid, serial, data, blobfilename, '', transaction) + self._modified.add(oid) + + def tpc_finish(self, transaction, func = lambda tid: None): + modified = self._modified + self._modified = None + + def invalidate_finish(tid): + self._base._invalidate_finish(modified, self) + func(tid) + + return self._storage.tpc_finish(transaction, invalidate_finish) + +def read_only_writer(self, *a, **kw): + raise POSException.ReadOnlyError + +class HistoricalStorageAdapter(Base): + """Adapt a storage to a historical storage + """ + + _copy_methods = Base._copy_methods + ( + 'loadSerial', 'tpc_begin', 'tpc_finish', 'tpc_abort', 'tpc_vote', + 'checkCurrentSerialInTransaction', + ) + + def __init__(self, storage, before=None): + Base.__init__(self, storage) + self._before = before + + def isReadOnly(self): + return True + + def supportsUndo(self): + return False + + def release(self): + try: + release = self._storage.release + except AttributeError: + pass + else: + release() + + close = release + + def sync(self, force=True): + pass + + def poll_invalidations(self): + return [] + + new_oid = pack = store = read_only_writer + + def load(self, oid, version=''): + r = self._storage.loadBefore(oid, self._before) + if r is None: + raise POSException.POSKeyError(oid) + return r[:2] + + +class UndoAdapterInstance(Base): + + _copy_methods = Base._copy_methods + ( + 'tpc_abort', + ) + + def __init__(self, base): + self._base = base + Base.__init__(self, base._storage) + + def release(self): + pass + + close = release + + def tpc_begin(self, transaction): + self._storage.tpc_begin(transaction) + self._undone = set() + + def undo(self, 
transaction_id, transaction): + result = self._storage.undo(transaction_id, transaction) + if result: + self._undone.update(result[1]) + return result + + def tpc_vote(self, transaction): + result = self._storage.tpc_vote(transaction) + if result: + self._undone.update(result) + + def tpc_finish(self, transaction, func = lambda tid: None): + + def invalidate_finish(tid): + self._base._invalidate_finish(self._undone, None) + func(tid) + + self._storage.tpc_finish(transaction, invalidate_finish) diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/persistentclass.py b/thesisenv/lib/python3.6/site-packages/ZODB/persistentclass.py new file mode 100644 index 0000000..44ec1fb --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/persistentclass.py @@ -0,0 +1,227 @@ +############################################################################## +# +# Copyright (c) 2004 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Persistent Class Support + +$Id$ +""" + + +# Notes: +# +# Persistent classes are non-ghostable. This has some interesting +# ramifications: +# +# - When an object is invalidated, it must reload its state +# +# - When an object is loaded from the database, its state must be +# loaded. Unfortunately, there isn't a clear signal when an object is +# loaded from the database. This should probably be fixed. +# +# In the mean time, we need to infer. This should be viewed as a +# short term hack. 
+# +# Here's the strategy we'll use: +# +# - We'll have a need to be loaded flag that we'll set in +# __new__, through an extra argument. +# +# - When setting _p_oid and _p_jar, if both are set and we need to be +# loaded, then we'll load out state. +# +# - We'll use _p_changed is None to indicate that we're in this state. +# + +class _p_DataDescr(object): + # Descr used as base for _p_ data. Data are stored in + # _p_class_dict. + + def __init__(self, name): + self.__name__ = name + + def __get__(self, inst, cls): + if inst is None: + return self + + if '__global_persistent_class_not_stored_in_DB__' in inst.__dict__: + raise AttributeError(self.__name__) + return inst._p_class_dict.get(self.__name__) + + def __set__(self, inst, v): + inst._p_class_dict[self.__name__] = v + + def __delete__(self, inst): + raise AttributeError(self.__name__) + +class _p_oid_or_jar_Descr(_p_DataDescr): + # Special descr for _p_oid and _p_jar that loads + # state when set if both are set and _p_changed is None + # + # See notes above + + def __set__(self, inst, v): + get = inst._p_class_dict.get + if v == get(self.__name__): + return + + inst._p_class_dict[self.__name__] = v + + jar = get('_p_jar') + if (jar is not None + and get('_p_oid') is not None + and get('_p_changed') is None + ): + jar.setstate(inst) + +class _p_ChangedDescr(object): + # descriptor to handle special weird semantics of _p_changed + + def __get__(self, inst, cls): + if inst is None: + return self + return inst._p_class_dict['_p_changed'] + + def __set__(self, inst, v): + if v is None: + return + inst._p_class_dict['_p_changed'] = bool(v) + + def __delete__(self, inst): + inst._p_invalidate() + +class _p_MethodDescr(object): + """Provide unassignable class attributes + """ + + def __init__(self, func): + self.func = func + + def __get__(self, inst, cls): + if inst is None: + return cls + return self.func.__get__(inst, cls) + + def __set__(self, inst, v): + raise AttributeError(self.__name__) + + def 
__delete__(self, inst): + raise AttributeError(self.__name__) + + +special_class_descrs = '__dict__', '__weakref__' + +class PersistentMetaClass(type): + + _p_jar = _p_oid_or_jar_Descr('_p_jar') + _p_oid = _p_oid_or_jar_Descr('_p_oid') + _p_changed = _p_ChangedDescr() + _p_serial = _p_DataDescr('_p_serial') + + def __new__(self, name, bases, cdict, _p_changed=False): + cdict = dict([(k, v) for (k, v) in cdict.items() + if not k.startswith('_p_')]) + cdict['_p_class_dict'] = {'_p_changed': _p_changed} + return super(PersistentMetaClass, self).__new__( + self, name, bases, cdict) + + def __getnewargs__(self): + return self.__name__, self.__bases__, {}, None + + __getnewargs__ = _p_MethodDescr(__getnewargs__) + + def _p_maybeupdate(self, name): + get = self._p_class_dict.get + data_manager = get('_p_jar') + + if ( + (data_manager is not None) + and + (get('_p_oid') is not None) + and + (get('_p_changed') == False) + ): + + self._p_changed = True + data_manager.register(self) + + def __setattr__(self, name, v): + if not ((name.startswith('_p_') or name.startswith('_v'))): + self._p_maybeupdate(name) + super(PersistentMetaClass, self).__setattr__(name, v) + + def __delattr__(self, name): + if not ((name.startswith('_p_') or name.startswith('_v'))): + self._p_maybeupdate(name) + super(PersistentMetaClass, self).__delattr__(name) + + def _p_deactivate(self): + # persistent classes can't be ghosts + pass + + _p_deactivate = _p_MethodDescr(_p_deactivate) + + def _p_invalidate(self): + # reset state + self._p_class_dict['_p_changed'] = None + self._p_jar.setstate(self) + + _p_invalidate = _p_MethodDescr(_p_invalidate) + + + def __getstate__(self): + return (self.__bases__, + dict([(k, v) for (k, v) in self.__dict__.items() + if not (k.startswith('_p_') + or k.startswith('_v_') + or k in special_class_descrs + ) + ]), + ) + + __getstate__ = _p_MethodDescr(__getstate__) + + def __setstate__(self, state): + bases, cdict = state + if self.__bases__ != bases: + # __getnewargs__ 
should've taken care of that + raise AssertionError(self.__bases__, '!=', bases) + cdict = dict([(k, v) for (k, v) in cdict.items() + if not k.startswith('_p_')]) + + _p_class_dict = self._p_class_dict + self._p_class_dict = {} + + to_remove = [k for k in self.__dict__ + if ((k not in cdict) + and + (k not in special_class_descrs) + and + (k != '_p_class_dict') + )] + + for k in to_remove: + delattr(self, k) + + for k, v in cdict.items(): + setattr(self, k, v) + + self._p_class_dict = _p_class_dict + + self._p_changed = False + + __setstate__ = _p_MethodDescr(__setstate__) + + def _p_activate(self): + self._p_jar.setstate(self) + + _p_activate = _p_MethodDescr(_p_activate) diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/persistentclass.txt b/thesisenv/lib/python3.6/site-packages/ZODB/persistentclass.txt new file mode 100644 index 0000000..600d3b2 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/persistentclass.txt @@ -0,0 +1,291 @@ +================== +Persistent Classes +================== + +NOTE: persistent classes are EXPERIMENTAL and, in some sense, + incomplete. This module exists largely to test changes made to + support Zope 2 ZClasses, with their historical flaws. + +The persistentclass module provides a meta class that can be used to implement +persistent classes. + +Persistent classes have the following properties: + +- They cannot be turned into ghosts + +- They can only contain picklable subobjects + +- They don't live in regular file-system modules + +Let's look at an example: + + >>> def __init__(self, name): + ... self.name = name + + >>> def foo(self): + ... return self.name, self.kind + + >>> import ZODB.persistentclass + >>> C = ZODB.persistentclass.PersistentMetaClass( + ... 'C', (object, ), dict( + ... __init__ = __init__, + ... __module__ = '__zodb__', + ... foo = foo, + ... kind = 'sample', + ... )) + +This example is obviously a bit contrived. In particular, we defined +the methods outside of the class. Why? 
Because all of the items in a +persistent class must be picklable. We defined the methods as global +functions to make them picklable. + +Also note that we explicitly set the module. Persistent classes don't +live in normal Python modules. Rather, they live in the database. We +use information in ``__module__`` to record where in the database. When +we want to use a database, we will need to supply a custom class +factory to load instances of the class. + +The class we created works a lot like other persistent objects. It +has standard standard persistent attributes: + + >>> C._p_oid + >>> C._p_jar + >>> C._p_serial + >>> C._p_changed + False + +Because we haven't saved the object, the jar, oid, and serial are all +None and it's not changed. + +We can create and use instances of the class: + + >>> c = C('first') + >>> c.foo() + ('first', 'sample') + +We can modify the class and none of the persistent attributes will +change because the object hasn't been saved. + + >>> import six + >>> def bar(self): + ... six.print_('bar', self.name) + >>> C.bar = bar + >>> c.bar() + bar first + + >>> C._p_oid + >>> C._p_jar + >>> C._p_serial + >>> C._p_changed + False + +Now, we can store the class in a database. We're going to use an +explicit transaction manager so that we can show parallel transactions +without having to use threads. + + >>> import transaction + >>> tm = transaction.TransactionManager() + >>> connection = some_database.open(transaction_manager=tm) + >>> connection.root()['C'] = C + >>> tm.commit() + +Now, if we look at the persistence variables, we'll see that they have +values: + + >>> C._p_oid + '\x00\x00\x00\x00\x00\x00\x00\x01' + >>> C._p_jar is not None + True + >>> C._p_serial is not None + True + >>> C._p_changed + False + +Now, if we modify the class: + + >>> def baz(self): + ... 
six.print_('baz', self.name) + >>> C.baz = baz + >>> c.baz() + baz first + +We'll see that the class has changed: + + >>> C._p_changed + True + +If we abort the transaction: + + >>> tm.abort() + +Then the class will return to it's prior state: + + >>> c.baz() + Traceback (most recent call last): + ... + AttributeError: 'C' object has no attribute 'baz' + + >>> c.bar() + bar first + +We can open another connection and access the class there. + + >>> tm2 = transaction.TransactionManager() + >>> connection2 = some_database.open(transaction_manager=tm2) + + >>> C2 = connection2.root()['C'] + >>> c2 = C2('other') + >>> c2.bar() + bar other + +If we make changes without committing them: + + >>> C.bar = baz + >>> c.bar() + baz first + + >>> C is C2 + False + +Other connections are unaffected: + + >>> connection2.sync() + >>> c2.bar() + bar other + +Until we commit: + + >>> tm.commit() + >>> connection2.sync() + >>> c2.bar() + baz other + +Similarly, we don't see changes made in other connections: + + >>> C2.color = 'red' + >>> tm2.commit() + + >>> c.color + Traceback (most recent call last): + ... + AttributeError: 'C' object has no attribute 'color' + +until we sync: + + >>> connection.sync() + >>> c.color + 'red' + +Instances of Persistent Classes +------------------------------- + +We can, of course, store instances of persistent classes in the +database: + + >>> c.color = 'blue' + >>> connection.root()['c'] = c + >>> tm.commit() + + >>> connection2.sync() + >>> connection2.root()['c'].color + 'blue' + +NOTE: If a non-persistent instance of a persistent class is copied, + the class may be copied as well. This is usually not the desired + result. + + +Persistent instances of persistent classes +------------------------------------------ + +Persistent instances of persistent classes are handled differently +than normal instances. When we copy a persistent instances of a +persistent class, we want to avoid copying the class. 
+ +Lets create a persistent class that subclasses Persistent: + + >>> import persistent + >>> class P(persistent.Persistent, C): + ... __module__ = '__zodb__' + ... color = 'green' + + >>> connection.root()['P'] = P + + >>> import persistent.mapping + >>> connection.root()['obs'] = persistent.mapping.PersistentMapping() + >>> p = P('p') + >>> connection.root()['obs']['p'] = p + >>> tm.commit() + +You might be wondering why we didn't just stick 'p' into the root +object. We created an intermediate persistent object instead. We are +storing persistent classes in the root object. To create a ghost for a +persistent instance of a persistent class, we need to be able to be +able to access the root object and it must be loaded first. If the +instance was in the root object, we'd be unable to create it while +loading the root object. + +Now, if we try to load it, we get a broken object: + + >>> connection2.sync() + >>> connection2.root()['obs']['p'] + + +because the module, `__zodb__` can't be loaded. We need to provide a +class factory that knows about this special module. Here we'll supply a +sample class factory that looks up a class name in the database root +if the module is `__zodb__`. It falls back to the normal class lookup +for other modules: + + >>> from ZODB.broken import find_global + >>> def classFactory(connection, modulename, globalname): + ... if modulename == '__zodb__': + ... return connection.root()[globalname] + ... return find_global(modulename, globalname) + + >>> some_database.classFactory = classFactory + +Normally, the classFactory should be set before a database is opened. +We'll reopen the connections we're using. 
We'll assign the old +connections to a variable first to prevent getting them from the +connection pool: + + >>> old = connection, connection2 + >>> connection = some_database.open(transaction_manager=tm) + >>> connection2 = some_database.open(transaction_manager=tm2) + +Now, we can read the object: + + >>> connection2.root()['obs']['p'].color + 'green' + >>> connection2.root()['obs']['p'].color = 'blue' + >>> tm2.commit() + + >>> connection.sync() + >>> p = connection.root()['obs']['p'] + >>> p.color + 'blue' + +Copying +------- + +If we copy an instance via export/import, the copy and the original +share the same class: + + >>> file = connection.exportFile(p._p_oid) + >>> _ = file.seek(0) + >>> cp = connection.importFile(file) + >>> file.close() + >>> cp.color + 'blue' + + >>> cp is not p + True + + >>> cp.__class__ is p.__class__ + True + + >>> tm.abort() + + +XXX test abort of import diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/scripts/README.txt b/thesisenv/lib/python3.6/site-packages/ZODB/scripts/README.txt new file mode 100644 index 0000000..636a2da --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/scripts/README.txt @@ -0,0 +1,107 @@ +This directory contains a collection of utilities for managing ZODB +databases. Some are more useful than others. If you install ZODB +using distutils ("python setup.py install"), a few of these will be installed. + +Unless otherwise noted, these scripts are invoked with the name of the +Data.fs file as their only argument. Example: checkbtrees.py data.fs. + + +analyze.py -- a transaction analyzer for FileStorage + +Reports on the data in a FileStorage. The report is organized by +class. It shows total data, as well as separate reports for current +and historical revisions of objects. + + +checkbtrees.py -- checks BTrees in a FileStorage for corruption + +Attempts to find all the BTrees contained in a Data.fs, calls their +_check() methods, and runs them through BTrees.check.check(). 
+ + +fsdump.py -- summarize FileStorage contents, one line per revision + +Prints a report of FileStorage contents, with one line for each +transaction and one line for each data record in that transaction. +Includes time stamps, file positions, and class names. + + +fsoids.py -- trace all uses of specified oids in a FileStorage + +For heavy debugging. +A set of oids is specified by text file listing and/or command line. +A report is generated showing all uses of these oids in the database: +all new-revision creation/modifications, all references from all +revisions of other objects, and all creation undos. + + +fstest.py -- simple consistency checker for FileStorage + +usage: fstest.py [-v] data.fs + +The fstest tool will scan all the data in a FileStorage and report an +error if it finds any corrupt transaction data. The tool will print a +message when the first error is detected an exit. + +The tool accepts one or more -v arguments. If a single -v is used, it +will print a line of text for each transaction record it encounters. +If two -v arguments are used, it will also print a line of text for +each object. The objects for a transaction will be printed before the +transaction itself. + +Note: It does not check the consistency of the object pickles. It is +possible for the damage to occur only in the part of the file that +stores object pickles. Those errors will go undetected. + + +space.py -- report space used by objects in a FileStorage + +usage: space.py [-v] data.fs + +This ignores revisions and versions. + + +netspace.py -- hackish attempt to report on size of objects + +usage: netspace.py [-P | -v] data.fs + +-P: do a pack first +-v: print info for all objects, even if a traversal path isn't found + +Traverses objects from the database root and attempts to calculate +size of object, including all reachable subobjects. + + +repozo.py -- incremental backup utility for FileStorage + +Run the script with the -h option to see usage details. 
+ + +timeout.py -- script to test transaction timeout + +usage: timeout.py address delay [storage-name] + +This script connects to a storage, begins a transaction, calls store() +and tpc_vote(), and then sleeps forever. This should trigger the +transaction timeout feature of the server. + +zodbload.py -- exercise ZODB under a heavy synthesized Zope-like load + +See the module docstring for details. Note that this script requires +Zope. New in ZODB3 3.1.4. + + +fsrefs.py -- check FileStorage for dangling references + + +fstail.py -- display the most recent transactions in a FileStorage + +usage: fstail.py [-n nxtn] data.fs + +The most recent ntxn transactions are displayed, to stdout. +Optional argument -n specifies ntxn, and defaults to 10. + + +migrate.py -- do a storage migration and gather statistics + +See the module docstring for details. diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/scripts/__init__.py b/thesisenv/lib/python3.6/site-packages/ZODB/scripts/__init__.py new file mode 100644 index 0000000..792d600 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/scripts/__init__.py @@ -0,0 +1 @@ +# diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/scripts/analyze.py b/thesisenv/lib/python3.6/site-packages/ZODB/scripts/analyze.py new file mode 100644 index 0000000..295a822 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/scripts/analyze.py @@ -0,0 +1,147 @@ +#!/usr/bin/env python + +# Based on a transaction analyzer by Matt Kromer. 
+from __future__ import print_function + +import sys + +from ZODB.FileStorage import FileStorage +from ZODB._compat import PersistentUnpickler, BytesIO + + + +class FakeError(Exception): + def __init__(self, module, name): + Exception.__init__(self) + self.module = module + self.name = name + + +def fake_find_class(module, name): + raise FakeError(module, name) + + +def FakeUnpickler(f): + unpickler = PersistentUnpickler(fake_find_class, None, f) + return unpickler + + +class Report(object): + def __init__(self): + self.OIDMAP = {} + self.TYPEMAP = {} + self.TYPESIZE = {} + self.FREEMAP = {} + self.USEDMAP = {} + self.TIDS = 0 + self.OIDS = 0 + self.DBYTES = 0 + self.COIDS = 0 + self.CBYTES = 0 + self.FOIDS = 0 + self.FBYTES = 0 + +def shorten(s, n): + l = len(s) + if l <= n: + return s + while len(s) + 3 > n: # account for ... + i = s.find(".") + if i == -1: + # In the worst case, just return the rightmost n bytes + return s[-n:] + else: + s = s[i + 1:] + l = len(s) + return "..." + s + +def report(rep): + print("Processed %d records in %d transactions" % (rep.OIDS, rep.TIDS)) + print("Average record size is %7.2f bytes" % (rep.DBYTES * 1.0 / rep.OIDS)) + print(("Average transaction size is %7.2f bytes" % + (rep.DBYTES * 1.0 / rep.TIDS))) + + print("Types used:") + fmt = "%-46s %7s %9s %6s %7s" + fmtp = "%-46s %7d %9d %5.1f%% %7.2f" # per-class format + fmts = "%46s %7d %8dk %5.1f%% %7.2f" # summary format + print(fmt % ("Class Name", "Count", "TBytes", "Pct", "AvgSize")) + print(fmt % ('-'*46, '-'*7, '-'*9, '-'*5, '-'*7)) + typemap = sorted(rep.TYPEMAP) + cumpct = 0.0 + for t in typemap: + pct = rep.TYPESIZE[t] * 100.0 / rep.DBYTES + cumpct += pct + print(fmtp % (shorten(t, 46), rep.TYPEMAP[t], rep.TYPESIZE[t], + pct, rep.TYPESIZE[t] * 1.0 / rep.TYPEMAP[t])) + + print(fmt % ('='*46, '='*7, '='*9, '='*5, '='*7)) + print("%46s %7d %9s %6s %6.2fk" % ('Total Transactions', rep.TIDS, ' ', + ' ', rep.DBYTES * 1.0 / rep.TIDS / 1024.0)) + print(fmts % ('Total Records', 
rep.OIDS, rep.DBYTES / 1024.0, cumpct, + rep.DBYTES * 1.0 / rep.OIDS)) + + print(fmts % ('Current Objects', rep.COIDS, rep.CBYTES / 1024.0, + rep.CBYTES * 100.0 / rep.DBYTES, + rep.CBYTES * 1.0 / rep.COIDS)) + if rep.FOIDS: + print(fmts % ('Old Objects', rep.FOIDS, rep.FBYTES / 1024.0, + rep.FBYTES * 100.0 / rep.DBYTES, + rep.FBYTES * 1.0 / rep.FOIDS)) + +def analyze(path): + fs = FileStorage(path, read_only=1) + fsi = fs.iterator() + report = Report() + for txn in fsi: + analyze_trans(report, txn) + return report + +def analyze_trans(report, txn): + report.TIDS += 1 + for rec in txn: + analyze_rec(report, rec) + +def get_type(record): + try: + unpickled = FakeUnpickler(BytesIO(record.data)).load() + except FakeError as err: + return "%s.%s" % (err.module, err.name) + classinfo = unpickled[0] + if isinstance(classinfo, tuple): + mod, klass = classinfo + return "%s.%s" % (mod, klass) + else: + return str(classinfo) + +def analyze_rec(report, record): + oid = record.oid + report.OIDS += 1 + if record.data is None: + # No pickle -- aborted version or undo of object creation. 
+ return + try: + size = len(record.data) # Ignores various overhead + report.DBYTES += size + if oid not in report.OIDMAP: + type = get_type(record) + report.OIDMAP[oid] = type + report.USEDMAP[oid] = size + report.COIDS += 1 + report.CBYTES += size + else: + type = report.OIDMAP[oid] + fsize = report.USEDMAP[oid] + report.FREEMAP[oid] = report.FREEMAP.get(oid, 0) + fsize + report.USEDMAP[oid] = size + report.FOIDS += 1 + report.FBYTES += fsize + report.CBYTES += size - fsize + report.TYPEMAP[type] = report.TYPEMAP.get(type, 0) + 1 + report.TYPESIZE[type] = report.TYPESIZE.get(type, 0) + size + except Exception as err: + print(err) + +if __name__ == "__main__": + path = sys.argv[1] + report(analyze(path)) diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/scripts/checkbtrees.py b/thesisenv/lib/python3.6/site-packages/ZODB/scripts/checkbtrees.py new file mode 100644 index 0000000..6df449a --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/scripts/checkbtrees.py @@ -0,0 +1,120 @@ +#!/usr/bin/env python +"""Check the consistency of BTrees in a Data.fs + +usage: checkbtrees.py data.fs + +Try to find all the BTrees in a Data.fs, call their _check() methods, +and run them through BTrees.check.check(). +""" +from __future__ import print_function +import ZODB +from ZODB.FileStorage import FileStorage +from BTrees.check import check + +# Set of oids we've already visited. Since the object structure is +# a general graph, this is needed to prevent unbounded paths in the +# presence of cycles. It's also helpful in eliminating redundant +# checking when a BTree is pointed to by many objects. +oids_seen = {} + +# Append (obj, path) to L if and only if obj is a persistent object +# and we haven't seen it before. 
+def add_if_new_persistent(L, obj, path): + global oids_seen + + getattr(obj, '_', None) # unghostify + if hasattr(obj, '_p_oid'): + oid = obj._p_oid + if oid not in oids_seen: + L.append((obj, path)) + oids_seen[oid] = 1 + +def get_subobjects(obj): + getattr(obj, '_', None) # unghostify + sub = [] + try: + attrs = obj.__dict__.items() + except AttributeError: + attrs = () + for pair in attrs: + sub.append(pair) + + # what if it is a mapping? + try: + items = obj.items() + except AttributeError: + items = () + for k, v in items: + if not isinstance(k, int): + sub.append(("", k)) + if not isinstance(v, int): + sub.append(("[%s]" % repr(k), v)) + + # what if it is a sequence? + i = 0 + while 1: + try: + elt = obj[i] + except: + break + sub.append(("[%d]" % i, elt)) + i += 1 + + return sub + +def main(fname=None): + if fname is None: + import sys + try: + fname, = sys.argv[1:] + except: + print(__doc__) + sys.exit(2) + + fs = FileStorage(fname, read_only=1) + cn = ZODB.DB(fs).open() + rt = cn.root() + todo = [] + add_if_new_persistent(todo, rt, '') + + found = 0 + while todo: + obj, path = todo.pop(0) + found += 1 + if not path: + print("", repr(obj)) + else: + print(path, repr(obj)) + + mod = str(obj.__class__.__module__) + if mod.startswith("BTrees"): + if hasattr(obj, "_check"): + try: + obj._check() + except AssertionError as msg: + print("*" * 60) + print(msg) + print("*" * 60) + + try: + check(obj) + except AssertionError as msg: + print("*" * 60) + print(msg) + print("*" * 60) + + if found % 100 == 0: + cn.cacheMinimize() + + for k, v in get_subobjects(obj): + if k.startswith('['): + # getitem + newpath = "%s%s" % (path, k) + else: + newpath = "%s.%s" % (path, k) + add_if_new_persistent(todo, v, newpath) + + print("total", len(fs._index), "found", found) + +if __name__ == "__main__": + main() diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/scripts/fsoids.py b/thesisenv/lib/python3.6/site-packages/ZODB/scripts/fsoids.py new file mode 100644 index 
0000000..8307bba --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/scripts/fsoids.py @@ -0,0 +1,79 @@ +#!/usr/bin/python + +############################################################################## +# +# Copyright (c) 2004 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## + +"""FileStorage oid-tracer. + +usage: fsoids.py [-f oid_file] Data.fs [oid]... + +Display information about all occurrences of specified oids in a FileStorage. +This is meant for heavy debugging. + +This includes all revisions of the oids, all objects referenced by the +oids, and all revisions of all objects referring to the oids. + +If specified, oid_file is an input text file, containing one oid per +line. oids are specified as integers, in any of Python's integer +notations (typically like 0x341a). One or more oids can also be specified +on the command line. + +The output is grouped by oid, from smallest to largest, and sub-grouped +by transaction, from oldest to newest. + +This will not alter the FileStorage, but running against a live FileStorage +is not recommended (spurious error messages may result). + +See testfsoids.py for a tutorial doctest. 
+""" +from __future__ import print_function + +import sys + +from ZODB.FileStorage.fsoids import Tracer + +def usage(): + print(__doc__) + +def main(): + import getopt + + try: + opts, args = getopt.getopt(sys.argv[1:], 'f:') + if not args: + usage() + raise ValueError("Must specify a FileStorage") + path = None + for k, v in opts: + if k == '-f': + path = v + except (getopt.error, ValueError): + usage() + raise + + c = Tracer(args[0]) + for oid in args[1:]: + as_int = int(oid, 0) # 0 == auto-detect base + c.register_oids(as_int) + if path is not None: + for line in open(path): + as_int = int(line, 0) + c.register_oids(as_int) + if not c.oids: + raise ValueError("no oids specified") + c.run() + c.report() + +if __name__ == "__main__": + main() diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/scripts/fsrefs.py b/thesisenv/lib/python3.6/site-packages/ZODB/scripts/fsrefs.py new file mode 100644 index 0000000..ed897d5 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/scripts/fsrefs.py @@ -0,0 +1,154 @@ +#!/usr/bin/env python +############################################################################## +# +# Copyright (c) 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## + +"""Check FileStorage for dangling references. + +usage: fsrefs.py [-v] data.fs + +fsrefs.py checks object sanity by trying to load the current revision of +every object O in the database, and also verifies that every object +directly reachable from each such O exists in the database. 
+ +It's hard to explain exactly what it does because it relies on undocumented +features in Python's cPickle module: many of the crucial steps of loading +an object are taken, but application objects aren't actually created. This +saves a lot of time, and allows fsrefs to be run even if the code +implementing the object classes isn't available. + +A read-only connection to the specified FileStorage is made, but it is not +recommended to run fsrefs against a live FileStorage. Because a live +FileStorage is mutating while fsrefs runs, it's not possible for fsrefs to +get a wholly consistent view of the database across the entire time fsrefs +is running; spurious error messages may result. + +fsrefs doesn't normally produce any output. If an object fails to load, the +oid of the object is given in a message saying so, and if -v was specified +then the traceback corresponding to the load failure is also displayed +(this is the only effect of the -v flag). + +Three other kinds of errors are also detected, when an object O loads OK, +and directly refers to a persistent object P but there's a problem with P: + + - If P doesn't exist in the database, a message saying so is displayed. + The unsatisifiable reference to P is often called a "dangling + reference"; P is called "missing" in the error output. + + - If the current state of the database is such that P's creation has + been undone, then P can't be loaded either. This is also a kind of + dangling reference, but is identified as "object creation was undone". + + - If P can't be loaded (but does exist in the database), a message saying + that O refers to an object that can't be loaded is displayed. + +fsrefs also (indirectly) checks that the .index file is sane, because +fsrefs uses the index to get its idea of what constitutes "all the objects +in the database". 
+ +Note these limitations: because fsrefs only looks at the current revision +of objects, it does not attempt to load objects in versions, or non-current +revisions of objects; therefore fsrefs cannot find problems in versions or +in non-current revisions. +""" +from __future__ import print_function +import traceback + +from ZODB.FileStorage import FileStorage +from ZODB.TimeStamp import TimeStamp +from ZODB.utils import u64, oid_repr, get_pickle_metadata, load_current +from ZODB.serialize import get_refs +from ZODB.POSException import POSKeyError + +# There's a problem with oid. 'data' is its pickle, and 'serial' its +# serial number. 'missing' is a list of (oid, class, reason) triples, +# explaining what the problem(s) is(are). +def report(oid, data, serial, missing): + from_mod, from_class = get_pickle_metadata(data) + if len(missing) > 1: + plural = "s" + else: + plural = "" + ts = TimeStamp(serial) + print("oid %s %s.%s" % (hex(u64(oid)), from_mod, from_class)) + print("last updated: %s, tid=%s" % (ts, hex(u64(serial)))) + print("refers to invalid object%s:" % plural) + for oid, info, reason in missing: + if isinstance(info, tuple): + description = "%s.%s" % info + else: + description = str(info) + print("\toid %s %s: %r" % (oid_repr(oid), reason, description)) + print() + +def main(path=None): + verbose = 0 + if path is None: + import sys + import getopt + + opts, args = getopt.getopt(sys.argv[1:], "v") + for k, v in opts: + if k == "-v": + verbose += 1 + + path, = args + + + fs = FileStorage(path, read_only=1) + + # Set of oids in the index that failed to load due to POSKeyError. + # This is what happens if undo is applied to the transaction creating + # the object (the oid is still in the index, but its current data + # record has a backpointer of 0, and POSKeyError is raised then + # because of that backpointer). + undone = {} + + # Set of oids that were present in the index but failed to load. + # This does not include oids in undone. 
+ noload = {} + + for oid in fs._index.keys(): + try: + data, serial = load_current(fs, oid) + except (KeyboardInterrupt, SystemExit): + raise + except POSKeyError: + undone[oid] = 1 + except: + if verbose: + traceback.print_exc() + noload[oid] = 1 + + inactive = noload.copy() + inactive.update(undone) + for oid in fs._index.keys(): + if oid in inactive: + continue + data, serial = load_current(fs, oid) + refs = get_refs(data) + missing = [] # contains 3-tuples of oid, klass-metadata, reason + for ref, klass in refs: + if klass is None: + klass = '' + if ref not in fs._index: + missing.append((ref, klass, "missing")) + if ref in noload: + missing.append((ref, klass, "failed to load")) + if ref in undone: + missing.append((ref, klass, "object creation was undone")) + if missing: + report(oid, data, serial, missing) + +if __name__ == "__main__": + main() diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/scripts/fsstats.py b/thesisenv/lib/python3.6/site-packages/ZODB/scripts/fsstats.py new file mode 100644 index 0000000..4c767a6 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/scripts/fsstats.py @@ -0,0 +1,203 @@ +#!/usr/bin/env python2 +"""Print details statistics from fsdump output.""" +from __future__ import print_function +import re +import sys +import six +from six.moves import filter + +rx_txn = re.compile("tid=([0-9a-f]+).*size=(\d+)") +rx_data = re.compile("oid=([0-9a-f]+) class=(\S+) size=(\d+)") + +def sort_byhsize(seq, reverse=False): + L = [(v.size(), k, v) for k, v in seq] + L.sort() + if reverse: + L.reverse() + return [(k, v) for n, k, v in L] + +class Histogram(dict): + + def add(self, size): + self[size] = self.get(size, 0) + 1 + + def size(self): + return sum(six.itervalues(self)) + + def mean(self): + product = sum([k * v for k, v in six.iteritems(self)]) + return product / self.size() + + def median(self): + # close enough? 
+ n = self.size() / 2 + L = self.keys() + L.sort() + L.reverse() + while 1: + k = L.pop() + if self[k] > n: + return k + n -= self[k] + + def mode(self): + mode = 0 + value = 0 + for k, v in six.iteritems(self): + if v > value: + value = v + mode = k + return mode + + def make_bins(self, binsize): + maxkey = max(six.iterkeys(self)) + self.binsize = binsize + self.bins = [0] * (1 + maxkey / binsize) + for k, v in six.iteritems(self): + b = k / binsize + self.bins[b] += v + + def report(self, name, binsize=50, usebins=False, gaps=True, skip=True): + if usebins: + # Use existing bins with whatever size they have + binsize = self.binsize + else: + # Make new bins + self.make_bins(binsize) + maxval = max(self.bins) + # Print up to 40 dots for a value + dot = max(maxval / 40, 1) + tot = sum(self.bins) + print(name) + print("Total", tot, end=' ') + print("Median", self.median(), end=' ') + print("Mean", self.mean(), end=' ') + print("Mode", self.mode(), end=' ') + print("Max", max(self)) + print("One * represents", dot) + gap = False + cum = 0 + for i, n in enumerate(self.bins): + if gaps and (not n or (skip and not n / dot)): + if not gap: + print(" ...") + gap = True + continue + gap = False + p = 100 * n / tot + cum += n + pc = 100 * cum / tot + print("%6d %6d %3d%% %3d%% %s" % ( + i * binsize, n, p, pc, "*" * (n / dot))) + print() + +def class_detail(class_size): + # summary of classes + fmt = "%5s %6s %6s %6s %-50.50s" + labels = ["num", "median", "mean", "mode", "class"] + print(fmt % tuple(labels)) + print(fmt % tuple(["-" * len(s) for s in labels])) + for klass, h in sort_byhsize(six.iteritems(class_size)): + print(fmt % (h.size(), h.median(), h.mean(), h.mode(), klass)) + print() + + # per class details + for klass, h in sort_byhsize(six.iteritems(class_size), reverse=True): + h.make_bins(50) + if len(filter(None, h.bins)) == 1: + continue + h.report("Object size for %s" % klass, usebins=True) + +def revision_detail(lifetimes, classes): + # Report per-class 
details for any object modified more than once + for name, oids in six.iteritems(classes): + h = Histogram() + keep = False + for oid in dict.fromkeys(oids, 1): + L = lifetimes.get(oid) + n = len(L) + h.add(n) + if n > 1: + keep = True + if keep: + h.report("Number of revisions for %s" % name, binsize=10) + +def main(path=None): + if path is None: + path = sys.argv[1] + txn_objects = Histogram() # histogram of txn size in objects + txn_bytes = Histogram() # histogram of txn size in bytes + obj_size = Histogram() # histogram of object size + n_updates = Histogram() # oid -> num updates + n_classes = Histogram() # class -> num objects + lifetimes = {} # oid -> list of tids + class_size = {} # class -> histogram of object size + classes = {} # class -> list of oids + + MAX = 0 + objects = 0 + tid = None + + f = open(path, "rb") + for i, line in enumerate(f): + if MAX and i > MAX: + break + if line.startswith(" data"): + m = rx_data.search(line) + if not m: + continue + oid, klass, size = m.groups() + size = int(size) + + obj_size.add(size) + n_updates.add(oid) + n_classes.add(klass) + + h = class_size.get(klass) + if h is None: + h = class_size[klass] = Histogram() + h.add(size) + + L = lifetimes.setdefault(oid, []) + L.append(tid) + + L = classes.setdefault(klass, []) + L.append(oid) + objects += 1 + + elif line.startswith("Trans"): + + if tid is not None: + txn_objects.add(objects) + + m = rx_txn.search(line) + if not m: + continue + tid, size = m.groups() + size = int(size) + objects = 0 + + txn_bytes.add(size) + f.close() + + print("Summary: %d txns, %d objects, %d revisions" % ( + txn_objects.size(), len(n_updates), n_updates.size())) + print() + + txn_bytes.report("Transaction size (bytes)", binsize=1024) + txn_objects.report("Transaction size (objects)", binsize=10) + obj_size.report("Object size", binsize=128) + + # object lifetime info + h = Histogram() + for k, v in lifetimes.items(): + h.add(len(v)) + h.report("Number of revisions", binsize=10, skip=False) 
+ + # details about revisions + revision_detail(lifetimes, classes) + + class_detail(class_size) + +if __name__ == "__main__": + main() diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/scripts/fstail.py b/thesisenv/lib/python3.6/site-packages/ZODB/scripts/fstail.py new file mode 100644 index 0000000..801e9b0 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/scripts/fstail.py @@ -0,0 +1,55 @@ +#!/usr/bin/env python +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## +"""Tool to dump the last few transactions from a FileStorage.""" +from __future__ import print_function +from ZODB.fstools import prev_txn + +import binascii +import getopt +import sys + +try: + from hashlib import sha1 +except ImportError: + from sha import sha as sha1 + +def main(path, ntxn): + with open(path, "rb") as f: + f.seek(0, 2) + th = prev_txn(f) + i = ntxn + while th and i > 0: + hash = sha1(th.get_raw_data()).digest() + l = len(str(th.get_timestamp())) + 1 + th.read_meta() + print("%s: hash=%s" % (th.get_timestamp(), + binascii.hexlify(hash).decode())) + print(("user=%r description=%r length=%d offset=%d (+%d)" + % (th.user, th.descr, th.length, th.get_offset(), len(th)))) + print() + th = th.prev_txn() + i -= 1 + +def Main(): + ntxn = 10 + opts, args = getopt.getopt(sys.argv[1:], "n:") + path, = args + for k, v in opts: + if k == '-n': + ntxn = int(v) + main(path, ntxn) + +if 
__name__ == "__main__": + Main() diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/scripts/fstest.py b/thesisenv/lib/python3.6/site-packages/ZODB/scripts/fstest.py new file mode 100644 index 0000000..5a846da --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/scripts/fstest.py @@ -0,0 +1,225 @@ +#!/usr/bin/env python +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## +"""Simple consistency checker for FileStorage. + +usage: fstest.py [-v] data.fs + +The fstest tool will scan all the data in a FileStorage and report an +error if it finds any corrupt transaction data. The tool will print a +message when the first error is detected, then exit. + +The tool accepts one or more -v arguments. If a single -v is used, it +will print a line of text for each transaction record it encounters. +If two -v arguments are used, it will also print a line of text for +each object. The objects for a transaction will be printed before the +transaction itself. + +Note: It does not check the consistency of the object pickles. It is +possible for the damage to occur only in the part of the file that +stores object pickles. Those errors will go undetected. +""" +from __future__ import print_function + +# The implementation is based closely on the read_index() function in +# ZODB.FileStorage. If anything about the FileStorage layout changes, +# this file will need to be udpated. 
+ +import binascii +import struct +import sys +from ZODB._compat import FILESTORAGE_MAGIC + +class FormatError(ValueError): + """There is a problem with the format of the FileStorage.""" + +class Status(object): + checkpoint = b'c' + undone = b'u' + +packed_version = FILESTORAGE_MAGIC + +TREC_HDR_LEN = 23 +DREC_HDR_LEN = 42 + +VERBOSE = 0 + +def hexify(s): + r"""Format an 8-bit string as hex + + >>> hexify(b'\x00\xff\xaa\xcc') + '0x00ffaacc' + + """ + return '0x' + binascii.hexlify(s).decode() + +def chatter(msg, level=1): + if VERBOSE >= level: + sys.stdout.write(msg) + +def U64(v): + """Unpack an 8-byte string as a 64-bit long""" + h, l = struct.unpack(">II", v) + if h: + return (h << 32) + l + else: + return l + +def check(path): + with open(path, 'rb') as file: + file.seek(0, 2) + file_size = file.tell() + if file_size == 0: + raise FormatError("empty file") + file.seek(0) + if file.read(4) != packed_version: + raise FormatError("invalid file header") + + pos = 4 + tid = b'\000' * 8 # lowest possible tid to start + i = 0 + while pos: + _pos = pos + pos, tid = check_trec(path, file, pos, tid, file_size) + if tid is not None: + chatter("%10d: transaction tid %s #%d \n" % + (_pos, hexify(tid), i)) + i = i + 1 + + +def check_trec(path, file, pos, ltid, file_size): + """Read an individual transaction record from file. + + Returns the pos of the next transaction and the transaction id. + It also leaves the file pointer set to pos. The path argument is + used for generating error messages. 
+ """ + + h = file.read(TREC_HDR_LEN) #XXX must be bytes under Py3k + if not h: + return None, None + if len(h) != TREC_HDR_LEN: + raise FormatError("%s truncated at %s" % (path, pos)) + + tid, stl, status, ul, dl, el = struct.unpack(">8s8scHHH", h) + tmeta_len = TREC_HDR_LEN + ul + dl + el + + if tid <= ltid: + raise FormatError("%s time-stamp reduction at %s: %s <= %s" % + (path, pos, hexify(tid), hexify(ltid))) + ltid = tid + + tl = U64(stl) # transaction record length - 8 + if pos + tl + 8 > file_size: + raise FormatError("%s truncated possibly because of" + " damaged records at %s" % (path, pos)) + if status == Status.checkpoint: + raise FormatError("%s checkpoint flag was not cleared at %s" + % (path, pos)) + if status not in b' up': + raise FormatError("%s has invalid status '%s' at %s" % + (path, status, pos)) + + if tmeta_len > tl: + raise FormatError("%s has an invalid transaction header" + " at %s" % (path, pos)) + + tpos = pos + tend = tpos + tl + + if status != Status.undone: + pos = tpos + tmeta_len + file.read(ul + dl + el) # skip transaction metadata + + i = 0 + while pos < tend: + _pos = pos + pos, oid = check_drec(path, file, pos, tpos, tid) + if pos > tend: + raise FormatError("%s has data records that extend beyond" + " the transaction record; end at %s" % + (path, pos)) + chatter("%10d: object oid %s #%d\n" % (_pos, hexify(oid), i), + level=2) + i = i + 1 + + file.seek(tend) + rtl = file.read(8) + if rtl != stl: + raise FormatError("%s has inconsistent transaction length" + " for undone transaction at %s" % (path, pos)) + pos = tend + 8 + return pos, tid + +def check_drec(path, file, pos, tpos, tid): + """Check a data record for the current transaction record""" + + h = file.read(DREC_HDR_LEN) + if len(h) != DREC_HDR_LEN: + raise FormatError("%s truncated at %s" % (path, pos)) + oid, serial, _prev, _tloc, vlen, _plen = ( + struct.unpack(">8s8s8s8sH8s", h)) + prev = U64(_prev) + tloc = U64(_tloc) + plen = U64(_plen) + dlen = DREC_HDR_LEN + (plen 
or 8) + + if vlen: + dlen = dlen + 16 + vlen + file.seek(8, 1) + pv = U64(file.read(8)) + file.seek(vlen, 1) # skip the version data + + if tloc != tpos: + raise FormatError("%s data record exceeds transaction record " + "at %s: tloc %d != tpos %d" % + (path, pos, tloc, tpos)) + + pos = pos + dlen + if plen: + file.seek(plen, 1) + else: + file.seek(8, 1) + # _loadBack() ? + + return pos, oid + +def usage(): + sys.exit(__doc__) + +def main(args=None): + if args is None: + args = sys.argv[1:] + import getopt + + global VERBOSE + try: + opts, args = getopt.getopt(args, 'v') + if len(args) != 1: + raise ValueError("expected one argument") + for k, v in opts: + if k == '-v': + VERBOSE = VERBOSE + 1 + except (getopt.error, ValueError): + usage() + + try: + check(args[0]) + except FormatError as msg: + sys.exit(msg) + + chatter("no errors detected") + +if __name__ == "__main__": + main() diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/scripts/manual_tests/test-checker.fs b/thesisenv/lib/python3.6/site-packages/ZODB/scripts/manual_tests/test-checker.fs new file mode 100644 index 0000000..4afe2ae Binary files /dev/null and b/thesisenv/lib/python3.6/site-packages/ZODB/scripts/manual_tests/test-checker.fs differ diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/scripts/manual_tests/testfstest.py b/thesisenv/lib/python3.6/site-packages/ZODB/scripts/manual_tests/testfstest.py new file mode 100644 index 0000000..9f85b63 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/scripts/manual_tests/testfstest.py @@ -0,0 +1,177 @@ +"""Verify that fstest.py can find errors. + +Note: To run this test script fstest.py must be on your PYTHONPATH. 
+""" + +from cStringIO import StringIO +import re +import struct +import unittest +import ZODB.tests.util + +import fstest +from fstest import FormatError, U64 + +class TestCorruptedFS(ZODB.tests.util.TestCase): + + f = open('test-checker.fs', 'rb') + datafs = f.read() + f.close() + del f + + def setUp(self): + ZODB.tests.util.TestCase.setUp(self) + self._temp = 'Data.fs' + self._file = open(self._temp, 'wb') + + def tearDown(self): + if not self._file.closed: + self._file.close() + ZODB.tests.util.TestCase.tearDown(self) + + def noError(self): + if not self._file.closed: + self._file.close() + fstest.check(self._temp) + + def detectsError(self, rx): + if not self._file.closed: + self._file.close() + try: + fstest.check(self._temp) + except FormatError as msg: + mo = re.search(rx, str(msg)) + self.assertFalse(mo is None, "unexpected error: %s" % msg) + else: + self.fail("fstest did not detect corruption") + + def getHeader(self): + buf = self._datafs.read(16) + if not buf: + return 0, '' + tl = U64(buf[8:]) + return tl, buf + + def copyTransactions(self, n): + """Copy at most n transactions from the good data""" + f = self._datafs = StringIO(self.datafs) + self._file.write(f.read(4)) + for i in range(n): + tl, data = self.getHeader() + if not tl: + return + self._file.write(data) + rec = f.read(tl - 8) + self._file.write(rec) + + def testGood(self): + self._file.write(self.datafs) + self.noError() + + def testTwoTransactions(self): + self.copyTransactions(2) + self.noError() + + def testEmptyFile(self): + self.detectsError("empty file") + + def testInvalidHeader(self): + self._file.write('SF12') + self.detectsError("invalid file header") + + def testTruncatedTransaction(self): + self._file.write(self.datafs[:4+22]) + self.detectsError("truncated") + + def testCheckpointFlag(self): + self.copyTransactions(2) + tl, data = self.getHeader() + assert tl > 0, "ran out of good transaction data" + self._file.write(data) + self._file.write('c') + 
self._file.write(self._datafs.read(tl - 9)) + self.detectsError("checkpoint flag") + + def testInvalidStatus(self): + self.copyTransactions(2) + tl, data = self.getHeader() + assert tl > 0, "ran out of good transaction data" + self._file.write(data) + self._file.write('Z') + self._file.write(self._datafs.read(tl - 9)) + self.detectsError("invalid status") + + def testTruncatedRecord(self): + self.copyTransactions(3) + tl, data = self.getHeader() + assert tl > 0, "ran out of good transaction data" + self._file.write(data) + buf = self._datafs.read(tl / 2) + self._file.write(buf) + self.detectsError("truncated possibly") + + def testBadLength(self): + self.copyTransactions(2) + tl, data = self.getHeader() + assert tl > 0, "ran out of good transaction data" + self._file.write(data) + buf = self._datafs.read(tl - 8) + self._file.write(buf[0]) + assert tl <= 1<<16, "can't use this transaction for this test" + self._file.write("\777\777") + self._file.write(buf[3:]) + self.detectsError("invalid transaction header") + + def testDecreasingTimestamps(self): + self.copyTransactions(0) + tl, data = self.getHeader() + buf = self._datafs.read(tl - 8) + t1 = data + buf + + tl, data = self.getHeader() + buf = self._datafs.read(tl - 8) + t2 = data + buf + + self._file.write(t2[:8] + t1[8:]) + self._file.write(t1[:8] + t2[8:]) + self.detectsError("time-stamp") + + def testTruncatedData(self): + # This test must re-write the transaction header length in + # order to trigger the error in check_drec(). If it doesn't, + # the truncated data record would also caught a truncated + # transaction record. 
+ self.copyTransactions(1) + tl, data = self.getHeader() + pos = self._file.tell() + self._file.write(data) + buf = self._datafs.read(tl - 8) + hdr = buf[:15] + ul, dl, el = struct.unpack(">HHH", hdr[-6:]) + self._file.write(buf[:15 + ul + dl + el]) + data = buf[15 + ul + dl + el:] + self._file.write(data[:24]) + self._file.seek(pos + 8, 0) + newlen = struct.pack(">II", 0, tl - (len(data) - 24)) + self._file.write(newlen) + self.detectsError("truncated at") + + def testBadDataLength(self): + self.copyTransactions(1) + tl, data = self.getHeader() + self._file.write(data) + buf = self._datafs.read(tl - 8) + hdr = buf[:7] + # write the transaction meta data + ul, dl, el = struct.unpack(">HHH", hdr[-6:]) + self._file.write(buf[:7 + ul + dl + el]) + + # write the first part of the data header + data = buf[7 + ul + dl + el:] + self._file.write(data[:24]) + self._file.write("\000" * 4 + "\077" + "\000" * 3) + self._file.write(data[32:]) + self.detectsError("record exceeds transaction") + +if __name__ == "__main__": + unittest.main() diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/scripts/migrate.py b/thesisenv/lib/python3.6/site-packages/ZODB/scripts/migrate.py new file mode 100644 index 0000000..b093b47 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/scripts/migrate.py @@ -0,0 +1,358 @@ +#!/usr/bin/env python +############################################################################## +# +# Copyright (c) 2001, 2002, 2003 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## +"""A script to gather statistics while doing a storage migration. + +This is very similar to a standard storage's copyTransactionsFrom() method, +except that it's geared to run as a script, and it collects useful pieces of +information as it's working. This script can be used to stress test a storage +since it blasts transactions at it as fast as possible. You can get a good +sense of the performance of a storage by running this script. + +Actually it just counts the size of pickles in the transaction via the +iterator protocol, so storage overheads aren't counted. + +Usage: %(PROGRAM)s [options] [source-storage-args] [destination-storage-args] +Options: + -S sourcetype + --stype=sourcetype + This is the name of a recognized type for the source database. Use -T + to print out the known types. Defaults to "file". + + -D desttype + --dtype=desttype + This is the name of the recognized type for the destination database. + Use -T to print out the known types. Defaults to "file". + + -o filename + --output=filename + Print results in filename, otherwise stdout. + + -m txncount + --max=txncount + Stop after committing txncount transactions. + + -k txncount + --skip=txncount + Skip the first txncount transactions. + + -p/--profile + Turn on specialized profiling. + + -t/--timestamps + Print tids as timestamps. + + -T/--storage_types + Print all the recognized storage types and exit. + + -v/--verbose + Turns on verbose output. Multiple -v options increase the verbosity. + + -h/--help + Print this message and exit. + +Positional arguments: + + source-storage-args: + Semicolon separated list of arguments for the source storage, as + key=val pairs. 
E.g. "file_name=Data.fs;read_only=1" + + destination-storage-args: + Comma separated list of arguments for the source storage, as key=val + pairs. E.g. "name=full;frequency=3600" +""" +from __future__ import print_function +import re +import sys +import time +import getopt +import marshal +import profile + +from ZODB import utils +from ZODB import StorageTypes +from ZODB.TimeStamp import TimeStamp + +PROGRAM = sys.argv[0] +ZERO = '\0'*8 + + +def usage(code, msg=''): + print(__doc__ % globals(), file=sys.stderr) + if msg: + print(msg, file=sys.stderr) + sys.exit(code) + + +def error(code, msg): + print(msg, file=sys.stderr) + print("use --help for usage message") + sys.exit(code) + + +def main(): + try: + opts, args = getopt.getopt( + sys.argv[1:], + 'hvo:pm:k:D:S:Tt', + ['help', 'verbose', + 'output=', 'profile', 'storage_types', + 'max=', 'skip=', 'dtype=', 'stype=', 'timestamps']) + except getopt.error as msg: + error(2, msg) + + class Options(object): + stype = 'FileStorage' + dtype = 'FileStorage' + verbose = 0 + outfile = None + profilep = False + maxtxn = -1 + skiptxn = -1 + timestamps = False + + options = Options() + + for opt, arg in opts: + if opt in ('-h', '--help'): + usage(0) + elif opt in ('-v', '--verbose'): + options.verbose += 1 + elif opt in ('-T', '--storage_types'): + print_types() + sys.exit(0) + elif opt in ('-S', '--stype'): + options.stype = arg + elif opt in ('-D', '--dtype'): + options.dtype = arg + elif opt in ('-o', '--output'): + options.outfile = arg + elif opt in ('-p', '--profile'): + options.profilep = True + elif opt in ('-m', '--max'): + options.maxtxn = int(arg) + elif opt in ('-k', '--skip'): + options.skiptxn = int(arg) + elif opt in ('-t', '--timestamps'): + options.timestamps = True + + if len(args) > 2: + error(2, "too many arguments") + + srckws = {} + if len(args) > 0: + srcargs = args[0] + for kv in re.split(r';\s*', srcargs): + key, val = kv.split('=') + srckws[key] = val + + destkws = {} + if len(args) > 1: + destargs = 
args[1] + for kv in re.split(r';\s*', destargs): + key, val = kv.split('=') + destkws[key] = val + + if options.stype not in StorageTypes.storage_types.keys(): + usage(2, 'Source database type must be provided') + if options.dtype not in StorageTypes.storage_types.keys(): + usage(2, 'Destination database type must be provided') + + # Open the output file + if options.outfile is None: + options.outfp = sys.stdout + options.outclosep = False + else: + options.outfp = open(options.outfile, 'w') + options.outclosep = True + + if options.verbose > 0: + print('Opening source database...') + modname, sconv = StorageTypes.storage_types[options.stype] + kw = sconv(**srckws) + __import__(modname) + sclass = getattr(sys.modules[modname], options.stype) + srcdb = sclass(**kw) + + if options.verbose > 0: + print('Opening destination database...') + modname, dconv = StorageTypes.storage_types[options.dtype] + kw = dconv(**destkws) + __import__(modname) + dclass = getattr(sys.modules[modname], options.dtype) + dstdb = dclass(**kw) + + try: + t0 = time.time() + doit(srcdb, dstdb, options) + t1 = time.time() + if options.verbose > 0: + print('Migration time: %8.3f' % (t1-t0)) + finally: + # Done + srcdb.close() + dstdb.close() + if options.outclosep: + options.outfp.close() + + +def doit(srcdb, dstdb, options): + outfp = options.outfp + profilep = options.profilep + verbose = options.verbose + # some global information + largest_pickle = 0 + largest_txn_in_size = 0 + largest_txn_in_objects = 0 + total_pickle_size = 0 + total_object_count = 0 + # Ripped from BaseStorage.copyTransactionsFrom() + ts = None + ok = True + prevrevids = {} + counter = 0 + skipper = 0 + if options.timestamps: + print("%4s. %26s %6s %8s %5s %5s %5s %5s %5s" % ( + "NUM", "TID AS TIMESTAMP", "OBJS", "BYTES", + # Does anybody know what these times mean? + "t4-t0", "t1-t0", "t2-t1", "t3-t2", "t4-t3")) + else: + print("%4s. 
%20s %6s %8s %6s %6s %6s %6s %6s" % ( + "NUM", "TRANSACTION ID", "OBJS", "BYTES", + # Does anybody know what these times mean? + "t4-t0", "t1-t0", "t2-t1", "t3-t2", "t4-t3")) + for txn in srcdb.iterator(): + skipper += 1 + if skipper <= options.skiptxn: + continue + counter += 1 + if counter > options.maxtxn >= 0: + break + tid = txn.tid + if ts is None: + ts = TimeStamp(tid) + else: + t = TimeStamp(tid) + if t <= ts: + if ok: + print(( + 'Time stamps are out of order %s, %s' % (ts, t)), file=sys.stderr) + ok = False + ts = t.laterThan(ts) + tid = ts.raw() + else: + ts = t + if not ok: + print(( + 'Time stamps are back in order %s' % t), file=sys.stderr) + ok = True + if verbose > 1: + print(ts) + + prof = None + if profilep and (counter % 100) == 0: + prof = profile.Profile() + objects = 0 + size = 0 + newrevids = RevidAccumulator() + t0 = time.time() + dstdb.tpc_begin(txn, tid, txn.status) + t1 = time.time() + for r in txn: + oid = r.oid + objects += 1 + thissize = len(r.data) + size += thissize + if thissize > largest_pickle: + largest_pickle = thissize + if verbose > 1: + if not r.version: + vstr = 'norev' + else: + vstr = r.version + print(utils.U64(oid), vstr, len(r.data)) + oldrevid = prevrevids.get(oid, ZERO) + result = dstdb.store(oid, oldrevid, r.data, r.version, txn) + newrevids.store(oid, result) + t2 = time.time() + result = dstdb.tpc_vote(txn) + t3 = time.time() + newrevids.tpc_vote(result) + prevrevids.update(newrevids.get_dict()) + # Profile every 100 transactions + if prof: + prof.runcall(dstdb.tpc_finish, txn) + else: + dstdb.tpc_finish(txn) + t4 = time.time() + + # record the results + if objects > largest_txn_in_objects: + largest_txn_in_objects = objects + if size > largest_txn_in_size: + largest_txn_in_size = size + if options.timestamps: + tidstr = str(TimeStamp(tid)) + format = "%4d. %26s %6d %8d %5.3f %5.3f %5.3f %5.3f %5.3f" + else: + tidstr = utils.U64(tid) + format = "%4d. 
%20s %6d %8d %6.4f %6.4f %6.4f %6.4f %6.4f" + print(format % (skipper, tidstr, objects, size, + t4-t0, t1-t0, t2-t1, t3-t2, t4-t3), file=outfp) + total_pickle_size += size + total_object_count += objects + + if prof: + prof.create_stats() + fp = open('profile-%02d.txt' % (counter / 100), 'wb') + marshal.dump(prof.stats, fp) + fp.close() + print("Largest pickle: %8d" % largest_pickle, file=outfp) + print("Largest transaction: %8d" % largest_txn_in_size, file=outfp) + print("Largest object count: %8d" % largest_txn_in_objects, file=outfp) + print("Total pickle size: %14d" % total_pickle_size, file=outfp) + print("Total object count: %8d" % total_object_count, file=outfp) + + +# helper to deal with differences between old-style store() return and +# new-style store() return that supports ZEO + +class RevidAccumulator(object): + + def __init__(self): + self.data = {} + + def _update_from_list(self, list): + for oid, serial in list: + if not isinstance(serial, str): + raise serial + self.data[oid] = serial + + def store(self, oid, result): + if isinstance(result, str): + self.data[oid] = result + elif result is not None: + self._update_from_list(result) + + def tpc_vote(self, result): + if result is not None: + self._update_from_list(result) + + def get_dict(self): + return self.data + + +if __name__ == '__main__': + main() diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/scripts/migrateblobs.py b/thesisenv/lib/python3.6/site-packages/ZODB/scripts/migrateblobs.py new file mode 100644 index 0000000..23a99bc --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/scripts/migrateblobs.py @@ -0,0 +1,81 @@ +############################################################################## +# +# Copyright (c) 2008 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## +"""A script to migrate a blob directory into a different layout. +""" +from __future__ import print_function +import logging +import optparse +import os +import shutil + +from ZODB.blob import FilesystemHelper +from ZODB.utils import oid_repr + + +def link_or_copy(f1, f2): + try: + os.link(f1, f2) + except OSError: + shutil.copy(f1, f2) + +# Check if we actually have link +try: + os.link +except AttributeError: + link_or_copy = shutil.copy + + +def migrate(source, dest, layout): + source_fsh = FilesystemHelper(source) + source_fsh.create() + dest_fsh = FilesystemHelper(dest, layout) + dest_fsh.create() + print("Migrating blob data from `%s` (%s) to `%s` (%s)" % ( + source, source_fsh.layout_name, dest, dest_fsh.layout_name)) + for oid, path in source_fsh.listOIDs(): + dest_path = dest_fsh.getPathForOID(oid, create=True) + files = os.listdir(path) + for file in files: + source_file = os.path.join(path, file) + dest_file = os.path.join(dest_path, file) + link_or_copy(source_file, dest_file) + print("\tOID: %s - %s files " % (oid_repr(oid), len(files))) + + +def main(source=None, dest=None, layout="bushy"): + usage = "usage: %prog [options] " + description = ("Create the new directory and migrate all blob " + "data to while using the new for " + "") + + parser = optparse.OptionParser(usage=usage, description=description) + parser.add_option("-l", "--layout", + default=layout, type='choice', + choices=['bushy', 'lawn'], + help="Define the layout to use for the new directory " + "(bushy or lawn). 
Default: %default") + options, args = parser.parse_args() + + if not len(args) == 2: + parser.error("source and destination must be given") + + logging.getLogger().addHandler(logging.StreamHandler()) + logging.getLogger().setLevel(0) + + source, dest = args + migrate(source, dest, options.layout) + + +if __name__ == '__main__': + main() diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/scripts/netspace.py b/thesisenv/lib/python3.6/site-packages/ZODB/scripts/netspace.py new file mode 100644 index 0000000..a1e2514 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/scripts/netspace.py @@ -0,0 +1,126 @@ +#!/usr/bin/env python +"""Report on the net size of objects counting subobjects. + +usage: netspace.py [-P | -v] data.fs + +-P: do a pack first +-v: print info for all objects, even if a traversal path isn't found +""" +from __future__ import print_function +import ZODB +from ZODB.FileStorage import FileStorage +from ZODB.utils import U64, get_pickle_metadata, load_current +from ZODB.serialize import referencesf +from six.moves import filter + +def find_paths(root, maxdist): + """Find Python attribute traversal paths for objects to maxdist distance. + + Starting at a root object, traverse attributes up to distance levels + from the root, looking for persistent objects. Return a dict + mapping oids to traversal paths. + + TODO: Assumes that the keys of the root are not themselves + persistent objects. + + TODO: Doesn't traverse containers. 
+ """ + paths = {} + + # Handle the root as a special case because it's a dict + objs = [] + for k, v in root.items(): + oid = getattr(v, '_p_oid', None) + objs.append((k, v, oid, 0)) + + for path, obj, oid, dist in objs: + if oid is not None: + paths[oid] = path + if dist < maxdist: + getattr(obj, 'foo', None) # unghostify + try: + items = obj.__dict__.items() + except AttributeError: + continue + for k, v in items: + oid = getattr(v, '_p_oid', None) + objs.append(("%s.%s" % (path, k), v, oid, dist + 1)) + + return paths + +def main(path): + fs = FileStorage(path, read_only=1) + if PACK: + fs.pack() + + db = ZODB.DB(fs) + rt = db.open().root() + paths = find_paths(rt, 3) + + def total_size(oid): + cache = {} + cache_size = 1000 + def _total_size(oid, seen): + v = cache.get(oid) + if v is not None: + return v + data, serialno = load_current(fs, oid) + size = len(data) + for suboid in referencesf(data): + if suboid in seen: + continue + seen[suboid] = 1 + size += _total_size(suboid, seen) + cache[oid] = size + if len(cache) == cache_size: + cache.popitem() + return size + return _total_size(oid, {}) + + keys = fs._index.keys() + keys.sort() + keys.reverse() + + if not VERBOSE: + # If not running verbosely, don't print an entry for an object + # unless it has an entry in paths. 
+ keys = filter(paths.has_key, keys) + + fmt = "%8s %5d %8d %s %s.%s" + + for oid in keys: + data, serialno = load_current(fs, oid) + mod, klass = get_pickle_metadata(data) + refs = referencesf(data) + path = paths.get(oid, '-') + print(fmt % (U64(oid), len(data), total_size(oid), path, mod, klass)) + +def Main(): + import sys + import getopt + + global PACK + global VERBOSE + + PACK = 0 + VERBOSE = 0 + try: + opts, args = getopt.getopt(sys.argv[1:], 'Pv') + path, = args + except getopt.error as err: + print(err) + print(__doc__) + sys.exit(2) + except ValueError: + print("expected one argument, got", len(args)) + print(__doc__) + sys.exit(2) + for o, v in opts: + if o == '-P': + PACK = 1 + if o == '-v': + VERBOSE += 1 + main(path) + +if __name__ == "__main__": + Main() diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/scripts/referrers.py b/thesisenv/lib/python3.6/site-packages/ZODB/scripts/referrers.py new file mode 100644 index 0000000..6cc6a40 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/scripts/referrers.py @@ -0,0 +1,27 @@ +############################################################################## +# +# Copyright (c) 2005 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. 
def referrers(storage):
    """Build a referrer table for every object in *storage*.

    Walks every record of every transaction in the (iterable) storage
    and returns a dict mapping each referenced oid to a list of
    (referrer_oid, tid) pairs -- one pair per object revision that
    holds a reference to it, in transaction order.
    """
    table = {}
    for txn in storage.iterator():
        for record in txn:
            for target_oid in referencesf(record.data):
                table.setdefault(target_oid, []).append(
                    (record.oid, record.tid))
    return table
This is a probabilistic way of determining whether + a full backup is necessary. + + -z / --gzip + Compress with gzip the backup files. Uses the default zlib + compression level. By default, gzip compression is not used. + + -k / --kill-old-on-full + If a full backup is created, remove any prior full or incremental + backup files (and associated metadata files) from the repository + directory. + +Options for -R/--recover: + -D str + --date=str + Recover state as of this date. Specify UTC (not local) time. + yyyy-mm-dd[-hh[-mm[-ss]]] + By default, current time is used. + + -o filename + --output=filename + Write recovered ZODB to given file. By default, the file is + written to stdout. + + Note: for the stdout case, the index file will **not** be restored + automatically. + +Options for -V/--verify: + -Q / --quick + Verify file sizes only (skip md5 checksums). +""" +from __future__ import print_function +import os +import shutil +import sys +from six.moves import filter +from hashlib import md5 +import gzip +import time +import errno +import getopt + +from ZODB.FileStorage import FileStorage + +program = sys.argv[0] + +BACKUP = 1 +RECOVER = 2 +VERIFY = 3 + +COMMASPACE = ', ' +READCHUNK = 16 * 1024 +VERBOSE = False + + +class WouldOverwriteFiles(Exception): + pass + + +class NoFiles(Exception): + pass + + +class _GzipCloser(object): + + def __init__(self, fqn, mode): + self._opened = gzip.open(fqn, mode) + + def __enter__(self): + return self._opened + + def __exit__(self, exc_type, exc_value, traceback): + self._opened.close() + + +def usage(code, msg=''): + outfp = sys.stderr + if code == 0: + outfp = sys.stdout + + print(__doc__ % globals(), file=outfp) + if msg: + print(msg, file=outfp) + + sys.exit(code) + + +def log(msg, *args): + if VERBOSE: + # Use stderr here so that -v flag works with -R and no -o + print(msg % args, file=sys.stderr) + + +def error(msg, *args): + print(msg % args, file=sys.stderr) + + +def parseargs(argv): + global VERBOSE + try: + opts, 
args = getopt.getopt(argv, 'BRVvhr:f:FQzkD:o:', + ['backup', + 'recover', + 'verify', + 'verbose', + 'help', + 'repository=', + 'file=', + 'full', + 'quick', + 'gzip', + 'kill-old-on-full', + 'date=', + 'output=', + ]) + except getopt.error as msg: + usage(1, msg) + + class Options(object): + mode = None # BACKUP, RECOVER or VERIFY + file = None # name of input Data.fs file + repository = None # name of directory holding backups + full = False # True forces full backup + date = None # -D argument, if any + output = None # where to write recovered data; None = stdout + quick = False # -Q flag state + gzip = False # -z flag state + killold = False # -k flag state + + options = Options() + + for opt, arg in opts: + if opt in ('-h', '--help'): + usage(0) + elif opt in ('-v', '--verbose'): + VERBOSE = True + elif opt in ('-R', '--recover'): + if options.mode is not None: + usage(1, '-B, -R, and -V are mutually exclusive') + options.mode = RECOVER + elif opt in ('-B', '--backup'): + if options.mode is not None: + usage(1, '-B, -R, and -V are mutually exclusive') + options.mode = BACKUP + elif opt in ('-V', '--verify'): + if options.mode is not None: + usage(1, '-B, -R, and -V are mutually exclusive') + options.mode = VERIFY + elif opt in ('-Q', '--quick'): + options.quick = True + elif opt in ('-f', '--file'): + options.file = arg + elif opt in ('-r', '--repository'): + options.repository = arg + elif opt in ('-F', '--full'): + options.full = True + elif opt in ('-D', '--date'): + options.date = arg + elif opt in ('-o', '--output'): + options.output = arg + elif opt in ('-z', '--gzip'): + options.gzip = True + elif opt in ('-k', '--kill-old-on-full'): + options.killold = True + else: + assert False, (opt, arg) + + # Any other arguments are invalid + if args: + usage(1, 'Invalid arguments: ' + COMMASPACE.join(args)) + + # Sanity checks + if options.mode is None: + usage(1, 'Either --backup, --recover or --verify is required') + if options.repository is None: + usage(1, 
'--repository is required') + if options.mode == BACKUP: + if options.date is not None: + log('--date option is ignored in backup mode') + options.date = None + if options.output is not None: + log('--output option is ignored in backup mode') + options.output = None + elif options.mode == RECOVER: + if options.file is not None: + log('--file option is ignored in recover mode') + options.file = None + if options.killold: + log('--kill-old-on-full option is ignored in recover mode') + options.killold = False + else: + assert options.mode == VERIFY + if options.date is not None: + log("--date option is ignored in verify mode") + options.date = None + if options.output is not None: + log('--output option is ignored in verify mode') + options.output = None + if options.full: + log('--full option is ignored in verify mode') + options.full = False + if options.gzip: + log('--gzip option is ignored in verify mode') + options.gzip = False + if options.file is not None: + log('--file option is ignored in verify mode') + options.file = None + if options.killold: + log('--kill-old-on-full option is ignored in verify mode') + options.killold = False + return options + + +# afile is a Python file object, or created by gzip.open(). The latter +# doesn't have a fileno() method, so to fsync it we need to reach into +# its underlying file object. +def fsync(afile): + afile.flush() + fileobject = getattr(afile, 'fileobj', afile) + os.fsync(fileobject.fileno()) + +# Read bytes (no more than n, or to EOF if n is None) in chunks from the +# current position in file fp. Pass each chunk as an argument to func(). +# Return the total number of bytes read == the total number of bytes +# passed in all to func(). Leaves the file position just after the +# last byte read. 
def dofile(func, fp, n=None):
    """Feed up to *n* bytes of *fp* (all of it if ``n is None``) to func().

    Reads in READCHUNK-sized chunks from the current file position and
    passes each chunk to func().  Returns the total number of bytes
    read, leaving fp positioned just after the last byte consumed.
    """
    bytesread = 0
    while n is None or n > 0:
        if n is None:
            todo = READCHUNK
        else:
            todo = min(READCHUNK, n)
        data = fp.read(todo)
        if not data:
            break
        func(data)
        nread = len(data)
        bytesread += nread
        if n is not None:
            n -= nread
    return bytesread


def checksum(fp, n):
    """Return the md5 hex digest of the first n bytes of fp."""
    sum = md5()
    def func(data):
        sum.update(data)
    dofile(func, fp, n)
    return sum.hexdigest()


def file_size(fp):
    """Return the number of bytes readable from fp (reads to EOF)."""
    def func(data):
        pass
    return dofile(func, fp, None)


def checksum_and_size(fp):
    """Return (md5 hex digest, byte count) for the remainder of fp."""
    sum = md5()
    def func(data):
        sum.update(data)
    size = dofile(func, fp, None)
    return sum.hexdigest(), size


def copyfile(options, dst, start, n):
    """Copy n bytes of options.file, starting at offset *start*, to dst.

    For robustness we first write, flush and fsync to a temp file in
    dst's directory, then rename the temp file over dst at the end.
    Output is gzip-compressed when options.gzip is set.  Returns the
    md5 hex digest of the bytes copied.
    """
    sum = md5()
    tempname = os.path.join(os.path.dirname(dst), 'tmp.tmp')
    # Fix: use context management / try-finally so neither file handle
    # leaks if the copy raises part-way through (the original left both
    # open on error).
    with open(options.file, 'rb') as ifp:
        ifp.seek(start)
        if options.gzip:
            ofp = gzip.open(tempname, 'wb')
        else:
            ofp = open(tempname, 'wb')
        try:
            def func(data):
                sum.update(data)
                ofp.write(data)
            ndone = dofile(func, ifp, n)
            assert ndone == n
            # Flush + fsync before close so the rename publishes a fully
            # durable file.
            fsync(ofp)
        finally:
            ofp.close()
    os.rename(tempname, dst)
    return sum.hexdigest()
+ sum = md5() + def func(data): + sum.update(data) + if ofp: + ofp.write(data) + bytesread = 0 + for f in files: + # Auto uncompress + if f.endswith('fsz'): + ifp = gzip.open(f, 'rb') + else: + ifp = open(f, 'rb') + bytesread += dofile(func, ifp) + ifp.close() + if ofp: + ofp.close() + return bytesread, sum.hexdigest() + + +def gen_filedate(options): + return getattr(options, 'test_now', time.gmtime()[:6]) + +def gen_filename(options, ext=None, now=None): + if ext is None: + if options.full: + ext = '.fs' + else: + ext = '.deltafs' + if options.gzip: + ext += 'z' + # Hook for testing + if now is None: + now = gen_filedate(options) + t = now + (ext,) + return '%04d-%02d-%02d-%02d-%02d-%02d%s' % t + +# Return a list of files needed to reproduce state at time options.date. +# This is a list, in chronological order, of the .fs[z] and .deltafs[z] +# files, from the time of the most recent full backup preceding +# options.date, up to options.date. + +import re +is_data_file = re.compile(r'\d{4}(?:-\d\d){5}\.(?:delta)?fsz?$').match +del re + +def find_files(options): + when = options.date + if not when: + when = gen_filename(options, ext='') + log('looking for files between last full backup and %s...', when) + # newest file first + all = sorted( + filter(is_data_file, os.listdir(options.repository)), reverse=True) + # Find the last full backup before date, then include all the + # incrementals between that full backup and "when". 
+ needed = [] + for fname in all: + root, ext = os.path.splitext(fname) + if root <= when: + needed.append(fname) + if ext in ('.fs', '.fsz'): + break + # Make the file names relative to the repository directory + needed = [os.path.join(options.repository, f) for f in needed] + # Restore back to chronological order + needed.reverse() + if needed: + log('files needed to recover state as of %s:', when) + for f in needed: + log('\t%s', f) + else: + log('no files found') + return needed + +# Scan the .dat file corresponding to the last full backup performed. +# Return +# +# filename, startpos, endpos, checksum +# +# of the last incremental. If there is no .dat file, or the .dat file +# is empty, return +# +# None, None, None, None + +def scandat(repofiles): + fullfile = repofiles[0] + datfile = os.path.splitext(fullfile)[0] + '.dat' + fn = startpos = endpos = sum = None # assume .dat file missing or empty + try: + fp = open(datfile) + except IOError as e: + if e.errno != errno.ENOENT: + raise + else: + # We only care about the last one. 
+ lines = fp.readlines() + fp.close() + if lines: + fn, startpos, endpos, sum = lines[-1].split() + startpos = int(startpos) + endpos = int(endpos) + + return fn, startpos, endpos, sum + +def delete_old_backups(options): + # Delete all full backup files except for the most recent full backup file + all = sorted(filter(is_data_file, os.listdir(options.repository))) + + deletable = [] + full = [] + for fname in all: + root, ext = os.path.splitext(fname) + if ext in ('.fs', '.fsz'): + full.append(fname) + if ext in ('.fs', '.fsz', '.deltafs', '.deltafsz'): + deletable.append(fname) + + # keep most recent full + if not full: + return + + recentfull = full.pop(-1) + deletable.remove(recentfull) + root, ext = os.path.splitext(recentfull) + dat = root + '.dat' + if dat in deletable: + deletable.remove(dat) + index = root + '.index' + if index in deletable: + deletable.remove(index) + + for fname in deletable: + log('removing old backup file %s (and .dat / .index)', fname) + root, ext = os.path.splitext(fname) + try: + os.unlink(os.path.join(options.repository, root + '.dat')) + except OSError: + pass + try: + os.unlink(os.path.join(options.repository, root + '.index')) + except OSError: + pass + os.unlink(os.path.join(options.repository, fname)) + +def do_full_backup(options): + options.full = True + tnow = gen_filedate(options) + dest = os.path.join(options.repository, gen_filename(options, now=tnow)) + if os.path.exists(dest): + raise WouldOverwriteFiles('Cannot overwrite existing file: %s' % dest) + # Find the file position of the last completed transaction. + fs = FileStorage(options.file, read_only=True) + # Note that the FileStorage ctor calls read_index() which scans the file + # and returns "the position just after the last valid transaction record". + # getSize() then returns this position, which is exactly what we want, + # because we only want to copy stuff from the beginning of the file to the + # last valid transaction record. 
+ pos = fs.getSize() + # Save the storage index into the repository + index_file = os.path.join(options.repository, + gen_filename(options, '.index', tnow)) + log('writing index') + fs._index.save(pos, index_file) + fs.close() + log('writing full backup: %s bytes to %s', pos, dest) + sum = copyfile(options, dest, 0, pos) + # Write the data file for this full backup + datfile = os.path.splitext(dest)[0] + '.dat' + fp = open(datfile, 'w') + print(dest, 0, pos, sum, file=fp) + fp.flush() + os.fsync(fp.fileno()) + fp.close() + if options.killold: + delete_old_backups(options) + + +def do_incremental_backup(options, reposz, repofiles): + options.full = False + tnow = gen_filedate(options) + dest = os.path.join(options.repository, gen_filename(options, now=tnow)) + if os.path.exists(dest): + raise WouldOverwriteFiles('Cannot overwrite existing file: %s' % dest) + # Find the file position of the last completed transaction. + fs = FileStorage(options.file, read_only=True) + # Note that the FileStorage ctor calls read_index() which scans the file + # and returns "the position just after the last valid transaction record". + # getSize() then returns this position, which is exactly what we want, + # because we only want to copy stuff from the beginning of the file to the + # last valid transaction record. + pos = fs.getSize() + log('writing index') + index_file = os.path.join(options.repository, + gen_filename(options, '.index', tnow)) + fs._index.save(pos, index_file) + fs.close() + log('writing incremental: %s bytes to %s', pos-reposz, dest) + sum = copyfile(options, dest, reposz, pos - reposz) + # The first file in repofiles points to the last full backup. Use this to + # get the .dat file and append the information for this incrementatl to + # that file. + fullfile = repofiles[0] + datfile = os.path.splitext(fullfile)[0] + '.dat' + # This .dat file better exist. Let the exception percolate if not. 
+ fp = open(datfile, 'a') + print(dest, reposz, pos, sum, file=fp) + fp.flush() + os.fsync(fp.fileno()) + fp.close() + + +def do_backup(options): + repofiles = find_files(options) + # See if we need to do a full backup + if options.full or not repofiles: + log('doing a full backup') + do_full_backup(options) + return + srcsz = os.path.getsize(options.file) + if options.quick: + fn, startpos, endpos, sum = scandat(repofiles) + # If the .dat file was missing, or was empty, do a full backup + if (fn, startpos, endpos, sum) == (None, None, None, None): + log('missing or empty .dat file (full backup)') + do_full_backup(options) + return + # Has the file shrunk, possibly because of a pack? + if srcsz < endpos: + log('file shrunk, possibly because of a pack (full backup)') + do_full_backup(options) + return + # Now check the md5 sum of the source file, from the last + # incremental's start and stop positions. + srcfp = open(options.file, 'rb') + srcfp.seek(startpos) + srcsum = checksum(srcfp, endpos-startpos) + srcfp.close() + log('last incremental file: %s', fn) + log('last incremental checksum: %s', sum) + log('source checksum range: [%s..%s], sum: %s', + startpos, endpos, srcsum) + if sum == srcsum: + if srcsz == endpos: + log('No changes, nothing to do') + return + log('doing incremental, starting at: %s', endpos) + do_incremental_backup(options, endpos, repofiles) + return + else: + # This was is much slower, and more disk i/o intensive, but it's also + # more accurate since it checks the actual existing files instead of + # the information in the .dat file. + # + # See if we can do an incremental, based on the files that already + # exist. This call of concat() will not write an output file. + reposz, reposum = concat(repofiles) + log('repository state: %s bytes, md5: %s', reposz, reposum) + # Get the md5 checksum of the source file, up to two file positions: + # the entire size of the file, and up to the file position of the last + # incremental backup. 
+ srcfp = open(options.file, 'rb') + srcsum = checksum(srcfp, srcsz) + srcfp.seek(0) + srcsum_backedup = checksum(srcfp, reposz) + srcfp.close() + log('current state : %s bytes, md5: %s', srcsz, srcsum) + log('backed up state : %s bytes, md5: %s', reposz, srcsum_backedup) + # Has nothing changed? + if srcsz == reposz and srcsum == reposum: + log('No changes, nothing to do') + return + # Has the file shrunk, probably because of a pack? + if srcsz < reposz: + log('file shrunk, possibly because of a pack (full backup)') + do_full_backup(options) + return + # The source file is larger than the repository. If the md5 checksums + # match, then we know we can do an incremental backup. If they don't, + # then perhaps the file was packed at some point (or a + # non-transactional undo was performed, but this is deprecated). Only + # do a full backup if forced to. + if reposum == srcsum_backedup: + log('doing incremental, starting at: %s', reposz) + do_incremental_backup(options, reposz, repofiles) + return + # The checksums don't match, meaning the front of the source file has + # changed. We'll need to do a full backup in that case. 
+ log('file changed, possibly because of a pack (full backup)') + do_full_backup(options) + + +def do_recover(options): + # Find the first full backup at or before the specified date + repofiles = find_files(options) + if not repofiles: + if options.date: + raise NoFiles('No files in repository before %s', options.date) + else: + raise NoFiles('No files in repository') + if options.output is None: + log('Recovering file to stdout') + outfp = sys.stdout + else: + log('Recovering file to %s', options.output) + outfp = open(options.output, 'wb') + reposz, reposum = concat(repofiles, outfp) + if outfp != sys.stdout: + outfp.close() + log('Recovered %s bytes, md5: %s', reposz, reposum) + + if options.output is not None: + last_base = os.path.splitext(repofiles[-1])[0] + source_index = '%s.index' % last_base + target_index = '%s.index' % options.output + if os.path.exists(source_index): + log('Restoring index file %s to %s', source_index, target_index) + shutil.copyfile(source_index, target_index) + else: + log('No index file to restore: %s', source_index) + + +def do_verify(options): + # Verify the sizes and checksums of all files mentioned in the .dat file + repofiles = find_files(options) + if not repofiles: + raise NoFiles('No files in repository') + datfile = os.path.splitext(repofiles[0])[0] + '.dat' + with open(datfile) as fp: + for line in fp: + fn, startpos, endpos, sum = line.split() + startpos = int(startpos) + endpos = int(endpos) + filename = os.path.join(options.repository, + os.path.basename(fn)) + expected_size = endpos - startpos + log("Verifying %s", filename) + try: + if filename.endswith('fsz'): + actual_sum, size = get_checksum_and_size_of_gzipped_file(filename, options.quick) + when_uncompressed = ' (when uncompressed)' + else: + actual_sum, size = get_checksum_and_size_of_file(filename, options.quick) + when_uncompressed = '' + except IOError: + error("%s is missing", filename) + continue + if size != expected_size: + error("%s is %d bytes%s, 
should be %d bytes", filename, + size, when_uncompressed, expected_size) + elif not options.quick: + if actual_sum != sum: + error("%s has checksum %s%s instead of %s", filename, + actual_sum, when_uncompressed, sum) + + +def get_checksum_and_size_of_gzipped_file(filename, quick): + with _GzipCloser(filename, 'rb') as fp: + if quick: + return None, file_size(fp) + else: + return checksum_and_size(fp) + + +def get_checksum_and_size_of_file(filename, quick): + with open(filename, 'rb') as fp: + fp.seek(0, 2) + actual_size = fp.tell() + if quick: + actual_sum = None + else: + fp.seek(0) + actual_sum = checksum(fp, actual_size) + return actual_sum, actual_size + + +def main(argv=None): + if argv is None: + argv = sys.argv[1:] + options = parseargs(argv) + if options.mode == BACKUP: + try: + do_backup(options) + except WouldOverwriteFiles as e: + sys.exit(str(e)) + elif options.mode == RECOVER: + try: + do_recover(options) + except NoFiles as e: + sys.exit(str(e)) + else: + assert options.mode == VERIFY + try: + do_verify(options) + except NoFiles as e: + sys.exit(str(e)) + + +if __name__ == '__main__': + main() diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/scripts/space.py b/thesisenv/lib/python3.6/site-packages/ZODB/scripts/space.py new file mode 100644 index 0000000..60a671a --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/scripts/space.py @@ -0,0 +1,60 @@ +#!/usr/bin/env python +"""Report on the space used by objects in a storage. + +usage: space.py data.fs + +The current implementation only supports FileStorage. + +Current limitations / simplifications: Ignores revisions and versions. 
+""" +from __future__ import print_function +from ZODB.FileStorage import FileStorage +from ZODB.utils import U64, get_pickle_metadata, load_current +import six + +def run(path, v=0): + fs = FileStorage(path, read_only=1) + # break into the file implementation + if hasattr(fs._index, 'iterkeys'): + iter = six.iterkeys(fs._index) + else: + iter = fs._index.keys() + totals = {} + for oid in iter: + data, serialno = load_current(fs, oid) + mod, klass = get_pickle_metadata(data) + key = "%s.%s" % (mod, klass) + bytes, count = totals.get(key, (0, 0)) + bytes += len(data) + count += 1 + totals[key] = bytes, count + if v: + print("%8s %5d %s" % (U64(oid), len(data), key)) + L = totals.items() + L.sort(lambda a, b: cmp(a[1], b[1])) + L.reverse() + print("Totals per object class:") + for key, (bytes, count) in L: + print("%8d %8d %s" % (count, bytes, key)) + +def main(): + import sys + import getopt + try: + opts, args = getopt.getopt(sys.argv[1:], "v") + except getopt.error as msg: + print(msg) + print("usage: space.py [-v] Data.fs") + sys.exit(2) + if len(args) != 1: + print("usage: space.py [-v] Data.fs") + sys.exit(2) + v = 0 + for o, a in opts: + if o == "-v": + v += 1 + path = args[0] + run(path, v) + +if __name__ == "__main__": + main() diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/scripts/tests/__init__.py b/thesisenv/lib/python3.6/site-packages/ZODB/scripts/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/scripts/tests/fstail.txt b/thesisenv/lib/python3.6/site-packages/ZODB/scripts/tests/fstail.txt new file mode 100644 index 0000000..7d3d441 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/scripts/tests/fstail.txt @@ -0,0 +1,41 @@ +==================== +The `fstail` utility +==================== + +The `fstail` utility shows information for a FileStorage about the last `n` +transactions: + +We have to prepare a FileStorage first: + + >>> from ZODB.FileStorage import 
FileStorage
+    >>> from ZODB.DB import DB
+    >>> import transaction
+    >>> from tempfile import mktemp
+    >>> storagefile = mktemp(suffix='.fs')
+    >>> base_storage = FileStorage(storagefile)
+    >>> database = DB(base_storage)
+    >>> connection1 = database.open()
+    >>> root = connection1.root()
+    >>> root['foo'] = 1
+    >>> transaction.commit()
+
+Now let's have a look at the last transactions of this FileStorage:
+
+    >>> from ZODB.scripts.fstail import main
+    >>> main(storagefile, 5)
+    2007-11-10 15:18:48.543001: hash=b16422d09fabdb45d4e4325e4b42d7d6f021d3c3
+    user='' description='' length=132 offset=162 (+23)
+
+    2007-11-10 15:18:48.543001: hash=b16422d09fabdb45d4e4325e4b42d7d6f021d3c3
+    user='' description='initial database creation' length=150 offset=4 (+48)
+
+
+Now clean up the storage again:
+
+    >>> import os
+    >>> connection1.close()
+    >>> base_storage.close()
+    >>> os.unlink(storagefile)
+    >>> os.unlink(storagefile+'.index')
+    >>> os.unlink(storagefile+'.lock')
+    >>> os.unlink(storagefile+'.tmp')
diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/scripts/tests/referrers.txt b/thesisenv/lib/python3.6/site-packages/ZODB/scripts/tests/referrers.txt
new file mode 100644
index 0000000..88204fb
--- /dev/null
+++ b/thesisenv/lib/python3.6/site-packages/ZODB/scripts/tests/referrers.txt
@@ -0,0 +1,43 @@
+Getting Object Referrers
+========================
+
+The referrers module provides a way to get object referrers. It
+provides a referrers method that takes an iterable storage object. It
+returns a dictionary mapping object ids to lists of referrer object
+versions, where each version is a tuple of an object id and serial
+number.
+ +To see how this works, we'll create a small database: + + >>> import transaction + >>> from persistent.mapping import PersistentMapping + >>> from ZODB.FileStorage import FileStorage + >>> from ZODB.DB import DB + >>> import os, tempfile + >>> dest = tempfile.mkdtemp() + >>> fs = FileStorage(os.path.join(dest, 'Data.fs')) + >>> db = DB(fs) + >>> conn = db.open() + >>> conn.root()['a'] = PersistentMapping() + >>> conn.root()['b'] = PersistentMapping() + >>> transaction.commit() + >>> roid = conn.root()._p_oid + >>> aoid = conn.root()['a']._p_oid + >>> boid = conn.root()['b']._p_oid + >>> s1 = conn.root()['b']._p_serial + + >>> conn.root()['a']['b'] = conn.root()['b'] + >>> transaction.commit() + >>> s2 = conn.root()['a']._p_serial + +Now we'll get the storage and compute the referrers: + + >>> import ZODB.scripts.referrers + >>> referrers = ZODB.scripts.referrers.referrers(fs) + + >>> referrers[boid] == [(roid, s1), (aoid, s2)] + True + +.. Cleanup + + >>> db.close() diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/scripts/tests/test_doc.py b/thesisenv/lib/python3.6/site-packages/ZODB/scripts/tests/test_doc.py new file mode 100644 index 0000000..acb6334 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/scripts/tests/test_doc.py @@ -0,0 +1,48 @@ +############################################################################## +# +# Copyright (c) 2004 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. 
+# +############################################################################## +import doctest +import re +import unittest +import ZODB.tests.util +import zope.testing.renormalizing + +checker = zope.testing.renormalizing.RENormalizing([ + (re.compile( + r'[0-9]{4}-[0-9]{2}-[0-9]{2} [0-9]{2}:[0-9]{2}:[0-9]{2}\.[0-9]+'), + '2007-11-10 15:18:48.543001'), + (re.compile('hash=[0-9a-f]{40}'), + 'hash=b16422d09fabdb45d4e4325e4b42d7d6f021d3c3'), + # Python 3 bytes add a "b". + (re.compile("b('.*?')"), r"\1"), + (re.compile('b(".*?")'), r"\1"), + # Python 3 produces larger pickles, even when we use zodbpickle :( + # this changes all the offsets and sizes in fstail.txt + (re.compile("user='' description='' " + r"length=[0-9]+ offset=[0-9]+ \(\+23\)"), + "user='' description='' " + "length= offset= (+23)"), + (re.compile("user='' description='initial database creation' " + r"length=[0-9]+ offset=4 \(\+48\)"), + "user='' description='initial database creation' " + "length= offset=4 (+48)"), +]) + +def test_suite(): + return unittest.TestSuite(( + doctest.DocFileSuite( + 'referrers.txt', + 'fstail.txt', + setUp=ZODB.tests.util.setUp, tearDown=ZODB.tests.util.tearDown, + checker=checker), + )) diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/scripts/tests/test_fstest.py b/thesisenv/lib/python3.6/site-packages/ZODB/scripts/tests/test_fstest.py new file mode 100644 index 0000000..cf08065 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/scripts/tests/test_fstest.py @@ -0,0 +1,55 @@ +############################################################################## +# +# Copyright (c) 2010 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +import doctest +import re +import unittest + +import ZODB +from zope.testing import setupstack +from zope.testing.renormalizing import RENormalizing + +def test_fstest_verbose(): + r""" + >>> db = ZODB.DB('data.fs') + >>> db.close() + >>> import ZODB.scripts.fstest + >>> ZODB.scripts.fstest.main(['data.fs']) + + >>> ZODB.scripts.fstest.main(['data.fs']) + + >>> ZODB.scripts.fstest.main(['-v', 'data.fs']) + ... # doctest: +ELLIPSIS +NORMALIZE_WHITESPACE + 4: transaction tid ... #0 + no errors detected + + >>> ZODB.scripts.fstest.main(['-vvv', 'data.fs']) + ... # doctest: +ELLIPSIS +NORMALIZE_WHITESPACE + 52: object oid 0x0000000000000000 #0 + 4: transaction tid ... #0 + no errors detected + + """ + + +def test_suite(): + checker = RENormalizing([ + # Python 3 drops the u'' prefix on unicode strings + (re.compile(r"u('[^']*')"), r"\1"), + ]) + return unittest.TestSuite([ + doctest.DocTestSuite('ZODB.scripts.fstest', checker=checker), + doctest.DocTestSuite(setUp=setupstack.setUpDirectory, + tearDown=setupstack.tearDown), + ]) + diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/scripts/tests/test_repozo.py b/thesisenv/lib/python3.6/site-packages/ZODB/scripts/tests/test_repozo.py new file mode 100644 index 0000000..a4f2554 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/scripts/tests/test_repozo.py @@ -0,0 +1,1183 @@ +############################################################################## +# +# Copyright (c) 2004-2009 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). 
A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +from __future__ import print_function +import unittest +import os +import sys +from hashlib import md5 + +import ZODB.tests.util # layer used at class scope + +from io import BytesIO, StringIO +if str is bytes: + NativeStringIO = BytesIO +else: + NativeStringIO = StringIO + + +_NOISY = os.environ.get('NOISY_REPOZO_TEST_OUTPUT') + +def _write_file(name, bits, mode='wb'): + with open(name, mode) as f: + f.write(bits) + f.flush() + +def _read_file(name, mode='rb'): + with open(name, mode) as f: + return f.read() + + +class OurDB(object): + + _file_name = None + + def __init__(self, dir): + from BTrees.OOBTree import OOBTree + import transaction + self.dir = dir + self.getdb() + conn = self.db.open() + conn.root()['tree'] = OOBTree() + transaction.commit() + self.pos = self.db.storage._pos + self.close() + + def getdb(self): + from ZODB import DB + from ZODB.FileStorage import FileStorage + self._file_name = storage_filename = os.path.join(self.dir, 'Data.fs') + storage = FileStorage(storage_filename) + self.db = DB(storage) + + def gettree(self): + self.getdb() + conn = self.db.open() + return conn.root()['tree'] + + def pack(self): + self.getdb() + self.db.pack() + + def close(self): + if self.db is not None: + self.db.close() + self.db = None + + def mutate(self): + # Make random mutations to the btree in the database. 
+ import random + import transaction + tree = self.gettree() + for dummy in range(100): + if random.random() < 0.6: + tree[random.randrange(100000)] = random.randrange(100000) + else: + keys = tree.keys() + if keys: + del tree[keys[0]] + transaction.commit() + self.pos = self.db.storage._pos + self.maxkey = self.db.storage._oid + self.close() + + +class Test_parseargs(unittest.TestCase): + + def setUp(self): + from ZODB.scripts import repozo + self._old_verbosity = repozo.VERBOSE + self._old_stderr = sys.stderr + repozo.VERBOSE = False + sys.stderr = NativeStringIO() + + def tearDown(self): + from ZODB.scripts import repozo + sys.stderr = self._old_stderr + repozo.VERBOSE = self._old_verbosity + + def test_short(self): + from ZODB.scripts import repozo + options = repozo.parseargs(['-v', '-V', '-r', '/tmp/nosuchdir']) + self.assertTrue(repozo.VERBOSE) + self.assertEqual(options.mode, repozo.VERIFY) + self.assertEqual(options.repository, '/tmp/nosuchdir') + + def test_long(self): + from ZODB.scripts import repozo + options = repozo.parseargs(['--verbose', '--verify', + '--repository=/tmp/nosuchdir']) + self.assertTrue(repozo.VERBOSE) + self.assertEqual(options.mode, repozo.VERIFY) + self.assertEqual(options.repository, '/tmp/nosuchdir') + + def test_help(self): + from ZODB.scripts import repozo + # Note: can't mock sys.stdout in our setUp: if a test fails, + # zope.testrunner will happily print the traceback and failure message + # into our StringIO before running our tearDown. 
+ old_stdout = sys.stdout + sys.stdout = NativeStringIO() + try: + self.assertRaises(SystemExit, repozo.parseargs, ['--help']) + self.assertIn('Usage:', sys.stdout.getvalue()) + finally: + sys.stdout = old_stdout + + def test_bad_option(self): + from ZODB.scripts import repozo + self.assertRaises(SystemExit, repozo.parseargs, + ['--crash-please']) + self.assertIn('option --crash-please not recognized', + sys.stderr.getvalue()) + + def test_bad_argument(self): + from ZODB.scripts import repozo + self.assertRaises(SystemExit, repozo.parseargs, + ['crash', 'please']) + self.assertIn('Invalid arguments: crash, please', + sys.stderr.getvalue()) + + def test_mode_selection(self): + from ZODB.scripts import repozo + options = repozo.parseargs(['-B', '-r', '/tmp/nosuchdir']) + self.assertEqual(options.mode, repozo.BACKUP) + options = repozo.parseargs(['-R', '-r', '/tmp/nosuchdir']) + self.assertEqual(options.mode, repozo.RECOVER) + options = repozo.parseargs(['-V', '-r', '/tmp/nosuchdir']) + self.assertEqual(options.mode, repozo.VERIFY) + + def test_mode_selection_is_mutually_exclusive(self): + from ZODB.scripts import repozo + self.assertRaises(SystemExit, repozo.parseargs, ['-B', '-R']) + self.assertIn('-B, -R, and -V are mutually exclusive', + sys.stderr.getvalue()) + self.assertRaises(SystemExit, repozo.parseargs, ['-R', '-V']) + self.assertRaises(SystemExit, repozo.parseargs, ['-V', '-B']) + + def test_mode_selection_required(self): + from ZODB.scripts import repozo + self.assertRaises(SystemExit, repozo.parseargs, []) + self.assertIn('Either --backup, --recover or --verify is required', + sys.stderr.getvalue()) + + def test_misc_flags(self): + from ZODB.scripts import repozo + options = repozo.parseargs(['-B', '-r', '/tmp/nosuchdir', '-F']) + self.assertTrue(options.full) + options = repozo.parseargs(['-B', '-r', '/tmp/nosuchdir', '-k']) + self.assertTrue(options.killold) + + def test_repo_is_required(self): + from ZODB.scripts import repozo + 
self.assertRaises(SystemExit, repozo.parseargs, ['-B']) + self.assertIn('--repository is required', sys.stderr.getvalue()) + + def test_backup_ignored_args(self): + from ZODB.scripts import repozo + options = repozo.parseargs(['-B', '-r', '/tmp/nosuchdir', '-v', + '-o', '/tmp/ignored.fs', + '-D', '2011-12-13']) + self.assertEqual(options.date, None) + self.assertIn('--date option is ignored in backup mode', + sys.stderr.getvalue()) + self.assertEqual(options.output, None) + self.assertIn('--output option is ignored in backup mode', + sys.stderr.getvalue()) + + def test_recover_ignored_args(self): + from ZODB.scripts import repozo + options = repozo.parseargs(['-R', '-r', '/tmp/nosuchdir', '-v', + '-f', '/tmp/ignored.fs', + '-k']) + self.assertEqual(options.file, None) + self.assertIn('--file option is ignored in recover mode', + sys.stderr.getvalue()) + self.assertEqual(options.killold, False) + self.assertIn('--kill-old-on-full option is ignored in recover mode', + sys.stderr.getvalue()) + + def test_verify_ignored_args(self): + from ZODB.scripts import repozo + options = repozo.parseargs(['-V', '-r', '/tmp/nosuchdir', '-v', + '-o', '/tmp/ignored.fs', + '-D', '2011-12-13', + '-f', '/tmp/ignored.fs', + '-z', '-k', '-F']) + self.assertEqual(options.date, None) + self.assertIn('--date option is ignored in verify mode', + sys.stderr.getvalue()) + self.assertEqual(options.output, None) + self.assertIn('--output option is ignored in verify mode', + sys.stderr.getvalue()) + self.assertEqual(options.full, False) + self.assertIn('--full option is ignored in verify mode', + sys.stderr.getvalue()) + self.assertEqual(options.gzip, False) + self.assertIn('--gzip option is ignored in verify mode', + sys.stderr.getvalue()) + self.assertEqual(options.file, None) + self.assertIn('--file option is ignored in verify mode', + sys.stderr.getvalue()) + self.assertEqual(options.killold, False) + self.assertIn('--kill-old-on-full option is ignored in verify mode', + 
sys.stderr.getvalue()) + + +class FileopsBase(object): + + def _makeChunks(self): + from ZODB.scripts.repozo import READCHUNK + return [b'x' * READCHUNK, b'y' * READCHUNK, b'z'] + + def _makeFile(self, text=None): + if text is None: + text = b''.join(self._makeChunks()) + return BytesIO(text) + + +class Test_dofile(unittest.TestCase, FileopsBase): + + def _callFUT(self, func, fp, n): + from ZODB.scripts.repozo import dofile + return dofile(func, fp, n) + + def test_empty_read_all(self): + chunks = [] + file = self._makeFile(b'') + bytes = self._callFUT(chunks.append, file, None) + self.assertEqual(bytes, 0) + self.assertEqual(chunks, []) + + def test_empty_read_count(self): + chunks = [] + file = self._makeFile(b'') + bytes = self._callFUT(chunks.append, file, 42) + self.assertEqual(bytes, 0) + self.assertEqual(chunks, []) + + def test_nonempty_read_all(self): + chunks = [] + file = self._makeFile() + bytes = self._callFUT(chunks.append, file, None) + self.assertEqual(bytes, file.tell()) + self.assertEqual(chunks, self._makeChunks()) + + def test_nonempty_read_count(self): + chunks = [] + file = self._makeFile() + bytes = self._callFUT(chunks.append, file, 42) + self.assertEqual(bytes, 42) + self.assertEqual(chunks, [b'x' * 42]) + + +class Test_checksum(unittest.TestCase, FileopsBase): + + def _callFUT(self, fp, n): + from ZODB.scripts.repozo import checksum + return checksum(fp, n) + + def test_empty_read_all(self): + file = self._makeFile(b'') + sum = self._callFUT(file, None) + self.assertEqual(sum, md5(b'').hexdigest()) + + def test_empty_read_count(self): + file = self._makeFile(b'') + sum = self._callFUT(file, 42) + self.assertEqual(sum, md5(b'').hexdigest()) + + def test_nonempty_read_all(self): + file = self._makeFile() + sum = self._callFUT(file, None) + self.assertEqual(sum, md5(b''.join(self._makeChunks())).hexdigest()) + + def test_nonempty_read_count(self): + chunks = [] + file = self._makeFile() + sum = self._callFUT(file, 42) + self.assertEqual(sum, 
md5(b'x' * 42).hexdigest()) + + +class OptionsTestBase(object): + + _repository_directory = None + _data_directory = None + + def tearDown(self): + if self._repository_directory is not None: + from shutil import rmtree + rmtree(self._repository_directory) + if self._data_directory is not None: + from shutil import rmtree + rmtree(self._data_directory) + + def _makeOptions(self, **kw): + import tempfile + self._repository_directory = tempfile.mkdtemp(prefix='test-repozo-') + class Options(object): + repository = self._repository_directory + date = None + def __init__(self, **kw): + self.__dict__.update(kw) + return Options(**kw) + +class Test_copyfile(OptionsTestBase, unittest.TestCase): + + def _callFUT(self, options, dest, start, n): + from ZODB.scripts.repozo import copyfile + return copyfile(options, dest, start, n) + + def test_no_gzip(self): + options = self._makeOptions(gzip=False) + source = options.file = os.path.join(self._repository_directory, + 'source.txt') + _write_file(source, b'x' * 1000) + target = os.path.join(self._repository_directory, 'target.txt') + sum = self._callFUT(options, target, 0, 100) + self.assertEqual(sum, md5(b'x' * 100).hexdigest()) + self.assertEqual(_read_file(target), b'x' * 100) + + def test_w_gzip(self): + from ZODB.scripts.repozo import _GzipCloser + options = self._makeOptions(gzip=True) + source = options.file = os.path.join(self._repository_directory, + 'source.txt') + _write_file(source, b'x' * 1000) + target = os.path.join(self._repository_directory, 'target.txt') + sum = self._callFUT(options, target, 0, 100) + self.assertEqual(sum, md5(b'x' * 100).hexdigest()) + with _GzipCloser(target, 'rb') as f: + self.assertEqual(f.read(), b'x' * 100) + + +class Test_concat(OptionsTestBase, unittest.TestCase): + + def _callFUT(self, files, ofp): + from ZODB.scripts.repozo import concat + return concat(files, ofp) + + def _makeFile(self, name, text, gzip_file=False): + from ZODB.scripts.repozo import _GzipCloser + import tempfile + 
if self._repository_directory is None: + self._repository_directory = tempfile.mkdtemp() + fqn = os.path.join(self._repository_directory, name) + if gzip_file: + _opener = _GzipCloser + else: + _opener = open + with _opener(fqn, 'wb') as f: + f.write(text) + f.flush() + return fqn + + def test_empty_list_no_ofp(self): + bytes, sum = self._callFUT([], None) + self.assertEqual(bytes, 0) + self.assertEqual(sum, md5(b'').hexdigest()) + + def test_w_plain_files_no_ofp(self): + files = [self._makeFile(x, x.encode(), False) for x in 'ABC'] + bytes, sum = self._callFUT(files, None) + self.assertEqual(bytes, 3) + self.assertEqual(sum, md5(b'ABC').hexdigest()) + + def test_w_gzipped_files_no_ofp(self): + files = [self._makeFile('%s.fsz' % x, x.encode(), True) for x in 'ABC'] + bytes, sum = self._callFUT(files, None) + self.assertEqual(bytes, 3) + self.assertEqual(sum, md5(b'ABC').hexdigest()) + + def test_w_ofp(self): + + class Faux(object): + _closed = False + def __init__(self): + self._written = [] + def write(self, data): + self._written.append(data) + def close(self): + self._closed = True + + files = [self._makeFile(x, x.encode(), False) for x in 'ABC'] + ofp = Faux() + bytes, sum = self._callFUT(files, ofp) + self.assertEqual(ofp._written, [x.encode() for x in 'ABC']) + self.assertTrue(ofp._closed) + +_marker = object() +class Test_gen_filename(OptionsTestBase, unittest.TestCase): + + def _callFUT(self, options, ext=_marker): + from ZODB.scripts.repozo import gen_filename + if ext is _marker: + return gen_filename(options) + return gen_filename(options, ext) + + def test_explicit_ext(self): + options = self._makeOptions(test_now = (2010, 5, 14, 12, 52, 31)) + fn = self._callFUT(options, '.txt') + self.assertEqual(fn, '2010-05-14-12-52-31.txt') + + def test_full_no_gzip(self): + options = self._makeOptions(test_now = (2010, 5, 14, 12, 52, 31), + full = True, + gzip = False, + ) + fn = self._callFUT(options) + self.assertEqual(fn, '2010-05-14-12-52-31.fs') + + def 
test_full_w_gzip(self): + options = self._makeOptions(test_now = (2010, 5, 14, 12, 52, 31), + full = True, + gzip = True, + ) + fn = self._callFUT(options) + self.assertEqual(fn, '2010-05-14-12-52-31.fsz') + + def test_incr_no_gzip(self): + options = self._makeOptions(test_now = (2010, 5, 14, 12, 52, 31), + full = False, + gzip = False, + ) + fn = self._callFUT(options) + self.assertEqual(fn, '2010-05-14-12-52-31.deltafs') + + def test_incr_w_gzip(self): + options = self._makeOptions(test_now = (2010, 5, 14, 12, 52, 31), + full = False, + gzip = True, + ) + fn = self._callFUT(options) + self.assertEqual(fn, '2010-05-14-12-52-31.deltafsz') + + +class Test_find_files(OptionsTestBase, unittest.TestCase): + + def _callFUT(self, options): + from ZODB.scripts.repozo import find_files + return find_files(options) + + def _makeFile(self, hour, min, sec, ext): + # call _makeOptions first! + name = '2010-05-14-%02d-%02d-%02d%s' % (hour, min, sec, ext) + fqn = os.path.join(self._repository_directory, name) + _write_file(fqn, name.encode()) + return fqn + + def test_no_files(self): + options = self._makeOptions(date='2010-05-14-13-30-57') + found = self._callFUT(options) + self.assertEqual(found, []) + + def test_explicit_date(self): + options = self._makeOptions(date='2010-05-14-13-30-57') + files = [] + for h, m, s, e in [(2, 13, 14, '.fs'), + (2, 13, 14, '.dat'), + (3, 14, 15, '.deltafs'), + (4, 14, 15, '.deltafs'), + (5, 14, 15, '.deltafs'), + (12, 13, 14, '.fs'), + (12, 13, 14, '.dat'), + (13, 14, 15, '.deltafs'), + (14, 15, 16, '.deltafs'), + ]: + files.append(self._makeFile(h, m, s, e)) + found = self._callFUT(options) + # Older files, .dat file not included + self.assertEqual(found, [files[5], files[7]]) + + def test_using_gen_filename(self): + options = self._makeOptions(date=None, + test_now=(2010, 5, 14, 13, 30, 57)) + files = [] + for h, m, s, e in [(2, 13, 14, '.fs'), + (2, 13, 14, '.dat'), + (3, 14, 15, '.deltafs'), + (4, 14, 15, '.deltafs'), + (5, 14, 15, 
'.deltafs'), + (12, 13, 14, '.fs'), + (12, 13, 14, '.dat'), + (13, 14, 15, '.deltafs'), + (14, 15, 16, '.deltafs'), + ]: + files.append(self._makeFile(h, m, s, e)) + found = self._callFUT(options) + # Older files, .dat file not included + self.assertEqual(found, [files[5], files[7]]) + + +class Test_scandat(OptionsTestBase, unittest.TestCase): + + def _callFUT(self, repofiles): + from ZODB.scripts.repozo import scandat + return scandat(repofiles) + + def test_no_dat_file(self): + options = self._makeOptions() + fsfile = os.path.join(self._repository_directory, 'foo.fs') + fn, startpos, endpos, sum = self._callFUT([fsfile]) + self.assertEqual(fn, None) + self.assertEqual(startpos, None) + self.assertEqual(endpos, None) + self.assertEqual(sum, None) + + def test_empty_dat_file(self): + options = self._makeOptions() + fsfile = os.path.join(self._repository_directory, 'foo.fs') + datfile = os.path.join(self._repository_directory, 'foo.dat') + _write_file(datfile, b'') + fn, startpos, endpos, sum = self._callFUT([fsfile]) + self.assertEqual(fn, None) + self.assertEqual(startpos, None) + self.assertEqual(endpos, None) + self.assertEqual(sum, None) + + def test_single_line(self): + options = self._makeOptions() + fsfile = os.path.join(self._repository_directory, 'foo.fs') + datfile = os.path.join(self._repository_directory, 'foo.dat') + _write_file(datfile, b'foo.fs 0 123 ABC\n') + fn, startpos, endpos, sum = self._callFUT([fsfile]) + self.assertEqual(fn, 'foo.fs') + self.assertEqual(startpos, 0) + self.assertEqual(endpos, 123) + self.assertEqual(sum, 'ABC') + + def test_multiple_lines(self): + options = self._makeOptions() + fsfile = os.path.join(self._repository_directory, 'foo.fs') + datfile = os.path.join(self._repository_directory, 'foo.dat') + _write_file(datfile, b'foo.fs 0 123 ABC\n' + b'bar.deltafs 123 456 DEF\n') + fn, startpos, endpos, sum = self._callFUT([fsfile]) + self.assertEqual(fn, 'bar.deltafs') + self.assertEqual(startpos, 123) + 
self.assertEqual(endpos, 456) + self.assertEqual(sum, 'DEF') + + +class Test_delete_old_backups(OptionsTestBase, unittest.TestCase): + + def _makeOptions(self, filenames=()): + options = super(Test_delete_old_backups, self)._makeOptions() + for filename in filenames: + fqn = os.path.join(options.repository, filename) + _write_file(fqn, b'testing delete_old_backups') + return options + + def _callFUT(self, options=None, filenames=()): + from ZODB.scripts.repozo import delete_old_backups + if options is None: + options = self._makeOptions(filenames) + return delete_old_backups(options) + + def test_empty_dir_doesnt_raise(self): + self._callFUT() + self.assertEqual(len(os.listdir(self._repository_directory)), 0) + + def test_no_repozo_files_doesnt_raise(self): + FILENAMES = ['bogus.txt', 'not_a_repozo_file'] + self._callFUT(filenames=FILENAMES) + remaining = os.listdir(self._repository_directory) + self.assertEqual(len(remaining), len(FILENAMES)) + for name in FILENAMES: + fqn = os.path.join(self._repository_directory, name) + self.assertTrue(os.path.isfile(fqn)) + + def test_doesnt_remove_current_repozo_files(self): + FILENAMES = ['2009-12-20-10-08-03.fs', + '2009-12-20-10-08-03.dat', + '2009-12-20-10-08-03.index', + ] + self._callFUT(filenames=FILENAMES) + remaining = os.listdir(self._repository_directory) + self.assertEqual(len(remaining), len(FILENAMES)) + for name in FILENAMES: + fqn = os.path.join(self._repository_directory, name) + self.assertTrue(os.path.isfile(fqn)) + + def test_removes_older_repozo_files(self): + OLDER_FULL = ['2009-12-20-00-01-03.fs', + '2009-12-20-00-01-03.dat', + '2009-12-20-00-01-03.index', + ] + DELTAS = ['2009-12-21-00-00-01.deltafs', + '2009-12-21-00-00-01.index', + '2009-12-22-00-00-01.deltafs', + '2009-12-22-00-00-01.index', + ] + CURRENT_FULL = ['2009-12-23-00-00-01.fs', + '2009-12-23-00-00-01.dat', + '2009-12-23-00-00-01.index', + ] + FILENAMES = OLDER_FULL + DELTAS + CURRENT_FULL + self._callFUT(filenames=FILENAMES) + remaining = 
os.listdir(self._repository_directory) + self.assertEqual(len(remaining), len(CURRENT_FULL)) + for name in OLDER_FULL: + fqn = os.path.join(self._repository_directory, name) + self.assertFalse(os.path.isfile(fqn)) + for name in DELTAS: + fqn = os.path.join(self._repository_directory, name) + self.assertFalse(os.path.isfile(fqn)) + for name in CURRENT_FULL: + fqn = os.path.join(self._repository_directory, name) + self.assertTrue(os.path.isfile(fqn)) + + def test_removes_older_repozo_files_zipped(self): + OLDER_FULL = ['2009-12-20-00-01-03.fsz', + '2009-12-20-00-01-03.dat', + '2009-12-20-00-01-03.index', + ] + DELTAS = ['2009-12-21-00-00-01.deltafsz', + '2009-12-21-00-00-01.index', + '2009-12-22-00-00-01.deltafsz', + '2009-12-22-00-00-01.index', + ] + CURRENT_FULL = ['2009-12-23-00-00-01.fsz', + '2009-12-23-00-00-01.dat', + '2009-12-23-00-00-01.index', + ] + FILENAMES = OLDER_FULL + DELTAS + CURRENT_FULL + self._callFUT(filenames=FILENAMES) + remaining = os.listdir(self._repository_directory) + self.assertEqual(len(remaining), len(CURRENT_FULL)) + for name in OLDER_FULL: + fqn = os.path.join(self._repository_directory, name) + self.assertFalse(os.path.isfile(fqn)) + for name in DELTAS: + fqn = os.path.join(self._repository_directory, name) + self.assertFalse(os.path.isfile(fqn)) + for name in CURRENT_FULL: + fqn = os.path.join(self._repository_directory, name) + self.assertTrue(os.path.isfile(fqn)) + + +class Test_do_full_backup(OptionsTestBase, unittest.TestCase): + + def _callFUT(self, options): + from ZODB.scripts.repozo import do_full_backup + return do_full_backup(options) + + def _makeDB(self): + import tempfile + datadir = self._data_directory = tempfile.mkdtemp() + return OurDB(self._data_directory) + + def test_dont_overwrite_existing_file(self): + from ZODB.scripts.repozo import WouldOverwriteFiles + from ZODB.scripts.repozo import gen_filename + db = self._makeDB() + options = self._makeOptions(full=True, + file=db._file_name, + gzip=False, + test_now = 
(2010, 5, 14, 10, 51, 22), + ) + fqn = os.path.join(self._repository_directory, gen_filename(options)) + _write_file(fqn, b'TESTING') + self.assertRaises(WouldOverwriteFiles, self._callFUT, options) + + def test_empty(self): + import struct + from ZODB.scripts.repozo import gen_filename + from ZODB.fsIndex import fsIndex + db = self._makeDB() + options = self._makeOptions(file=db._file_name, + gzip=False, + killold=False, + test_now = (2010, 5, 14, 10, 51, 22), + ) + self._callFUT(options) + target = os.path.join(self._repository_directory, + gen_filename(options)) + original = _read_file(db._file_name) + self.assertEqual(_read_file(target), original) + datfile = os.path.join(self._repository_directory, + gen_filename(options, '.dat')) + self.assertEqual(_read_file(datfile, mode='r'), #XXX 'rb'? + '%s 0 %d %s\n' % + (target, len(original), md5(original).hexdigest())) + ndxfile = os.path.join(self._repository_directory, + gen_filename(options, '.index')) + ndx_info = fsIndex.load(ndxfile) + self.assertEqual(ndx_info['pos'], len(original)) + index = ndx_info['index'] + pZero = struct.pack(">Q", 0) + pOne = struct.pack(">Q", 1) + self.assertEqual(index.minKey(), pZero) + self.assertEqual(index.maxKey(), pOne) + + +class Test_do_incremental_backup(OptionsTestBase, unittest.TestCase): + + def _callFUT(self, options, reposz, repofiles): + from ZODB.scripts.repozo import do_incremental_backup + return do_incremental_backup(options, reposz, repofiles) + + def _makeDB(self): + import tempfile + datadir = self._data_directory = tempfile.mkdtemp() + return OurDB(self._data_directory) + + def test_dont_overwrite_existing_file(self): + from ZODB.scripts.repozo import WouldOverwriteFiles + from ZODB.scripts.repozo import gen_filename + from ZODB.scripts.repozo import find_files + db = self._makeDB() + options = self._makeOptions(full=False, + file=db._file_name, + gzip=False, + test_now = (2010, 5, 14, 10, 51, 22), + date = None, + ) + fqn = 
os.path.join(self._repository_directory, gen_filename(options)) + _write_file(fqn, b'TESTING') + repofiles = find_files(options) + self.assertRaises(WouldOverwriteFiles, + self._callFUT, options, 0, repofiles) + + def test_no_changes(self): + import struct + from ZODB.scripts.repozo import gen_filename + from ZODB.fsIndex import fsIndex + db = self._makeDB() + oldpos = db.pos + options = self._makeOptions(file=db._file_name, + gzip=False, + killold=False, + test_now = (2010, 5, 14, 10, 51, 22), + date = None, + ) + fullfile = os.path.join(self._repository_directory, + '2010-05-14-00-00-00.fs') + original = _read_file(db._file_name) + last = len(original) + _write_file(fullfile, original) + datfile = os.path.join(self._repository_directory, + '2010-05-14-00-00-00.dat') + repofiles = [fullfile, datfile] + self._callFUT(options, oldpos, repofiles) + target = os.path.join(self._repository_directory, + gen_filename(options)) + self.assertEqual(_read_file(target), b'') + self.assertEqual(_read_file(datfile, mode='r'), #XXX mode='rb'? 
+ '%s %d %d %s\n' % + (target, oldpos, oldpos, md5(b'').hexdigest())) + ndxfile = os.path.join(self._repository_directory, + gen_filename(options, '.index')) + ndx_info = fsIndex.load(ndxfile) + self.assertEqual(ndx_info['pos'], oldpos) + index = ndx_info['index'] + pZero = struct.pack(">Q", 0) + pOne = struct.pack(">Q", 1) + self.assertEqual(index.minKey(), pZero) + self.assertEqual(index.maxKey(), pOne) + + def test_w_changes(self): + import struct + from ZODB.scripts.repozo import gen_filename + from ZODB.fsIndex import fsIndex + db = self._makeDB() + oldpos = db.pos + options = self._makeOptions(file=db._file_name, + gzip=False, + killold=False, + test_now = (2010, 5, 14, 10, 51, 22), + date = None, + ) + fullfile = os.path.join(self._repository_directory, + '2010-05-14-00-00-00.fs') + original = _read_file(db._file_name) + f = _write_file(fullfile, original) + datfile = os.path.join(self._repository_directory, + '2010-05-14-00-00-00.dat') + repofiles = [fullfile, datfile] + db.mutate() + newpos = db.pos + self._callFUT(options, oldpos, repofiles) + target = os.path.join(self._repository_directory, + gen_filename(options)) + with open(db._file_name, 'rb') as f: + f.seek(oldpos) + increment = f.read() + self.assertEqual(_read_file(target), increment) + self.assertEqual(_read_file(datfile, mode='r'), #XXX mode='rb'? + '%s %d %d %s\n' % + (target, oldpos, newpos, + md5(increment).hexdigest())) + ndxfile = os.path.join(self._repository_directory, + gen_filename(options, '.index')) + ndx_info = fsIndex.load(ndxfile) + self.assertEqual(ndx_info['pos'], newpos) + index = ndx_info['index'] + pZero = struct.pack(">Q", 0) + self.assertEqual(index.minKey(), pZero) + self.assertEqual(index.maxKey(), db.maxkey) + + +class Test_do_recover(OptionsTestBase, unittest.TestCase): + + def _callFUT(self, options): + from ZODB.scripts.repozo import do_recover + return do_recover(options) + + def _makeFile(self, hour, min, sec, ext, text=None): + # call _makeOptions first! 
+ name = '2010-05-14-%02d-%02d-%02d%s' % (hour, min, sec, ext) + if text is None: + text = name + fqn = os.path.join(self._repository_directory, name) + f = _write_file(fqn, text.encode()) + return fqn + + def test_no_files(self): + from ZODB.scripts.repozo import NoFiles + options = self._makeOptions(date=None, + test_now=(2010, 5, 15, 13, 30, 57)) + self.assertRaises(NoFiles, self._callFUT, options) + + def test_no_files_before_explicit_date(self): + from ZODB.scripts.repozo import NoFiles + options = self._makeOptions(date='2010-05-13-13-30-57') + files = [] + for h, m, s, e in [(2, 13, 14, '.fs'), + (2, 13, 14, '.dat'), + (3, 14, 15, '.deltafs'), + (4, 14, 15, '.deltafs'), + (5, 14, 15, '.deltafs'), + (12, 13, 14, '.fs'), + (12, 13, 14, '.dat'), + (13, 14, 15, '.deltafs'), + (14, 15, 16, '.deltafs'), + ]: + files.append(self._makeFile(h, m, s, e)) + self.assertRaises(NoFiles, self._callFUT, options) + + def test_w_full_backup_latest_no_index(self): + import tempfile + dd = self._data_directory = tempfile.mkdtemp() + output = os.path.join(dd, 'Data.fs') + index = os.path.join(dd, 'Data.fs.index') + options = self._makeOptions(date='2010-05-15-13-30-57', + output=output) + self._makeFile(2, 3, 4, '.fs', 'AAA') + self._makeFile(4, 5, 6, '.fs', 'BBB') + self._callFUT(options) + self.assertEqual(_read_file(output), b'BBB') + + def test_w_full_backup_latest_index(self): + import tempfile + dd = self._data_directory = tempfile.mkdtemp() + output = os.path.join(dd, 'Data.fs') + index = os.path.join(dd, 'Data.fs.index') + options = self._makeOptions(date='2010-05-15-13-30-57', + output=output) + self._makeFile(2, 3, 4, '.fs', 'AAA') + self._makeFile(4, 5, 6, '.fs', 'BBB') + self._makeFile(4, 5, 6, '.index', 'CCC') + self._callFUT(options) + self.assertEqual(_read_file(output), b'BBB') + self.assertEqual(_read_file(index), b'CCC') + + def test_w_incr_backup_latest_no_index(self): + import tempfile + dd = self._data_directory = tempfile.mkdtemp() + output = 
os.path.join(dd, 'Data.fs') + index = os.path.join(dd, 'Data.fs.index') + options = self._makeOptions(date='2010-05-15-13-30-57', + output=output) + self._makeFile(2, 3, 4, '.fs', 'AAA') + self._makeFile(4, 5, 6, '.deltafs', 'BBB') + self._callFUT(options) + self.assertEqual(_read_file(output), b'AAABBB') + + def test_w_incr_backup_latest_index(self): + import tempfile + dd = self._data_directory = tempfile.mkdtemp() + output = os.path.join(dd, 'Data.fs') + index = os.path.join(dd, 'Data.fs.index') + options = self._makeOptions(date='2010-05-15-13-30-57', + output=output) + self._makeFile(2, 3, 4, '.fs', 'AAA') + self._makeFile(4, 5, 6, '.deltafs', 'BBB') + self._makeFile(4, 5, 6, '.index', 'CCC') + self._callFUT(options) + self.assertEqual(_read_file(output), b'AAABBB') + self.assertEqual(_read_file(index), b'CCC') + + +class Test_do_verify(OptionsTestBase, unittest.TestCase): + + def _callFUT(self, options): + from ZODB.scripts import repozo + errors = [] + orig_error = repozo.error + def _error(msg, *args): + errors.append(msg % args) + repozo.error = _error + try: + repozo.do_verify(options) + return errors + finally: + repozo.error = orig_error + + def _makeFile(self, hour, min, sec, ext, text=None): + from ZODB.scripts.repozo import _GzipCloser + assert self._repository_directory, 'call _makeOptions first!' 
+ name = '2010-05-14-%02d-%02d-%02d%s' % (hour, min, sec, ext) + if text is None: + text = name + fqn = os.path.join(self._repository_directory, name) + if ext.endswith('fsz'): + _opener = _GzipCloser + else: + _opener = open + with _opener(fqn, 'wb') as f: + f.write(text.encode()) + f.flush() + return fqn + + def test_no_files(self): + from ZODB.scripts.repozo import NoFiles + options = self._makeOptions() + self.assertRaises(NoFiles, self._callFUT, options) + + def test_all_is_fine(self): + options = self._makeOptions(quick=False) + self._makeFile(2, 3, 4, '.fs', 'AAA') + self._makeFile(4, 5, 6, '.deltafs', 'BBBB') + self._makeFile(2, 3, 4, '.dat', + '/backup/2010-05-14-02-03-04.fs 0 3 e1faffb3e614e6c2fba74296962386b7\n' + '/backup/2010-05-14-04-05-06.deltafs 3 7 f50881ced34c7d9e6bce100bf33dec60\n') + self.assertEqual(self._callFUT(options), []) + + def test_all_is_fine_gzip(self): + options = self._makeOptions(quick=False) + self._makeFile(2, 3, 4, '.fsz', 'AAA') + self._makeFile(4, 5, 6, '.deltafsz', 'BBBB') + self._makeFile(2, 3, 4, '.dat', + '/backup/2010-05-14-02-03-04.fsz 0 3 e1faffb3e614e6c2fba74296962386b7\n' + '/backup/2010-05-14-04-05-06.deltafsz 3 7 f50881ced34c7d9e6bce100bf33dec60\n') + self.assertEqual(self._callFUT(options), []) + + def test_missing_file(self): + options = self._makeOptions(quick=True) + self._makeFile(2, 3, 4, '.fs', 'AAA') + self._makeFile(2, 3, 4, '.dat', + '/backup/2010-05-14-02-03-04.fs 0 3 e1faffb3e614e6c2fba74296962386b7\n' + '/backup/2010-05-14-04-05-06.deltafs 3 7 f50881ced34c7d9e6bce100bf33dec60\n') + self.assertEqual(self._callFUT(options), + [options.repository + os.path.sep + + '2010-05-14-04-05-06.deltafs is missing']) + + def test_missing_file_gzip(self): + options = self._makeOptions(quick=True) + self._makeFile(2, 3, 4, '.fsz', 'AAA') + self._makeFile(2, 3, 4, '.dat', + '/backup/2010-05-14-02-03-04.fsz 0 3 e1faffb3e614e6c2fba74296962386b7\n' + '/backup/2010-05-14-04-05-06.deltafsz 3 7 
f50881ced34c7d9e6bce100bf33dec60\n') + self.assertEqual(self._callFUT(options), + [options.repository + os.path.sep + + '2010-05-14-04-05-06.deltafsz is missing']) + + def test_bad_size(self): + options = self._makeOptions(quick=False) + self._makeFile(2, 3, 4, '.fs', 'AAA') + self._makeFile(4, 5, 6, '.deltafs', 'BBB') + self._makeFile(2, 3, 4, '.dat', + '/backup/2010-05-14-02-03-04.fs 0 3 e1faffb3e614e6c2fba74296962386b7\n' + '/backup/2010-05-14-04-05-06.deltafs 3 7 f50881ced34c7d9e6bce100bf33dec60\n') + self.assertEqual(self._callFUT(options), + [options.repository + os.path.sep + + '2010-05-14-04-05-06.deltafs is 3 bytes,' + ' should be 4 bytes']) + + def test_bad_size_gzip(self): + options = self._makeOptions(quick=False) + self._makeFile(2, 3, 4, '.fsz', 'AAA') + self._makeFile(4, 5, 6, '.deltafsz', 'BBB') + self._makeFile(2, 3, 4, '.dat', + '/backup/2010-05-14-02-03-04.fsz 0 3 e1faffb3e614e6c2fba74296962386b7\n' + '/backup/2010-05-14-04-05-06.deltafsz 3 7 f50881ced34c7d9e6bce100bf33dec60\n') + self.assertEqual(self._callFUT(options), + [options.repository + os.path.sep + + '2010-05-14-04-05-06.deltafsz is 3 bytes (when uncompressed),' + ' should be 4 bytes']) + + def test_bad_checksum(self): + options = self._makeOptions(quick=False) + self._makeFile(2, 3, 4, '.fs', 'AAA') + self._makeFile(4, 5, 6, '.deltafs', 'BbBB') + self._makeFile(2, 3, 4, '.dat', + '/backup/2010-05-14-02-03-04.fs 0 3 e1faffb3e614e6c2fba74296962386b7\n' + '/backup/2010-05-14-04-05-06.deltafs 3 7 f50881ced34c7d9e6bce100bf33dec60\n') + self.assertEqual(self._callFUT(options), + [options.repository + os.path.sep + + '2010-05-14-04-05-06.deltafs has checksum' + ' 36486440db255f0ee6ab109d5d231406 instead of' + ' f50881ced34c7d9e6bce100bf33dec60']) + + def test_bad_checksum_gzip(self): + options = self._makeOptions(quick=False) + self._makeFile(2, 3, 4, '.fsz', 'AAA') + self._makeFile(4, 5, 6, '.deltafsz', 'BbBB') + self._makeFile(2, 3, 4, '.dat', + '/backup/2010-05-14-02-03-04.fsz 0 3 
e1faffb3e614e6c2fba74296962386b7\n' + '/backup/2010-05-14-04-05-06.deltafsz 3 7 f50881ced34c7d9e6bce100bf33dec60\n') + self.assertEqual(self._callFUT(options), + [options.repository + os.path.sep + + '2010-05-14-04-05-06.deltafsz has checksum' + ' 36486440db255f0ee6ab109d5d231406 (when uncompressed) instead of' + ' f50881ced34c7d9e6bce100bf33dec60']) + + def test_quick_ignores_checksums(self): + options = self._makeOptions(quick=True) + self._makeFile(2, 3, 4, '.fs', 'AAA') + self._makeFile(4, 5, 6, '.deltafs', 'BBBB') + self._makeFile(2, 3, 4, '.dat', + '/backup/2010-05-14-02-03-04.fs 0 3 aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\n' + '/backup/2010-05-14-04-05-06.deltafs 3 7 bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb\n') + self.assertEqual(self._callFUT(options), []) + + def test_quick_ignores_checksums_gzip(self): + options = self._makeOptions(quick=True) + self._makeFile(2, 3, 4, '.fsz', 'AAA') + self._makeFile(4, 5, 6, '.deltafsz', 'BBBB') + self._makeFile(2, 3, 4, '.dat', + '/backup/2010-05-14-02-03-04.fsz 0 3 aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\n' + '/backup/2010-05-14-04-05-06.deltafsz 3 7 bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb\n') + self.assertEqual(self._callFUT(options), []) + + +class MonteCarloTests(unittest.TestCase): + + layer = ZODB.tests.util.MininalTestLayer('repozo') + + def setUp(self): + # compute directory names + import tempfile + self.basedir = tempfile.mkdtemp() + self.backupdir = os.path.join(self.basedir, 'backup') + self.datadir = os.path.join(self.basedir, 'data') + self.restoredir = os.path.join(self.basedir, 'restore') + self.copydir = os.path.join(self.basedir, 'copy') + self.currdir = os.getcwd() + # create empty directories + os.mkdir(self.backupdir) + os.mkdir(self.datadir) + os.mkdir(self.restoredir) + os.mkdir(self.copydir) + os.chdir(self.datadir) + self.db = OurDB(self.datadir) + + def tearDown(self): + os.chdir(self.currdir) + import shutil + shutil.rmtree(self.basedir) + + def _callRepozoMain(self, argv): + from ZODB.scripts.repozo import main + 
main(argv) + + @ZODB.tests.util.time_monotonically_increases + def test_via_monte_carlo(self): + self.saved_snapshots = [] # list of (name, time) pairs for copies. + + for i in range(100): + self.mutate_pack_backup(i) + + # Verify snapshots can be reproduced exactly. + for copyname, copytime in self.saved_snapshots: + if _NOISY: + print("Checking that", copyname, end=' ') + print("at", copytime, "is reproducible.") + self.assertRestored(copyname, copytime) + + def mutate_pack_backup(self, i): + import random + from shutil import copyfile + from time import gmtime + from time import sleep + self.db.mutate() + + # Pack about each tenth time. + if random.random() < 0.1: + if _NOISY: + print("packing") + self.db.pack() + self.db.close() + + # Make an incremental backup, half the time with gzip (-z). + argv = ['-BQr', self.backupdir, '-f', 'Data.fs'] + if _NOISY: + argv.insert(0, '-v') + if random.random() < 0.5: + argv.insert(0, '-z') + self._callRepozoMain(argv) + + # Save snapshots to assert that dated restores are possible + if i % 9 == 0: + srcname = os.path.join(self.datadir, 'Data.fs') + copytime = '%04d-%02d-%02d-%02d-%02d-%02d' % (gmtime()[:6]) + copyname = os.path.join(self.copydir, "Data%d.fs" % i) + copyfile(srcname, copyname) + self.saved_snapshots.append((copyname, copytime)) + + # The clock moves forward automatically on calls to time.time() + + # Verify current Data.fs can be reproduced exactly. + self.assertRestored() + + def assertRestored(self, correctpath='Data.fs', when=None): + # Do recovery to time 'when', and check that it's identical to correctpath. 
+ # restore to Restored.fs + restoredfile = os.path.join(self.restoredir, 'Restored.fs') + argv = ['-Rr', self.backupdir, '-o', restoredfile] + if _NOISY: + argv.insert(0, '-v') + if when is not None: + argv.append('-D') + argv.append(when) + self._callRepozoMain(argv) + + # check restored file content is equal to file that was backed up + fguts = _read_file(correctpath) + gguts = _read_file(restoredfile) + msg = ("guts don't match\ncorrectpath=%r when=%r\n cmd=%r" % + (correctpath, when, ' '.join(argv))) + self.assertEqual(fguts, gguts, msg) + + +def test_suite(): + return unittest.TestSuite([ + unittest.makeSuite(Test_parseargs), + unittest.makeSuite(Test_dofile), + unittest.makeSuite(Test_checksum), + unittest.makeSuite(Test_copyfile), + unittest.makeSuite(Test_concat), + unittest.makeSuite(Test_gen_filename), + unittest.makeSuite(Test_find_files), + unittest.makeSuite(Test_scandat), + unittest.makeSuite(Test_delete_old_backups), + unittest.makeSuite(Test_do_full_backup), + unittest.makeSuite(Test_do_incremental_backup), + #unittest.makeSuite(Test_do_backup), #TODO + unittest.makeSuite(Test_do_recover), + unittest.makeSuite(Test_do_verify), + # N.B.: this test take forever to run (~40sec on a fast laptop), + # *and* it is non-deterministic. + unittest.makeSuite(MonteCarloTests), + ]) diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/scripts/zodbload.py b/thesisenv/lib/python3.6/site-packages/ZODB/scripts/zodbload.py new file mode 100644 index 0000000..864a85a --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/scripts/zodbload.py @@ -0,0 +1,835 @@ +#!/usr/bin/env python +############################################################################## +# +# Copyright (c) 2003 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Test script for testing ZODB under a heavy zope-like load. + +Note that, to be as realistic as possible with ZEO, you should run this +script multiple times, to simulate multiple clients. + +Here's how this works. + +The script starts some number of threads. Each thread, sequentially +executes jobs. There is a job producer that produces jobs. + +Input data are provided by a mail producer that hands out message from +a mailbox. + +Execution continues until there is an error, which will normally occur +when the mailbox is exhausted. + +Command-line options are used to provide job definitions. Job +definitions have perameters of the form name=value. Jobs have 2 +standard parameters: + + frequency=integer + + The frequency of the job. The default is 1. + + sleep=float + + The number os seconds to sleep before performing the job. The + default is 0. + +Usage: loadmail2 [options] + + Options: + + -edit [frequency=integer] [sleep=float] + + Define an edit job. An edit job edits a random already-saved + email message, deleting and inserting a random number of words. + + After editing the message, the message is (re)cataloged. + + -insert [number=int] [frequency=integer] [sleep=float] + + Insert some number of email messages. + + -index [number=int] [frequency=integer] [sleep=float] + + Insert and index (catalog) some number of email messages. + + -search [terms='word1 word2 ...'] [frequency=integer] [sleep=float] + + Search the catalog. A query is givem with one or more terms as + would be entered into a typical seach box. If no query is + given, then queries will be randomly selected based on a set of + built-in word list. 
+ + -setup + + Set up the database. This will delete any existing Data.fs + file. (Of course, this may have no effect, if there is a + custom_zodb that defined a different storage.) It also adds a + mail folder and a catalog. + + -options file + + Read options from the given file. Th efile should be a python + source file that defines a sequence of options named 'options'. + + -threads n + + Specify the number of threads to execute. If not specified (< 2), + then jobs are run in a single (main) thread. + + -mbox filename + + Specify the mailbox for getting input data. + + There is a (lame) syntax for providing options within the + filename. The filename may be followed by up to 3 integers, + min, max, and start: + + -mbox 'foo.mbox 0 100 10000' + + The messages from min to max will be read from the mailbox. + They will be assigned message numbers starting with start. + So, in the example above, we read the first hundred messages + and assign thgem message numbers starting with 10001. + + The maxmum can be given as a negative number, in which case, it + specifies the number of messages to read. + + The start defaults to the minimum. 
The following two options: + + -mbox 'foo.mbox 300 400 300' + + and + + -mbox 'foo.mbox 300 -100' + + are equivalent +""" +from __future__ import print_function +import mailbox +import math +import os +import random +import re +import sys +import threading +import time +import transaction + +class JobProducer(object): + + def __init__(self): + self.jobs = [] + + def add(self, callable, frequency, sleep, repeatp=0): + self.jobs.extend([(callable, sleep, repeatp)] * int(frequency)) + random.shuffle(self.jobs) + + def next(self): + factory, sleep, repeatp = random.choice(self.jobs) + time.sleep(sleep) + callable, args = factory.create() + return factory, callable, args, repeatp + + def __nonzero__(self): + return not not self.jobs + + + +class MBox(object): + + def __init__(self, filename): + if ' ' in filename: + filename = filename.split() + if len(filename) < 4: + filename += [0, 0, -1][-(4-len(filename)):] + filename, min, max, start = filename + min = int(min) + max = int(max) + start = int(start) + + if start < 0: + start = min + + if max < 0: + # negative max is treated as a count + self._max = start - max + elif max > 0: + self._max = start + max - min + else: + self._max = 0 + + else: + self._max = 0 + min = start = 0 + + if filename.endswith('.bz2'): + f = os.popen("bunzip2 <"+filename, 'r') + filename = filename[-4:] + else: + f = open(filename) + + self._mbox = mb = mailbox.UnixMailbox(f) + + self.number = start + while min: + next(mb) + min -= 1 + + self._lock = threading.Lock() + self.__name__ = os.path.splitext(os.path.split(filename)[1])[0] + self._max = max + + def next(self): + with self.lock: + if self._max > 0 and self.number >= self._max: + raise IndexError(self.number + 1) + message = next(self._mbox) + message.body = message.fp.read() + message.headers = list(message.headers) + self.number += 1 + message.number = self.number + message.mbox = self.__name__ + return message + +bins = 9973 +#bins = 11 +def mailfolder(app, mboxname, number): + mail 
= getattr(app, mboxname, None) + if mail is None: + app.manage_addFolder(mboxname) + mail = getattr(app, mboxname) + from BTrees.Length import Length + mail.length = Length() + for i in range(bins): + mail.manage_addFolder('b'+str(i)) + bin = hash(str(number))%bins + return getattr(mail, 'b'+str(bin)) + + +def VmSize(): + + try: + with open('/proc/%s/status' % os.getpid()) as f: + lines = f.readlines() + except: + return 0 + else: + l = list(filter(lambda l: l[:7] == 'VmSize:', lines)) + if l: + l = l[0][7:].strip().split()[0] + return int(l) + return 0 + +def setup(lib_python): + try: + os.remove(os.path.join(lib_python, '..', '..', 'var', 'Data.fs')) + except: + pass + import Zope2 + import Products + import AccessControl.SecurityManagement + app=Zope2.app() + + Products.ZCatalog.ZCatalog.manage_addZCatalog(app, 'cat', '') + + from Products.ZCTextIndex.ZCTextIndex import PLexicon + from Products.ZCTextIndex.Lexicon import Splitter, CaseNormalizer + + app.cat._setObject('lex', + PLexicon('lex', '', Splitter(), CaseNormalizer()) + ) + + class extra(object): + doc_attr = 'PrincipiaSearchSource' + lexicon_id = 'lex' + index_type = 'Okapi BM25 Rank' + + app.cat.addIndex('PrincipiaSearchSource', 'ZCTextIndex', extra) + + transaction.commit() + + system = AccessControl.SpecialUsers.system + AccessControl.SecurityManagement.newSecurityManager(None, system) + + app._p_jar.close() + +def do(db, f, args): + """Do something in a transaction, retrying of necessary + + Measure the speed of both the compurartion and the commit + """ + from ZODB.POSException import ConflictError + wcomp = ccomp = wcommit = ccommit = 0.0 + rconflicts = wconflicts = 0 + start = time.time() + + while 1: + connection = db.open() + try: + transaction.begin() + t=time.time() + c=time.clock() + try: + try: + r = f(connection, *args) + except ConflictError: + rconflicts += 1 + transaction.abort() + continue + finally: + wcomp += time.time() - t + ccomp += time.clock() - c + + t=time.time() + 
c=time.clock() + try: + try: + transaction.commit() + break + except ConflictError: + wconflicts += 1 + transaction.abort() + continue + finally: + wcommit += time.time() - t + ccommit += time.clock() - c + finally: + connection.close() + + return start, wcomp, ccomp, rconflicts, wconflicts, wcommit, ccommit, r + +def run1(tid, db, factory, job, args): + (start, wcomp, ccomp, rconflicts, wconflicts, wcommit, ccommit, r + ) = do(db, job, args) + start = "%.4d-%.2d-%.2d %.2d:%.2d:%.2d" % time.localtime(start)[:6] + print("%s %s %8.3g %8.3g %s %s\t%8.3g %8.3g %s %r" % ( + start, tid, wcomp, ccomp, rconflicts, wconflicts, wcommit, ccommit, + factory.__name__, r)) + +def run(jobs, tid=b''): + import Zope2 + while 1: + factory, job, args, repeatp = next(jobs) + run1(tid, Zope2.DB, factory, job, args) + if repeatp: + while 1: + i = random.randint(0,100) + if i > repeatp: + break + run1(tid, Zope2.DB, factory, job, args) + + +def index(connection, messages, catalog, max): + app = connection.root()['Application'] + for message in messages: + mail = mailfolder(app, message.mbox, message.number) + + if max: + # Cheat and use folder implementation secrets + # to avoid having to read the old data + _objects = mail._objects + if len(_objects) >= max: + for d in _objects[:len(_objects)-max+1]: + del mail.__dict__[d['id']] + mail._objects = _objects[len(_objects)-max+1:] + + docid = 'm'+str(message.number) + mail.manage_addDTMLDocument(docid, file=message.body) + + # increment counted + getattr(app, message.mbox).length.change(1) + + doc = mail[docid] + for h in message.headers: + h = h.strip() + l = h.find(':') + if l <= 0: + continue + name = h[:l].lower() + if name=='subject': + name='title' + v = h[l+1:].strip() + type='string' + + if name=='title': + doc.manage_changeProperties(title=h) + else: + try: + doc.manage_addProperty(name, v, type) + except: + pass + if catalog: + app.cat.catalog_object(doc) + + return message.number + +class IndexJob(object): + needs_mbox = 1 + 
catalog = 1 + prefix = 'index' + + def __init__(self, mbox, number=1, max=0): + self.__name__ = "%s%s_%s" % (self.prefix, number, mbox.__name__) + self.mbox, self.number, self.max = mbox, int(number), int(max) + + def create(self): + messages = [next(self.mbox) for i in range(self.number)] + return index, (messages, self.catalog, self.max) + + +class InsertJob(IndexJob): + catalog = 0 + prefix = 'insert' + +wordre = re.compile(r'(\w{3,20})') +stop = 'and', 'not' +def edit(connection, mbox, catalog=1): + app = connection.root()['Application'] + mail = getattr(app, mbox.__name__, None) + if mail is None: + time.sleep(1) + return "No mailbox %s" % mbox.__name__ + + nmessages = mail.length() + if nmessages < 2: + time.sleep(1) + return "No messages to edit in %s" % mbox.__name__ + + # find a message to edit: + while 1: + number = random.randint(1, nmessages-1) + did = 'm' + str(number) + + mail = mailfolder(app, mbox.__name__, number) + doc = getattr(mail, did, None) + if doc is not None: + break + + text = doc.raw.split() + norig = len(text) + if norig > 10: + ndel = int(math.exp(random.randint(0, int(math.log(norig))))) + nins = int(math.exp(random.randint(0, int(math.log(norig))))) + else: + ndel = 0 + nins = 10 + + for j in range(ndel): + j = random.randint(0,len(text)-1) + word = text[j] + m = wordre.search(word) + if m: + word = m.group(1).lower() + if (word not in wordsd) and word not in stop: + words.append(word) + wordsd[word] = 1 + del text[j] + + for j in range(nins): + word = random.choice(words) + text.append(word) + + doc.raw = ' '.join(text) + + if catalog: + app.cat.catalog_object(doc) + + return norig, ndel, nins + +class EditJob(object): + needs_mbox = 1 + prefix = 'edit' + catalog = 1 + + def __init__(self, mbox): + self.__name__ = "%s_%s" % (self.prefix, mbox.__name__) + self.mbox = mbox + + def create(self): + return edit, (self.mbox, self.catalog) + +class ModifyJob(EditJob): + prefix = 'modify' + catalog = 0 + + +def search(connection, terms, 
number): + app = connection.root()['Application'] + cat = app.cat + n = 0 + + for i in number: + term = random.choice(terms) + + results = cat(PrincipiaSearchSource=term) + n += len(results) + for result in results: + obj = result.getObject() + # Apparently, there is a bug in Zope that leads obj to be None + # on occasion. + if obj is not None: + obj.getId() + + return n + +class SearchJob(object): + + def __init__(self, terms='', number=10): + + if terms: + terms = terms.split() + self.__name__ = "search_" + '_'.join(terms) + self.terms = terms + else: + self.__name__ = 'search' + self.terms = words + + number = min(int(number), len(self.terms)) + self.number = list(range(number)) + + def create(self): + return search, (self.terms, self.number) + + +words=['banishment', 'indirectly', 'imprecise', 'peeks', +'opportunely', 'bribe', 'sufficiently', 'Occidentalized', 'elapsing', +'fermenting', 'listen', 'orphanage', 'younger', 'draperies', 'Ida', +'cuttlefish', 'mastermind', 'Michaels', 'populations', 'lent', +'cater', 'attentional', 'hastiness', 'dragnet', 'mangling', +'scabbards', 'princely', 'star', 'repeat', 'deviation', 'agers', +'fix', 'digital', 'ambitious', 'transit', 'jeeps', 'lighted', +'Prussianizations', 'Kickapoo', 'virtual', 'Andrew', 'generally', +'boatsman', 'amounts', 'promulgation', 'Malay', 'savaging', +'courtesan', 'nursed', 'hungered', 'shiningly', 'ship', 'presides', +'Parke', 'moderns', 'Jonas', 'unenlightening', 'dearth', 'deer', +'domesticates', 'recognize', 'gong', 'penetrating', 'dependents', +'unusually', 'complications', 'Dennis', 'imbalances', 'nightgown', +'attached', 'testaments', 'congresswoman', 'circuits', 'bumpers', +'braver', 'Boreas', 'hauled', 'Howe', 'seethed', 'cult', 'numismatic', +'vitality', 'differences', 'collapsed', 'Sandburg', 'inches', 'head', +'rhythmic', 'opponent', 'blanketer', 'attorneys', 'hen', 'spies', +'indispensably', 'clinical', 'redirection', 'submit', 'catalysts', +'councilwoman', 'kills', 'topologies', 
'noxious', 'exactions', +'dashers', 'balanced', 'slider', 'cancerous', 'bathtubs', 'legged', +'respectably', 'crochets', 'absenteeism', 'arcsine', 'facility', +'cleaners', 'bobwhite', 'Hawkins', 'stockade', 'provisional', +'tenants', 'forearms', 'Knowlton', 'commit', 'scornful', +'pediatrician', 'greets', 'clenches', 'trowels', 'accepts', +'Carboloy', 'Glenn', 'Leigh', 'enroll', 'Madison', 'Macon', 'oiling', +'entertainingly', 'super', 'propositional', 'pliers', 'beneficiary', +'hospitable', 'emigration', 'sift', 'sensor', 'reserved', +'colonization', 'shrilled', 'momentously', 'stevedore', 'Shanghaiing', +'schoolmasters', 'shaken', 'biology', 'inclination', 'immoderate', +'stem', 'allegory', 'economical', 'daytime', 'Newell', 'Moscow', +'archeology', 'ported', 'scandals', 'Blackfoot', 'leery', 'kilobit', +'empire', 'obliviousness', 'productions', 'sacrificed', 'ideals', +'enrolling', 'certainties', 'Capsicum', 'Brookdale', 'Markism', +'unkind', 'dyers', 'legislates', 'grotesquely', 'megawords', +'arbitrary', 'laughing', 'wildcats', 'thrower', 'sex', 'devils', +'Wehr', 'ablates', 'consume', 'gossips', 'doorways', 'Shari', +'advanced', 'enumerable', 'existentially', 'stunt', 'auctioneers', +'scheduler', 'blanching', 'petulance', 'perceptibly', 'vapors', +'progressed', 'rains', 'intercom', 'emergency', 'increased', +'fluctuating', 'Krishna', 'silken', 'reformed', 'transformation', +'easter', 'fares', 'comprehensible', 'trespasses', 'hallmark', +'tormenter', 'breastworks', 'brassiere', 'bladders', 'civet', 'death', +'transformer', 'tolerably', 'bugle', 'clergy', 'mantels', 'satin', +'Boswellizes', 'Bloomington', 'notifier', 'Filippo', 'circling', +'unassigned', 'dumbness', 'sentries', 'representativeness', 'souped', +'Klux', 'Kingstown', 'gerund', 'Russell', 'splices', 'bellow', +'bandies', 'beefers', 'cameramen', 'appalled', 'Ionian', 'butterball', +'Portland', 'pleaded', 'admiringly', 'pricks', 'hearty', 'corer', +'deliverable', 'accountably', 'mentors', 'accorded', 
+'acknowledgement', 'Lawrenceville', 'morphology', 'eucalyptus', +'Rena', 'enchanting', 'tighter', 'scholars', 'graduations', 'edges', +'Latinization', 'proficiency', 'monolithic', 'parenthesizing', 'defy', +'shames', 'enjoyment', 'Purdue', 'disagrees', 'barefoot', 'maims', +'flabbergast', 'dishonorable', 'interpolation', 'fanatics', 'dickens', +'abysses', 'adverse', 'components', 'bowl', 'belong', 'Pipestone', +'trainees', 'paw', 'pigtail', 'feed', 'whore', 'conditioner', +'Volstead', 'voices', 'strain', 'inhabits', 'Edwin', 'discourses', +'deigns', 'cruiser', 'biconvex', 'biking', 'depreciation', 'Harrison', +'Persian', 'stunning', 'agar', 'rope', 'wagoner', 'elections', +'reticulately', 'Cruz', 'pulpits', 'wilt', 'peels', 'plants', +'administerings', 'deepen', 'rubs', 'hence', 'dissension', 'implored', +'bereavement', 'abyss', 'Pennsylvania', 'benevolent', 'corresponding', +'Poseidon', 'inactive', 'butchers', 'Mach', 'woke', 'loading', +'utilizing', 'Hoosier', 'undo', 'Semitization', 'trigger', 'Mouthe', +'mark', 'disgracefully', 'copier', 'futility', 'gondola', 'algebraic', +'lecturers', 'sponged', 'instigators', 'looted', 'ether', 'trust', +'feeblest', 'sequencer', 'disjointness', 'congresses', 'Vicksburg', +'incompatibilities', 'commend', 'Luxembourg', 'reticulation', +'instructively', 'reconstructs', 'bricks', 'attache', 'Englishman', +'provocation', 'roughen', 'cynic', 'plugged', 'scrawls', 'antipode', +'injected', 'Daedalus', 'Burnsides', 'asker', 'confronter', +'merriment', 'disdain', 'thicket', 'stinker', 'great', 'tiers', +'oust', 'antipodes', 'Macintosh', 'tented', 'packages', +'Mediterraneanize', 'hurts', 'orthodontist', 'seeder', 'readying', +'babying', 'Florida', 'Sri', 'buckets', 'complementary', +'cartographer', 'chateaus', 'shaves', 'thinkable', 'Tehran', +'Gordian', 'Angles', 'arguable', 'bureau', 'smallest', 'fans', +'navigated', 'dipole', 'bootleg', 'distinctive', 'minimization', +'absorbed', 'surmised', 'Malawi', 'absorbent', 'close', 
'conciseness', +'hopefully', 'declares', 'descent', 'trick', 'portend', 'unable', +'mildly', 'Morse', 'reference', 'scours', 'Caribbean', 'battlers', +'astringency', 'likelier', 'Byronizes', 'econometric', 'grad', +'steak', 'Austrian', 'ban', 'voting', 'Darlington', 'bison', 'Cetus', +'proclaim', 'Gilbertson', 'evictions', 'submittal', 'bearings', +'Gothicizer', 'settings', 'McMahon', 'densities', 'determinants', +'period', 'DeKastere', 'swindle', 'promptness', 'enablers', 'wordy', +'during', 'tables', 'responder', 'baffle', 'phosgene', 'muttering', +'limiters', 'custodian', 'prevented', 'Stouffer', 'waltz', 'Videotex', +'brainstorms', 'alcoholism', 'jab', 'shouldering', 'screening', +'explicitly', 'earner', 'commandment', 'French', 'scrutinizing', +'Gemma', 'capacitive', 'sheriff', 'herbivore', 'Betsey', 'Formosa', +'scorcher', 'font', 'damming', 'soldiers', 'flack', 'Marks', +'unlinking', 'serenely', 'rotating', 'converge', 'celebrities', +'unassailable', 'bawling', 'wording', 'silencing', 'scotch', +'coincided', 'masochists', 'graphs', 'pernicious', 'disease', +'depreciates', 'later', 'torus', 'interject', 'mutated', 'causer', +'messy', 'Bechtel', 'redundantly', 'profoundest', 'autopsy', +'philosophic', 'iterate', 'Poisson', 'horridly', 'silversmith', +'millennium', 'plunder', 'salmon', 'missioner', 'advances', 'provers', +'earthliness', 'manor', 'resurrectors', 'Dahl', 'canto', 'gangrene', +'gabler', 'ashore', 'frictionless', 'expansionism', 'emphasis', +'preservations', 'Duane', 'descend', 'isolated', 'firmware', +'dynamites', 'scrawled', 'cavemen', 'ponder', 'prosperity', 'squaw', +'vulnerable', 'opthalmic', 'Simms', 'unite', 'totallers', 'Waring', +'enforced', 'bridge', 'collecting', 'sublime', 'Moore', 'gobble', +'criticizes', 'daydreams', 'sedate', 'apples', 'Concordia', +'subsequence', 'distill', 'Allan', 'seizure', 'Isadore', 'Lancashire', +'spacings', 'corresponded', 'hobble', 'Boonton', 'genuineness', +'artifact', 'gratuities', 'interviewee', 
'Vladimir', 'mailable', +'Bini', 'Kowalewski', 'interprets', 'bereave', 'evacuated', 'friend', +'tourists', 'crunched', 'soothsayer', 'fleetly', 'Romanizations', +'Medicaid', 'persevering', 'flimsy', 'doomsday', 'trillion', +'carcasses', 'guess', 'seersucker', 'ripping', 'affliction', +'wildest', 'spokes', 'sheaths', 'procreate', 'rusticates', 'Schapiro', +'thereafter', 'mistakenly', 'shelf', 'ruination', 'bushel', +'assuredly', 'corrupting', 'federation', 'portmanteau', 'wading', +'incendiary', 'thing', 'wanderers', 'messages', 'Paso', 'reexamined', +'freeings', 'denture', 'potting', 'disturber', 'laborer', 'comrade', +'intercommunicating', 'Pelham', 'reproach', 'Fenton', 'Alva', 'oasis', +'attending', 'cockpit', 'scout', 'Jude', 'gagging', 'jailed', +'crustaceans', 'dirt', 'exquisitely', 'Internet', 'blocker', 'smock', +'Troutman', 'neighboring', 'surprise', 'midscale', 'impart', +'badgering', 'fountain', 'Essen', 'societies', 'redresses', +'afterwards', 'puckering', 'silks', 'Blakey', 'sequel', 'greet', +'basements', 'Aubrey', 'helmsman', 'album', 'wheelers', 'easternmost', +'flock', 'ambassadors', 'astatine', 'supplant', 'gird', 'clockwork', +'foxes', 'rerouting', 'divisional', 'bends', 'spacer', +'physiologically', 'exquisite', 'concerts', 'unbridled', 'crossing', +'rock', 'leatherneck', 'Fortescue', 'reloading', 'Laramie', 'Tim', +'forlorn', 'revert', 'scarcer', 'spigot', 'equality', 'paranormal', +'aggrieves', 'pegs', 'committeewomen', 'documented', 'interrupt', +'emerald', 'Battelle', 'reconverted', 'anticipated', 'prejudices', +'drowsiness', 'trivialities', 'food', 'blackberries', 'Cyclades', +'tourist', 'branching', 'nugget', 'Asilomar', 'repairmen', 'Cowan', +'receptacles', 'nobler', 'Nebraskan', 'territorial', 'chickadee', +'bedbug', 'darted', 'vigilance', 'Octavia', 'summands', 'policemen', +'twirls', 'style', 'outlawing', 'specifiable', 'pang', 'Orpheus', +'epigram', 'Babel', 'butyrate', 'wishing', 'fiendish', 'accentuate', +'much', 'pulsed', 
'adorned', 'arbiters', 'counted', 'Afrikaner', +'parameterizes', 'agenda', 'Americanism', 'referenda', 'derived', +'liquidity', 'trembling', 'lordly', 'Agway', 'Dillon', 'propellers', +'statement', 'stickiest', 'thankfully', 'autograph', 'parallel', +'impulse', 'Hamey', 'stylistic', 'disproved', 'inquirer', 'hoisting', +'residues', 'variant', 'colonials', 'dequeued', 'especial', 'Samoa', +'Polaris', 'dismisses', 'surpasses', 'prognosis', 'urinates', +'leaguers', 'ostriches', 'calculative', 'digested', 'divided', +'reconfigurer', 'Lakewood', 'illegalities', 'redundancy', +'approachability', 'masterly', 'cookery', 'crystallized', 'Dunham', +'exclaims', 'mainline', 'Australianizes', 'nationhood', 'pusher', +'ushers', 'paranoia', 'workstations', 'radiance', 'impedes', +'Minotaur', 'cataloging', 'bites', 'fashioning', 'Alsop', 'servants', +'Onondaga', 'paragraph', 'leadings', 'clients', 'Latrobe', +'Cornwallis', 'excitingly', 'calorimetric', 'savior', 'tandem', +'antibiotics', 'excuse', 'brushy', 'selfish', 'naive', 'becomes', +'towers', 'popularizes', 'engender', 'introducing', 'possession', +'slaughtered', 'marginally', 'Packards', 'parabola', 'utopia', +'automata', 'deterrent', 'chocolates', 'objectives', 'clannish', +'aspirin', 'ferociousness', 'primarily', 'armpit', 'handfuls', +'dangle', 'Manila', 'enlivened', 'decrease', 'phylum', 'hardy', +'objectively', 'baskets', 'chaired', 'Sepoy', 'deputy', 'blizzard', +'shootings', 'breathtaking', 'sticking', 'initials', 'epitomized', +'Forrest', 'cellular', 'amatory', 'radioed', 'horrified', 'Neva', +'simultaneous', 'delimiter', 'expulsion', 'Himmler', 'contradiction', +'Remus', 'Franklinizations', 'luggage', 'moisture', 'Jews', +'comptroller', 'brevity', 'contradictions', 'Ohio', 'active', +'babysit', 'China', 'youngest', 'superstition', 'clawing', 'raccoons', +'chose', 'shoreline', 'helmets', 'Jeffersonian', 'papered', +'kindergarten', 'reply', 'succinct', 'split', 'wriggle', 'suitcases', +'nonce', 'grinders', 'anthem', 
'showcase', 'maimed', 'blue', 'obeys', +'unreported', 'perusing', 'recalculate', 'rancher', 'demonic', +'Lilliputianize', 'approximation', 'repents', 'yellowness', +'irritates', 'Ferber', 'flashlights', 'booty', 'Neanderthal', +'someday', 'foregoes', 'lingering', 'cloudiness', 'guy', 'consumer', +'Berkowitz', 'relics', 'interpolating', 'reappearing', 'advisements', +'Nolan', 'turrets', 'skeletal', 'skills', 'mammas', 'Winsett', +'wheelings', 'stiffen', 'monkeys', 'plainness', 'braziers', 'Leary', +'advisee', 'jack', 'verb', 'reinterpret', 'geometrical', 'trolleys', +'arboreal', 'overpowered', 'Cuzco', 'poetical', 'admirations', +'Hobbes', 'phonemes', 'Newsweek', 'agitator', 'finally', 'prophets', +'environment', 'easterners', 'precomputed', 'faults', 'rankly', +'swallowing', 'crawl', 'trolley', 'spreading', 'resourceful', 'go', +'demandingly', 'broader', 'spiders', 'Marsha', 'debris', 'operates', +'Dundee', 'alleles', 'crunchier', 'quizzical', 'hanging', 'Fisk'] + +wordsd = {} +for word in words: + wordsd[word] = 1 + + +def collect_options(args, jobs, options): + + while args: + arg = args.pop(0) + if arg.startswith('-'): + name = arg[1:] + if name == 'options': + fname = args.pop(0) + d = {} + with open(fname) as fp: + exec(compile(fp.read(), fname, 'exec'), d) + collect_options(list(d['options']), jobs, options) + elif name in options: + v = args.pop(0) + if options[name] != None: + raise ValueError( + "Duplicate values for %s, %s and %s" + % (name, v, options[name]) + ) + options[name] = v + elif name == 'setup': + options['setup'] = 1 + elif name.capitalize()+'Job' in globals(): + job = name + kw = {} + while args and args[0].find("=") > 0: + arg = args.pop(0).split('=') + name, v = arg[0], '='.join(arg[1:]) + if name in kw: + raise ValueError( + "Duplicate parameter %s for job %s" + % (name, job) + ) + kw[name]=v + if 'frequency' in kw: + frequency = kw['frequency'] + del kw['frequency'] + else: + frequency = 1 + + if 'sleep' in kw: + sleep = 
float(kw['sleep']) + del kw['sleep'] + else: + sleep = 0.0001 + + if 'repeat' in kw: + repeatp = float(kw['repeat']) + del kw['repeat'] + else: + repeatp = 0 + + jobs.append((job, kw, frequency, sleep, repeatp)) + else: + raise ValueError("not an option or job", name) + else: + raise ValueError("Expected an option", arg) + + +def find_lib_python(): + for b in os.getcwd(), os.path.split(sys.argv[0])[0]: + for i in range(6): + d = ['..']*i + ['lib', 'python'] + p = os.path.join(b, *d) + if os.path.isdir(p): + return p + raise ValueError("Couldn't find lib/python") + +def main(args=None): + lib_python = find_lib_python() + sys.path.insert(0, lib_python) + + if args is None: + args = sys.argv[1:] + if not args: + print(__doc__) + sys.exit(0) + + print(args) + random.seed(hash(tuple(args))) # always use the same for the given args + + options = {"mbox": None, "threads": None} + jobdefs = [] + collect_options(args, jobdefs, options) + + mboxes = {} + if options["mbox"]: + mboxes[options["mbox"]] = MBox(options["mbox"]) + + # Perform a ZConfig-based Zope initialization: + zetup(os.path.join(lib_python, '..', '..', 'etc', 'zope.conf')) + + if 'setup' in options: + setup(lib_python) + else: + import Zope2 + Zope2.startup() + + jobs = JobProducer() + for job, kw, frequency, sleep, repeatp in jobdefs: + Job = globals()[job.capitalize()+'Job'] + if getattr(Job, 'needs_mbox', 0): + if "mbox" not in kw: + if not options["mbox"]: + raise ValueError( + "no mailbox (mbox option) file specified") + kw['mbox'] = mboxes[options["mbox"]] + else: + if not mboxes.has_key[kw["mbox"]]: + mboxes[kw['mbox']] = MBox[kw['mbox']] + kw["mbox"] = mboxes[kw['mbox']] + jobs.add(Job(**kw), frequency, sleep, repeatp) + + if not jobs: + print("No jobs to execute") + return + + threads = int(options['threads'] or '0') + if threads > 1: + threads = [threading.Thread(target=run, args=(jobs, i), name=str(i)) + for i in range(threads)] + for thread in threads: + thread.start() + for thread in threads: + 
thread.join() + else: + run(jobs) + + +def zetup(configfile_name): + from Zope.Startup.options import ZopeOptions + from Zope.Startup import handlers as h + from App import config + opts = ZopeOptions() + opts.configfile = configfile_name + opts.realize(args=[]) + h.handleConfig(opts.configroot, opts.confighandlers) + config.setConfiguration(opts.configroot) + from Zope.Startup import dropPrivileges + dropPrivileges(opts.configroot) + + + +if __name__ == '__main__': + main() diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/serialize.py b/thesisenv/lib/python3.6/site-packages/ZODB/serialize.py new file mode 100644 index 0000000..23a01f7 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/serialize.py @@ -0,0 +1,714 @@ +############################################################################## +# +# Copyright (c) 2003 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Support for ZODB object serialization. + +ZODB serializes objects using a custom format based on Python pickles. +When an object is unserialized, it can be loaded as either a ghost or +a real object. A ghost is a persistent object of the appropriate type +but without any state. The first time a ghost is accessed, the +persistence machinery traps access and loads the actual state. A +ghost allows many persistent objects to be loaded while minimizing the +memory consumption of referenced but otherwise unused objects. 
+ +Pickle format +------------- + +ZODB stores serialized objects using a custom format based on pickle. +Each serialized object has two parts: the class description and the +object state. The class description must provide enough information +to call the class's ``__new__`` and create an empty object. Once the +object exists as a ghost, its state is passed to ``__setstate__``. + +The class description can be in a variety of formats, in part to +provide backwards compatibility with earlier versions of Zope. The +four current formats for class description are: + + 1. type(obj) + 2. type(obj), obj.__getnewargs__() + 3. (module name, class name), None + 7. (module name, class name), obj.__getnewargs__() + +The second of these options is used if the object has a __getnewargs__() +method. It is intended to support objects like persistent classes that have +custom C layouts that are determined by arguments to __new__(). The +third and fourth (#3 & #7) apply to instances of a persistent class (which +means the class itself is persistent, not that it's a subclass of +Persistent). + +The type object is usually stored using the standard pickle mechanism, which +involves the pickle GLOBAL opcode (giving the type's module and name as +strings). The type may itself be a persistent object, in which case a +persistent reference (see below) is used. + +It's unclear what "usually" means in the last paragraph. There are two +useful places to concentrate confusion about exactly which formats exist: + +- ObjectReader.getClassName() below returns a dotted "module.class" + string, via actually loading a pickle. This requires that the + implementation of application objects be available. + +- ZODB/utils.py's get_pickle_metadata() tries to return the module and + class names (as strings) without importing any application modules or + classes, via analyzing the pickle. + +Earlier versions of Zope supported several other kinds of class +descriptions. 
The current serialization code reads these descriptions, but +does not write them. The three earlier formats are: + + 4. (module name, class name), __getinitargs__() + 5. class, None + 6. class, __getinitargs__() + +Formats 4 and 6 are used only if the class defines a __getinitargs__() +method, but we really can't tell them apart from formats 7 and 2 +(respectively). Formats 5 and 6 are used if the class does not have a +__module__ attribute (I'm not sure when this applies, but I think it occurs +for some but not all ZClasses). + + +Persistent references +--------------------- + +When one persistent object pickle refers to another persistent object, +the database uses a persistent reference. + +ZODB persistent references are of the form:: + +oid + A simple object reference. + +(oid, class meta data) + A persistent object reference + +[reference_type, args] + An extended reference + + Extension references come in a number of subforms, based on the + reference types. + + The following reference types are defined: + + 'w' + Persistent weak reference. The arguments consist of an oid + and optionally a database name. + + The following are planned for the future: + + 'n' + Multi-database simple object reference. The arguments consist + of a database name, and an object id. + + 'm' + Multi-database persistent object reference. The arguments consist + of a database name, an object id, and class meta data. + +The following legacy format is also supported. + +[oid] + A persistent weak reference + +Because the persistent object reference forms include class +information, it is not possible to change the class of a persistent +object for which this form is used. If a transaction changed the +class of an object, a new record with new class metadata would be +written but all the old references would still use the old class. (It +is possible that we could deal with this limitation in the future.) 
+ +An object id is used alone when a class requires arguments +to it's __new__ method, which is signalled by the class having a +__getnewargs__ attribute. + +A number of legacyforms are defined: + + +""" +import logging + +from persistent import Persistent +from persistent.wref import WeakRefMarker, WeakRef +from ZODB import broken +from ZODB.POSException import InvalidObjectReference +from ZODB._compat import PersistentPickler, PersistentUnpickler, BytesIO +from ZODB._compat import _protocol, binary + + +_oidtypes = bytes, type(None) + + +# Might to update or redo coptimizations to reflect weakrefs: +# from ZODB.coptimizations import new_persistent_id + +def myhasattr(obj, name, _marker=object()): + """Make sure we don't mask exceptions like hasattr(). + + We don't want exceptions other than AttributeError to be masked, + since that too often masks other programming errors. + Three-argument getattr() doesn't mask those, so we use that to + implement our own hasattr() replacement. + """ + return getattr(obj, name, _marker) is not _marker + + +class ObjectWriter(object): + """Serializes objects for storage in the database. + + The ObjectWriter creates object pickles in the ZODB format. It + also detects new persistent objects reachable from the current + object. + """ + + _jar = None + + def __init__(self, obj=None): + self._file = BytesIO() + self._p = PersistentPickler(self.persistent_id, self._file, _protocol) + self._stack = [] + if obj is not None: + self._stack.append(obj) + jar = obj._p_jar + assert myhasattr(jar, "new_oid") + self._jar = jar + + def persistent_id(self, obj): + """Return the persistent id for obj. + + >>> from ZODB.tests.util import P + >>> class DummyJar(object): + ... xrefs = True + ... def new_oid(self): + ... return b'42' + ... def db(self): + ... return self + ... databases = {} + + >>> jar = DummyJar() + >>> class O(object): + ... 
_p_jar = jar + >>> writer = ObjectWriter(O) + + Normally, object references include the oid and a cached named + reference to the class. Having the class information + available allows fast creation of the ghost, avoiding + requiring an additional database lookup. + + >>> bob = P('bob') + >>> oid, cls = writer.persistent_id(bob) + >>> oid + '42' + >>> cls is P + True + + To work with Python 3, the oid in the persistent id is of the + zodbpickle binary type: + + >>> oid.__class__ is binary + True + + + If a persistent object does not already have an oid and jar, + these will be assigned by persistent_id(): + + >>> bob._p_oid + '42' + >>> bob._p_jar is jar + True + + If the object already has a persistent id, the id is not changed: + + >>> bob._p_oid = b'24' + >>> oid, cls = writer.persistent_id(bob) + >>> oid + '24' + >>> cls is P + True + + If the jar doesn't match that of the writer, an error is raised: + + >>> bob._p_jar = DummyJar() + >>> writer.persistent_id(bob) + ... # doctest: +NORMALIZE_WHITESPACE +ELLIPSIS + Traceback (most recent call last): + ... + InvalidObjectReference: + ('Attempt to store an object from a foreign database connection', + , P(bob)) + + Constructor arguments used by __new__(), as returned by + __getnewargs__(), can affect memory allocation, but may also + change over the life of the object. This makes it useless to + cache even the object's class. + + >>> class PNewArgs(P): + ... def __getnewargs__(self): + ... return () + + >>> sam = PNewArgs('sam') + >>> writer.persistent_id(sam) + '42' + >>> sam._p_oid + '42' + >>> sam._p_jar is jar + True + + Check that simple objects don't get accused of persistence: + + >>> writer.persistent_id(42) + >>> writer.persistent_id(object()) + + Check that a classic class doesn't get identified improperly: + + >>> class ClassicClara(object): + ... pass + >>> clara = ClassicClara() + + >>> writer.persistent_id(clara) + """ + + # Most objects are not persistent. 
The following cheap test + # identifies most of them. For these, we return None, + # signalling that the object should be pickled normally. + if not isinstance(obj, (Persistent, type, WeakRef)): + # Not persistent, pickle normally + return None + + # Any persistent object must have an oid: + try: + oid = obj._p_oid + except AttributeError: + # Not persistent, pickle normally + return None + + if not (oid is None or isinstance(oid, bytes)): + # Deserves a closer look: + + # Make sure it's not a descriptor + if hasattr(oid, '__get__'): + # The oid is a descriptor. That means obj is a non-persistent + # class whose instances are persistent, so ... + # Not persistent, pickle normally + return None + + if oid is WeakRefMarker: + # we have a weakref, see weakref.py + + oid = obj.oid + if oid is None: + target = obj() # get the referenced object + oid = target._p_oid + if oid is None: + # Here we are causing the object to be saved in + # the database. One could argue that we shouldn't + # do this, because a weakref should not cause an object + # to be added. We'll be optimistic, though, and + # assume that the object will be added eventually. + + oid = self._jar.new_oid() + target._p_jar = self._jar + target._p_oid = oid + self._stack.append(target) + obj.oid = oid + obj.dm = target._p_jar + obj.database_name = obj.dm.db().database_name + + oid = binary(oid) + if obj.dm is self._jar: + return ['w', (oid, )] + else: + return ['w', (oid, obj.database_name)] + + + # Since we have an oid, we have either a persistent instance + # (an instance of Persistent), or a persistent class. + + # NOTE! Persistent classes don't (and can't) subclass persistent. 
+ + database_name = None + + if oid is None: + oid = obj._p_oid = self._jar.new_oid() + obj._p_jar = self._jar + self._stack.append(obj) + + elif obj._p_jar is not self._jar: + if not self._jar.db().xrefs: + raise InvalidObjectReference( + "Database %r doesn't allow implicit cross-database " + "references" % self._jar.db().database_name, + self._jar, obj) + + try: + otherdb = obj._p_jar.db() + database_name = otherdb.database_name + except AttributeError: + otherdb = self + + if self._jar.db().databases.get(database_name) is not otherdb: + raise InvalidObjectReference( + "Attempt to store an object from a foreign " + "database connection", self._jar, obj, + ) + + if self._jar.get_connection(database_name) is not obj._p_jar: + raise InvalidObjectReference( + "Attempt to store a reference to an object from " + "a separate connection to the same database or " + "multidatabase", self._jar, obj, + ) + + # OK, we have an object from another database. + # Lets make sure the object ws not *just* loaded. + + if obj._p_jar._implicitlyAdding(oid): + raise InvalidObjectReference( + "A new object is reachable from multiple databases. " + "Won't try to guess which one was correct!", + self._jar, obj, + ) + + oid = binary(oid) + klass = type(obj) + if hasattr(klass, '__getnewargs__'): + # We don't want to save newargs in object refs. + # It's possible that __getnewargs__ is degenerate and + # returns (), but we don't want to have to deghostify + # the object to find out. + + # Note that this has the odd effect that, if the class has + # __getnewargs__ of its own, we'll lose the optimization + # of caching the class info. + + if database_name is not None: + return ['n', (database_name, oid)] + + return oid + + # Note that we never get here for persistent classes. + # We'll use direct refs for normal classes. 
+ + if database_name is not None: + return ['m', (database_name, oid, klass)] + + return oid, klass + + def serialize(self, obj): + # We don't use __class__ here, because obj could be a persistent proxy. + # We don't want to be fooled by proxies. + klass = type(obj) + + # We want to serialize persistent classes by name if they have + # a non-None non-empty module so as not to have a direct + # ref. This is important when copying. We probably want to + # revisit this in the future. + newargs = getattr(obj, "__getnewargs__", None) + if (isinstance(getattr(klass, '_p_oid', 0), _oidtypes) + and klass.__module__): + # This is a persistent class with a non-empty module. This + # uses pickle format #3 or #7. + klass = klass.__module__, klass.__name__ + if newargs is None: + meta = klass, None + else: + meta = klass, newargs() + elif newargs is None: + # Pickle format #1. + meta = klass + else: + # Pickle format #2. + meta = klass, newargs() + + return self._dump(meta, obj.__getstate__()) + + def _dump(self, classmeta, state): + # To reuse the existing BytesIO object, we must reset + # the file position to 0 and truncate the file after the + # new pickle is written. + self._file.seek(0) + self._p.clear_memo() + self._p.dump(classmeta) + self._p.dump(state) + self._file.truncate() + return self._file.getvalue() + + def __iter__(self): + return NewObjectIterator(self._stack) + +class NewObjectIterator(object): + + # The pickler is used as a forward iterator when the connection + # is looking for new objects to pickle. 
+ + def __init__(self, stack): + self._stack = stack + + def __iter__(self): + return self + + def __next__(self): + if self._stack: + elt = self._stack.pop() + return elt + else: + raise StopIteration + + next = __next__ + +class ObjectReader(object): + + def __init__(self, conn=None, cache=None, factory=None): + self._conn = conn + self._cache = cache + self._factory = factory + + def _get_class(self, module, name): + return self._factory(self._conn, module, name) + + def _get_unpickler(self, pickle): + file = BytesIO(pickle) + + factory = self._factory + conn = self._conn + + def find_global(modulename, name): + return factory(conn, modulename, name) + unpickler = PersistentUnpickler(find_global, self._persistent_load, file) + + return unpickler + + loaders = {} + + def _persistent_load(self, reference): + if isinstance(reference, tuple): + return self.load_persistent(*reference) + elif isinstance(reference, (bytes, str)): + return self.load_oid(reference) + else: + try: + reference_type, args = reference + except ValueError: + # weakref + return self.loaders['w'](self, *reference) + else: + return self.loaders[reference_type](self, *args) + + def load_persistent(self, oid, klass): + # Quick instance reference. We know all we need to know + # to create the instance w/o hitting the db, so go for it! + + if not isinstance(oid, bytes): + assert isinstance(oid, str) + # this happens on Python 3 when all bytes in the oid are < 0x80 + oid = oid.encode('ascii') + + obj = self._cache.get(oid, None) + if obj is not None: + return obj + + if isinstance(klass, tuple): + klass = self._get_class(*klass) + + if issubclass(klass, broken.Broken): + # We got a broken class. We might need to make it + # PersistentBroken + if not issubclass(klass, broken.PersistentBroken): + klass = broken.persistentBroken(klass) + + try: + obj = klass.__new__(klass) + except TypeError: + # Couldn't create the instance. Maybe there's more + # current data in the object's actual record! 
+ return self._conn.get(oid) + + # TODO: should be done by connection + self._cache.new_ghost(oid, obj) + return obj + + def load_multi_persistent(self, database_name, oid, klass): + conn = self._conn.get_connection(database_name) + # TODO, make connection _cache attr public + reader = ObjectReader(conn, conn._cache, self._factory) + return reader.load_persistent(oid, klass) + + loaders['m'] = load_multi_persistent + + + def load_persistent_weakref(self, oid, database_name=None): + if not isinstance(oid, bytes): + assert isinstance(oid, str) + # this happens on Python 3 when all bytes in the oid are < 0x80 + oid = oid.encode('ascii') + obj = WeakRef.__new__(WeakRef) + obj.oid = oid + if database_name is None: + obj.dm = self._conn + else: + obj.database_name = database_name + try: + obj.dm = self._conn.get_connection(database_name) + except KeyError: + # XXX Not sure what to do here. It seems wrong to + # fail since this is a weak reference. For now we'll + # just pretend that the target object has gone. 
+ pass + return obj + + loaders['w'] = load_persistent_weakref + + def load_oid(self, oid): + if not isinstance(oid, bytes): + assert isinstance(oid, str) + # this happens on Python 3 when all bytes in the oid are < 0x80 + oid = oid.encode('ascii') + obj = self._cache.get(oid, None) + if obj is not None: + return obj + return self._conn.get(oid) + + def load_multi_oid(self, database_name, oid): + conn = self._conn.get_connection(database_name) + # TODO, make connection _cache attr public + reader = ObjectReader(conn, conn._cache, self._factory) + return reader.load_oid(oid) + + loaders['n'] = load_multi_oid + + def getClassName(self, pickle): + unpickler = self._get_unpickler(pickle) + klass = unpickler.load() + if isinstance(klass, tuple): + klass, args = klass + if isinstance(klass, tuple): + # old style reference + return "%s.%s" % klass + return "%s.%s" % (klass.__module__, klass.__name__) + + def getGhost(self, pickle): + unpickler = self._get_unpickler(pickle) + klass = unpickler.load() + if isinstance(klass, tuple): + # Here we have a separate class and args. + # This could be an old record, so the class module ne a named + # refernce + klass, args = klass + if isinstance(klass, tuple): + # Old module_name, class_name tuple + klass = self._get_class(*klass) + + if args is None: + args = () + else: + # Definitely new style direct class reference + args = () + + if issubclass(klass, broken.Broken): + # We got a broken class. 
We might need to make it + # PersistentBroken + if not issubclass(klass, broken.PersistentBroken): + klass = broken.persistentBroken(klass) + + return klass.__new__(klass, *args) + + def getState(self, pickle): + unpickler = self._get_unpickler(pickle) + try: + unpickler.load() # skip the class metadata + return unpickler.load() + except EOFError as msg: + log = logging.getLogger("ZODB.serialize") + log.exception("Unpickling error: %r", pickle) + raise + + def setGhostState(self, obj, pickle): + state = self.getState(pickle) + obj.__setstate__(state) + + +def referencesf(p, oids=None): + """Return a list of object ids found in a pickle + + A list may be passed in, in which case, information is + appended to it. + + Only ordinary internal references are included. + Weak and multi-database references are not included. + """ + + refs = [] + u = PersistentUnpickler(None, refs.append, BytesIO(p)) + u.noload() + u.noload() + + # Now we have a list of referencs. Need to convert to list of + # oids: + + if oids is None: + oids = [] + + for reference in refs: + if isinstance(reference, tuple): + oid = reference[0] + elif isinstance(reference, (bytes, str)): + oid = reference + else: + assert isinstance(reference, list) + continue + + if not isinstance(oid, bytes): + assert isinstance(oid, str) + # this happens on Python 3 when all bytes in the oid are < 0x80 + oid = oid.encode('ascii') + + oids.append(oid) + + return oids + +oid_klass_loaders = { + 'w': lambda oid, database_name=None: None, + } + +def get_refs(a_pickle): + """Return oid and class information for references in a pickle + + The result of a list of oid and class information tuples. + If the reference doesn't contain class information, then the + klass information is None. + """ + + refs = [] + u = PersistentUnpickler(None, refs.append, BytesIO(a_pickle)) + u.noload() + u.noload() + + # Now we have a list of references. 
Need to convert to list of + # oids and class info: + + result = [] + + for reference in refs: + if isinstance(reference, tuple): + oid, klass = reference + elif isinstance(reference, (bytes, str)): + data, klass = reference, None + else: + assert isinstance(reference, list) + continue + + if not isinstance(oid, bytes): + assert isinstance(oid, str) + # this happens on Python 3 when all bytes in the oid are < 0x80 + oid = oid.encode('ascii') + + result.append((oid, klass)) + + return result diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/storage.xml b/thesisenv/lib/python3.6/site-packages/ZODB/storage.xml new file mode 100644 index 0000000..ff459ae --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/storage.xml @@ -0,0 +1,4 @@ + + +
+ diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/subtransactions.txt b/thesisenv/lib/python3.6/site-packages/ZODB/subtransactions.txt new file mode 100644 index 0000000..60291dd --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/subtransactions.txt @@ -0,0 +1,51 @@ +========================= +Subtransactions in ZODB 3 +========================= + +ZODB 3 provides limited support for subtransactions. Subtransactions +are nested to *one* level. There are top-level transactions and +subtransactions. When a transaction is committed, a flag is passed +indicating whether it is a subtransaction or a top-level transaction. +Consider the following exampler commit calls: + +- ``commit()`` + + A regular top-level transaction is committed. + +- ``commit(1)`` + + A subtransaction is committed. There is now one subtransaction of + the current top-level transaction. + +- ``commit(1)`` + + A subtransaction is committed. There are now two subtransactions of + the current top-level transaction. + +- ``abort(1)`` + + A subtransaction is aborted. There are still two subtransactions of + the current top-level transaction; work done since the last + ``commit(1)`` call is discarded. + +- ``commit()`` + + We now commit a top-level transaction. The work done in the previous + two subtransactions *plus* work done since the last ``abort(1)`` call + is saved. + +- ``commit(1)`` + + A subtransaction is committed. There is now one subtransaction of + the current top-level transaction. + +- ``commit(1)`` + + A subtransaction is committed. There are now two subtransactions of + the current top-level transaction. + +- ``abort()`` + + We now abort a top-level transaction. We discard the work done in + the previous two subtransactions *plus* work done since the last + ``commit(1)`` call. 
diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/tests/BasicStorage.py b/thesisenv/lib/python3.6/site-packages/ZODB/tests/BasicStorage.py new file mode 100644 index 0000000..b5519c2 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/tests/BasicStorage.py @@ -0,0 +1,392 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Run the basic tests for a storage as described in the official storage API + +The most complete and most out-of-date description of the interface is: +http://www.zope.org/Documentation/Developer/Models/ZODB/ZODB_Architecture_Storage_Interface_Info.html + +All storages should be able to pass these tests. +""" +from ZODB import POSException +from ZODB.Connection import TransactionMetaData +from ZODB.tests.MinPO import MinPO +from ZODB.tests.StorageTestBase import zodb_unpickle, zodb_pickle + +import threading +import time +import zope.interface +import zope.interface.verify + +from .. 
import utils + +ZERO = b'\0'*8 + +class BasicStorage(object): + def checkBasics(self): + self.assertEqual(self._storage.lastTransaction(), ZERO) + + t = TransactionMetaData() + self._storage.tpc_begin(t) + self.assertRaises(POSException.StorageTransactionError, + self._storage.tpc_begin, t) + # Aborting is easy + self._storage.tpc_abort(t) + # Test a few expected exceptions when we're doing operations giving a + # different Transaction object than the one we've begun on. + self._storage.tpc_begin(t) + self.assertRaises( + POSException.StorageTransactionError, + self._storage.store, + ZERO, ZERO, b'', '', TransactionMetaData()) + + self.assertRaises( + POSException.StorageTransactionError, + self._storage.store, + ZERO, 1, b'2', '', TransactionMetaData()) + + self.assertRaises( + POSException.StorageTransactionError, + self._storage.tpc_vote, TransactionMetaData()) + self._storage.tpc_abort(t) + + def checkSerialIsNoneForInitialRevision(self): + eq = self.assertEqual + oid = self._storage.new_oid() + txn = TransactionMetaData() + self._storage.tpc_begin(txn) + # Use None for serial. Don't use _dostore() here because that coerces + # serial=None to serial=ZERO. + self._storage.store(oid, None, zodb_pickle(MinPO(11)), + '', txn) + self._storage.tpc_vote(txn) + newrevid = self._storage.tpc_finish(txn) + data, revid = utils.load_current(self._storage, oid) + value = zodb_unpickle(data) + eq(value, MinPO(11)) + eq(revid, newrevid) + + def checkStore(self): + revid = ZERO + newrevid = self._dostore(revid=None) + # Finish the transaction. 
+ self.assertNotEqual(newrevid, revid) + + def checkStoreAndLoad(self): + eq = self.assertEqual + oid = self._storage.new_oid() + self._dostore(oid=oid, data=MinPO(7)) + data, revid = utils.load_current(self._storage, oid) + value = zodb_unpickle(data) + eq(value, MinPO(7)) + # Now do a bunch of updates to an object + for i in range(13, 22): + revid = self._dostore(oid, revid=revid, data=MinPO(i)) + # Now get the latest revision of the object + data, revid = utils.load_current(self._storage, oid) + eq(zodb_unpickle(data), MinPO(21)) + + def checkConflicts(self): + oid = self._storage.new_oid() + revid1 = self._dostore(oid, data=MinPO(11)) + self._dostore(oid, revid=revid1, data=MinPO(12)) + self.assertRaises(POSException.ConflictError, + self._dostore, + oid, revid=revid1, data=MinPO(13)) + + def checkWriteAfterAbort(self): + oid = self._storage.new_oid() + t = TransactionMetaData() + self._storage.tpc_begin(t) + self._storage.store(oid, ZERO, zodb_pickle(MinPO(5)), '', t) + # Now abort this transaction + self._storage.tpc_abort(t) + # Now start all over again + oid = self._storage.new_oid() + self._dostore(oid=oid, data=MinPO(6)) + + def checkAbortAfterVote(self): + oid1 = self._storage.new_oid() + revid1 = self._dostore(oid=oid1, data=MinPO(-2)) + oid = self._storage.new_oid() + t = TransactionMetaData() + self._storage.tpc_begin(t) + self._storage.store(oid, ZERO, zodb_pickle(MinPO(5)), '', t) + # Now abort this transaction + self._storage.tpc_vote(t) + self._storage.tpc_abort(t) + # Now start all over again + oid = self._storage.new_oid() + revid = self._dostore(oid=oid, data=MinPO(6)) + + for oid, revid in [(oid1, revid1), (oid, revid)]: + data, _revid = utils.load_current(self._storage, oid) + self.assertEqual(revid, _revid) + + def checkStoreTwoObjects(self): + noteq = self.assertNotEqual + p31, p32, p51, p52 = map(MinPO, (31, 32, 51, 52)) + oid1 = self._storage.new_oid() + oid2 = self._storage.new_oid() + noteq(oid1, oid2) + revid1 = self._dostore(oid1, 
data=p31) + revid2 = self._dostore(oid2, data=p51) + noteq(revid1, revid2) + revid3 = self._dostore(oid1, revid=revid1, data=p32) + revid4 = self._dostore(oid2, revid=revid2, data=p52) + noteq(revid3, revid4) + + def checkGetTid(self): + if not hasattr(self._storage, 'getTid'): + return + eq = self.assertEqual + p41, p42 = map(MinPO, (41, 42)) + oid = self._storage.new_oid() + self.assertRaises(KeyError, self._storage.getTid, oid) + # Now store a revision + revid1 = self._dostore(oid, data=p41) + eq(revid1, self._storage.getTid(oid)) + # And another one + revid2 = self._dostore(oid, revid=revid1, data=p42) + eq(revid2, self._storage.getTid(oid)) + + def checkLen(self): + # len(storage) reports the number of objects. + # check it is zero when empty + self.assertEqual(len(self._storage),0) + # check it is correct when the storage contains two object. + # len may also be zero, for storages that do not keep track + # of this number + self._dostore(data=MinPO(22)) + self._dostore(data=MinPO(23)) + self.assertTrue(len(self._storage) in [0,2]) + + def checkGetSize(self): + self._dostore(data=MinPO(25)) + size = self._storage.getSize() + # The storage API doesn't make any claims about what size + # means except that it ought to be printable. + str(size) + + def checkNote(self): + oid = self._storage.new_oid() + t = TransactionMetaData() + self._storage.tpc_begin(t) + t.note(u'this is a test') + self._storage.store(oid, ZERO, zodb_pickle(MinPO(5)), '', t) + self._storage.tpc_vote(t) + self._storage.tpc_finish(t) + + def checkInterfaces(self): + for iface in zope.interface.providedBy(self._storage): + zope.interface.verify.verifyObject(iface, self._storage) + + def checkMultipleEmptyTransactions(self): + # There was a bug in handling empty transactions in mapping + # storage that caused the commit lock not to be released. 
:( + t = TransactionMetaData() + self._storage.tpc_begin(t) + self._storage.tpc_vote(t) + self._storage.tpc_finish(t) + t = TransactionMetaData() + self._storage.tpc_begin(t) # Hung here before + self._storage.tpc_vote(t) + self._storage.tpc_finish(t) + + def _do_store_in_separate_thread(self, oid, revid, voted): + # We'll run the competing trans in a separate thread: + thread = threading.Thread(name='T2', + target=self._dostore, args=(oid,), kwargs=dict(revid=revid)) + thread.setDaemon(True) + thread.start() + thread.join(.1) + return thread + + def check_checkCurrentSerialInTransaction(self): + oid = b'\0\0\0\0\0\0\0\xf0' + tid = self._dostore(oid) + tid2 = self._dostore(oid, revid=tid) + data = b'cpersistent\nPersistent\nq\x01.N.' # a simple persistent obj + + #---------------------------------------------------------------------- + # stale read + t = TransactionMetaData() + self._storage.tpc_begin(t) + try: + self._storage.store(b'\0\0\0\0\0\0\0\xf1', + b'\0\0\0\0\0\0\0\0', data, '', t) + self._storage.checkCurrentSerialInTransaction(oid, tid, t) + self._storage.tpc_vote(t) + except POSException.ReadConflictError as v: + self.assertEqual(v.oid, oid) + self.assertEqual(v.serials, (tid2, tid)) + else: + if 0: self.assertTrue(False, "No conflict error") + + self._storage.tpc_abort(t) + + + #---------------------------------------------------------------------- + # non-stale read, no stress. :) + t = TransactionMetaData() + self._storage.tpc_begin(t) + self._storage.store(b'\0\0\0\0\0\0\0\xf2', + b'\0\0\0\0\0\0\0\0', data, '', t) + self._storage.checkCurrentSerialInTransaction(oid, tid2, t) + self._storage.tpc_vote(t) + self._storage.tpc_finish(t) + + #---------------------------------------------------------------------- + # non-stale read, competition after vote. 
The competing + # transaction must produce a tid > this transaction's tid + t = TransactionMetaData() + self._storage.tpc_begin(t) + self._storage.store(b'\0\0\0\0\0\0\0\xf3', + b'\0\0\0\0\0\0\0\0', data, '', t) + self._storage.checkCurrentSerialInTransaction(oid, tid2, t) + self._storage.tpc_vote(t) + + # We'll run the competing trans in a separate thread: + thread = self._do_store_in_separate_thread(oid, tid2, True) + self._storage.tpc_finish(t) + thread.join(33) + + tid3 = utils.load_current(self._storage, oid)[1] + self.assertTrue(tid3 > + utils.load_current( + self._storage, b'\0\0\0\0\0\0\0\xf3')[1]) + + #---------------------------------------------------------------------- + # non-stale competing trans after checkCurrentSerialInTransaction + t = TransactionMetaData() + self._storage.tpc_begin(t) + self._storage.store(b'\0\0\0\0\0\0\0\xf4', + b'\0\0\0\0\0\0\0\0', data, '', t) + self._storage.checkCurrentSerialInTransaction(oid, tid3, t) + + thread = self._do_store_in_separate_thread(oid, tid3, False) + + # There are 2 possibilities: + # 1. The store happens before this transaction completes, + # in which case, the vote below fails. + # 2. The store happens after this trans, in which case, the + # tid of the object is greater than this transaction's tid. + try: + self._storage.tpc_vote(t) + except POSException.ReadConflictError: + thread.join() # OK :) + else: + self._storage.tpc_finish(t) + thread.join() + tid4 = utils.load_current(self._storage, oid)[1] + self.assertTrue( + tid4 > + utils.load_current(self._storage, b'\0\0\0\0\0\0\0\xf4')[1]) + + + def check_tid_ordering_w_commit(self): + + # It's important that storages always give a consistent + # ordering for revisions, tids. This is most likely to fail + # around commit. Here we'll do some basic tests to check this. + + # We'll use threads to arrange for ordering to go wrong and + # verify that a storage gets it right. + + # First, some initial data. 
+ t = TransactionMetaData() + self._storage.tpc_begin(t) + self._storage.store(ZERO, ZERO, b'x', '', t) + self._storage.tpc_vote(t) + tids = [] + self._storage.tpc_finish(t, lambda tid: tids.append(tid)) + + # OK, now we'll start a new transaction, take it to finish, + # and then block finish while we do some other operations. + + t = TransactionMetaData() + self._storage.tpc_begin(t) + self._storage.store(ZERO, tids[0], b'y', '', t) + self._storage.tpc_vote(t) + + to_join = [] + def run_in_thread(func): + t = threading.Thread(target=func) + t.setDaemon(True) + t.start() + to_join.append(t) + + started = threading.Event() + finish = threading.Event() + @run_in_thread + def commit(): + def callback(tid): + started.set() + tids.append(tid) + finish.wait() + + self._storage.tpc_finish(t, callback) + + results = {} + started.wait() + attempts = [] + attempts_cond = utils.Condition() + + def update_attempts(): + with attempts_cond: + attempts.append(1) + attempts_cond.notifyAll() + + + @run_in_thread + def lastTransaction(): + update_attempts() + results['lastTransaction'] = self._storage.lastTransaction() + + @run_in_thread + def load(): + update_attempts() + results['load'] = utils.load_current(self._storage, ZERO)[1] + + expected_attempts = 2 + + if hasattr(self._storage, 'getTid'): + expected_attempts += 1 + @run_in_thread + def getTid(): + update_attempts() + results['getTid'] = self._storage.getTid(ZERO) + + if hasattr(self._storage, 'lastInvalidations'): + expected_attempts += 1 + @run_in_thread + def lastInvalidations(): + update_attempts() + invals = self._storage.lastInvalidations(1) + if invals: + results['lastInvalidations'] = invals[0][0] + + with attempts_cond: + while len(attempts) < expected_attempts: + attempts_cond.wait() + + time.sleep(.01) # for good measure :) + finish.set() + + for t in to_join: + t.join(1) + + self.assertEqual(results.pop('load'), tids[1]) + self.assertEqual(results.pop('lastTransaction'), tids[1]) + for m, tid in results.items(): 
+ self.assertEqual(tid, tids[1]) diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/tests/ConflictResolution.py b/thesisenv/lib/python3.6/site-packages/ZODB/tests/ConflictResolution.py new file mode 100644 index 0000000..448c083 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/tests/ConflictResolution.py @@ -0,0 +1,176 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Tests for application-level conflict resolution.""" + +from ZODB import DB +from ZODB.Connection import TransactionMetaData +from ZODB.POSException import ConflictError, UndoError +from persistent import Persistent +from transaction import TransactionManager + +from ZODB.tests.StorageTestBase import zodb_unpickle, zodb_pickle + +class PCounter(Persistent): + + _value = 0 + + def __repr__(self): + return "" % self._value + + def inc(self, n=1): + self._value = self._value + n + + def _p_resolveConflict(self, oldState, savedState, newState): + savedDiff = savedState['_value'] - oldState['_value'] + newDiff = newState['_value'] - oldState['_value'] + + oldState['_value'] = oldState['_value'] + savedDiff + newDiff + + return oldState + + # Insecurity: What if _p_resolveConflict _thinks_ it resolved the + # conflict, but did something wrong? 
+ +class PCounter2(PCounter): + + def _p_resolveConflict(self, oldState, savedState, newState): + raise ConflictError + +class PCounter3(PCounter): + def _p_resolveConflict(self, oldState, savedState, newState): + raise AttributeError("no attribute (testing conflict resolution)") + +class PCounter4(PCounter): + def _p_resolveConflict(self, oldState, savedState): + raise RuntimeError("Can't get here; not enough args") + +class ConflictResolvingStorage(object): + + def checkResolve(self, resolvable=True): + db = DB(self._storage) + + t1 = TransactionManager() + c1 = db.open(t1) + o1 = c1.root()['p'] = (PCounter if resolvable else PCounter2)() + o1.inc() + t1.commit() + + t2 = TransactionManager() + c2 = db.open(t2) + o2 = c2.root()['p'] + o2.inc(2) + t2.commit() + + o1.inc(3) + try: + t1.commit() + except ConflictError as err: + self.assertIn(".PCounter2,", str(err)) + self.assertEqual(o1._value, 3) + else: + self.assertTrue(resolvable, "Expected ConflictError") + self.assertEqual(o1._value, 6) + + t2.begin() + self.assertEqual(o2._value, o1._value) + + db.close() + + def checkUnresolvable(self): + self.checkResolve(False) + + def checkZClassesArentResolved(self): + from ZODB.ConflictResolution import find_global, BadClassName + dummy_class_tuple = ('*foobar', ()) + self.assertRaises(BadClassName, find_global, '*foobar', ()) + + def checkBuggyResolve1(self): + obj = PCounter3() + obj.inc() + + oid = self._storage.new_oid() + + revid1 = self._dostoreNP(oid, data=zodb_pickle(obj)) + + obj.inc() + obj.inc() + # The effect of committing two transactions with the same + # pickle is to commit two different transactions relative to + # revid1 that add two to _value. 
+ revid2 = self._dostoreNP(oid, revid=revid1, data=zodb_pickle(obj)) + self.assertRaises(ConflictError, + self._dostoreNP, + oid, revid=revid1, data=zodb_pickle(obj)) + + def checkBuggyResolve2(self): + obj = PCounter4() + obj.inc() + + oid = self._storage.new_oid() + + revid1 = self._dostoreNP(oid, data=zodb_pickle(obj)) + + obj.inc() + obj.inc() + # The effect of committing two transactions with the same + # pickle is to commit two different transactions relative to + # revid1 that add two to _value. + revid2 = self._dostoreNP(oid, revid=revid1, data=zodb_pickle(obj)) + self.assertRaises(ConflictError, + self._dostoreNP, + oid, revid=revid1, data=zodb_pickle(obj)) + +class ConflictResolvingTransUndoStorage(object): + + def checkUndoConflictResolution(self): + # This test is based on checkNotUndoable in the + # TransactionalUndoStorage test suite. Except here, conflict + # resolution should allow us to undo the transaction anyway. + + obj = PCounter() + obj.inc() + oid = self._storage.new_oid() + revid_a = self._dostore(oid, data=obj) + obj.inc() + revid_b = self._dostore(oid, revid=revid_a, data=obj) + obj.inc() + revid_c = self._dostore(oid, revid=revid_b, data=obj) + # Start the undo + info = self._storage.undoInfo() + tid = info[1]['id'] + t = TransactionMetaData() + self._storage.tpc_begin(t) + self._storage.undo(tid, t) + self._storage.tpc_vote(t) + self._storage.tpc_finish(t) + + def checkUndoUnresolvable(self): + # This test is based on checkNotUndoable in the + # TransactionalUndoStorage test suite. Except here, conflict + # resolution should allow us to undo the transaction anyway. 
+ + obj = PCounter2() + obj.inc() + oid = self._storage.new_oid() + revid_a = self._dostore(oid, data=obj) + obj.inc() + revid_b = self._dostore(oid, revid=revid_a, data=obj) + obj.inc() + revid_c = self._dostore(oid, revid=revid_b, data=obj) + # Start the undo + info = self._storage.undoInfo() + tid = info[1]['id'] + t = TransactionMetaData() + self.assertRaises(UndoError, self._begin_undos_vote, t, tid) + self._storage.tpc_abort(t) diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/tests/Corruption.py b/thesisenv/lib/python3.6/site-packages/ZODB/tests/Corruption.py new file mode 100644 index 0000000..44843e7 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/tests/Corruption.py @@ -0,0 +1,75 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. 
+# +############################################################################## +"""Do some minimal tests of data corruption""" + +import os +import random +import stat + +import ZODB.FileStorage + +from ZODB.utils import load_current + +from .StorageTestBase import StorageTestBase + +class FileStorageCorruptTests(StorageTestBase): + + def setUp(self): + StorageTestBase.setUp(self) + self._storage = ZODB.FileStorage.FileStorage('Data.fs', create=1) + + def _do_stores(self): + oids = [] + for i in range(5): + oid = self._storage.new_oid() + revid = self._dostore(oid) + oids.append((oid, revid)) + return oids + + def _check_stores(self, oids): + for oid, revid in oids: + data, s_revid = load_current(self._storage, oid) + self.assertEqual(s_revid, revid) + + def checkTruncatedIndex(self): + oids = self._do_stores() + self._close() + + # truncation the index file + self.assertTrue(os.path.exists('Data.fs.index')) + f = open('Data.fs.index', 'rb+') + f.seek(0, 2) + size = f.tell() + f.seek(size // 2) + f.truncate() + f.close() + + self._storage = ZODB.FileStorage.FileStorage('Data.fs') + self._check_stores(oids) + + def checkCorruptedIndex(self): + oids = self._do_stores() + self._close() + + # truncation the index file + self.assertTrue(os.path.exists('Data.fs.index')) + size = os.stat('Data.fs.index')[stat.ST_SIZE] + f = open('Data.fs.index', 'rb+') + while f.tell() < size: + f.seek(random.randrange(1, size // 10), 1) + f.write(b'\000') + f.close() + + self._storage = ZODB.FileStorage.FileStorage('Data.fs') + self._check_stores(oids) diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/tests/HistoryStorage.py b/thesisenv/lib/python3.6/site-packages/ZODB/tests/HistoryStorage.py new file mode 100644 index 0000000..cb97a5a --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/tests/HistoryStorage.py @@ -0,0 +1,55 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and 
Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Run the history() related tests for a storage. + +Any storage that supports the history() method should be able to pass +all these tests. +""" + +import sys +from time import time, sleep +from ZODB.tests.MinPO import MinPO + +class HistoryStorage(object): + def checkSimpleHistory(self): + self._checkHistory((11, 12, 13)) + + def _checkHistory(self, data): + start = time() + # Store a couple of revisions of the object + oid = self._storage.new_oid() + self.assertRaises(KeyError,self._storage.history,oid) + revids = [None] + for data in data: + if sys.platform == 'win32': + # time.time() has a precision of 1ms on Windows. + sleep(0.002) + revids.append(self._dostore(oid, revids[-1], MinPO(data))) + revids.reverse() + del revids[-1] + # Now get various snapshots of the object's history + for i in range(1, 1 + len(revids)): + h = self._storage.history(oid, size=i) + self.assertEqual([d['tid'] for d in h], revids[:i]) + # Check results are sorted by timestamp, in descending order. + if sys.platform == 'win32': + # Same as above. This is also required in case this method is + # called several times for the same storage. 
+ sleep(0.002) + a = time() + for d in h: + b = a + a = d['time'] + self.assertLess(a, b) + self.assertLess(start, a) diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/tests/IExternalGC.test b/thesisenv/lib/python3.6/site-packages/ZODB/tests/IExternalGC.test new file mode 100644 index 0000000..52983d3 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/tests/IExternalGC.test @@ -0,0 +1,130 @@ +Storage Support for external GC +=============================== + +A storage that provides IExternalGC supports external garbage +collectors by providing a deleteObject method that transactionally +deletes an object. + +A create_storage function is provided that creates a storage. + + >>> storage = create_storage() + >>> import ZODB.blob, transaction + >>> db = ZODB.DB(storage) + >>> conn = db.open() + >>> conn.root()[0] = conn.root().__class__() + >>> conn.root()[1] = ZODB.blob.Blob(b'some data') + >>> transaction.commit() + >>> oid0 = conn.root()[0]._p_oid + >>> oid1 = conn.root()[1]._p_oid + >>> del conn.root()[0] + >>> del conn.root()[1] + >>> transaction.commit() + +At this point, object 0 and 1 is garbage, but it's still in the storage: + + >>> p0, s0 = storage.load(oid0, '') + >>> p1, s1 = storage.load(oid1, '') + +The storage is configured not to gc on pack, so even if we pack, these +objects won't go away: + + >>> len(storage) + 3 + >>> import time + >>> db.pack(time.time()+1) + >>> len(storage) + 3 + >>> p0, s0 = storage.load(oid0, '') + >>> p1, s1 = storage.load(oid1, '') + +Now we'll use the new deleteObject API to delete the objects. We can't +go through the database to do this, so we'll have to manage the +transaction ourselves. 
+ + >>> from ZODB.Connection import TransactionMetaData + >>> txn = TransactionMetaData() + >>> storage.tpc_begin(txn) + >>> storage.deleteObject(oid0, s0, txn) + >>> storage.deleteObject(oid1, s1, txn) + >>> _ = storage.tpc_vote(txn) + >>> tid = storage.tpc_finish(txn) + >>> tid == storage.lastTransaction() + True + +Now if we try to load data for the objects, we get a POSKeyError: + + + >>> storage.load(oid0, '') # doctest: +ELLIPSIS + Traceback (most recent call last): + ... + POSKeyError: ... + + >>> storage.load(oid1, '') # doctest: +ELLIPSIS + Traceback (most recent call last): + ... + POSKeyError: ... + +We can still get the data if we load before the time we deleted. + + >>> storage.loadBefore(oid0, conn.root()._p_serial) == (p0, s0, tid) + True + >>> storage.loadBefore(oid1, conn.root()._p_serial) == (p1, s1, tid) + True + >>> with open(storage.loadBlob(oid1, s1)) as fp: fp.read() + 'some data' + +If we pack, however, the old data will be removed and the data will be +gone: + + >>> db.pack(time.time()+1) + >>> len(db.storage) + 1 + + >>> time.sleep(.1) + + >>> storage.load(oid0, '') # doctest: +ELLIPSIS + Traceback (most recent call last): + ... + POSKeyError: ... + + >>> storage.load(oid1, '') # doctest: +ELLIPSIS + Traceback (most recent call last): + ... + POSKeyError: ... + + >>> storage.loadBefore(oid0, conn.root()._p_serial) # doctest: +ELLIPSIS + Traceback (most recent call last): + ... + POSKeyError: ... + + >>> storage.loadBefore(oid1, conn.root()._p_serial) # doctest: +ELLIPSIS + Traceback (most recent call last): + ... + POSKeyError: ... + + >>> storage.loadBlob(oid1, s1) # doctest: +ELLIPSIS + Traceback (most recent call last): + ... + POSKeyError: ... 
+ +A conflict error is raised if the serial we provide to deleteObject +isn't current: + + >>> conn.root()[0] = conn.root().__class__() + >>> transaction.commit() + >>> oid = conn.root()[0]._p_oid + >>> bad_serial = conn.root()[0]._p_serial + >>> conn.root()[0].x = 1 + >>> transaction.commit() + + >>> txn = TransactionMetaData() + >>> storage.tpc_begin(txn) + >>> storage.deleteObject(oid, bad_serial, txn); storage.tpc_vote(txn) + ... # doctest: +ELLIPSIS + Traceback (most recent call last): + ... + ConflictError: database conflict error ... + + >>> storage.tpc_abort(txn) + + >>> storage.close() diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/tests/IteratorStorage.py b/thesisenv/lib/python3.6/site-packages/ZODB/tests/IteratorStorage.py new file mode 100644 index 0000000..48fcbc9 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/tests/IteratorStorage.py @@ -0,0 +1,247 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Run tests against the iterator() interface for storages. + +Any storage that supports the iterator() method should be able to pass +all these tests. 
+ +""" + +from ZODB.Connection import TransactionMetaData +from ZODB.tests.MinPO import MinPO +from ZODB.tests.StorageTestBase import zodb_pickle, zodb_unpickle +from ZODB.utils import U64, p64, load_current + +import ZODB.blob + +try: + from itertools import izip as zip +except ImportError: + # Py3: zip() already returns an iterable. + pass + +class IteratorCompare(object): + + def iter_verify(self, txniter, revids, val0): + eq = self.assertEqual + oid = self._oid + val = val0 + for reciter, revid in zip(txniter, revids + [None]): + eq(reciter.tid, revid) + for rec in reciter: + eq(rec.oid, oid) + eq(rec.tid, revid) + eq(zodb_unpickle(rec.data), MinPO(val)) + val = val + 1 + eq(val, val0 + len(revids)) + + +class IteratorStorage(IteratorCompare): + + def checkSimpleIteration(self): + # Store a bunch of revisions of a single object + self._oid = oid = self._storage.new_oid() + revid1 = self._dostore(oid, data=MinPO(11)) + revid2 = self._dostore(oid, revid=revid1, data=MinPO(12)) + revid3 = self._dostore(oid, revid=revid2, data=MinPO(13)) + # Now iterate over all the transactions and compare carefully + txniter = self._storage.iterator() + self.iter_verify(txniter, [revid1, revid2, revid3], 11) + + def checkUndoZombie(self): + oid = self._storage.new_oid() + revid = self._dostore(oid, data=MinPO(94)) + # Get the undo information + info = self._storage.undoInfo() + tid = info[0]['id'] + # Undo the creation of the object, rendering it a zombie + t = TransactionMetaData() + self._storage.tpc_begin(t) + oids = self._storage.undo(tid, t) + self._storage.tpc_vote(t) + self._storage.tpc_finish(t) + # Now attempt to iterator over the storage + iter = self._storage.iterator() + for txn in iter: + for rec in txn: + pass + + # The last transaction performed an undo of the transaction that + # created object oid. (As Barry points out, the object is now in the + # George Bailey state.) Assert that the final data record contains + # None in the data attribute. 
+ self.assertEqual(rec.oid, oid) + self.assertEqual(rec.data, None) + + def checkTransactionExtensionFromIterator(self): + oid = self._storage.new_oid() + revid = self._dostore(oid, data=MinPO(1)) + iter = self._storage.iterator() + count = 0 + for txn in iter: + self.assertEqual(txn.extension, {}) + count +=1 + self.assertEqual(count, 1) + + def checkIterationIntraTransaction(self): + # TODO: Try this test with logging enabled. If you see something + # like + # + # ZODB FS FS21 warn: FileStorageTests.fs truncated, possibly due to + # damaged records at 4 + # + # Then the code in FileIterator.next() hasn't yet been fixed. + # Should automate that check. + oid = self._storage.new_oid() + t = TransactionMetaData() + data = zodb_pickle(MinPO(0)) + try: + self._storage.tpc_begin(t) + self._storage.store(oid, '\0'*8, data, '', t) + self._storage.tpc_vote(t) + # Don't do tpc_finish yet + it = self._storage.iterator() + for x in it: + pass + finally: + self._storage.tpc_finish(t) + + def checkLoad_was_checkLoadEx(self): + oid = self._storage.new_oid() + self._dostore(oid, data=42) + data, tid = load_current(self._storage, oid) + self.assertEqual(zodb_unpickle(data), MinPO(42)) + match = False + for txn in self._storage.iterator(): + for rec in txn: + if rec.oid == oid and rec.tid == tid: + self.assertEqual(txn.tid, tid) + match = True + if not match: + self.fail("Could not find transaction with matching id") + + def checkIterateRepeatedly(self): + self._dostore() + transactions = self._storage.iterator() + self.assertEqual(1, len(list(transactions))) + # The iterator can only be consumed once: + self.assertEqual(0, len(list(transactions))) + + def checkIterateRecordsRepeatedly(self): + self._dostore() + it = self._storage.iterator() + tinfo = next(it) + self.assertEqual(1, len(list(tinfo))) + self.assertEqual(1, len(list(tinfo))) + if hasattr(it, 'close'): + it.close() + + def checkIterateWhileWriting(self): + self._dostore() + iterator = self._storage.iterator() + # We 
have one transaction with 1 modified object. + txn_1 = next(iterator) + self.assertEqual(1, len(list(txn_1))) + + # We store another transaction with 1 object, the already running + # iterator does not pick this up. + self._dostore() + with self.assertRaises(StopIteration): + next(iterator) + + +class ExtendedIteratorStorage(IteratorCompare): + + def checkExtendedIteration(self): + # Store a bunch of revisions of a single object + self._oid = oid = self._storage.new_oid() + revid1 = self._dostore(oid, data=MinPO(11)) + revid2 = self._dostore(oid, revid=revid1, data=MinPO(12)) + revid3 = self._dostore(oid, revid=revid2, data=MinPO(13)) + revid4 = self._dostore(oid, revid=revid3, data=MinPO(14)) + # Note that the end points are included + # Iterate over all of the transactions with explicit start/stop + txniter = self._storage.iterator(revid1, revid4) + self.iter_verify(txniter, [revid1, revid2, revid3, revid4], 11) + # Iterate over some of the transactions with explicit start + txniter = self._storage.iterator(revid3) + self.iter_verify(txniter, [revid3, revid4], 13) + # Iterate over some of the transactions with explicit stop + txniter = self._storage.iterator(None, revid2) + self.iter_verify(txniter, [revid1, revid2], 11) + # Iterate over some of the transactions with explicit start+stop + txniter = self._storage.iterator(revid2, revid3) + self.iter_verify(txniter, [revid2, revid3], 12) + # Specify an upper bound somewhere in between values + revid3a = p64((U64(revid3) + U64(revid4)) // 2) + txniter = self._storage.iterator(revid2, revid3a) + self.iter_verify(txniter, [revid2, revid3], 12) + # Specify a lower bound somewhere in between values. + # revid2 == revid1+1 is very likely on Windows. Adding 1 before + # dividing ensures that "the midpoint" we compute is strictly larger + # than revid1. 
+ revid1a = p64((U64(revid1) + 1 + U64(revid2)) // 2) + assert revid1 < revid1a + txniter = self._storage.iterator(revid1a, revid3a) + self.iter_verify(txniter, [revid2, revid3], 12) + # Specify an empty range + txniter = self._storage.iterator(revid3, revid2) + self.iter_verify(txniter, [], 13) + # Specify a singleton range + txniter = self._storage.iterator(revid3, revid3) + self.iter_verify(txniter, [revid3], 13) + + +class IteratorDeepCompare(object): + + def compare(self, storage1, storage2): + eq = self.assertEqual + iter1 = storage1.iterator() + iter2 = storage2.iterator() + for txn1, txn2 in zip(iter1, iter2): + eq(txn1.tid, txn2.tid) + eq(txn1.status, txn2.status) + eq(txn1.user, txn2.user) + eq(txn1.description, txn2.description) + eq(txn1.extension, txn2.extension) + itxn1 = iter(txn1) + itxn2 = iter(txn2) + for rec1, rec2 in zip(itxn1, itxn2): + eq(rec1.oid, rec2.oid) + eq(rec1.tid, rec2.tid) + eq(rec1.data, rec2.data) + if ZODB.blob.is_blob_record(rec1.data): + try: + fn1 = storage1.loadBlob(rec1.oid, rec1.tid) + except ZODB.POSException.POSKeyError: + self.assertRaises( + ZODB.POSException.POSKeyError, + storage2.loadBlob, rec1.oid, rec1.tid) + else: + fn2 = storage2.loadBlob(rec1.oid, rec1.tid) + self.assertTrue(fn1 != fn2) + with open(fn1, 'rb') as fp1: + with open(fn2, 'rb') as fp2: + eq(fp1.read(), fp2.read()) + + # Make sure there are no more records left in rec1 and rec2, + # meaning they were the same length. + # Additionally, check that we're backwards compatible to the + # IndexError we used to raise before. 
+ self.assertRaises(StopIteration, next, itxn1) + self.assertRaises(StopIteration, next, itxn2) + # Make sure ther are no more records left in txn1 and txn2, meaning + # they were the same length + self.assertRaises(StopIteration, next, iter1) + self.assertRaises(StopIteration, next, iter2) diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/tests/MTStorage.py b/thesisenv/lib/python3.6/site-packages/ZODB/tests/MTStorage.py new file mode 100644 index 0000000..ebe1dba --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/tests/MTStorage.py @@ -0,0 +1,244 @@ +import random +import sys +import threading +import time + +from persistent.mapping import PersistentMapping +import six +import transaction + +import ZODB +from ZODB.Connection import TransactionMetaData +from ZODB.tests.StorageTestBase import zodb_pickle, zodb_unpickle +from ZODB.tests.MinPO import MinPO +from ZODB.POSException import ConflictError + +from ZODB.utils import load_current + +SHORT_DELAY = 0.01 + +class TestThread(threading.Thread): + """Base class for defining threads that run from unittest. + + If the thread exits with an uncaught exception, catch it and + re-raise it when the thread is joined. The re-raise will cause + the test to fail. + + The subclass should define a runtest() method instead of a run() + method. 
+ """ + + def __init__(self): + threading.Thread.__init__(self) + self._exc_info = None + + def run(self): + try: + self.runtest() + except: + self._exc_info = sys.exc_info() + + def join(self, timeout=None): + threading.Thread.join(self, timeout) + if self._exc_info: + raise six.reraise( + self._exc_info[0], self._exc_info[1], self._exc_info[2]) + +class ZODBClientThread(TestThread): + + __super_init = TestThread.__init__ + + def __init__(self, db, test, commits=10, delay=SHORT_DELAY): + self.__super_init() + self.setDaemon(1) + self.db = db + self.test = test + self.commits = commits + self.delay = delay + + def runtest(self): + conn = self.db.open() + conn.sync() + root = conn.root() + d = self.get_thread_dict(root) + if d is None: + self.test.fail() + else: + for i in range(self.commits): + self.commit(d, i) + self.test.assertEqual(sorted(d.keys()), list(range(self.commits))) + conn.close() + + def commit(self, d, num): + d[num] = time.time() + time.sleep(self.delay) + transaction.commit() + time.sleep(self.delay) + + # Return a new PersistentMapping, and store it on the root object under + # the name (.getName()) of the current thread. + def get_thread_dict(self, root): + # This is vicious: multiple threads are slamming changes into the + # root object, then trying to read the root object, simultaneously + # and without any coordination. Conflict errors are rampant. It + # used to go around at most 10 times, but that fairly often failed + # to make progress in the 7-thread tests on some test boxes. Going + # around (at most) 1000 times was enough so that a 100-thread test + # reliably passed on Tim's hyperthreaded WinXP box (but at the + # original 10 retries, the same test reliably failed with 15 threads). 
+ name = self.getName() + MAXRETRIES = 1000 + + for i in range(MAXRETRIES): + try: + root[name] = PersistentMapping() + transaction.commit() + break + except ConflictError: + root._p_jar.sync() + else: + raise ConflictError("Exceeded %d attempts to store" % MAXRETRIES) + + for j in range(MAXRETRIES): + try: + return root.get(name) + except ConflictError: + root._p_jar.sync() + + raise ConflictError("Exceeded %d attempts to read" % MAXRETRIES) + +class StorageClientThread(TestThread): + + __super_init = TestThread.__init__ + + def __init__(self, storage, test, commits=10, delay=SHORT_DELAY): + self.__super_init() + self.storage = storage + self.test = test + self.commits = commits + self.delay = delay + self.oids = {} + + def runtest(self): + for i in range(self.commits): + self.dostore(i) + self.check() + + def check(self): + for oid, revid in self.oids.items(): + data, serial = load_current(self.storage, oid) + self.test.assertEqual(serial, revid) + obj = zodb_unpickle(data) + self.test.assertEqual(obj.value[0], self.getName()) + + def pause(self): + time.sleep(self.delay) + + def oid(self): + oid = self.storage.new_oid() + self.oids[oid] = None + return oid + + def dostore(self, i): + data = zodb_pickle(MinPO((self.getName(), i))) + t = TransactionMetaData() + oid = self.oid() + self.pause() + + self.storage.tpc_begin(t) + self.pause() + + # Always create a new object, signified by None for revid + self.storage.store(oid, None, data, '', t) + self.pause() + + self.storage.tpc_vote(t) + self.pause() + + revid = self.storage.tpc_finish(t) + self.pause() + self.oids[oid] = revid + +class ExtStorageClientThread(StorageClientThread): + + def runtest(self): + # pick some other storage ops to execute, depending in part + # on the features provided by the storage. 
+ names = ["do_load"] + + storage = self.storage + + try: + supportsUndo = storage.supportsUndo + except AttributeError: + pass + else: + if supportsUndo(): + names += ["do_loadSerial", "do_undoLog", "do_iterator"] + + ops = [getattr(self, meth) for meth in names] + assert ops, "Didn't find an storage ops in %s" % self.storage + # do a store to guarantee there's at least one oid in self.oids + self.dostore(0) + + for i in range(self.commits - 1): + meth = random.choice(ops) + meth() + self.dostore(i) + self.check() + + def pick_oid(self): + return random.choice(tuple(self.oids)) + + def do_load(self): + oid = self.pick_oid() + load_current(self.storage, oid) + + def do_loadSerial(self): + oid = self.pick_oid() + self.storage.loadSerial(oid, self.oids[oid]) + + def do_undoLog(self): + self.storage.undoLog(0, -20) + + def do_iterator(self): + try: + iter = self.storage.iterator() + except AttributeError: + # It's hard to detect that a ZEO ClientStorage + # doesn't have this method, but does have all the others. + return + for obj in iter: + pass + +class MTStorage(object): + "Test a storage with multiple client threads executing concurrently." 
+ + def _checkNThreads(self, n, constructor, *args): + threads = [constructor(*args) for i in range(n)] + for t in threads: + t.start() + for t in threads: + t.join(60) + for t in threads: + self.assertFalse(t.isAlive(), + "thread failed to finish in 60 seconds") + + def check2ZODBThreads(self): + db = ZODB.DB(self._storage) + self._checkNThreads(2, ZODBClientThread, db, self) + db.close() + + def check7ZODBThreads(self): + db = ZODB.DB(self._storage) + self._checkNThreads(7, ZODBClientThread, db, self) + db.close() + + def check2StorageThreads(self): + self._checkNThreads(2, StorageClientThread, self._storage, self) + + def check7StorageThreads(self): + self._checkNThreads(7, StorageClientThread, self._storage, self) + + def check4ExtStorageThread(self): + self._checkNThreads(4, ExtStorageClientThread, self._storage, self) diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/tests/MVCCMappingStorage.py b/thesisenv/lib/python3.6/site-packages/ZODB/tests/MVCCMappingStorage.py new file mode 100644 index 0000000..e87b0be --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/tests/MVCCMappingStorage.py @@ -0,0 +1,124 @@ +############################################################################## +# +# Copyright (c) Zope Corporation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## +"""An extension of MappingStorage that depends on polling. + +Each Connection has its own view of the database. Polling updates each +connection's view. 
+""" + +import ZODB.utils +import ZODB.POSException +from ZODB.interfaces import IMVCCStorage +from ZODB.MappingStorage import MappingStorage +from zope.interface import implementer + + +@implementer(IMVCCStorage) +class MVCCMappingStorage(MappingStorage): + + def __init__(self, name="MVCC Mapping Storage"): + MappingStorage.__init__(self, name=name) + # _polled_tid contains the transaction ID at the last poll. + self._polled_tid = b'' + self._data_snapshot = None # {oid->(state, tid)} + self._main_lock = self._lock + + def new_instance(self): + """Returns a storage instance that is a view of the same data. + """ + inst = MVCCMappingStorage(name=self.__name__) + # All instances share the same OID data, transaction log, commit lock, + # and OID sequence. + inst._data = self._data + inst._transactions = self._transactions + inst._commit_lock = self._commit_lock + inst.new_oid = self.new_oid + inst.pack = self.pack + inst.loadBefore = self.loadBefore + inst._ltid = self._ltid + inst._main_lock = self._lock + return inst + + @ZODB.utils.locked(MappingStorage.opened) + def sync(self, force=False): + self._data_snapshot = None + + def release(self): + pass + + @ZODB.utils.locked(MappingStorage.opened) + def load(self, oid, version=''): + assert not version, "Versions are not supported" + if self._data_snapshot is None: + self.poll_invalidations() + info = self._data_snapshot.get(oid) + if info: + return info + raise ZODB.POSException.POSKeyError(oid) + + def poll_invalidations(self): + """Poll the storage for changes by other connections. + """ + # prevent changes to _transactions and _data during analysis + with self._main_lock: + if self._transactions: + new_tid = self._transactions.maxKey() + else: + new_tid = ZODB.utils.z64 + + # Copy the current data into a snapshot. This is obviously + # very inefficient for large storages, but it's good for + # tests. 
+ self._data_snapshot = {} + for oid, tid_data in self._data.items(): + if tid_data: + tid = tid_data.maxKey() + self._data_snapshot[oid] = tid_data[tid], tid + + if self._polled_tid: + if self._polled_tid not in self._transactions: + # This connection is so old that we can no longer enumerate + # all the changes. + self._polled_tid = new_tid + return None + + changed_oids = set() + for tid, txn in self._transactions.items( + self._polled_tid, new_tid, + excludemin=True, excludemax=False): + if txn.status == 'p': + # This transaction has been packed, so it is no longer + # possible to enumerate all changed oids. + self._polled_tid = new_tid + return None + if tid == self._ltid: + # ignore the transaction committed by this connection + continue + changed_oids.update(txn.data.keys()) + + self._polled_tid = self._ltid = new_tid + return list(changed_oids) + + def tpc_finish(self, transaction, func = lambda tid: None): + self._data_snapshot = None + return MappingStorage.tpc_finish(self, transaction, func) + + def tpc_abort(self, transaction): + self._data_snapshot = None + MappingStorage.tpc_abort(self, transaction) + + def pack(self, t, referencesf, gc=True): + # prevent all concurrent commits during packing + with self._commit_lock: + MappingStorage.pack(self, t, referencesf, gc) diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/tests/MinPO.py b/thesisenv/lib/python3.6/site-packages/ZODB/tests/MinPO.py new file mode 100644 index 0000000..6085f5a --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/tests/MinPO.py @@ -0,0 +1,50 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""A minimal persistent object to use for tests""" +from persistent import Persistent + +class MinPO(Persistent): + def __init__(self, value=None): + self.value = value + + def __cmp__(self, aMinPO): + return cmp(self.value, aMinPO.value) + + def __hash__(self): + return hash(self.value) + + # Py3: Python 3 does not support cmp() anymore. This is insane!! + + def __eq__(self, aMinPO): + return self.value == aMinPO.value + + def __lt__(self, aMinPO): + return self.value < aMinPO.value + + # @functools.total_ordering is not available in 2.6 :-( + + def __ne__(self, aMinPO): + return self.value != aMinPO.value + + def __gt__(self, aMinPO): + return self.value > aMinPO.value + + def __le__(self, aMinPO): + return self.value <= aMinPO.value + + def __ge__(self, aMinPO): + return self.value >= aMinPO.value + + def __repr__(self): + return "MinPO(%s)" % self.value diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/tests/PackableStorage.py b/thesisenv/lib/python3.6/site-packages/ZODB/tests/PackableStorage.py new file mode 100644 index 0000000..d61f2c6 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/tests/PackableStorage.py @@ -0,0 +1,798 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Run some tests relevant for storages that support pack().""" +from __future__ import print_function + +import doctest +import time + +from persistent import Persistent +from persistent.mapping import PersistentMapping +from ZODB import DB +from ZODB.POSException import ConflictError, StorageError +from ZODB.serialize import referencesf +from ZODB.tests.MinPO import MinPO +from ZODB.tests.MTStorage import TestThread +from ZODB.tests.StorageTestBase import snooze +from ZODB._compat import (loads, PersistentPickler, Pickler, Unpickler, + BytesIO, _protocol) +import transaction +import ZODB.interfaces +import ZODB.tests.util +from ZODB.tests.util import time_monotonically_increases +import zope.testing.setupstack + +from ZODB.utils import load_current + +ZERO = b'\0'*8 + + +# This class is for the root object. It must not contain a getoid() method +# (really, attribute). The persistent pickling machinery -- in the dumps() +# function below -- will pickle Root objects as normal, but any attributes +# which reference persistent Object instances will get pickled as persistent +# ids, not as the object's state. This makes the referencesf stuff work, +# because it pickle sniffs for persistent ids (so we have to get those +# persistent ids into the root object's pickle). +class Root(object): + pass + + +# This is the persistent Object class. Because it has a getoid() method, the +# persistent pickling machinery -- in the dumps() function below -- will +# pickle the oid string instead of the object's actual state. Yee haw, this +# stuff is deep. 
;) +class Object(object): + def __init__(self, oid): + self._oid = oid + + def getoid(self): + return self._oid + + def __setstate__(self, state): + self.__dict__.clear() + self.__dict__.update(state) + if not isinstance(self._oid, bytes): + # Python 3 + self._oid = self._oid.encode('ascii') + + +class C(Persistent): + pass + +# Here's where all the magic occurs. Sadly, the pickle module is a bit +# underdocumented, but here's what happens: by setting the persistent_id +# attribute to getpersid() on the pickler, that function gets called for every +# object being pickled. By returning None when the object has no getoid +# attribute, it signals pickle to serialize the object as normal. That's how +# the Root instance gets pickled correctly. But, if the object has a getoid +# attribute, then by returning that method's value, we tell pickle to +# serialize the persistent id of the object instead of the object's state. +# That sets the pickle up for proper sniffing by the referencesf machinery. +# Fun, huh? +def dumps(obj): + def getpersid(obj): + if hasattr(obj, 'getoid'): + return obj.getoid() + return None + s = BytesIO() + p = PersistentPickler(getpersid, s, _protocol) + p.dump(obj) + p.dump(None) + return s.getvalue() + +def pdumps(obj): + s = BytesIO() + p = Pickler(s, _protocol) + p.dump(obj) + p.dump(None) + return s.getvalue() + + +class PackableStorageBase(object): + # We keep a cache of object ids to instances so that the unpickler can + # easily return any persistent object. + + @property + def _cache(self): + try: + return self.__cache + except AttributeError: + self.__cache = {} + return self.__cache + + def _newobj(self): + # This is a convenience method to create a new persistent Object + # instance. It asks the storage for a new object id, creates the + # instance with the given oid, populates the cache and returns the + # object. 
+ oid = self._storage.new_oid() + obj = Object(oid) + self._cache[obj.getoid()] = obj + return obj + + def _makeloader(self): + # This is the other side of the persistent pickling magic. We need a + # custom unpickler to mirror our custom pickler above. By setting the + # persistent_load function of the unpickler to self._cache.get(), + # whenever a persistent id is unpickled, it will actually return the + # Object instance out of the cache. As far as returning a function + # with an argument bound to an instance attribute method, we do it + # this way because it makes the code in the tests more succinct. + # + # BUT! Be careful in your use of loads() vs. pickle.loads(). loads() + # should only be used on the Root object's pickle since it's the only + # special one. All the Object instances should use pickle.loads(). + def loads(str, persfunc=self._cache.get): + fp = BytesIO(str) + u = Unpickler(fp) + u.persistent_load = persfunc + return u.load() + return loads + + def _initroot(self): + try: + load_current(self._storage, ZERO) + except KeyError: + from ZODB.Connection import TransactionMetaData + file = BytesIO() + p = Pickler(file, _protocol) + p.dump((PersistentMapping, None)) + p.dump({'_container': {}}) + t = TransactionMetaData() + t.description = u'initial database creation' + self._storage.tpc_begin(t) + self._storage.store(ZERO, None, file.getvalue(), '', t) + self._storage.tpc_vote(t) + self._storage.tpc_finish(t) + + def _sanity_check(self): + # Iterate over the storage to make sure it's sane. 
+ if not ZODB.interfaces.IStorageIteration.providedBy(self._storage): + return + it = self._storage.iterator() + for txn in it: + for data in txn: + pass + + +class PackableStorage(PackableStorageBase): + + def checkPackEmptyStorage(self): + self._storage.pack(time.time(), referencesf) + + def checkPackTomorrow(self): + self._initroot() + self._storage.pack(time.time() + 10000, referencesf) + + def checkPackYesterday(self): + self._initroot() + self._storage.pack(time.time() - 10000, referencesf) + + def _PackWhileWriting(self, pack_now): + # A storage should allow some reading and writing during + # a pack. This test attempts to exercise locking code + # in the storage to test that it is safe. It generates + # a lot of revisions, so that pack takes a long time. + + db = DB(self._storage) + conn = db.open() + root = conn.root() + + for i in range(10): + root[i] = MinPO(i) + transaction.commit() + + snooze() + packt = time.time() + + choices = list(range(10)) + for dummy in choices: + for i in choices: + root[i].value = MinPO(i) + transaction.commit() + + # How many client threads should we run, and how long should we + # wait for them to finish? Hard to say. Running 4 threads and + # waiting 30 seconds too often left a thread still alive on Tim's + # Win98SE box, during ZEO flavors of this test. Those tend to + # run one thread at a time to completion, and take about 10 seconds + # per thread. There doesn't appear to be a compelling reason to + # run that many threads. Running 3 threads and waiting up to a + # minute seems to work well in practice. The ZEO tests normally + # finish faster than that, and the non-ZEO tests very much faster + # than that. 
+ NUM_LOOP_TRIP = 50 + timer = ElapsedTimer(time.time()) + threads = [ClientThread(db, choices, NUM_LOOP_TRIP, timer, i) + for i in range(3)] + for t in threads: + t.start() + + if pack_now: + db.pack(time.time()) + else: + db.pack(packt) + + for t in threads: + t.join(60) + liveness = [t.isAlive() for t in threads] + if True in liveness: + # They should have finished by now. + print('Liveness:', liveness) + # Combine the outcomes, and sort by start time. + outcomes = [] + for t in threads: + outcomes.extend(t.outcomes) + # each outcome list has as many of these as a loop trip got thru: + # thread_id + # elapsed millis at loop top + # elapsed millis at attempt to assign to self.root[index] + # index into self.root getting replaced + # elapsed millis when outcome known + # 'OK' or 'Conflict' + # True if we got beyond this line, False if it raised an + # exception (one possible Conflict cause): + # self.root[index].value = MinPO(j) + def cmp_by_time(a, b): + return cmp((a[1], a[0]), (b[1], b[0])) + outcomes.sort(cmp_by_time) + counts = [0] * 4 + for outcome in outcomes: + n = len(outcome) + assert n >= 2 + tid = outcome[0] + print('tid:%d top:%5d' % (tid, outcome[1]), end=' ') + if n > 2: + print('commit:%5d' % outcome[2], end=' ') + if n > 3: + print('index:%2d' % outcome[3], end=' ') + if n > 4: + print('known:%5d' % outcome[4], end=' ') + if n > 5: + print('%8s' % outcome[5], end=' ') + if n > 6: + print('assigned:%5s' % outcome[6], end=' ') + counts[tid] += 1 + if counts[tid] == NUM_LOOP_TRIP: + print('thread %d done' % tid, end=' ') + print() + + self.fail('a thread is still alive') + + self._sanity_check() + + db.close() + + @time_monotonically_increases + def checkPackWhileWriting(self): + self._PackWhileWriting(pack_now=False) + + @time_monotonically_increases + def checkPackNowWhileWriting(self): + self._PackWhileWriting(pack_now=True) + + @time_monotonically_increases + def checkPackLotsWhileWriting(self): + # This is like the other pack-while-writing 
tests, except it packs + # repeatedly until the client thread is done. At the time it was + # introduced, it reliably provoked + # CorruptedError: ... transaction with checkpoint flag set + # in the ZEO flavor of the FileStorage tests. + + db = DB(self._storage) + conn = db.open() + root = conn.root() + + choices = list(range(10)) + for i in choices: + root[i] = MinPO(i) + transaction.commit() + + snooze() + packt = time.time() + + for dummy in choices: + for i in choices: + root[i].value = MinPO(i) + transaction.commit() + + NUM_LOOP_TRIP = 100 + timer = ElapsedTimer(time.time()) + thread = ClientThread(db, choices, NUM_LOOP_TRIP, timer, 0) + thread.start() + while thread.isAlive(): + db.pack(packt) + snooze() + packt = time.time() + thread.join() + + self._sanity_check() + + db.close() + + def checkPackWithMultiDatabaseReferences(self): + databases = {} + db = DB(self._storage, databases=databases, database_name='') + otherdb = ZODB.tests.util.DB(databases=databases, database_name='o') + conn = db.open() + root = conn.root() + root[1] = C() + transaction.commit() + del root[1] + transaction.commit() + root[2] = conn.get_connection('o').root() + transaction.commit() + db.pack(time.time()+1) + # some valid storages always return 0 for len() + self.assertTrue(len(self._storage) in (0, 1)) + conn.close() + otherdb.close() + db.close() + + def checkPackAllRevisions(self): + self._initroot() + eq = self.assertEqual + raises = self.assertRaises + # Create a `persistent' object + obj = self._newobj() + oid = obj.getoid() + obj.value = 1 + # Commit three different revisions + revid1 = self._dostoreNP(oid, data=pdumps(obj)) + obj.value = 2 + revid2 = self._dostoreNP(oid, revid=revid1, data=pdumps(obj)) + obj.value = 3 + revid3 = self._dostoreNP(oid, revid=revid2, data=pdumps(obj)) + # Now make sure all three revisions can be extracted + data = self._storage.loadSerial(oid, revid1) + pobj = loads(data) + eq(pobj.getoid(), oid) + eq(pobj.value, 1) + data = 
self._storage.loadSerial(oid, revid2) + pobj = loads(data) + eq(pobj.getoid(), oid) + eq(pobj.value, 2) + data = self._storage.loadSerial(oid, revid3) + pobj = loads(data) + eq(pobj.getoid(), oid) + eq(pobj.value, 3) + # Now pack all transactions; need to sleep a second to make + # sure that the pack time is greater than the last commit time. + now = packtime = time.time() + while packtime <= now: + packtime = time.time() + self._storage.pack(packtime, referencesf) + # All revisions of the object should be gone, since there is no + # reference from the root object to this object. + raises(KeyError, self._storage.loadSerial, oid, revid1) + raises(KeyError, self._storage.loadSerial, oid, revid2) + raises(KeyError, self._storage.loadSerial, oid, revid3) + + def checkPackJustOldRevisions(self): + eq = self.assertEqual + raises = self.assertRaises + loads = self._makeloader() + # Create a root object. This can't be an instance of Object, + # otherwise the pickling machinery will serialize it as a persistent + # id and not as an object that contains references (persistent ids) to + # other objects. + root = Root() + # Create a persistent object, with some initial state + obj = self._newobj() + oid = obj.getoid() + # Link the root object to the persistent object, in order to keep the + # persistent object alive. Store the root object. 
+ root.obj = obj + root.value = 0 + revid0 = self._dostoreNP(ZERO, data=dumps(root)) + # Make sure the root can be retrieved + data, revid = load_current(self._storage, ZERO) + eq(revid, revid0) + eq(loads(data).value, 0) + # Commit three different revisions of the other object + obj.value = 1 + revid1 = self._dostoreNP(oid, data=pdumps(obj)) + obj.value = 2 + revid2 = self._dostoreNP(oid, revid=revid1, data=pdumps(obj)) + obj.value = 3 + revid3 = self._dostoreNP(oid, revid=revid2, data=pdumps(obj)) + # Now make sure all three revisions can be extracted + data = self._storage.loadSerial(oid, revid1) + pobj = loads(data) + eq(pobj.getoid(), oid) + eq(pobj.value, 1) + data = self._storage.loadSerial(oid, revid2) + pobj = loads(data) + eq(pobj.getoid(), oid) + eq(pobj.value, 2) + data = self._storage.loadSerial(oid, revid3) + pobj = loads(data) + eq(pobj.getoid(), oid) + eq(pobj.value, 3) + # Now pack just revisions 1 and 2. The object's current revision + # should stay alive because it's pointed to by the root. + now = packtime = time.time() + while packtime <= now: + packtime = time.time() + self._storage.pack(packtime, referencesf) + # Make sure the revisions are gone, but that object zero and revision + # 3 are still there and correct + data, revid = load_current(self._storage, ZERO) + eq(revid, revid0) + eq(loads(data).value, 0) + raises(KeyError, self._storage.loadSerial, oid, revid1) + raises(KeyError, self._storage.loadSerial, oid, revid2) + data = self._storage.loadSerial(oid, revid3) + pobj = loads(data) + eq(pobj.getoid(), oid) + eq(pobj.value, 3) + data, revid = load_current(self._storage, oid) + eq(revid, revid3) + pobj = loads(data) + eq(pobj.getoid(), oid) + eq(pobj.value, 3) + + def checkPackOnlyOneObject(self): + eq = self.assertEqual + raises = self.assertRaises + loads = self._makeloader() + # Create a root object. 
This can't be an instance of Object, + # otherwise the pickling machinery will serialize it as a persistent + # id and not as an object that contains references (persistent ids) to + # other objects. + root = Root() + # Create a persistent object, with some initial state + obj1 = self._newobj() + oid1 = obj1.getoid() + # Create another persistent object, with some initial state. + obj2 = self._newobj() + oid2 = obj2.getoid() + # Link the root object to the persistent objects, in order to keep + # them alive. Store the root object. + root.obj1 = obj1 + root.obj2 = obj2 + root.value = 0 + revid0 = self._dostoreNP(ZERO, data=dumps(root)) + # Make sure the root can be retrieved + data, revid = load_current(self._storage, ZERO) + eq(revid, revid0) + eq(loads(data).value, 0) + # Commit three different revisions of the first object + obj1.value = 1 + revid1 = self._dostoreNP(oid1, data=pdumps(obj1)) + obj1.value = 2 + revid2 = self._dostoreNP(oid1, revid=revid1, data=pdumps(obj1)) + obj1.value = 3 + revid3 = self._dostoreNP(oid1, revid=revid2, data=pdumps(obj1)) + # Now make sure all three revisions can be extracted + data = self._storage.loadSerial(oid1, revid1) + pobj = loads(data) + eq(pobj.getoid(), oid1) + eq(pobj.value, 1) + data = self._storage.loadSerial(oid1, revid2) + pobj = loads(data) + eq(pobj.getoid(), oid1) + eq(pobj.value, 2) + data = self._storage.loadSerial(oid1, revid3) + pobj = loads(data) + eq(pobj.getoid(), oid1) + eq(pobj.value, 3) + # Now commit a revision of the second object + obj2.value = 11 + revid4 = self._dostoreNP(oid2, data=pdumps(obj2)) + # And make sure the revision can be extracted + data = self._storage.loadSerial(oid2, revid4) + pobj = loads(data) + eq(pobj.getoid(), oid2) + eq(pobj.value, 11) + # Now pack just revisions 1 and 2 of object1. Object1's current + # revision should stay alive because it's pointed to by the root, as + # should Object2's current revision. 
+ now = packtime = time.time() + while packtime <= now: + packtime = time.time() + self._storage.pack(packtime, referencesf) + # Make sure the revisions are gone, but that object zero, object2, and + # revision 3 of object1 are still there and correct. + data, revid = load_current(self._storage, ZERO) + eq(revid, revid0) + eq(loads(data).value, 0) + raises(KeyError, self._storage.loadSerial, oid1, revid1) + raises(KeyError, self._storage.loadSerial, oid1, revid2) + data = self._storage.loadSerial(oid1, revid3) + pobj = loads(data) + eq(pobj.getoid(), oid1) + eq(pobj.value, 3) + data, revid = load_current(self._storage, oid1) + eq(revid, revid3) + pobj = loads(data) + eq(pobj.getoid(), oid1) + eq(pobj.value, 3) + data, revid = load_current(self._storage, oid2) + eq(revid, revid4) + eq(loads(data).value, 11) + data = self._storage.loadSerial(oid2, revid4) + pobj = loads(data) + eq(pobj.getoid(), oid2) + eq(pobj.value, 11) + +class PackableStorageWithOptionalGC(PackableStorage): + + def checkPackAllRevisionsNoGC(self): + self._initroot() + eq = self.assertEqual + raises = self.assertRaises + # Create a `persistent' object + obj = self._newobj() + oid = obj.getoid() + obj.value = 1 + # Commit three different revisions + revid1 = self._dostoreNP(oid, data=pdumps(obj)) + obj.value = 2 + revid2 = self._dostoreNP(oid, revid=revid1, data=pdumps(obj)) + obj.value = 3 + revid3 = self._dostoreNP(oid, revid=revid2, data=pdumps(obj)) + # Now make sure all three revisions can be extracted + data = self._storage.loadSerial(oid, revid1) + pobj = loads(data) + eq(pobj.getoid(), oid) + eq(pobj.value, 1) + data = self._storage.loadSerial(oid, revid2) + pobj = loads(data) + eq(pobj.getoid(), oid) + eq(pobj.value, 2) + data = self._storage.loadSerial(oid, revid3) + pobj = loads(data) + eq(pobj.getoid(), oid) + eq(pobj.value, 3) + # Now pack all transactions; need to sleep a second to make + # sure that the pack time is greater than the last commit time. 
+ now = packtime = time.time() + while packtime <= now: + packtime = time.time() + self._storage.pack(packtime, referencesf, gc=False) + # Only old revisions of the object should be gone. We don't gc + raises(KeyError, self._storage.loadSerial, oid, revid1) + raises(KeyError, self._storage.loadSerial, oid, revid2) + self._storage.loadSerial(oid, revid3) + + + +class PackableUndoStorage(PackableStorageBase): + + def checkPackUnlinkedFromRoot(self): + eq = self.assertEqual + db = DB(self._storage) + conn = db.open() + root = conn.root() + + txn = transaction.get() + txn.note(u'root') + txn.commit() + + now = packtime = time.time() + while packtime <= now: + packtime = time.time() + + obj = C() + obj.value = 7 + + root['obj'] = obj + txn = transaction.get() + txn.note(u'root -> o1') + txn.commit() + + del root['obj'] + txn = transaction.get() + txn.note(u'root -x-> o1') + txn.commit() + + self._storage.pack(packtime, referencesf) + + log = self._storage.undoLog() + tid = log[0]['id'] + db.undo(tid) + txn = transaction.get() + txn.note(u'undo root -x-> o1') + txn.commit() + + conn.sync() + + eq(root['obj'].value, 7) + + @time_monotonically_increases + def checkRedundantPack(self): + # It is an error to perform a pack with a packtime earlier + # than a previous packtime. The storage can't do a full + # traversal as of the packtime, because the previous pack may + # have removed revisions necessary for a full traversal. + + # It should be simple to test that a storage error is raised, + # but this test case goes to the trouble of constructing a + # scenario that would lose data if the earlier packtime was + # honored. 
+ + self._initroot() + + db = DB(self._storage) + conn = db.open() + root = conn.root() + + root["d"] = d = PersistentMapping() + transaction.commit() + snooze() + + obj = d["obj"] = C() + obj.value = 1 + transaction.commit() + snooze() + packt1 = time.time() + lost_oid = obj._p_oid + + obj = d["anotherobj"] = C() + obj.value = 2 + transaction.commit() + snooze() + packt2 = time.time() + + db.pack(packt2) + # BDBStorage allows the second pack, but doesn't lose data. + try: + db.pack(packt1) + except StorageError: + pass + # This object would be removed by the second pack, even though + # it is reachable. + load_current(self._storage, lost_oid) + + @time_monotonically_increases(0.1) + def checkPackUndoLog(self): + self._initroot() + # Create a `persistent' object + obj = self._newobj() + oid = obj.getoid() + obj.value = 1 + # Commit two different revisions + revid1 = self._dostoreNP(oid, data=pdumps(obj)) + obj.value = 2 + snooze() + packtime = time.time() + snooze() + self._dostoreNP(oid, revid=revid1, data=pdumps(obj)) + # Now pack the first transaction + self.assertEqual(3, len(self._storage.undoLog())) + self._storage.pack(packtime, referencesf) + # The undo log contains only the most resent transaction + self.assertEqual(1, len(self._storage.undoLog())) + + def dont_checkPackUndoLogUndoable(self): + # A disabled test. I wanted to test that the content of the + # undo log was consistent, but every storage appears to + # include something slightly different. If the result of this + # method is only used to fill a GUI then this difference + # doesnt matter. Perhaps re-enable this test once we agree + # what should be asserted. 
+ + self._initroot() + # Create two `persistent' object + obj1 = self._newobj() + oid1 = obj1.getoid() + obj1.value = 1 + obj2 = self._newobj() + oid2 = obj2.getoid() + obj2.value = 2 + + # Commit the first revision of each of them + revid11 = self._dostoreNP(oid1, data=pdumps(obj1), + description="1-1") + revid22 = self._dostoreNP(oid2, data=pdumps(obj2), + description="2-2") + + # remember the time. everything above here will be packed away + snooze() + packtime = time.time() + snooze() + # Commit two revisions of the first object + obj1.value = 3 + revid13 = self._dostoreNP(oid1, revid=revid11, + data=pdumps(obj1), description="1-3") + obj1.value = 4 + self._dostoreNP(oid1, revid=revid13, + data=pdumps(obj1), description="1-4") + # Commit one revision of the second object + obj2.value = 5 + self._dostoreNP(oid2, revid=revid22, + data=pdumps(obj2), description="2-5") + # Now pack + self.assertEqual(6,len(self._storage.undoLog())) + print('\ninitial undoLog was') + for r in self._storage.undoLog(): print(r) + self._storage.pack(packtime, referencesf) + # The undo log contains only two undoable transaction. + print('\nafter packing undoLog was') + for r in self._storage.undoLog(): print(r) + # what can we assert about that? + + +# A number of these threads are kicked off by _PackWhileWriting(). Their +# purpose is to abuse the database passed to the constructor with lots of +# random write activity while the main thread is packing it. 
+class ClientThread(TestThread): + + def __init__(self, db, choices, loop_trip, timer, thread_id): + TestThread.__init__(self) + self.db = db + self.choices = choices + self.loop_trip = loop_trip + self.millis = timer.elapsed_millis + self.thread_id = thread_id + # list of lists; each list has as many of these as a loop trip + # got thru: + # thread_id + # elapsed millis at loop top + # elapsed millis at attempt + # index into self.root getting replaced + # elapsed millis when outcome known + # 'OK' or 'Conflict' + # True if we got beyond this line, False if it raised an exception: + # self.root[index].value = MinPO(j) + self.outcomes = [] + + def runtest(self): + from random import choice + conn = self.db.open() + + for j in range(self.loop_trip): + assign_worked = False + alist = [self.thread_id, self.millis()] + self.outcomes.append(alist) + try: + index = choice(self.choices) + alist.extend([self.millis(), index]) + conn.root()[index].value = MinPO(j) + assign_worked = True + transaction.commit() + alist.append(self.millis()) + alist.append('OK') + except ConflictError: + alist.append(self.millis()) + alist.append('Conflict') + transaction.abort() + alist.append(assign_worked) + + conn.close() + +class ElapsedTimer(object): + def __init__(self, start_time): + self.start_time = start_time + + def elapsed_millis(self): + return int((time.time() - self.start_time) * 1000) + + +def IExternalGC_suite(factory): + """Return a test suite for a generic . + + Pass a factory taking a name and a blob directory name. 
+ """ + + def setup(test): + ZODB.tests.util.setUp(test) + test.globs['create_storage'] = factory + + return doctest.DocFileSuite( + 'IExternalGC.test', + setUp=setup, tearDown=ZODB.tests.util.tearDown, + checker=ZODB.tests.util.checker) diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/tests/PersistentStorage.py b/thesisenv/lib/python3.6/site-packages/ZODB/tests/PersistentStorage.py new file mode 100644 index 0000000..031d19a --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/tests/PersistentStorage.py @@ -0,0 +1,51 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. 
+# +############################################################################## +"""Test that a storage's values persist across open and close.""" + +from ZODB.utils import load_current + +class PersistentStorage(object): + + def checkUpdatesPersist(self): + oids = [] + + def new_oid_wrapper(l=oids, new_oid=self._storage.new_oid): + oid = new_oid() + l.append(oid) + return oid + + self._storage.new_oid = new_oid_wrapper + + self._dostore() + oid = self._storage.new_oid() + revid = self._dostore(oid) + oid = self._storage.new_oid() + revid = self._dostore(oid, data=1) + revid = self._dostore(oid, revid, data=2) + self._dostore(oid, revid, data=3) + + # keep copies of all the objects + objects = [] + for oid in oids: + p, s = load_current(self._storage, oid) + objects.append((oid, '', p, s)) + + self._storage.close() + self.open() + + # keep copies of all the objects + for oid, ver, p, s in objects: + _p, _s = load_current(self._storage, oid) + self.assertEqual(p, _p) + self.assertEqual(s, _s) diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/tests/ReadOnlyStorage.py b/thesisenv/lib/python3.6/site-packages/ZODB/tests/ReadOnlyStorage.py new file mode 100644 index 0000000..b911161 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/tests/ReadOnlyStorage.py @@ -0,0 +1,58 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. 
+# +############################################################################## +from ZODB.Connection import TransactionMetaData +from ZODB.POSException import ReadOnlyError, Unsupported + +from ZODB.utils import load_current + +class ReadOnlyStorage(object): + + def _create_data(self): + # test a read-only storage that already has some data + self.oids = {} + for i in range(10): + oid = self._storage.new_oid() + revid = self._dostore(oid) + self.oids[oid] = revid + + def _make_readonly(self): + self._storage.close() + self.open(read_only=True) + self.assertTrue(self._storage.isReadOnly()) + + def checkReadMethods(self): + self._create_data() + self._make_readonly() + # Note that this doesn't check _all_ read methods. + for oid in self.oids.keys(): + data, revid = load_current(self._storage, oid) + self.assertEqual(revid, self.oids[oid]) + # Storages without revisions may not have loadSerial(). + try: + _data = self._storage.loadSerial(oid, revid) + self.assertEqual(data, _data) + except Unsupported: + pass + + def checkWriteMethods(self): + self._make_readonly() + self.assertRaises(ReadOnlyError, self._storage.new_oid) + t = TransactionMetaData() + self.assertRaises(ReadOnlyError, self._storage.tpc_begin, t) + + self.assertRaises(ReadOnlyError, self._storage.store, + b'\000' * 8, None, b'', '', t) + + self.assertRaises(ReadOnlyError, self._storage.undo, + b'\000' * 8, t) diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/tests/RecoveryStorage.py b/thesisenv/lib/python3.6/site-packages/ZODB/tests/RecoveryStorage.py new file mode 100644 index 0000000..2f83f67 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/tests/RecoveryStorage.py @@ -0,0 +1,200 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). 
A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""More recovery and iterator tests.""" + +import transaction +from ZODB.Connection import TransactionMetaData +from ZODB.tests.IteratorStorage import IteratorDeepCompare +from ZODB.tests.StorageTestBase import MinPO, snooze +from ZODB import DB +from ZODB.serialize import referencesf + +from ZODB.utils import load_current +from ZODB.tests.util import time_monotonically_increases + +import time + + +class RecoveryStorage(IteratorDeepCompare): + + # Requires a setUp() that creates a self._dst destination storage + def checkSimpleRecovery(self): + oid = self._storage.new_oid() + revid = self._dostore(oid, data=11) + revid = self._dostore(oid, revid=revid, data=12) + revid = self._dostore(oid, revid=revid, data=13) + self._dst.copyTransactionsFrom(self._storage) + self.compare(self._storage, self._dst) + + def checkRestoreAcrossPack(self): + db = DB(self._storage) + c = db.open() + r = c.root() + obj = r["obj1"] = MinPO(1) + transaction.commit() + obj = r["obj2"] = MinPO(1) + transaction.commit() + + self._dst.copyTransactionsFrom(self._storage) + self._dst.pack(time.time(), referencesf) + + self._undo(self._storage.undoInfo()[0]['id']) + + # copy the final transaction manually. even though there + # was a pack, the restore() ought to succeed. + it = self._storage.iterator() + # Get the last transaction and its record iterator. 
Record iterators + # can't be accessed out-of-order, so we need to do this in a bit + # complicated way: + for final in it: + records = list(final) + + self._dst.tpc_begin(final, final.tid, final.status) + for r in records: + self._dst.restore(r.oid, r.tid, r.data, '', r.data_txn, + final) + self._dst.tpc_vote(final) + self._dst.tpc_finish(final) + + @time_monotonically_increases + def checkPackWithGCOnDestinationAfterRestore(self): + raises = self.assertRaises + db = DB(self._storage) + conn = db.open() + root = conn.root() + root.obj = obj1 = MinPO(1) + txn = transaction.get() + txn.note(u'root -> obj') + txn.commit() + root.obj.obj = obj2 = MinPO(2) + txn = transaction.get() + txn.note(u'root -> obj -> obj') + txn.commit() + del root.obj + txn = transaction.get() + txn.note(u'root -X->') + txn.commit() + # Now copy the transactions to the destination + self._dst.copyTransactionsFrom(self._storage) + # Now pack the destination. + snooze() + self._dst.pack(time.time(), referencesf) + # And check to see that the root object exists, but not the other + # objects. + data, serial = load_current(self._dst, root._p_oid) + raises(KeyError, load_current, self._dst, obj1._p_oid) + raises(KeyError, load_current, self._dst, obj2._p_oid) + + def checkRestoreWithMultipleObjectsInUndoRedo(self): + from ZODB.FileStorage import FileStorage + + # Undo creates backpointers in (at least) FileStorage. ZODB 3.2.1 + # FileStorage._data_find() had an off-by-8 error, neglecting to + # account for the size of the backpointer when searching a + # transaction with multiple data records. The results were + # unpredictable. For example, it could raise a Python exception + # due to passing a negative offset to file.seek(), or could + # claim that a transaction didn't have data for an oid despite + # that it actually did. + # + # The former failure mode was seen in real life, in a ZRS secondary + # doing recovery. 
On my box today, the second failure mode is + # what happens in this test (with an unpatched _data_find, of + # course). Note that the error can only "bite" if more than one + # data record is in a transaction, and the oid we're looking for + # follows at least one data record with a backpointer. + # + # Unfortunately, _data_find() is a low-level implementation detail, + # and this test does some horrid white-box abuse to test it. + + is_filestorage = isinstance(self._storage, FileStorage) + + db = DB(self._storage) + c = db.open() + r = c.root() + + # Create some objects. + r["obj1"] = MinPO(1) + r["obj2"] = MinPO(1) + transaction.commit() + + # Add x attributes to them. + r["obj1"].x = 'x1' + r["obj2"].x = 'x2' + transaction.commit() + + r = db.open().root() + self.assertEqual(r["obj1"].x, 'x1') + self.assertEqual(r["obj2"].x, 'x2') + + # Dirty tricks. + if is_filestorage: + obj1_oid = r["obj1"]._p_oid + obj2_oid = r["obj2"]._p_oid + # This will be the offset of the next transaction, which + # will contain two backpointers. + pos = self._storage.getSize() + + # Undo the attribute creation. + info = self._storage.undoInfo() + tid = info[0]['id'] + t = TransactionMetaData() + self._storage.tpc_begin(t) + oids = self._storage.undo(tid, t) + self._storage.tpc_vote(t) + self._storage.tpc_finish(t) + + r = db.open().root() + self.assertRaises(AttributeError, getattr, r["obj1"], 'x') + self.assertRaises(AttributeError, getattr, r["obj2"], 'x') + + if is_filestorage: + # _data_find should find data records for both objects in that + # transaction. Without the patch, the second assert failed + # (it claimed it couldn't find a data record for obj2) on my + # box, but other failure modes were possible. + self.assertTrue(self._storage._data_find(pos, obj1_oid, '') > 0) + self.assertTrue(self._storage._data_find(pos, obj2_oid, '') > 0) + + # The offset of the next ("redo") transaction. + pos = self._storage.getSize() + + # Undo the undo (restore the attributes). 
+ info = self._storage.undoInfo() + tid = info[0]['id'] + t = TransactionMetaData() + self._storage.tpc_begin(t) + oids = self._storage.undo(tid, t) + self._storage.tpc_vote(t) + self._storage.tpc_finish(t) + + r = db.open().root() + self.assertEqual(r["obj1"].x, 'x1') + self.assertEqual(r["obj2"].x, 'x2') + + if is_filestorage: + # Again _data_find should find both objects in this txn, and + # again the second assert failed on my box. + self.assertTrue(self._storage._data_find(pos, obj1_oid, '') > 0) + self.assertTrue(self._storage._data_find(pos, obj2_oid, '') > 0) + + # Indirectly provoke .restore(). .restore in turn indirectly + # provokes _data_find too, but not usefully for the purposes of + # the specific bug this test aims at: copyTransactionsFrom() uses + # storage iterators that chase backpointers themselves, and + # return the data they point at instead. The result is that + # _data_find didn't actually see anything dangerous in this + # part of the test. + self._dst.copyTransactionsFrom(self._storage) + self.compare(self._storage, self._dst) diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/tests/RevisionStorage.py b/thesisenv/lib/python3.6/site-packages/ZODB/tests/RevisionStorage.py new file mode 100644 index 0000000..7a497d6 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/tests/RevisionStorage.py @@ -0,0 +1,175 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. 
+# +############################################################################## +"""Check loadSerial() on storages that support historical revisions.""" + +from ZODB.Connection import TransactionMetaData +from ZODB.tests.MinPO import MinPO +from ZODB.tests.StorageTestBase import zodb_unpickle, zodb_pickle, snooze +from ZODB.utils import p64, u64, load_current +from ZODB.tests.util import time_monotonically_increases + +ZERO = '\0'*8 + +class RevisionStorage(object): + + def checkLoadSerial(self): + oid = self._storage.new_oid() + revid = ZERO + revisions = {} + for i in range(31, 38): + revid = self._dostore(oid, revid=revid, data=MinPO(i)) + revisions[revid] = MinPO(i) + # Now make sure all the revisions have the correct value + for revid, value in revisions.items(): + data = self._storage.loadSerial(oid, revid) + self.assertEqual(zodb_unpickle(data), value) + + @time_monotonically_increases + def checkLoadBefore(self): + # Store 10 revisions of one object and then make sure that we + # can get all the non-current revisions back. + oid = self._storage.new_oid() + revs = [] + revid = None + for i in range(10): + # We need to ensure that successive timestamps are at least + # two apart, so that a timestamp exists that's unambiguously + # between successive timestamps. Each call to snooze() + # guarantees that the next timestamp will be at least one + # larger (and probably much more than that) than the previous + # one. 
+ snooze() + snooze() + revid = self._dostore(oid, revid, data=MinPO(i)) + revs.append(load_current(self._storage, oid)) + + prev = u64(revs[0][1]) + for i in range(1, 10): + tid = revs[i][1] + cur = u64(tid) + middle = prev + (cur - prev) // 2 + assert prev < middle < cur # else the snooze() trick failed + prev = cur + t = self._storage.loadBefore(oid, p64(middle)) + self.assertTrue(t is not None) + data, start, end = t + self.assertEqual(revs[i-1][0], data) + self.assertEqual(tid, end) + + def checkLoadBeforeEdges(self): + # Check the edges cases for a non-current load. + oid = self._storage.new_oid() + + self.assertRaises(KeyError, self._storage.loadBefore, + oid, p64(0)) + + revid1 = self._dostore(oid, data=MinPO(1)) + + self.assertEqual(self._storage.loadBefore(oid, p64(0)), None) + self.assertEqual(self._storage.loadBefore(oid, revid1), None) + + cur = p64(u64(revid1) + 1) + data, start, end = self._storage.loadBefore(oid, cur) + self.assertEqual(zodb_unpickle(data), MinPO(1)) + self.assertEqual(start, revid1) + self.assertEqual(end, None) + + revid2 = self._dostore(oid, revid=revid1, data=MinPO(2)) + data, start, end = self._storage.loadBefore(oid, cur) + self.assertEqual(zodb_unpickle(data), MinPO(1)) + self.assertEqual(start, revid1) + self.assertEqual(end, revid2) + + @time_monotonically_increases + def checkLoadBeforeOld(self): + # Look for a very old revision. With the BaseStorage implementation + # this should require multple history() calls. + oid = self._storage.new_oid() + revs = [] + revid = None + for i in range(50): + revid = self._dostore(oid, revid, data=MinPO(i)) + revs.append(revid) + + data, start, end = self._storage.loadBefore(oid, revs[12]) + self.assertEqual(zodb_unpickle(data), MinPO(11)) + self.assertEqual(start, revs[11]) + self.assertEqual(end, revs[12]) + + + # Unsure: Is it okay to assume everyone testing against RevisionStorage + # implements undo? + + def checkLoadBeforeUndo(self): + # Do several transactions then undo them. 
+ oid = self._storage.new_oid() + revid = None + for i in range(5): + revid = self._dostore(oid, revid, data=MinPO(i)) + revs = [] + for i in range(4): + info = self._storage.undoInfo() + tid = info[0]["id"] + # Always undo the most recent txn, so the value will + # alternate between 3 and 4. + self._undo(tid, note="undo %d" % i) + revs.append(load_current(self._storage, oid)) + + prev_tid = None + for i, (data, tid) in enumerate(revs): + t = self._storage.loadBefore(oid, p64(u64(tid) + 1)) + self.assertEqual(data, t[0]) + self.assertEqual(tid, t[1]) + if prev_tid: + self.assertTrue(prev_tid < t[1]) + prev_tid = t[1] + if i < 3: + self.assertEqual(revs[i+1][1], t[2]) + else: + self.assertEqual(None, t[2]) + + def checkLoadBeforeConsecutiveTids(self): + eq = self.assertEqual + oid = self._storage.new_oid() + def helper(tid, revid, x): + data = zodb_pickle(MinPO(x)) + t = TransactionMetaData() + try: + self._storage.tpc_begin(t, p64(tid)) + self._storage.store(oid, revid, data, '', t) + # Finish the transaction + self._storage.tpc_vote(t) + newrevid = self._storage.tpc_finish(t) + except: + self._storage.tpc_abort(t) + raise + return newrevid + revid1 = helper(1, None, 1) + revid2 = helper(2, revid1, 2) + revid3 = helper(3, revid2, 3) + data, start_tid, end_tid = self._storage.loadBefore(oid, p64(2)) + eq(zodb_unpickle(data), MinPO(1)) + eq(u64(start_tid), 1) + eq(u64(end_tid), 2) + + def checkLoadBeforeCreation(self): + eq = self.assertEqual + oid1 = self._storage.new_oid() + oid2 = self._storage.new_oid() + revid1 = self._dostore(oid1) + revid2 = self._dostore(oid2) + results = self._storage.loadBefore(oid2, revid2) + eq(results, None) + + # TODO: There are other edge cases to handle, including pack. 
diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/tests/StorageTestBase.py b/thesisenv/lib/python3.6/site-packages/ZODB/tests/StorageTestBase.py new file mode 100644 index 0000000..1488e23 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/tests/StorageTestBase.py @@ -0,0 +1,183 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Provide a mixin base class for storage tests. + +The StorageTestBase class provides basic setUp() and tearDown() +semantics (which you can override), and it also provides a helper +method _dostore() which performs a complete store transaction for a +single object revision. +""" +from __future__ import print_function +import sys +import time + +from ZODB.Connection import TransactionMetaData +from ZODB.utils import u64, z64 +from ZODB.tests.MinPO import MinPO +from ZODB._compat import PersistentPickler, Unpickler, BytesIO, _protocol +import ZODB.tests.util + + +ZERO = b'\0'*8 + +def snooze(): + # In Windows, it's possible that two successive time.time() calls return + # the same value. Tim guarantees that time never runs backwards. You + # usually want to call this before you pack a storage, or must make other + # guarantees about increasing timestamps. 
+ now = time.time() + while now == time.time(): + time.sleep(0.1) + +def _persistent_id(obj): + oid = getattr(obj, "_p_oid", None) + if getattr(oid, "__get__", None) is not None: + return None + else: + return oid + +def zodb_pickle(obj): + """Create a pickle in the format expected by ZODB.""" + f = BytesIO() + p = PersistentPickler(_persistent_id, f, _protocol) + klass = obj.__class__ + assert not hasattr(obj, '__getinitargs__'), "not ready for constructors" + args = None + + mod = getattr(klass, '__module__', None) + if mod is not None: + klass = mod, klass.__name__ + + state = obj.__getstate__() + + p.dump((klass, args)) + p.dump(state) + return f.getvalue() + +def persistent_load(pid): + # helper for zodb_unpickle + return "ref to %s.%s oid=%s" % (pid[1][0], pid[1][1], u64(pid[0])) + +def zodb_unpickle(data): + """Unpickle an object stored using the format expected by ZODB.""" + f = BytesIO(data) + u = Unpickler(f) + u.persistent_load = persistent_load + klass_info = u.load() + if isinstance(klass_info, tuple): + if isinstance(klass_info[0], type): + # Unclear: what is the second part of klass_info? + klass, xxx = klass_info + assert not xxx + else: + if isinstance(klass_info[0], tuple): + modname, klassname = klass_info[0] + else: + modname, klassname = klass_info + if modname == "__main__": + ns = globals() + else: + mod = import_helper(modname) + ns = mod.__dict__ + try: + klass = ns[klassname] + except KeyError: + print("can't find %s in %r" % (klassname, ns), file=sys.stderr) + inst = klass() + else: + raise ValueError("expected class info: %s" % repr(klass_info)) + state = u.load() + inst.__setstate__(state) + return inst + +def import_helper(name): + __import__(name) + return sys.modules[name] + + +class StorageTestBase(ZODB.tests.util.TestCase): + + # It would be simpler if concrete tests didn't need to extend + # setUp() and tearDown(). 
+ + _storage = None + + def _close(self): + # You should override this if closing your storage requires additional + # shutdown operations. + if self._storage is not None: + self._storage.close() + + def tearDown(self): + self._close() + ZODB.tests.util.TestCase.tearDown(self) + + def _dostore(self, oid=None, revid=None, data=None, + already_pickled=0, user=None, description=None): + """Do a complete storage transaction. The defaults are: + + - oid=None, ask the storage for a new oid + - revid=None, use a revid of ZERO + - data=None, pickle up some arbitrary data (the integer 7) + + Returns the object's new revision id. + """ + if oid is None: + oid = self._storage.new_oid() + if revid is None: + revid = ZERO + if data is None: + data = MinPO(7) + if type(data) == int: + data = MinPO(data) + if not already_pickled: + data = zodb_pickle(data) + # Begin the transaction + t = TransactionMetaData() + if user is not None: + t.user = user + if description is not None: + t.description = description + try: + self._storage.tpc_begin(t) + # Store an object + r1 = self._storage.store(oid, revid, data, '', t) + # Finish the transaction + r2 = self._storage.tpc_vote(t) + revid = self._storage.tpc_finish(t) + except: + self._storage.tpc_abort(t) + raise + return revid + + def _dostoreNP(self, oid=None, revid=None, data=None, + user=None, description=None): + return self._dostore(oid, revid, data, 1, user, description) + + # The following methods depend on optional storage features. + + def _undo(self, tid, expected_oids=None, note=None): + # Undo a tid that affects a single object (oid). + # This is very specialized. 
+ t = TransactionMetaData() + t.note(note or u"undo") + self._storage.tpc_begin(t) + undo_result = self._storage.undo(tid, t) + vote_result = self._storage.tpc_vote(t) + if expected_oids is not None: + oids = set(undo_result[1]) if undo_result else set() + if vote_result: + oids.update(vote_result) + self.assertEqual(oids, set(expected_oids)) + return self._storage.tpc_finish(t) diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/tests/Synchronization.py b/thesisenv/lib/python3.6/site-packages/ZODB/tests/Synchronization.py new file mode 100644 index 0000000..23a6bab --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/tests/Synchronization.py @@ -0,0 +1,118 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Test the storage's implemenetation of the storage synchronization spec. + +The Synchronization spec + http://www.zope.org/Documentation/Developer/Models/ZODB/ + ZODB_Architecture_Storage_Interface_State_Synchronization_Diag.html + +It specifies two states committing and non-committing. A storage +starts in the non-committing state. tpc_begin() transfers to the +committting state; tpc_abort() and tpc_finish() transfer back to +non-committing. + +Several other methods are only allowed in one state or another. Many +methods allowed only in the committing state require that they apply +to the currently committing transaction. 
+ +The spec is silent on a variety of methods that don't appear to modify +the state, e.g. load(), undoLog(), pack(). It's unclear whether there +is a separate set of synchronization rules that apply to these methods +or if the synchronization is implementation dependent, i.e. only what +is need to guarantee a corrected implementation. + +The synchronization spec is also silent on whether there is any +contract implied with the caller. If the storage can assume that a +single client is single-threaded and that it will not call, e.g., store() +until after it calls tpc_begin(), the implementation can be +substantially simplified. + +New and/or unspecified methods: + +tpc_vote(): handled like tpc_abort +undo(): how's that handled? + +Methods that have nothing to do with committing/non-committing: +load(), loadSerial(), getName(), getSize(), __len__(), history(), +undoLog(), pack(). + +Specific questions: + +The spec & docs say that undo() takes three arguments, the second +being a transaction. If the specified arg isn't the current +transaction, the undo() should raise StorageTransactionError. This +isn't implemented anywhere. It looks like undo can be called at +anytime. + +FileStorage does not allow undo() during a pack. How should this be +tested? Is it a general restriction? 
+ + + +""" + +from ZODB.Connection import TransactionMetaData +from ZODB.POSException import StorageTransactionError + +OID = "\000" * 8 +SERIALNO = "\000" * 8 +TID = "\000" * 8 + +class SynchronizedStorage(object): + + def verifyNotCommitting(self, callable, *args): + self.assertRaises(StorageTransactionError, callable, *args) + + def verifyWrongTrans(self, callable, *args): + t = TransactionMetaData() + self._storage.tpc_begin(t) + self.assertRaises(StorageTransactionError, callable, *args) + self._storage.tpc_abort(t) + + def checkStoreNotCommitting(self): + self.verifyNotCommitting(self._storage.store, + OID, SERIALNO, b"", "", TransactionMetaData()) + + def checkStoreWrongTrans(self): + self.verifyWrongTrans(self._storage.store, + OID, SERIALNO, b"", "", TransactionMetaData()) + + def checkAbortNotCommitting(self): + self._storage.tpc_abort(TransactionMetaData()) + + def checkAbortWrongTrans(self): + t = TransactionMetaData() + self._storage.tpc_begin(t) + self._storage.tpc_abort(TransactionMetaData()) + self._storage.tpc_abort(t) + + def checkFinishNotCommitting(self): + t = TransactionMetaData() + self.assertRaises(StorageTransactionError, + self._storage.tpc_finish, t) + self._storage.tpc_abort(t) + + def checkFinishWrongTrans(self): + t = TransactionMetaData() + self._storage.tpc_begin(t) + self.assertRaises(StorageTransactionError, + self._storage.tpc_finish, TransactionMetaData()) + self._storage.tpc_abort(t) + + def checkBeginCommitting(self): + t = TransactionMetaData() + self._storage.tpc_begin(t) + self._storage.tpc_abort(t) + + # TODO: how to check undo? 
diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/tests/TransactionalUndoStorage.py b/thesisenv/lib/python3.6/site-packages/ZODB/tests/TransactionalUndoStorage.py new file mode 100644 index 0000000..511b6d8 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/tests/TransactionalUndoStorage.py @@ -0,0 +1,773 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Check undo(). + +Any storage that supports undo() must pass these tests. +""" +import time + +from six import PY3 + +from persistent import Persistent +import transaction +from transaction import Transaction + +from ZODB import POSException +from ZODB.Connection import TransactionMetaData +from ZODB.serialize import referencesf +from ZODB.utils import p64, load_current +from ZODB import DB + +from ZODB.tests.MinPO import MinPO +from ZODB.tests.StorageTestBase import zodb_pickle, zodb_unpickle + +ZERO = '\0'*8 + +class C(Persistent): + pass + +def snooze(): + # In Windows, it's possible that two successive time.time() calls return + # the same value. Tim guarantees that time never runs backwards. You + # usually want to call this before you pack a storage, or must make other + # guarantees about increasing timestamps. + now = time.time() + while now == time.time(): + time.sleep(0.1) + +def listeq(L1, L2): + """Return True if L1.sort() == L2.sort() + + Also support iterators. 
+ """ + return sorted(L1) == sorted(L2) + +class TransactionalUndoStorage(object): + + def _multi_obj_transaction(self, objs): + t = TransactionMetaData() + self._storage.tpc_begin(t) + for oid, rev, data in objs: + self._storage.store(oid, rev, data, '', t) + self._storage.tpc_vote(t) + return self._storage.tpc_finish(t) + + def _iterate(self): + """Iterate over the storage in its final state.""" + # This is testing that the iterator() code works correctly. + # The hasattr() guards against ZEO, which doesn't support iterator. + if not hasattr(self._storage, "iterator"): + return + iter = self._storage.iterator() + for txn in iter: + for rec in txn: + pass + + def _begin_undos_vote(self, t, *tids): + self._storage.tpc_begin(t) + oids = set() + for tid in tids: + undo_result = self._storage.undo(tid, t) + if undo_result: + oids.update(undo_result[1]) + oids.update(self._storage.tpc_vote(t) or ()) + return oids + + def undo(self, tid, note=None): + t = TransactionMetaData() + if note is not None: + t.note(note) + oids = self._begin_undos_vote(t, tid) + self._storage.tpc_finish(t) + return oids + + def checkSimpleTransactionalUndo(self): + eq = self.assertEqual + oid = self._storage.new_oid() + revid = self._dostore(oid, data=MinPO(23)) + revid = self._dostore(oid, revid=revid, data=MinPO(24)) + revid = self._dostore(oid, revid=revid, data=MinPO(25)) + + info = self._storage.undoInfo() + # Now start an undo transaction + self._undo(info[0]["id"], [oid], note="undo1") + data, revid = load_current(self._storage, oid) + eq(zodb_unpickle(data), MinPO(24)) + + # Do another one + info = self._storage.undoInfo() + self._undo(info[2]["id"], [oid], note="undo2") + data, revid = load_current(self._storage, oid) + eq(zodb_unpickle(data), MinPO(23)) + + # Try to undo the first record + info = self._storage.undoInfo() + self._undo(info[4]["id"], [oid], note="undo3") + # This should fail since we've undone the object's creation + self.assertRaises(KeyError, load_current, 
self._storage, oid) + + # And now let's try to redo the object's creation + info = self._storage.undoInfo() + self._undo(info[0]["id"], [oid]) + data, revid = load_current(self._storage, oid) + eq(zodb_unpickle(data), MinPO(23)) + self._iterate() + + def checkCreationUndoneGetTid(self): + # create an object + oid = self._storage.new_oid() + self._dostore(oid, data=MinPO(23)) + # undo its creation + info = self._storage.undoInfo() + tid = info[0]['id'] + self.undo(tid, 'undo1') + # Check that calling getTid on an uncreated object raises a KeyError + # The current version of FileStorage fails this test + self.assertRaises(KeyError, self._storage.getTid, oid) + + def checkUndoCreationBranch1(self): + eq = self.assertEqual + oid = self._storage.new_oid() + revid = self._dostore(oid, data=MinPO(11)) + revid = self._dostore(oid, revid=revid, data=MinPO(12)) + # Undo the last transaction + info = self._storage.undoInfo() + self._undo(info[0]['id'], [oid]) + data, revid = load_current(self._storage, oid) + eq(zodb_unpickle(data), MinPO(11)) + + # Now from here, we can either redo the last undo, or undo the object + # creation. Let's undo the object creation. + info = self._storage.undoInfo() + self._undo(info[2]['id'], [oid]) + self.assertRaises(KeyError, load_current, self._storage, oid) + + # Loading current data via loadBefore should raise a POSKeyError too: + self.assertRaises(KeyError, self._storage.loadBefore, oid, + b'\x7f\xff\xff\xff\xff\xff\xff\xff') + self._iterate() + + def checkUndoCreationBranch2(self): + eq = self.assertEqual + oid = self._storage.new_oid() + revid = self._dostore(oid, data=MinPO(11)) + revid = self._dostore(oid, revid=revid, data=MinPO(12)) + # Undo the last transaction + info = self._storage.undoInfo() + self._undo(info[0]['id'], [oid]) + data, revid = load_current(self._storage, oid) + eq(zodb_unpickle(data), MinPO(11)) + # Now from here, we can either redo the last undo, or undo the object + # creation. 
Let's redo the last undo + info = self._storage.undoInfo() + self._undo(info[0]['id'], [oid]) + data, revid = load_current(self._storage, oid) + eq(zodb_unpickle(data), MinPO(12)) + self._iterate() + + def checkTwoObjectUndo(self): + eq = self.assertEqual + # Convenience + p31, p32, p51, p52 = map(zodb_pickle, + map(MinPO, (31, 32, 51, 52))) + oid1 = self._storage.new_oid() + oid2 = self._storage.new_oid() + revid1 = revid2 = ZERO + # Store two objects in the same transaction + t = TransactionMetaData() + self._storage.tpc_begin(t) + self._storage.store(oid1, revid1, p31, '', t) + self._storage.store(oid2, revid2, p51, '', t) + # Finish the transaction + self._storage.tpc_vote(t) + tid = self._storage.tpc_finish(t) + # Update those same two objects + t = TransactionMetaData() + self._storage.tpc_begin(t) + self._storage.store(oid1, tid, p32, '', t) + self._storage.store(oid2, tid, p52, '', t) + # Finish the transaction + self._storage.tpc_vote(t) + self._storage.tpc_finish(t) + # Make sure the objects have the current value + data, revid1 = load_current(self._storage, oid1) + eq(zodb_unpickle(data), MinPO(32)) + data, revid2 = load_current(self._storage, oid2) + eq(zodb_unpickle(data), MinPO(52)) + + # Now attempt to undo the transaction containing two objects + info = self._storage.undoInfo() + self._undo(info[0]['id'], [oid1, oid2]) + data, revid1 = load_current(self._storage, oid1) + eq(zodb_unpickle(data), MinPO(31)) + data, revid2 = load_current(self._storage, oid2) + eq(zodb_unpickle(data), MinPO(51)) + self._iterate() + + def checkTwoObjectUndoAtOnce(self): + # Convenience + eq = self.assertEqual + unless = self.assertTrue + p30, p31, p32, p50, p51, p52 = map(zodb_pickle, + map(MinPO, + (30, 31, 32, 50, 51, 52))) + oid1 = self._storage.new_oid() + oid2 = self._storage.new_oid() + # Store two objects in the same transaction + tid = self._multi_obj_transaction([(oid1, ZERO, p30), + (oid2, ZERO, p50), + ]) + # Update those same two objects + tid = 
self._multi_obj_transaction([(oid1, tid, p31), + (oid2, tid, p51), + ]) + # Update those same two objects + tid = self._multi_obj_transaction([(oid1, tid, p32), + (oid2, tid, p52), + ]) + # Make sure the objects have the current value + data, revid1 = load_current(self._storage, oid1) + eq(zodb_unpickle(data), MinPO(32)) + data, revid2 = load_current(self._storage, oid2) + eq(zodb_unpickle(data), MinPO(52)) + # Now attempt to undo the transaction containing two objects + info = self._storage.undoInfo() + tid = info[0]['id'] + tid1 = info[1]['id'] + t = TransactionMetaData() + oids = self._begin_undos_vote(t, tid, tid1) + serial = self._storage.tpc_finish(t) + # We may get the finalization stuff called an extra time, + # depending on the implementation. + if serial is None: + self.assertEqual(oids, {oid1, oid2}) + data, revid1 = load_current(self._storage, oid1) + eq(zodb_unpickle(data), MinPO(30)) + data, revid2 = load_current(self._storage, oid2) + eq(zodb_unpickle(data), MinPO(50)) + + # Now try to undo the one we just did to undo, whew + info = self._storage.undoInfo() + self._undo(info[0]['id'], [oid1, oid2]) + data, revid1 = load_current(self._storage, oid1) + eq(zodb_unpickle(data), MinPO(32)) + data, revid2 = load_current(self._storage, oid2) + eq(zodb_unpickle(data), MinPO(52)) + self._iterate() + + def checkTwoObjectUndoAgain(self): + eq = self.assertEqual + p31, p32, p33, p51, p52, p53 = map( + zodb_pickle, + map(MinPO, (31, 32, 33, 51, 52, 53))) + # Like the above, but the first revision of the objects are stored in + # different transactions. 
+ oid1 = self._storage.new_oid() + oid2 = self._storage.new_oid() + revid1 = self._dostore(oid1, data=p31, already_pickled=1) + revid2 = self._dostore(oid2, data=p51, already_pickled=1) + # Update those same two objects + t = TransactionMetaData() + self._storage.tpc_begin(t) + self._storage.store(oid1, revid1, p32, '', t) + self._storage.store(oid2, revid2, p52, '', t) + # Finish the transaction + self._storage.tpc_vote(t) + self._storage.tpc_finish(t) + # Now attempt to undo the transaction containing two objects + info = self._storage.undoInfo() + self._undo(info[0]["id"], [oid1, oid2]) + data, revid1 = load_current(self._storage, oid1) + eq(zodb_unpickle(data), MinPO(31)) + data, revid2 = load_current(self._storage, oid2) + eq(zodb_unpickle(data), MinPO(51)) + # Like the above, but this time, the second transaction contains only + # one object. + t = TransactionMetaData() + self._storage.tpc_begin(t) + self._storage.store(oid1, revid1, p33, '', t) + self._storage.store(oid2, revid2, p53, '', t) + # Finish the transaction + self._storage.tpc_vote(t) + tid = self._storage.tpc_finish(t) + # Update in different transactions + revid1 = self._dostore(oid1, revid=tid, data=MinPO(34)) + revid2 = self._dostore(oid2, revid=tid, data=MinPO(54)) + # Now attempt to undo the transaction containing two objects + info = self._storage.undoInfo() + self.undo(info[1]['id']) + data, revid1 = load_current(self._storage, oid1) + eq(zodb_unpickle(data), MinPO(33)) + data, revid2 = load_current(self._storage, oid2) + eq(zodb_unpickle(data), MinPO(54)) + self._iterate() + + def checkNotUndoable(self): + eq = self.assertEqual + # Set things up so we've got a transaction that can't be undone + oid = self._storage.new_oid() + revid_a = self._dostore(oid, data=MinPO(51)) + revid_b = self._dostore(oid, revid=revid_a, data=MinPO(52)) + revid_c = self._dostore(oid, revid=revid_b, data=MinPO(53)) + # Start the undo + info = self._storage.undoInfo() + tid = info[1]['id'] + t = 
TransactionMetaData() + self.assertRaises(POSException.UndoError, + self._begin_undos_vote, t, tid) + self._storage.tpc_abort(t) + # Now have more fun: object1 and object2 are in the same transaction, + # which we'll try to undo to, but one of them has since modified in + # different transaction, so the undo should fail. + oid1 = oid + revid1 = revid_c + oid2 = self._storage.new_oid() + revid2 = ZERO + p81, p82, p91, p92 = map(zodb_pickle, + map(MinPO, (81, 82, 91, 92))) + + t = TransactionMetaData() + self._storage.tpc_begin(t) + self._storage.store(oid1, revid1, p81, '', t) + self._storage.store(oid2, revid2, p91, '', t) + self._storage.tpc_vote(t) + tid = self._storage.tpc_finish(t) + # Make sure the objects have the expected values + data, revid_11 = load_current(self._storage, oid1) + eq(zodb_unpickle(data), MinPO(81)) + data, revid_22 = load_current(self._storage, oid2) + eq(zodb_unpickle(data), MinPO(91)) + eq(revid_11, tid) + eq(revid_22, tid) + # Now modify oid2 + revid2 = self._dostore(oid2, tid, MinPO(92)) + self.assertNotEqual(tid, revid2) + info = self._storage.undoInfo() + tid = info[1]['id'] + t = TransactionMetaData() + self.assertRaises(POSException.UndoError, + self._begin_undos_vote, t, tid) + self._storage.tpc_abort(t) + self._iterate() + + def checkTransactionalUndoAfterPack(self): + # bwarsaw Date: Thu Mar 28 21:04:43 2002 UTC + # This is a test which should provoke the underlying bug in + # transactionalUndo() on a standby storage. If our hypothesis + # is correct, the bug is in FileStorage, and is caused by + # encoding the file position in the `id' field of the undoLog + # information. Note that Full just encodes the tid, but this + # is a problem for FileStorage (we have a strategy for fixing + # this). + + # So, basically, this makes sure that undo info doesn't depend + # on file positions. We change the file positions in an undo + # record by packing. 
+ + # Add a few object revisions + oid = b'\0'*8 + revid0 = self._dostore(oid, data=MinPO(50)) + revid1 = self._dostore(oid, revid=revid0, data=MinPO(51)) + snooze() + packtime = time.time() + snooze() # time.time() now distinct from packtime + revid2 = self._dostore(oid, revid=revid1, data=MinPO(52)) + self._dostore(oid, revid=revid2, data=MinPO(53)) + # Now get the undo log + info = self._storage.undoInfo() + self.assertEqual(len(info), 4) + tid = info[0]['id'] + # Now pack just the initial revision of the object. We need the + # second revision otherwise we won't be able to undo the third + # revision! + self._storage.pack(packtime, referencesf) + # Make some basic assertions about the undo information now + info2 = self._storage.undoInfo() + self.assertEqual(len(info2), 2) + # And now attempt to undo the last transaction + undone, = self.undo(tid) + self.assertEqual(undone, oid) + data, revid = load_current(self._storage, oid) + # The object must now be at the second state + self.assertEqual(zodb_unpickle(data), MinPO(52)) + self._iterate() + + def checkTransactionalUndoAfterPackWithObjectUnlinkFromRoot(self): + eq = self.assertEqual + db = DB(self._storage) + conn = db.open() + try: + root = conn.root() + + o1 = C() + o2 = C() + root['obj'] = o1 + o1.obj = o2 + txn = transaction.get() + txn.note(u'o1 -> o2') + txn.commit() + now = packtime = time.time() + while packtime <= now: + packtime = time.time() + + o3 = C() + o2.obj = o3 + txn = transaction.get() + txn.note(u'o1 -> o2 -> o3') + txn.commit() + + o1.obj = o3 + txn = transaction.get() + txn.note(u'o1 -> o3') + txn.commit() + + log = self._storage.undoLog() + eq(len(log), 4) + for entry in zip(log, (b'o1 -> o3', b'o1 -> o2 -> o3', + b'o1 -> o2', b'initial database creation')): + eq(entry[0]['description'], entry[1]) + + self._storage.pack(packtime, referencesf) + + log = self._storage.undoLog() + for entry in zip(log, (b'o1 -> o3', b'o1 -> o2 -> o3')): + eq(entry[0]['description'], entry[1]) + + tid = 
log[0]['id'] + db.undo(tid) + txn = transaction.get() + txn.note(u'undo') + txn.commit() + # undo does a txn-undo, but doesn't invalidate + conn.sync() + + log = self._storage.undoLog() + for entry in zip(log, (b'undo', b'o1 -> o3', b'o1 -> o2 -> o3')): + eq(entry[0]['description'], entry[1]) + + eq(o1.obj, o2) + eq(o1.obj.obj, o3) + self._iterate() + finally: + conn.close() + db.close() + + def checkPackAfterUndoDeletion(self): + db = DB(self._storage) + cn = db.open() + try: + root = cn.root() + + pack_times = [] + def set_pack_time(): + pack_times.append(time.time()) + snooze() + + root["key0"] = MinPO(0) + root["key1"] = MinPO(1) + root["key2"] = MinPO(2) + txn = transaction.get() + txn.note(u"create 3 keys") + txn.commit() + + set_pack_time() + + del root["key1"] + txn = transaction.get() + txn.note(u"delete 1 key") + txn.commit() + + set_pack_time() + + root._p_deactivate() + cn.sync() + self.assertTrue(listeq(root.keys(), ["key0", "key2"])) + + L = db.undoInfo() + db.undo(L[0]["id"]) + txn = transaction.get() + txn.note(u"undo deletion") + txn.commit() + + set_pack_time() + + root._p_deactivate() + cn.sync() + self.assertTrue(listeq(root.keys(), ["key0", "key1", "key2"])) + + for t in pack_times: + self._storage.pack(t, referencesf) + + root._p_deactivate() + cn.sync() + self.assertTrue(listeq(root.keys(), ["key0", "key1", "key2"])) + for i in range(3): + obj = root["key%d" % i] + self.assertEqual(obj.value, i) + root.items() + self._inter_pack_pause() + finally: + cn.close() + db.close() + + + def checkPackAfterUndoManyTimes(self): + db = DB(self._storage) + cn = db.open() + try: + rt = cn.root() + + rt["test"] = MinPO(1) + transaction.commit() + rt["test2"] = MinPO(2) + transaction.commit() + rt["test"] = MinPO(3) + txn = transaction.get() + txn.note(u"root of undo") + txn.commit() + + packtimes = [] + for i in range(10): + L = db.undoInfo() + db.undo(L[0]["id"]) + txn = transaction.get() + txn.note(u"undo %d" % i) + txn.commit() + rt._p_deactivate() + 
cn.sync() + + self.assertEqual(rt["test"].value, i % 2 and 3 or 1) + self.assertEqual(rt["test2"].value, 2) + + packtimes.append(time.time()) + snooze() + + for t in packtimes: + self._storage.pack(t, referencesf) + cn.sync() + + # TODO: Is _cache supposed to have a clear() method, or not? + # cn._cache.clear() + + # The last undo set the value to 3 and pack should + # never change that. + self.assertEqual(rt["test"].value, 3) + self.assertEqual(rt["test2"].value, 2) + self._inter_pack_pause() + finally: + cn.close() + db.close() + + def _inter_pack_pause(self): + # DirectoryStorage needs a pause between packs, + # most other storages dont. + pass + + def checkTransactionalUndoIterator(self): + # check that data_txn set in iterator makes sense + if not hasattr(self._storage, "iterator"): + return + + s = self._storage + + BATCHES = 4 + OBJECTS = 4 + + orig = [] + for i in range(BATCHES): + t = TransactionMetaData() + tid = p64(i + 1) + s.tpc_begin(t, tid) + for j in range(OBJECTS): + oid = s.new_oid() + obj = MinPO(i * OBJECTS + j) + s.store(oid, None, zodb_pickle(obj), '', t) + orig.append((tid, oid)) + s.tpc_vote(t) + s.tpc_finish(t) + + orig = [(tid, oid, s.getTid(oid)) for tid, oid in orig] + + i = 0 + for tid, oid, revid in orig: + self._dostore(oid, revid=revid, data=MinPO(revid), + description="update %s" % i) + + # Undo the OBJECTS transactions that modified objects created + # in the ith original transaction. 
+ + def undo(i): + info = s.undoInfo() + t = TransactionMetaData() + s.tpc_begin(t) + base = i * OBJECTS + i + for j in range(OBJECTS): + tid = info[base + j]['id'] + s.undo(tid, t) + s.tpc_vote(t) + s.tpc_finish(t) + + for i in range(BATCHES): + undo(i) + + # There are now (2 + OBJECTS) * BATCHES transactions: + # BATCHES original transactions, followed by + # OBJECTS * BATCHES modifications, followed by + # BATCHES undos + + transactions = s.iterator() + eq = self.assertEqual + + for i in range(BATCHES): + txn = next(transactions) + + tid = p64(i + 1) + eq(txn.tid, tid) + + L1 = {(rec.oid, rec.tid, rec.data_txn) for rec in txn} + L2 = {(oid, revid, None) for _tid, oid, revid in orig + if _tid == tid} + + eq(L1, L2) + + for i in range(BATCHES * OBJECTS): + txn = next(transactions) + eq(len([rec for rec in txn if rec.data_txn is None]), 1) + + for i in range(BATCHES): + txn = next(transactions) + + # The undos are performed in reverse order. + otid = p64(BATCHES - i) + L1 = [(rec.oid, rec.data_txn) for rec in txn] + L2 = [(oid, otid) for _tid, oid, revid in orig + if _tid == otid] + L1.sort() + L2.sort() + eq(L1, L2) + + self.assertRaises(StopIteration, next, transactions) + + def checkUndoLogMetadata(self): + # test that the metadata is correct in the undo log + t = transaction.get() + t.note(u't1') + t.setExtendedInfo('k2', 'this is transaction metadata') + t.setUser(u'u3',path=u'p3') + db = DB(self._storage) + conn = db.open() + try: + root = conn.root() + o1 = C() + root['obj'] = o1 + txn = transaction.get() + txn.commit() + l = self._storage.undoLog() + self.assertEqual(len(l),2) + d = l[0] + self.assertEqual(d['description'], b't1') + self.assertEqual(d['k2'], 'this is transaction metadata') + self.assertEqual(d['user_name'], b'p3 u3') + finally: + conn.close() + db.close() + + # A common test body for index tests on undoInfo and undoLog. 
Before + # ZODB 3.4, they always returned a wrong number of results (one too + # few _or_ too many, depending on how they were called). + def _exercise_info_indices(self, method_name): + db = DB(self._storage) + info_func = getattr(db, method_name) + cn = db.open() + rt = cn.root() + + # Do some transactions. + for key in "abcdefghijklmnopqrstuvwxyz": + rt[key] = ord(key) + transaction.commit() + + # 26 letters = 26 transactions, + the hidden transaction to make + # the root object, == 27 expected. + allofem = info_func(0, 100000) + self.assertEqual(len(allofem), 27) + + # Asking for no more than 100000 should do the same. + redundant = info_func(last=-1000000) + self.assertEqual(allofem, redundant) + + # By default, we should get only 20 back. + default = info_func() + self.assertEqual(len(default), 20) + # And they should be the most recent 20. + self.assertEqual(default, allofem[:20]) + + # If we ask for only one, we should get only the most recent. + fresh = info_func(last=1) + self.assertEqual(len(fresh), 1) + self.assertEqual(fresh[0], allofem[0]) + + # Another way of asking for only the most recent. + redundant = info_func(last=-1) + self.assertEqual(fresh, redundant) + + # Try a slice that doesn't start at 0. + oddball = info_func(first=11, last=17) + self.assertEqual(len(oddball), 17-11) + self.assertEqual(oddball, allofem[11 : 11+len(oddball)]) + + # And another way to spell the same thing. + redundant = info_func(first=11, last=-6) + self.assertEqual(oddball, redundant) + + cn.close() + # Caution: don't close db; the framework does that. If you close + # it here, the ZODB tests still work, but the ZRS RecoveryStorageTests + # fail (closing the DB here in those tests closes the ZRS primary + # before a ZRS secondary even starts, and then the latter can't + # find a server to recover from). 
+ + def checkIndicesInUndoInfo(self): + self._exercise_info_indices("undoInfo") + + def checkIndicesInUndoLog(self): + self._exercise_info_indices("undoLog") + + def checkUndoMultipleConflictResolution(self, reverse=False): + from .ConflictResolution import PCounter + db = DB(self._storage) + cn = db.open() + try: + cn.root.x = PCounter() + transaction.commit() + + for i in range(4): + with db.transaction() as conn: + conn.transaction_manager.get().note( + (str if PY3 else unicode)(i)) + conn.root.x.inc() + + ids = [l['id'] for l in db.undoLog(1, 3)] + if reverse: + ids.reverse() + + db.undoMultiple(ids) + transaction.commit() + + self.assertEqual(cn.root.x._value, 2) + finally: + cn.close() + db.close() + + def checkUndoMultipleConflictResolutionReversed(self): + self.checkUndoMultipleConflictResolution(True) diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/tests/__init__.py b/thesisenv/lib/python3.6/site-packages/ZODB/tests/__init__.py new file mode 100644 index 0000000..669dd67 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/tests/__init__.py @@ -0,0 +1 @@ +# Having this makes debugging better. diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/tests/blob_basic.txt b/thesisenv/lib/python3.6/site-packages/ZODB/tests/blob_basic.txt new file mode 100644 index 0000000..f32a623 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/tests/blob_basic.txt @@ -0,0 +1,198 @@ +############################################################################## +# +# Copyright (c) 2005 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. 
+# +############################################################################## + +ZODB Blob support +================= + +You create a blob like this:: + + >>> from ZODB.blob import Blob + >>> myblob = Blob() + +A blob implements the IBlob interface:: + + >>> from ZODB.interfaces import IBlob + >>> IBlob.providedBy(myblob) + True + +We can open a new blob file for reading, but it won't have any data:: + + >>> with myblob.open("r") as fp: fp.read() + '' + +But we can write data to a new Blob by opening it for writing:: + + >>> f = myblob.open("w") + >>> _ = f.write(b"Hi, Blob!") + +If we try to open a Blob again while it is open for writing, we get an error:: + + >>> myblob.open("r") + Traceback (most recent call last): + ... + BlobError: Already opened for writing. + +We can close the file:: + + >>> f.close() + +Now we can open it for reading:: + + >>> f2 = myblob.open("r") + +And we get the data back:: + + >>> f2.read() + 'Hi, Blob!' + +If we want to, we can open it again:: + + >>> f3 = myblob.open("r") + >>> f3.read() + 'Hi, Blob!' + +But we can't open it for writing, while it is opened for reading:: + + >>> myblob.open("a") + Traceback (most recent call last): + ... + BlobError: Already opened for reading. + +Before we can write, we have to close the readers:: + + >>> f2.close() + >>> f3.close() + +Now we can open it for writing again and e.g. append data:: + + >>> f4 = myblob.open("a") + >>> _ = f4.write(b"\nBlob is fine.") + +We can't open a blob while it is open for writing: + + >>> myblob.open("w") + Traceback (most recent call last): + ... + BlobError: Already opened for writing. + + >>> myblob.open("r") + Traceback (most recent call last): + ... + BlobError: Already opened for writing. + + >>> f4.close() + +Now we can read it:: + + >>> f4a = myblob.open("r") + >>> f4a.read() + 'Hi, Blob!\nBlob is fine.' + >>> f4a.close() + +You shouldn't need to explicitly close a blob unless you hold a reference +to it via a name. 
If the first line in the following test kept a reference +around via a name, the second call to open it in a writable mode would fail +with a BlobError, but it doesn't:: + + >>> with myblob.open("r+") as fp: fp.read() + 'Hi, Blob!\nBlob is fine.' + >>> f4b = myblob.open("a") + >>> f4b.close() + +We can read lines out of the blob too:: + + >>> f5 = myblob.open("r") + >>> f5.readline() + 'Hi, Blob!\n' + >>> f5.readline() + 'Blob is fine.' + >>> f5.close() + +We can seek to certain positions in a blob and read portions of it:: + + >>> f6 = myblob.open('r') + >>> _ = f6.seek(4) + >>> int(f6.tell()) + 4 + >>> f6.read(5) + 'Blob!' + >>> f6.close() + +We can use the object returned by a blob open call as an iterable:: + + >>> f7 = myblob.open('r') + >>> for line in f7: + ... print(line.decode()) + Hi, Blob! + + Blob is fine. + >>> f7.close() + +We can truncate a blob:: + + >>> f8 = myblob.open('a') + >>> _ = f8.truncate(0) + >>> f8.close() + >>> f8 = myblob.open('r') + >>> f8.read() + '' + >>> f8.close() + +Blobs are always opened in binary mode:: + + >>> f9 = myblob.open("r") + >>> f9.mode + 'rb' + >>> f9.close() + +Blobs that have not been committed can be opened using any mode, +except for "c":: + + >>> import six + >>> from ZODB.blob import BlobError, valid_modes + >>> for mode in valid_modes: + ... try: + ... f10 = Blob().open(mode) + ... except BlobError: + ... six.print_('open failed with mode "%s"' % mode) + ... else: + ... f10.close() + open failed with mode "c" + +Some cleanup in this test is needed:: + + >>> import transaction + >>> transaction.get().abort() + +Subclassing Blobs +----------------- + +Blobs are not subclassable:: + + >>> class SubBlob(Blob): + ... pass + >>> my_sub_blob = SubBlob() + Traceback (most recent call last): + ... + TypeError: Blobs do not support subclassing. + +Passing data to the blob constructor +------------------------------------ + +If you have a small amount of data, you can pass it to the blob +constructor. 
(This is a convenience, mostly for writing tests.) + + >>> myblob = Blob(b'some data') + >>> with myblob.open() as fp: fp.read() + 'some data' diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/tests/blob_connection.txt b/thesisenv/lib/python3.6/site-packages/ZODB/tests/blob_connection.txt new file mode 100644 index 0000000..1e95741 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/tests/blob_connection.txt @@ -0,0 +1,88 @@ +Connection support for Blobs tests +================================== + +Connections handle Blobs specially. To demonstrate that, we first need a Blob +with some data: + + >>> from ZODB.interfaces import IBlob + >>> from ZODB.blob import Blob + >>> import transaction + >>> blob = Blob() + >>> data = blob.open("w") + >>> _ = data.write(b"I'm a happy Blob.") + >>> data.close() + +We also need a database with a blob supporting storage. (We're going to use +FileStorage rather than MappingStorage here because we will want ``loadBefore`` +for one of our examples.) + + >>> blob_storage = create_storage() + >>> from ZODB.DB import DB + >>> database = DB(blob_storage) + +Putting a Blob into a Connection works like every other object: + + >>> connection = database.open() + >>> root = connection.root() + >>> root['myblob'] = blob + >>> transaction.commit() + +We can also commit a transaction that seats a blob into place without +calling the blob's open method: + + >>> nothing = transaction.begin() + >>> anotherblob = Blob() + >>> root['anotherblob'] = anotherblob + >>> nothing = transaction.commit() + +Getting stuff out of there works similarly: + + >>> transaction2 = transaction.TransactionManager() + >>> connection2 = database.open(transaction_manager=transaction2) + >>> root = connection2.root() + >>> blob2 = root['myblob'] + >>> IBlob.providedBy(blob2) + True + >>> with blob2.open("r") as fp: fp.read() + "I'm a happy Blob." + >>> transaction2.abort() + +MVCC also works. 
+ + >>> transaction3 = transaction.TransactionManager() + >>> connection3 = database.open(transaction_manager=transaction3) + >>> f = connection.root()['myblob'].open('w') + >>> _ = f.write(b'I am an ecstatic Blob.') + >>> f.close() + >>> transaction.commit() + >>> with connection3.root()['myblob'].open('r') as fp: fp.read() + "I'm a happy Blob." + + >>> transaction2.abort() + >>> transaction3.abort() + >>> connection2.close() + >>> connection3.close() + +You can't put blobs into a database that has uses a Non-Blob-Storage, though: + + >>> from ZODB.MappingStorage import MappingStorage + >>> no_blob_storage = MappingStorage() + >>> database2 = DB(no_blob_storage) + >>> connection2 = database2.open(transaction_manager=transaction2) + >>> root = connection2.root() + >>> root['myblob'] = Blob() + >>> transaction2.commit() # doctest: +ELLIPSIS + Traceback (most recent call last): + ... + Unsupported: Storing Blobs in ... + + >>> transaction2.abort() + >>> connection2.close() + +After testing this, we don't need the storage directory and databases anymore: + + >>> transaction.abort() + >>> connection.close() + >>> database.close() + >>> database2.close() + >>> blob_storage.close() diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/tests/blob_consume.txt b/thesisenv/lib/python3.6/site-packages/ZODB/tests/blob_consume.txt new file mode 100644 index 0000000..1be8ec1 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/tests/blob_consume.txt @@ -0,0 +1,132 @@ +Consuming existing files +======================== + +The ZODB Blob implementation allows to import existing files as Blobs within +an O(1) operation we call `consume`:: + +Let's create a file:: + + >>> to_import = open('to_import', 'wb') + >>> _ = to_import.write(b"I'm a Blob and I feel fine.") + +The file *must* be closed before giving it to consumeFile: + + >>> to_import.close() + +Now, let's consume this file in a blob by specifying it's name:: + + >>> from ZODB.blob import Blob + >>> blob = Blob() 
+ >>> blob.consumeFile('to_import')
+
+After the consumeFile operation, the original file has been removed:
+
+ >>> import os
+ >>> os.path.exists('to_import')
+ False
+
+We now can call open on the blob and read and write the data::
+
+ >>> blob_read = blob.open('r')
+ >>> blob_read.read()
+ "I'm a Blob and I feel fine."
+ >>> blob_read.close()
+ >>> blob_write = blob.open('w')
+ >>> _ = blob_write.write(b'I was changed.')
+ >>> blob_write.close()
+
+We cannot consume a file when there is a reader or writer around for a blob
+already::
+
+ >>> with open('to_import', 'wb') as file:
+ ...     _ = file.write(b'I am another blob.')
+ >>> blob_read = blob.open('r')
+ >>> blob.consumeFile('to_import')
+ Traceback (most recent call last):
+ BlobError: Already opened for reading.
+ >>> blob_read.close()
+ >>> blob_write = blob.open('w')
+ >>> blob.consumeFile('to_import')
+ Traceback (most recent call last):
+ BlobError: Already opened for writing.
+ >>> blob_write.close()
+
+Now, after closing all readers and writers we can consume files again::
+
+ >>> blob.consumeFile('to_import')
+ >>> blob_read = blob.open('r')
+ >>> blob_read.read()
+ 'I am another blob.'
+
+ >>> blob_read.close()
+
+Edge cases
+==========
+
+There are some edge cases for what happens when the link() operation
+fails. We simulate this in different states:
+
+Case 1: We don't have uncommitted data, but the link operation fails. We fall
+back to try a copy/remove operation that is successful::
+
+ >>> with open('to_import', 'wb') as file:
+ ...     _ = file.write(b'Some data.')
+
+ >>> def failing_rename(f1, f2):
+ ...     if f1 == 'to_import':
+ ...         raise OSError("I can't link.")
+ ...     os_rename(f1, f2)
+
+ >>> blob = Blob()
+ >>> os_rename = os.rename
+ >>> os.rename = failing_rename
+ >>> blob.consumeFile('to_import')
+
+The copy fallback succeeded, so the blob now contains the consumed data::
+
+ >>> with blob.open('r') as fp: fp.read()
+ 'Some data.'
+ +Case 2: We don't have uncommitted data and both the link operation and the +copy fail. The exception will be re-raised and the target file will not +exist:: + + >>> blob = Blob() + >>> import ZODB.utils + >>> utils_cp = ZODB.utils.cp + + >>> def failing_copy(f1, f2): + ... raise OSError("I can't copy.") + + >>> ZODB.utils.cp = failing_copy + >>> with open('to_import', 'wb') as file: + ... _ = file.write(b'Some data.') + >>> blob.consumeFile('to_import') + Traceback (most recent call last): + OSError: I can't copy. + +The blob did not have data before, so it shouldn't have data now:: + + >>> with blob.open('r') as fp: fp.read() + '' + +Case 3: We have uncommitted data, but the link and the copy operations fail. +The exception will be re-raised and the target file will exist with the +previous uncomitted data:: + + >>> blob = Blob() + >>> with blob.open('w') as blob_writing: + ... _ = blob_writing.write(b'Uncommitted data') + + >>> blob.consumeFile('to_import') + Traceback (most recent call last): + OSError: I can't copy. 
+
+The blob did exist before and had uncommitted data; this shouldn't have
+changed::
+
+ >>> with blob.open('r') as fp: fp.read()
+ 'Uncommitted data'
+
+ >>> os.rename = os_rename
+ >>> ZODB.utils.cp = utils_cp
diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/tests/blob_importexport.txt b/thesisenv/lib/python3.6/site-packages/ZODB/tests/blob_importexport.txt
new file mode 100644
index 0000000..fe24b14
--- /dev/null
+++ b/thesisenv/lib/python3.6/site-packages/ZODB/tests/blob_importexport.txt
@@ -0,0 +1,70 @@
+Import/export support for blob data
+===================================
+
+Set up:
+
+ >>> import ZODB.blob, transaction
+ >>> from persistent.mapping import PersistentMapping
+
+We need a database with an undoing blob supporting storage:
+
+ >>> database1 = ZODB.DB(create_storage('1'))
+ >>> database2 = ZODB.DB(create_storage('2'))
+
+Create our root object for database1:
+
+ >>> connection1 = database1.open()
+ >>> root1 = connection1.root()
+
+Put a couple blob objects in our database1 and on the filesystem:
+
+ >>> import time, os
+ >>> nothing = transaction.begin()
+ >>> data1 = b'x'*100000
+ >>> blob1 = ZODB.blob.Blob()
+ >>> with blob1.open('w') as file:
+ ...     _ = file.write(data1)
+ >>> data2 = b'y'*100000
+ >>> blob2 = ZODB.blob.Blob()
+ >>> with blob2.open('w') as file:
+ ...
_ = file.write(data2) + >>> d = PersistentMapping({'blob1':blob1, 'blob2':blob2}) + >>> root1['blobdata'] = d + >>> transaction.commit() + +Export our blobs from a database1 connection: + + >>> conn = root1['blobdata']._p_jar + >>> oid = root1['blobdata']._p_oid + >>> exportfile = 'export' + >>> connection1.exportFile(oid, exportfile).close() + +Import our exported data into database2: + + >>> connection2 = database2.open() + >>> root2 = connection2.root() + >>> nothing = transaction.begin() + >>> data = root2._p_jar.importFile(exportfile) + >>> root2['blobdata'] = data + >>> transaction.commit() + +Make sure our data exists: + + >>> items1 = root1['blobdata'] + >>> items2 = root2['blobdata'] + >>> bool(items1.keys() == items2.keys()) + True + >>> with items1['blob1'].open() as fp1: + ... with items2['blob1'].open() as fp2: + ... fp1.read() == fp2.read() + True + >>> with items1['blob2'].open() as fp1: + ... with items2['blob2'].open() as fp2: + ... fp1.read() == fp2.read() + True + >>> transaction.get().abort() + +.. cleanup + + >>> database1.close() + >>> database2.close() diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/tests/blob_layout.txt b/thesisenv/lib/python3.6/site-packages/ZODB/tests/blob_layout.txt new file mode 100644 index 0000000..36551af --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/tests/blob_layout.txt @@ -0,0 +1,303 @@ +====================== +Blob directory layouts +====================== + +The internal structure of the blob directories is governed by so called +`layouts`. The current default layout is called `bushy`. + +The original blob implementation used a layout that we now call `lawn` and +which is still available for backwards compatibility. + +Layouts implement two methods: one for computing a relative path for an +OID and one for turning a relative path back into an OID. + +Our terminology is roughly the same as used in `DirectoryStorage`. 
+
+The `bushy` layout
+==================
+
+The bushy layout splits the OID into the 8 byte parts, reverses them and
+creates one directory level for each part, named by the hexlified
+representation of the byte value. This results in 8 levels of directories, the
+leaf directories being used for the revisions of the blobs and at most 256
+entries per directory level:
+
+>>> from ZODB.blob import BushyLayout
+>>> bushy = BushyLayout()
+>>> bushy.oid_to_path(b'\x00\x00\x00\x00\x00\x00\x00\x00')
+'0x00/0x00/0x00/0x00/0x00/0x00/0x00/0x00'
+>>> bushy.oid_to_path(b'\x00\x00\x00\x00\x00\x00\x00\x01')
+'0x00/0x00/0x00/0x00/0x00/0x00/0x00/0x01'
+
+>>> import os
+>>> bushy.path_to_oid(os.path.join(
+...     '0x01', '0x00', '0x00', '0x00', '0x00', '0x00', '0x00', '0x00'))
+'\x01\x00\x00\x00\x00\x00\x00\x00'
+>>> bushy.path_to_oid(os.path.join(
+...     '0xff', '0x00', '0x00', '0x00', '0x00', '0x00', '0x00', '0x00'))
+'\xff\x00\x00\x00\x00\x00\x00\x00'
+
+Paths that do not represent an OID will cause a ValueError:
+
+>>> bushy.path_to_oid('tmp')
+Traceback (most recent call last):
+ValueError: Not a valid OID path: `tmp`
+
+
+The `lawn` layout
+=================
+
+The lawn layout creates one directory for each blob, named by the hex
+representation of its OID. This has some limitations on various file systems
+like performance penalties or the inability to store more than a given number
+of blobs at the same time (e.g. 32k on ext3).
+ +>>> from ZODB.blob import LawnLayout +>>> lawn = LawnLayout() +>>> lawn.oid_to_path(b'\x00\x00\x00\x00\x00\x00\x00\x00') +'0x00' +>>> lawn.oid_to_path(b'\x00\x00\x00\x00\x00\x00\x00\x01') +'0x01' + +>>> lawn.path_to_oid('0x01') +'\x00\x00\x00\x00\x00\x00\x00\x01' + +Paths that do not represent an OID will cause a ValueError: + +>>> lawn.path_to_oid('tmp') +Traceback (most recent call last): +ValueError: Not a valid OID path: `tmp` +>>> lawn.path_to_oid('') +Traceback (most recent call last): +ValueError: Not a valid OID path: `` + + +Auto-detecting the layout of a directory +======================================== + +To allow easier migration, we provide an auto-detection feature that analyses a +blob directory and decides for a strategy to use. In general it prefers to +choose the `bushy` layout, except if it determines that the directory has +already been used to create a lawn structure. + +>>> from ZODB.blob import auto_layout_select + +1. Non-existing directories will trigger a bushy layout: + +>>> import os, shutil +>>> auto_layout_select('blobs') +'bushy' + +2. Empty directories will trigger a bushy layout too: + +>>> os.mkdir('blobs') +>>> auto_layout_select('blobs') +'bushy' + +3. If the directory contains a marker for the strategy it will be used: + +>>> from ZODB.blob import LAYOUT_MARKER +>>> import os.path +>>> with open(os.path.join('blobs', LAYOUT_MARKER), 'wb') as file: +... _ = file.write(b'bushy') +>>> auto_layout_select('blobs') +'bushy' +>>> with open(os.path.join('blobs', LAYOUT_MARKER), 'wb') as file: +... _ = file.write(b'lawn') +>>> auto_layout_select('blobs') +'lawn' +>>> shutil.rmtree('blobs') + +4. If the directory does not contain a marker but other files that are +not hidden, we assume that it was created with an earlier version of +the blob implementation and uses our `lawn` layout: + +>>> os.mkdir('blobs') +>>> with open(os.path.join('blobs', '0x0101'), 'wb') as file: +... 
_ = file.write(b'foo')
+>>> auto_layout_select('blobs')
+'lawn'
+>>> shutil.rmtree('blobs')
+
+5. If the directory contains only hidden files, use the bushy layout:
+
+>>> os.mkdir('blobs')
+>>> with open(os.path.join('blobs', '.svn'), 'wb') as file:
+...     _ = file.write(b'blah')
+>>> auto_layout_select('blobs')
+'bushy'
+>>> shutil.rmtree('blobs')
+
+
+Directory layout markers
+========================
+
+When the file system helper (FSH) is asked to create the directory structure,
+it will leave a marker with the chosen layout if no marker exists yet:
+
+>>> from ZODB.blob import FilesystemHelper
+>>> blobs = 'blobs'
+>>> fsh = FilesystemHelper(blobs)
+>>> fsh.layout_name
+'bushy'
+>>> fsh.create()
+>>> with open(os.path.join(blobs, LAYOUT_MARKER), 'rb') as fp: fp.read()
+'bushy'
+
+If the FSH finds a marker, then it verifies whether its content matches the
+strategy that was chosen. It will raise an exception if we try to work with a
+directory that has a different marker than the chosen strategy:
+
+>>> fsh = FilesystemHelper(blobs, 'lawn')
+>>> fsh.layout_name
+'lawn'
+>>> fsh.create() # doctest: +ELLIPSIS
+Traceback (most recent call last):
+ValueError: Directory layout `lawn` selected for blob directory .../blobs/, but marker found for layout `bushy`
+>>> rmtree(blobs)
+
+This function interacts with the automatic detection in such a way that an
+unmarked directory will be marked the first time it is auto-guessed and
+the marker will be used in the future:
+
+>>> import ZODB.FileStorage
+>>> from ZODB.blob import BlobStorage
+>>> datafs = 'data.fs'
+>>> base_storage = ZODB.FileStorage.FileStorage(datafs)
+
+>>> os.mkdir(blobs)
+>>> with open(os.path.join(blobs, 'foo'), 'wb') as file:
+...
_ = file.write(b'foo') +>>> blob_storage = BlobStorage(blobs, base_storage) +>>> blob_storage.fshelper.layout_name +'lawn' +>>> with open(os.path.join(blobs, LAYOUT_MARKER), 'rb') as fp: fp.read() +'lawn' +>>> blob_storage = BlobStorage('blobs', base_storage, layout='bushy') +... # doctest: +ELLIPSIS +Traceback (most recent call last): +ValueError: Directory layout `bushy` selected for blob directory .../blobs/, but marker found for layout `lawn` + + +>>> base_storage.close() +>>> rmtree('blobs') + + +Migrating between directory layouts +=================================== + +A script called `migrateblobs.py` is distributed with the ZODB for offline +migration capabilities between different directory layouts. It can migrate any +blob directory layout to any other layout. It leaves the original blob +directory untouched (except from eventually creating a temporary directory and +the storage layout marker). + +The migration is accessible as a library function: + +>>> from ZODB.scripts.migrateblobs import migrate + +Create a `lawn` directory structure and migrate it to the new `bushy` one: + +>>> from ZODB.blob import FilesystemHelper +>>> d = 'd' +>>> os.mkdir(d) +>>> old = os.path.join(d, 'old') +>>> old_fsh = FilesystemHelper(old, 'lawn') +>>> old_fsh.create() +>>> blob1 = old_fsh.getPathForOID(7039, create=True) +>>> blob2 = old_fsh.getPathForOID(10, create=True) +>>> blob3 = old_fsh.getPathForOID(7034, create=True) +>>> with open(os.path.join(blob1, 'foo'), 'wb') as file: +... _ = file.write(b'foo') +>>> with open(os.path.join(blob1, 'foo2'), 'wb') as file: +... _ = file.write(b'bar') +>>> with open(os.path.join(blob2, 'foo3'), 'wb') as file: +... _ = file.write(b'baz') +>>> with open(os.path.join(blob2, 'foo4'), 'wb') as file: +... _ = file.write(b'qux') +>>> with open(os.path.join(blob3, 'foo5'), 'wb') as file: +... _ = file.write(b'quux') +>>> with open(os.path.join(blob3, 'foo6'), 'wb') as file: +... 
_ = file.write(b'corge') + +Committed blobs have their permissions set to 000 + +The migration function is called with the old and the new path and the layout +that shall be used for the new directory: + +>>> bushy = os.path.join(d, 'bushy') +>>> migrate(old, bushy, 'bushy') # doctest: +ELLIPSIS +NORMALIZE_WHITESPACE +Migrating blob data from `.../old` (lawn) to `.../bushy` (bushy) + OID: 0x0a - 2 files + OID: 0x1b7a - 2 files + OID: 0x1b7f - 2 files + +The new directory now contains the same files in different directories, but +with the same sizes and permissions: + +>>> lawn_files = {} +>>> for base, dirs, files in os.walk(old): +... for file_name in files: +... lawn_files[file_name] = os.path.join(base, file_name) + +>>> bushy_files = {} +>>> for base, dirs, files in os.walk(bushy): +... for file_name in files: +... bushy_files[file_name] = os.path.join(base, file_name) + +>>> len(lawn_files) == len(bushy_files) +True + +>>> import six +>>> for file_name, lawn_path in sorted(lawn_files.items()): +... if file_name == '.layout': +... continue +... lawn_stat = os.stat(lawn_path) +... bushy_path = bushy_files[file_name] +... bushy_stat = os.stat(bushy_path) +... six.print_(lawn_path, '-->', bushy_path) +... if ((lawn_stat.st_mode, lawn_stat.st_size) != +... (bushy_stat.st_mode, bushy_stat.st_size)): +... 
print('oops') +old/0x1b7f/foo --> bushy/0x00/0x00/0x00/0x00/0x00/0x00/0x1b/0x7f/foo +old/0x1b7f/foo2 --> bushy/0x00/0x00/0x00/0x00/0x00/0x00/0x1b/0x7f/foo2 +old/0x0a/foo3 --> bushy/0x00/0x00/0x00/0x00/0x00/0x00/0x00/0x0a/foo3 +old/0x0a/foo4 --> bushy/0x00/0x00/0x00/0x00/0x00/0x00/0x00/0x0a/foo4 +old/0x1b7a/foo5 --> bushy/0x00/0x00/0x00/0x00/0x00/0x00/0x1b/0x7a/foo5 +old/0x1b7a/foo6 --> bushy/0x00/0x00/0x00/0x00/0x00/0x00/0x1b/0x7a/foo6 + +We can also migrate the bushy layout back to the lawn layout: + +>>> lawn = os.path.join(d, 'lawn') +>>> migrate(bushy, lawn, 'lawn') +Migrating blob data from `.../bushy` (bushy) to `.../lawn` (lawn) + OID: 0x0a - 2 files + OID: 0x1b7a - 2 files + OID: 0x1b7f - 2 files + +>>> lawn_files = {} +>>> for base, dirs, files in os.walk(lawn): +... for file_name in files: +... lawn_files[file_name] = os.path.join(base, file_name) + +>>> len(lawn_files) == len(bushy_files) +True + +>>> for file_name, lawn_path in sorted(lawn_files.items()): +... if file_name == '.layout': +... continue +... lawn_stat = os.stat(lawn_path) +... bushy_path = bushy_files[file_name] +... bushy_stat = os.stat(bushy_path) +... six.print_(bushy_path, '-->', lawn_path) +... if ((lawn_stat.st_mode, lawn_stat.st_size) != +... (bushy_stat.st_mode, bushy_stat.st_size)): +... 
print('oops') +bushy/0x00/0x00/0x00/0x00/0x00/0x00/0x1b/0x7f/foo --> lawn/0x1b7f/foo +bushy/0x00/0x00/0x00/0x00/0x00/0x00/0x1b/0x7f/foo2 --> lawn/0x1b7f/foo2 +bushy/0x00/0x00/0x00/0x00/0x00/0x00/0x00/0x0a/foo3 --> lawn/0x0a/foo3 +bushy/0x00/0x00/0x00/0x00/0x00/0x00/0x00/0x0a/foo4 --> lawn/0x0a/foo4 +bushy/0x00/0x00/0x00/0x00/0x00/0x00/0x1b/0x7a/foo5 --> lawn/0x1b7a/foo5 +bushy/0x00/0x00/0x00/0x00/0x00/0x00/0x1b/0x7a/foo6 --> lawn/0x1b7a/foo6 + +>>> rmtree(d) diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/tests/blob_packing.txt b/thesisenv/lib/python3.6/site-packages/ZODB/tests/blob_packing.txt new file mode 100644 index 0000000..e608151 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/tests/blob_packing.txt @@ -0,0 +1,112 @@ +Packing support for blob data +============================= + +Set up: + + >>> from ZODB.serialize import referencesf + >>> from ZODB.blob import Blob + >>> from ZODB import utils + >>> from ZODB.DB import DB + >>> import transaction + +A helper method to assure a unique timestamp across multiple platforms: + + >>> from ZODB.tests.testblob import new_time + +UNDOING +======= + +We need a database with an undoing blob supporting storage: + + >>> blob_storage = create_storage() + >>> database = DB(blob_storage) + +Create our root object: + + >>> connection1 = database.open() + >>> root = connection1.root() + +Put some revisions of a blob object in our database and on the filesystem: + + >>> import os + >>> tids = [] + >>> times = [] + >>> blob = Blob() + + >>> for i in range(5): + ... _ = transaction.begin() + ... times.append(new_time()) + ... with blob.open('w') as file: + ... _ = file.write(b'this is blob data ' + str(i).encode()) + ... if i: + ... tids.append(blob._p_serial) + ... else: + ... root['blob'] = blob + ... 
transaction.commit() + + >>> blob._p_activate() + >>> tids.append(blob._p_serial) + + >>> oid = root['blob']._p_oid + >>> fns = [ blob_storage.fshelper.getBlobFilename(oid, x) for x in tids ] + >>> [ os.path.exists(x) for x in fns ] # no pack + [True, True, True, True, True] + +Do a pack to the slightly before the first revision was written: + + >>> packtime = times[0] + >>> blob_storage.pack(packtime, referencesf) + >>> [ os.path.exists(x) for x in fns ] + [True, True, True, True, True] + +Do a pack to the slightly before the second revision was written: + + >>> packtime = times[1] + >>> blob_storage.pack(packtime, referencesf) + >>> [ os.path.exists(x) for x in fns ] + [True, True, True, True, True] + +Do a pack to the slightly before the third revision was written: + + >>> packtime = times[2] + >>> blob_storage.pack(packtime, referencesf) + >>> [ os.path.exists(x) for x in fns ] + [False, True, True, True, True] + +Do a pack to the slightly before the fourth revision was written: + + >>> packtime = times[3] + >>> blob_storage.pack(packtime, referencesf) + >>> [ os.path.exists(x) for x in fns ] + [False, False, True, True, True] + +Do a pack to the slightly before the fifth revision was written: + + >>> packtime = times[4] + >>> blob_storage.pack(packtime, referencesf) + >>> [ os.path.exists(x) for x in fns ] + [False, False, False, True, True] + +Do a pack to now: + + >>> packtime = new_time() + >>> blob_storage.pack(packtime, referencesf) + >>> [ os.path.exists(x) for x in fns ] + [False, False, False, False, True] + +Delete the object and do a pack, it should get rid of the most current +revision as well as the entire directory: + + >>> nothing = transaction.begin() + >>> del root['blob'] + >>> transaction.commit() + >>> packtime = new_time() + >>> blob_storage.pack(packtime, referencesf) + >>> [ os.path.exists(x) for x in fns ] + [False, False, False, False, False] + >>> os.path.exists(os.path.split(fns[0])[0]) + False + +Clean up our blob directory and 
database:
+
+ >>> database.close()
diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/tests/blob_tempdir.txt b/thesisenv/lib/python3.6/site-packages/ZODB/tests/blob_tempdir.txt
new file mode 100644
index 0000000..6d0b580
--- /dev/null
+++ b/thesisenv/lib/python3.6/site-packages/ZODB/tests/blob_tempdir.txt
@@ -0,0 +1,53 @@
+=======================================
+Temporary directory handling with blobs
+=======================================
+
+When creating uncommitted data files for a blob (e.g. by calling
+`blob.open('w')`) we need to decide where to create them. The decision depends
+on whether the blob is already stored in a database or not.
+
+Case 1: Blobs that are not in a database yet
+============================================
+
+Let's create a new blob and open it for writing::
+
+ >>> from ZODB.blob import Blob
+ >>> b = Blob()
+ >>> w = b.open('w')
+
+The created file is in the default temporary directory::
+
+ >>> import tempfile
+ >>> w.name.startswith(tempfile.gettempdir())
+ True
+
+ >>> w.close()
+
+Case 2: Blobs that are in a database
+====================================
+
+For this case we instantiate a blob and add it to a database immediately.
+First, we need a database with blob support::
+
+ >>> from ZODB.MappingStorage import MappingStorage
+ >>> from ZODB.blob import BlobStorage
+ >>> from ZODB.DB import DB
+ >>> import os.path
+ >>> base_storage = MappingStorage('test')
+ >>> blob_dir = os.path.abspath('blobs')
+ >>> blob_storage = BlobStorage(blob_dir, base_storage)
+ >>> database = DB(blob_storage)
+
+Now we create a blob and put it in the database.
After that we open it for +writing and expect the file to be in the blob temporary directory:: + + >>> blob = Blob() + >>> connection = database.open() + >>> connection.add(blob) + >>> w = blob.open('w') + >>> w.name.startswith(os.path.join(blob_dir, 'tmp')) + True + + >>> w.close() + >>> database.close() + diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/tests/blob_transaction.txt b/thesisenv/lib/python3.6/site-packages/ZODB/tests/blob_transaction.txt new file mode 100644 index 0000000..8a1498a --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/tests/blob_transaction.txt @@ -0,0 +1,418 @@ +Transaction support for Blobs +============================= + +We need a database with a blob supporting storage:: + + >>> import ZODB.blob, transaction + >>> blob_dir = 'blobs' + >>> blob_storage = create_storage(blob_dir=blob_dir) + >>> database = ZODB.DB(blob_storage) + >>> connection1 = database.open() + >>> root1 = connection1.root() + +Putting a Blob into a Connection works like any other Persistent object:: + + >>> blob1 = ZODB.blob.Blob() + >>> with blob1.open('w') as file: + ... _ = file.write(b'this is blob 1') + >>> root1['blob1'] = blob1 + >>> 'blob1' in root1 + True + +Aborting a blob add leaves the blob unchanged: + + >>> transaction.abort() + >>> 'blob1' in root1 + False + + >>> blob1._p_oid + >>> blob1._p_jar + >>> with blob1.open() as fp: + ... fp.read() + 'this is blob 1' + +It doesn't clear the file because there is no previously committed version: + + >>> fname = blob1._p_blob_uncommitted + >>> import os + >>> os.path.exists(fname) + True + +Let's put the blob back into the root and commit the change: + + >>> root1['blob1'] = blob1 + >>> transaction.commit() + +Now, if we make a change and abort it, we'll return to the committed +state: + + >>> os.path.exists(fname) + False + >>> blob1._p_blob_uncommitted + + >>> with blob1.open('w') as file: + ... _ = file.write(b'this is new blob 1') + >>> with blob1.open() as fp: + ... 
fp.read() + 'this is new blob 1' + >>> fname = blob1._p_blob_uncommitted + >>> os.path.exists(fname) + True + + >>> transaction.abort() + >>> os.path.exists(fname) + False + >>> blob1._p_blob_uncommitted + + >>> with blob1.open() as fp: + ... fp.read() + 'this is blob 1' + +Opening a blob gives us a filehandle. Getting data out of the +resulting filehandle is accomplished via the filehandle's read method:: + + >>> connection2 = database.open() + >>> root2 = connection2.root() + >>> blob1a = root2['blob1'] + + >>> blob1afh1 = blob1a.open("r") + >>> blob1afh1.read() + 'this is blob 1' + +Let's make another filehandle for read only to blob1a. Each file +handle has a reference to the (same) underlying blob:: + + >>> blob1afh2 = blob1a.open("r") + >>> blob1afh2.blob is blob1afh1.blob + True + +Let's close the first filehandle we got from the blob:: + + >>> blob1afh1.close() + +Let's abort this transaction, and ensure that the filehandles that we +opened are still open:: + + >>> transaction.abort() + >>> blob1afh2.read() + 'this is blob 1' + + >>> blob1afh2.close() + +If we open a blob for append, writing any number of bytes to the +blobfile should result in the blob being marked "dirty" in the +connection (we just aborted above, so the object should be "clean" +when we start):: + + >>> bool(blob1a._p_changed) + False + >>> with blob1a.open('r') as fp: + ... fp.read() + 'this is blob 1' + >>> with blob1a.open('a') as blob1afh3: + ... assert(bool(blob1a._p_changed)) + ... _ = blob1afh3.write(b'woot!') + >>> blob1afh3.close() + +We can open more than one blob object during the course of a single +transaction:: + + >>> blob2 = ZODB.blob.Blob() + >>> with blob2.open('w') as file: + ... 
_ = file.write(b'this is blob 3') + >>> root2['blob2'] = blob2 + >>> transaction.commit() + +Since we committed the current transaction above, the aggregate +changes we've made to blob, blob1a (these refer to the same object) and +blob2 (a different object) should be evident:: + + >>> with blob1.open('r') as fp: + ... fp.read() + 'this is blob 1woot!' + >>> with blob1a.open('r') as fp: + ... fp.read() + 'this is blob 1woot!' + >>> with blob2.open('r') as fp: + ... fp.read() + 'this is blob 3' + +We shouldn't be able to persist a blob filehandle at commit time +(although the exception which is raised when an object cannot be +pickled appears to be particulary unhelpful for casual users at the +moment):: + + >>> with blob1.open('r') as f: + ... root1['wontwork'] = f + ... transaction.commit() + Traceback (most recent call last): + ... + TypeError: ... + +Abort for good measure:: + + >>> transaction.abort() + +Attempting to change a blob simultaneously from two different +connections should result in a write conflict error:: + + >>> tm1 = transaction.TransactionManager() + >>> tm2 = transaction.TransactionManager() + >>> root3 = database.open(transaction_manager=tm1).root() + >>> root4 = database.open(transaction_manager=tm2).root() + >>> blob1c3 = root3['blob1'] + >>> blob1c4 = root4['blob1'] + >>> with blob1c3.open('a') as blob1c3fh1: + ... _ = blob1c3fh1.write(b'this is from connection 3') + >>> with blob1c4.open('a') as blob1c4fh1: + ... _ = blob1c4fh1.write(b'this is from connection 4') + >>> tm1.commit() + >>> with root3['blob1'].open('r') as fp: + ... fp.read() + 'this is blob 1woot!this is from connection 3' + >>> tm2.commit() + Traceback (most recent call last): + ... + ConflictError: database conflict error (oid 0x01, class ZODB.blob.Blob...) + +After the conflict, the winning transaction's result is visible on both +connections:: + + >>> with root3['blob1'].open('r') as fp: + ... 
fp.read() + 'this is blob 1woot!this is from connection 3' + >>> tm2.abort() + >>> with root4['blob1'].open('r') as fp: + ... fp.read() + 'this is blob 1woot!this is from connection 3' + +You can't commit a transaction while blob files are open: + + >>> f = root3['blob1'].open('w') + >>> tm1.commit() + Traceback (most recent call last): + ... + ValueError: Can't commit with opened blobs. + + >>> f.close() + >>> tm1.abort() + >>> f = root3['blob1'].open('w') + >>> f.close() + + >>> f = root3['blob1'].open('r') + >>> tm1.commit() + Traceback (most recent call last): + ... + ValueError: Can't commit with opened blobs. + >>> f.close() + >>> tm1.abort() + +Savepoints and Blobs +-------------------- + +We do support optimistic savepoints: + + >>> connection5 = database.open() + >>> root5 = connection5.root() + >>> blob = ZODB.blob.Blob() + >>> with blob.open("w") as blob_fh: + ... _ = blob_fh.write(b"I'm a happy blob.") + >>> root5['blob'] = blob + >>> transaction.commit() + >>> with root5['blob'].open("r") as fp: + ... fp.read() + "I'm a happy blob." + >>> with root5['blob'].open("a") as blob_fh: + ... _ = blob_fh.write(b" And I'm singing.") + >>> with root5['blob'].open("r") as fp: + ... fp.read() + "I'm a happy blob. And I'm singing." + >>> savepoint = transaction.savepoint(optimistic=True) + + >>> with root5['blob'].open("r") as fp: + ... fp.read() + "I'm a happy blob. And I'm singing." + +Savepoints store the blobs in temporary directories in the temporary +directory of the blob storage: + + >>> len([name for name in os.listdir(os.path.join(blob_dir, 'tmp')) + ... if name.startswith('savepoint')]) + 1 + +After committing the transaction, the temporary savepoint files are moved to +the committed location again: + + >>> transaction.commit() + >>> len([name for name in os.listdir(os.path.join(blob_dir, 'tmp')) + ... if name.startswith('savepoint')]) + 0 + +We support non-optimistic savepoints too: + + >>> with root5['blob'].open("a") as file: + ... 
_ = file.write(b" And I'm dancing.") + >>> with root5['blob'].open("r") as fp: + ... fp.read() + "I'm a happy blob. And I'm singing. And I'm dancing." + >>> savepoint = transaction.savepoint() + +Again, the savepoint creates a new savepoints directory: + + >>> len([name for name in os.listdir(os.path.join(blob_dir, 'tmp')) + ... if name.startswith('savepoint')]) + 1 + + >>> with root5['blob'].open("w") as file: + ... _ = file.write(b" And the weather is beautiful.") + >>> savepoint.rollback() + + >>> with root5['blob'].open("r") as fp: + ... fp.read() + "I'm a happy blob. And I'm singing. And I'm dancing." + >>> transaction.abort() + +The savepoint blob directory gets cleaned up on an abort: + + >>> len([name for name in os.listdir(os.path.join(blob_dir, 'tmp')) + ... if name.startswith('savepoint')]) + 0 + +Reading Blobs outside of a transaction +-------------------------------------- + +If you want to read from a Blob outside of transaction boundaries (e.g. to +stream a file to the browser), committed method to get the name of a +file that can be opened. + + >>> connection6 = database.open() + >>> root6 = connection6.root() + >>> blob = ZODB.blob.Blob() + >>> with blob.open("w") as blob_fh: + ... _ = blob_fh.write(b"I'm a happy blob.") + >>> root6['blob'] = blob + >>> transaction.commit() + >>> with open(blob.committed()) as fp: + ... fp.read() + "I'm a happy blob." + +We can also read committed data by calling open with a 'c' flag: + + >>> f = blob.open('c') + +This just returns a regular file object: + + >>> type(f) == file_type + True + +and doesn't prevent us from opening the blob for writing: + + >>> with blob.open('w') as file: + ... _ = file.write(b'x') + >>> with blob.open() as fp: fp.read() + 'x' + + >>> f.read() + "I'm a happy blob." 
+ + >>> f.close() + >>> transaction.abort() + +An exception is raised if we call committed on a blob that has +uncommitted changes: + + >>> blob = ZODB.blob.Blob() + >>> blob.committed() + Traceback (most recent call last): + ... + BlobError: Uncommitted changes + + >>> blob.open('c') + Traceback (most recent call last): + ... + BlobError: Uncommitted changes + + >>> with blob.open('w') as file: + ... _ = file.write(b"I'm a happy blob.") + >>> root6['blob6'] = blob + >>> blob.committed() + Traceback (most recent call last): + ... + BlobError: Uncommitted changes + + >>> blob.open('c') + Traceback (most recent call last): + ... + BlobError: Uncommitted changes + + >>> s = transaction.savepoint() + >>> blob.committed() + Traceback (most recent call last): + ... + BlobError: Uncommitted changes + + >>> blob.open('c') + Traceback (most recent call last): + ... + BlobError: Uncommitted changes + + >>> transaction.commit() + >>> with open(blob.committed()) as fp: + ... fp.read() + "I'm a happy blob." + +You can't open a committed blob file for writing: + + >>> try: + ... open(blob.committed(), 'w') # doctest: +ELLIPSIS + ... except: + ... # Produces IOError in Py2 and PermissionError in Py3 + ... print('Error raised.') + Error raised. + +tpc_abort +--------- + +If a transaction is aborted in the middle of 2-phase commit, any data +stored are discarded. + + >>> olddata, oldserial = blob_storage.load(blob._p_oid, '') + >>> from ZODB.Connection import TransactionMetaData + >>> t = TransactionMetaData() + >>> blob_storage.tpc_begin(t) + >>> with open('blobfile', 'wb') as file: + ... _ = file.write(b'This data should go away') + >>> blob_storage.storeBlob(blob._p_oid, oldserial, olddata, 'blobfile', + ... '', t) + >>> new_oid = blob_storage.new_oid() + >>> with open('blobfile2', 'wb') as file: + ... _ = file.write(b'This data should go away too') + >>> blob_storage.storeBlob(new_oid, '\0'*8, olddata, 'blobfile2', + ... 
'', t) + >>> bool(blob_storage.tpc_vote(t)) + False + >>> blob_storage.tpc_abort(t) + +Now, the serial for the existing blob should be the same: + + >>> blob_storage.load(blob._p_oid, '') == (olddata, oldserial) + True + +The old data should be unaffected: + + >>> with open(blob_storage.loadBlob(blob._p_oid, oldserial)) as fp: + ... fp.read() + "I'm a happy blob." + +Similarly, the new object wasn't added to the storage: + + >>> blob_storage.load(new_oid, '') + Traceback (most recent call last): + ... + POSKeyError: 0x... + +.. clean up + + >>> tm1.abort() + >>> tm2.abort() + >>> database.close() diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/tests/blobstorage_packing.txt b/thesisenv/lib/python3.6/site-packages/ZODB/tests/blobstorage_packing.txt new file mode 100644 index 0000000..4790ed9 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/tests/blobstorage_packing.txt @@ -0,0 +1,164 @@ +############################################################################## +# +# Copyright (c) 2005 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. 
+# +############################################################################## + +Packing support for blob data +============================= + +Set up: + + >>> from ZODB.MappingStorage import MappingStorage + >>> from ZODB.serialize import referencesf + >>> from ZODB.blob import Blob, BlobStorage + >>> from ZODB import utils + >>> from ZODB.DB import DB + >>> import transaction + >>> storagefile = 'Data.fs' + >>> blob_dir = 'blobs' + +A helper method to assure a unique timestamp across multiple platforms: + + >>> from ZODB.tests.testblob import new_time + +UNDOING +======= + +See blob_packing.txt. + +NON-UNDOING +=========== + +We need an database with a NON-undoing blob supporting storage: + + >>> base_storage = MappingStorage('storage') + >>> blob_storage = BlobStorage(blob_dir, base_storage) + >>> database = DB(blob_storage) + +Create our root object: + + >>> connection1 = database.open() + >>> root = connection1.root() + +Put some revisions of a blob object in our database and on the filesystem: + + >>> import time, os + >>> tids = [] + >>> times = [] + >>> nothing = transaction.begin() + >>> times.append(new_time()) + >>> blob = Blob() + >>> with blob.open('w') as file: + ... _ = file.write(b'this is blob data 0') + >>> root['blob'] = blob + >>> transaction.commit() + >>> tids.append(blob_storage._tid) + + >>> nothing = transaction.begin() + >>> times.append(new_time()) + >>> with root['blob'].open('w') as file: + ... _ = file.write(b'this is blob data 1') + >>> transaction.commit() + >>> tids.append(blob_storage._tid) + + >>> nothing = transaction.begin() + >>> times.append(new_time()) + >>> with root['blob'].open('w') as file: + ... _ = file.write(b'this is blob data 2') + >>> transaction.commit() + >>> tids.append(blob_storage._tid) + + >>> nothing = transaction.begin() + >>> times.append(new_time()) + >>> with root['blob'].open('w') as file: + ... 
_ = file.write(b'this is blob data 3') + >>> transaction.commit() + >>> tids.append(blob_storage._tid) + + >>> nothing = transaction.begin() + >>> times.append(new_time()) + >>> with root['blob'].open('w') as file: + ... _ = file.write(b'this is blob data 4') + >>> transaction.commit() + >>> tids.append(blob_storage._tid) + + >>> oid = root['blob']._p_oid + >>> fns = [ blob_storage.fshelper.getBlobFilename(oid, x) for x in tids ] + >>> [ os.path.exists(x) for x in fns ] + [True, True, True, True, True] + +Get our blob filenames for this oid. + + >>> fns = [ blob_storage.fshelper.getBlobFilename(oid, x) for x in tids ] + +Do a pack to the slightly before the first revision was written: + + >>> packtime = times[0] + >>> blob_storage.pack(packtime, referencesf) + >>> [ os.path.exists(x) for x in fns ] + [False, False, False, False, True] + +Do a pack to now: + + >>> packtime = new_time() + >>> blob_storage.pack(packtime, referencesf) + >>> [ os.path.exists(x) for x in fns ] + [False, False, False, False, True] + +Delete the object and do a pack, it should get rid of the most current +revision as well as the entire directory: + + >>> nothing = transaction.begin() + >>> del root['blob'] + >>> transaction.commit() + >>> packtime = new_time() + >>> blob_storage.pack(packtime, referencesf) + >>> [ os.path.exists(x) for x in fns ] + [False, False, False, False, False] + >>> os.path.exists(os.path.split(fns[0])[0]) + False + +Avoiding parallel packs +======================= + +Blob packing (similar to FileStorage) can only be run once at a time. For +this, a flag (_blobs_pack_is_in_progress) is set. 
If the pack method is called +while this flag is set, it will refuse to perform another pack, until the flag +is reset: + + >>> blob_storage._blobs_pack_is_in_progress + False + >>> blob_storage._blobs_pack_is_in_progress = True + >>> blob_storage.pack(packtime, referencesf) + Traceback (most recent call last): + BlobStorageError: Already packing + >>> blob_storage._blobs_pack_is_in_progress = False + >>> blob_storage.pack(packtime, referencesf) + +We can also see, that the flag is set during the pack, by leveraging the +knowledge that the underlying storage's pack method is also called: + + >>> import six + >>> def dummy_pack(time, ref): + ... six.print_( + ... "_blobs_pack_is_in_progress =", + ... blob_storage._blobs_pack_is_in_progress) + ... return base_pack(time, ref) + >>> base_pack = base_storage.pack + >>> base_storage.pack = dummy_pack + >>> blob_storage.pack(packtime, referencesf) + _blobs_pack_is_in_progress = True + >>> blob_storage._blobs_pack_is_in_progress + False + >>> base_storage.pack = base_pack + + >>> database.close() diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/tests/component.xml b/thesisenv/lib/python3.6/site-packages/ZODB/tests/component.xml new file mode 100644 index 0000000..f0a8bcc --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/tests/component.xml @@ -0,0 +1,16 @@ + + +
+ + +
+ + diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/tests/dangle.py b/thesisenv/lib/python3.6/site-packages/ZODB/tests/dangle.py new file mode 100644 index 0000000..bcbfac5 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/tests/dangle.py @@ -0,0 +1,66 @@ +############################################################################## +# +# Copyright (c) 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## + +"""Functional test to produce a dangling reference.""" +from __future__ import print_function + +import time + +import transaction +from ZODB.FileStorage import FileStorage +from ZODB import DB + +from persistent import Persistent + +class P(Persistent): + pass + +def create_dangling_ref(db): + rt = db.open().root() + + rt[1] = o1 = P() + transaction.get().note(u"create o1") + transaction.commit() + + rt[2] = o2 = P() + transaction.get().note(u"create o2") + transaction.commit() + + c = o1.child = P() + transaction.get().note(u"set child on o1") + transaction.commit() + + o1.child = P() + transaction.get().note(u"replace child on o1") + transaction.commit() + + time.sleep(2) + # The pack should remove the reference to c, because it is no + # longer referenced from o1. But the object still exists and has + # an oid, so a new commit of it won't create a new object. 
+ db.pack() + + print(repr(c._p_oid)) + o2.child = c + transaction.get().note(u"set child on o2") + transaction.commit() + +def main(): + fs = FileStorage(u"dangle.fs") + db = DB(fs) + create_dangling_ref(db) + db.close() + +if __name__ == "__main__": + main() diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/tests/dbopen.txt b/thesisenv/lib/python3.6/site-packages/ZODB/tests/dbopen.txt new file mode 100644 index 0000000..7354307 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/tests/dbopen.txt @@ -0,0 +1,377 @@ +===================== +Connection Management +===================== + + +Here we exercise the connection management done by the DB class. + + >>> from ZODB import DB + >>> from ZODB.MappingStorage import MappingStorage as Storage + +Capturing log messages from DB is important for some of the examples: + + >>> from zope.testing.loggingsupport import InstalledHandler + >>> handler = InstalledHandler('ZODB.DB') + +Create a storage, and wrap it in a DB wrapper: + + >>> st = Storage() + >>> db = DB(st) + +By default, we can open 7 connections without any log messages: + + >>> conns = [db.open() for dummy in range(7)] + >>> handler.records + [] + +Open one more, and we get a warning: + + >>> conns.append(db.open()) + >>> len(handler.records) + 1 + >>> msg = handler.records[0] + >>> import six + >>> six.print_(msg.name, msg.levelname, msg.getMessage()) + ZODB.DB WARNING DB.open() has 8 open connections with a pool_size of 7 + +Open 6 more, and we get 6 more warnings: + + >>> conns.extend([db.open() for dummy in range(6)]) + >>> len(conns) + 14 + >>> len(handler.records) + 7 + >>> msg = handler.records[-1] + >>> six.print_(msg.name, msg.levelname, msg.getMessage()) + ZODB.DB WARNING DB.open() has 14 open connections with a pool_size of 7 + +Add another, so that it's more than twice the default, and the level +rises to critical: + + >>> conns.append(db.open()) + >>> len(conns) + 15 + >>> len(handler.records) + 8 + >>> msg = handler.records[-1] + 
>>> six.print_(msg.name, msg.levelname, msg.getMessage()) + ZODB.DB CRITICAL DB.open() has 15 open connections with a pool_size of 7 + +While it's boring, it's important to verify that the same relationships +hold if the default pool size is overridden. + + >>> handler.clear() + >>> db.close() + >>> st = Storage() + >>> PS = 2 # smaller pool size + >>> db = DB(st, pool_size=PS) + >>> conns = [db.open() for dummy in range(PS)] + >>> handler.records + [] + +A warning for opening one more: + + >>> conns.append(db.open()) + >>> len(handler.records) + 1 + >>> msg = handler.records[0] + >>> six.print_(msg.name, msg.levelname, msg.getMessage()) + ZODB.DB WARNING DB.open() has 3 open connections with a pool_size of 2 + +More warnings through 4 connections: + + >>> conns.extend([db.open() for dummy in range(PS-1)]) + >>> len(conns) + 4 + >>> len(handler.records) + 2 + >>> msg = handler.records[-1] + >>> six.print_(msg.name, msg.levelname, msg.getMessage()) + ZODB.DB WARNING DB.open() has 4 open connections with a pool_size of 2 + +And critical for going beyond that: + + >>> conns.append(db.open()) + >>> len(conns) + 5 + >>> len(handler.records) + 3 + >>> msg = handler.records[-1] + >>> six.print_(msg.name, msg.levelname, msg.getMessage()) + ZODB.DB CRITICAL DB.open() has 5 open connections with a pool_size of 2 + +We can change the pool size on the fly: + + >>> handler.clear() + >>> db.setPoolSize(6) + >>> conns.append(db.open()) + >>> handler.records # no log msg -- the pool is bigger now + [] + >>> conns.append(db.open()) # but one more and there's a warning again + >>> len(handler.records) + 1 + >>> msg = handler.records[0] + >>> six.print_(msg.name, msg.levelname, msg.getMessage()) + ZODB.DB WARNING DB.open() has 7 open connections with a pool_size of 6 + +Enough of that. + + >>> handler.clear() + >>> db.close() + +More interesting is the stack-like nature of connection reuse. 
So long as +we keep opening new connections, and keep them alive, all connections +returned are distinct: + + >>> st = Storage() + >>> db = DB(st) + >>> c1 = db.open() + >>> c1.cacheMinimize() + >>> c2 = db.open() + >>> c3 = db.open() + >>> c1 is c2 or c1 is c3 or c2 is c3 + False + +Let's put some markers on the connections, so we can identify these +specific objects later: + + >>> c1.MARKER = 'c1' + >>> c2.MARKER = 'c2' + >>> c3.MARKER = 'c3' + +Now explicitly close c1 and c2: + + >>> c1.close() + >>> c2.close() + +Reaching into the internals, we can see that db's connection pool now has +two connections available for reuse, and knows about three connections in +all: + + >>> pool = db.pool + >>> len(pool.available) + 2 + >>> len(pool.all) + 3 + +Since we closed c2 last, it's at the top of the available stack, so will +be reused by the next open(): + + >>> c1 = db.open() + >>> c1.MARKER + 'c2' + >>> len(pool.available), len(pool.all) + (1, 3) + + >>> c3.close() # now the stack has c3 on top, then c1 + >>> c2 = db.open() + >>> c2.MARKER + 'c3' + >>> len(pool.available), len(pool.all) + (1, 3) + >>> c3 = db.open() + >>> c3.MARKER + 'c1' + >>> len(pool.available), len(pool.all) + (0, 3) + +It's a bit more complicated though. The connection pool tries to keep +connections with larger caches at the top of the stack. It does this +by having connections with smaller caches "sink" below connections with +larger caches when they are closed. + +To see this, we'll add some objects to the caches: + + >>> for i in range(10): + ... c1.root()[i] = c1.root().__class__() + >>> import transaction + >>> transaction.commit() + >>> c1._cache.cache_non_ghost_count + 11 + + >>> for i in range(5): + ... 
_ = len(c2.root()[i]) + >>> c2._cache.cache_non_ghost_count + 6 + +Now, we'll close the connections and get them back: + + >>> c1.close() + >>> c2.close() + >>> c3.close() + +We closed c3 last, but c1 is the biggest, so we get c1 on the next +open: + + >>> db.open() is c1 + True + +Similarly, c2 is the next buggest, so we get that next: + + >>> db.open() is c2 + True + +and finally c3: + + >>> db.open() is c3 + True + +What about the 3 in pool.all? We've seen that closing connections doesn't +reduce pool.all, and it would be bad if DB kept connections alive forever. + +In fact pool.all is a "weak set" of connections -- it holds weak references +to connections. That alone doesn't keep connection objects alive. The +weak set allows DB's statistics methods to return info about connections +that are still alive. + + + >>> len(db.cacheDetailSize()) # one result for each connection's cache + 3 + +If a connection object is abandoned (it becomes unreachable), then it +will vanish from pool.all automatically. However, connections are +involved in cycles, so exactly when a connection vanishes from +pool.all isn't predictable. It can be forced (on most platforms but +not Jython) by running gc.collect(): + + >>> import gc, sys + >>> dummy = gc.collect() + >>> len(pool.all) + 3 + >>> c3 = None + >>> dummy = gc.collect() # removes c3 from pool.all + >>> len(pool.all) if not sys.platform.startswith("java") else 2 + 2 + +Note that c3 is really gone; in particular it didn't get added back to +the stack of available connections by magic: + + >>> len(pool.available) + 0 + +Nothing in that last block should have logged any msgs: + + >>> handler.records + [] + +If "too many" connections are open, then closing one may kick an older +closed one out of the available connection stack. 
+ + >>> db.close() + >>> st = Storage() + >>> db = DB(st, pool_size=3) + >>> conns = [db.open() for dummy in range(6)] + >>> conns[0].cacheMinimize() + >>> len(handler.records) # 3 warnings for the "excess" connections + 3 + >>> pool = db.pool + >>> len(pool.available), len(pool.all) + (0, 6) + +Let's mark them: + + >>> for i, c in enumerate(conns): + ... c.MARKER = i + +Closing connections adds them to the stack: + + >>> for i in range(3): + ... conns[i].close() + >>> len(pool.available), len(pool.all) + (3, 6) + >>> del conns[:3] # leave the ones with MARKERs 3, 4 and 5 + +Closing another one will purge the one with MARKER 0 from the stack +(since it was the first added to the stack): + + >>> [c.MARKER for (t, c) in pool.available] + [0, 1, 2] + >>> conns[0].close() # MARKER 3 + >>> len(pool.available), len(pool.all) + (3, 5) + >>> [c.MARKER for (t, c) in pool.available] + [1, 2, 3] + +Similarly for the other two: + + >>> conns[1].close(); conns[2].close() + >>> len(pool.available), len(pool.all) + (3, 3) + >>> [c.MARKER for (t, c) in pool.available] + [3, 4, 5] + +Reducing the pool size may also purge the oldest closed connections: + + >>> db.setPoolSize(2) # gets rid of MARKER 3 + >>> len(pool.available), len(pool.all) + (2, 2) + >>> [c.MARKER for (t, c) in pool.available] + [4, 5] + +Since MARKER 5 is still the last one added to the stack, it will be the +first popped: + + >>> c1 = db.open(); c2 = db.open() + >>> c1.MARKER, c2.MARKER + (5, 4) + >>> len(pool.available), len(pool.all) + (0, 2) + +Next: when a closed Connection is removed from .available due to exceeding +pool_size, that Connection's cache is cleared (this behavior was new in +ZODB 3.6b6). While user code may still hold a reference to that +Connection, once it vanishes from .available it's really not usable for +anything sensible (it can never be in the open state again). 
Waiting for +gc to reclaim the Connection and its cache eventually works, but that can +take "a long time" and caches can hold on to many objects, and limited +resources (like RDB connections), for the duration. + + >>> db.close() + >>> st = Storage() + >>> db = DB(st, pool_size=2) + >>> conn0 = db.open() + >>> conn0.cacheMinimize(); import gc; _ = gc.collect() # See fix84.rst + >>> len(conn0._cache) # empty now + 0 + >>> import transaction + >>> conn0.root()['a'] = 1 + >>> transaction.commit() + >>> len(conn0._cache) # but now the cache holds the root object + 1 + +Now open more connections so that the total exceeds pool_size (2): + + >>> conn1 = db.open(); _ = conn1.root()['a'] + >>> conn2 = db.open(); _ = conn2.root()['a'] + +Note that we accessed the objects in the new connections so they would +be of the same size, so that when they get closed, they don't sink +below conn0. + + >>> pool = db.pool + >>> len(pool.all), len(pool.available) # all Connections are in use + (3, 0) + +Return pool_size (2) Connections to the pool: + + >>> conn0.close() + >>> conn1.close() + >>> len(pool.all), len(pool.available) + (3, 2) + >>> len(conn0._cache) # nothing relevant has changed yet + 1 + +When we close the third connection, conn0 will be booted from .all, and +we expect its cache to be cleared then: + + >>> conn2.close() + >>> len(pool.all), len(pool.available) + (2, 2) + >>> len(conn0._cache) # conn0's cache is empty again + 0 + >>> del conn0, conn1, conn2 + +Clean up. 
+ + >>> st.close() + >>> handler.uninstall() diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/tests/fix84.rst b/thesisenv/lib/python3.6/site-packages/ZODB/tests/fix84.rst new file mode 100644 index 0000000..29b6bfc --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/tests/fix84.rst @@ -0,0 +1,19 @@ +A change in the way databases were initialized affected tests +============================================================= + +Originally, databases added root objects by interacting directly with +storages, rather than using connections. As storages transaction +interaction became more complex, interacting directly with storages +let to duplicated code (and buggy) code. + +See: https://github.com/zopefoundation/ZODB/issues/84 + +Fixing this had some impacts that affected tests: + +- New databases now have a connection with a single object in it's cache. + This is a very slightly good thing, but it broke some tests expectations. + +- Tests that manipulated time, had their clocks off because of new time calls. + +This led to some test fixes, in many cases adding a mysterious +``cacheMinimize()`` call. diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/tests/hexstorage.py b/thesisenv/lib/python3.6/site-packages/ZODB/tests/hexstorage.py new file mode 100644 index 0000000..f27c0e3 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/tests/hexstorage.py @@ -0,0 +1,166 @@ +############################################################################## +# +# Copyright (c) 2010 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. 
+# +############################################################################## +import ZODB.blob +import ZODB.interfaces +import ZODB.utils +import zope.interface +from binascii import hexlify, unhexlify + +@zope.interface.implementer(ZODB.interfaces.IStorageWrapper) +class HexStorage(object): + + + copied_methods = ( + 'close', 'getName', 'getSize', 'history', 'isReadOnly', + 'lastTransaction', 'new_oid', 'sortKey', + 'tpc_abort', 'tpc_begin', 'tpc_finish', 'tpc_vote', + 'loadBlob', 'openCommittedBlobFile', 'temporaryDirectory', + 'supportsUndo', 'undo', 'undoLog', 'undoInfo', + ) + + def __init__(self, base): + self.base = base + base.registerDB(self) + + for name in self.copied_methods: + v = getattr(base, name, None) + if v is not None: + setattr(self, name, v) + + zope.interface.directlyProvides(self, zope.interface.providedBy(base)) + + def __getattr__(self, name): + return getattr(self.base, name) + + def __len__(self): + return len(self.base) + + load = ZODB.utils.load_current + + def loadBefore(self, oid, tid): + r = self.base.loadBefore(oid, tid) + if r is not None: + data, serial, after = r + return unhexlify(data[2:]), serial, after + else: + return r + + def loadSerial(self, oid, serial): + return unhexlify(self.base.loadSerial(oid, serial)[2:]) + + def pack(self, pack_time, referencesf, gc=True): + def refs(p, oids=None): + return referencesf(unhexlify(p[2:]), oids) + return self.base.pack(pack_time, refs, gc) + + def registerDB(self, db): + self.db = db + self._db_transform = db.transform_record_data + self._db_untransform = db.untransform_record_data + + _db_transform = _db_untransform = lambda self, data: data + + def store(self, oid, serial, data, version, transaction): + return self.base.store( + oid, serial, b'.h'+hexlify(data), version, transaction) + + def restore(self, oid, serial, data, version, prev_txn, transaction): + return self.base.restore( + oid, serial, data and (b'.h'+hexlify(data)), version, prev_txn, + transaction) + + def 
iterator(self, start=None, stop=None): + it = self.base.iterator(start, stop) + try: + for t in it: + yield Transaction(self, t) + finally: + if hasattr(it, 'close'): + it.close() + + def storeBlob(self, oid, oldserial, data, blobfilename, version, + transaction): + return self.base.storeBlob(oid, oldserial, b'.h'+hexlify(data), + blobfilename, version, transaction) + + def restoreBlob(self, oid, serial, data, blobfilename, prev_txn, + transaction): + return self.base.restoreBlob(oid, serial, + data and (b'.h'+hexlify(data)), + blobfilename, prev_txn, transaction) + + def invalidateCache(self): + return self.db.invalidateCache() + + def invalidate(self, transaction_id, oids, version=''): + return self.db.invalidate(transaction_id, oids, version) + + def references(self, record, oids=None): + return self.db.references(unhexlify(record[2:]), oids) + + def transform_record_data(self, data): + return b'.h'+hexlify(self._db_transform(data)) + + def untransform_record_data(self, data): + return self._db_untransform(unhexlify(data[2:])) + + def record_iternext(self, next=None): + oid, tid, data, next = self.base.record_iternext(next) + return oid, tid, unhexlify(data[2:]), next + + def copyTransactionsFrom(self, other): + ZODB.blob.copyTransactionsFromTo(other, self) + +class ServerHexStorage(HexStorage): + """Use on ZEO storage server when Hex is used on client + + Don't do conversion as part of load/store, but provide + pickle decoding. 
+ """ + + copied_methods = HexStorage.copied_methods + ( + 'load', 'loadBefore', 'loadSerial', 'store', 'restore', + 'iterator', 'storeBlob', 'restoreBlob', 'record_iternext', + ) + +class Transaction(object): + + def __init__(self, store, trans): + self.__store = store + self.__trans = trans + + def __iter__(self): + for r in self.__trans: + if r.data: + r.data = self.__store.untransform_record_data(r.data) + yield r + + def __getattr__(self, name): + return getattr(self.__trans, name) + +class ZConfigHex(object): + + _factory = HexStorage + + def __init__(self, config): + self.config = config + self.name = config.getSectionName() + + def open(self): + base = self.config.base.open() + return self._factory(base) + +class ZConfigServerHex(ZConfigHex): + + _factory = ServerHexStorage diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/tests/loggingsupport.py b/thesisenv/lib/python3.6/site-packages/ZODB/tests/loggingsupport.py new file mode 100644 index 0000000..daf59fb --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/tests/loggingsupport.py @@ -0,0 +1,122 @@ +############################################################################## +# +# Copyright (c) 2004 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. 
+# +############################################################################## +"""Support for testing logging code + +If you want to test that your code generates proper log output, you +can create and install a handler that collects output: + + >>> handler = InstalledHandler('foo.bar') + +The handler is installed into loggers for all of the names passed. In +addition, the logger level is set to 1, which means, log +everything. If you want to log less than everything, you can provide a +level keyword argument. The level setting effects only the named +loggers. + +Then, any log output is collected in the handler: + + >>> logging.getLogger('foo.bar').exception('eek') + >>> logging.getLogger('foo.bar').info('blah blah') + + >>> import six + >>> for record in handler.records: + ... six.print_(record.name, record.levelname) + ... six.print_(' ', record.getMessage()) + foo.bar ERROR + eek + foo.bar INFO + blah blah + +A similar effect can be gotten by just printing the handler: + + >>> print(handler) + foo.bar ERROR + eek + foo.bar INFO + blah blah + +After checking the log output, you need to uninstall the handler: + + >>> handler.uninstall() + +At which point, the handler won't get any more log output. 
+Let's clear the handler: + + >>> handler.clear() + >>> handler.records + [] + +And then log something: + + >>> logging.getLogger('foo.bar').info('blah') + +and, sure enough, we still have no output: + + >>> handler.records + [] + +$Id: loggingsupport.py 28349 2004-11-06 00:10:32Z tim_one $ +""" + +import logging + +class Handler(logging.Handler): + + def __init__(self, *names, **kw): + logging.Handler.__init__(self) + self.names = names + self.records = [] + self.setLoggerLevel(**kw) + + def setLoggerLevel(self, level=1): + self.level = level + self.oldlevels = {} + + def emit(self, record): + self.records.append(record) + + def clear(self): + del self.records[:] + + def install(self): + for name in self.names: + logger = logging.getLogger(name) + self.oldlevels[name] = logger.level + logger.setLevel(self.level) + logger.addHandler(self) + + def uninstall(self): + for name in self.names: + logger = logging.getLogger(name) + logger.setLevel(self.oldlevels[name]) + logger.removeHandler(self) + + def __str__(self): + return '\n'.join( + [("%s %s\n %s" % + (record.name, record.levelname, + '\n'.join([line + for line in record.getMessage().split('\n') + if line.strip()]) + ) + ) + for record in self.records] + ) + + +class InstalledHandler(Handler): + + def __init__(self, *names): + Handler.__init__(self, *names) + self.install() diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/tests/multidb.txt b/thesisenv/lib/python3.6/site-packages/ZODB/tests/multidb.txt new file mode 100644 index 0000000..e8dffcf --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/tests/multidb.txt @@ -0,0 +1,201 @@ +================== +Multiple Databases +================== + +Multi-database support adds the ability to tie multiple databases into a +collection. The original proposal is in the fishbowl: + + http://www.zope.org/Wikis/ZODB/MultiDatabases/ + +It was implemented during the PyCon 2005 sprints, but in a simpler form, +by Jim Fulton, Christian Theune, and Tim Peters. 
Overview: + +No private attributes were added, and one new method was introduced. + +``DB``: + +- a new ``.database_name`` attribute holds the name of this database. + +- a new ``.databases`` attribute maps from database name to ``DB`` object; all + databases in a multi-database collection share the same ``.databases`` object + +- the ``DB`` constructor has new optional arguments with the same names + (``database_name=`` and ``databases=``). + +``Connection``: + +- a new ``.connections`` attribute maps from database name to a ``Connection`` + for the database with that name; the ``.connections`` mapping object is also + shared among databases in a collection. + +- a new ``.get_connection(database_name)`` method returns a ``Connection`` for + a database in the collection; if a connection is already open, it's returned + (this is the value ``.connections[database_name]``), else a new connection + is opened (and stored as ``.connections[database_name]``) + + +Creating a multi-database starts with creating a named ``DB``: + + >>> from ZODB.tests.test_storage import MinimalMemoryStorage + >>> from ZODB import DB + >>> dbmap = {} + >>> db = DB(MinimalMemoryStorage(), database_name='root', databases=dbmap) + +The database name is accessible afterwards and in a newly created collection: + + >>> db.database_name + 'root' + >>> db.databases # doctest: +ELLIPSIS + {'root': } + >>> db.databases is dbmap + True + +Adding another database to the collection works like this: + + >>> db2 = DB(MinimalMemoryStorage(), + ... database_name='notroot', + ... databases=dbmap) + +The new ``db2`` now shares the ``databases`` dictionary with db and has two +entries: + + >>> db2.databases is db.databases is dbmap + True + >>> len(db2.databases) + 2 + >>> names = sorted(dbmap.keys()); print(names) + ['notroot', 'root'] + +It's an error to try to insert a database with a name already in use: + + >>> db3 = DB(MinimalMemoryStorage(), + ... database_name='root', + ... 
databases=dbmap) + Traceback (most recent call last): + ... + ValueError: database_name 'root' already in databases + +Because that failed, ``db.databases`` wasn't changed: + + >>> len(db.databases) # still 2 + 2 + +You can (still) get a connection to a database this way: + + >>> import transaction + >>> tm = transaction.TransactionManager() + >>> cn = db.open(transaction_manager=tm) + >>> cn # doctest: +ELLIPSIS + + +This is the only connection in this collection right now: + + >>> cn.connections # doctest: +ELLIPSIS + {'root': } + +Getting a connection to a different database from an existing connection in the +same database collection (this enables 'connection binding' within a given +thread/transaction/context ...): + + >>> cn2 = cn.get_connection('notroot') + >>> cn2 # doctest: +ELLIPSIS + + +The second connection gets the same transaction manager as the first: + + >>> cn2.transaction_manager is tm + True + +Now there are two connections in that collection: + + >>> cn2.connections is cn.connections + True + >>> len(cn2.connections) + 2 + >>> names = sorted(cn.connections.keys()); print(names) + ['notroot', 'root'] + +So long as this database group remains open, the same ``Connection`` objects +are returned: + + >>> cn.get_connection('root') is cn + True + >>> cn.get_connection('notroot') is cn2 + True + >>> cn2.get_connection('root') is cn + True + >>> cn2.get_connection('notroot') is cn2 + True + +Of course trying to get a connection for a database not in the group raises +an exception: + + >>> cn.get_connection('no way') + Traceback (most recent call last): + ... + KeyError: 'no way' + +Clean up: + + >>> for a_db in list(dbmap.values()): + ... a_db.close() + + +Configuration from File +----------------------- + +The database name can also be specified in a config file, starting in +ZODB 3.6: + + >>> from ZODB.config import databaseFromString + >>> config = """ + ... + ... + ... database-name this_is_the_name + ... + ... 
""" + >>> db = databaseFromString(config) + >>> print(db.database_name) + this_is_the_name + >>> sorted(db.databases.keys()) + ['this_is_the_name'] + +However, the ``.databases`` attribute cannot be configured from file. It +can be passed to the `ZConfig` factory. I'm not sure of the clearest way +to test that here; this is ugly: + + >>> from ZODB.config import getDbSchema + >>> import ZConfig + >>> try: + ... from cStringIO import StringIO + ... except ImportError: + ... # Py3 + ... from io import StringIO + + +Derive a new `config2` string from the `config` string, specifying a +different database_name: + + >>> config2 = config.replace("this_is_the_name", "another_name") + +Now get a `ZConfig` factory from `config2`: + + >>> f = StringIO(config2) + >>> zconfig, handle = ZConfig.loadConfigFile(getDbSchema(), f) + >>> factory = zconfig.database + +The desired ``databases`` mapping can be passed to this factory: + + >>> db2 = factory[0].open(databases=db.databases) + >>> print(db2.database_name) # has the right name + another_name + >>> db.databases is db2.databases # shares .databases with `db` + True + >>> all = sorted(db2.databases.keys()) + >>> all # and db.database_name & db2.database_name are the keys + ['another_name', 'this_is_the_name'] + +Cleanup. + + >>> db.close() + >>> db2.close() diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/tests/sampledm.py b/thesisenv/lib/python3.6/site-packages/ZODB/tests/sampledm.py new file mode 100644 index 0000000..10fb190 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/tests/sampledm.py @@ -0,0 +1,407 @@ +############################################################################## +# +# Copyright (c) 2004 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Sample objects for use in tests +""" + +class DataManager(object): + """Sample data manager + + This class provides a trivial data-manager implementation and doc + strings to illustrate the the protocol and to provide a tool for + writing tests. + + Our sample data manager has state that is updated through an inc + method and through transaction operations. + + When we create a sample data manager: + + >>> dm = DataManager() + + It has two bits of state, state: + + >>> dm.state + 0 + + and delta: + + >>> dm.delta + 0 + + Both of which are initialized to 0. state is meant to model + committed state, while delta represents tentative changes within a + transaction. We change the state by calling inc: + + >>> dm.inc() + + which updates delta: + + >>> dm.delta + 1 + + but state isn't changed until we commit the transaction: + + >>> dm.state + 0 + + To commit the changes, we use 2-phase commit. We execute the first + stage by calling prepare. We need to pass a transation. Our + sample data managers don't really use the transactions for much, + so we'll be lazy and use strings for transactions: + + >>> t1 = '1' + >>> dm.prepare(t1) + + The sample data manager updates the state when we call prepare: + + >>> dm.state + 1 + >>> dm.delta + 1 + + This is mainly so we can detect some affect of calling the methods. + + Now if we call commit: + + >>> dm.commit(t1) + + Our changes are"permanent". The state reflects the changes and the + delta has been reset to 0. 
+ + >>> dm.state + 1 + >>> dm.delta + 0 + """ + + def __init__(self): + self.state = 0 + self.sp = 0 + self.transaction = None + self.delta = 0 + self.prepared = False + + def inc(self, n=1): + self.delta += n + + def prepare(self, transaction): + """Prepare to commit data + + >>> dm = DataManager() + >>> dm.inc() + >>> t1 = '1' + >>> dm.prepare(t1) + >>> dm.commit(t1) + >>> dm.state + 1 + >>> dm.inc() + >>> t2 = '2' + >>> dm.prepare(t2) + >>> dm.abort(t2) + >>> dm.state + 1 + + It is en error to call prepare more than once without an intervening + commit or abort: + + >>> dm.prepare(t1) + + >>> dm.prepare(t1) + Traceback (most recent call last): + ... + TypeError: Already prepared + + >>> dm.prepare(t2) + Traceback (most recent call last): + ... + TypeError: Already prepared + + >>> dm.abort(t1) + + If there was a preceeding savepoint, the transaction must match: + + >>> rollback = dm.savepoint(t1) + >>> dm.prepare(t2) + Traceback (most recent call last): + ,,, + TypeError: ('Transaction missmatch', '2', '1') + + >>> dm.prepare(t1) + + """ + if self.prepared: + raise TypeError('Already prepared') + self._checkTransaction(transaction) + self.prepared = True + self.transaction = transaction + self.state += self.delta + + def _checkTransaction(self, transaction): + if (transaction is not self.transaction + and self.transaction is not None): + raise TypeError("Transaction missmatch", + transaction, self.transaction) + + def abort(self, transaction): + """Abort a transaction + + The abort method can be called before two-phase commit to + throw away work done in the transaction: + + >>> dm = DataManager() + >>> dm.inc() + >>> dm.state, dm.delta + (0, 1) + >>> t1 = '1' + >>> dm.abort(t1) + >>> dm.state, dm.delta + (0, 0) + + The abort method also throws away work done in savepoints: + + >>> dm.inc() + >>> r = dm.savepoint(t1) + >>> dm.inc() + >>> r = dm.savepoint(t1) + >>> dm.state, dm.delta + (0, 2) + >>> dm.abort(t1) + >>> dm.state, dm.delta + (0, 0) + + If savepoints 
are used, abort must be passed the same + transaction: + + >>> dm.inc() + >>> r = dm.savepoint(t1) + >>> t2 = '2' + >>> dm.abort(t2) + Traceback (most recent call last): + ... + TypeError: ('Transaction missmatch', '2', '1') + + >>> dm.abort(t1) + + The abort method is also used to abort a two-phase commit: + + >>> dm.inc() + >>> dm.state, dm.delta + (0, 1) + >>> dm.prepare(t1) + >>> dm.state, dm.delta + (1, 1) + >>> dm.abort(t1) + >>> dm.state, dm.delta + (0, 0) + + Of course, the transactions passed to prepare and abort must + match: + + >>> dm.prepare(t1) + >>> dm.abort(t2) + Traceback (most recent call last): + ... + TypeError: ('Transaction missmatch', '2', '1') + + >>> dm.abort(t1) + + + """ + self._checkTransaction(transaction) + if self.transaction is not None: + self.transaction = None + + if self.prepared: + self.state -= self.delta + self.prepared = False + + self.delta = 0 + + def commit(self, transaction): + """Complete two-phase commit + + >>> dm = DataManager() + >>> dm.state + 0 + >>> dm.inc() + + We start two-phase commit by calling prepare: + + >>> t1 = '1' + >>> dm.prepare(t1) + + We complete it by calling commit: + + >>> dm.commit(t1) + >>> dm.state + 1 + + It is an error ro call commit without calling prepare first: + + >>> dm.inc() + >>> t2 = '2' + >>> dm.commit(t2) + Traceback (most recent call last): + ... + TypeError: Not prepared to commit + + >>> dm.prepare(t2) + >>> dm.commit(t2) + + If course, the transactions given to prepare and commit must + be the same: + + >>> dm.inc() + >>> t3 = '3' + >>> dm.prepare(t3) + >>> dm.commit(t2) + Traceback (most recent call last): + ... 
+ TypeError: ('Transaction missmatch', '2', '3') + + """ + if not self.prepared: + raise TypeError('Not prepared to commit') + self._checkTransaction(transaction) + self.delta = 0 + self.transaction = None + self.prepared = False + + def savepoint(self, transaction): + """Provide the ability to rollback transaction state + + Savepoints provide a way to: + + - Save partial transaction work. For some data managers, this + could allow resources to be used more efficiently. + + - Provide the ability to revert state to a point in a + transaction without aborting the entire transaction. In + other words, savepoints support partial aborts. + + Savepoints don't use two-phase commit. If there are errors in + setting or rolling back to savepoints, the application should + abort the containing transaction. This is *not* the + responsibility of the data manager. + + Savepoints are always associated with a transaction. Any work + done in a savepoint's transaction is tentative until the + transaction is committed using two-phase commit. + + >>> dm = DataManager() + >>> dm.inc() + >>> t1 = '1' + >>> r = dm.savepoint(t1) + >>> dm.state, dm.delta + (0, 1) + >>> dm.inc() + >>> dm.state, dm.delta + (0, 2) + >>> r.rollback() + >>> dm.state, dm.delta + (0, 1) + >>> dm.prepare(t1) + >>> dm.commit(t1) + >>> dm.state, dm.delta + (1, 0) + + Savepoints must have the same transaction: + + >>> r1 = dm.savepoint(t1) + >>> dm.state, dm.delta + (1, 0) + >>> dm.inc() + >>> dm.state, dm.delta + (1, 1) + >>> t2 = '2' + >>> r2 = dm.savepoint(t2) + Traceback (most recent call last): + ... + TypeError: ('Transaction missmatch', '2', '1') + + >>> r2 = dm.savepoint(t1) + >>> dm.inc() + >>> dm.state, dm.delta + (1, 2) + + If we rollback to an earlier savepoint, we discard all work + done later: + + >>> r1.rollback() + >>> dm.state, dm.delta + (1, 0) + + and we can no longer rollback to the later savepoint: + + >>> r2.rollback() + Traceback (most recent call last): + ... 
+ TypeError: ('Attempt to roll back to invalid save point', 3, 2) + + We can roll back to a savepoint as often as we like: + + >>> r1.rollback() + >>> r1.rollback() + >>> r1.rollback() + >>> dm.state, dm.delta + (1, 0) + + >>> dm.inc() + >>> dm.inc() + >>> dm.inc() + >>> dm.state, dm.delta + (1, 3) + >>> r1.rollback() + >>> dm.state, dm.delta + (1, 0) + + But we can't rollback to a savepoint after it has been + committed: + + >>> dm.prepare(t1) + >>> dm.commit(t1) + + >>> r1.rollback() + Traceback (most recent call last): + ... + TypeError: Attempt to rollback stale rollback + + """ + if self.prepared: + raise TypeError("Can't get savepoint during two-phase commit") + self._checkTransaction(transaction) + self.transaction = transaction + self.sp += 1 + return Rollback(self) + +class Rollback(object): + + def __init__(self, dm): + self.dm = dm + self.sp = dm.sp + self.delta = dm.delta + self.transaction = dm.transaction + + def rollback(self): + if self.transaction is not self.dm.transaction: + raise TypeError("Attempt to rollback stale rollback") + if self.dm.sp < self.sp: + raise TypeError("Attempt to roll back to invalid save point", + self.sp, self.dm.sp) + self.dm.sp = self.sp + self.dm.delta = self.delta + + +def test_suite(): + from doctest import DocTestSuite + return DocTestSuite() diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/tests/speed.py b/thesisenv/lib/python3.6/site-packages/ZODB/tests/speed.py new file mode 100644 index 0000000..a9852ec --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/tests/speed.py @@ -0,0 +1,127 @@ +from __future__ import print_function +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## +usage="""Test speed of a ZODB storage + +Options: + + -d file The data file to use as input. + The default is this script. + + -n n The number of repititions + + -s module A module that defines a 'Storage' + attribute, which is an open storage. + If not specified, a FileStorage will ne + used. + + -z Test compressing data + + -D Run in debug mode + + -L Test loads as well as stores by minimizing + the cache after eachrun + + -M Output means only +""" + +import sys, os, getopt, string, time +sys.path.insert(0, os.getcwd()) + +import ZODB, ZODB.FileStorage +import persistent +import transaction + +class P(persistent.Persistent): pass + +def main(args): + + opts, args = getopt.getopt(args, 'zd:n:Ds:LM') + z=s=None + data=sys.argv[0] + nrep=5 + minimize=0 + detailed=1 + for o, v in opts: + if o=='-n': nrep=string.atoi(v) + elif o=='-d': data=v + elif o=='-s': s=v + elif o=='-z': + global zlib + import zlib + z=compress + elif o=='-L': + minimize=1 + elif o=='-M': + detailed=0 + elif o=='-D': + global debug + os.environ['STUPID_LOG_FILE']='' + os.environ['STUPID_LOG_SEVERITY']='-999' + + if s: + s=__import__(s, globals(), globals(), ('__doc__',)) + s=s.Storage + else: + s=ZODB.FileStorage.FileStorage('zeo_speed.fs', create=1) + + with open(data) as fp: + data = fp.read() + db=ZODB.DB(s, + # disable cache deactivation + cache_size=4000, + cache_deactivate_after=6000,) + + results={1:0, 10:0, 100:0, 1000:0} + for j in range(nrep): + for r in 1, 10, 100, 1000: + t=time.time() + jar=db.open() + transaction.begin() + rt=jar.root() + key='s%s' % r + if key in rt: p=rt[key] + else: rt[key]=p=P() + for i in range(r): + if z is not None: d=z(data) + 
else: d=data + v=getattr(p, str(i), P()) + v.d=d + setattr(p,str(i),v) + transaction.commit() + jar.close() + t=time.time()-t + if detailed: + sys.stderr.write("%s\t%s\t%.4f\n" % (j, r, t)) + sys.stdout.flush() + results[r]=results[r]+t + rt=d=p=v=None # release all references + if minimize: + time.sleep(3) + jar.cacheMinimize(3) + + if detailed: print('-'*24) + for r in 1, 10, 100, 1000: + t=results[r]/nrep + sys.stderr.write("mean:\t%s\t%.4f\t%.4f (s/o)\n" % (r, t, t/r)) + + db.close() + + +def compress(s): + c=zlib.compressobj() + o=c.compress(s) + return o+c.flush() + +if __name__=='__main__': main(sys.argv[1:]) diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/tests/synchronizers.txt b/thesisenv/lib/python3.6/site-packages/ZODB/tests/synchronizers.txt new file mode 100644 index 0000000..50c2fca --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/tests/synchronizers.txt @@ -0,0 +1,96 @@ +============= +Synchronizers +============= + +Here are some tests that storage ``sync()`` methods get called at appropriate +times in the life of a transaction. The tested behavior is new in ZODB 3.4. + +First define a lightweight storage with a ``sync()`` method: + + >>> import ZODB + >>> from ZODB.MappingStorage import MappingStorage + >>> import transaction + + >>> class SimpleStorage(MappingStorage): + ... sync_called = False + ... + ... def sync(self, *args): + ... 
self.sync_called = True + +Make a change locally: + + >>> st = SimpleStorage() + >>> db = ZODB.DB(st) + >>> st.sync_called = False + >>> cn = db.open() + >>> rt = cn.root() + >>> rt['a'] = 1 + +Sync isn't called when a connection is opened, even though that +implicitly starts a new transaction: + + >>> st.sync_called + False + +Sync is only called when we explicitly start a new transaction: + + >>> _ = transaction.begin() + + >>> st.sync_called + True + >>> st.sync_called = False + +BTW, calling ``sync()`` on a connection starts a new transaction, which +caused ``sync()`` to be called on the storage: + + >>> cn.sync() + >>> st.sync_called + True + >>> st.sync_called = False + +``sync()`` is not called by the Connection's ``afterCompletion()`` +hook after the commit completes, because we'll sync when a new +transaction begins: + + >>> transaction.commit() + >>> st.sync_called # False before 3.4 + False + +``sync()`` is also not called by the ``afterCompletion()`` hook after an abort. + + >>> st.sync_called = False + >>> rt['b'] = 2 + >>> transaction.abort() + >>> st.sync_called # False before 3.4 + False + +And ``sync()`` is called whenever we explicitly start a new transaction, via +the ``newTransaction()`` hook. + + >>> st.sync_called = False + >>> dummy = transaction.begin() + >>> st.sync_called # False before 3.4 + True + +Clean up. Closing db isn't enough -- closing a DB doesn't close its +`Connections`. Leaving our `Connection` open here can cause the +``SimpleStorage.sync()`` method to get called later, during another test, and +our doctest-synthesized module globals no longer exist then. 
You get a weird +traceback then ;-) + + >>> cn.close() + +As a special case, if a synchronizer registers while a transaction is +in flight, then newTransaction and thus the storage sync method is +called: + + >>> tm = transaction.TransactionManager() + >>> st.sync_called = False + >>> _ = tm.begin() # we're doing this _before_ opening a connection + >>> cn = db.open(transaction_manager=tm) + >>> st.sync_called + True + + >>> cn.close() + >>> db.close() + diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/tests/testActivityMonitor.py b/thesisenv/lib/python3.6/site-packages/ZODB/tests/testActivityMonitor.py new file mode 100644 index 0000000..7469949 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/tests/testActivityMonitor.py @@ -0,0 +1,107 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Tests of the default activity monitor. 
+ +See ZODB/ActivityMonitor.py + +$Id$ +""" + +import unittest +import time + +from ZODB.ActivityMonitor import ActivityMonitor + + +class FakeConnection(object): + + loads = 0 + stores = 0 + + def _transferred(self, loads, stores): + self.loads = self.loads + loads + self.stores = self.stores + stores + + def getTransferCounts(self, clear=0): + res = self.loads, self.stores + if clear: + self.loads = self.stores = 0 + return res + + +class Tests(unittest.TestCase): + + def testAddLogEntries(self): + am = ActivityMonitor(history_length=3600) + self.assertEqual(len(am.log), 0) + c = FakeConnection() + c._transferred(1, 2) + am.closedConnection(c) + c._transferred(3, 7) + am.closedConnection(c) + self.assertEqual(len(am.log), 2) + + def testTrim(self): + am = ActivityMonitor(history_length=0.1) + c = FakeConnection() + c._transferred(1, 2) + am.closedConnection(c) + time.sleep(0.2) + c._transferred(3, 7) + am.closedConnection(c) + self.assertTrue(len(am.log) <= 1) + + def testSetHistoryLength(self): + am = ActivityMonitor(history_length=3600) + c = FakeConnection() + c._transferred(1, 2) + am.closedConnection(c) + time.sleep(0.2) + c._transferred(3, 7) + am.closedConnection(c) + self.assertEqual(len(am.log), 2) + am.setHistoryLength(0.1) + self.assertEqual(am.getHistoryLength(), 0.1) + self.assertTrue(len(am.log) <= 1) + + def testActivityAnalysis(self): + am = ActivityMonitor(history_length=3600) + c = FakeConnection() + c._transferred(1, 2) + am.closedConnection(c) + c._transferred(3, 7) + am.closedConnection(c) + res = am.getActivityAnalysis(start=0, end=0, divisions=10) + lastend = 0 + for n in range(9): + div = res[n] + self.assertEqual(div['stores'], 0) + self.assertEqual(div['loads'], 0) + self.assertTrue(div['start'] > 0) + self.assertTrue(div['start'] >= lastend) + self.assertTrue(div['start'] < div['end']) + lastend = div['end'] + div = res[9] + self.assertEqual(div['stores'], 9) + self.assertEqual(div['loads'], 4) + self.assertTrue(div['start'] > 0) + 
self.assertTrue(div['start'] >= lastend) + self.assertTrue(div['start'] < div['end']) + + +def test_suite(): + return unittest.makeSuite(Tests) + +if __name__=='__main__': + unittest.main(defaultTest='test_suite') diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/tests/testBroken.py b/thesisenv/lib/python3.6/site-packages/ZODB/tests/testBroken.py new file mode 100644 index 0000000..3a6b296 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/tests/testBroken.py @@ -0,0 +1,99 @@ +############################################################################## +# +# Copyright (c) 2004 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Test broken-object suppport +""" + +import sys +import unittest +import persistent +import transaction +import os +if os.environ.get('USE_ZOPE_TESTING_DOCTEST'): + from zope.testing.doctest import DocTestSuite +else: + from doctest import DocTestSuite +from ZODB.tests.util import DB, checker + +def test_integration(): + r"""Test the integration of broken object support with the databse: + + >>> db = DB() + + We'll create a fake module with a class: + + >>> class NotThere(object): + ... Atall = type('Atall', (persistent.Persistent, ), + ... 
{'__module__': 'ZODB.not.there'}) + + And stuff this into sys.modules to simulate a regular module: + + >>> sys.modules['ZODB.not.there'] = NotThere + >>> sys.modules['ZODB.not'] = NotThere + + Now, we'll create and save an instance, and make sure we can + load it in another connection: + + >>> a = NotThere.Atall() + >>> a.x = 1 + >>> conn1 = db.open() + >>> conn1.root()['a'] = a + >>> transaction.commit() + + >>> conn2 = db.open() + >>> a2 = conn2.root()['a'] + >>> a2.__class__ is a.__class__ + True + >>> a2.x + 1 + + Now, we'll uninstall the module, simulating having the module + go away: + + >>> del sys.modules['ZODB.not.there'] + + and we'll try to load the object in another connection: + + >>> conn3 = db.open() + >>> a3 = conn3.root()['a'] + >>> a3 # doctest: +NORMALIZE_WHITESPACE + + + >>> a3.__Broken_state__ + {'x': 1} + + Broken objects provide an interface: + + >>> from ZODB.interfaces import IBroken + >>> IBroken.providedBy(a3) + True + + Let's clean up: + + >>> db.close() + >>> del sys.modules['ZODB.not'] + + Cleanup: + + >>> import ZODB.broken + >>> ZODB.broken.broken_cache.clear() + """ + +def test_suite(): + return unittest.TestSuite(( + DocTestSuite('ZODB.broken', checker=checker), + DocTestSuite(checker=checker), + )) + +if __name__ == '__main__': unittest.main() diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/tests/testCache.py b/thesisenv/lib/python3.6/site-packages/ZODB/tests/testCache.py new file mode 100644 index 0000000..f423706 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/tests/testCache.py @@ -0,0 +1,544 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""A few simple tests of the public cache API. + +Each DB Connection has a separate PickleCache. The Cache serves two +purposes. It acts like a memo for unpickling. It also keeps recent +objects in memory under the assumption that they may be used again. +""" + +from persistent import Persistent +from persistent import PickleCache +from persistent.mapping import PersistentMapping +from ZODB.tests.MinPO import MinPO +from ZODB.utils import p64 +import doctest +import gc +import sys +import threading +import transaction +import unittest +import ZODB +import ZODB.MappingStorage +import ZODB.tests.util + + +class CacheTestBase(ZODB.tests.util.TestCase): + + def setUp(self): + ZODB.tests.util.TestCase.setUp(self) + store = ZODB.MappingStorage.MappingStorage() + self.db = ZODB.DB(store, + cache_size = self.CACHE_SIZE) + self.conns = [] + + def tearDown(self): + self.db.close() + ZODB.tests.util.TestCase.tearDown(self) + + CACHE_SIZE = 20 + + def noodle_new_connection(self): + """Do some reads and writes on a new connection.""" + + c = self.db.open() + self.conns.append(c) + self.noodle_connection(c) + + def noodle_connection(self, c): + r = c.root() + + i = len(self.conns) + d = r.get(i) + if d is None: + d = r[i] = PersistentMapping() + transaction.commit() + + for i in range(15): + o = d.get(i) + if o is None: + o = d[i] = MinPO(i) + o.value += 1 + transaction.commit() + + + +# CantGetRidOfMe is used by checkMinimizeTerminates. 
+make_trouble = True +class CantGetRidOfMe(MinPO): + def __init__(self, value): + MinPO.__init__(self, value) + self.an_attribute = 42 + + def __del__(self): + # Referencing an attribute of self causes self to be + # loaded into the cache again, which also resurrects + # self. + if make_trouble: + self.an_attribute + +class DBMethods(CacheTestBase): + + def setUp(self): + CacheTestBase.setUp(self) + for i in range(4): + self.noodle_new_connection() + + def testCacheDetail(self): + for name, count in self.db.cacheDetail(): + self.assertEqual(isinstance(name, str), True) + self.assertEqual(isinstance(count, int), True) + + def testCacheExtremeDetail(self): + expected = ['conn_no', 'id', 'oid', 'rc', 'klass', 'state'] + for dict in self.db.cacheExtremeDetail(): + for k, v in dict.items(): + self.assertTrue(k in expected) + + # TODO: not really sure how to do a black box test of the cache. + # Should the full sweep and minimize calls always remove things? + + def testFullSweep(self): + old_size = self.db.cacheSize() + self.db.cacheFullSweep() + new_size = self.db.cacheSize() + self.assertTrue(new_size < old_size, "%s < %s" % (old_size, new_size)) + + def testMinimize(self): + old_size = self.db.cacheSize() + self.db.cacheMinimize() + new_size = self.db.cacheSize() + self.assertTrue(new_size < old_size, "%s < %s" % (old_size, new_size)) + + def testMinimizeTerminates(self): + # This is tricky. cPickleCache had a case where it could get into + # an infinite loop, but we don't want the test suite to hang + # if this bug reappears. So this test spawns a thread to run the + # dangerous operation, and the main thread complains if the worker + # thread hasn't finished in 30 seconds (arbitrary, but way more + # than enough). In that case, the worker thread will continue + # running forever (until killed externally), but at least the + # test suite will move on. 
+ # + # The bug was triggered by having a persistent object whose __del__ + # method references an attribute of the object. An attempt to + # ghostify such an object will clear the attribute, and if the + # cache also releases the last Python reference to the object then + # (due to ghostifying it), the __del__ method gets invoked. + # Referencing the attribute loads the object again, and also + # puts it back into the cPickleCache. If the cache implementation + # isn't looking out for this, it can get into an infinite loop + # then, endlessly trying to ghostify an object that in turn keeps + # unghostifying itself again. + + # This test uses threads, so we can't use the default + # transaction manager. + for conn in self.conns: + conn.close() + self.conns[0] = self.db.open(transaction.TransactionManager()) + + class Worker(threading.Thread): + + def __init__(self, testcase): + threading.Thread.__init__(self) + self.testcase = testcase + + def run(self): + global make_trouble + # Make CantGetRidOfMe.__del__ dangerous. + make_trouble = True + + conn = self.testcase.conns[0] + r = conn.root() + d = r[1] + for i in range(len(d)): + d[i] = CantGetRidOfMe(i) + conn.transaction_manager.commit() + + self.testcase.db.cacheMinimize() + + # Defang the nasty objects. Else, because they're + # immortal now, they hang around and create trouble + # for subsequent tests. + make_trouble = False + self.testcase.db.cacheMinimize() + + w = Worker(self) + w.start() + w.join(30) + if w.isAlive(): + self.fail("cacheMinimize still running after 30 seconds -- " + "almost certainly in an infinite loop") + + # TODO: don't have an explicit test for incrgc, because the + # connection and database call it internally. + # Same for the get and invalidate methods. 
+ + def testLRUitems(self): + # get a cache + c = self.conns[0]._cache + c.lru_items() + + def testClassItems(self): + c = self.conns[0]._cache + c.klass_items() + +class LRUCacheTests(CacheTestBase): + + def testLRU(self): + # verify the LRU behavior of the cache + dataset_size = 5 + CACHE_SIZE = dataset_size*2+1 + # a cache big enough to hold the objects added in two + # transactions, plus the root object + self.db.setCacheSize(CACHE_SIZE) + c = self.db.open() + r = c.root() + l = {} + # the root is the only thing in the cache, because all the + # other objects are new + self.assertEqual(len(c._cache), 1) + # run several transactions + for t in range(5): + for i in range(dataset_size): + l[(t,i)] = r[i] = MinPO(i) + transaction.commit() + # commit() will register the objects, placing them in the + # cache. at the end of commit, the cache will be reduced + # down to CACHE_SIZE items + if len(l)>CACHE_SIZE: + self.assertEqual(c._cache.ringlen(), CACHE_SIZE) + for i in range(dataset_size): + # Check objects added in the first two transactions. + # They must all be ghostified. + self.assertEqual(l[(0,i)]._p_changed, None) + self.assertEqual(l[(1,i)]._p_changed, None) + # Check objects added in the last two transactions. + # They must all still exist in memory, but have + # had their changes flushed + self.assertEqual(l[(3,i)]._p_changed, 0) + self.assertEqual(l[(4,i)]._p_changed, 0) + # Of the objects added in the middle transaction, most + # will have been ghostified. There is one cache slot + # that may be occupied by either one of those objects or + # the root, depending on precise order of access. 
We do + # not bother to check this + + def testSize(self): + self.db.cacheMinimize() + self.assertEqual(self.db.cacheSize(), 0) + + CACHE_SIZE = 10 + self.db.setCacheSize(CACHE_SIZE) + + CONNS = 3 + for i in range(CONNS): + self.noodle_new_connection() + + self.assertEqual(self.db.cacheSize(), CACHE_SIZE * CONNS) + details = self.db.cacheDetailSize() + self.assertEqual(len(details), CONNS) + for d in details: + self.assertEqual(d['ngsize'], CACHE_SIZE) + + # The assertion below is non-sensical + # The (poorly named) cache size is a target for non-ghosts. + # The cache *usually* contains non-ghosts, so that the + # size normally exceeds the target size. + + #self.assertEqual(d['size'], CACHE_SIZE) + + def testDetail(self): + CACHE_SIZE = 10 + self.db.setCacheSize(CACHE_SIZE) + + CONNS = 3 + for i in range(CONNS): + self.noodle_new_connection() + + gc.collect() + + # Obscure: The above gc.collect call is necessary to make this test + # pass. + # + # This test then only works because the order of computations + # and object accesses in the "noodle" calls is such that the + # persistent mapping containing the MinPO objects is + # deactivated before the MinPO objects. + # + # - Without the gc call, the cache will contain ghost MinPOs + # and the check of the MinPO count below will fail. That's + # because the counts returned by cacheDetail include ghosts. + # + # - If the mapping object containing the MinPOs isn't + # deactivated, there will be one fewer non-ghost MinPO and + # the test will fail anyway. + # + # This test really needs to be thought through and documented + # better. 
+ + + for klass, count in self.db.cacheDetail(): + if klass.endswith('MinPO'): + self.assertEqual(count, CONNS * CACHE_SIZE) + if klass.endswith('PersistentMapping'): + # one root per connection + self.assertEqual(count, CONNS) + + for details in self.db.cacheExtremeDetail(): + # one 'details' dict per object + if details['klass'].endswith('PersistentMapping'): + self.assertEqual(details['state'], None) + else: + self.assertTrue(details['klass'].endswith('MinPO')) + self.assertEqual(details['state'], 0) + # The cache should never hold an unreferenced ghost. + if details['state'] is None: # i.e., it's a ghost + self.assertTrue(details['rc'] > 0) + +class StubDataManager(object): + def setklassstate(self, object): + pass + +class StubObject(Persistent): + pass + +class CacheErrors(unittest.TestCase): + + def setUp(self): + self.jar = StubDataManager() + self.cache = PickleCache(self.jar) + + def testGetBogusKey(self): + self.assertEqual(self.cache.get(p64(0)), None) + try: + self.cache[12] + except KeyError: + pass + else: + self.fail("expected KeyError") + try: + self.cache[12] = 12 + except TypeError: + pass + else: + self.fail("expected TyepError") + try: + del self.cache[12] + except TypeError: + pass + else: + self.fail("expected TypeError") + + def testBogusObject(self): + def add(key, obj): + self.cache[key] = obj + + # getrefcount is an implementation detail of CPython, + # not present under PyPy/Jython + rc = getattr(sys, 'getrefcount', lambda x: 1) + nones = rc(None) + + key = p64(2) + # value isn't persistent + self.assertRaises(TypeError, add, key, 12) + + o = StubObject() + # o._p_oid == None + self.assertRaises(TypeError, add, key, o) + + o._p_oid = p64(3) + self.assertRaises(ValueError, add, key, o) + + o._p_oid = key + # o._p_jar == None + self.assertRaises(Exception, add, key, o) + + o._p_jar = self.jar + self.cache[key] = o + # make sure it can be added multiple times + self.cache[key] = o + + # same object, different keys + 
self.assertRaises(ValueError, add, p64(0), o) + + if sys.gettrace() is None: + # 'coverage' keeps track of coverage information in a data + # structure that adds a new reference to None for each executed + # line of code, which interferes with this test. So check it + # only if we're running without coverage tracing. + + # On Python 3.7, we can see the value of reference counts + # to None actually go *down* by a few. Possibly it has to + # do with the lazy tracking of frames? + # (https://github.com/python/cpython/commit/5a625d0aa6a6d9ec6574ee8344b41d63dcb9897e) + self.assertLessEqual(rc(None), nones) + + def testTwoCaches(self): + jar2 = StubDataManager() + cache2 = PickleCache(jar2) + + o = StubObject() + key = o._p_oid = p64(1) + o._p_jar = jar2 + + cache2[key] = o + + try: + self.cache[key] = o + except ValueError: + pass + else: + self.fail("expected ValueError because object already in cache") + + def testReadOnlyAttrsWhenCached(self): + o = StubObject() + key = o._p_oid = p64(1) + o._p_jar = self.jar + self.cache[key] = o + try: + o._p_oid = p64(2) + except ValueError: + pass + else: + self.fail("expect that you can't change oid of cached object") + try: + del o._p_jar + except ValueError: + pass + else: + self.fail("expect that you can't delete jar of cached object") + + def testTwoObjsSameOid(self): + # Try to add two distinct objects with the same oid to the cache. + # This has always been an error, but the error message prior to + # ZODB 3.2.6 didn't make sense. This test verifies that (a) an + # exception is raised; and, (b) the error message is the intended + # one. 
+ obj1 = StubObject() + key = obj1._p_oid = p64(1) + obj1._p_jar = self.jar + self.cache[key] = obj1 + + obj2 = StubObject() + obj2._p_oid = key + obj2._p_jar = self.jar + try: + self.cache[key] = obj2 + except ValueError as detail: + self.assertEqual(str(detail), + "A different object already has the same oid") + else: + self.fail("two objects with the same oid should have failed") + +def test_basic_cache_size_estimation(): + """Make sure the basic accounting is correct: + + >>> import ZODB.MappingStorage + >>> db = ZODB.MappingStorage.DB() + >>> conn = db.open() + >>> conn.cacheMinimize(); _ = gc.collect() # See fix84.rst + + >>> def check_cache_size(cache, expected): + ... actual = cache.total_estimated_size + ... if actual != expected: + ... print("expected %d, got %d" % (expected, actual)) + ... print("objects in cache:") + ... for oid, obj in sorted(cache.items()): + ... print(repr(oid), " - ", obj._p_estimated_size, "bytes") + + +The cache is empty initially: + + >>> check_cache_size(conn._cache, 0) + +We force the root to be loaded and the cache grows: + + >>> getattr(conn.root, 'z', None) + >>> root_size = conn.root._root._p_estimated_size + >>> check_cache_size(conn._cache, root_size) + +We need to unwrap the RootConvenience to get to the actual persistent +mapping that is our root object and see its estimated size + + >>> root_size in (64, 128) + True + +.. note:: + + The actual size is 60 (Python 2.6 using cPickle; would be 62 if we + used pickle) or 65 bytes (Python 3.3) due to slight differences in + pickle bytecode that is used. You can play with :: + + pickletools.dis(conn._storage.load(conn.root._root._p_oid)[0])) + + to see the differences in the first pickle (encoding the object class). + and + + pickletools.dis(conn._storage.load(conn.root._root._p_oid)[0][N:])) + + to see the differences in the second pickle (encoding the object state, + here N is the length of the first pickle). 
+ + These sizes are then rounded up to a multiple of 64, to fit in a + 24-bit field for obscure reasons having to do with C structure size + BBB due to evil packages shipping their own copies of cPersistence.h. + +We add some data and the cache grows: + + >>> conn.root.z = ZODB.tests.util.P('x'*100) + >>> import transaction + >>> transaction.commit() + >>> root_size = conn.root._root._p_estimated_size + >>> z_size = conn.root.z._p_estimated_size + >>> check_cache_size(conn._cache, root_size + z_size) + +Note that the size of the root object increased also, so we need to take +a new measurement + + >>> root_size in (128, 192) + True + >>> z_size + 192 + +Loading the objects in another connection gets the same sizes: + + >>> conn2 = db.open() + >>> check_cache_size(conn2._cache, 0) + >>> getattr(conn2.root, 'x', None) + >>> check_cache_size(conn2._cache, root_size) + >>> _ = conn2.root.z.name + >>> check_cache_size(conn2._cache, root_size + z_size) + +If we deactivate, the size goes down: + + >>> conn2.root.z._p_deactivate() + >>> check_cache_size(conn2._cache, root_size) + +Loading data directly, rather than through traversal updates the cache +size correctly: + + >>> conn3 = db.open() + >>> _ = conn3.get(conn2.root.z._p_oid).name + >>> check_cache_size(conn3._cache, z_size) + + """ + + +def test_suite(): + s = unittest.makeSuite(DBMethods) + s.addTest(unittest.makeSuite(LRUCacheTests)) + s.addTest(unittest.makeSuite(CacheErrors)) + s.addTest(doctest.DocTestSuite()) + return s diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/tests/testConfig.py b/thesisenv/lib/python3.6/site-packages/ZODB/tests/testConfig.py new file mode 100644 index 0000000..c709e66 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/tests/testConfig.py @@ -0,0 +1,209 @@ +############################################################################## +# +# Copyright (c) 2003 Zope Foundation and Contributors. +# All Rights Reserved. 
+# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## + +import doctest +import tempfile +import unittest + +import transaction +import ZODB.config +import ZODB.tests.util +from ZODB.POSException import ReadOnlyError + + +class ConfigTestBase(ZODB.tests.util.TestCase): + def _opendb(self, s): + return ZODB.config.databaseFromString(s) + + def tearDown(self): + ZODB.tests.util.TestCase.tearDown(self) + if getattr(self, "storage", None) is not None: + self.storage.cleanup() + + def _test(self, s): + db = self._opendb(s) + try: + self.storage = db._storage + # Do something with the database to make sure it works + cn = db.open() + rt = cn.root() + rt["test"] = 1 + transaction.commit() + finally: + db.close() + + +class ZODBConfigTest(ConfigTestBase): + def test_map_config1(self): + self._test( + """ + + + + """) + + def test_map_config2(self): + self._test( + """ + + + cache-size 1000 + + """) + + def test_file_config1(self): + path = tempfile.mktemp() + self._test( + """ + + + path %s + + + """ % path) + + def test_file_config2(self): + path = tempfile.mktemp() + # first pass to actually create database file + self._test( + """ + + + path %s + + + """ % path) + # write operations must be disallowed on read-only access + cfg = """ + + + path %s + create false + read-only true + + + """ % path + self.assertRaises(ReadOnlyError, self._test, cfg) + + def test_demo_config(self): + cfg = """ + + + name foo + + + + """ + self._test(cfg) + + +def database_xrefs_config(): + r""" + >>> db = ZODB.config.databaseFromString( + ... 
"\n\n\n\n") + >>> db.xrefs + True + >>> db = ZODB.config.databaseFromString( + ... "\nallow-implicit-cross-references true\n" + ... "\n\n\n") + >>> db.xrefs + True + >>> db = ZODB.config.databaseFromString( + ... "\nallow-implicit-cross-references false\n" + ... "\n\n\n") + >>> db.xrefs + False + """ + +def multi_atabases(): + r"""If there are multiple codb sections -> multidatabase + + >>> db = ZODB.config.databaseFromString(''' + ... + ... + ... + ... + ... + ... + ... + ... + ... + ... database-name Bar + ... + ... + ... + ... ''') + >>> sorted(db.databases) + ['', 'Bar', 'foo'] + + >>> db.database_name + '' + >>> db.databases[db.database_name] is db + True + >>> db.databases['foo'] is not db + True + >>> db.databases['Bar'] is not db + True + >>> db.databases['Bar'] is not db.databases['foo'] + True + + Can't have repeats: + + >>> ZODB.config.databaseFromString(''' + ... + ... + ... + ... + ... + ... + ... + ... + ... + ... + ... + ... + ... ''') # doctest: +NORMALIZE_WHITESPACE + Traceback (most recent call last): + ... + ConfigurationSyntaxError: + section names must not be re-used within the same container:'1' (line 9) + + >>> ZODB.config.databaseFromString(''' + ... + ... + ... + ... + ... + ... + ... + ... + ... ''') # doctest: +NORMALIZE_WHITESPACE + Traceback (most recent call last): + ... 
+ ValueError: database_name '' already in databases + + """ + +def test_suite(): + suite = unittest.TestSuite() + suite.addTest(doctest.DocTestSuite( + setUp=ZODB.tests.util.setUp, + tearDown=ZODB.tests.util.tearDown, + checker=ZODB.tests.util.checker)) + suite.addTest(unittest.makeSuite(ZODBConfigTest)) + return suite + + +if __name__ == '__main__': + unittest.main(defaultTest='test_suite') diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/tests/testConnection.py b/thesisenv/lib/python3.6/site-packages/ZODB/tests/testConnection.py new file mode 100644 index 0000000..88276b9 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/tests/testConnection.py @@ -0,0 +1,1398 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Unit tests for the Connection class.""" +from __future__ import print_function + +import doctest +import re +import six +import sys +import unittest + +import transaction +from transaction import Transaction + +import ZODB.tests.util +from ZODB.config import databaseFromString +from ZODB.utils import p64, u64, z64 +from persistent import Persistent +from zope.interface.verify import verifyObject +from zope.testing import loggingsupport, renormalizing + +from .. import mvccadapter + +checker = renormalizing.RENormalizing([ + # Python 3 bytes add a "b". + (re.compile("b('.*?')"), r"\1"), + # Python 3 removes empty list representation. 
+ (re.compile(r"set\(\[\]\)"), r"set()"), + # Python 3 adds module name to exceptions. + (re.compile("ZODB.POSException.POSKeyError"), r"POSKeyError"), + (re.compile("ZODB.POSException.ReadConflictError"), r"ReadConflictError"), + (re.compile("ZODB.POSException.ConflictError"), r"ConflictError"), + (re.compile("ZODB.POSException.ConnectionStateError"), + r"ConnectionStateError"), + ]) + + +class ConnectionDotAdd(ZODB.tests.util.TestCase): + + def setUp(self): + ZODB.tests.util.TestCase.setUp(self) + from ZODB.Connection import Connection + self.db = StubDatabase() + self.datamgr = Connection(self.db) + self.datamgr.open() + self.transaction = Transaction() + + def test_add(self): + from ZODB.POSException import InvalidObjectReference + obj = StubObject() + self.assertTrue(obj._p_oid is None) + self.assertTrue(obj._p_jar is None) + self.datamgr.add(obj) + self.assertTrue(obj._p_oid is not None) + self.assertTrue(obj._p_jar is self.datamgr) + self.assertTrue(self.datamgr.get(obj._p_oid) is obj) + + # Only first-class persistent objects may be added. + self.assertRaises(TypeError, self.datamgr.add, object()) + + # Adding to the same connection does not fail. Object keeps the + # same oid. + oid = obj._p_oid + self.datamgr.add(obj) + self.assertEqual(obj._p_oid, oid) + + # Cannot add an object from a different connection. + obj2 = StubObject() + obj2._p_jar = object() + self.assertRaises(InvalidObjectReference, self.datamgr.add, obj2) + + def testResetOnAbort(self): + # Check that _p_oid and _p_jar are reset when a transaction is + # aborted. + obj = StubObject() + self.datamgr.add(obj) + oid = obj._p_oid + self.datamgr.abort(self.transaction) + self.assertTrue(obj._p_oid is None) + self.assertTrue(obj._p_jar is None) + self.assertRaises(KeyError, self.datamgr.get, oid) + + def testResetOnTpcAbort(self): + obj = StubObject() + self.datamgr.add(obj) + oid = obj._p_oid + + # Simulate an error while committing some other object. 
+ + self.datamgr.tpc_begin(self.transaction) + # Let's pretend something bad happens here. + # Call tpc_abort, clearing everything. + self.datamgr.tpc_abort(self.transaction) + self.assertTrue(obj._p_oid is None) + self.assertTrue(obj._p_jar is None) + self.assertRaises(KeyError, self.datamgr.get, oid) + + def testTpcAbortAfterCommit(self): + obj = StubObject() + self.datamgr.add(obj) + oid = obj._p_oid + self.datamgr.tpc_begin(self.transaction) + self.datamgr.commit(self.transaction) + # Let's pretend something bad happened here. + self.datamgr.tpc_abort(self.transaction) + self.assertTrue(obj._p_oid is None) + self.assertTrue(obj._p_jar is None) + self.assertRaises(KeyError, self.datamgr.get, oid) + self.assertEqual(self.db.storage._stored, [oid]) + + def testCommit(self): + obj = StubObject() + self.datamgr.add(obj) + oid = obj._p_oid + self.datamgr.tpc_begin(self.transaction) + self.datamgr.commit(self.transaction) + self.datamgr.tpc_finish(self.transaction) + self.assertTrue(obj._p_oid is oid) + self.assertTrue(obj._p_jar is self.datamgr) + + # This next assertTrue is covered by an assert in tpc_finish. 
+ ##self.assertTrue(not self.datamgr._added) + + self.assertEqual(self.db.storage._stored, [oid]) + self.assertEqual(self.db.storage._finished, [oid]) + + def testModifyOnGetstate(self): + member = StubObject() + subobj = StubObject() + subobj.member = member + obj = ModifyOnGetStateObject(subobj) + self.datamgr.add(obj) + self.datamgr.tpc_begin(self.transaction) + self.datamgr.commit(self.transaction) + self.datamgr.tpc_finish(self.transaction) + storage = self.db.storage + self.assertTrue(obj._p_oid in storage._stored, "object was not stored") + self.assertTrue(subobj._p_oid in storage._stored, + "subobject was not stored") + self.assertTrue(member._p_oid in storage._stored, + "member was not stored") + self.assertTrue(self.datamgr._added_during_commit is None) + + def testUnusedAddWorks(self): + # When an object is added, but not committed, it shouldn't be stored, + # but also it should be an error. + obj = StubObject() + self.datamgr.add(obj) + self.datamgr.tpc_begin(self.transaction) + self.datamgr.tpc_finish(self.transaction) + self.assertTrue(obj._p_oid not in + self.datamgr._storage._storage._stored) + + def test__resetCacheResetsReader(self): + # https://bugs.launchpad.net/zodb/+bug/142667 + old_cache = self.datamgr._cache + self.datamgr._resetCache() + new_cache = self.datamgr._cache + self.assertFalse(new_cache is old_cache) + self.assertTrue(self.datamgr._reader._cache is new_cache) + + +class SetstateErrorLoggingTests(ZODB.tests.util.TestCase): + + def setUp(self): + ZODB.tests.util.TestCase.setUp(self) + from ZODB.Connection import Connection + self.db = db = databaseFromString("\n\n") + self.datamgr = self.db.open() + self.object = StubObject() + self.datamgr.add(self.object) + transaction.commit() + self.handler = loggingsupport.InstalledHandler("ZODB") + + def tearDown(self): + self.handler.uninstall() + + def test_closed_connection_wont_setstate(self): + oid = self.object._p_oid + self.object._p_deactivate() + self.datamgr.close() + 
self.assertRaises( + ZODB.POSException.ConnectionStateError, + self.datamgr.setstate, self.object) + record, = self.handler.records + self.assertEqual( + record.msg, + "Shouldn't load state for ZODB.tests.testConnection.StubObject" + " 0x01 when the connection is closed") + self.assertTrue(record.exc_info) + + +class UserMethodTests(unittest.TestCase): + + # add isn't tested here, because there are a bunch of traditional + # unit tests for it. + + def doctest_root(self): + r"""doctest of root() method + + The root() method is simple, and the tests are pretty minimal. + Ensure that a new database has a root and that it is a + PersistentMapping. + + >>> db = databaseFromString("\n\n") + >>> cn = db.open() + >>> root = cn.root() + >>> type(root).__name__ + 'PersistentMapping' + >>> root._p_oid + '\x00\x00\x00\x00\x00\x00\x00\x00' + >>> root._p_jar is cn + True + >>> db.close() + """ + + def doctest_get(self): + r"""doctest of get() method + + The get() method return the persistent object corresponding to + an oid. + + >>> db = databaseFromString("\n\n") + >>> cn = db.open() + >>> cn.cacheMinimize() # See fix84.rst + >>> obj = cn.get(p64(0)) + >>> obj._p_oid + '\x00\x00\x00\x00\x00\x00\x00\x00' + + The object is a ghost. + + >>> obj._p_state + -1 + + And multiple calls with the same oid, return the same object. + + >>> obj2 = cn.get(p64(0)) + >>> obj is obj2 + True + + If all references to the object are released, then a new + object will be returned. The cache doesn't keep unreferenced + ghosts alive, although on some implementations like PyPy we + need to run a garbage collection to be sure they go away. (The + next object returned may still have the same id, because Python + may re-use the same memory.) + + >>> del obj, obj2 + >>> import gc + >>> _ = gc.collect() + >>> cn._cache.get(p64(0), None) + + If the object is unghosted, then it will stay in the cache + after the last reference is released. 
(This is true only if + there is room in the cache and the object is recently used.) + + >>> obj = cn.get(p64(0)) + >>> obj._p_activate() + >>> y = id(obj) + >>> del obj + >>> obj = cn.get(p64(0)) + >>> id(obj) == y + True + >>> obj._p_state + 0 + + A request for an object that doesn't exist will raise a POSKeyError. + + >>> cn.get(p64(1)) + Traceback (most recent call last): + ... + POSKeyError: 0x01 + """ + + def doctest_close(self): + r"""doctest of close() method + + This is a minimal test, because most of the interesting + effects on closing a connection involve its interaction with the + database and the transaction. + + >>> db = databaseFromString("\n\n") + >>> cn = db.open() + + It's safe to close a connection multiple times. + >>> cn.close() + >>> cn.close() + >>> cn.close() + + It's not possible to load or store objects once the storage is closed. + + >>> cn.get(p64(0)) + Traceback (most recent call last): + ... + ConnectionStateError: The database connection is closed + >>> p = Persistent() + >>> cn.add(p) + Traceback (most recent call last): + ... + ConnectionStateError: The database connection is closed + """ + + def doctest_close_with_pending_changes(self): + r"""doctest to ensure close() w/ pending changes complains + + >>> import transaction + + Just opening and closing is fine. + >>> db = databaseFromString("\n\n") + >>> cn = db.open() + >>> cn.close() + + Opening, making a change, committing, and closing is fine. + >>> cn = db.open() + >>> cn.root()['a'] = 1 + >>> transaction.commit() + >>> cn.close() + + Opening, making a change, and aborting is fine. + >>> cn = db.open() + >>> cn.root()['a'] = 1 + >>> transaction.abort() + >>> cn.close() + + But trying to close with a change pending complains. + >>> cn = db.open() + >>> cn.root()['a'] = 10 + >>> cn.close() + Traceback (most recent call last): + ... 
+ ConnectionStateError: Cannot close a connection joined to a transaction + + This leaves the connection as it was, so we can still commit + the change. + >>> transaction.commit() + >>> cn2 = db.open() + >>> cn2.root()['a'] + 10 + >>> cn.close(); cn2.close() + + >>> db.close() + """ + + def doctest_onCloseCallbacks(self): + r"""doctest of onCloseCallback() method + + >>> db = databaseFromString("\n\n") + >>> cn = db.open() + + Every function registered is called, even if it raises an + exception. They are only called once. + + >>> L = [] + >>> def f(): + ... L.append("f") + >>> def g(): + ... L.append("g") + ... return 1 / 0 + >>> cn.onCloseCallback(g) + >>> cn.onCloseCallback(f) + >>> cn.close() + >>> L + ['g', 'f'] + >>> del L[:] + >>> cn.close() + >>> L + [] + + The implementation keeps a list of callbacks that is reset + to a class variable (which is bound to None) after the connection + is closed. + + >>> cn._Connection__onCloseCallbacks + """ + + def doctest_close_dispatches_to_activity_monitors(self): + r"""doctest that connection close updates activity monitors + + Set up a multi-database: + + >>> db1 = ZODB.DB(None) + >>> db2 = ZODB.DB(None, databases=db1.databases, database_name='2', + ... 
cache_size=10) + >>> conn1 = db1.open() + >>> conn2 = conn1.get_connection('2') + + Add activity monitors to both dbs: + + >>> from ZODB.ActivityMonitor import ActivityMonitor + >>> db1.setActivityMonitor(ActivityMonitor()) + >>> db2.setActivityMonitor(ActivityMonitor()) + + Commit a transaction that affects both connections: + + >>> conn1.root()[0] = conn1.root().__class__() + >>> conn2.root()[0] = conn2.root().__class__() + >>> transaction.commit() + + After closing the primary connection, both monitors should be up to + date: + + >>> conn1.close() + >>> len(db1.getActivityMonitor().log) + 1 + >>> len(db2.getActivityMonitor().log) + 1 + """ + + def doctest_db(self): + r"""doctest of db() method + + >>> db = databaseFromString("\n\n") + >>> cn = db.open() + >>> cn.db() is db + True + >>> cn.close() + >>> cn.db() is db + True + """ + + def doctest_isReadOnly(self): + r"""doctest of isReadOnly() method + + >>> db = databaseFromString("\n\n") + >>> cn = db.open() + >>> cn.isReadOnly() + False + >>> cn.close() + >>> cn.isReadOnly() + Traceback (most recent call last): + ... + ConnectionStateError: The database connection is closed + + >>> db.close() + + An expedient way to create a read-only storage: + + >>> db = databaseFromString("\n\n") + >>> db.storage.isReadOnly = lambda: True + >>> cn = db.open() + >>> cn.isReadOnly() + True + """ + + def doctest_cache(self): + r"""doctest of cacheMinimize(). + + Thus test us minimal, just verifying that the method can be called + and has some effect. We need other tests that verify the cache works + as intended. 
+ + >>> db = databaseFromString("\n\n") + >>> cn = db.open() + >>> r = cn.root() + >>> cn.cacheMinimize() + >>> r._p_state + -1 + + >>> r._p_activate() + >>> r._p_state # up to date + 0 + >>> cn.cacheMinimize() + >>> r._p_state # ghost again + -1 + """ + +def doctest_transaction_retry_convenience(): + """ + Simple test to verify integration with the transaction retry + helper my verifying that we can raise ConflictError and have it + handled properly. + + This is an adaptation of the convenience tests in transaction. + + >>> db = ZODB.tests.util.DB() + >>> conn = db.open() + >>> dm = conn.root() + + >>> ntry = 0 + >>> with transaction.manager: + ... dm['ntry'] = 0 + + >>> import ZODB.POSException + >>> for attempt in transaction.manager.attempts(): + ... with attempt as t: + ... t.note(u'test') + ... six.print_(dm['ntry'], ntry) + ... ntry += 1 + ... dm['ntry'] = ntry + ... if ntry % 3: + ... raise ZODB.POSException.ConflictError() + 0 0 + 0 1 + 0 2 + """ + +class InvalidationTests(unittest.TestCase): + + # It's harder to write serious tests, because some of the critical + # correctness issues relate to concurrency. We'll have to depend + # on the various concurrent updates and NZODBThreads tests to + # handle these. + + def doctest_invalidate(self): + r""" + + This test initializes the database with several persistent + objects, then manually delivers invalidations and verifies that + they have the expected effect. + + >>> db = databaseFromString("\n\n") + >>> mvcc_storage = db._mvcc_storage + >>> cn = db.open() + >>> mvcc_instance = cn._storage + >>> p1 = Persistent() + >>> p2 = Persistent() + >>> p3 = Persistent() + >>> r = cn.root() + >>> r.update(dict(p1=p1, p2=p2, p3=p3)) + >>> transaction.commit() + + Transaction ids are 8-byte strings, just like oids; p64() will + create one from an int. + + >>> mvcc_storage.invalidate(p64(1), {p1._p_oid: 1}) + + Transaction start times are based on storage's last + transaction. 
(Previousely, they were based on the first + invalidation seen in a transaction.) + + >>> mvcc_instance.poll_invalidations() == [p1._p_oid] + True + >>> mvcc_instance._start == p64(u64(db.storage.lastTransaction()) + 1) + True + + >>> mvcc_storage.invalidate(p64(10), {p2._p_oid: 1, p64(76): 1}) + + Calling invalidate() doesn't affect the object state until + a transaction boundary. + + >>> p1._p_state + 0 + >>> p2._p_state + 0 + >>> p3._p_state + 0 + + The sync() method will abort the current transaction and + process any pending invalidations. + + >>> cn.sync() + >>> p1._p_state + 0 + >>> p2._p_state + -1 + >>> p3._p_state + 0 + + >>> db.close() + """ + +def doctest_invalidateCache(): + """The invalidateCache method invalidates a connection's cache. + + It also prevents reads until the end of a transaction:: + + >>> from ZODB.tests.util import DB + >>> import transaction + >>> db = DB() + >>> mvcc_storage = db._mvcc_storage + >>> tm = transaction.TransactionManager() + >>> connection = db.open(transaction_manager=tm) + >>> connection.root()['a'] = StubObject() + >>> connection.root()['a'].x = 1 + >>> connection.root()['b'] = StubObject() + >>> connection.root()['b'].x = 1 + >>> connection.root()['c'] = StubObject() + >>> connection.root()['c'].x = 1 + >>> tm.commit() + >>> connection.root()['b']._p_deactivate() + >>> connection.root()['c'].x = 2 + + So we have a connection and an active transaction with some modifications. 
+ Lets call invalidateCache: + + >>> mvcc_storage.invalidateCache() + + This won't have any effect until the next transaction: + + >>> connection.root()['a']._p_changed + 0 + >>> connection.root()['b']._p_changed + >>> connection.root()['c']._p_changed + 1 + + But if we sync(): + + >>> connection.sync() + + All of our data was invalidated: + + >>> connection.root()['a']._p_changed + >>> connection.root()['b']._p_changed + >>> connection.root()['c']._p_changed + + But we can load data as usual: + + Now, if we try to load an object, we'll get a read conflict: + + >>> connection.root()['b'].x + 1 + + >>> db.close() + """ + +def doctest_connection_root_convenience(): + """Connection root attributes can now be used as objects with attributes + + >>> db = ZODB.tests.util.DB() + >>> conn = db.open() + >>> conn.root.x + Traceback (most recent call last): + ... + AttributeError: x + + >>> del conn.root.x + Traceback (most recent call last): + ... + AttributeError: x + + >>> conn.root()['x'] = 1 + >>> conn.root.x + 1 + >>> conn.root.y = 2 + >>> sorted(conn.root().items()) + [('x', 1), ('y', 2)] + + >>> conn.root + + + >>> del conn.root.x + >>> sorted(conn.root().items()) + [('y', 2)] + + >>> conn.root.rather_long_name = 1 + >>> conn.root.rather_long_name2 = 1 + >>> conn.root.rather_long_name4 = 1 + >>> conn.root.rather_long_name5 = 1 + >>> conn.root + + """ + +class proper_ghost_initialization_with_empty__p_deactivate_class(Persistent): + def _p_deactivate(self): + pass + +def doctest_proper_ghost_initialization_with_empty__p_deactivate(): + """ + See https://bugs.launchpad.net/zodb/+bug/185066 + + >>> db = ZODB.tests.util.DB() + >>> conn = db.open() + >>> C = proper_ghost_initialization_with_empty__p_deactivate_class + >>> conn.root.x = x = C() + >>> conn.root.x.y = 1 + >>> transaction.commit() + + >>> conn2 = db.open() + >>> bool(conn2.root.x._p_changed) + False + >>> conn2.root.x.y + 1 + + """ + +def doctest_readCurrent(): + r""" + The connection's readCurrent method is 
called to provide a higher + level of consistency in cases where an object is read to compute an + update to a separate object. When this is used, the + checkCurrentSerialInTransaction method on the storage is called in + 2-phase commit. + + To demonstrate this, we'll create a storage and give it a test + implementation of checkCurrentSerialInTransaction. + + >>> import ZODB.MappingStorage + >>> store = ZODB.MappingStorage.MappingStorage() + + >>> from ZODB.POSException import ReadConflictError + >>> bad = set() + >>> def checkCurrentSerialInTransaction(oid, serial, trans): + ... six.print_('checkCurrentSerialInTransaction', repr(oid)) + ... if oid in bad: + ... raise ReadConflictError(oid=oid) + + >>> store.checkCurrentSerialInTransaction = checkCurrentSerialInTransaction + + Now, we'll use the storage as usual. checkCurrentSerialInTransaction + won't normally be called: + + >>> db = ZODB.DB(store) + >>> conn = db.open() + >>> conn.root.a = ZODB.tests.util.P('a') + >>> transaction.commit() + >>> conn.root.b = ZODB.tests.util.P('b') + >>> transaction.commit() + + >>> conn.root.a._p_oid + b'\x00\x00\x00\x00\x00\x00\x00\x01' + >>> conn.root.b._p_oid + b'\x00\x00\x00\x00\x00\x00\x00\x02' + + If we call readCurrent for an object and we modify another object, + then checkCurrentSerialInTransaction will be called for the object + readCurrent was called on. 
+ + >>> conn.readCurrent(conn.root.a) + >>> conn.root.b.x = 0 + >>> transaction.commit() + checkCurrentSerialInTransaction '\x00\x00\x00\x00\x00\x00\x00\x01' + + It doesn't matter how often we call readCurrent, + checkCurrentSerialInTransaction will be called only once: + + >>> conn.readCurrent(conn.root.a) + >>> conn.readCurrent(conn.root.a) + >>> conn.readCurrent(conn.root.a) + >>> conn.readCurrent(conn.root.a) + >>> conn.root.b.x += 1 + >>> transaction.commit() + checkCurrentSerialInTransaction '\x00\x00\x00\x00\x00\x00\x00\x01' + + checkCurrentSerialInTransaction won't be called if another object + isn't modified: + + + >>> conn.readCurrent(conn.root.a) + >>> transaction.commit() + + Or if the object it was called on is modified: + + >>> conn.readCurrent(conn.root.a) + >>> conn.root.a.x = 0 + >>> conn.root.b.x += 1 + >>> transaction.commit() + + If the storage raises a conflict error, it'll be propagated: + + >>> _ = str(conn.root.a) # do read + >>> bad.add(conn.root.a._p_oid) + >>> conn.readCurrent(conn.root.a) + >>> conn.root.b.x += 1 + >>> transaction.commit() + Traceback (most recent call last): + ... + ReadConflictError: database read conflict error (oid 0x01) + + >>> transaction.abort() + + The conflict error will cause the affected object to be invalidated: + + >>> conn.root.a._p_changed + + The storage may raise it later: + + >>> def checkCurrentSerialInTransaction(oid, serial, trans): + ... if not trans == transaction.get(): print('oops') + ... six.print_('checkCurrentSerialInTransaction', repr(oid)) + ... store.badness = ReadConflictError(oid=oid) + + >>> def tpc_vote(t): + ... if store.badness: + ... badness = store.badness + ... store.badness = None + ... 
raise badness + + >>> store.checkCurrentSerialInTransaction = checkCurrentSerialInTransaction + >>> store.badness = None + >>> store.tpc_vote = tpc_vote + + It will still be propagated: + + >>> _ = str(conn.root.a) # do read + >>> conn.readCurrent(conn.root.a) + >>> conn.root.b.x = +1 + >>> transaction.commit() + Traceback (most recent call last): + ... + ReadConflictError: database read conflict error (oid 0x01) + + >>> transaction.abort() + + The conflict error will cause the affected object to be invalidated: + + >>> conn.root.a._p_changed + + Read checks don't leak across transactions: + + >>> conn.readCurrent(conn.root.a) + >>> transaction.commit() + >>> conn.root.b.x = +1 + >>> transaction.commit() + + Read checks to work across savepoints. + + >>> conn.readCurrent(conn.root.a) + >>> conn.root.b.x = +1 + >>> _ = transaction.savepoint() + >>> transaction.commit() + Traceback (most recent call last): + ... + ReadConflictError: database read conflict error (oid 0x01) + + >>> transaction.abort() + + >>> conn.readCurrent(conn.root.a) + >>> _ = transaction.savepoint() + >>> conn.root.b.x = +1 + >>> transaction.commit() + Traceback (most recent call last): + ... + ReadConflictError: database read conflict error (oid 0x01) + + >>> transaction.abort() + + """ + +def doctest_cache_management_of_subconnections(): + """Make that cache management works for subconnections. + + When we use multi-databases, we open a connection in one database and + access connections to other databases through it. This test verifies + that cache management is applied to all of the connections. + + Set up a multi-database: + + >>> db1 = ZODB.DB(None) + >>> db2 = ZODB.DB(None, databases=db1.databases, database_name='2', + ... cache_size=10) + >>> conn1 = db1.open() + >>> conn2 = conn1.get_connection('2') + + Populate it with some data, more than will fit in the cache: + + >>> for i in range(100): + ... 
conn2.root()[i] = conn2.root().__class__() + + Upon commit, the cache is reduced to the cache size: + + >>> transaction.commit() + >>> conn2._cache.cache_non_ghost_count + 10 + + Fill it back up: + + >>> for i in range(100): + ... _ = str(conn2.root()[i]) + >>> conn2._cache.cache_non_ghost_count + 101 + + Doing cache GC on the primary also does it on the secondary: + + >>> conn1.cacheGC() + >>> conn2._cache.cache_non_ghost_count + 10 + + Ditto for cache minimize: + + >>> conn1.cacheMinimize() + >>> conn2._cache.cache_non_ghost_count + 0 + + + Fill it back up: + + >>> for i in range(100): + ... _ = str(conn2.root()[i]) + >>> conn2._cache.cache_non_ghost_count + 101 + + GC is done on reopen: + + >>> conn1.close() + >>> db1.open() is conn1 + True + >>> conn2 is conn1.get_connection('2') + True + + >>> conn2._cache.cache_non_ghost_count + 10 + + """ + +class C_invalidations_of_new_objects_work_after_savepoint(Persistent): + def __init__(self): + self.settings = 1 + + def _p_invalidate(self): + print('INVALIDATE', self.settings) + Persistent._p_invalidate(self) + print(self.settings) # POSKeyError here + +def doctest_abort_of_savepoint_creating_new_objects_w_exotic_invalidate_doesnt_break(): + r""" + Before, the following would fail with a POSKeyError, which was + somewhat surprising, in a very edgy sort of way. 
:) + + Really, when an object add is aborted, the object should be "removed" from + the db and its invalidation method shouldn't even be called: + + >>> conn = ZODB.connection(None) + >>> conn.root.x = x = C_invalidations_of_new_objects_work_after_savepoint() + >>> _ = transaction.savepoint() + >>> x._p_oid + '\x00\x00\x00\x00\x00\x00\x00\x01' + + >>> x._p_jar is conn + True + + >>> transaction.abort() + + After the abort, the oid and jar are None: + + >>> x._p_oid + >>> x._p_jar + + """ + +class Clp9460655(Persistent): + def __init__(self, word, id): + super(Clp9460655, self).__init__() + self.id = id + self._word = word + +def doctest_lp9460655(): + r""" + >>> conn = ZODB.connection(None) + >>> root = conn.root() + >>> Word = Clp9460655 + + >>> from BTrees.OOBTree import OOBTree + >>> data = root['data'] = OOBTree() + + >>> commonWords = [] + >>> count = "0" + >>> for x in ('hello', 'world', 'how', 'are', 'you'): + ... commonWords.append(Word(x, count)) + ... count = str(int(count) + 1) + + >>> sv = transaction.savepoint() + >>> for word in commonWords: + ... sv2 = transaction.savepoint() + ... data[word.id] = word + + >>> sv.rollback() + >>> print(commonWords[1].id) # raises POSKeyError + 1 + + """ + +def doctest_lp615758_transaction_abort_Incomplete_cleanup_for_new_objects(): + r""" + + As the following doctest demonstrates, "abort" forgets to + reset "_p_changed" for new (i.e. "added") objects. + + >>> class P(Persistent): pass + ... 
+ >>> c = ZODB.connection(None) + >>> obj = P() + >>> c.add(obj) + >>> obj.x = 1 + >>> obj._p_changed + True + >>> transaction.abort() + >>> obj._p_changed + False + + >>> c.close() + """ + +class Clp485456_setattr_in_getstate_doesnt_cause_multiple_stores(Persistent): + + def __getstate__(self): + self.got = 1 + return self.__dict__.copy() + +def doctest_lp485456_setattr_in_setstate_doesnt_cause_multiple_stores(): + r""" + >>> C = Clp485456_setattr_in_getstate_doesnt_cause_multiple_stores + >>> conn = ZODB.connection(None) + >>> oldstore = conn._storage.store + >>> def store(oid, *args): + ... six.print_('storing', repr(oid)) + ... return oldstore(oid, *args) + >>> conn._storage.store = store + + When we commit a change, we only get a single store call + + >>> conn.root.x = C() + >>> transaction.commit() + storing '\x00\x00\x00\x00\x00\x00\x00\x00' + storing '\x00\x00\x00\x00\x00\x00\x00\x01' + + Retry with the new object registered before its referrer. + + >>> z = C() + >>> conn.add(z) + >>> conn.root.z = z + >>> transaction.commit() + storing '\x00\x00\x00\x00\x00\x00\x00\x02' + storing '\x00\x00\x00\x00\x00\x00\x00\x00' + + We still see updates: + + >>> conn.root.x.y = 1 + >>> transaction.commit() + storing '\x00\x00\x00\x00\x00\x00\x00\x01' + + Not not non-updates: + + >>> transaction.commit() + + Let's try some combinations with savepoints: + + >>> conn.root.n = 0 + >>> _ = transaction.savepoint() + + >>> oldspstore = conn._storage.store + >>> def store(oid, *args): + ... six.print_('savepoint storing', repr(oid)) + ... 
return oldspstore(oid, *args) + >>> conn._storage.store = store + + >>> conn.root.y = C() + >>> _ = transaction.savepoint() + savepoint storing '\x00\x00\x00\x00\x00\x00\x00\x00' + savepoint storing '\x00\x00\x00\x00\x00\x00\x00\x03' + + >>> conn.root.y.x = 1 + >>> _ = transaction.savepoint() + savepoint storing '\x00\x00\x00\x00\x00\x00\x00\x03' + + >>> transaction.commit() + storing '\x00\x00\x00\x00\x00\x00\x00\x00' + storing '\x00\x00\x00\x00\x00\x00\x00\x03' + + >>> conn.close() + """ + + +class _PlayPersistent(Persistent): + def setValueWithSize(self, size=0): self.value = size*' ' + __init__ = setValueWithSize + +class EstimatedSizeTests(ZODB.tests.util.TestCase): + """check that size estimations are handled correctly.""" + + def setUp(self): + ZODB.tests.util.TestCase.setUp(self) + self.db = db = databaseFromString("\n\n") + self.conn = c = db.open() + self.obj = obj = _PlayPersistent() + c.root()['obj'] = obj + transaction.commit() + + def test_size_set_on_write_commit(self): + obj, cache = self.obj, self.conn._cache + # we have just written "obj". Its size should not be zero + size, cache_size = obj._p_estimated_size, cache.total_estimated_size + self.assertTrue(size > 0) + self.assertTrue(cache_size > size) + # increase the size, write again and check that the size changed + obj.setValueWithSize(1000) + transaction.commit() + new_size = obj._p_estimated_size + self.assertTrue(new_size > size) + self.assertEqual(cache.total_estimated_size, + cache_size + new_size - size) + + def test_size_set_on_write_savepoint(self): + obj, cache = self.obj, self.conn._cache + # we have just written "obj". 
Its size should not be zero + size, cache_size = obj._p_estimated_size, cache.total_estimated_size + # increase the size, write again and check that the size changed + obj.setValueWithSize(1000) + transaction.savepoint() + new_size = obj._p_estimated_size + self.assertTrue(new_size > size) + self.assertEqual(cache.total_estimated_size, + cache_size + new_size - size) + + def test_size_set_on_load(self): + c = self.db.open() # new connection + obj = c.root()['obj'] + # the object is still a ghost and '_p_estimated_size' not yet set + # access to unghost + cache = c._cache + cache_size = cache.total_estimated_size + obj.value + size = obj._p_estimated_size + self.assertTrue(size > 0) + self.assertEqual(cache.total_estimated_size, cache_size + size) + # we test here as well that the deactivation works reduced the cache + # size + obj._p_deactivate() + self.assertEqual(cache.total_estimated_size, cache_size) + + def test_configuration(self): + # verify defaults .... + expected = 0 + # ... on db + db = self.db + self.assertEqual(db.getCacheSizeBytes(), expected) + self.assertEqual(db.getHistoricalCacheSizeBytes(), expected) + # ... on connection + conn = self.conn + self.assertEqual(conn._cache.cache_size_bytes, expected) + # verify explicit setting ... + expected = 10000 + # ... on db + db = databaseFromString("\n" + " cache-size-bytes %d\n" + " historical-cache-size-bytes %d\n" + " \n" + "" + % (expected, expected+1) + ) + self.assertEqual(db.getCacheSizeBytes(), expected) + self.assertEqual(db.getHistoricalCacheSizeBytes(), expected+1) + # ... 
on connectionB + conn = db.open() + self.assertEqual(conn._cache.cache_size_bytes, expected) + # test huge (larger than 4 byte) size limit (if possible) + if sys.maxsize > (0x1 << 33): + db = databaseFromString("\n" + " cache-size-bytes 8GB\n" + " \n" + "" + ) + self.assertEqual(db.getCacheSizeBytes(), 0x1 << 33) + + + def test_cache_garbage_collection(self): + db = self.db + # activate size based cache garbage collection + db.setCacheSizeBytes(1) + conn = self.conn + cache = conn._cache + # verify the change worked as expected + self.assertEqual(cache.cache_size_bytes, 1) + # verify our entrance assumption is fulfilled + self.assertTrue(cache.total_estimated_size > 1) + conn.cacheGC() + self.assertTrue(cache.total_estimated_size <= 1) + # sanity check + self.assertTrue(cache.total_estimated_size >= 0) + + def test_cache_garbage_collection_shrinking_object(self): + db = self.db + # activate size based cache garbage collection + db.setCacheSizeBytes(1000) + obj, conn, cache = self.obj, self.conn, self.conn._cache + # verify the change worked as expected + self.assertEqual(cache.cache_size_bytes, 1000) + # verify our entrance assumption is fulfilled + self.assertTrue(cache.total_estimated_size > 1) + # give the objects some size + obj.setValueWithSize(500) + transaction.savepoint() + self.assertTrue(cache.total_estimated_size > 500) + # make the object smaller + obj.setValueWithSize(100) + transaction.savepoint() + # make sure there was no overflow + self.assertTrue(cache.total_estimated_size != 0) + # the size is not larger than the allowed maximum + self.assertTrue(cache.total_estimated_size <= 1000) + +# ---- stubs + +class StubObject(Persistent): + pass + +class ErrorOnGetstateException(Exception): + pass + +class ErrorOnGetstateObject(Persistent): + + def __getstate__(self): + raise ErrorOnGetstateException + +class ModifyOnGetStateObject(Persistent): + + def __init__(self, p): + self._v_p = p + + def __getstate__(self): + self._p_jar.add(self._v_p) + self.p = 
self._v_p + return Persistent.__getstate__(self) + + +class StubStorage(object): + """Very simple in-memory storage that does *just* enough to support tests. + + Only one concurrent transaction is supported. + Voting is not supported. + + Inspect self._stored and self._finished to see how the storage has been + used during a unit test. Whenever an object is stored in the store() + method, its oid is appended to self._stored. When a transaction is + finished, the oids that have been stored during the transaction are + appended to self._finished. + """ + + # internal + _oid = 1 + _transaction = None + + def __init__(self): + # internal + self._stored = [] + self._finished = [] + self._data = {} + self._transdata = {} + self._transstored = [] + + def new_oid(self): + oid = str(self._oid) + self._oid += 1 + return str(oid).encode() + + def sortKey(self): + return 'StubStorage sortKey' + + def tpc_begin(self, transaction): + if transaction is None: + raise TypeError('transaction may not be None') + elif self._transaction is None: + self._transaction = transaction + elif self._transaction != transaction: + raise RuntimeError( + 'StubStorage uses only one transaction at a time') + + def tpc_abort(self, transaction): + if transaction is None: + raise TypeError('transaction may not be None') + elif self._transaction != transaction: + raise RuntimeError( + 'StubStorage uses only one transaction at a time') + del self._transaction + self._transdata.clear() + + def tpc_finish(self, transaction, callback): + if transaction is None: + raise TypeError('transaction may not be None') + elif self._transaction != transaction: + raise RuntimeError( + 'StubStorage uses only one transaction at a time') + self._finished.extend(self._transstored) + self._data.update(self._transdata) + callback(transaction) + del self._transaction + self._transdata.clear() + self._transstored = [] + return z64 + + def load(self, oid, version=''): + if version != '': + raise TypeError('StubStorage does not 
support versions.') + return self._data[oid] + + def loadBefore(self, oid, tid): + return self._data[oid] + (None, ) + + def store(self, oid, serial, p, version, transaction): + if version != '': + raise TypeError('StubStorage does not support versions.') + if transaction is None: + raise TypeError('transaction may not be None') + elif self._transaction != transaction: + raise RuntimeError( + 'StubStorage uses only one transaction at a time') + self._stored.append(oid) + self._transstored.append(oid) + self._transdata[oid] = (p, serial) + + def lastTransaction(self): + return z64 + + +class TestConnection(unittest.TestCase): + + def test_connection_interface(self): + from ZODB.interfaces import IConnection + db = databaseFromString("\n\n") + cn = db.open() + verifyObject(IConnection, cn) + db.close() + + def test_storage_afterCompletionCalled(self): + db = ZODB.DB(None) + conn = db.open() + data = [] + conn._storage.afterCompletion = lambda : data.append(None) + conn.transaction_manager.commit() + self.assertEqual(len(data), 1) + conn.close() + self.assertEqual(len(data), 2) + db.close() + + def test_explicit_transactions_no_newTransactuon_on_afterCompletion(self): + syncs = [] + from .MVCCMappingStorage import MVCCMappingStorage + storage = MVCCMappingStorage() + + new_instance = storage.new_instance + def new_instance2(): + inst = new_instance() + sync = inst.sync + def sync2(*args): + sync() + syncs.append(1) + inst.sync = sync2 + return inst + + storage.new_instance = new_instance2 + + db = ZODB.DB(storage) + del syncs[:] # Need to do this to clear effect of getting the + # root object + + # We don't want to depend on latest transaction package, so + # just set attr for test: + tm = transaction.TransactionManager() + tm.explicit = True + + conn = db.open(tm) + self.assertEqual(len(syncs), 0) + conn.transaction_manager.begin() + self.assertEqual(len(syncs), 1) + conn.transaction_manager.commit() + self.assertEqual(len(syncs), 1) + 
conn.transaction_manager.begin() + self.assertEqual(len(syncs), 2) + conn.transaction_manager.abort() + self.assertEqual(len(syncs), 2) + conn.close() + self.assertEqual(len(syncs), 2) + + # For reference, in non-explicit mode: + conn = db.open() + self.assertEqual(len(syncs), 3) + conn._storage.sync = syncs.append + conn.transaction_manager.begin() + self.assertEqual(len(syncs), 4) + conn.transaction_manager.abort() + self.assertEqual(len(syncs), 5) + conn.close() + + db.close() + +class StubDatabase(object): + + def __init__(self): + self.storage = StubStorage() + self._mvcc_storage = mvccadapter.MVCCAdapter(self.storage) + self.new_oid = self.storage.new_oid + + classFactory = None + database_name = 'stubdatabase' + databases = {'stubdatabase': database_name} + + def invalidate(self, transaction, dict_with_oid_keys, connection): + pass + + large_record_size = 1<<30 + +def test_suite(): + s = unittest.makeSuite(ConnectionDotAdd) + s.addTest(unittest.makeSuite(SetstateErrorLoggingTests)) + s.addTest(doctest.DocTestSuite(checker=checker)) + s.addTest(unittest.makeSuite(TestConnection)) + s.addTest(unittest.makeSuite(EstimatedSizeTests)) + return s diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/tests/testConnectionSavepoint.py b/thesisenv/lib/python3.6/site-packages/ZODB/tests/testConnectionSavepoint.py new file mode 100644 index 0000000..a82651d --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/tests/testConnectionSavepoint.py @@ -0,0 +1,214 @@ +############################################################################## +# +# Copyright (c) 2004 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +import doctest +import persistent.mapping +import re +import transaction +import unittest +import ZODB.tests.util +from zope.testing import renormalizing + +checker = renormalizing.RENormalizing([ + # Python 3 bytes add a "b". + (re.compile("b('.*?')"), r"\1"), + # Python 3 adds module name to exceptions. + (re.compile("ZODB.POSException.ConnectionStateError"), + r"ConnectionStateError"), + ]) + +def testAddingThenModifyThenAbort(): + """\ +We ran into a problem in which abort failed after adding an object in +a savepoint and then modifying the object. The problem was that, on +commit, the savepoint was aborted before the modifications were +aborted. Because the object was added in the savepoint, its _p_oid +and _p_jar were cleared when the savepoint was aborted. The object +was in the registered-object list. There's an invariant for this +list that states that all objects in the list should have an oid and +(correct) jar. + +The fix was to abort work done after the savepoint before aborting the +savepoint. + + >>> import ZODB.tests.util + >>> db = ZODB.tests.util.DB() + >>> connection = db.open() + >>> root = connection.root() + + >>> ob = persistent.mapping.PersistentMapping() + >>> root['ob'] = ob + >>> sp = transaction.savepoint() + >>> ob.x = 1 + >>> transaction.abort() +""" + +def testModifyThenSavePointThenModifySomeMoreThenCommit(): + """\ +We got conflict errors when we committed after we modified an object +in a savepoint, and then modified it some more after the last +savepoint. + +The problem was that we were effectively commiting the object twice -- +when commiting the current data and when committing the savepoint. 
+The fix was to first make a new savepoint to move new changes to the +savepoint storage and *then* to commit the savepoint storage. + + >>> import ZODB.tests.util + >>> db = ZODB.tests.util.DB() + >>> connection = db.open() + >>> root = connection.root() + >>> sp = transaction.savepoint() + >>> root['a'] = 1 + >>> sp = transaction.savepoint() + >>> root['a'] = 2 + >>> transaction.commit() +""" + +def testCantCloseConnectionWithActiveSavepoint(): + """ + >>> import ZODB.tests.util + >>> db = ZODB.tests.util.DB() + >>> connection = db.open() + >>> root = connection.root() + >>> root['a'] = 1 + >>> sp = transaction.savepoint() + >>> connection.close() + Traceback (most recent call last): + ... + ConnectionStateError: Cannot close a connection joined to a transaction + + >>> db.close() + """ + +def testSavepointDoesCacheGC(): + """\ +Although the interface doesn't guarantee this internal detail, making a +savepoint should do incremental gc on connection memory caches. Indeed, +one traditional use for savepoints is simply to free memory space midstream +during a long transaction. Before ZODB 3.4.2, making a savepoint failed +to trigger cache gc, and this test verifies that it now does. + + >>> import gc + >>> import ZODB + >>> from ZODB.tests.MinPO import MinPO + >>> from ZODB.MappingStorage import MappingStorage + >>> import transaction + >>> CACHESIZE = 5 # something tiny + >>> LOOPCOUNT = CACHESIZE * 10 + >>> st = MappingStorage("Test") + >>> db = ZODB.DB(st, cache_size=CACHESIZE) + >>> cn = db.open() + >>> rt = cn.root() + +Now attach substantially more than CACHESIZE persistent objects to the root: + + >>> for i in range(LOOPCOUNT): + ... rt[i] = MinPO(i) + >>> transaction.commit() + +Now modify all of them; the cache should contain LOOPCOUNT MinPO objects +then, + 1 for the root object: + + >>> for i in range(LOOPCOUNT): + ... obj = rt[i] + ... 
obj.value = -i + >>> len(cn._cache) == LOOPCOUNT + 1 + True + +Making a savepoint at this time used to leave the cache holding the same +number of objects. Make sure the cache shrinks now instead. + + >>> dummy = transaction.savepoint() + +Jython needs a GC, and needs to actually access the cache data to be +sure the size is updated (it uses "eventually consistent" implementations for +its weak dictionaries): + + >>> _ = gc.collect() + >>> _ = getattr(cn._cache, 'data', {}).values() + >>> _ = getattr(cn._cache, 'data', {}).keys() + >>> len(cn._cache) <= CACHESIZE + 1 + True + +Verify all the values are as expected: + + >>> failures = [] + >>> for i in range(LOOPCOUNT): + ... obj = rt[i] + ... if obj.value != -i: + ... failures.append(obj) + >>> failures + [] + + >>> transaction.abort() + >>> db.close() +""" + +def testIsReadonly(): + """\ +The connection isReadonly method relies on the _storage to have an isReadOnly. +We simply rely on the underlying storage method. + + >>> import ZODB.tests.util + >>> db = ZODB.tests.util.DB() + >>> connection = db.open() + >>> root = connection.root() + >>> root['a'] = 1 + >>> sp = transaction.savepoint() + >>> connection.isReadOnly() + False +""" + +class SelfActivatingObject(persistent.Persistent): + + def _p_invalidate(self): + super(SelfActivatingObject, self)._p_invalidate() + self._p_activate() + +def testInvalidateAfterRollback(): + """\ +The rollback used to invalidate objects before resetting the TmpStore. +This caused problems for custom _p_invalidate methods that would load +the wrong state. 
+ + >>> import ZODB.tests.util + >>> db = ZODB.tests.util.DB() + >>> connection = db.open() + >>> root = connection.root() + + >>> root['p'] = p = SelfActivatingObject() + >>> transaction.commit() + >>> p.foo = 1 + >>> sp = transaction.savepoint() + >>> p.foo = 2 + >>> sp2 = transaction.savepoint() + >>> sp.rollback() + >>> p.foo # This used to wrongly return 2 + 1 + """ + + +def tearDown(test): + transaction.abort() + +def test_suite(): + return unittest.TestSuite(( + doctest.DocFileSuite( + 'testConnectionSavepoint.txt', + tearDown=tearDown, checker=checker), + doctest.DocTestSuite(tearDown=tearDown, checker=checker), + )) + +if __name__ == '__main__': + unittest.main(defaultTest='test_suite') diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/tests/testConnectionSavepoint.txt b/thesisenv/lib/python3.6/site-packages/ZODB/tests/testConnectionSavepoint.txt new file mode 100644 index 0000000..56125b6 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/tests/testConnectionSavepoint.txt @@ -0,0 +1,196 @@ +========== +Savepoints +========== + +Savepoints provide a way to save to disk intermediate work done during a +transaction allowing: + +- partial transaction (subtransaction) rollback (abort) + +- state of saved objects to be freed, freeing on-line memory for other + uses + +Savepoints make it possible to write atomic subroutines that don't make +top-level transaction commitments. + + +Applications +------------ + +To demonstrate how savepoints work with transactions, we'll show an example. 
+ + >>> import ZODB.tests.util + >>> db = ZODB.tests.util.DB() + >>> connection = db.open() + >>> root = connection.root() + >>> root['name'] = 'bob' + +As with other data managers, we can commit changes: + + >>> import transaction + >>> transaction.commit() + >>> root['name'] + 'bob' + +and abort changes: + + >>> root['name'] = 'sally' + >>> root['name'] + 'sally' + >>> transaction.abort() + >>> root['name'] + 'bob' + +Now, let's look at an application that manages funds for people. It allows +deposits and debits to be entered for multiple people. It accepts a sequence +of entries and generates a sequence of status messages. For each entry, it +applies the change and then validates the user's account. If the user's +account is invalid, we roll back the change for that entry. The success or +failure of an entry is indicated in the output status. First we'll initialize +some accounts: + + >>> root['bob-balance'] = 0.0 + >>> root['bob-credit'] = 0.0 + >>> root['sally-balance'] = 0.0 + >>> root['sally-credit'] = 100.0 + >>> transaction.commit() + +Now, we'll define a validation function to validate an account: + + >>> def validate_account(name): + ... if root[name+'-balance'] + root[name+'-credit'] < 0: + ... raise ValueError('Overdrawn', name) + +And a function to apply entries. If the function fails in some unexpected +way, it rolls back all of its changes and prints the error: + + >>> import six + >>> def apply_entries(entries): + ... savepoint = transaction.savepoint() + ... try: + ... for name, amount in entries: + ... entry_savepoint = transaction.savepoint() + ... try: + ... root[name+'-balance'] += amount + ... validate_account(name) + ... except ValueError as error: + ... entry_savepoint.rollback() + ... six.print_('Error', str(error)) + ... else: + ... six.print_('Updated', name) + ... except Exception as error: + ... savepoint.rollback() + ... six.print_('Unexpected exception', error) + +Now let's try applying some entries: + + >>> apply_entries([ + ... 
('bob', 10.0), + ... ('sally', 10.0), + ... ('bob', 20.0), + ... ('sally', 10.0), + ... ('bob', -100.0), + ... ('sally', -100.0), + ... ]) + Updated bob + Updated sally + Updated bob + Updated sally + Error ('Overdrawn', 'bob') + Updated sally + + >>> root['bob-balance'] + 30.0 + + >>> root['sally-balance'] + -80.0 + +If we provide entries that cause an unexpected error: + + >>> apply_entries([ + ... ('bob', 10.0), + ... ('sally', 10.0), + ... ('bob', '20.0'), + ... ('sally', 10.0), + ... ]) #doctest: +ELLIPSIS + Updated bob + Updated sally + Unexpected exception unsupported operand type(s) for +...: 'float' and 'str' + +Because the apply_entries used a savepoint for the entire function, it was +able to rollback the partial changes without rolling back changes made in the +previous call to ``apply_entries``: + + >>> root['bob-balance'] + 30.0 + + >>> root['sally-balance'] + -80.0 + +If we now abort the outer transactions, the earlier changes will go +away: + + >>> transaction.abort() + + >>> root['bob-balance'] + 0.0 + + >>> root['sally-balance'] + 0.0 + + +Savepoint invalidation +---------------------- + +A savepoint can be used any number of times: + + >>> root['bob-balance'] = 100.0 + >>> root['bob-balance'] + 100.0 + >>> savepoint = transaction.savepoint() + + >>> root['bob-balance'] = 200.0 + >>> root['bob-balance'] + 200.0 + >>> savepoint.rollback() + >>> root['bob-balance'] + 100.0 + + >>> savepoint.rollback() # redundant, but should be harmless + >>> root['bob-balance'] + 100.0 + + >>> root['bob-balance'] = 300.0 + >>> root['bob-balance'] + 300.0 + >>> savepoint.rollback() + >>> root['bob-balance'] + 100.0 + +However, using a savepoint invalidates any savepoints that come after it: + + >>> root['bob-balance'] = 200.0 + >>> root['bob-balance'] + 200.0 + >>> savepoint1 = transaction.savepoint() + + >>> root['bob-balance'] = 300.0 + >>> root['bob-balance'] + 300.0 + >>> savepoint2 = transaction.savepoint() + + >>> savepoint.rollback() + >>> root['bob-balance'] 
+ 100.0 + + >>> savepoint2.rollback() # doctest: +IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + ... + InvalidSavepointRollbackError: invalidated by a later savepoint + + >>> savepoint1.rollback() # doctest: +IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + ... + InvalidSavepointRollbackError: invalidated by a later savepoint + + >>> transaction.abort() diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/tests/testDB.py b/thesisenv/lib/python3.6/site-packages/ZODB/tests/testDB.py new file mode 100644 index 0000000..f19a6fc --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/tests/testDB.py @@ -0,0 +1,431 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +from six import PY2 + +from ZODB.tests.MinPO import MinPO +import doctest +import os +import re +import sys +import time +import transaction +import unittest +import ZODB +import ZODB.tests.util +from zope.testing import renormalizing + +checker = renormalizing.RENormalizing([ + # Python 3 bytes add a "b". + (re.compile("b('.*?')"), + r"\1"), + # Python 3 adds module name to exceptions. + (re.compile("ZODB.POSException.ReadConflictError"), r"ReadConflictError"), + ]) + + +# Return total number of connections across all pools in a db._pools. 
+def nconn(pools): + return sum([len(pool.all) for pool in pools.values()]) + +class DBTests(ZODB.tests.util.TestCase): + + def setUp(self): + ZODB.tests.util.TestCase.setUp(self) + self.db = ZODB.DB('test.fs') + + def tearDown(self): + self.db.close() + ZODB.tests.util.TestCase.tearDown(self) + + def dowork(self): + c = self.db.open() + r = c.root() + o = r[time.time()] = MinPO(0) + transaction.commit() + for i in range(25): + o.value = MinPO(i) + transaction.commit() + o = o.value + serial = o._p_serial + root_serial = r._p_serial + c.close() + return serial, root_serial + + # make sure the basic methods are callable + + def testSets(self): + self.db.setCacheSize(15) + self.db.setHistoricalCacheSize(15) + + def test_references(self): + + # TODO: For now test that we're using referencesf. We really should + # have tests of referencesf. + + import ZODB.serialize + self.assertTrue(self.db.references is ZODB.serialize.referencesf) + + def test_history_and_undo_meta_data_text_handlinf(self): + db = self.db + conn = db.open() + for i in range(3): + with conn.transaction_manager as t: + t.note(u'work %s' % i) + t.setUser(u'user%s' % i) + conn.root()[i] = 42 + + conn.close() + + from ZODB.utils import z64 + + def check(info, text): + for i, h in enumerate(reversed(info)): + for (name, expect) in (('description', 'work %s'), + ('user_name', '/ user%s')): + expect = expect % i + if not text: + expect = expect.encode('ascii') + self.assertEqual(h[name], expect) + + if PY2: + expect = unicode if text else str + for name in 'description', 'user_name': + self.assertTrue(isinstance(h[name], expect)) + + check(db.storage.history(z64, 3), False) + check(db.storage.undoLog(0, 3) , False) + check(db.storage.undoInfo(0, 3) , False) + check(db.history(z64, 3), True) + check(db.undoLog(0, 3) , True) + check(db.undoInfo(0, 3) , True) + +def test_invalidateCache(): + """The invalidateCache method invalidates a connection caches for all of + the connections attached to a database:: + + 
>>> from ZODB.tests.util import DB + >>> import transaction + >>> db = DB() + >>> mvcc_storage = db._mvcc_storage + >>> tm1 = transaction.TransactionManager() + >>> c1 = db.open(transaction_manager=tm1) + >>> c1.root()['a'] = MinPO(1) + >>> tm1.commit() + >>> tm2 = transaction.TransactionManager() + >>> c2 = db.open(transaction_manager=tm2) + >>> c2.root()['a'].value + 1 + >>> tm3 = transaction.TransactionManager() + >>> c3 = db.open(transaction_manager=tm3) + >>> c3.root()['a'].value + 1 + >>> c3.close() + + >>> mvcc_storage.invalidateCache() + >>> c1.root.a._p_changed + 0 + >>> c1.sync() + >>> c1.root.a._p_changed + >>> c2.root.a._p_changed + 0 + >>> c2.sync() + >>> c2.root.a._p_changed + >>> c3 is db.open(transaction_manager=tm3) + True + >>> c3.root.a._p_changed + + >>> c1.root()['a'].value + 1 + >>> c2.root()['a'].value + 1 + >>> c3.root()['a'].value + 1 + + >>> db.close() + """ + +def connectionDebugInfo(): + r"""DB.connectionDebugInfo provides information about connections. + + >>> import time + >>> now = 1228423244.1 + >>> def faux_time(): + ... global now + ... now += .1 + ... return now + >>> real_time = time.time + >>> if isinstance(time, type): + ... time.time = staticmethod(faux_time) # Jython + ... else: + ... 
time.time = faux_time + + >>> from ZODB.tests.util import DB + >>> import transaction + >>> db = DB() + >>> c1 = db.open() + >>> c1.setDebugInfo('test info') + >>> c1.root()['a'] = MinPO(1) + >>> transaction.commit() + >>> c2 = db.open() + >>> _ = c1.root()['a'] + >>> c2.close() + + >>> c3 = db.open(before=c1.root()._p_serial) + + >>> info = db.connectionDebugInfo() + >>> info = sorted(info, key=lambda i: str(i['opened'])) + >>> before = [x['before'] for x in info] + >>> opened = [x['opened'] for x in info] + >>> infos = [x['info'] for x in info] + >>> before == [None, c1.root()._p_serial, None] + True + >>> opened + ['2008-12-04T20:40:44Z (1.30s)', '2008-12-04T20:40:46Z (0.10s)', None] + >>> infos + ['test info (2)', ' (0)', ' (0)'] + + >>> time.time = real_time + + """ + +def passing_a_file_name_to_DB(): + """You can pass a file-storage file name to DB. + + (Also note that we can access DB in ZODB.) + + >>> db = ZODB.DB('data.fs') + >>> db.storage # doctest: +ELLIPSIS + >> os.path.exists('data.fs') + True + + >>> db.close() + """ + +def passing_None_to_DB(): + """You can pass None DB to get a MappingStorage. + + (Also note that we can access DB in ZODB.) + + >>> db = ZODB.DB(None) + >>> db.storage # doctest: +ELLIPSIS + >> db.close() + """ + +def open_convenience(): + """Often, we just want to open a single connection. + + >>> conn = ZODB.connection('data.fs') + >>> conn.root() + {} + + >>> conn.root()['x'] = 1 + >>> transaction.commit() + >>> conn.close() + + Let's make sure the database was cloased when we closed the + connection, and that the data is there. + + >>> db = ZODB.DB('data.fs') + >>> conn = db.open() + >>> conn.root() + {'x': 1} + >>> db.close() + + + We can pass storage-specific arguments if they don't conflict with + DB arguments. 
+ + >>> conn = ZODB.connection('data.fs', blob_dir='blobs') + >>> conn.root()['b'] = ZODB.blob.Blob(b'test') + >>> transaction.commit() + >>> conn.close() + + >>> db = ZODB.DB('data.fs', blob_dir='blobs') + >>> conn = db.open() + >>> with conn.root()['b'].open() as fp: fp.read() + 'test' + >>> db.close() + + """ + +def db_with_transaction(): + """Using databases with with + + The transaction method returns a context manager that when entered + starts a transaction with a private transaction manager. To + illustrate this, we start a trasnaction using a regular connection + and see that it isn't automatically committed or aborted as we use + the transaction context manager. + + >>> db = ZODB.tests.util.DB() + >>> conn = db.open() + >>> conn.root()['x'] = conn.root().__class__() + >>> transaction.commit() + >>> conn.root()['x']['x'] = 1 + + >>> with db.transaction() as conn2: + ... conn2.root()['y'] = 1 + + >>> conn2.opened + +Now, we'll open a 3rd connection a verify that + + >>> conn3 = db.open() + >>> conn3.root()['x'] + {} + >>> conn3.root()['y'] + 1 + >>> conn3.close() + +Let's try again, but this time, we'll have an exception: + + >>> with db.transaction() as conn2: + ... conn2.root()['y'] = 2 + ... XXX #doctest: +IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + ... + NameError: name 'XXX' is not defined + + >>> conn2.opened + + >>> conn3 = db.open() + >>> conn3.root()['x'] + {} + >>> conn3.root()['y'] + 1 + >>> conn3.close() + + >>> transaction.commit() + + >>> conn3 = db.open() + >>> conn3.root()['x'] + {'x': 1} + + + >>> db.close() + """ + +def connection_allows_empty_version_for_idiots(): + r""" + >>> db = ZODB.DB('t.fs') + >>> c = ZODB.tests.util.assert_deprecated( + ... (lambda : db.open('')), + ... 'A version string was passed to open') + >>> c.root() + {} + >>> db.close() + """ + +def warn_when_data_records_are_big(): + """ +When data records are large, a warning is issued to try to prevent new +users from shooting themselves in the foot. 
+ + >>> db = ZODB.DB('t.fs', create=True) + >>> conn = db.open() + >>> conn.root.x = 'x'*(1<<24) + >>> ZODB.tests.util.assert_warning(UserWarning, transaction.commit, + ... "object you're saving is large.") + >>> db.close() + +The large_record_size option can be used to control the record size: + + >>> db = ZODB.DB('t.fs', create=True, large_record_size=999) + >>> conn = db.open() + >>> conn.root.x = 'x' + >>> transaction.commit() + + >>> conn.root.x = 'x'*999 + >>> ZODB.tests.util.assert_warning(UserWarning, transaction.commit, + ... "object you're saving is large.") + + >>> db.close() + +We can also specify it using a configuration option: + + >>> import ZODB.config + >>> db = ZODB.config.databaseFromString(''' + ... + ... large-record-size 1MB + ... + ... path t.fs + ... create true + ... + ... + ... ''') + >>> conn = db.open() + >>> conn.root.x = 'x' + >>> transaction.commit() + + >>> conn.root.x = 'x'*(1<<20) + >>> ZODB.tests.util.assert_warning(UserWarning, transaction.commit, + ... "object you're saving is large.") + + >>> db.close() + """ # ' + +def minimally_test_connection_timeout(): + """There's a mechanism to discard old connections. + + Make sure it doesn't error. 
:) + + >>> db = ZODB.DB(None, pool_timeout=.01) + >>> c1 = db.open() + >>> c1.cacheMinimize() # See fix84.rst + >>> c2 = db.open() + >>> c1.close() + >>> c2.close() + >>> time.sleep(.02) + >>> db.open() is c2 + True + + >>> db.pool.available + [] + + """ + +def cleanup_on_close(): + """Verify that various references are cleared on close + + >>> db = ZODB.DB(None) + + >>> conn = db.open() + >>> conn.root.x = 'x' + >>> transaction.commit() + >>> conn.close() + + >>> historical_conn = db.open(at=db.lastTransaction()) + >>> historical_conn.close() + + >>> db.close() + + >>> db.databases + {} + + >>> db.pool.pop() is None + True + + >>> [pool is None for pool in db.historical_pool.pools.values()] + [] +""" + +def test_suite(): + s = unittest.makeSuite(DBTests) + s.addTest(doctest.DocTestSuite( + setUp=ZODB.tests.util.setUp, tearDown=ZODB.tests.util.tearDown, + checker=checker + )) + return s diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/tests/testDemoStorage.py b/thesisenv/lib/python3.6/site-packages/ZODB/tests/testDemoStorage.py new file mode 100644 index 0000000..25d32e2 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/tests/testDemoStorage.py @@ -0,0 +1,288 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. 
+# +############################################################################## +from ZODB.DB import DB +from ZODB.tests import ( + BasicStorage, + ConflictResolution, + HistoryStorage, + IteratorStorage, + MTStorage, + PackableStorage, + RevisionStorage, + StorageTestBase, + Synchronization, + ) + +import os +if os.environ.get('USE_ZOPE_TESTING_DOCTEST'): + from zope.testing import doctest +else: + import doctest +import random +import re +import transaction +import unittest +import ZODB.DemoStorage +import ZODB.tests.hexstorage +import ZODB.tests.util +import ZODB.utils + +from ZODB.utils import load_current + +from zope.testing import renormalizing + +class DemoStorageTests( + StorageTestBase.StorageTestBase, + BasicStorage.BasicStorage, + ConflictResolution.ConflictResolvingStorage, + HistoryStorage.HistoryStorage, + IteratorStorage.ExtendedIteratorStorage, + IteratorStorage.IteratorStorage, + MTStorage.MTStorage, + PackableStorage.PackableStorage, + RevisionStorage.RevisionStorage, + Synchronization.SynchronizedStorage, + ): + + def setUp(self): + StorageTestBase.StorageTestBase.setUp(self) + self._storage = ZODB.DemoStorage.DemoStorage() + + def checkOversizeNote(self): + # This base class test checks for the common case where a storage + # doesnt support huge transaction metadata. This storage doesnt + # have this limit, so we inhibit this test here. + pass + + def checkLoadDelegation(self): + # Minimal test of loadEX w/o version -- ironically + db = DB(self._storage) # creates object 0. :) + s2 = ZODB.DemoStorage.DemoStorage(base=self._storage) + self.assertEqual(load_current(s2, ZODB.utils.z64), + load_current(self._storage, ZODB.utils.z64)) + + def checkLengthAndBool(self): + self.assertEqual(len(self._storage), 0) + self.assertTrue(not self._storage) + db = DB(self._storage) # creates object 0. 
:) + self.assertEqual(len(self._storage), 1) + self.assertTrue(self._storage) + with db.transaction() as conn: + for i in range(10): + conn.root()[i] = conn.root().__class__() + self.assertEqual(len(self._storage), 11) + self.assertTrue(self._storage) + db.close() + + def checkLoadBeforeUndo(self): + pass # we don't support undo yet + checkUndoZombie = checkLoadBeforeUndo + + def checkBaseHistory(self): + def base_only(): + yield 11 + yield 12 + yield 13 + self._storage = self._storage.push() + self._checkHistory(base_only()) + self._storage = self._storage.pop() + def base_and_changes(): + yield 11 + yield 12 + self._storage = self._storage.push() + yield 13 + yield 14 + self._checkHistory(base_and_changes()) + self._storage = self._storage.pop() + +class DemoStorageHexTests(DemoStorageTests): + + def setUp(self): + StorageTestBase.StorageTestBase.setUp(self) + self._storage = ZODB.tests.hexstorage.HexStorage( + ZODB.DemoStorage.DemoStorage()) + +class DemoStorageWrappedBase(DemoStorageTests): + + def setUp(self): + StorageTestBase.StorageTestBase.setUp(self) + self._base = self._makeBaseStorage() + self._storage = ZODB.DemoStorage.DemoStorage(base=self._base) + + def tearDown(self): + self._base.close() + StorageTestBase.StorageTestBase.tearDown(self) + + def _makeBaseStorage(self): + raise NotImplementedError + + def checkPackOnlyOneObject(self): + pass # Wrapping demo storages don't do gc + + def checkPackWithMultiDatabaseReferences(self): + pass # we never do gc + checkPackAllRevisions = checkPackWithMultiDatabaseReferences + +class DemoStorageWrappedAroundMappingStorage(DemoStorageWrappedBase): + + def _makeBaseStorage(self): + from ZODB.MappingStorage import MappingStorage + return MappingStorage() + +class DemoStorageWrappedAroundFileStorage(DemoStorageWrappedBase): + + def _makeBaseStorage(self): + from ZODB.FileStorage import FileStorage + return FileStorage('FileStorageTests.fs') + +class DemoStorageWrappedAroundHexMappingStorage(DemoStorageWrappedBase): 
+ + def _makeBaseStorage(self): + from ZODB.MappingStorage import MappingStorage + return ZODB.tests.hexstorage.HexStorage(MappingStorage()) + + +def setUp(test): + random.seed(0) + ZODB.tests.util.setUp(test) + +def testSomeDelegation(): + r""" + >>> import six + >>> class S(object): + ... def __init__(self, name): + ... self.name = name + ... def getSize(self): + ... six.print_(self.name, 'size') + ... def close(self): + ... six.print_(self.name, 'closed') + ... sortKey = __len__ = getTid = None + ... tpc_finish = tpc_vote = tpc_transaction = None + ... _lock = ZODB.utils.Lock() + ... getName = lambda self: 'S' + ... isReadOnly = tpc_transaction = None + ... supportsUndo = undo = undoLog = undoInfo = None + ... supportsTransactionalUndo = None + ... def new_oid(self): + ... return '\0' * 8 + ... def tpc_begin(self, t, tid, status): + ... six.print_('begin', tid, status) + ... def tpc_abort(self, t): + ... pass + + >>> from ZODB.DemoStorage import DemoStorage + >>> storage = DemoStorage(base=S(1), changes=S(2)) + + >>> storage.getSize() + 2 size + + >>> storage.close() + 1 closed + 2 closed + + >>> storage.tpc_begin(1, 2, 3) + begin 2 3 + >>> storage.tpc_abort(1) + + >>> + + """ + +def blob_pos_key_error_with_non_blob_base(): + """ + >>> storage = ZODB.DemoStorage.DemoStorage() + >>> storage.loadBlob(ZODB.utils.p64(1), ZODB.utils.p64(1)) + Traceback (most recent call last): + ... + POSKeyError: 0x01 + + >>> storage.openCommittedBlobFile(ZODB.utils.p64(1), ZODB.utils.p64(1)) + Traceback (most recent call last): + ... + POSKeyError: 0x01 + + """ + +def load_before_base_storage_current(): + """ + Here we'll exercise that DemoStorage's loadBefore method works + properly when deferring to a record that is current in the + base storage. 
+ + >>> import time + >>> import transaction + >>> import ZODB.DB + >>> import ZODB.DemoStorage + >>> import ZODB.MappingStorage + >>> import ZODB.utils + + >>> base = ZODB.MappingStorage.MappingStorage() + >>> basedb = ZODB.DB(base) + >>> conn = basedb.open() + >>> conn.root()['foo'] = 'bar' + >>> transaction.commit() + >>> conn.close() + >>> storage = ZODB.DemoStorage.DemoStorage(base=base) + >>> db = ZODB.DB(storage) + >>> conn = db.open() + >>> conn.root()['foo'] = 'baz' + >>> time.sleep(.1) # Windows has a low-resolution clock + >>> transaction.commit() + + >>> oid = ZODB.utils.z64 + >>> base_current = load_current(storage.base, oid) + >>> tid = ZODB.utils.p64(ZODB.utils.u64(base_current[1]) + 1) + >>> base_record = storage.base.loadBefore(oid, tid) + >>> base_record[-1] is None + True + >>> base_current == base_record[:2] + True + + >>> t = storage.loadBefore(oid, tid) + + The data and tid are the values from the base storage, but the + next tid is from changes. + + >>> t[:2] == base_record[:2] + True + >>> t[-1] == load_current(storage.changes, oid)[1] + True + + >>> conn.close() + >>> db.close() + >>> base.close() + """ + +def test_suite(): + suite = unittest.TestSuite(( + doctest.DocTestSuite( + setUp=setUp, tearDown=ZODB.tests.util.tearDown, + checker=ZODB.tests.util.checker + ), + doctest.DocFileSuite( + '../DemoStorage.test', + setUp=setUp, + tearDown=ZODB.tests.util.tearDown, + checker=ZODB.tests.util.checker, + ), + )) + suite.addTest(unittest.makeSuite(DemoStorageTests, 'check')) + suite.addTest(unittest.makeSuite(DemoStorageHexTests, 'check')) + suite.addTest(unittest.makeSuite(DemoStorageWrappedAroundFileStorage, + 'check')) + suite.addTest(unittest.makeSuite(DemoStorageWrappedAroundMappingStorage, + 'check')) + suite.addTest(unittest.makeSuite(DemoStorageWrappedAroundHexMappingStorage, + 'check')) + return suite diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/tests/testFileStorage.py 
b/thesisenv/lib/python3.6/site-packages/ZODB/tests/testFileStorage.py new file mode 100644 index 0000000..2733048 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/tests/testFileStorage.py @@ -0,0 +1,742 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +import doctest +import os +if os.environ.get('USE_ZOPE_TESTING_DOCTEST'): + from zope.testing import doctest +import sys +import unittest +import transaction +import ZODB.FileStorage +import ZODB.tests.hexstorage +import ZODB.tests.testblob +import zope.testing.setupstack +from ZODB import POSException +from ZODB import DB +from ZODB.Connection import TransactionMetaData +from ZODB.fsIndex import fsIndex +from ZODB.utils import U64, p64, z64, load_current + +from ZODB.tests import StorageTestBase, BasicStorage, TransactionalUndoStorage +from ZODB.tests import PackableStorage, Synchronization, ConflictResolution +from ZODB.tests import HistoryStorage, IteratorStorage, Corruption +from ZODB.tests import RevisionStorage, PersistentStorage, MTStorage +from ZODB.tests import ReadOnlyStorage, RecoveryStorage +from ZODB.tests.StorageTestBase import MinPO, zodb_pickle +from ZODB._compat import dump, dumps, _protocol + +from . 
import util + +class FileStorageTests( + StorageTestBase.StorageTestBase, + BasicStorage.BasicStorage, + TransactionalUndoStorage.TransactionalUndoStorage, + RevisionStorage.RevisionStorage, + PackableStorage.PackableStorageWithOptionalGC, + PackableStorage.PackableUndoStorage, + Synchronization.SynchronizedStorage, + ConflictResolution.ConflictResolvingStorage, + ConflictResolution.ConflictResolvingTransUndoStorage, + HistoryStorage.HistoryStorage, + IteratorStorage.IteratorStorage, + IteratorStorage.ExtendedIteratorStorage, + PersistentStorage.PersistentStorage, + MTStorage.MTStorage, + ReadOnlyStorage.ReadOnlyStorage + ): + + def open(self, **kwargs): + self._storage = ZODB.FileStorage.FileStorage('FileStorageTests.fs', + **kwargs) + + def setUp(self): + StorageTestBase.StorageTestBase.setUp(self) + self.open(create=1) + + def checkLongMetadata(self): + s = "X" * 75000 + try: + self._dostore(user=s) + except POSException.StorageError: + pass + else: + self.fail("expect long user field to raise error") + try: + self._dostore(description=s) + except POSException.StorageError: + pass + else: + self.fail("expect long user field to raise error") + + def check_use_fsIndex(self): + + self.assertEqual(self._storage._index.__class__, fsIndex) + + # A helper for checking that when an .index contains a dict for the + # index, it's converted to an fsIndex when the file is opened. + def convert_index_to_dict(self): + # Convert the index in the current .index file to a Python dict. + # Return the index originally found. + data = fsIndex.load('FileStorageTests.fs.index') + index = data['index'] + + newindex = dict(index) + data['index'] = newindex + + with open('FileStorageTests.fs.index', 'wb') as fp: + dump(data, fp, _protocol) + return index + + def check_conversion_to_fsIndex(self, read_only=False): + from ZODB.fsIndex import fsIndex + + # Create some data, and remember the index. 
+ for i in range(10): + self._dostore() + oldindex_as_dict = dict(self._storage._index) + + # Save the index. + self._storage.close() + + # Convert it to a dict. + old_index = self.convert_index_to_dict() + self.assertTrue(isinstance(old_index, fsIndex)) + new_index = self.convert_index_to_dict() + self.assertTrue(isinstance(new_index, dict)) + + # Verify it's converted to fsIndex in memory upon open. + self.open(read_only=read_only) + self.assertTrue(isinstance(self._storage._index, fsIndex)) + + # Verify it has the right content. + newindex_as_dict = dict(self._storage._index) + self.assertEqual(oldindex_as_dict, newindex_as_dict) + + # Check that the type on disk has changed iff read_only is False. + self._storage.close() + current_index = self.convert_index_to_dict() + if read_only: + self.assertTrue(isinstance(current_index, dict)) + else: + self.assertTrue(isinstance(current_index, fsIndex)) + + def check_conversion_to_fsIndex_readonly(self): + # Same thing, but the disk .index should continue to hold a + # Python dict. + self.check_conversion_to_fsIndex(read_only=True) + + def check_conversion_from_dict_to_btree_data_in_fsIndex(self): + # To support efficient range searches on its keys as part of + # implementing a record iteration protocol in FileStorage, we + # converted the fsIndex class from using a dictionary as its + # self._data attribute to using an OOBTree in its stead. + + from ZODB.fsIndex import fsIndex + from BTrees.OOBTree import OOBTree + + # Create some data, and remember the index. + for i in range(10): + self._dostore() + data_dict = dict(self._storage._index._data) + + # Replace the OOBTree with a dictionary and commit it. + self._storage._index._data = data_dict + transaction.commit() + + # Save the index. + self._storage.close() + + # Verify it's converted to fsIndex in memory upon open. 
+ self.open() + self.assertTrue(isinstance(self._storage._index, fsIndex)) + self.assertTrue(isinstance(self._storage._index._data, OOBTree)) + + # Verify it has the right content. + new_data_dict = dict(self._storage._index._data) + self.assertEqual(len(data_dict), len(new_data_dict)) + + for k in data_dict: + old_tree = data_dict[k] + new_tree = new_data_dict[k] + self.assertEqual(list(old_tree.items()), list(new_tree.items())) + + def check_save_after_load_with_no_index(self): + for i in range(10): + self._dostore() + self._storage.close() + os.remove('FileStorageTests.fs.index') + self.open() + self.assertEqual(self._storage._saved, 1) + + def checkStoreBumpsOid(self): + # If .store() is handed an oid bigger than the storage knows + # about already, it's crucial that the storage bump its notion + # of the largest oid in use. + t = TransactionMetaData() + self._storage.tpc_begin(t) + giant_oid = b'\xee' * 8 + # Store an object. + # oid, serial, data, version, transaction + r1 = self._storage.store(giant_oid, b'\0'*8, b'data', b'', t) + # Finish the transaction. + r2 = self._storage.tpc_vote(t) + self._storage.tpc_finish(t) + # Before ZODB 3.2.6, this failed, with ._oid == z64. + self.assertEqual(self._storage._oid, giant_oid) + + def checkRestoreBumpsOid(self): + # As above, if .restore() is handed an oid bigger than the storage + # knows about already, it's crucial that the storage bump its notion + # of the largest oid in use. Because copyTransactionsFrom(), and + # ZRS recovery, use the .restore() method, this is plain critical. + t = TransactionMetaData() + self._storage.tpc_begin(t) + giant_oid = b'\xee' * 8 + # Store an object. + # oid, serial, data, version, prev_txn, transaction + r1 = self._storage.restore(giant_oid, b'\0'*8, b'data', b'', None, t) + # Finish the transaction. + r2 = self._storage.tpc_vote(t) + self._storage.tpc_finish(t) + # Before ZODB 3.2.6, this failed, with ._oid == z64. 
+ self.assertEqual(self._storage._oid, giant_oid) + + def checkCorruptionInPack(self): + # This sets up a corrupt .fs file, with a redundant transaction + # length mismatch. The implementation of pack in many releases of + # ZODB blew up if the .fs file had such damage: it detected the + # damage, but the code to raise CorruptedError referenced an undefined + # global. + import time + + from ZODB.FileStorage.format import CorruptedError + from ZODB.serialize import referencesf + + db = DB(self._storage) + conn = db.open() + conn.root()['xyz'] = 1 + transaction.commit() + + # Ensure it's all on disk. + db.close() + self._storage.close() + + # Reopen before damaging. + self.open() + + # Open .fs directly, and damage content. + with open('FileStorageTests.fs', 'r+b') as f: + f.seek(0, 2) + pos2 = f.tell() - 8 + f.seek(pos2) + tlen2 = U64(f.read(8)) # length-8 of the last transaction + pos1 = pos2 - tlen2 + 8 # skip over the tid at the start + f.seek(pos1) + tlen1 = U64(f.read(8)) # should be redundant length-8 + self.assertEqual(tlen1, tlen2) # verify that it is redundant + + # Now damage the second copy. + f.seek(pos2) + f.write(p64(tlen2 - 1)) + + # Try to pack. This used to yield + # NameError: global name 's' is not defined + try: + self._storage.pack(time.time(), referencesf) + except CorruptedError as detail: + self.assertTrue("redundant transaction length does not match " + "initial transaction length" in str(detail)) + else: + self.fail("expected CorruptedError") + + def check_record_iternext(self): + + db = DB(self._storage) + conn = db.open() + conn.root()['abc'] = MinPO('abc') + conn.root()['xyz'] = MinPO('xyz') + transaction.commit() + + # Ensure it's all on disk. 
+ db.close() + self._storage.close() + + self.open() + + key = None + for x in (b'\000', b'\001', b'\002'): + oid, tid, data, next_oid = self._storage.record_iternext(key) + self.assertEqual(oid, (b'\000' * 7) + x) + key = next_oid + expected_data, expected_tid = load_current(self._storage, oid) + self.assertEqual(expected_data, data) + self.assertEqual(expected_tid, tid) + if x == b'\002': + self.assertEqual(next_oid, None) + else: + self.assertNotEqual(next_oid, None) + + def checkFlushAfterTruncate(self, fail=False): + r0 = self._dostore(z64) + storage = self._storage + t = TransactionMetaData() + storage.tpc_begin(t) + storage.store(z64, r0, b'foo', b'', t) + storage.tpc_vote(t) + # Read operations are done with separate 'file' objects with their + # own buffers: here, the buffer also includes voted data. + load_current(storage, z64) + # This must invalidate all read buffers. + storage.tpc_abort(t) + self._dostore(z64, r0, b'bar', 1) + # In the case that read buffers were not invalidated, return value + # is based on what was cached during the first load. + self.assertEqual(load_current(storage, z64)[0], + b'foo' if fail else b'bar') + + # We want to be sure that the above test detects any regression + # in the code it checks, because any bug here is like a time bomb: not + # obvious, hard to reproduce, with possible data corruption. + # It's even more important that FilePool.flush() is quite aggressive and + # we'd like to optimize it when Python gets an API to flush read buffers. + # Therefore, 'checkFlushAfterTruncate' is tested in turn by another unit + # test. + # On Windows, flushing explicitely is not (always?) necessary. 
+ if sys.platform != 'win32': + def checkFlushNeededAfterTruncate(self): + self._storage._files.flush = lambda: None + self.checkFlushAfterTruncate(True) + +class FileStorageHexTests(FileStorageTests): + + def open(self, **kwargs): + self._storage = ZODB.tests.hexstorage.HexStorage( + ZODB.FileStorage.FileStorage('FileStorageTests.fs',**kwargs)) + + +class FileStorageTestsWithBlobsEnabled(FileStorageTests): + + def open(self, **kwargs): + if 'blob_dir' not in kwargs: + kwargs = kwargs.copy() + kwargs['blob_dir'] = 'blobs' + FileStorageTests.open(self, **kwargs) + + +class FileStorageHexTestsWithBlobsEnabled(FileStorageTests): + + def open(self, **kwargs): + if 'blob_dir' not in kwargs: + kwargs = kwargs.copy() + kwargs['blob_dir'] = 'blobs' + FileStorageTests.open(self, **kwargs) + self._storage = ZODB.tests.hexstorage.HexStorage(self._storage) + + +class FileStorageRecoveryTest( + StorageTestBase.StorageTestBase, + RecoveryStorage.RecoveryStorage, + ): + + def setUp(self): + StorageTestBase.StorageTestBase.setUp(self) + self._storage = ZODB.FileStorage.FileStorage("Source.fs", create=True) + self._dst = ZODB.FileStorage.FileStorage("Dest.fs", create=True) + + def tearDown(self): + self._dst.close() + StorageTestBase.StorageTestBase.tearDown(self) + + def new_dest(self): + return ZODB.FileStorage.FileStorage('Dest.fs') + +class FileStorageHexRecoveryTest(FileStorageRecoveryTest): + + def setUp(self): + StorageTestBase.StorageTestBase.setUp(self) + self._storage = ZODB.tests.hexstorage.HexStorage( + ZODB.FileStorage.FileStorage("Source.fs", create=True)) + self._dst = ZODB.tests.hexstorage.HexStorage( + ZODB.FileStorage.FileStorage("Dest.fs", create=True)) + + +class FileStorageNoRestore(ZODB.FileStorage.FileStorage): + + @property + def restore(self): + raise Exception + + +class FileStorageNoRestoreRecoveryTest(FileStorageRecoveryTest): + # This test actually verifies a code path of + # BaseStorage.copyTransactionsFrom. 
For simplicity of implementation, we + # use a FileStorage deprived of its restore method. + + def setUp(self): + StorageTestBase.StorageTestBase.setUp(self) + self._storage = FileStorageNoRestore("Source.fs", create=True) + self._dst = FileStorageNoRestore("Dest.fs", create=True) + + def new_dest(self): + return FileStorageNoRestore('Dest.fs') + + def checkRestoreAcrossPack(self): + # Skip this check as it calls restore directly. + pass + + +class AnalyzeDotPyTest(StorageTestBase.StorageTestBase): + + def setUp(self): + StorageTestBase.StorageTestBase.setUp(self) + self._storage = ZODB.FileStorage.FileStorage("Source.fs", create=True) + + def checkanalyze(self): + import types + from BTrees.OOBTree import OOBTree + from ZODB.scripts import analyze + + # Set up a module to act as a broken import + module_name = 'brokenmodule' + module = types.ModuleType(module_name) + sys.modules[module_name] = module + + class Broken(MinPO): + __module__ = module_name + module.Broken = Broken + + oids = [[self._storage.new_oid(), None] for i in range(3)] + def store(i, data): + oid, revid = oids[i] + self._storage.store(oid, revid, data, "", t) + + for i in range(2): + t = TransactionMetaData() + self._storage.tpc_begin(t) + + # sometimes data is in this format + store(0, dumps(OOBTree, _protocol)) + # and it could be from a broken module + store(1, dumps(Broken, _protocol)) + # but mostly it looks like this + store(2, zodb_pickle(MinPO(2))) + + self._storage.tpc_vote(t) + tid = self._storage.tpc_finish(t) + for oid_revid in oids: + oid_revid[1] = tid + + # now break the import of the Broken class + del sys.modules[module_name] + + # from ZODB.scripts.analyze.analyze + fsi = self._storage.iterator() + rep = analyze.Report() + for txn in fsi: + analyze.analyze_trans(rep, txn) + + # from ZODB.scripts.analyze.report + typemap = sorted(rep.TYPEMAP.keys()) + cumpct = 0.0 + for t in typemap: + pct = rep.TYPESIZE[t] * 100.0 / rep.DBYTES + cumpct += pct + + self.assertAlmostEqual(cumpct, 
100.0, 0, + "Failed to analyze some records") + +# Raise an exception if the tids in FileStorage fs aren't +# strictly increasing. +def checkIncreasingTids(fs): + lasttid = b'\0' * 8 + for txn in fs.iterator(): + if lasttid >= txn.tid: + raise ValueError("tids out of order %r >= %r" % (lasttid, txn.tid)) + lasttid = txn.tid + +# Return a TimeStamp object 'minutes' minutes in the future. +def timestamp(minutes): + import time + from persistent.TimeStamp import TimeStamp + + t = time.time() + 60 * minutes + return TimeStamp(*time.gmtime(t)[:5] + (t % 60,)) + +def testTimeTravelOnOpen(): + """ + >>> from ZODB.FileStorage import FileStorage + >>> from zope.testing.loggingsupport import InstalledHandler + + Arrange to capture log messages -- they're an important part of + this test! + + >>> handler = InstalledHandler('ZODB.FileStorage') + + Create a new file storage. + + >>> st = FileStorage('temp.fs', create=True) + >>> db = DB(st) + >>> db.close() + + First check the normal case: transactions are recorded with + increasing tids, and time doesn't run backwards. + + >>> st = FileStorage('temp.fs') + >>> db = DB(st) + >>> conn = db.open() + >>> conn.root()['xyz'] = 1 + >>> transaction.get().commit() + >>> checkIncreasingTids(st) + >>> db.close() + >>> st.cleanup() # remove .fs, .index, etc files + >>> handler.records # i.e., no log messages + [] + + Now force the database to have transaction records with tids from + the future. + + >>> st = FileStorage('temp.fs', create=True) + >>> st._ts = timestamp(15) # 15 minutes in the future + >>> db = DB(st) + >>> db.close() + + >>> st = FileStorage('temp.fs') # this should log a warning + >>> db = DB(st) + >>> conn = db.open() + >>> conn.root()['xyz'] = 1 + >>> transaction.get().commit() + >>> checkIncreasingTids(st) + >>> db.close() + >>> st.cleanup() + + >>> [record.levelname for record in handler.records] + ['WARNING'] + >>> handler.clear() + + And one more time, with transaction records far in the future. 
+ We expect to log a critical error then, as a time so far in the + future probably indicates a real problem with the system. Shorter + spans may be due to clock drift. + + >>> st = FileStorage('temp.fs', create=True) + >>> st._ts = timestamp(60) # an hour in the future + >>> db = DB(st) + >>> db.close() + + >>> st = FileStorage('temp.fs') # this should log a critical error + >>> db = DB(st) + >>> conn = db.open() + >>> conn.root()['xyz'] = 1 + >>> transaction.get().commit() + >>> checkIncreasingTids(st) + >>> db.close() + >>> st.cleanup() + + >>> [record.levelname for record in handler.records] + ['CRITICAL'] + >>> handler.clear() + >>> handler.uninstall() + """ + +def lastInvalidations(): + """ + +The last invalidations method is used by a storage server to populate +it's data structure of recent invalidations. The lastInvalidations +method is passed a count and must return up to count number of the +most recent transactions. + +We'll create a FileStorage and populate it with some data, keeping +track of the transactions along the way: + + >>> fs = ZODB.FileStorage.FileStorage('t.fs', create=True) + >>> db = DB(fs) + >>> conn = db.open() + >>> from persistent.mapping import PersistentMapping + >>> last = [] + >>> for i in range(100): + ... conn.root()[i] = PersistentMapping() + ... transaction.commit() + ... last.append(fs.lastTransaction()) + +Now, we can call lastInvalidations on it: + + >>> invalidations = fs.lastInvalidations(10) + >>> [t for (t, oids) in invalidations] == last[-10:] + True + + >>> from ZODB.utils import u64 + >>> [[int(u64(oid)) for oid in oids] + ... for (i, oids) in invalidations] + ... 
# doctest: +NORMALIZE_WHITESPACE + [[0, 91], [0, 92], [0, 93], [0, 94], [0, 95], + [0, 96], [0, 97], [0, 98], [0, 99], [0, 100]] + +If we ask for more transactions than there are, we'll get as many as +there are: + + >>> len(fs.lastInvalidations(1000)) + 101 + +Of course, calling lastInvalidations on an empty storage refturns no data: + + >>> db.close() + >>> fs = ZODB.FileStorage.FileStorage('t.fs', create=True) + >>> list(fs.lastInvalidations(10)) + [] + + >>> fs.close() + """ + +def deal_with_finish_failures(): + r""" + + It's really bad to get errors in FileStorage's _finish method, as + that can cause the file storage to be in an inconsistent + state. The data file will be fine, but the internal data + structures might be hosed. For this reason, FileStorage will close + if there is an error after it has finished writing transaction + data. It bothers to do very little after writing this data, so + this should rarely, if ever, happen. + + >>> fs = ZODB.FileStorage.FileStorage('data.fs') + >>> db = DB(fs) + >>> conn = db.open() + >>> conn.root()[1] = 1 + >>> transaction.commit() + + Now, we'll indentially break the file storage. It provides a hook + for this purpose. :) + + >>> fs._finish_finish = lambda : None + >>> conn.root()[1] = 1 + + >>> import zope.testing.loggingsupport + >>> handler = zope.testing.loggingsupport.InstalledHandler( + ... 'ZODB.FileStorage') + >>> transaction.commit() # doctest: +ELLIPSIS + Traceback (most recent call last): + ... + TypeError: () takes ... + + + >>> print(handler) + ZODB.FileStorage CRITICAL + Failure in _finish. Closing. + + >>> handler.uninstall() + + >>> load_current(fs, b'\0'*8) # doctest: +ELLIPSIS + Traceback (most recent call last): + ... + ValueError: ... 
+ + >>> db.close() + >>> fs = ZODB.FileStorage.FileStorage('data.fs') + >>> db = DB(fs) + >>> conn = db.open() + >>> conn.root() + {1: 1} + + >>> transaction.abort() + >>> db.close() + """ + +def pack_with_open_blob_files(): + """ + Make sure packing works while there are open blob files. + + >>> fs = ZODB.FileStorage.FileStorage('data.fs', blob_dir='blobs') + >>> db = ZODB.DB(fs) + >>> tm1 = transaction.TransactionManager() + >>> conn1 = db.open(tm1) + >>> import ZODB.blob + >>> conn1.root()[1] = ZODB.blob.Blob() + >>> conn1.add(conn1.root()[1]) + >>> with conn1.root()[1].open('w') as file: + ... _ = file.write(b'some data') + >>> tm1.commit() + + >>> tm2 = transaction.TransactionManager() + >>> conn2 = db.open(tm2) + >>> f = conn1.root()[1].open() + >>> conn1.root()[2] = ZODB.blob.Blob() + >>> conn1.add(conn1.root()[2]) + >>> with conn1.root()[2].open('w') as file: + ... _ = file.write(b'some more data') + + >>> db.pack() + >>> f.read() + 'some data' + >>> f.close() + + >>> tm1.commit() + >>> conn2.sync() + >>> with conn2.root()[2].open() as fp: fp.read() + 'some more data' + + >>> db.close() + """ + +def readonly_open_nonexistent_file(): + """ + Make sure error is reported when non-existent file is tried to be opened + read-only. + + >>> try: + ... fs = ZODB.FileStorage.FileStorage('nonexistent.fs', read_only=True) + ... except Exception as e: + ... # Python2 raises IOError; Python3 - FileNotFoundError + ... print("error: %s" % str(e)) # doctest: +ELLIPSIS + error: ... 
No such file or directory: 'nonexistent.fs' + """ + +def test_suite(): + suite = unittest.TestSuite() + for klass in [ + FileStorageTests, FileStorageHexTests, + Corruption.FileStorageCorruptTests, + FileStorageRecoveryTest, FileStorageHexRecoveryTest, + FileStorageNoRestoreRecoveryTest, + FileStorageTestsWithBlobsEnabled, FileStorageHexTestsWithBlobsEnabled, + AnalyzeDotPyTest, + ]: + suite.addTest(unittest.makeSuite(klass, "check")) + suite.addTest(doctest.DocTestSuite( + setUp=zope.testing.setupstack.setUpDirectory, + tearDown=util.tearDown, + checker=util.checker)) + suite.addTest(ZODB.tests.testblob.storage_reusable_suite( + 'BlobFileStorage', + lambda name, blob_dir: + ZODB.FileStorage.FileStorage('%s.fs' % name, blob_dir=blob_dir), + test_blob_storage_recovery=True, + test_packing=True, + )) + suite.addTest(ZODB.tests.testblob.storage_reusable_suite( + 'BlobFileHexStorage', + lambda name, blob_dir: + ZODB.tests.hexstorage.HexStorage( + ZODB.FileStorage.FileStorage('%s.fs' % name, blob_dir=blob_dir)), + test_blob_storage_recovery=True, + test_packing=True, + )) + suite.addTest(PackableStorage.IExternalGC_suite( + lambda : ZODB.FileStorage.FileStorage( + 'data.fs', blob_dir='blobs', pack_gc=False))) + suite.layer = util.MininalTestLayer('testFileStorage') + return suite + +if __name__=='__main__': + unittest.main() diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/tests/testMVCCMappingStorage.py b/thesisenv/lib/python3.6/site-packages/ZODB/tests/testMVCCMappingStorage.py new file mode 100644 index 0000000..9dd337a --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/tests/testMVCCMappingStorage.py @@ -0,0 +1,200 @@ +############################################################################## +# +# Copyright (c) Zope Corporation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## + +import unittest + +from persistent.mapping import PersistentMapping +import transaction +from ZODB.Connection import TransactionMetaData +from ZODB.DB import DB +from ZODB.tests.MVCCMappingStorage import MVCCMappingStorage +import ZODB.blob +import ZODB.tests.testblob + +from ZODB.tests import ( + BasicStorage, + HistoryStorage, + IteratorStorage, + MTStorage, + PackableStorage, + RevisionStorage, + StorageTestBase, + Synchronization, + ) + +class MVCCTests(object): + + def checkClosingNestedDatabasesWorks(self): + # This tests for the error described in + # https://github.com/zopefoundation/ZODB/issues/45 + db1 = DB(self._storage) + db2 = DB(None, databases=db1.databases, database_name='2') + db1.open().get_connection('2') + db1.close() + db2.close() + + def checkCrossConnectionInvalidation(self): + # Verify connections see updated state at txn boundaries. + # This will fail if the Connection doesn't poll for changes. + db = DB(self._storage) + try: + c1 = db.open(transaction.TransactionManager()) + r1 = c1.root() + r1['myobj'] = 'yes' + c2 = db.open(transaction.TransactionManager()) + r2 = c2.root() + self.assertTrue('myobj' not in r2) + + c1.transaction_manager.commit() + self.assertTrue('myobj' not in r2) + + c2.sync() + self.assertTrue('myobj' in r2) + self.assertTrue(r2['myobj'] == 'yes') + finally: + db.close() + + def checkCrossConnectionIsolation(self): + # Verify MVCC isolates connections. + # This will fail if Connection doesn't poll for changes. 
+ db = DB(self._storage) + try: + c1 = db.open() + r1 = c1.root() + r1['alpha'] = PersistentMapping() + r1['gamma'] = PersistentMapping() + transaction.commit() + + # Open a second connection but don't load root['alpha'] yet + c2 = db.open() + r2 = c2.root() + + r1['alpha']['beta'] = 'yes' + + storage = c1._storage + t = transaction.Transaction() + t.description = u'isolation test 1' + c1.tpc_begin(t) + c1.commit(t) + storage.tpc_vote(t.data(c1)) + storage.tpc_finish(t.data(c1)) + + # The second connection will now load root['alpha'], but due to + # MVCC, it should continue to see the old state. + self.assertTrue(r2['alpha']._p_changed is None) # A ghost + self.assertTrue(not r2['alpha']) + self.assertTrue(r2['alpha']._p_changed == 0) + + # make root['alpha'] visible to the second connection + c2.sync() + + # Now it should be in sync + self.assertTrue(r2['alpha']._p_changed is None) # A ghost + self.assertTrue(r2['alpha']) + self.assertTrue(r2['alpha']._p_changed == 0) + self.assertTrue(r2['alpha']['beta'] == 'yes') + + # Repeat the test with root['gamma'] + r1['gamma']['delta'] = 'yes' + + storage = c1._storage + t = transaction.Transaction() + t.description = u'isolation test 2' + c1.tpc_begin(t) + c1.commit(t) + storage.tpc_vote(t.data(c1)) + storage.tpc_finish(t.data(c1)) + + # The second connection will now load root[3], but due to MVCC, + # it should continue to see the old state. 
+ self.assertTrue(r2['gamma']._p_changed is None) # A ghost + self.assertTrue(not r2['gamma']) + self.assertTrue(r2['gamma']._p_changed == 0) + + # make root[3] visible to the second connection + c2.sync() + + # Now it should be in sync + self.assertTrue(r2['gamma']._p_changed is None) # A ghost + self.assertTrue(r2['gamma']) + self.assertTrue(r2['gamma']._p_changed == 0) + self.assertTrue(r2['gamma']['delta'] == 'yes') + finally: + db.close() + + +class MVCCMappingStorageTests( + StorageTestBase.StorageTestBase, + BasicStorage.BasicStorage, + + HistoryStorage.HistoryStorage, + IteratorStorage.ExtendedIteratorStorage, + IteratorStorage.IteratorStorage, + MTStorage.MTStorage, + PackableStorage.PackableStorageWithOptionalGC, + RevisionStorage.RevisionStorage, + Synchronization.SynchronizedStorage, + MVCCTests + ): + + def setUp(self): + self._storage = MVCCMappingStorage() + + def tearDown(self): + self._storage.close() + + def checkLoadBeforeUndo(self): + pass # we don't support undo yet + checkUndoZombie = checkLoadBeforeUndo + + def checkTransactionIdIncreases(self): + import time + from ZODB.utils import newTid + from ZODB.TimeStamp import TimeStamp + t = TransactionMetaData() + self._storage.tpc_begin(t) + self._storage.tpc_vote(t) + self._storage.tpc_finish(t) + + # Add a fake transaction + transactions = self._storage._transactions + self.assertEqual(1, len(transactions)) + fake_timestamp = b'zzzzzzzy' # the year 5735 ;-) + transactions[fake_timestamp] = transactions.values()[0] + + # Verify the next transaction comes after the fake transaction + t = TransactionMetaData() + self._storage.tpc_begin(t) + self.assertEqual(self._storage._tid, b'zzzzzzzz') + +def create_blob_storage(name, blob_dir): + s = MVCCMappingStorage(name) + return ZODB.blob.BlobStorage(blob_dir, s) + +def test_suite(): + suite = unittest.makeSuite(MVCCMappingStorageTests, 'check') + # Note: test_packing doesn't work because even though MVCCMappingStorage + # retains history, it does not 
provide undo methods, so the + # BlobStorage wrapper calls _packNonUndoing instead of _packUndoing, + # causing blobs to get deleted even though object states are retained. + suite.addTest(ZODB.tests.testblob.storage_reusable_suite( + 'MVCCMapping', create_blob_storage, + test_undo=False, + )) + return suite + +if __name__ == "__main__": + loader = unittest.TestLoader() + loader.testMethodPrefix = "check" + unittest.main(testLoader=loader) diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/tests/testMappingStorage.py b/thesisenv/lib/python3.6/site-packages/ZODB/tests/testMappingStorage.py new file mode 100644 index 0000000..7a10d08 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/tests/testMappingStorage.py @@ -0,0 +1,99 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. 
+# +############################################################################## +from collections import namedtuple +import ZODB.MappingStorage +import unittest +import ZODB.tests.hexstorage + + +from ZODB.tests import ( + BasicStorage, + HistoryStorage, + IteratorStorage, + MTStorage, + PackableStorage, + RevisionStorage, + StorageTestBase, + Synchronization, + ) + +class MappingStorageTests( + StorageTestBase.StorageTestBase, + BasicStorage.BasicStorage, + + HistoryStorage.HistoryStorage, + IteratorStorage.ExtendedIteratorStorage, + IteratorStorage.IteratorStorage, + MTStorage.MTStorage, + PackableStorage.PackableStorageWithOptionalGC, + RevisionStorage.RevisionStorage, + Synchronization.SynchronizedStorage, + ): + + def setUp(self): + StorageTestBase.StorageTestBase.setUp(self, ) + self._storage = ZODB.MappingStorage.MappingStorage() + + def checkOversizeNote(self): + # This base class test checks for the common case where a storage + # doesnt support huge transaction metadata. This storage doesnt + # have this limit, so we inhibit this test here. 
+ pass + + def checkLoadBeforeUndo(self): + pass # we don't support undo yet + checkUndoZombie = checkLoadBeforeUndo + +class MappingStorageHexTests(MappingStorageTests): + + def setUp(self): + StorageTestBase.StorageTestBase.setUp(self, ) + self._storage = ZODB.tests.hexstorage.HexStorage( + ZODB.MappingStorage.MappingStorage()) + +MockTransaction = namedtuple( + 'transaction', + ['user', 'description', 'extension'] +) + +class MappingStorageTransactionRecordTests(unittest.TestCase): + + def setUp(self): + self._transaction_record = ZODB.MappingStorage.TransactionRecord( + 0, + MockTransaction('user', 'description', 'extension'), + '' + ) + + def check_set__extension(self): + self._transaction_record._extension = 'new' + self.assertEqual(self._transaction_record.extension, 'new') + + def check_get__extension(self): + self.assertEqual( + self._transaction_record.extension, + self._transaction_record._extension + ) + +def test_suite(): + suite = unittest.TestSuite() + suite.addTest(unittest.makeSuite(MappingStorageTests, 'check')) + suite.addTest(unittest.makeSuite(MappingStorageHexTests, 'check')) + suite.addTest(unittest.makeSuite(MappingStorageTransactionRecordTests, 'check')) + return suite + +if __name__ == "__main__": + loader = unittest.TestLoader() + loader.testMethodPrefix = "check" + unittest.main(testLoader=loader) diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/tests/testPersistentList.py b/thesisenv/lib/python3.6/site-packages/ZODB/tests/testPersistentList.py new file mode 100644 index 0000000..fd26134 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/tests/testPersistentList.py @@ -0,0 +1,227 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Test the list interface to PersistentList +""" + +import unittest +from persistent.list import PersistentList + +from six import PY2 + +l0 = [] +l1 = [0] +l2 = [0, 1] + +class TestPList(unittest.TestCase): + def checkTheWorld(self): + # Test constructors + u = PersistentList() + u0 = PersistentList(l0) + u1 = PersistentList(l1) + u2 = PersistentList(l2) + + uu = PersistentList(u) + uu0 = PersistentList(u0) + uu1 = PersistentList(u1) + uu2 = PersistentList(u2) + + v = PersistentList(tuple(u)) + class OtherList(object): + def __init__(self, initlist): + self.__data = initlist + def __len__(self): + return len(self.__data) + def __getitem__(self, i): + return self.__data[i] + v0 = PersistentList(OtherList(u0)) + vv = PersistentList("this is also a sequence") + + # Test __repr__ + eq = self.assertEqual + + eq(str(u0), str(l0), "str(u0) == str(l0)") + eq(repr(u1), repr(l1), "repr(u1) == repr(l1)") + eq(repr(u2), repr(l2), "repr(u2) == repr(l2)") + + # Test __cmp__ and __len__ + + # Py3: No cmp() or __cmp__ anymore. 
+ if PY2: + def mycmp(a, b): + r = cmp(a, b) + if r < 0: return -1 + if r > 0: return 1 + return r + + all = [l0, l1, l2, u, u0, u1, u2, uu, uu0, uu1, uu2] + for a in all: + for b in all: + eq(mycmp(a, b), mycmp(len(a), len(b)), + "mycmp(a, b) == mycmp(len(a), len(b))") + + # Test __getitem__ + + for i in range(len(u2)): + eq(u2[i], i, "u2[i] == i") + + # Test __setitem__ + + uu2[0] = 0 + uu2[1] = 100 + try: + uu2[2] = 200 + except IndexError: + pass + else: + self.fail("uu2[2] shouldn't be assignable") + + # Test __delitem__ + + del uu2[1] + del uu2[0] + try: + del uu2[0] + except IndexError: + pass + else: + self.fail("uu2[0] shouldn't be deletable") + + # Test __getslice__ + + for i in range(-3, 4): + eq(u2[:i], l2[:i], "u2[:i] == l2[:i]") + eq(u2[i:], l2[i:], "u2[i:] == l2[i:]") + for j in range(-3, 4): + eq(u2[i:j], l2[i:j], "u2[i:j] == l2[i:j]") + + # Test __setslice__ + + for i in range(-3, 4): + u2[:i] = l2[:i] + eq(u2, l2, "u2 == l2") + u2[i:] = l2[i:] + eq(u2, l2, "u2 == l2") + for j in range(-3, 4): + u2[i:j] = l2[i:j] + eq(u2, l2, "u2 == l2") + + uu2 = u2[:] + uu2[:0] = [-2, -1] + eq(uu2, [-2, -1, 0, 1], "uu2 == [-2, -1, 0, 1]") + uu2[0:] = [] + eq(uu2, [], "uu2 == []") + + # Test __contains__ + for i in u2: + self.assertTrue(i in u2, "i in u2") + for i in min(u2)-1, max(u2)+1: + self.assertTrue(i not in u2, "i not in u2") + + # Test __delslice__ + + uu2 = u2[:] + del uu2[1:2] + del uu2[0:1] + eq(uu2, [], "uu2 == []") + + uu2 = u2[:] + del uu2[1:] + del uu2[:1] + eq(uu2, [], "uu2 == []") + + # Test __add__, __radd__, __mul__ and __rmul__ + + #self.assertTrue(u1 + [] == [] + u1 == u1, "u1 + [] == [] + u1 == u1") + self.assertTrue(u1 + [1] == u2, "u1 + [1] == u2") + #self.assertTrue([-1] + u1 == [-1, 0], "[-1] + u1 == [-1, 0]") + self.assertTrue(u2 == u2*1 == 1*u2, "u2 == u2*1 == 1*u2") + self.assertTrue(u2+u2 == u2*2 == 2*u2, "u2+u2 == u2*2 == 2*u2") + self.assertTrue(u2+u2+u2 == u2*3 == 3*u2, "u2+u2+u2 == u2*3 == 3*u2") + + # Test append + + u = u1[:] + 
u.append(1) + eq(u, u2, "u == u2") + + # Test insert + + u = u2[:] + u.insert(0, -1) + eq(u, [-1, 0, 1], "u == [-1, 0, 1]") + + # Test pop + + u = PersistentList([0, -1, 1]) + u.pop() + eq(u, [0, -1], "u == [0, -1]") + u.pop(0) + eq(u, [-1], "u == [-1]") + + # Test remove + + u = u2[:] + u.remove(1) + eq(u, u1, "u == u1") + + # Test count + u = u2*3 + eq(u.count(0), 3, "u.count(0) == 3") + eq(u.count(1), 3, "u.count(1) == 3") + eq(u.count(2), 0, "u.count(2) == 0") + + + # Test index + + eq(u2.index(0), 0, "u2.index(0) == 0") + eq(u2.index(1), 1, "u2.index(1) == 1") + try: + u2.index(2) + except ValueError: + pass + else: + self.fail("expected ValueError") + + # Test reverse + + u = u2[:] + u.reverse() + eq(u, [1, 0], "u == [1, 0]") + u.reverse() + eq(u, u2, "u == u2") + + # Test sort + + u = PersistentList([1, 0]) + u.sort() + eq(u, u2, "u == u2") + + # Test extend + + u = u1[:] + u.extend(u2) + eq(u, u1 + u2, "u == u1 + u2") + + def checkBackwardCompat(self): + # Verify that the sanest of the ZODB 3.2 dotted paths still works. + from ZODB.PersistentList import PersistentList as oldPath + self.assertTrue(oldPath is PersistentList) + +def test_suite(): + return unittest.makeSuite(TestPList, 'check') + +if __name__ == "__main__": + loader = unittest.TestLoader() + loader.testMethodPrefix = "check" + unittest.main(testLoader=loader) diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/tests/testPersistentMapping.py b/thesisenv/lib/python3.6/site-packages/ZODB/tests/testPersistentMapping.py new file mode 100644 index 0000000..4bd689e --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/tests/testPersistentMapping.py @@ -0,0 +1,149 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Verify that PersistentMapping works with old versions of Zope. + +The comments in PersistentMapping.py address the issue in some detail. +The pickled form of a PersistentMapping must use _container to store +the actual mapping, because old versions of Zope used this attribute. +If the new code doesn't generate pickles that are consistent with the +old code, developers will have a hard time testing the new code. +""" + +import unittest +import sys + +import ZODB +from ZODB.Connection import TransactionMetaData +from ZODB.MappingStorage import MappingStorage + +from six import PY2 + +# This pickle contains a persistent mapping pickle created from the +# old code. +pickle = ('((U\x0bPersistenceq\x01U\x11PersistentMappingtq\x02Nt.}q\x03U\n' + '_containerq\x04}q\x05U\x07versionq\x06U\x03oldq\x07ss.\n') + +class PMTests(unittest.TestCase): + + def checkOldStyleRoot(self): + # The Persistence module doesn't exist in Zope3's idea of what ZODB + # is, but the global `pickle` references it explicitly. So just + # bail if Persistence isn't available. + try: + import Persistence + except ImportError: + return + # insert the pickle in place of the root + s = MappingStorage() + t = TransactionMetaData() + s.tpc_begin(t) + s.store('\000' * 8, None, pickle, '', t) + s.tpc_vote(t) + s.tpc_finish(t) + + db = ZODB.DB(s) + # If the root can be loaded successfully, we should be okay. + r = db.open().root() + # But make sure it looks like a new mapping + self.assertTrue(hasattr(r, 'data')) + self.assertTrue(not hasattr(r, '_container')) + + def checkBackwardCompat(self): + # Verify that the sanest of the ZODB 3.2 dotted paths still works. 
+ from persistent.mapping import PersistentMapping as newPath + from ZODB.PersistentMapping import PersistentMapping as oldPath + + self.assertTrue(oldPath is newPath) + + def checkBasicOps(self): + from persistent.mapping import PersistentMapping + m = PersistentMapping({'x': 1}, a=2, b=3) + m['name'] = 'bob' + self.assertEqual(m['name'], "bob") + self.assertEqual(m.get('name', 42), "bob") + self.assertTrue('name' in m) + + try: + m['fred'] + except KeyError: + pass + else: + self.fail("expected KeyError") + self.assertTrue('fred' not in m) + self.assertEqual(m.get('fred'), None) + self.assertEqual(m.get('fred', 42), 42) + + keys = sorted(m.keys()) + self.assertEqual(keys, ['a', 'b', 'name', 'x']) + + values = set(m.values()) + self.assertEqual(values, set([1, 2, 3, 'bob'])) + + items = sorted(m.items()) + self.assertEqual(items, + [('a', 2), ('b', 3), ('name', 'bob'), ('x', 1)]) + + if PY2: + keys = sorted(m.iterkeys()) + self.assertEqual(keys, ['a', 'b', 'name', 'x']) + + values = sorted(m.itervalues()) + self.assertEqual(values, [1, 2, 3, 'bob']) + + items = sorted(m.iteritems()) + self.assertEqual( + items, [('a', 2), ('b', 3), ('name', 'bob'), ('x', 1)]) + + # PersistentMapping didn't have an __iter__ method before ZODB 3.4.2. + # Check that it plays well now with the Python iteration protocol. 
+ def checkIteration(self): + from persistent.mapping import PersistentMapping + m = PersistentMapping({'x': 1}, a=2, b=3) + m['name'] = 'bob' + + def check(keylist): + keylist.sort() + self.assertEqual(keylist, ['a', 'b', 'name', 'x']) + + check(list(m)) + check([key for key in m]) + + i = iter(m) + keylist = [] + while 1: + try: + key = next(i) + except StopIteration: + break + keylist.append(key) + check(keylist) + +def find_global(modulename, classname): + """Helper for this test suite to get special PersistentMapping""" + + if classname == "PersistentMapping": + class PersistentMapping(object): + def __setstate__(self, state): + self.__dict__.update(state) + return PersistentMapping + else: + __import__(modulename) + mod = sys.modules[modulename] + return getattr(mod, classname) + +def test_suite(): + return unittest.makeSuite(PMTests, 'check') + +if __name__ == "__main__": + unittest.main() diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/tests/testPersistentWeakref.py b/thesisenv/lib/python3.6/site-packages/ZODB/tests/testPersistentWeakref.py new file mode 100644 index 0000000..b7976a3 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/tests/testPersistentWeakref.py @@ -0,0 +1,275 @@ +############################################################################## +# +# Copyright (c) 2003 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. 
+# +############################################################################## +"""ZODB-based (functional) tests persistent weakrefs +""" +__docformat__ = "reStructuredText" + + + +def test_weakrefs_functional(): + """Persistent weak references + + Persistent weak references are used much like Python weak + references. The major difference is that you can't specify an + object to be called when the object is removed from the database. + + Here's an example. We'll start by creating a persistent object and + a reference to it: + + >>> import transaction + >>> from persistent.wref import WeakRef + >>> import persistent, ZODB.tests.MinPO + >>> import ZODB.tests.util + >>> ob = ZODB.tests.MinPO.MinPO() + >>> ref = WeakRef(ob) + >>> ref() is ob + True + + The hash of the ref is the same as the hash of the referenced object: + + >>> hash(ref) == hash(ob) + True + + Two refs to the same object are equal: + + >>> WeakRef(ob) == ref + True + + >>> ob2 = ZODB.tests.MinPO.MinPO(1) + >>> WeakRef(ob2) == ref + False + + Lets save the reference and the referenced object in a database: + + >>> db = ZODB.tests.util.DB() + + >>> conn1 = db.open() + >>> conn1.root()['ob'] = ob + >>> conn1.root()['ref'] = ref + >>> transaction.commit() + + If we open a new connection, we can use the reference: + + >>> conn2 = db.open() + >>> conn2.root()['ref']() is conn2.root()['ob'] + True + >>> hash(conn2.root()['ref']) == hash(conn2.root()['ob']) + True + + But if we delete the referenced object and pack: + + >>> del conn2.root()['ob'] + >>> transaction.commit() + >>> ZODB.tests.util.pack(db) + + And then look in a new connection: + + >>> conn3 = db.open() + >>> conn3.root()['ob'] + Traceback (most recent call last): + ... + KeyError: 'ob' + + Trying to dereference the reference returns None: + + >>> conn3.root()['ref']() + + Trying to get a hash, raises a type error: + + >>> hash(conn3.root()['ref']) + Traceback (most recent call last): + ... 
+ TypeError: Weakly-referenced object has gone away + + Always explicitly close databases: :) + + >>> db.close() + >>> del ob, ref, db, conn1, conn2, conn3 + + When multiple databases are in use, a weakref in one database may + point to an object in a different database. Let's create two new + databases to demonstrate this. + + >>> dbA = ZODB.tests.util.DB( + ... database_name = 'dbA', + ... ) + >>> dbB = ZODB.tests.util.DB( + ... database_name = 'dbB', + ... databases = dbA.databases, + ... ) + >>> connA1 = dbA.open() + >>> connB1 = connA1.get_connection('dbB') + + Now create and add a new object and a weak reference, and add them + to different databases. + + >>> ob = ZODB.tests.MinPO.MinPO() + >>> ref = WeakRef(ob) + >>> connA1.root()['ob'] = ob + >>> connA1.add(ob) + >>> connB1.root()['ref'] = ref + >>> transaction.commit() + + After a succesful commit, the reference should know the oid, + database name and connection of the object. + + >>> ref.oid == ob._p_oid + True + >>> ref.database_name == 'dbA' + True + >>> ref.dm is ob._p_jar is connA1 + True + + If we open new connections, we should be able to use the reference. + + >>> connA2 = dbA.open() + >>> connB2 = connA2.get_connection('dbB') + >>> ref2 = connB2.root()['ref'] + >>> ob2 = connA2.root()['ob'] + >>> ref2() is ob2 + True + >>> ref2.oid == ob2._p_oid + True + >>> ref2.database_name == 'dbA' + True + >>> ref2.dm is ob2._p_jar is connA2 + True + + Always explicitly close databases: :) + + >>> dbA.close() + >>> dbB.close() + + """ + + +def test_PersistentWeakKeyDictionary(): + """Persistent weak key dictionary + + This is akin to WeakKeyDictionaries. Note, however, that removal + of items is extremely lazy. See below. + + We'll start by creating a PersistentWeakKeyDictionary and adding + some persistent objects to it. 
+ + >>> import transaction + >>> from persistent.wref import PersistentWeakKeyDictionary + >>> d = PersistentWeakKeyDictionary() + >>> import ZODB.tests.util + >>> p1 = ZODB.tests.util.P('p1') + >>> p2 = ZODB.tests.util.P('p2') + >>> p3 = ZODB.tests.util.P('p3') + >>> d[p1] = 1 + >>> d[p2] = 2 + >>> d[p3] = 3 + + We'll create an extra persistent object that's not in the dict: + + >>> p4 = ZODB.tests.util.P('p4') + + Now we'll excercise iteration and item access: + + >>> l = [(str(k), d[k], d.get(k)) for k in d] + >>> l.sort() + >>> l + [('P(p1)', 1, 1), ('P(p2)', 2, 2), ('P(p3)', 3, 3)] + + And the containment operator: + + >>> [p in d for p in [p1, p2, p3, p4]] + [True, True, True, False] + + We can add the dict and the referenced objects to a database: + + >>> db = ZODB.tests.util.DB() + + >>> conn1 = db.open() + >>> conn1.root()['p1'] = p1 + >>> conn1.root()['d'] = d + >>> conn1.root()['p2'] = p2 + >>> conn1.root()['p3'] = p3 + >>> transaction.commit() + + And things still work, as before: + + >>> l = [(str(k), d[k], d.get(k)) for k in d] + >>> l.sort() + >>> l + [('P(p1)', 1, 1), ('P(p2)', 2, 2), ('P(p3)', 3, 3)] + >>> [p in d for p in [p1, p2, p3, p4]] + [True, True, True, False] + + Likewise, we can read the objects from another connection and + things still work. + + >>> conn2 = db.open() + >>> d = conn2.root()['d'] + >>> p1 = conn2.root()['p1'] + >>> p2 = conn2.root()['p2'] + >>> p3 = conn2.root()['p3'] + >>> l = [(str(k), d[k], d.get(k)) for k in d] + >>> l.sort() + >>> l + [('P(p1)', 1, 1), ('P(p2)', 2, 2), ('P(p3)', 3, 3)] + >>> [p in d for p in [p1, p2, p3, p4]] + [True, True, True, False] + + Now, we'll delete one of the objects from the database, but *not* + from the dictionary: + + >>> del conn2.root()['p2'] + >>> transaction.commit() + + And pack the database, so that the no-longer referenced p2 is + actually removed from the database. 
+ + >>> ZODB.tests.util.pack(db) + + Now if we access the dictionary in a new connection, it no longer + has p2: + + >>> conn3 = db.open() + >>> d = conn3.root()['d'] + >>> l = [(str(k), d[k], d.get(k)) for k in d] + >>> l.sort() + >>> l + [('P(p1)', 1, 1), ('P(p3)', 3, 3)] + + It's worth nothing that that the versions of the dictionary in + conn1 and conn2 still have p2, because p2 is still in the caches + for those connections. + + Always explicitly close databases: :) + + >>> db.close() + + """ + +def test_PersistentWeakKeyDictionary_get(): + """ + >>> import ZODB.tests.util + >>> from persistent.wref import PersistentWeakKeyDictionary + >>> key = ZODB.tests.util.P("key") + >>> missing = ZODB.tests.util.P("missing") + >>> d = PersistentWeakKeyDictionary([(key, 1)]) + >>> d.get(key) + 1 + >>> d.get(missing) + >>> d.get(missing, 12) + 12 + """ + +def test_suite(): + from doctest import DocTestSuite + return DocTestSuite() diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/tests/testRecover.py b/thesisenv/lib/python3.6/site-packages/ZODB/tests/testRecover.py new file mode 100644 index 0000000..745553e --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/tests/testRecover.py @@ -0,0 +1,228 @@ +############################################################################## +# +# Copyright (c) 2003 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. 
+# +############################################################################## +"""Tests of the file storage recovery script.""" + +import os +import random +import sys +import unittest + +import ZODB +import ZODB.tests.util +from ZODB.FileStorage import FileStorage +from ZODB._compat import decodebytes +import ZODB.fsrecover + +from persistent.mapping import PersistentMapping +import transaction + +try: + import StringIO +except ImportError: + # Py3 + import io as StringIO + +class RecoverTest(ZODB.tests.util.TestCase): + + path = None + + def setUp(self): + ZODB.tests.util.TestCase.setUp(self) + self.path = 'source.fs' + self.storage = FileStorage(self.path) + self.populate() + self.dest = 'dest.fs' + self.recovered = None + + def tearDown(self): + self.storage.close() + if self.recovered is not None: + self.recovered.close() + temp = FileStorage(self.dest) + temp.close() + ZODB.tests.util.TestCase.tearDown(self) + + def populate(self): + db = ZODB.DB(self.storage) + cn = db.open() + rt = cn.root() + + # Create a bunch of objects; the Data.fs is about 100KB. + for i in range(50): + d = rt[i] = PersistentMapping() + transaction.commit() + for j in range(50): + d[j] = "a" * j + transaction.commit() + + def damage(self, num, size): + self.storage.close() + # Drop size null bytes into num random spots. + for i in range(num - 1): + offset = random.randint(0, self.storage._pos - size) + # Note that we open the file as r+, not a+. Seeking a file + # open in append mode is effectively a no-op *depending on + # platform*, as the write may simply append to the file. An + # earlier version of this code opened the file in a+ mode, + # meaning on some platforms it was only writing to the end of the + # file, and so the test cases were always finding that bad data. + # For compatibility with that, we do one write outside the loop + # at the end. 
+ with open(self.path, "r+b") as f: + f.seek(offset) + f.write(b"\0" * size) + + with open(self.path, 'rb') as f: + f.seek(offset) + v = f.read(size) + self.assertEqual(b"\0" * size, v) + + with open(self.path, 'a+b') as f: + f.write(b"\0" * size) + + ITERATIONS = 5 + + # Run recovery, from self.path to self.dest. Return whatever + # recovery printed to stdout, as a string. + def recover(self): + orig_stdout = sys.stdout + faux_stdout = StringIO.StringIO() + try: + sys.stdout = faux_stdout + try: + ZODB.fsrecover.recover(self.path, self.dest, + verbose=0, partial=True, force=False, pack=1) + except SystemExit: + raise RuntimeError("recover tried to exit") + finally: + sys.stdout = orig_stdout + return faux_stdout.getvalue() + + # Caution: because recovery is robust against many kinds of damage, + # it's almost impossible for a call to self.recover() to raise an + # exception. As a result, these tests may pass even if fsrecover.py + # is broken badly. testNoDamage() tries to ensure that at least + # recovery doesn't produce any error msgs if the input .fs is in + # fact not damaged. + def testNoDamage(self): + output = self.recover() + self.assertTrue('error' not in output, output) + self.assertTrue('\n0 bytes removed during recovery' in output, output) + + # Verify that the recovered database is identical to the original. 
+ with open(self.path, 'rb') as before: + before_guts = before.read() + + with open(self.dest, 'rb') as after: + after_guts = after.read() + + self.assertEqual(before_guts, after_guts, + "recovery changed a non-damaged .fs file") + + def testOneBlock(self): + for i in range(self.ITERATIONS): + self.damage(1, 1024) + output = self.recover() + self.assertTrue('error' in output, output) + self.recovered = FileStorage(self.dest) + self.recovered.close() + os.remove(self.path) + os.rename(self.dest, self.path) + + def testFourBlocks(self): + for i in range(self.ITERATIONS): + self.damage(4, 512) + output = self.recover() + self.assertTrue('error' in output, output) + self.recovered = FileStorage(self.dest) + self.recovered.close() + os.remove(self.path) + os.rename(self.dest, self.path) + + def testBigBlock(self): + for i in range(self.ITERATIONS): + self.damage(1, 32 * 1024) + output = self.recover() + self.assertTrue('error' in output, output) + self.recovered = FileStorage(self.dest) + self.recovered.close() + os.remove(self.path) + os.rename(self.dest, self.path) + + def testBadTransaction(self): + # Find transaction headers and blast them. + + L = self.storage.undoLog() + r = L[3] + tid = decodebytes(r["id"] + b"\n") + pos1 = self.storage._txn_find(tid, 0) + + r = L[8] + tid = decodebytes(r["id"] + b"\n") + pos2 = self.storage._txn_find(tid, 0) + + self.storage.close() + + # Overwrite the entire header. + with open(self.path, "a+b") as f: + f.seek(pos1 - 50) + f.write(b"\0" * 100) + output = self.recover() + self.assertTrue('error' in output, output) + self.recovered = FileStorage(self.dest) + self.recovered.close() + os.remove(self.path) + os.rename(self.dest, self.path) + + # Overwrite part of the header. 
+ with open(self.path, "a+b") as f: + f.seek(pos2 + 10) + f.write(b"\0" * 100) + output = self.recover() + self.assertTrue('error' in output, output) + self.recovered = FileStorage(self.dest) + self.recovered.close() + + # Issue 1846: When a transaction had 'c' status (not yet committed), + # the attempt to open a temp file to write the trailing bytes fell + # into an infinite loop. + def testUncommittedAtEnd(self): + # Find a transaction near the end. + L = self.storage.undoLog() + r = L[1] + tid = decodebytes(r["id"] + b"\n") + pos = self.storage._txn_find(tid, 0) + + # Overwrite its status with 'c'. + with open(self.path, "r+b") as f: + f.seek(pos + 16) + current_status = f.read(1) + self.assertEqual(current_status, b' ') + f.seek(pos + 16) + f.write(b'c') + + # Try to recover. The original bug was that this never completed -- + # infinite loop in fsrecover.py. Also, in the ZODB 3.2 line, + # reference to an undefined global masked the infinite loop. + self.recover() + + # Verify the destination got truncated. + self.assertEqual(os.path.getsize(self.dest), pos) + + # Get rid of the temp file holding the truncated bytes. + os.remove(ZODB.fsrecover._trname) + + +def test_suite(): + return unittest.makeSuite(RecoverTest) diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/tests/testSerialize.py b/thesisenv/lib/python3.6/site-packages/ZODB/tests/testSerialize.py new file mode 100644 index 0000000..e7fefac --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/tests/testSerialize.py @@ -0,0 +1,250 @@ +############################################################################## +# +# Copyright (c) 2004 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +import doctest +import sys +import unittest + +from persistent import Persistent +from persistent.wref import WeakRef + +import zope.testing.setupstack + +import ZODB.tests.util +from ZODB import serialize +from ZODB._compat import Pickler, PersistentUnpickler, BytesIO, _protocol, IS_JYTHON + +class PersistentObject(Persistent): + pass + +class ClassWithNewargs(int): + def __new__(cls, value): + return int.__new__(cls, value) + + def __getnewargs__(self): + return int(self), + +class ClassWithoutNewargs(object): + def __init__(self, value): + self.value = value + +def make_pickle(ob): + sio = BytesIO() + p = Pickler(sio, _protocol) + p.dump(ob) + return sio.getvalue() + + +def _factory(conn, module_name, name): + return globals()[name] + +class SerializerTestCase(unittest.TestCase): + + # old format: (module, name), None + old_style_without_newargs = make_pickle( + ((__name__, "ClassWithoutNewargs"), None)) + + # old format: (module, name), argtuple + old_style_with_newargs = make_pickle( + ((__name__, "ClassWithNewargs"), (1,))) + + # new format: klass + new_style_without_newargs = make_pickle( + ClassWithoutNewargs) + + # new format: klass, argtuple + new_style_with_newargs = make_pickle( + (ClassWithNewargs, (1,))) + + def test_getClassName(self): + r = serialize.ObjectReader(factory=_factory) + eq = self.assertEqual + eq(r.getClassName(self.old_style_with_newargs), + __name__ + ".ClassWithNewargs") + eq(r.getClassName(self.new_style_with_newargs), + __name__ + ".ClassWithNewargs") + eq(r.getClassName(self.old_style_without_newargs), + __name__ + ".ClassWithoutNewargs") + eq(r.getClassName(self.new_style_without_newargs), + __name__ + 
".ClassWithoutNewargs") + + def test_getGhost(self): + # Use a TestObjectReader since we need _get_class() to be + # implemented; otherwise this is just a BaseObjectReader. + + class TestObjectReader(serialize.ObjectReader): + # A production object reader would optimize this, but we + # don't need to in a test + def _get_class(self, module, name): + __import__(module) + return getattr(sys.modules[module], name) + + r = TestObjectReader(factory=_factory) + g = r.getGhost(self.old_style_with_newargs) + self.assertTrue(isinstance(g, ClassWithNewargs)) + self.assertEqual(g, 1) + g = r.getGhost(self.old_style_without_newargs) + self.assertTrue(isinstance(g, ClassWithoutNewargs)) + g = r.getGhost(self.new_style_with_newargs) + self.assertTrue(isinstance(g, ClassWithNewargs)) + g = r.getGhost(self.new_style_without_newargs) + self.assertTrue(isinstance(g, ClassWithoutNewargs)) + + def test_myhasattr(self): + + class OldStyle(object): + bar = "bar" + def __getattr__(self, name): + if name == "error": + raise ValueError("whee!") + else: + raise AttributeError(name) + + class NewStyle(object): + bar = "bar" + def _raise(self): + raise ValueError("whee!") + error = property(_raise) + + self.assertRaises(ValueError, + serialize.myhasattr, OldStyle(), "error") + self.assertRaises(ValueError, + serialize.myhasattr, NewStyle(), "error") + self.assertTrue(serialize.myhasattr(OldStyle(), "bar")) + self.assertTrue(serialize.myhasattr(NewStyle(), "bar")) + self.assertTrue(not serialize.myhasattr(OldStyle(), "rat")) + self.assertTrue(not serialize.myhasattr(NewStyle(), "rat")) + + def test_persistent_id_noload(self): + # make sure we can noload weak references and other list-based + # references like we expect. 
Protect explicitly against the + # breakage in CPython 2.7 and zodbpickle < 0.6.0 + o = PersistentObject() + o._p_oid = b'abcd' + + top = PersistentObject() + top._p_oid = b'efgh' + top.ref = WeakRef(o) + + pickle = serialize.ObjectWriter().serialize(top) + # Make sure the persistent id is pickled using the 'C', + # SHORT_BINBYTES opcode: + self.assertTrue(b'C\x04abcd' in pickle) + + refs = [] + u = PersistentUnpickler(None, refs.append, BytesIO(pickle)) + u.noload() + u.noload() + + self.assertEqual(refs, [['w', (b'abcd',)]]) + + def test_protocol_3_binary_handling(self): + from ZODB.serialize import _protocol + self.assertEqual(3, _protocol) # Yeah, whitebox + o = PersistentObject() + o._p_oid = b'o' + o.o = PersistentObject() + o.o._p_oid = b'o.o' + pickle = serialize.ObjectWriter().serialize(o) + + # Make sure the persistent id is pickled using the 'C', + # SHORT_BINBYTES opcode: + self.assertTrue(b'C\x03o.o' in pickle) + +class SerializerFunctestCase(unittest.TestCase): + + def setUp(self): + import tempfile + self._tempdir = tempfile.mkdtemp(suffix='serializerfunc') + + def tearDown(self): + import shutil + shutil.rmtree(self._tempdir) + + def test_funky_datetime_serialization(self): + import os + import subprocess + fqn = os.path.join(self._tempdir, 'Data.fs') + prep_args = [sys.executable, '-c', + 'from ZODB.tests.testSerialize import _functest_prep; ' + '_functest_prep(%s)' % repr(fqn)] + # buildout doesn't arrange for the sys.path to be exported, + # so force it ourselves + environ = os.environ.copy() + if IS_JYTHON: + # Jython 2.7rc2 has a bug; if its Lib directory is + # specifically put on the PYTHONPATH, then it doesn't add + # it itself, which means it fails to 'import site' because + # it can't import '_jythonlib' and the whole process fails + # We would use multiprocessing here, but it doesn't exist on jython + sys_path = [x for x in sys.path + if not x.endswith('Lib') and x != '__classpath__' and x!= '__pyclasspath__/'] + else: + sys_path = 
sys.path + environ['PYTHONPATH'] = os.pathsep.join(sys_path) + subprocess.check_call(prep_args, env=environ) + load_args = [sys.executable, '-c', + 'from ZODB.tests.testSerialize import _functest_load; ' + '_functest_load(%s)' % repr(fqn)] + subprocess.call(load_args, env=environ) + +def _working_failing_datetimes(): + import datetime + WORKING = datetime.datetime(5375, 12, 31, 23, 59, 59) + # Any date after 5375 A.D. appears to trigger this bug. + FAILING = datetime.datetime(5376, 12, 31, 23, 59, 59) + return WORKING, FAILING + +def _functest_prep(fqn): + # Prepare the database with a BTree which won't deserialize + # if the bug is present. + # run in separate process) + import transaction + from BTrees.OOBTree import OOBTree + from ZODB import DB + WORKING, FAILING = _working_failing_datetimes() + db = DB(fqn) + conn = db.open() + try: + root = conn.root() + tree = root['tree'] = OOBTree() + tree[WORKING] = 'working' + tree[FAILING] = 'failing' + transaction.commit() + finally: # Windoze + conn.close() + db.close() + +def _functest_load(fqn): + # Open the database and attempt to deserialize the tree + # (run in separate process) + from ZODB import DB + WORKING, FAILING = _working_failing_datetimes() + db = DB(fqn) + conn = db.open() + try: + root = conn.root() + tree = root['tree'] + assert tree[WORKING] == 'working' + assert tree[FAILING] == 'failing' + finally: # Windoze + conn.close() + db.close() + +def test_suite(): + return unittest.TestSuite(( + unittest.makeSuite(SerializerTestCase), + unittest.makeSuite(SerializerFunctestCase), + doctest.DocTestSuite("ZODB.serialize", + checker=ZODB.tests.util.checker), + )) diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/tests/testUtils.py b/thesisenv/lib/python3.6/site-packages/ZODB/tests/testUtils.py new file mode 100644 index 0000000..852b08d --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/tests/testUtils.py @@ -0,0 +1,161 @@ 
+############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Test the routines to convert between long and 64-bit strings""" +import doctest +import random +import re +import unittest +from persistent import Persistent + +from zope.testing import renormalizing +from ZODB.utils import U64, p64, u64 +from ZODB._compat import loads + + +NUM = 100 + + +checker = renormalizing.RENormalizing([ + # Python 3 bytes add a "b". + (re.compile("b('.*?')"), r"\1"), + # Windows shows result from 'u64' as long? 
+ (re.compile(r"(\d+)L"), r"\1"), + ]) + +class TestUtils(unittest.TestCase): + + small = [random.randrange(1, 1<<32) + for i in range(NUM)] + large = [random.randrange(1<<32, 1<<64) + for i in range(NUM)] + all = small + large + + def test_LongToStringToLong(self): + for num in self.all: + s = p64(num) + n = U64(s) + self.assertEqual(num, n, "U64() failed") + n2 = u64(s) + self.assertEqual(num, n2, "u64() failed") + + def test_KnownConstants(self): + self.assertEqual(b"\000\000\000\000\000\000\000\001", p64(1)) + self.assertEqual(b"\000\000\000\001\000\000\000\000", p64(1<<32)) + self.assertEqual(u64(b"\000\000\000\000\000\000\000\001"), 1) + self.assertEqual(U64(b"\000\000\000\000\000\000\000\001"), 1) + self.assertEqual(u64(b"\000\000\000\001\000\000\000\000"), 1<<32) + self.assertEqual(U64(b"\000\000\000\001\000\000\000\000"), 1<<32) + + def test_PersistentIdHandlesDescriptor(self): + from ZODB.serialize import ObjectWriter + class P(Persistent): + pass + + writer = ObjectWriter(None) + self.assertEqual(writer.persistent_id(P), None) + + # It's hard to know where to put this test. We're checking that the + # ConflictError constructor uses utils.py's get_pickle_metadata() to + # deduce the class path from a pickle, instead of actually loading + # the pickle (and so also trying to import application module and + # class objects, which isn't a good idea on a ZEO server when avoidable). + def test_ConflictErrorDoesntImport(self): + from ZODB.serialize import ObjectWriter + from ZODB.POSException import ConflictError + from ZODB.tests.MinPO import MinPO + + obj = MinPO() + data = ObjectWriter().serialize(obj) + + # The pickle contains a GLOBAL ('c') opcode resolving to MinPO's + # module and class. + self.assertTrue(b'cZODB.tests.MinPO\nMinPO\n' in data) + + # Fiddle the pickle so it points to something "impossible" instead. 
+ data = data.replace(b'cZODB.tests.MinPO\nMinPO\n', + b'cpath.that.does.not.exist\nlikewise.the.class\n') + # Pickle can't resolve that GLOBAL opcode -- gets ImportError. + self.assertRaises(ImportError, loads, data) + + # Verify that building ConflictError doesn't get ImportError. + try: + raise ConflictError(object=obj, data=data) + except ConflictError as detail: + # And verify that the msg names the impossible path. + self.assertTrue( + 'path.that.does.not.exist.likewise.the.class' in str(detail)) + else: + self.fail("expected ConflictError, but no exception raised") + + def test_get_pickle_metadata_w_protocol_0_class_pickle(self): + from ZODB.utils import get_pickle_metadata + from ZODB._compat import dumps + pickle = dumps(ExampleClass, protocol=0) + self.assertEqual(get_pickle_metadata(pickle), + (__name__, ExampleClass.__name__)) + + def test_get_pickle_metadata_w_protocol_1_class_pickle(self): + from ZODB.utils import get_pickle_metadata + from ZODB._compat import dumps + pickle = dumps(ExampleClass, protocol=1) + self.assertEqual(get_pickle_metadata(pickle), + (__name__, ExampleClass.__name__)) + + def test_get_pickle_metadata_w_protocol_2_class_pickle(self): + from ZODB.utils import get_pickle_metadata + from ZODB._compat import dumps + pickle = dumps(ExampleClass, protocol=2) + self.assertEqual(get_pickle_metadata(pickle), + (__name__, ExampleClass.__name__)) + + def test_get_pickle_metadata_w_protocol_3_class_pickle(self): + from ZODB.utils import get_pickle_metadata + from ZODB._compat import dumps + from ZODB._compat import HIGHEST_PROTOCOL + if HIGHEST_PROTOCOL >= 3: + pickle = dumps(ExampleClass, protocol=3) + self.assertEqual(get_pickle_metadata(pickle), + (__name__, ExampleClass.__name__)) + + def test_p64_bad_object(self): + with self.assertRaises(ValueError) as exc: + p64(2 ** 65) + + e = exc.exception + # The args will be whatever the struct.error args were, + # which vary from version to version and across implementations, + # followed by 
the bad value + self.assertEqual(e.args[-1], 2 ** 65) + + def test_u64_bad_object(self): + with self.assertRaises(ValueError) as exc: + u64(b'123456789') + + e = exc.exception + # The args will be whatever the struct.error args were, + # which vary from version to version and across implementations, + # followed by the bad value + self.assertEqual(e.args[-1], b'123456789') + + + +class ExampleClass(object): + pass + +def test_suite(): + suite = unittest.defaultTestLoader.loadTestsFromName(__name__) + suite.addTest( + doctest.DocFileSuite('../utils.txt', checker=checker) + ) + return suite diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/tests/testZODB.py b/thesisenv/lib/python3.6/site-packages/ZODB/tests/testZODB.py new file mode 100644 index 0000000..c5f7949 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/tests/testZODB.py @@ -0,0 +1,642 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. 
+# +############################################################################## +from persistent import Persistent +from persistent.mapping import PersistentMapping +from ZODB.POSException import ReadConflictError +from ZODB.POSException import TransactionFailedError + +import doctest +from BTrees.OOBTree import OOBTree +import transaction +import unittest +import ZODB +import ZODB.FileStorage +import ZODB.MappingStorage +import ZODB.tests.util + +class P(Persistent): + pass + + +class ZODBTests(ZODB.tests.util.TestCase): + + def setUp(self): + ZODB.tests.util.TestCase.setUp(self) + self._storage = ZODB.FileStorage.FileStorage( + 'ZODBTests.fs', create=1) + self._db = ZODB.DB(self._storage) + + def tearDown(self): + self._db.close() + ZODB.tests.util.TestCase.tearDown(self) + + def populate(self): + transaction.begin() + conn = self._db.open() + root = conn.root() + root['test'] = pm = PersistentMapping() + for n in range(100): + pm[n] = PersistentMapping({0: 100 - n}) + transaction.get().note(u'created test data') + transaction.commit() + conn.close() + + def checkExportImport(self, abort_it=False): + self.populate() + conn = self._db.open() + try: + self.duplicate(conn, abort_it) + finally: + conn.close() + conn = self._db.open() + try: + self.verify(conn, abort_it) + finally: + conn.close() + + def duplicate(self, conn, abort_it): + transaction.begin() + transaction.get().note(u'duplication') + root = conn.root() + ob = root['test'] + assert len(ob) > 10, 'Insufficient test data' + try: + import tempfile + with tempfile.TemporaryFile(prefix="DUP") as f: + ob._p_jar.exportFile(ob._p_oid, f) + assert f.tell() > 0, 'Did not export correctly' + f.seek(0) + new_ob = ob._p_jar.importFile(f) + self.assertEqual(new_ob, ob) + root['dup'] = new_ob + if abort_it: + transaction.abort() + else: + transaction.commit() + except: + transaction.abort() + raise + + def verify(self, conn, abort_it): + transaction.begin() + root = conn.root() + ob = root['test'] + try: + ob2 = 
root['dup'] + except KeyError: + if abort_it: + # Passed the test. + return + else: + raise + else: + self.assertTrue(not abort_it, 'Did not abort duplication') + l1 = list(ob.items()) + l1.sort() + l2 = list(ob2.items()) + l2.sort() + l1 = list(map(lambda k_v: (k_v[0], k_v[1][0]), l1)) + l2 = list(map(lambda k_v1: (k_v1[0], k_v1[1][0]), l2)) + self.assertEqual(l1, l2) + self.assertTrue(ob._p_oid != ob2._p_oid) + self.assertEqual(ob._p_jar, ob2._p_jar) + oids = {} + for v in ob.values(): + oids[v._p_oid] = 1 + for v in ob2.values(): + assert v._p_oid not in oids, ( + 'Did not fully separate duplicate from original') + transaction.commit() + + def checkExportImportAborted(self): + self.checkExportImport(abort_it=True) + + def checkResetCache(self): + # The cache size after a reset should be 0. Note that + # _resetCache is not a public API, but the resetCaches() + # function is, and resetCaches() causes _resetCache() to be + # called. + self.populate() + conn = self._db.open() + conn.root() + self.assertTrue(len(conn._cache) > 0) # Precondition + conn._resetCache() + self.assertEqual(len(conn._cache), 0) + + def checkResetCachesAPI(self): + # Checks the resetCaches() API. + # (resetCaches used to be called updateCodeTimestamp.) + self.populate() + conn = self._db.open() + conn.root() + self.assertTrue(len(conn._cache) > 0) # Precondition + ZODB.Connection.resetCaches() + conn.close() + self.assertTrue(len(conn._cache) > 0) # Still not flushed + conn.open() # simulate the connection being reopened + self.assertEqual(len(conn._cache), 0) + + def checkExplicitTransactionManager(self): + # Test of transactions that apply to only the connection, + # not the thread. 
+ tm1 = transaction.TransactionManager() + conn1 = self._db.open(transaction_manager=tm1) + tm2 = transaction.TransactionManager() + conn2 = self._db.open(transaction_manager=tm2) + try: + r1 = conn1.root() + r2 = conn2.root() + if 'item' in r1: + del r1['item'] + tm1.get().commit() + r1.get('item') + r2.get('item') + r1['item'] = 1 + tm1.get().commit() + self.assertEqual(r1['item'], 1) + # r2 has not seen a transaction boundary, + # so it should be unchanged. + self.assertEqual(r2.get('item'), None) + conn2.sync() + # Now r2 is updated. + self.assertEqual(r2['item'], 1) + + # Now, for good measure, send an update in the other direction. + r2['item'] = 2 + tm2.get().commit() + self.assertEqual(r1['item'], 1) + self.assertEqual(r2['item'], 2) + conn1.sync() + conn2.sync() + self.assertEqual(r1['item'], 2) + self.assertEqual(r2['item'], 2) + finally: + conn1.close() + conn2.close() + + def checkSavepointDoesntGetInvalidations(self): + # Prior to ZODB 3.2.9 and 3.4, Connection.tpc_finish() processed + # invalidations even for a subtxn commit. This could make + # inconsistent state visible after a subtxn commit. There was a + # suspicion that POSKeyError was possible as a result, but I wasn't + # able to construct a case where that happened. + # Subtxns are deprecated now, but it's good to check that the + # same kind of thing doesn't happen when making savepoints either. + + # Set up the database, to hold + # root --> "p" -> value = 1 + # --> "q" -> value = 2 + tm1 = transaction.TransactionManager() + conn = self._db.open(transaction_manager=tm1) + r1 = conn.root() + p = P() + p.value = 1 + r1["p"] = p + q = P() + q.value = 2 + r1["q"] = q + tm1.commit() + + # Now txn T1 changes p.value to 3 locally (subtxn commit). + p.value = 3 + tm1.savepoint() + + # Start new txn T2 with a new connection. 
+ tm2 = transaction.TransactionManager() + cn2 = self._db.open(transaction_manager=tm2) + r2 = cn2.root() + p2 = r2["p"] + self.assertEqual(p._p_oid, p2._p_oid) + # T2 shouldn't see T1's change of p.value to 3, because T1 didn't + # commit yet. + self.assertEqual(p2.value, 1) + # Change p.value to 4, and q.value to 5. Neither should be visible + # to T1, because T1 is still in progress. + p2.value = 4 + q2 = r2["q"] + self.assertEqual(q._p_oid, q2._p_oid) + self.assertEqual(q2.value, 2) + q2.value = 5 + tm2.commit() + + # Back to T1. p and q still have the expected values. + rt = conn.root() + self.assertEqual(rt["p"].value, 3) + self.assertEqual(rt["q"].value, 2) + + # Now make another savepoint in T1. This shouldn't change what + # T1 sees for p and q. + rt["r"] = P() + tm1.savepoint() + + # Making that savepoint in T1 should not process invalidations + # from T2's commit. p.value should still be 3 here (because that's + # what T1 savepointed earlier), and q.value should still be 2. + # Prior to ZODB 3.2.9 and 3.4, q.value was 5 here. + rt = conn.root() + try: + self.assertEqual(rt["p"].value, 3) + self.assertEqual(rt["q"].value, 2) + finally: + tm1.abort() + + def checkTxnBeginImpliesAbort(self): + # begin() should do an abort() first, if needed. + cn = self._db.open() + rt = cn.root() + rt['a'] = 1 + + transaction.begin() # should abort adding 'a' to the root + rt = cn.root() + self.assertRaises(KeyError, rt.__getitem__, 'a') + + transaction.begin() + rt = cn.root() + self.assertRaises(KeyError, rt.__getitem__, 'a') + + # One more time. + transaction.begin() + rt = cn.root() + rt['a'] = 3 + + transaction.begin() + rt = cn.root() + self.assertRaises(KeyError, rt.__getitem__, 'a') + self.assertRaises(KeyError, rt.__getitem__, 'b') + + # That used methods of the default transaction *manager*. 
Alas, + # that's not necessarily the same as using methods of the current + # transaction, and, in fact, when this test was written, + # Transaction.begin() didn't do anything (everything from here + # down failed). + # Later (ZODB 3.6): Transaction.begin() no longer exists, so the + # rest of this test was tossed. + + def checkFailingCommitSticks(self): + # See also checkFailingSavepointSticks. + cn = self._db.open() + rt = cn.root() + rt['a'] = 1 + + # Arrange for commit to fail during tpc_vote. + poisoned = PoisonedObject(PoisonedJar(break_tpc_vote=True)) + transaction.get().register(poisoned) + + self.assertRaises(PoisonedError, transaction.get().commit) + # Trying to commit again fails too. + self.assertRaises(TransactionFailedError, transaction.commit) + self.assertRaises(TransactionFailedError, transaction.commit) + self.assertRaises(TransactionFailedError, transaction.commit) + + # The change to rt['a'] is lost. + self.assertRaises(KeyError, rt.__getitem__, 'a') + + # Trying to modify an object also fails, because Transaction.join() + # also raises TransactionFailedError. + self.assertRaises(TransactionFailedError, rt.__setitem__, 'b', 2) + + # Clean up via abort(), and try again. + transaction.abort() + rt['a'] = 1 + transaction.commit() + self.assertEqual(rt['a'], 1) + + # Cleaning up via begin() should also work. + rt['a'] = 2 + transaction.get().register(poisoned) + self.assertRaises(PoisonedError, transaction.commit) + self.assertRaises(TransactionFailedError, transaction.commit) + # The change to rt['a'] is lost. + self.assertEqual(rt['a'], 1) + # Trying to modify an object also fails. + self.assertRaises(TransactionFailedError, rt.__setitem__, 'b', 2) + # Clean up via begin(), and try again. 
+ transaction.begin() + rt['a'] = 2 + transaction.commit() + self.assertEqual(rt['a'], 2) + + cn.close() + + def checkSavepointRollbackAndReadCurrent(self): + ''' + savepoint rollback after readcurrent was called on a new object + should not raise POSKeyError + ''' + cn = self._db.open() + try: + transaction.begin() + root = cn.root() + added_before_savepoint = P() + root['added_before_savepoint'] = added_before_savepoint + sp = transaction.savepoint() + added_before_savepoint.btree = new_btree = OOBTree() + cn.add(new_btree) + new_btree['change_to_trigger_read_current'] = P() + sp.rollback() + transaction.commit() + self.assertTrue('added_before_savepoint' in root) + finally: + transaction.abort() + cn.close() + + def checkFailingSavepointSticks(self): + cn = self._db.open() + rt = cn.root() + rt['a'] = 1 + transaction.savepoint() + self.assertEqual(rt['a'], 1) + + rt['b'] = 2 + + # Make a jar that raises PoisonedError when making a savepoint. + poisoned = PoisonedJar(break_savepoint=True) + transaction.get().join(poisoned) + self.assertRaises(PoisonedError, transaction.savepoint) + # Trying to make a savepoint again fails too. + self.assertRaises(TransactionFailedError, transaction.savepoint) + self.assertRaises(TransactionFailedError, transaction.savepoint) + # Top-level commit also fails. + self.assertRaises(TransactionFailedError, transaction.commit) + + # The changes to rt['a'] and rt['b'] are lost. + self.assertRaises(KeyError, rt.__getitem__, 'a') + self.assertRaises(KeyError, rt.__getitem__, 'b') + + # Trying to modify an object also fails, because Transaction.join() + # also raises TransactionFailedError. + self.assertRaises(TransactionFailedError, rt.__setitem__, 'b', 2) + + # Clean up via abort(), and try again. + transaction.abort() + rt['a'] = 1 + transaction.commit() + self.assertEqual(rt['a'], 1) + + # Cleaning up via begin() should also work. 
+ rt['a'] = 2 + transaction.get().join(poisoned) + self.assertRaises(PoisonedError, transaction.savepoint) + # Trying to make a savepoint again fails too. + self.assertRaises(TransactionFailedError, transaction.savepoint) + + # The change to rt['a'] is lost. + self.assertEqual(rt['a'], 1) + # Trying to modify an object also fails. + self.assertRaises(TransactionFailedError, rt.__setitem__, 'b', 2) + + # Clean up via begin(), and try again. + transaction.begin() + rt['a'] = 2 + transaction.savepoint() + self.assertEqual(rt['a'], 2) + transaction.commit() + + cn2 = self._db.open() + rt = cn.root() + self.assertEqual(rt['a'], 2) + + cn.close() + cn2.close() + + def checkMultipleUndoInOneTransaction(self): + # Verify that it's possible to perform multiple undo + # operations within a transaction. If ZODB performs the undo + # operations in a nondeterministic order, this test will often + # fail. + + conn = self._db.open() + try: + root = conn.root() + + # Add transactions that set root["state"] to (0..5) + for state_num in range(6): + transaction.begin() + root['state'] = state_num + transaction.get().note(u'root["state"] = %d' % state_num) + transaction.commit() + + # Undo all but the first. Note that no work is actually + # performed yet. + transaction.begin() + log = self._db.undoLog() + self._db.undoMultiple([log[i]['id'] for i in range(5)]) + + transaction.get().note(u'undo states 1 through 5') + + # Now attempt all those undo operations. + transaction.commit() + + # Sanity check: we should be back to the first state. + self.assertEqual(root['state'], 0) + finally: + transaction.abort() + conn.close() + +class ReadConflictTests(ZODB.tests.util.TestCase): + + def setUp(self): + ZODB.tests.utils.TestCase.setUp(self) + self._storage = ZODB.MappingStorage.MappingStorage() + + def readConflict(self, shouldFail=True): + # Two transactions run concurrently. Each reads some object, + # then one commits and the other tries to read an object + # modified by the first. 
This read should fail with a conflict + # error because the object state read is not necessarily + # consistent with the objects read earlier in the transaction. + + tm1 = transaction.TransactionManager() + conn = self._db.open(transaction_manager=tm1) + r1 = conn.root() + r1["p"] = self.obj + self.obj.child1 = P() + tm1.get().commit() + + # start a new transaction with a new connection + tm2 = transaction.TransactionManager() + cn2 = self._db.open(transaction_manager=tm2) + # start a new transaction with the other connection + r2 = cn2.root() + + self.assertEqual(r1._p_serial, r2._p_serial) + + self.obj.child2 = P() + tm1.get().commit() + + # resume the transaction using cn2 + obj = r2["p"] + # An attempt to access obj should fail, because r2 was read + # earlier in the transaction and obj was modified by the othe + # transaction. + if shouldFail: + self.assertRaises(ReadConflictError, lambda: obj.child1) + # And since ReadConflictError was raised, attempting to commit + # the transaction should re-raise it. checkNotIndependent() + # failed this part of the test for a long time. + self.assertRaises(ReadConflictError, tm2.get().commit) + + # And since that commit failed, trying to commit again should + # fail again. + self.assertRaises(TransactionFailedError, tm2.get().commit) + # And again. + self.assertRaises(TransactionFailedError, tm2.get().commit) + # Etc. + self.assertRaises(TransactionFailedError, tm2.get().commit) + + else: + # make sure that accessing the object succeeds + obj.child1 + tm2.get().abort() + + + def checkReadConflict(self): + self.obj = P() + self.readConflict() + + def checkReadConflictIgnored(self): + # Test that an application that catches a read conflict and + # continues can not commit the transaction later. 
+ root = self._db.open().root() + root["real_data"] = real_data = PersistentMapping() + root["index"] = index = PersistentMapping() + + real_data["a"] = PersistentMapping({"indexed_value": 0}) + real_data["b"] = PersistentMapping({"indexed_value": 1}) + index[1] = PersistentMapping({"b": 1}) + index[0] = PersistentMapping({"a": 1}) + transaction.commit() + + # load some objects from one connection + tm = transaction.TransactionManager() + cn2 = self._db.open(transaction_manager=tm) + r2 = cn2.root() + real_data2 = r2["real_data"] + index2 = r2["index"] + + real_data["b"]["indexed_value"] = 0 + del index[1]["b"] + index[0]["b"] = 1 + transaction.commit() + + del real_data2["a"] + try: + del index2[0]["a"] + except ReadConflictError: + # This is the crux of the text. Ignore the error. + pass + else: + self.fail("No conflict occurred") + + # real_data2 still ready to commit + self.assertTrue(real_data2._p_changed) + + # index2 values not ready to commit + self.assertTrue(not index2._p_changed) + self.assertTrue(not index2[0]._p_changed) + self.assertTrue(not index2[1]._p_changed) + + self.assertRaises(ReadConflictError, tm.get().commit) + self.assertRaises(TransactionFailedError, tm.get().commit) + tm.get().abort() + + def checkReadConflictErrorClearedDuringAbort(self): + # When a transaction is aborted, the "memory" of which + # objects were the cause of a ReadConflictError during + # that transaction should be cleared. + root = self._db.open().root() + data = PersistentMapping({'d': 1}) + root["data"] = data + transaction.commit() + + # Provoke a ReadConflictError. + tm2 = transaction.TransactionManager() + cn2 = self._db.open(transaction_manager=tm2) + r2 = cn2.root() + data2 = r2["data"] + + data['d'] = 2 + transaction.commit() + + try: + data2['d'] = 3 + except ReadConflictError: + pass + else: + self.fail("No conflict occurred") + + # Explicitly abort cn2's transaction. 
+ tm2.get().abort() + + # cn2 should retain no memory of the read conflict after an abort(), + # but 3.2.3 had a bug wherein it did. + data_conflicts = data._p_jar._conflicts + data2_conflicts = data2._p_jar._conflicts + self.assertFalse(data_conflicts) + self.assertFalse(data2_conflicts) # this used to fail + + # And because of that, we still couldn't commit a change to data2['d'] + # in the new transaction. + cn2.sync() # process the invalidation for data2['d'] + data2['d'] = 3 + tm2.get().commit() # 3.2.3 used to raise ReadConflictError + + cn2.close() + +class PoisonedError(Exception): + pass + +# PoisonedJar arranges to raise PoisonedError from interesting places. +class PoisonedJar(object): + def __init__(self, break_tpc_begin=False, break_tpc_vote=False, + break_savepoint=False): + self.break_tpc_begin = break_tpc_begin + self.break_tpc_vote = break_tpc_vote + self.break_savepoint = break_savepoint + + def sortKey(self): + return str(id(self)) + + def tpc_begin(self, *args): + if self.break_tpc_begin: + raise PoisonedError("tpc_begin fails") + + # A way to poison a top-level commit. + def tpc_vote(self, *args): + if self.break_tpc_vote: + raise PoisonedError("tpc_vote fails") + + # A way to poison a savepoint -- also a way to poison a subtxn commit. 
+ def savepoint(self): + if self.break_savepoint: + raise PoisonedError("savepoint fails") + + def commit(*args): + pass + + def abort(*self): + pass + + +class PoisonedObject(object): + def __init__(self, poisonedjar): + self._p_jar = poisonedjar + +def test_suite(): + return unittest.TestSuite(( + unittest.makeSuite(ZODBTests, 'check'), + )) + +if __name__ == "__main__": + unittest.main(defaultTest="test_suite") diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/tests/test_TransactionMetaData.py b/thesisenv/lib/python3.6/site-packages/ZODB/tests/test_TransactionMetaData.py new file mode 100644 index 0000000..43405b2 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/tests/test_TransactionMetaData.py @@ -0,0 +1,117 @@ +############################################################################## +# +# Copyright (c) Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.0 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. 
+# +############################################################################## +import unittest +import warnings + +from .._compat import dumps, loads, _protocol +from ..Connection import TransactionMetaData + +class TransactionMetaDataTests(unittest.TestCase): + + def test_basic(self): + t = TransactionMetaData( + u'user\x80', u'description\x80', dict(foo='FOO')) + self.assertEqual(t.user, b'user\xc2\x80') + self.assertEqual(t.description, b'description\xc2\x80') + self.assertEqual(t.extension, dict(foo='FOO')) + with warnings.catch_warnings(record=True) as w: + warnings.simplefilter("always") + self.assertEqual(t._extension, t.extension) + self.assertEqual(len(w), 1) + self.assertTrue(issubclass(w[-1].category, DeprecationWarning)) + self.assertTrue("_extension is deprecated" in str(w[-1].message)) + + def test_basic_no_encoding(self): + t = TransactionMetaData( + b'user', b'description', dumps(dict(foo='FOO'), _protocol)) + self.assertEqual(t.user, b'user') + self.assertEqual(t.description, b'description') + self.assertEqual(t.extension, dict(foo='FOO')) + with warnings.catch_warnings(): + warnings.simplefilter("ignore") + self.assertEqual(t._extension, t.extension) + + def test_constructor_default_args(self): + t = TransactionMetaData() + self.assertEqual(t.user, b'') + self.assertEqual(t.description, b'') + self.assertEqual(t.extension, {}) + with warnings.catch_warnings(): + warnings.simplefilter("ignore") + self.assertEqual(t._extension, t.extension) + + def test_set_extension(self): + t = TransactionMetaData(u'', u'', b'') + self.assertEqual(t.user, b'') + self.assertEqual(t.description, b'') + self.assertEqual(t.extension, {}) + with warnings.catch_warnings(): + warnings.simplefilter("ignore") + self.assertEqual(t._extension, t.extension) + + for name in 'extension', '_extension': + data = {name: name + 'foo'} + setattr(t, name, data) + self.assertEqual(t.extension, data) + self.assertEqual(t._extension, t.extension) + data = {} + setattr(t, name, 
data) + self.assertEqual(t.extension, data) + self.assertEqual(t._extension, t.extension) + + def test_used_by_connection(self): + import ZODB + from ZODB.MappingStorage import MappingStorage + + class Storage(MappingStorage): + def tpc_begin(self, transaction): + self.test_transaction = transaction + return MappingStorage.tpc_begin(self, transaction) + + storage = Storage() + conn = ZODB.connection(storage) + with conn.transaction_manager as t: + t.user = u'user\x80' + t.description = u'description\x80' + t.setExtendedInfo('foo', 'FOO') + conn.root.x = 1 + + t = storage.test_transaction + self.assertEqual(t.__class__, TransactionMetaData) + self.assertEqual(t.user, b'user\xc2\x80') + self.assertEqual(t.description, b'description\xc2\x80') + self.assertEqual(t.extension, dict(foo='FOO')) + + def test_data(self): + t = TransactionMetaData() + + # Can't get data that wasn't set: + with self.assertRaises(KeyError) as c: + t.data(self) + self.assertEqual(c.exception.args, (self,)) + + data = dict(a=1) + t.set_data(self, data) + self.assertEqual(t.data(self), data) + + # Can't get something we haven't stored. + with self.assertRaises(KeyError) as c: + t.data(data) + self.assertEqual(c.exception.args, (data,)) + +def test_suite(): + return unittest.makeSuite(TransactionMetaDataTests) + +if __name__ == '__main__': + unittest.main(defaultTest='test_suite') diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/tests/test_cache.py b/thesisenv/lib/python3.6/site-packages/ZODB/tests/test_cache.py new file mode 100644 index 0000000..22799fe --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/tests/test_cache.py @@ -0,0 +1,242 @@ +############################################################################## +# +# Copyright (c) 2004 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Test behavior of Connection plus cPickleCache.""" +from persistent import Persistent +from ZODB.config import databaseFromString +import transaction +import doctest + +class RecalcitrantObject(Persistent): + """A Persistent object that will not become a ghost.""" + + deactivations = 0 + + def _p_deactivate(self): + self.__class__.deactivations += 1 + + def init(cls): + cls.deactivations = 0 + + init = classmethod(init) + +class RegularObject(Persistent): + + deactivations = 0 + invalidations = 0 + + def _p_deactivate(self): + self.__class__.deactivations += 1 + super(RegularObject, self)._p_deactivate() + + def _p_invalidate(self): + self.__class__.invalidations += 1 + super(RegularObject, self)._p_invalidate() + + def init(cls): + cls.deactivations = 0 + cls.invalidations = 0 + + init = classmethod(init) + +class PersistentObject(Persistent): + pass + +class CacheTests(object): + + def test_cache(self): + r"""Test basic cache methods. + + Let's start with a clean transaction + + >>> transaction.abort() + + >>> RegularObject.init() + >>> db = databaseFromString("\n" + ... "cache-size 4\n" + ... "\n" + ... "") + >>> cn = db.open() + >>> r = cn.root() + >>> L = [] + >>> for i in range(5): + ... o = RegularObject() + ... L.append(o) + ... r[i] = o + >>> transaction.commit() + + After committing a transaction and calling cacheGC(), there + should be cache-size (4) objects in the cache. One of the + RegularObjects was deactivated. + + >>> cn._cache.ringlen() + 4 + >>> RegularObject.deactivations + 1 + + If we explicitly activate the objects again, the ringlen + should go back up to 5. + + >>> for o in L: + ... 
o._p_activate() + >>> cn._cache.ringlen() + 5 + + >>> cn.cacheGC() + >>> cn._cache.ringlen() + 4 + >>> RegularObject.deactivations + 2 + + >>> cn.cacheMinimize() + >>> cn._cache.ringlen() + 0 + >>> RegularObject.deactivations + 6 + + If we activate all the objects again and mark one as modified, + then the one object should not be deactivated even by a + minimize. + + >>> for o in L: + ... o._p_activate() + >>> o.attr = 1 + >>> cn._cache.ringlen() + 5 + >>> cn.cacheMinimize() + >>> cn._cache.ringlen() + 1 + >>> RegularObject.deactivations + 10 + + Clean up + + >>> transaction.abort() + + """ + + def test_cache_gc_recalcitrant(self): + r"""Test that a cacheGC() call will return. + + It's possible for a particular object to ignore the + _p_deactivate() call. We want to check several things in this + case. The cache should called the real _p_deactivate() method + not the one provided by Persistent. The cacheGC() call should + also return when it's looked at each item, regardless of whether + it became a ghost. + + >>> RecalcitrantObject.init() + >>> db = databaseFromString("\n" + ... "cache-size 4\n" + ... "\n" + ... "") + >>> cn = db.open() + >>> r = cn.root() + >>> L = [] + >>> for i in range(5): + ... o = RecalcitrantObject() + ... L.append(o) + ... r[i] = o + >>> transaction.commit() + >>> [o._p_state for o in L] + [0, 0, 0, 0, 0] + + The Connection calls cacheGC() after it commits a transaction. + Since the cache will now have more objects that it's target size, + it will call _p_deactivate() on each RecalcitrantObject. + + >>> RecalcitrantObject.deactivations + 5 + >>> [o._p_state for o in L] + [0, 0, 0, 0, 0] + + An explicit call to cacheGC() has the same effect. + + >>> cn.cacheGC() + >>> RecalcitrantObject.deactivations + 10 + >>> [o._p_state for o in L] + [0, 0, 0, 0, 0] + """ + + def test_cache_on_abort(self): + r"""Test that the cache handles transaction abort correctly. + + >>> RegularObject.init() + >>> db = databaseFromString("\n" + ... 
"cache-size 4\n" + ... "\n" + ... "") + >>> cn = db.open() + >>> r = cn.root() + >>> L = [] + >>> for i in range(5): + ... o = RegularObject() + ... L.append(o) + ... r[i] = o + >>> transaction.commit() + >>> RegularObject.deactivations + 1 + + Modify three of the objects and verify that they are + deactivated when the transaction aborts. + + >>> for i in range(0, 5, 2): + ... L[i].attr = i + >>> [L[i]._p_state for i in range(0, 5, 2)] + [1, 1, 1] + >>> cn._cache.ringlen() + 5 + + >>> transaction.abort() + >>> len(cn._cache) + 6 + >>> cn._cache.cache_non_ghost_count + 2 + >>> cn._cache.ringlen() + 2 + >>> RegularObject.deactivations + 4 + """ + def test_gc_on_open_connections(self): + r"""Test that automatic GC is not applied to open connections. + + This test (and the corresponding fix) was introduced because of bug + report 113923. + + We start with a persistent object and add a list attribute:: + + >>> db = databaseFromString("\n" + ... "cache-size 0\n" + ... "\n" + ... "") + >>> cn1 = db.open() + >>> r = cn1.root() + >>> r['ob'] = PersistentObject() + >>> r['ob'].l = [] + >>> transaction.commit() + + Now, let's modify the object in a way that doesn't get noticed. Then, + we open another connection which triggers automatic garbage + connection. After that, the object should not have been ghostified:: + + >>> r['ob'].l.append(1) + >>> cn2 = db.open() + >>> r['ob'].l + [1] + + """ + + +def test_suite(): + return doctest.DocTestSuite() diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/tests/test_datamanageradapter.py b/thesisenv/lib/python3.6/site-packages/ZODB/tests/test_datamanageradapter.py new file mode 100644 index 0000000..cfeb40b --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/tests/test_datamanageradapter.py @@ -0,0 +1,242 @@ +############################################################################## +# +# Copyright (c) 2003 Zope Foundation and Contributors. +# All Rights Reserved. 
+# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +import unittest +from doctest import DocTestSuite +from transaction._transaction import DataManagerAdapter +from ZODB.tests.sampledm import DataManager + +def test_normal_commit(): + """ + So, we have a data manager: + + >>> dm = DataManager() + + and we do some work that modifies uncommited state: + + >>> dm.inc() + >>> dm.state, dm.delta + (0, 1) + + Now we'll commit the changes. When the data manager joins a transaction, + the transaction will create an adapter. + + >>> dma = DataManagerAdapter(dm) + + and register it as a modified object. At commit time, the + transaction will get the "jar" like this: + + >>> jar = getattr(dma, '_p_jar', dma) + + and, of course, the jar and the adapter will be the same: + + >>> jar is dma + True + + The transaction will call tpc_begin: + + >>> t1 = '1' + >>> jar.tpc_begin(t1) + + Then the transaction will call commit on the jar: + + >>> jar.commit(t1) + + This doesn't actually do anything. 
:) + + >>> dm.state, dm.delta + (0, 1) + + The transaction will then call tpc_vote: + + >>> jar.tpc_vote(t1) + + This prepares the data manager: + + >>> dm.state, dm.delta + (1, 1) + >>> dm.prepared + True + + Finally, tpc_finish is called: + + >>> jar.tpc_finish(t1) + + and the data manager finishes the two-phase commit: + + >>> dm.state, dm.delta + (1, 0) + >>> dm.prepared + False + """ + +def test_abort(): + """ + So, we have a data manager: + + >>> dm = DataManager() + + and we do some work that modifies uncommited state: + + >>> dm.inc() + >>> dm.state, dm.delta + (0, 1) + + When the data manager joins a transaction, + the transaction will create an adapter. + + >>> dma = DataManagerAdapter(dm) + + and register it as a modified object. + + Now we'll abort the transaction. The transaction will get the + "jar" like this: + + >>> jar = getattr(dma, '_p_jar', dma) + + and, of course, the jar and the adapter will be the same: + + >>> jar is dma + True + + Then the transaction will call abort on the jar: + + >>> t1 = '1' + >>> jar.abort(t1) + + Which aborts the changes in the data manager: + + >>> dm.state, dm.delta + (0, 0) + """ + +def test_tpc_abort_phase1(): + """ + So, we have a data manager: + + >>> dm = DataManager() + + and we do some work that modifies uncommited state: + + >>> dm.inc() + >>> dm.state, dm.delta + (0, 1) + + Now we'll commit the changes. When the data manager joins a transaction, + the transaction will create an adapter. + + >>> dma = DataManagerAdapter(dm) + + and register it as a modified object. At commit time, the + transaction will get the "jar" like this: + + >>> jar = getattr(dma, '_p_jar', dma) + + and, of course, the jar and the adapter will be the same: + + >>> jar is dma + True + + The transaction will call tpc_begin: + + >>> t1 = '1' + >>> jar.tpc_begin(t1) + + Then the transaction will call commit on the jar: + + >>> jar.commit(t1) + + This doesn't actually do anything. 
:) + + >>> dm.state, dm.delta + (0, 1) + + At this point, the transaction decides to abort. It calls tpc_abort: + + >>> jar.tpc_abort(t1) + + Which causes the state of the data manager to be restored: + + >>> dm.state, dm.delta + (0, 0) + """ + +def test_tpc_abort_phase2(): + """ + So, we have a data manager: + + >>> dm = DataManager() + + and we do some work that modifies uncommited state: + + >>> dm.inc() + >>> dm.state, dm.delta + (0, 1) + + Now we'll commit the changes. When the data manager joins a transaction, + the transaction will create an adapter. + + >>> dma = DataManagerAdapter(dm) + + and register it as a modified object. At commit time, the + transaction will get the "jar" like this: + + >>> jar = getattr(dma, '_p_jar', dma) + + and, of course, the jar and the adapter will be the same: + + >>> jar is dma + True + + The transaction will call tpc_begin: + + >>> t1 = '1' + >>> jar.tpc_begin(t1) + + Then the transaction will call commit on the jar: + + >>> jar.commit(t1) + + This doesn't actually do anything. :) + + >>> dm.state, dm.delta + (0, 1) + + The transaction calls vote: + + >>> jar.tpc_vote(t1) + + This prepares the data manager: + + >>> dm.state, dm.delta + (1, 1) + >>> dm.prepared + True + + At this point, the transaction decides to abort. It calls tpc_abort: + + >>> jar.tpc_abort(t1) + + Which causes the state of the data manager to be restored: + + >>> dm.state, dm.delta + (0, 0) + >>> dm.prepared + False + """ + +def test_suite(): + return DocTestSuite() + +if __name__ == '__main__': + unittest.main() diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/tests/test_doctest_files.py b/thesisenv/lib/python3.6/site-packages/ZODB/tests/test_doctest_files.py new file mode 100644 index 0000000..3746ea2 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/tests/test_doctest_files.py @@ -0,0 +1,54 @@ +############################################################################## +# +# Copyright (c) 2004 Zope Foundation and Contributors. 
+# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +import doctest +import unittest + +__test__ = dict( + cross_db_refs_to_blank_db_name = """ + + There was a bug that caused bad refs to be generated is a database + name was blank. + + >>> import ZODB.tests.util, persistent.mapping, transaction + >>> dbs = {} + >>> db1 = ZODB.tests.util.DB(database_name='', databases=dbs) + >>> db2 = ZODB.tests.util.DB(database_name='2', databases=dbs) + >>> conn1 = db1.open() + >>> conn2 = conn1.get_connection('2') + >>> for i in range(10): + ... conn1.root()[i] = persistent.mapping.PersistentMapping() + ... transaction.commit() + >>> conn2.root()[0] = conn1.root()[9] + >>> transaction.commit() + >>> conn2.root()._p_deactivate() + >>> conn2.root()[0] is conn1.root()[9] + True + + >>> list(conn2.root()[0].keys()) + [] + + >>> db2.close() + >>> db1.close() + """, + ) + + +def test_suite(): + suite = unittest.TestSuite() + suite.addTest(doctest.DocFileSuite("dbopen.txt", + "multidb.txt", + "synchronizers.txt", + )) + suite.addTest(doctest.DocTestSuite()) + return suite diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/tests/test_fsdump.py b/thesisenv/lib/python3.6/site-packages/ZODB/tests/test_fsdump.py new file mode 100644 index 0000000..4962b86 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/tests/test_fsdump.py @@ -0,0 +1,92 @@ +############################################################################## +# +# Copyright (c) 2005 Zope Foundation and Contributors. +# All Rights Reserved. 
+# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +r""" +fsdump test +=========== + +Let's get a path to work with first. + +>>> path = 'Data.fs' + +More imports. + +>>> import ZODB +>>> from ZODB.FileStorage import FileStorage +>>> import transaction as txn +>>> from BTrees.OOBTree import OOBTree +>>> from ZODB.FileStorage.fsdump import fsdump # we're testing this + +Create an empty FileStorage. + +>>> st = FileStorage(path) + +For empty DB fsdump() output definitely empty: + +>>> fsdump(path) + +Create a root object and try again: + +>>> db = ZODB.DB(st) # yes, that creates a root object! +>>> fsdump(path) #doctest: +ELLIPSIS +Trans #00000 tid=... time=... offset= + status=' ' user='' description='initial database creation' + data #00000 oid=0000000000000000 size= class=persistent.mapping.PersistentMapping + +Now we see first transaction with root object. + +Let's add a BTree: + +>>> root = db.open().root() +>>> root['tree'] = OOBTree() +>>> txn.get().note(u'added an OOBTree') +>>> txn.get().commit() +>>> fsdump(path) #doctest: +ELLIPSIS +Trans #00000 tid=... time=... offset= + status=' ' user='' description='initial database creation' + data #00000 oid=0000000000000000 size= class=persistent.mapping.PersistentMapping +Trans #00001 tid=... time=... offset= + status=' ' user='' description='added an OOBTree' + data #00000 oid=0000000000000000 size= class=persistent.mapping.PersistentMapping + data #00001 oid=0000000000000001 size= class=BTrees.OOBTree.OOBTree... + +Now we see two transactions and two changed objects. + +Clean up. 
+ +>>> db.close() +""" + +import re +import doctest +import zope.testing.setupstack +import ZODB.tests.util +from zope.testing import renormalizing + +checker = renormalizing.RENormalizing([ + # Normalizing this makes diffs easier to read + (re.compile(r'\btid=[0-9a-f]+\b'), 'tid=...'), + (re.compile(r'\b\d\d\d\d-\d\d-\d\d \d\d:\d\d:\d\d\.\d+\b'), '...'), + # Python 3 produces larger pickles, even when we use zodbpickle :( + # this changes all the offsets and sizes + (re.compile(r'\bsize=[0-9]+\b'), 'size='), + (re.compile(r'\boffset=[0-9]+\b'), 'offset='), +]) + + +def test_suite(): + return doctest.DocTestSuite( + setUp=zope.testing.setupstack.setUpDirectory, + tearDown=ZODB.tests.util.tearDown, + optionflags=doctest.REPORT_NDIFF, + checker=ZODB.tests.util.checker + checker) diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/tests/test_mvccadapter.py b/thesisenv/lib/python3.6/site-packages/ZODB/tests/test_mvccadapter.py new file mode 100644 index 0000000..9b1f28c --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/tests/test_mvccadapter.py @@ -0,0 +1,60 @@ +############################################################################## +# +# Copyright (c) 2017 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. 
+# +############################################################################## + +import unittest + +from ZODB import mvccadapter + + +class TestBase(unittest.TestCase): + + def test_getattr_does_not_hide_exceptions(self): + class TheException(Exception): + pass + + class RaisesOnAccess(object): + + @property + def thing(self): + raise TheException() + + base = mvccadapter.Base(RaisesOnAccess()) + base._copy_methods = ('thing',) + + with self.assertRaises(TheException): + getattr(base, 'thing') + + def test_getattr_raises_if_missing(self): + base = mvccadapter.Base(self) + base._copy_methods = ('thing',) + + with self.assertRaises(AttributeError): + getattr(base, 'thing') + + +class TestHistoricalStorageAdapter(unittest.TestCase): + + def test_forwards_release(self): + class Base(object): + released = False + + def release(self): + self.released = True + + base = Base() + adapter = mvccadapter.HistoricalStorageAdapter(base, None) + + adapter.release() + + self.assertTrue(base.released) diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/tests/test_prefetch.py b/thesisenv/lib/python3.6/site-packages/ZODB/tests/test_prefetch.py new file mode 100644 index 0000000..161f17c --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/tests/test_prefetch.py @@ -0,0 +1,59 @@ +import unittest + +from ZODB.utils import z64, u64 +import ZODB + +from .MVCCMappingStorage import MVCCMappingStorage + +class PrefetchTests(unittest.TestCase): + + def test_prefetch(self): + db = ZODB.DB(None) + + fetched = [] + def prefetch(oids, tid): + fetched.append((list(map(u64, oids)), tid)) + + db.storage.prefetch = prefetch + + with db.transaction() as conn: + for i in range(10): + conn.root()[i] = conn.root().__class__() + + conn = db.open() + conn.prefetch(z64) + conn.prefetch([z64]) + conn.prefetch(conn.root()) + + conn.prefetch(z64, (conn.root()[i] for i in range(3)), conn.root()[3]) + + self.assertEqual(fetched, + [([0], conn._storage._start), + ([0], conn._storage._start), + 
([0], conn._storage._start), + ([0, 1, 2, 3, 4], conn._storage._start), + ]) + + db.close() + + def test_prefetch_optional(self): + conn = ZODB.connection(None) + conn.prefetch(z64) + conn.prefetch([z64]) + conn.prefetch(conn.root()) + conn.prefetch(z64, [z64]) + conn.prefetch(z64, [z64], conn.root()) + conn.close() + + def test_prefetch_optional_imvcc(self): + conn = ZODB.connection(MVCCMappingStorage()) + conn.prefetch(z64) + conn.prefetch([z64]) + conn.prefetch(conn.root()) + conn.prefetch(z64, [z64]) + conn.prefetch(z64, [z64], conn.root()) + conn.close() + + +def test_suite(): + return unittest.makeSuite(PrefetchTests) diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/tests/test_storage.py b/thesisenv/lib/python3.6/site-packages/ZODB/tests/test_storage.py new file mode 100644 index 0000000..6cb47cc --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/tests/test_storage.py @@ -0,0 +1,154 @@ +############################################################################## +# +# Copyright (c) 2004 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## +"""A storage used for unittests. + +The primary purpose of this module is to have a minimal multi-version +storage to use for unit tests. MappingStorage isn't sufficient. +Since even a minimal storage has some complexity, we run standard +storage tests against the test storage. 
+""" +import bisect +import unittest + +from ZODB.BaseStorage import BaseStorage +from ZODB import POSException +from ZODB.utils import z64 + +from ZODB.tests import StorageTestBase +from ZODB.tests import BasicStorage, MTStorage, Synchronization +from ZODB.tests import RevisionStorage + +class Transaction(object): + """Hold data for current transaction for MinimalMemoryStorage.""" + + def __init__(self, tid): + self.index = {} + self.tid = tid + + def store(self, oid, data): + self.index[(oid, self.tid)] = data + + def cur(self): + return dict.fromkeys([oid for oid, tid in self.index.keys()], self.tid) + +class MinimalMemoryStorage(BaseStorage, object): + """Simple in-memory storage that supports revisions. + + This storage is needed to test multi-version concurrency control. + It is similar to MappingStorage, but keeps multiple revisions. It + does not support versions. It doesn't implement operations like + pack(), because they aren't necessary for testing. + """ + + def __init__(self): + super(MinimalMemoryStorage, self).__init__("name") + # _index maps oid, tid pairs to data records + self._index = {} + # _cur maps oid to current tid + self._cur = {} + + self._ltid = z64 + + def isCurrent(self, oid, serial): + return serial == self._cur[oid] + + def hook(self, oid, tid, version): + # A hook for testing + pass + + def __len__(self): + return len(self._index) + + def _clear_temp(self): + pass + + def load(self, oid, version=''): + assert version == '' + with self._lock: + assert not version + tid = self._cur[oid] + self.hook(oid, tid, '') + return self._index[(oid, tid)], tid + + def _begin(self, tid, u, d, e): + self._txn = Transaction(tid) + + def store(self, oid, serial, data, v, txn): + if txn is not self._transaction: + raise POSException.StorageTransactionError(self, txn) + assert not v + if self._cur.get(oid) != serial: + if not (serial is None or self._cur.get(oid) in [None, z64]): + raise POSException.ConflictError( + oid=oid, 
serials=(self._cur.get(oid), serial), data=data) + self._txn.store(oid, data) + return self._tid + + def _abort(self): + del self._txn + + def _finish(self, tid, u, d, e): + with self._lock: + self._index.update(self._txn.index) + self._cur.update(self._txn.cur()) + self._ltid = self._tid + + def loadBefore(self, the_oid, the_tid): + # It's okay if loadBefore() is really expensive, because this + # storage is just used for testing. + with self._lock: + tids = [tid for oid, tid in self._index if oid == the_oid] + if not tids: + raise KeyError(the_oid) + tids.sort() + i = bisect.bisect_left(tids, the_tid) - 1 + if i == -1: + return None + tid = tids[i] + j = i + 1 + if j == len(tids): + end_tid = None + else: + end_tid = tids[j] + + self.hook(the_oid, self._cur[the_oid], '') + + return self._index[(the_oid, tid)], tid, end_tid + + def loadSerial(self, oid, serial): + return self._index[(oid, serial)] + + def close(self): + pass + + cleanup = close + +class MinimalTestSuite(StorageTestBase.StorageTestBase, + BasicStorage.BasicStorage, + MTStorage.MTStorage, + Synchronization.SynchronizedStorage, + RevisionStorage.RevisionStorage, + ): + + def setUp(self): + StorageTestBase.StorageTestBase.setUp(self) + self._storage = MinimalMemoryStorage() + + # we don't implement undo + + def checkLoadBeforeUndo(self): + pass + +def test_suite(): + return unittest.makeSuite(MinimalTestSuite, "check") diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/tests/testblob.py b/thesisenv/lib/python3.6/site-packages/ZODB/tests/testblob.py new file mode 100644 index 0000000..1dfce34 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/tests/testblob.py @@ -0,0 +1,846 @@ +############################################################################## +# +# Copyright (c) 2004 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). 
A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +from ZODB.blob import Blob +from ZODB.blob import BushyLayout +from ZODB.DB import DB +from ZODB.FileStorage import FileStorage +from ZODB.tests.testConfig import ConfigTestBase +from ZODB._compat import Pickler, Unpickler, _protocol + +import doctest + +import os +import random +import re +import struct +import sys +import time +import transaction +import unittest +import ZConfig +import ZODB.blob +import ZODB.interfaces +import ZODB.tests.IteratorStorage +import ZODB.tests.StorageTestBase +import ZODB.tests.util +import zope.testing.renormalizing + +from io import BytesIO + +try: + file_type = file +except NameError: + # Py3: Python 3 does not have a file type. + import io + file_type = io.BufferedReader + +from . import util + +def new_time(): + """Create a _new_ time stamp. + + This method also makes sure that after retrieving a timestamp that was + *before* a transaction was committed, that at least one second passes so + the packing time actually is before the commit time. 
+ + """ + now = new_time = time.time() + while new_time <= now: + new_time = time.time() + if time.time() - new_time < 1.0: + # Detect if we're in a time monotonically increasing + # layer (two back-to-back calls of time.time() advance the clock + # by a whole second); if so, we don't need to sleep + time.sleep(1.0) + return new_time + + +class ZODBBlobConfigTest(ConfigTestBase): + + def test_map_config1(self): + self._test( + """ + + + blob-dir blobs + + + + """) + + def test_file_config1(self): + self._test( + """ + + + blob-dir blobs + + path Data.fs + + + + """) + + def test_blob_dir_needed(self): + self.assertRaises(ZConfig.ConfigurationSyntaxError, + self._test, + """ + + + + + + """) + + +class BlobCloneTests(ZODB.tests.util.TestCase): + + def testDeepCopyCanInvalidate(self): + """ + Tests regression for invalidation problems related to missing + readers and writers values in cloned objects (see + http://mail.zope.org/pipermail/zodb-dev/2008-August/012054.html) + """ + import ZODB.MappingStorage + database = DB(ZODB.blob.BlobStorage( + 'blobs', ZODB.MappingStorage.MappingStorage())) + connection = database.open() + root = connection.root() + transaction.begin() + root['blob'] = Blob() + transaction.commit() + + stream = BytesIO() + p = Pickler(stream, _protocol) + p.dump(root['blob']) + u = Unpickler(stream) + stream.seek(0) + clone = u.load() + clone._p_invalidate() + + # it should also be possible to open the cloned blob + # (even though it won't contain the original data) + clone.open().close() + + # tearDown + database.close() + + +class BushyLayoutTests(ZODB.tests.util.TestCase): + + def testBushyLayoutOIDToPathUnicode(self): + "OID-to-path should produce valid results given non-ASCII byte strings" + non_ascii_oid = b'>\xf1<0\xe9Q\x99\xf0' + # The argument should already be bytes; + # os.path.sep is native string type under both 2 and 3 + # binascii.hexlify takes bytes and produces bytes under both py2 and py3 + # the result should be the native string 
type + oid_as_path = BushyLayout().oid_to_path(non_ascii_oid) + self.assertEqual( + oid_as_path, + os.path.sep.join( + '0x3e/0xf1/0x3c/0x30/0xe9/0x51/0x99/0xf0'.split('/'))) + + # the reverse holds true as well + path_as_oid = BushyLayout().path_to_oid(oid_as_path) + self.assertEqual( + path_as_oid, + non_ascii_oid ) + + +class BlobTestBase(ZODB.tests.StorageTestBase.StorageTestBase): + + def setUp(self): + ZODB.tests.StorageTestBase.StorageTestBase.setUp(self) + self._storage = self.create_storage() + + +class BlobUndoTests(BlobTestBase): + + def testUndoWithoutPreviousVersion(self): + database = DB(self._storage) + connection = database.open() + root = connection.root() + transaction.begin() + root['blob'] = Blob() + transaction.commit() + + database.undo(database.undoLog(0, 1)[0]['id']) + transaction.commit() + + # the blob footprint object should exist no longer + self.assertRaises(KeyError, root.__getitem__, 'blob') + database.close() + + def testUndo(self): + database = DB(self._storage) + connection = database.open() + root = connection.root() + transaction.begin() + blob = Blob() + with blob.open('w') as file: + file.write(b'this is state 1') + root['blob'] = blob + transaction.commit() + + transaction.begin() + blob = root['blob'] + with blob.open('w') as file: + file.write(b'this is state 2') + transaction.commit() + + + database.undo(database.undoLog(0, 1)[0]['id']) + transaction.commit() + with blob.open('r') as file: + self.assertEqual(file.read(), b'this is state 1') + + database.close() + + def testUndoAfterConsumption(self): + database = DB(self._storage) + connection = database.open() + root = connection.root() + transaction.begin() + with open('consume1', 'wb') as file: + file.write(b'this is state 1') + blob = Blob() + blob.consumeFile('consume1') + root['blob'] = blob + transaction.commit() + + transaction.begin() + blob = root['blob'] + with open('consume2', 'wb') as file: + file.write(b'this is state 2') + blob.consumeFile('consume2') + 
transaction.commit() + + database.undo(database.undoLog(0, 1)[0]['id']) + transaction.commit() + + with blob.open('r') as file: + self.assertEqual(file.read(), b'this is state 1') + + database.close() + + def testRedo(self): + database = DB(self._storage) + connection = database.open() + root = connection.root() + blob = Blob() + + transaction.begin() + with blob.open('w') as file: + file.write(b'this is state 1') + root['blob'] = blob + transaction.commit() + + transaction.begin() + blob = root['blob'] + with blob.open('w') as file: + file.write(b'this is state 2') + transaction.commit() + + database.undo(database.undoLog(0, 1)[0]['id']) + transaction.commit() + + with blob.open('r') as file: + self.assertEqual(file.read(), b'this is state 1') + + database.undo(database.undoLog(0, 1)[0]['id']) + transaction.commit() + + with blob.open('r') as file: + self.assertEqual(file.read(), b'this is state 2') + + database.close() + + def testRedoOfCreation(self): + database = DB(self._storage) + connection = database.open() + root = connection.root() + blob = Blob() + + transaction.begin() + with blob.open('w') as file: + file.write(b'this is state 1') + root['blob'] = blob + transaction.commit() + + database.undo(database.undoLog(0, 1)[0]['id']) + transaction.commit() + + self.assertRaises(KeyError, root.__getitem__, 'blob') + + database.undo(database.undoLog(0, 1)[0]['id']) + transaction.commit() + + with blob.open('r') as file: + self.assertEqual(file.read(), b'this is state 1') + + database.close() + + +class RecoveryBlobStorage(BlobTestBase, + ZODB.tests.IteratorStorage.IteratorDeepCompare): + + def setUp(self): + BlobTestBase.setUp(self) + self._dst = self.create_storage('dest') + + def tearDown(self): + self._dst.close() + BlobTestBase.tearDown(self) + + # Requires a setUp() that creates a self._dst destination storage + def testSimpleBlobRecovery(self): + self.assertTrue( + ZODB.interfaces.IBlobStorageRestoreable.providedBy(self._storage) + ) + db = 
DB(self._storage) + conn = db.open() + conn.root()[1] = ZODB.blob.Blob() + transaction.commit() + conn.root()[2] = ZODB.blob.Blob() + with conn.root()[2].open('w') as file: + file.write(b'some data') + transaction.commit() + conn.root()[3] = ZODB.blob.Blob() + with conn.root()[3].open('w') as file: + file.write( + (b''.join(struct.pack(">I", random.randint(0, (1<<32)-1)) + for i in range(random.randint(10000,20000))) + )[:-random.randint(1,4)] + ) + transaction.commit() + conn.root()[2] = ZODB.blob.Blob() + with conn.root()[2].open('w') as file: + file.write(b'some other data') + transaction.commit() + self._dst.copyTransactionsFrom(self._storage) + self.compare(self._storage, self._dst) + db.close() + + +def gc_blob_removes_uncommitted_data(): + """ + >>> blob = Blob() + >>> with blob.open('w') as file: + ... _ = file.write(b'x') + >>> fname = blob._p_blob_uncommitted + >>> os.path.exists(fname) + True + >>> file = blob = None + + PyPy not being reference counted actually needs GC to be + explicitly requested. In experiments, it finds the weakref + on the first collection, but only does the cleanup on the second + collection: + + >>> import gc + >>> _ = gc.collect() + >>> _ = gc.collect() + + Now the file is gone on all platforms: + + >>> os.path.exists(fname) + False + """ + +def commit_from_wrong_partition(): + """ + It should be possible to commit changes even when a blob is on a + different partition. + + We can simulare this by temporarily breaking os.rename. :) + + >>> def fail(*args): + ... 
raise OSError + + >>> os_rename = os.rename + >>> os.rename = fail + + >>> import logging + >>> logger = logging.getLogger('ZODB.blob.copied') + >>> handler = logging.StreamHandler(sys.stdout) + >>> logger.propagate = False + >>> logger.setLevel(logging.DEBUG) + >>> logger.addHandler(handler) + + >>> blob_storage = create_storage() + >>> database = DB(blob_storage) + >>> connection = database.open() + >>> root = connection.root() + >>> from ZODB.blob import Blob + >>> root['blob'] = Blob() + >>> with root['blob'].open('w') as file: + ... _ = file.write(b'test') + >>> transaction.commit() # doctest: +ELLIPSIS + Copied blob file ... + + >>> with root['blob'].open() as fp: fp.read() + 'test' + +Works with savepoints too: + + >>> root['blob2'] = Blob() + >>> with root['blob2'].open('w') as file: + ... _ = file.write(b'test2') + >>> _ = transaction.savepoint() # doctest: +ELLIPSIS + Copied blob file ... + + >>> transaction.commit() # doctest: +ELLIPSIS + Copied blob file ... + + >>> with root['blob2'].open() as fp: fp.read() + 'test2' + + >>> os.rename = os_rename + >>> logger.propagate = True + >>> logger.setLevel(0) + >>> logger.removeHandler(handler) + >>> handler.close() + + >>> database.close() + """ + + +def packing_with_uncommitted_data_non_undoing(): + """ + This covers regression for bug #130459. + + When uncommitted data exists it formerly was written to the root of the + blob_directory and confused our packing strategy. We now use a separate + temporary directory that is ignored while packing. + + >>> import transaction + >>> from ZODB.DB import DB + >>> from ZODB.serialize import referencesf + + >>> blob_storage = create_storage() + >>> database = DB(blob_storage) + >>> connection = database.open() + >>> root = connection.root() + >>> from ZODB.blob import Blob + >>> root['blob'] = Blob() + >>> connection.add(root['blob']) + >>> with root['blob'].open('w') as file: + ... 
_ = file.write(b'test') + + >>> blob_storage.pack(new_time(), referencesf) + + Clean up: + + >>> database.close() + """ + +def packing_with_uncommitted_data_undoing(): + """ + This covers regression for bug #130459. + + When uncommitted data exists it formerly was written to the root of the + blob_directory and confused our packing strategy. We now use a separate + temporary directory that is ignored while packing. + + >>> from ZODB.serialize import referencesf + + >>> blob_storage = create_storage() + >>> database = DB(blob_storage) + >>> connection = database.open() + >>> root = connection.root() + >>> from ZODB.blob import Blob + >>> root['blob'] = Blob() + >>> connection.add(root['blob']) + >>> with root['blob'].open('w') as file: + ... _ = file.write(b'test') + + >>> blob_storage.pack(new_time(), referencesf) + + Clean up: + + >>> database.close() + """ + +def test_blob_file_permissions(): + """ + >>> blob_storage = create_storage() + >>> conn = ZODB.connection(blob_storage) + >>> conn.root.x = ZODB.blob.Blob(b'test') + >>> conn.transaction_manager.commit() + + Blobs have the readability of their parent directories: + + >>> import stat + >>> READABLE = stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH + >>> path = conn.root.x.committed() + >>> ((os.stat(path).st_mode & READABLE) == + ... (os.stat(os.path.dirname(path)).st_mode & READABLE)) + True + + The committed file isn't writable: + + >>> WRITABLE = stat.S_IWUSR | stat.S_IWGRP | stat.S_IWOTH + >>> os.stat(path).st_mode & WRITABLE + 0 + + >>> conn.close() + """ + +def loadblob_tmpstore(): + """ + This is a test for assuring that the TmpStore's loadBlob implementation + falls back correctly to loadBlob on the backend. 
+ + First, let's setup a regular database and store a blob: + + >>> blob_storage = create_storage() + >>> database = DB(blob_storage) + >>> connection = database.open() + >>> root = connection.root() + >>> from ZODB.blob import Blob + >>> root['blob'] = Blob() + >>> connection.add(root['blob']) + >>> with root['blob'].open('w') as file: + ... _ = file.write(b'test') + >>> import transaction + >>> transaction.commit() + >>> blob_oid = root['blob']._p_oid + >>> tid = connection._storage.lastTransaction() + + Now we open a database with a TmpStore in front: + + >>> database.close() + + >>> from ZODB.Connection import TmpStore + >>> tmpstore = TmpStore(blob_storage) + + We can access the blob correctly: + + >>> tmpstore.loadBlob(blob_oid,tid) == blob_storage.loadBlob(blob_oid,tid) + True + + Clean up: + + >>> tmpstore.close() + >>> database.close() + """ + +def is_blob_record(): + r""" + >>> from ZODB.utils import load_current + + >>> bs = create_storage() + >>> db = DB(bs) + >>> conn = db.open() + >>> conn.root()['blob'] = ZODB.blob.Blob() + >>> transaction.commit() + >>> ZODB.blob.is_blob_record(load_current(bs, ZODB.utils.p64(0))[0]) + False + >>> ZODB.blob.is_blob_record(load_current(bs, ZODB.utils.p64(1))[0]) + True + + An invalid pickle yields a false value: + + >>> ZODB.blob.is_blob_record(b"Hello world!") + False + >>> ZODB.blob.is_blob_record(b'c__main__\nC\nq\x01.') + False + >>> ZODB.blob.is_blob_record(b'cWaaaa\nC\nq\x01.') + False + + As does None, which may occur in delete records: + + >>> ZODB.blob.is_blob_record(None) + False + + >>> db.close() + """ + +def do_not_depend_on_cwd(): + """ + >>> bs = create_storage() + >>> here = os.getcwd() + >>> os.mkdir('evil') + >>> os.chdir('evil') + >>> db = DB(bs) + >>> conn = db.open() + >>> conn.root()['blob'] = ZODB.blob.Blob() + >>> with conn.root()['blob'].open('w') as file: + ... 
_ = file.write(b'data') + >>> transaction.commit() + >>> os.chdir(here) + >>> with conn.root()['blob'].open() as fp: fp.read() + 'data' + + >>> db.close() + """ + +def savepoint_isolation(): + """Make sure savepoint data is distinct accross transactions + + >>> bs = create_storage() + >>> db = DB(bs) + >>> conn = db.open() + >>> conn.root.b = ZODB.blob.Blob(b'initial') + >>> transaction.commit() + >>> with conn.root.b.open('w') as file: + ... _ = file.write(b'1') + >>> _ = transaction.savepoint() + >>> tm = transaction.TransactionManager() + >>> conn2 = db.open(transaction_manager=tm) + >>> with conn2.root.b.open('w') as file: + ... _ = file.write(b'2') + >>> _ = tm.savepoint() + >>> with conn.root.b.open() as fp: fp.read() + '1' + >>> with conn2.root.b.open() as fp: fp.read() + '2' + >>> transaction.abort() + >>> tm.commit() + >>> conn.sync() + >>> with conn.root.b.open() as fp: fp.read() + '2' + >>> db.close() + """ + +def savepoint_commits_without_invalidations_out_of_order(): + """Make sure transactions with blobs can be commited without the + invalidations out of order error (LP #509801) + + >>> bs = create_storage() + >>> db = DB(bs) + >>> tm1 = transaction.TransactionManager() + >>> conn1 = db.open(transaction_manager=tm1) + >>> conn1.root.b = ZODB.blob.Blob(b'initial') + >>> tm1.commit() + >>> with conn1.root.b.open('w') as file: + ... _ = file.write(b'1') + >>> _ = tm1.savepoint() + + >>> tm2 = transaction.TransactionManager() + >>> conn2 = db.open(transaction_manager=tm2) + >>> with conn2.root.b.open('w') as file: + ... _ = file.write(b'2') + >>> _ = tm1.savepoint() + >>> with conn1.root.b.open() as fp: fp.read() + '1' + >>> with conn2.root.b.open() as fp: fp.read() + '2' + >>> tm2.commit() + >>> tm1.commit() # doctest: +IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + ... + ConflictError: database conflict error... + >>> tm1.abort() + >>> db.close() + """ + +def savepoint_cleanup(): + """Make sure savepoint data gets cleaned up. 
+ + >>> bs = create_storage() + >>> tdir = bs.temporaryDirectory() + >>> os.listdir(tdir) + [] + + >>> db = DB(bs) + >>> conn = db.open() + >>> conn.root.b = ZODB.blob.Blob(b'initial') + >>> _ = transaction.savepoint() + >>> len(os.listdir(tdir)) + 1 + >>> transaction.abort() + >>> os.listdir(tdir) + [] + >>> conn.root.b = ZODB.blob.Blob(b'initial') + >>> transaction.commit() + >>> with conn.root.b.open('w') as file: + ... _ = file.write(b'1') + >>> _ = transaction.savepoint() + >>> transaction.abort() + >>> os.listdir(tdir) + [] + + >>> db.close() + """ + +def lp440234_Setting__p_changed_of_a_Blob_w_no_uncomitted_changes_is_noop(): + r""" + >>> db = ZODB.DB('data.fs', blob_dir='blobs') + >>> conn = db.open() + >>> blob = ZODB.blob.Blob(b'blah') + >>> conn.add(blob) + >>> transaction.commit() + >>> blob._p_changed = True + >>> old_serial = blob._p_serial + >>> transaction.commit() + >>> with blob.open() as fp: fp.read() + 'blah' + >>> old_serial == blob._p_serial + True + + >>> db.close() + """ + +def setUp(test): + ZODB.tests.util.setUp(test) + test.globs['rmtree'] = zope.testing.setupstack.rmtree + +def timeIncreasesSetUp(test): + setUp(test) + l = test.globs['time_layer'] = ZODB.tests.util.MonotonicallyIncreasingTimeMinimalTestLayer('') + l.testSetUp() + +def timeIncreasesTearDown(test): + test.globs['time_layer'].testTearDown() + util.tearDown(test) + +def setUpBlobAdaptedFileStorage(test): + setUp(test) + + def create_storage(name='data', blob_dir=None): + if blob_dir is None: + blob_dir = '%s.bobs' % name + return ZODB.blob.BlobStorage(blob_dir, FileStorage('%s.fs' % name)) + + test.globs['create_storage'] = create_storage + +def storage_reusable_suite(prefix, factory, + test_blob_storage_recovery=False, + test_packing=False, + test_undo=True, + ): + """Return a test suite for a generic IBlobStorage. + + Pass a factory taking a name and a blob directory name. 
+ """ + + def setup(test): + setUp(test) + def create_storage(name='data', blob_dir=None): + if blob_dir is None: + blob_dir = '%s.bobs' % name + return factory(name, blob_dir) + + test.globs['create_storage'] = create_storage + test.globs['file_type'] = file_type + + suite = unittest.TestSuite() + suite.addTest(doctest.DocFileSuite( + "blob_connection.txt", + "blob_importexport.txt", + "blob_transaction.txt", + setUp=setup, tearDown=util.tearDown, + checker=zope.testing.renormalizing.RENormalizing([ + # Py3k renders bytes where Python2 used native strings... + (re.compile(r"^b'"), "'"), + (re.compile(r'^b"'), '"'), + # ...and native strings where Python2 used unicode. + (re.compile("^POSKeyError: u'No blob file"), + "POSKeyError: 'No blob file"), + # Py3k repr's exceptions with dotted names + (re.compile("^ZODB.interfaces.BlobError:"), "BlobError:"), + (re.compile("^ZODB.POSException.ConflictError:"), "ConflictError:"), + (re.compile("^ZODB.POSException.POSKeyError:"), "POSKeyError:"), + (re.compile("^ZODB.POSException.Unsupported:"), "Unsupported:"), + # Normalize out blobfile paths for sake of Windows + (re.compile( + r'([a-zA-Z]:)?\%(sep)s.*\%(sep)s(server-)?blobs\%(sep)s.*\.blob' + % dict(sep=os.path.sep)), '') + ]), + optionflags=doctest.ELLIPSIS, + )) + if test_packing: + suite.addTest(doctest.DocFileSuite( + "blob_packing.txt", + setUp=setup, tearDown=util.tearDown, + )) + suite.addTest(doctest.DocTestSuite( + setUp=setup, tearDown=util.tearDown, + checker = ( + ZODB.tests.util.checker + + zope.testing.renormalizing.RENormalizing([ + (re.compile(r'\%(sep)s\%(sep)s' % dict(sep=os.path.sep)), '/'), + (re.compile(r'\%(sep)s' % dict(sep=os.path.sep)), '/'), + ])), + )) + + def create_storage(self, name='data', blob_dir=None): + if blob_dir is None: + blob_dir = '%s.bobs' % name + return factory(name, blob_dir) + + def add_test_based_on_test_class(class_): + new_class = class_.__class__( + prefix+class_.__name__, (class_, ), + 
dict(create_storage=create_storage), + ) + suite.addTest(unittest.makeSuite(new_class)) + + if test_blob_storage_recovery: + add_test_based_on_test_class(RecoveryBlobStorage) + if test_undo: + add_test_based_on_test_class(BlobUndoTests) + + suite.layer = ZODB.tests.util.MonotonicallyIncreasingTimeMinimalTestLayer(prefix+'BlobTests') + + return suite + +def test_suite(): + suite = unittest.TestSuite() + suite.addTest(unittest.makeSuite(ZODBBlobConfigTest)) + suite.addTest(unittest.makeSuite(BlobCloneTests)) + suite.addTest(unittest.makeSuite(BushyLayoutTests)) + suite.addTest(doctest.DocFileSuite( + "blob_basic.txt", + "blob_consume.txt", + "blob_tempdir.txt", + setUp=setUp, + tearDown=util.tearDown, + optionflags=doctest.ELLIPSIS, + checker=ZODB.tests.util.checker, + )) + suite.addTest(doctest.DocFileSuite( + "blobstorage_packing.txt", + setUp=timeIncreasesSetUp, + tearDown=timeIncreasesTearDown, + optionflags=doctest.ELLIPSIS, + checker=ZODB.tests.util.checker, + )) + suite.addTest(doctest.DocFileSuite( + "blob_layout.txt", + optionflags=doctest.ELLIPSIS|doctest.NORMALIZE_WHITESPACE, + setUp=setUp, + tearDown=util.tearDown, + checker=ZODB.tests.util.checker + + zope.testing.renormalizing.RENormalizing([ + (re.compile(r'\%(sep)s\%(sep)s' % dict(sep=os.path.sep)), '/'), + (re.compile(r'\%(sep)s' % dict(sep=os.path.sep)), '/'), + (re.compile(r'\S+/((old|bushy|lawn)/\S+/foo[23456]?)'), r'\1'), + (re.compile(r"u('[^']*')"), r"\1"), + ]), + )) + suite.addTest(storage_reusable_suite( + 'BlobAdaptedFileStorage', + lambda name, blob_dir: + ZODB.blob.BlobStorage(blob_dir, FileStorage('%s.fs' % name)), + test_blob_storage_recovery=True, + test_packing=True, + )) + + return suite + +if __name__ == '__main__': + unittest.main(defaultTest = 'test_suite') diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/tests/testconflictresolution.py b/thesisenv/lib/python3.6/site-packages/ZODB/tests/testconflictresolution.py new file mode 100644 index 0000000..f83704e --- /dev/null +++ 
b/thesisenv/lib/python3.6/site-packages/ZODB/tests/testconflictresolution.py @@ -0,0 +1,403 @@ +############################################################################## +# +# Copyright (c) 2007 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +import manuel.doctest +import manuel.footnote +import doctest +import manuel.capture +import manuel.testing +import persistent +import transaction +import unittest +import ZODB.ConflictResolution +import ZODB.tests.util +import ZODB.POSException +import zope.testing.module + +def setUp(test): + ZODB.tests.util.setUp(test) + zope.testing.module.setUp(test, 'ConflictResolution_txt') + ZODB.ConflictResolution._class_cache.clear() + ZODB.ConflictResolution._unresolvable.clear() + +def tearDown(test): + zope.testing.module.tearDown(test) + ZODB.tests.util.tearDown(test) + ZODB.ConflictResolution._class_cache.clear() + ZODB.ConflictResolution._unresolvable.clear() + + +class ResolveableWhenStateDoesNotChange(persistent.Persistent): + + def _p_resolveConflict(self, old, committed, new): + if new == old: + # old -> new diff is empty, so merge is trivial + committed['resolved'] = 'committed' + return committed + elif committed == old: + # old -> committed diff is empty, so merge is trivial + new['resolved'] = 'new' + return new + # 3-way merge + raise ZODB.POSException.ConflictError + +class Unresolvable(persistent.Persistent): + pass + +def succeed_with_resolution_when_state_is_unchanged(): + """ + If a conflicting change doesn't change the 
state, then we must still call + _p_resolveConflict, even if in most cases the result would be either + committed or new (as shown above in ResolveableWhenStateDoesNotChange). + One use case is to implement an "asynchronous" cache: + - Initially, a cache value is not filled (e.g. None is used to describe + this state). + - A transaction fills the cache (actually done by a background application) + (None -> "foo"). + - A concurrent transaction invalidates the cache due to some user action + (None -> None), and pushes a new background task to fill the cache. + Then the expected resolved value is None, and not "foo". + + >>> db = ZODB.DB('t.fs') # FileStorage! + >>> storage = db.storage + >>> conn = db.open() + >>> conn.root.x = ResolveableWhenStateDoesNotChange() + >>> conn.root.x.v = 1 + >>> transaction.commit() + >>> serial1 = conn.root.x._p_serial + >>> conn.root.x.v = 2 + >>> transaction.commit() + >>> serial2 = conn.root.x._p_serial + >>> oid = conn.root.x._p_oid + +So, let's try resolving when the old and committed states are the same +but the new state (pickle) is different: + + >>> p = storage.tryToResolveConflict( + ... oid, serial1, serial1, storage.loadSerial(oid, serial2)) + + >>> conn._reader.getState(p)['resolved'] + 'new' + + +And when the old and new states are the same but the committed state +is different: + + >>> p = storage.tryToResolveConflict( + ... oid, serial2, serial1, storage.loadSerial(oid, serial1)) + + >>> conn._reader.getState(p)['resolved'] + 'committed' + +But we still conflict if both the committed and new are different than +the original: + + >>> p = storage.tryToResolveConflict( + ... oid, serial2, serial1, storage.loadSerial(oid, serial2)) + ... # doctest: +ELLIPSIS + Traceback (most recent call last): + ... + ConflictError: database conflict error (oid 0x01, ... + + +Of course, there's also no automatic trivial merge if content doesn't support +conflict resolution. Touching an object without change is a common locking +mechanism. 
+ + >>> conn.root.y = Unresolvable() + >>> conn.root.y.v = 1 + >>> transaction.commit() + >>> oid = conn.root.y._p_oid + >>> serial = conn.root.y._p_serial + + >>> p = storage.tryToResolveConflict( + ... oid, serial, serial, storage.loadSerial(oid, serial)) + ... # doctest: +ELLIPSIS + Traceback (most recent call last): + ... + ConflictError: database conflict error (oid 0x02, ... + + >>> db.close() + """ + +class Resolveable(persistent.Persistent): + + def _p_resolveConflict(self, old, committed, new): + resolved = {} + for k in old: + if k not in committed: + if k in new and new[k] == old[k]: + continue + raise ZODB.POSException.ConflictError + if k not in new: + if k in committed and committed[k] == old[k]: + continue + raise ZODB.POSException.ConflictError + if committed[k] != old[k]: + if new[k] == old[k]: + resolved[k] = committed[k] + continue + raise ZODB.POSException.ConflictError + if new[k] != old[k]: + if committed[k] == old[k]: + resolved[k] = new[k] + continue + raise ZODB.POSException.ConflictError + resolved[k] = old[k] + + for k in new: + if k in old: + continue + if k in committed: + raise ZODB.POSException.ConflictError + resolved[k] = new[k] + + for k in committed: + if k in old: + continue + if k in new: + raise ZODB.POSException.ConflictError + resolved[k] = committed[k] + + return resolved + +def resolve_even_when_referenced_classes_are_absent(): + """ + +We often want to be able to resolve even when there are pesistent +references to classes that can't be imported. + + >>> class P(persistent.Persistent): + ... pass + + >>> db = ZODB.DB('t.fs') # FileStorage! 
+ >>> storage = db.storage + >>> conn = db.open() + >>> conn.root.x = Resolveable() + >>> transaction.commit() + >>> oid = conn.root.x._p_oid + >>> serial = conn.root.x._p_serial + + >>> conn.root.x.a = P() + >>> transaction.commit() + >>> aid = conn.root.x.a._p_oid + >>> serial1 = conn.root.x._p_serial + + >>> del conn.root.x.a + >>> conn.root.x.b = P() + >>> transaction.commit() + >>> serial2 = conn.root.x._p_serial + +Bwahaha: + + >>> P_aside = P + >>> del P + +Now, even though we can't import P, we can still resolve the conflict: + + >>> p = storage.tryToResolveConflict( + ... oid, serial1, serial, storage.loadSerial(oid, serial2)) + +And load the pickle: + + >>> conn2 = db.open() + >>> P = P_aside + >>> p = conn2._reader.getState(p) + >>> sorted(p), p['a'] is conn2.get(aid), p['b'] is conn2.root.x.b + (['a', 'b'], True, True) + + >>> isinstance(p['a'], P) and isinstance(p['b'], P) + True + + +Oooooof course, this won't work if the subobjects aren't persistent: + + >>> class NP(object): + ... pass + + + >>> conn.root.x = Resolveable() + >>> transaction.commit() + >>> oid = conn.root.x._p_oid + >>> serial = conn.root.x._p_serial + + >>> conn.root.x.a = a = NP() + >>> transaction.commit() + >>> serial1 = conn.root.x._p_serial + + >>> del conn.root.x.a + >>> conn.root.x.b = b = NP() + >>> transaction.commit() + >>> serial2 = conn.root.x._p_serial + +Bwahaha: + + >>> del NP + + + >>> storage.tryToResolveConflict( + ... oid, serial1, serial, storage.loadSerial(oid, serial2)) + ... # doctest: +ELLIPSIS + Traceback (most recent call last): + ... + ConflictError: database conflict error (oid ... + + >>> db.close() + """ + + +def resolve_even_when_xdb_referenced_classes_are_absent(): + """Cross-database persistent refs! + + >>> class P(persistent.Persistent): + ... 
pass + + >>> databases = {} + >>> db = ZODB.DB('t.fs', databases=databases, database_name='') + >>> db2 = ZODB.DB('o.fs', databases=databases, database_name='o') + >>> storage = db.storage + >>> conn = db.open() + >>> conn.root.x = Resolveable() + >>> transaction.commit() + >>> oid = conn.root.x._p_oid + >>> serial = conn.root.x._p_serial + + >>> p = P(); conn.get_connection('o').add(p) + >>> conn.root.x.a = p + >>> transaction.commit() + >>> aid = conn.root.x.a._p_oid + >>> serial1 = conn.root.x._p_serial + + >>> del conn.root.x.a + >>> p = P(); conn.get_connection('o').add(p) + >>> conn.root.x.b = p + >>> transaction.commit() + >>> serial2 = conn.root.x._p_serial + + >>> del p + +Bwahaha: + + >>> P_aside = P + >>> del P + +Now, even though we can't import P, we can still resolve the conflict: + + >>> p = storage.tryToResolveConflict( + ... oid, serial1, serial, storage.loadSerial(oid, serial2)) + +And load the pickle: + + >>> conn2 = db.open() + >>> conn2o = conn2.get_connection('o') + >>> P = P_aside + >>> p = conn2._reader.getState(p) + >>> sorted(p), p['a'] is conn2o.get(aid), p['b'] is conn2.root.x.b + (['a', 'b'], True, True) + + >>> isinstance(p['a'], P) and isinstance(p['b'], P) + True + + >>> db.close() + >>> db2.close() + """ + + +class FailHard(persistent.Persistent): + + def _p_resolveConflict(self, old, committed, new): + raise RuntimeError("epic fail") + + +def show_tryToResolveConflict_log_output(): + """ + Verify output generated by tryToResolveConflict in the logs + + >>> db = ZODB.DB('t.fs') # FileStorage! 
+ >>> storage = db.storage + >>> conn = db.open() + >>> conn.root.x = FailHard() + >>> conn.root.x.v = 1 + >>> transaction.commit() + >>> serial1 = conn.root.x._p_serial + >>> conn.root.x.v = 2 + >>> transaction.commit() + >>> serial2 = conn.root.x._p_serial + >>> oid = conn.root.x._p_oid + +Install a log handler to be able to show log entries + + >>> import logging + >>> from zope.testing.loggingsupport import InstalledHandler + >>> handler = InstalledHandler('ZODB.ConflictResolution', + ... level=logging.DEBUG) + +Content fails hard on conflict resolution: + + >>> p = storage.tryToResolveConflict( + ... oid, serial2, serial1, storage.loadSerial(oid, serial2)) + ... # doctest: +ELLIPSIS + Traceback (most recent call last): + ... + ConflictError: database conflict error (oid 0x01, ... + +Content doesn't support conflict resolution: + + >>> conn.root.y = Unresolvable() + >>> conn.root.y.v = 1 + >>> transaction.commit() + >>> oid = conn.root.y._p_oid + >>> serial = conn.root.y._p_serial + + >>> p = storage.tryToResolveConflict( + ... oid, serial, serial, storage.loadSerial(oid, serial)) + ... # doctest: +ELLIPSIS + Traceback (most recent call last): + ... + ConflictError: database conflict error (oid 0x02, ... 
+ +Let's see what went into the log: + + >>> len(handler.records) + 2 + + >>> import six + + >>> msg = handler.records[0] + >>> six.print_(msg.name, msg.levelname, msg.getMessage()) + ZODB.ConflictResolution ERROR Unexpected error while trying to resolve conflict on + + >>> msg = handler.records[1] + >>> six.print_(msg.name, msg.levelname, msg.getMessage()) + ZODB.ConflictResolution DEBUG Conflict resolution on failed with ConflictError: database conflict error + +Cleanup: + + >>> handler.uninstall() + >>> db.close() + """ + + +def test_suite(): + return unittest.TestSuite([ + manuel.testing.TestSuite( + manuel.doctest.Manuel(checker=ZODB.tests.util.checker) + + manuel.footnote.Manuel() + + manuel.capture.Manuel(), + '../ConflictResolution.txt', + setUp=setUp, tearDown=tearDown + ), + doctest.DocTestSuite( + setUp=setUp, tearDown=tearDown, + checker=ZODB.tests.util.checker), + ]) diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/tests/testcrossdatabasereferences.py b/thesisenv/lib/python3.6/site-packages/ZODB/tests/testcrossdatabasereferences.py new file mode 100644 index 0000000..e29b288 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/tests/testcrossdatabasereferences.py @@ -0,0 +1,202 @@ +############################################################################## +# +# Copyright (c) 2005 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. 
+# +############################################################################## +import doctest +import persistent +import unittest +import ZODB.tests.util + +class MyClass(persistent.Persistent): + pass + +class MyClass_w_getnewargs(persistent.Persistent): + + def __getnewargs__(self): + return () + +def test_must_use_consistent_connections(): + """ + +It's important to use consistent connections. References to +separate connections to the same database or multi-database won't +work. + +For example, it's tempting to open a second database using the +database open function, but this doesn't work: + + >>> import ZODB.tests.util, transaction, persistent + >>> databases = {} + >>> db1 = ZODB.tests.util.DB(databases=databases, database_name='1') + >>> db2 = ZODB.tests.util.DB(databases=databases, database_name='2') + + >>> tm = transaction.TransactionManager() + >>> conn1 = db1.open(transaction_manager=tm) + >>> p1 = MyClass() + >>> conn1.root()['p'] = p1 + >>> tm.commit() + + >>> conn2 = db2.open(transaction_manager=tm) + + >>> p2 = MyClass() + >>> conn2.root()['p'] = p2 + >>> p2.p1 = p1 + >>> tm.commit() # doctest: +NORMALIZE_WHITESPACE +ELLIPSIS + Traceback (most recent call last): + ... + InvalidObjectReference: + ('Attempt to store a reference to an object from a separate connection to + the same database or multidatabase', + , + ) + + >>> tm.abort() + +Even without multi-databases, a common mistake is to mix objects in +different connections to the same database. + + >>> conn2 = db1.open(transaction_manager=tm) + + >>> p2 = MyClass() + >>> conn2.root()['p'] = p2 + >>> p2.p1 = p1 + >>> tm.commit() # doctest: +NORMALIZE_WHITESPACE +ELLIPSIS + Traceback (most recent call last): + ... 
+ InvalidObjectReference: + ('Attempt to store a reference to an object from a separate connection + to the same database or multidatabase', + , + ) + + >>> tm.abort() + +""" + +def test_connection_management_doesnt_get_caching_wrong(): + """ + +If a connection participates in a multidatabase, then it's +connections must remain so that references between it's cached +objects remain sane. + + >>> import ZODB.tests.util, transaction, persistent + >>> databases = {} + >>> db1 = ZODB.tests.util.DB(databases=databases, database_name='1') + >>> db2 = ZODB.tests.util.DB(databases=databases, database_name='2') + >>> tm = transaction.TransactionManager() + >>> conn1 = db1.open(transaction_manager=tm) + >>> conn2 = conn1.get_connection('2') + >>> z = MyClass() + >>> conn2.root()['z'] = z + >>> tm.commit() + >>> x = MyClass() + >>> x.z = z + >>> conn1.root()['x'] = x + >>> y = MyClass() + >>> y.z = z + >>> conn1.root()['y'] = y + >>> tm.commit() + + >>> conn1.root()['x'].z is conn1.root()['y'].z + True + +So, we have 2 objects in conn1 that point to the same object in conn2. 
+Now, we'll deactivate one, close and repopen the connection, and see +if we get the same objects: + + >>> x._p_deactivate() + >>> conn1.close() + >>> conn1 = db1.open(transaction_manager=tm) + + >>> conn1.root()['x'].z is conn1.root()['y'].z + True + + >>> db1.close() + >>> db2.close() +""" + +def test_explicit_adding_with_savepoint(): + """ + + >>> import ZODB.tests.util, transaction, persistent + >>> databases = {} + >>> db1 = ZODB.tests.util.DB(databases=databases, database_name='1') + >>> db2 = ZODB.tests.util.DB(databases=databases, database_name='2') + >>> tm = transaction.TransactionManager() + >>> conn1 = db1.open(transaction_manager=tm) + >>> conn2 = conn1.get_connection('2') + >>> z = MyClass() + + >>> conn1.root()['z'] = z + >>> conn1.add(z) + >>> s = tm.savepoint() + >>> conn2.root()['z'] = z + >>> tm.commit() + >>> z._p_jar.db().database_name + '1' + + >>> db1.close() + >>> db2.close() + +""" + +def test_explicit_adding_with_savepoint2(): + """ + + >>> import ZODB.tests.util, transaction, persistent + >>> databases = {} + >>> db1 = ZODB.tests.util.DB(databases=databases, database_name='1') + >>> db2 = ZODB.tests.util.DB(databases=databases, database_name='2') + >>> tm = transaction.TransactionManager() + >>> conn1 = db1.open(transaction_manager=tm) + >>> conn2 = conn1.get_connection('2') + >>> z = MyClass() + + >>> conn1.root()['z'] = z + >>> conn1.add(z) + >>> s = tm.savepoint() + >>> conn2.root()['z'] = z + >>> z.x = 1 + >>> tm.commit() + >>> z._p_jar.db().database_name + '1' + + >>> db1.close() + >>> db2.close() + +""" + +def tearDownDbs(test): + test.globs['db1'].close() + test.globs['db2'].close() + +def test_suite(): + return unittest.TestSuite(( + doctest.DocFileSuite( + '../cross-database-references.txt', + globs=dict(MyClass=MyClass), + tearDown=tearDownDbs, + checker=ZODB.tests.util.checker, + ), + doctest.DocFileSuite( + '../cross-database-references.txt', + globs=dict(MyClass=MyClass_w_getnewargs), + tearDown=tearDownDbs, + 
checker=ZODB.tests.util.checker, + ), + doctest.DocTestSuite(checker=ZODB.tests.util.checker), + )) + +if __name__ == '__main__': + unittest.main(defaultTest='test_suite') + diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/tests/testdocumentation.py b/thesisenv/lib/python3.6/site-packages/ZODB/tests/testdocumentation.py new file mode 100644 index 0000000..bad0491 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/tests/testdocumentation.py @@ -0,0 +1,56 @@ +############################################################################## +# +# Copyright (c) Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.0 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. 
+# +############################################################################## +from os.path import join +import os +import doctest +import unittest +import manuel.capture +import manuel.doctest +import manuel.testing +import zope.testing.module + +import ZODB + +def setUp(test): + test.globs.update( + ZODB=ZODB, + ) + zope.testing.module.setUp(test) + +def tearDown(test): + zope.testing.module.tearDown(test) + +def test_suite(): + base, src = os.path.split(os.path.dirname(os.path.dirname(ZODB.__file__))) + assert src == 'src', src + base = join(base, 'doc') + guide = join(base, 'guide') + reference = join(base, 'reference') + + return unittest.TestSuite(( + manuel.testing.TestSuite( + manuel.doctest.Manuel( + optionflags=doctest.IGNORE_EXCEPTION_DETAIL, + ) + manuel.capture.Manuel(), + join(guide, 'writing-persistent-objects.rst'), + join(guide, 'install-and-run.rst'), + join(guide, 'transactions-and-threading.rst'), + join(reference, 'zodb.rst'), + join(reference, 'storages.rst'), + setUp=setUp, tearDown=tearDown, + ), + )) + +if __name__ == '__main__': + unittest.main(defaultTest='test_suite') diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/tests/testfsIndex.py b/thesisenv/lib/python3.6/site-packages/ZODB/tests/testfsIndex.py new file mode 100644 index 0000000..060bd67 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/tests/testfsIndex.py @@ -0,0 +1,239 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. 
+# +############################################################################## +import doctest +import random +import unittest + +from ZODB.fsIndex import fsIndex +from ZODB.utils import p64, z64 +from ZODB.tests.util import setUp, tearDown +import six + +try: + xrange +except NameError: + # Py3: No xrange. + xrange = range + +class Test(unittest.TestCase): + + def setUp(self): + self.index = fsIndex() + + for i in range(200): + self.index[p64(i * 1000)] = (i * 1000 + 1) + + def test__del__(self): + index = self.index + self.assertTrue(p64(1000) in index) + self.assertTrue(p64(100*1000) in index) + + del self.index[p64(1000)] + del self.index[p64(100*1000)] + + self.assertTrue(p64(1000) not in index) + self.assertTrue(p64(100*1000) not in index) + + for key in list(self.index): + del index[key] + self.assertTrue(not index) + + # Whitebox. Make sure empty buckets are removed + self.assertTrue(not index._data) + + def testInserts(self): + index = self.index + + for i in range(0,200): + self.assertEqual((i,index[p64(i*1000)]), (i,(i*1000+1))) + + self.assertEqual(len(index), 200) + + key=p64(2000) + + self.assertEqual(index.get(key), 2001) + + key=p64(2001) + self.assertEqual(index.get(key), None) + self.assertEqual(index.get(key, ''), '') + + # self.assertTrue(len(index._data) > 1) + + def testUpdate(self): + index = self.index + d={} + + for i in range(200): + d[p64(i*1000)]=(i*1000+1) + + index.update(d) + + for i in range(400,600): + d[p64(i*1000)]=(i*1000+1) + + index.update(d) + + for i in range(100, 500): + d[p64(i*1000)]=(i*1000+2) + + index.update(d) + + self.assertEqual(index.get(p64(2000)), 2001) + self.assertEqual(index.get(p64(599000)), 599001) + self.assertEqual(index.get(p64(399000)), 399002) + self.assertEqual(len(index), 600) + + def testKeys(self): + keys = list(iter(self.index)) + keys.sort() + + for i, k in enumerate(keys): + self.assertEqual(k, p64(i * 1000)) + + keys = list(six.iterkeys(self.index)) + keys.sort() + + for i, k in 
enumerate(keys): + self.assertEqual(k, p64(i * 1000)) + + keys = self.index.keys() + keys.sort() + + for i, k in enumerate(keys): + self.assertEqual(k, p64(i * 1000)) + + def testValues(self): + values = list(six.itervalues(self.index)) + values.sort() + + for i, v in enumerate(values): + self.assertEqual(v, (i * 1000 + 1)) + + values = self.index.values() + values.sort() + + for i, v in enumerate(values): + self.assertEqual(v, (i * 1000 + 1)) + + def testItems(self): + items = list(six.iteritems(self.index)) + items.sort() + + for i, item in enumerate(items): + self.assertEqual(item, (p64(i * 1000), (i * 1000 + 1))) + + items = self.index.items() + items.sort() + + for i, item in enumerate(items): + self.assertEqual(item, (p64(i * 1000), (i * 1000 + 1))) + + def testMaxKey(self): + index = self.index + index.clear() + + # An empty index should complain. + self.assertRaises(ValueError, index.maxKey) + + # Now build up a tree with random values, and check maxKey at each + # step. + correct_max = b"" # smaller than anything we'll add + for i in range(1000): + key = p64(random.randrange(100000000)) + index[key] = i + correct_max = max(correct_max, key) + index_max = index.maxKey() + self.assertEqual(index_max, correct_max) + + index.clear() + a = b'\000\000\000\000\000\001\000\000' + b = b'\000\000\000\000\000\002\000\000' + c = b'\000\000\000\000\000\003\000\000' + d = b'\000\000\000\000\000\004\000\000' + index[a] = 1 + index[c] = 2 + self.assertEqual(index.maxKey(b), a) + self.assertEqual(index.maxKey(d), c) + self.assertRaises(ValueError, index.maxKey, z64) + + def testMinKey(self): + index = self.index + index.clear() + + # An empty index should complain. + self.assertRaises(ValueError, index.minKey) + + # Now build up a tree with random values, and check minKey at each + # step. 
+ correct_min = b"\xff" * 8 # bigger than anything we'll add + for i in range(1000): + key = p64(random.randrange(100000000)) + index[key] = i + correct_min = min(correct_min, key) + index_min = index.minKey() + self.assertEqual(index_min, correct_min) + + index.clear() + a = b'\000\000\000\000\000\001\000\000' + b = b'\000\000\000\000\000\002\000\000' + c = b'\000\000\000\000\000\003\000\000' + d = b'\000\000\000\000\000\004\000\000' + index[a] = 1 + index[c] = 2 + self.assertEqual(index.minKey(b), c) + self.assertRaises(ValueError, index.minKey, d) + +def fsIndex_save_and_load(): + """ +fsIndex objects now have save methods for saving them to disk in a new +format. The fsIndex class has a load class method that can load data. + +Let's start by creating an fsIndex. We'll bother to allocate the +object ids to get multiple buckets: + + >>> index = fsIndex(dict((p64(i), i) for i in xrange(0, 1<<28, 1<<15))) + >>> len(index._data) + 4096 + +Now, we'll save the data to disk and then load it: + + >>> index.save(42, 'index') + +Note that we pass a file position, which gets saved with the index data. + + >>> info = fsIndex.load('index') + >>> info['pos'] + 42 + >>> info['index'].__getstate__() == index.__getstate__() + True + +If we save the data in the old format, we can still read it: + + >>> from ZODB._compat import dump + >>> from ZODB._compat import _protocol + >>> with open('old', 'wb') as fp: + ... 
dump(dict(pos=42, index=index), fp, _protocol) + >>> info = fsIndex.load('old') + >>> info['pos'] + 42 + >>> info['index'].__getstate__() == index.__getstate__() + True + + """ + +def test_suite(): + suite = unittest.TestSuite() + suite.addTest(unittest.makeSuite(Test)) + suite.addTest(doctest.DocTestSuite(setUp=setUp, tearDown=tearDown)) + return suite diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/tests/testfsoids.py b/thesisenv/lib/python3.6/site-packages/ZODB/tests/testfsoids.py new file mode 100644 index 0000000..79343c6 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/tests/testfsoids.py @@ -0,0 +1,194 @@ +############################################################################## +# +# Copyright (c) 2004 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +r""" +fsoids test, of the workhorse fsoids.Trace class +================================================ + +Let's get a path to work with first. + +>>> path = 'Data.fs' + +More imports. + +>>> import ZODB +>>> from ZODB.FileStorage import FileStorage +>>> import transaction as txn +>>> from BTrees.OOBTree import OOBTree +>>> from ZODB.FileStorage.fsoids import Tracer # we're testing this + +Create an empty FileStorage. + +>>> st = FileStorage(path) + +There's not a lot interesting in an empty DB! 
+ +>>> t = Tracer(path) +>>> t.register_oids(0x123456) +>>> t.register_oids(1) +>>> t.register_oids(0) +>>> t.run() +>>> t.report() +oid 0x00 0 revisions + this oid was not defined (no data record for it found) +oid 0x01 0 revisions + this oid was not defined (no data record for it found) +oid 0x123456 0 revisions + this oid was not defined (no data record for it found) + +That didn't tell us much, but does show that the specified oids are sorted +into increasing order. + +Create a root object and try again: + +>>> db = ZODB.DB(st) # yes, that creates a root object! +>>> t = Tracer(path) +>>> t.register_oids(0, 1) +>>> t.run(); t.report() #doctest: +ELLIPSIS +oid 0x00 persistent.mapping.PersistentMapping 1 revision + tid 0x... offset= ... + tid user='' + tid description='initial database creation' + new revision persistent.mapping.PersistentMapping at +oid 0x01 0 revisions + this oid was not defined (no data record for it found) + +So we see oid 0 has been used in our one transaction, and that it was created +there, and is a PersistentMapping. 4 is the file offset to the start of the +transaction record, and 52 is the file offset to the start of the data record +for oid 0 within this transaction. Because tids are timestamps too, the +"..." parts vary across runs. The initial line for a tid actually looks like +this: + + tid 0x035748597843b877 offset=4 2004-08-20 20:41:28.187000 + +Let's add a BTree and try again: + +>>> root = db.open().root() +>>> root['tree'] = OOBTree() +>>> txn.get().note(u'added an OOBTree') +>>> txn.get().commit() +>>> t = Tracer(path) +>>> t.register_oids(0, 1) +>>> t.run(); t.report() #doctest: +ELLIPSIS +oid 0x00 persistent.mapping.PersistentMapping 2 revisions + tid 0x... offset= ... + tid user='' + tid description='initial database creation' + new revision persistent.mapping.PersistentMapping at + tid 0x... offset= ... 
+ tid user='' + tid description='added an OOBTree' + new revision persistent.mapping.PersistentMapping at + references 0x01 BTrees.OOBTree.OOBTree... at +oid 0x01 BTrees.OOBTree.OOBTree... 1 revision + tid 0x... offset= ... + tid user='' + tid description='added an OOBTree' + new revision BTrees.OOBTree.OOBTree... at + referenced by 0x00 persistent.mapping.PersistentMapping at + +So there are two revisions of oid 0 now, and the second references oid 1. + +One more, storing a reference in the BTree back to the root object: + +>>> tree = root['tree'] +>>> tree['root'] = root +>>> txn.get().note(u'circling back to the root') +>>> txn.get().commit() +>>> t = Tracer(path) +>>> t.register_oids(0, 1, 2) +>>> t.run(); t.report() #doctest: +ELLIPSIS +oid 0x00 persistent.mapping.PersistentMapping 2 revisions + tid 0x... offset= ... + tid user='' + tid description='initial database creation' + new revision persistent.mapping.PersistentMapping at + tid 0x... offset= ... + tid user='' + tid description='added an OOBTree' + new revision persistent.mapping.PersistentMapping at + references 0x01 BTrees.OOBTree.OOBTree... at + tid 0x... offset= ... + tid user='' + tid description='circling back to the root' + referenced by 0x01 BTrees.OOBTree.OOBTree... at +oid 0x01 BTrees.OOBTree.OOBTree... 2 revisions + tid 0x... offset= ... + tid user='' + tid description='added an OOBTree' + new revision BTrees.OOBTree.OOBTree... at + referenced by 0x00 persistent.mapping.PersistentMapping at + tid 0x... offset= ... + tid user='' + tid description='circling back to the root' + new revision BTrees.OOBTree.OOBTree... at + references 0x00 persistent.mapping.PersistentMapping at +oid 0x02 0 revisions + this oid was not defined (no data record for it found) + +Note that we didn't create any new object there (oid 2 is still unused), we +just made oid 1 refer to oid 0. Therefore there's a new "new revision" line +in the output for oid 1. 
Note that there's also new output for oid 0, even +though the root object didn't change: we got new output for oid 0 because +it's a traced oid and the new transaction made a new reference *to* it. + +Since the Trace constructor takes only one argument, the only sane thing +you can do to make it fail is to give it a path to a file that doesn't +exist: + +>>> Tracer('/eiruowieuu/lsijflfjlsijflsdf/eurowiurowioeuri/908479287.fs') +Traceback (most recent call last): + ... +ValueError: must specify an existing FileStorage + +You get the same kind of exception if you pass it a path to an existing +directory (the path must be to a file, not a directory): + +>>> import os +>>> Tracer(os.path.dirname(__file__)) +Traceback (most recent call last): + ... +ValueError: must specify an existing FileStorage + + +Clean up. +>>> st.close() +>>> st.cleanup() # remove .fs, .index, etc +""" + +import doctest +import re + +from zope.testing import renormalizing + +from .util import checker as util_checker +from .util import setUp +from .util import tearDown + +checker = renormalizing.RENormalizing([ + # Normalizing this makes diffs easier to read + (re.compile(r'\btid 0x[0-9a-f]+\b'), 'tid 0x...'), + (re.compile(r'\b\d\d\d\d-\d\d-\d\d \d\d:\d\d:\d\d\.\d+\b'), '...'), + # Python 3 produces larger pickles, even when we use zodbpickle :( + # this changes all the offsets and sizes + (re.compile(r'\boffset=[0-9]+\b'), 'offset='), + (re.compile(r'\bat [0-9]+'), 'at '), +]) + + +def test_suite(): + return doctest.DocTestSuite(setUp=setUp, + tearDown=tearDown, + checker=util_checker + checker, + optionflags=doctest.REPORT_NDIFF) diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/tests/testhistoricalconnections.py b/thesisenv/lib/python3.6/site-packages/ZODB/tests/testhistoricalconnections.py new file mode 100644 index 0000000..aa69882 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/tests/testhistoricalconnections.py @@ -0,0 +1,25 @@ 
+############################################################################## +# +# Copyright (c) 2007 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +import manuel.doctest +import manuel.footnote +import manuel.testing +import ZODB.tests.util + +def test_suite(): + return manuel.testing.TestSuite( + manuel.doctest.Manuel(checker=ZODB.tests.util.checker) + + manuel.footnote.Manuel(), + '../historical_connections.txt', + setUp=ZODB.tests.util.setUp, tearDown=ZODB.tests.util.tearDown, + ) diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/tests/testmvcc.py b/thesisenv/lib/python3.6/site-packages/ZODB/tests/testmvcc.py new file mode 100644 index 0000000..ff52af5 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/tests/testmvcc.py @@ -0,0 +1,435 @@ +############################################################################## +# +# Copyright (c) 2004 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. 
+# +############################################################################## +r""" +Multi-version concurrency control tests +======================================= + +Multi-version concurrency control (MVCC) exploits storages that store +multiple revisions of an object to avoid read conflicts. Normally +when an object is read from the storage, its most recent revision is +read. Under MVCC, an older revision may be read so that the transaction +sees a consistent view of the database. + +ZODB guarantees execution-time consistency: A single transaction will +always see a consistent view of the database while it is executing. +If transaction A is running, has already read an object O1, and a +different transaction B modifies object O2, then transaction A can no +longer read the current revision of O2. It must either read the +version of O2 that is consistent with O1 or raise a ReadConflictError. +When MVCC is in use, A will do the former. + +This note includes doctests that explain how MVCC is implemented (and +test that the implementation is correct). The tests use a +MinimalMemoryStorage that implements MVCC support, but not much else. + +***IMPORTANT***: The MVCC approach has changed since these tests were +originally written. The new approach is much simpler because we no +longer call load to get the current state of an object. We call +loadBefore instead, having gotten a transaction time at the start of a +transaction. As a result, the rhythm of the tests is a little odd, +because we no longer need to probe a complex dance that doesn't exist any more. + +>>> from ZODB.tests.test_storage import MinimalMemoryStorage +>>> from ZODB import DB +>>> st = MinimalMemoryStorage() +>>> db = DB(st) + +We will use two different connections with different transaction managers +to make sure that the connections act independently, even though they'll +be run from a single thread. 
+ +>>> import transaction +>>> tm1 = transaction.TransactionManager() +>>> cn1 = db.open(transaction_manager=tm1) + +The test will just use some MinPO objects. The next few lines just +setup an initial database state. + +>>> from ZODB.tests.MinPO import MinPO +>>> r = cn1.root() +>>> r["a"] = MinPO(1) +>>> tm1.get().commit() # make sure the OIDs get allocated sequentially +>>> r["b"] = MinPO(1) +>>> tm1.get().commit() + +Now open a second connection. + +>>> tm2 = transaction.TransactionManager() +>>> cn2 = db.open(transaction_manager=tm2) +>>> from ZODB.utils import p64, u64 +>>> cn2._storage._start == p64(u64(st.lastTransaction()) + 1) +True +>>> txn_time2 = cn2._storage._start + +Connection high-water mark +-------------------------- + +The ZODB Connection tracks a transaction high-water mark, which +bounds the latest transaction id that can be read by the current +transaction and still present a consistent view of the database. +Transactions with ids up to but not including the high-water mark +are OK to read. At the beginning of a transaction, a connection +sets the high-water mark to just over the last transaction time the +storage has seen. + +>>> cn = db.open() + +>>> cn._storage._start == p64(u64(st.lastTransaction()) + 1) +True +>>> cn.db()._mvcc_storage.invalidate(100, dict.fromkeys([1, 2])) +>>> cn._storage._start == p64(u64(st.lastTransaction()) + 1) +True +>>> cn.db()._mvcc_storage.invalidate(200, dict.fromkeys([1, 2])) +>>> cn._storage._start == p64(u64(st.lastTransaction()) + 1) +True + +A connection's high-water mark is set to the transaction id taken from +the first invalidation processed by the connection. Transaction ids are +monotonically increasing, so the first one seen during the current +transaction remains the high-water mark for the duration of the +transaction. + +We'd like simple abort and commit calls to make txn boundaries, +but that doesn't work unless an object is modified. sync() will abort +a transaction and process invalidations. 
+ +>>> cn.sync() +>>> cn._storage._start == p64(u64(st.lastTransaction()) + 1) +True + +Basic functionality +------------------- + +The next bit of code includes a simple MVCC test. One transaction +will modify "a." The other transaction will then modify "b" and commit. + +>>> r1 = cn1.root() +>>> r1["a"].value = 2 +>>> tm1.get().commit() +>>> txn = db.lastTransaction() + +The second connection already has its high-water mark set. + +>>> cn2._storage._start == txn_time2 +True + +It is safe to read "b," because it was not modified by the concurrent +transaction. + +>>> r2 = cn2.root() +>>> r2["b"]._p_serial < cn2._storage._start +True +>>> r2["b"].value +1 +>>> r2["b"].value = 2 + +It is not safe, however, to read the current revision of "a" because +it was modified at the high-water mark. If we read it, we'll get a +non-current version. + +>>> r2["a"].value +1 +>>> r2["a"]._p_serial < cn2._storage._start +True + +We can confirm that we have a non-current revision by asking the +storage. + +>>> db.storage.isCurrent(r2["a"]._p_oid, r2["a"]._p_serial) +False + +It's possible to modify "a", but we get a conflict error when we +commit the transaction. + +>>> r2["a"].value = 3 +>>> tm2.get().commit() # doctest: +ELLIPSIS +Traceback (most recent call last): + ... +ConflictError: database conflict error (oid 0x01, class ZODB.tests.MinPO... + +>>> tm2.get().abort() + +This example will demonstrate that we can commit a transaction if we only +modify current revisions. 
+ +>>> cn2._storage._start == p64(u64(st.lastTransaction()) + 1) +True +>>> txn_time2 = cn2._storage._start + +>>> r1 = cn1.root() +>>> r1["a"].value = 3 +>>> tm1.get().commit() +>>> txn = db.lastTransaction() +>>> cn2._storage._start == txn_time2 +True + +>>> r2["b"].value = r2["a"].value + 1 +>>> r2["b"].value +3 +>>> tm2.get().commit() +>>> cn2._storage._start == p64(u64(st.lastTransaction()) + 1) +True + +Object cache +------------ + +A Connection keeps objects in its cache so that multiple database +references will always point to the same Python object. At +transaction boundaries, objects modified by other transactions are +ghostified so that the next transaction doesn't see stale state. We +need to be sure the non-current objects loaded by MVCC are always +ghosted. It should be trivial, because MVCC is only used when an +invalidation has been received for an object. + +First get the database back in an initial state. + +>>> cn1.sync() +>>> r1["a"].value = 0 +>>> r1["b"].value = 0 +>>> tm1.get().commit() + +>>> cn2.sync() +>>> r2["a"].value +0 +>>> r2["b"].value = 1 +>>> tm2.get().commit() + +>>> r1["b"].value +0 +>>> cn1.sync() # cn2 modified 'b', so cn1 should get a ghost for b +>>> r1["b"]._p_state # -1 means GHOST +-1 + +Closing the connection, committing a transaction, and aborting a transaction, +should all have the same effect on non-current objects in cache. + +>>> def testit(): +... cn1.sync() +... r1["a"].value = 0 +... r1["b"].value = 0 +... tm1.get().commit() +... cn2.sync() +... r2["b"].value = 1 +... tm2.get().commit() + +>>> testit() +>>> r1["b"]._p_state # 0 means UPTODATE, although note it's an older revision +0 +>>> r1["b"].value +0 +>>> r1["a"].value = 1 +>>> tm1.get().commit() +>>> r1["b"]._p_state +-1 + +When a connection is closed, it is saved by the database. It will be +reused by the next open() call (along with its object cache). 
+ +>>> testit() +>>> r1["a"].value = 1 +>>> tm1.get().abort() +>>> cn1.close() +>>> cn3 = db.open() +>>> cn1 is cn3 +True +>>> r1 = cn1.root() + +Although "b" is a ghost in cn1 at this point (because closing a connection +has the same effect on non-current objects in the connection's cache as +committing a transaction), not every object is a ghost. The root was in +the cache and was current, so our first reference to it doesn't return +a ghost. + +>>> r1._p_state # UPTODATE +0 +>>> r1["b"]._p_state # GHOST +-1 + + +Interaction with Savepoints +--------------------------- + +Basically, making a savepoint shouldn't have any effect on what a thread +sees. Before ZODB 3.4.1, the internal TmpStore used when savepoints are +pending didn't delegate all the methods necessary to make this work, so +we'll do a quick test of that here. First get a clean slate: + +>>> cn1.close(); cn2.close() +>>> cn1 = db.open(transaction_manager=tm1) +>>> r1 = cn1.root() +>>> r1["a"].value = 0 +>>> r1["b"].value = 1 +>>> tm1.commit() + +Now modify "a", but not "b", and make a savepoint. + +>>> r1["a"].value = 42 +>>> sp = cn1.savepoint() + +Over in the other connection, modify "b" and commit it. This makes the +first connection's state for b "old". + +>>> cn2 = db.open(transaction_manager=tm2) +>>> r2 = cn2.root() +>>> r2["a"].value, r2["b"].value # shouldn't see the change to "a" +(0, 1) +>>> r2["b"].value = 43 +>>> tm2.commit() +>>> r2["a"].value, r2["b"].value +(0, 43) + +Now deactivate "b" in the first connection, and (re)fetch it. The first +connection should still see 1, due to MVCC, but to get this old state +TmpStore needs to handle the loadBefore() method. + +>>> r1["b"]._p_deactivate() + +Before 3.4.1, the next line died with + AttributeError: TmpStore instance has no attribute 'loadBefore' + +>>> r1["b"]._p_state # ghost +-1 +>>> r1["b"].value +1 + +Just for fun, finish the commit and make sure both connections see the +same things now. 
+ +>>> tm1.commit() +>>> cn1.sync(); cn2.sync() +>>> r1["a"].value, r1["b"].value +(42, 43) +>>> r2["a"].value, r2["b"].value +(42, 43) + +>>> db.close() + +Late invalidation +----------------- + +The combination of ZEO and MVCC used to add more complexity. That's +why ZODB no-longer calls load. :) + +Rather than add all the complexity of ZEO to these tests, the +MinimalMemoryStorage has a hook. We'll write a subclass that will +deliver an invalidation when it loads (or loadBefore's) an object. +The hook allows us to test the Connection code. + +>>> class TestStorage(MinimalMemoryStorage): +... def __init__(self): +... self.hooked = {} +... self.count = 0 +... super(TestStorage, self).__init__() +... def registerDB(self, db): +... self.db = db +... def hook(self, oid, tid, version): +... if oid in self.hooked: +... self.db.invalidate(tid, {oid:1}) +... self.count += 1 + +We can execute this test with a single connection, because we're +synthesizing the invalidation that is normally generated by the second +connection. We need to create two revisions so that there is a +non-current revision to load. + +>>> ts = TestStorage() +>>> db = DB(ts) +>>> cn1 = db.open(transaction_manager=tm1) +>>> r1 = cn1.root() +>>> r1["a"] = MinPO(0) +>>> tm1.get().commit() # make sure the OIDs get allocated sequentially +>>> r1["b"] = MinPO(0) +>>> tm1.get().commit() +>>> r1["b"].value = 1 +>>> tm1.get().commit() +>>> cn1.cacheMinimize() # makes everything in cache a ghost + +>>> oid = r1["b"]._p_oid +>>> ts.hooked[oid] = 1 + +This test is kinda screwy because it depends on an old approach that +has changed. We'll hack the _txn_time to get the original expected +result, even though what's going on now is much simpler. + +>>> cn1._storage._start = ts.lastTransaction() + +Once the oid is hooked, an invalidation will be delivered the next +time it is activated. The code below activates the object, then +confirms that the hook worked and that the old state was retrieved. 
+ +>>> oid in cn1._storage._invalidations +False +>>> r1["b"]._p_state +-1 +>>> r1["b"]._p_activate() +>>> oid in cn1._storage._invalidations +True +>>> ts.count +1 +>>> r1["b"].value +0 + +>>> db.close() + +No earlier revision available +----------------------------- + +We'll reuse the code from the example above, except that there will +only be a single revision of "b." As a result, the attempt to +activate "b" will result in a ReadConflictError. + +>>> ts = TestStorage() +>>> db = DB(ts) +>>> cn1 = db.open(transaction_manager=tm1) +>>> r1 = cn1.root() +>>> r1["a"] = MinPO(0) +>>> tm1.get().commit() # make sure the OIDs get allocated sequentially +>>> r1["b"] = MinPO(0) +>>> tm1.get().commit() +>>> cn1.cacheMinimize() # makes everything in cache a ghost + +>>> oid = r1["b"]._p_oid +>>> ts.hooked[oid] = 1 + +Again, once the oid is hooked, an invalidation will be delivered the next +time it is activated. The code below activates the object, but unlike the +section above, this is no older state to retrieve. + +>>> oid in cn1._storage._invalidations +False +>>> r1["b"]._p_state +-1 +>>> cn1._storage._start = ts.lastTransaction() +>>> r1["b"]._p_activate() # doctest: +ELLIPSIS +Traceback (most recent call last): + ... +ReadConflictError: ... + +>>> db.close() +""" +import doctest +import re + +from zope.testing import renormalizing + +checker = renormalizing.RENormalizing([ + # Python 3 bytes add a "b". + (re.compile("b('.*?')"), r"\1"), + # Python 3 adds module name to exceptions. 
+ (re.compile("ZODB.POSException.ConflictError"), r"ConflictError"), + (re.compile("ZODB.POSException.ReadConflictError"), r"ReadConflictError"), + ]) + +def test_suite(): + return doctest.DocTestSuite(checker=checker) diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/tests/testpersistentclass.py b/thesisenv/lib/python3.6/site-packages/ZODB/tests/testpersistentclass.py new file mode 100644 index 0000000..aebbe9e --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/tests/testpersistentclass.py @@ -0,0 +1,97 @@ +############################################################################## +# +# Copyright (c) 2004 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +import doctest +import sys +import transaction +import unittest +import ZODB.persistentclass +import ZODB.tests.util + +def class_with_circular_ref_to_self(): + """ +It should be possible for a class to reger to itself. 
+ + >>> C = ZODB.persistentclass.PersistentMetaClass('C', (object,), {}) + + >>> C.me = C + >>> db = ZODB.tests.util.DB() + >>> conn = db.open() + >>> conn.root()['C'] = C + >>> transaction.commit() + + >>> conn2 = db.open() + >>> C2 = conn2.root()['C'] + >>> c = C2() + >>> c.__class__.__name__ + 'C' + +""" + +def test_new_ghost_w_persistent_class(): + """ + Peristent meta classes work with PickleCache.new_ghost: + + >>> import ZODB.persistentclass + + >>> PC = ZODB.persistentclass.PersistentMetaClass('PC', (object,), {}) + + >>> PC._p_oid + >>> PC._p_jar + >>> PC._p_serial + >>> PC._p_changed + False + + >>> import persistent + >>> jar = object() + >>> cache = persistent.PickleCache(jar, 10, 100) + >>> cache.new_ghost(b'1', PC) + + >>> PC._p_oid == b'1' + True + >>> PC._p_jar is jar + True + >>> PC._p_serial + >>> PC._p_changed + False + """ + +# XXX need to update files to get newer testing package +class FakeModule(object): + def __init__(self, name, dict): + self.__dict__ = dict + self.__name__ = name + + +def setUp(test): + ZODB.tests.util.setUp(test) + test.globs['some_database'] = ZODB.tests.util.DB() + module = FakeModule('ZODB.persistentclass_txt', test.globs) + sys.modules[module.__name__] = module + +def tearDown(test): + test.globs['some_database'].close() + del sys.modules['ZODB.persistentclass_txt'] + ZODB.tests.util.tearDown(test) + +def test_suite(): + return unittest.TestSuite(( + doctest.DocFileSuite( + "../persistentclass.txt", + setUp=setUp, tearDown=tearDown, + checker=ZODB.tests.util.checker), + doctest.DocTestSuite(setUp=setUp, tearDown=tearDown), + )) + +if __name__ == '__main__': + unittest.main(defaultTest='test_suite') diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/tests/util.py b/thesisenv/lib/python3.6/site-packages/ZODB/tests/util.py new file mode 100644 index 0000000..51e56be --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/tests/util.py @@ -0,0 +1,340 @@ 
+############################################################################## +# +# Copyright (c) 2003 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Conventience function for creating test databases +""" +from ZODB.MappingStorage import DB + +import atexit +import os +import persistent +import re +import tempfile +import time +import transaction +import unittest +import warnings +import ZODB.utils +from ZODB.Connection import TransactionMetaData +import zope.testing.setupstack +from zope.testing import renormalizing + +try: + from unittest import mock +except ImportError: + import mock + +import six +import functools +from time import time as _real_time +from time import gmtime as _real_gmtime +_current_time = _real_time() + + + +checker = renormalizing.RENormalizing([ + (re.compile("<(.*?) object at 0x[0-9a-f]*?>"), + r"<\1 object at 0x000000000000>"), + # Python 3 bytes add a "b". + (re.compile("b('.*?')"), + r"\1"), + (re.compile('b(".*?")'), + r"\1"), + # Persistent 4.4 changes the repr of persistent subclasses, + # and it is slightly different with the C extension and + # pure-Python module + (re.compile('ZODB.tests.testcrossdatabasereferences.'), + ''), + # Python 3 adds module name to exceptions. 
+ (re.compile("ZODB.interfaces.BlobError"), + r"BlobError"), + (re.compile("ZODB.blob.BlobStorageError"), + r"BlobStorageError"), + (re.compile("ZODB.broken.BrokenModified"), + r"BrokenModified"), + (re.compile("ZODB.POSException.POSKeyError"), + r"POSKeyError"), + (re.compile("ZODB.POSException.ConflictError"), + r"ConflictError"), + (re.compile("ZODB.POSException.ReadConflictError"), + r"ReadConflictError"), + (re.compile("ZODB.POSException.InvalidObjectReference"), + r"InvalidObjectReference"), + (re.compile("ZODB.POSException.ReadOnlyHistoryError"), + r"ReadOnlyHistoryError"), + (re.compile("ZODB.POSException.Unsupported"), + r"Unsupported"), + (re.compile("ZConfig.ConfigurationSyntaxError"), + r"ConfigurationSyntaxError"), + ]) + +def setUp(test, name='test'): + clear_transaction_syncs() + transaction.abort() + d = tempfile.mkdtemp(prefix=name) + zope.testing.setupstack.register(test, zope.testing.setupstack.rmtree, d) + zope.testing.setupstack.register( + test, setattr, tempfile, 'tempdir', tempfile.tempdir) + tempfile.tempdir = d + zope.testing.setupstack.register(test, os.chdir, os.getcwd()) + os.chdir(d) + zope.testing.setupstack.register(test, transaction.abort) + +def tearDown(test): + clear_transaction_syncs() + zope.testing.setupstack.tearDown(test) + +class TestCase(unittest.TestCase): + + def setUp(self): + self.globs = {} + name = self.__class__.__name__ + mname = getattr(self, '_TestCase__testMethodName', '') + if mname: + name += '-' + mname + setUp(self, name) + + tearDown = tearDown + +def pack(db): + db.pack(time.time()+1) + +class P(persistent.Persistent): + + def __init__(self, name=None): + self.name = name + + def __repr__(self): + return 'P(%s)' % self.name + +class MininalTestLayer(object): + + __bases__ = () + __module__ = '' + def __init__(self, name): + self.__name__ = name + + def setUp(self): + self.here = os.getcwd() + self.tmp = tempfile.mkdtemp(self.__name__, dir=os.getcwd()) + os.chdir(self.tmp) + + # sigh. 
tearDown isn't called when a layer is run in a sub-process. + atexit.register(clean, self.tmp) + + def tearDown(self): + os.chdir(self.here) + zope.testing.setupstack.rmtree(self.tmp) + + testSetUp = testTearDown = lambda self: None + +def clean(tmp): + if os.path.isdir(tmp): + zope.testing.setupstack.rmtree(tmp) + +class AAAA_Test_Runner_Hack(unittest.TestCase): + """Hack to work around a bug in the test runner. + + The first later (lex sorted) is run first in the foreground + """ + + layer = MininalTestLayer('!no tests here!') + + def testNothing(self): + pass + +def assert_warning(category, func, warning_text=''): + with warnings.catch_warnings(record=True) as w: + warnings.simplefilter('default') + result = func() + for warning in w: + if ((warning.category is category) + and (warning_text in str(warning.message))): + return result + raise AssertionError(w) + +def assert_deprecated(func, warning_text=''): + return assert_warning(DeprecationWarning, func, warning_text) + +def wait(func=None, timeout=30): + if func is None: + return lambda f: wait(f, timeout) + for _ in range(int(timeout*100)): + if func(): + return + time.sleep(.01) + raise AssertionError + +def store(storage, oid, value='x', serial=ZODB.utils.z64): + if not isinstance(oid, bytes): + oid = ZODB.utils.p64(oid) + if not isinstance(serial, bytes): + serial = ZODB.utils.p64(serial) + t = TransactionMetaData() + storage.tpc_begin(t) + storage.store(oid, serial, value, '', t) + storage.tpc_vote(t) + storage.tpc_finish(t) + +def mess_with_time(test=None, globs=None, now=1278864701.5): + now = [now] + def faux_time(): + now[0] += 1 + return now[0] + + if test is None and globs is not None: + # sigh + faux_time.globs = globs + test = faux_time + + import time + zope.testing.setupstack.register(test, setattr, time, 'time', time.time) + + if isinstance(time,type): + time.time = staticmethod(faux_time) # jython + else: + time.time = faux_time + +def clear_transaction_syncs(): + """Clear data managers 
registered with the global transaction manager + + Many tests don't clean up synchronizer's registered with the + global transaction managers, which can wreak havoc with following + tests, now that connections interact with their storages at + transaction boundaries. We need to make sure that we clear any + registered data managers. + + For now, we'll use the transaction manager's + underware. Eventually, an transaction managers need to grow an API + for this. + """ + transaction.manager.clearSynchs() + + +class _TimeWrapper(object): + + def __init__(self, granularity=1.0): + self._granularity = granularity + self._lock = ZODB.utils.Lock() + self.fake_gmtime = mock.Mock() + self.fake_time = mock.Mock() + self._configure_fakes() + + def _configure_fakes(self): + def incr(): + global _current_time # pylint:disable=global-statement + with self._lock: + _current_time = max(_real_time(), _current_time + self._granularity) + return _current_time + self.fake_time.side_effect = incr + + def incr_gmtime(seconds=None): + if seconds is not None: + now = seconds + else: + now = incr() + return _real_gmtime(now) + self.fake_gmtime.side_effect = incr_gmtime + + def install_fakes(self): + time.time = self.fake_time + time.gmtime = self.fake_gmtime + + __enter__ = install_fakes + + def close(self, *args): + time.time = _real_time + time.gmtime = _real_gmtime + + __exit__ = close + + def __call__(self, func): + @functools.wraps(func) + def wrapper(*args, **kwargs): + with self: + return func(*args, **kwargs) + return wrapper + + +def time_monotonically_increases(func_or_granularity): + """ + Decorate a unittest method with this function to cause the value + of :func:`time.time` and :func:`time.gmtime` to monotonically + increase by one each time it is called. This ensures things like + last modified dates always increase. + + We make three guarantees about the value of :func:`time.time` + returned while the decorated function is running: + + 1. 
It is always *at least* the value of the *real* + :func:`time.time`; + + 2. Each call returns a value greater than the previous call; + + 3. Those two constraints hold across different invocations of + functions decorated. This decorator can be applied to a + method in a test case:: + + class TestThing(unittest.TestCase) + @time_monotonically_increases + def test_method(self): + t = time.time() + ... + + It can also be applied to a bare function taking any number of + arguments:: + + @time_monotonically_increases + def utility_function(a, b, c=1): + t = time.time() + ... + + By default, the time will be incremented in 1.0 second intervals. + You can specify a particular granularity as an argument; this is + useful to keep from running too far ahead of the real clock:: + + @time_monotonically_increases(0.1) + def smaller_increment(): + t1 = time.time() + t2 = time.time() + assrt t2 == t1 + 0.1 + """ + if isinstance(func_or_granularity, (six.integer_types, float)): + # We're being used as a factory. + wrapper_factory = _TimeWrapper(func_or_granularity) + return wrapper_factory + + # We're being used bare + wrapper_factory = _TimeWrapper() + return wrapper_factory(func_or_granularity) + + +def reset_monotonic_time(value=0.0): + """ + Make the monotonic clock return the real time on its next + call. 
+ """ + + global _current_time # pylint:disable=global-statement + _current_time = value + + +class MonotonicallyIncreasingTimeMinimalTestLayer(MininalTestLayer): + + def testSetUp(self): + self.time_manager = _TimeWrapper() + self.time_manager.install_fakes() + + def testTearDown(self): + self.time_manager.close() + reset_monotonic_time() diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/tests/warnhook.py b/thesisenv/lib/python3.6/site-packages/ZODB/tests/warnhook.py new file mode 100644 index 0000000..fe8bca1 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/tests/warnhook.py @@ -0,0 +1,57 @@ +############################################################################## +# +# Copyright (c) 2004 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +import warnings + +class WarningsHook(object): + """Hook to capture warnings generated by Python. + + The function warnings.showwarning() is designed to be hooked by + application code, allowing the application to customize the way it + handles warnings. + + This hook captures the unformatted warning information and stores + it in a list. A test can inspect this list after the test is over. + + Issues: + + The warnings module has lots of delicate internal state. If + a warning has been reported once, it won't be reported again. It + may be necessary to extend this class with a mechanism for + modifying the internal state so that we can be guaranteed a + warning will be reported. 
+ + If Python is run with a warnings filter, e.g. python -Werror, + then a test that is trying to inspect a particular warning will + fail. Perhaps this class can be extended to install more-specific + filters the test to work anyway. + """ + + def __init__(self): + self.original = None + self.warnings = [] + + def install(self): + self.original = warnings.showwarning + warnings.showwarning = self.showwarning + + def uninstall(self): + assert self.original is not None + warnings.showwarning = self.original + self.original = None + + def showwarning(self, message, category, filename, lineno): + self.warnings.append((str(message), category, filename, lineno)) + + def clear(self): + self.warnings = [] diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/transact.py b/thesisenv/lib/python3.6/site-packages/ZODB/transact.py new file mode 100644 index 0000000..f1d5518 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/transact.py @@ -0,0 +1,59 @@ +############################################################################## +# +# Copyright (c) 2003 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## +"""Tools to simplify transactions within applications.""" + +from ZODB.POSException import ReadConflictError, ConflictError +import transaction + +def _commit(note): + t = transaction.get() + if note: + t.note(note) + t.commit() + +def transact(f, note=None, retries=5): + """Returns transactional version of function argument f. 
+ + Higher-order function that converts a regular function into + a transactional function. The transactional function will + retry up to retries time before giving up. If note, it will + be added to the transaction metadata when it commits. + + The retries occur on ConflictErrors. If some other + TransactionError occurs, the transaction will not be retried. + """ + + # TODO: deal with ZEO disconnected errors? + + def g(*args, **kwargs): + n = retries + while n: + n -= 1 + try: + r = f(*args, **kwargs) + except ReadConflictError as msg: + transaction.abort() + if not n: + raise + continue + try: + _commit(note) + except ConflictError as msg: + transaction.abort() + if not n: + raise + continue + return r + raise RuntimeError("couldn't commit transaction") + return g diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/utils.py b/thesisenv/lib/python3.6/site-packages/ZODB/utils.py new file mode 100644 index 0000000..1df2460 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/utils.py @@ -0,0 +1,389 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## +from __future__ import print_function +import os +import struct +import sys +import time +import threading +from binascii import hexlify, unhexlify + +from tempfile import mkstemp + +from persistent.timestamp import TimeStamp + +from ZODB._compat import Unpickler +from ZODB._compat import BytesIO +from ZODB._compat import ascii_bytes + +from six import PY2 + +__all__ = ['z64', + 'p64', + 'u64', + 'U64', + 'cp', + 'maxtid', + 'newTid', + 'oid_repr', + 'serial_repr', + 'tid_repr', + 'positive_id', + 'readable_tid_repr', + 'get_pickle_metadata', + 'locked', + ] + + +if PY2: + def as_bytes(obj): + "Convert obj into bytes" + return str(obj) + + def as_text(bytes): + "Convert bytes into string" + return bytes + + # Convert an element of a bytes object into an int + byte_ord = ord + byte_chr = chr + +else: + def as_bytes(obj): + if isinstance(obj, bytes): + # invoking str on a bytes object gives its repr() + return obj + return str(obj).encode("ascii") + + def as_text(bytes): + return bytes.decode("ascii") + + def byte_ord(byte): + return byte # elements of bytes are already ints + + def byte_chr(int): + return bytes((int,)) + +z64 = b'\0' * 8 + +maxtid = b'\x7f\xff\xff\xff\xff\xff\xff\xff' + +assert sys.hexversion >= 0x02030000 + +# The distinction between ints and longs is blurred in Python 2.2, +# so u64() are U64() really the same. 
+ +_OID_STRUCT = struct.Struct('>Q') +_OID_PACK = _OID_STRUCT.pack +_OID_UNPACK = _OID_STRUCT.unpack + + +def p64(v): + """Pack an integer or long into a 8-byte string.""" + try: + return _OID_PACK(v) + except struct.error as e: + raise ValueError(*(e.args + (v,))) + +def u64(v): + """Unpack an 8-byte string into a 64-bit long integer.""" + try: + return _OID_UNPACK(v)[0] + except struct.error as e: + raise ValueError(*(e.args + (v,))) + +U64 = u64 + + +def cp(f1, f2, length=None, bufsize=64 * 1024): + """Copy all data from one file to another. + + It copies the data from the current position of the input file (f1) + appending it to the current position of the output file (f2). + + It copies at most 'length' bytes. If 'length' isn't given, it copies + until the end of the input file. + """ + read = f1.read + write = f2.write + n = bufsize + + if length is None: + old_pos = f1.tell() + f1.seek(0,2) + length = f1.tell() + f1.seek(old_pos) + + while length > 0: + if n > length: + n = length + data = read(n) + if not data: + break + write(data) + length -= len(data) + +def newTid(old): + t = time.time() + ts = TimeStamp(*time.gmtime(t)[:5]+(t%60,)) + if old is not None: + ts = ts.laterThan(TimeStamp(old)) + return ts.raw() + + +def oid_repr(oid): + if isinstance(oid, bytes) and len(oid) == 8: + # Convert to hex and strip leading zeroes. + as_hex = hexlify(oid).lstrip(b'0') + # Ensure two characters per input byte. + if len(as_hex) & 1: + as_hex = b'0' + as_hex + elif as_hex == b'': + as_hex = b'00' + return '0x' + as_hex.decode() + else: + return repr(oid) + +def repr_to_oid(repr): + repr = ascii_bytes(repr) + if repr.startswith(b"0x"): + repr = repr[2:] + as_bin = unhexlify(repr) + as_bin = b"\x00"*(8-len(as_bin)) + as_bin + return as_bin + +serial_repr = oid_repr +tid_repr = serial_repr + +# For example, produce +# '0x03441422948b4399 2002-04-14 20:50:34.815000' +# for 8-byte string tid b'\x03D\x14"\x94\x8bC\x99'. 
+def readable_tid_repr(tid): + result = tid_repr(tid) + if isinstance(tid, bytes) and len(tid) == 8: + result = "%s %s" % (result, TimeStamp(tid)) + return result + +# Addresses can "look negative" on some boxes, some of the time. If you +# feed a "negative address" to an %x format, Python 2.3 displays it as +# unsigned, but produces a FutureWarning, because Python 2.4 will display +# it as signed. So when you want to prodce an address, use positive_id() to +# obtain it. +# _ADDRESS_MASK is 2**(number_of_bits_in_a_native_pointer). Adding this to +# a negative address gives a positive int with the same hex representation as +# the significant bits in the original. + +_ADDRESS_MASK = 256 ** struct.calcsize('P') +def positive_id(obj): + """Return id(obj) as a non-negative integer.""" + + result = id(obj) + if result < 0: + result += _ADDRESS_MASK + assert result > 0 + return result + +# Given a ZODB pickle, return pair of strings (module_name, class_name). +# Do this without importing the module or class object. +# See ZODB/serialize.py's module docstring for the only docs that exist about +# ZODB pickle format. If the code here gets smarter, please update those +# docs to be at least as smart. The code here doesn't appear to make sense +# for what serialize.py calls formats 5 and 6. + +def get_pickle_metadata(data): + # Returns a 2-tuple of strings. + + # ZODB's data records contain two pickles. The first is the class + # of the object, the second is the object. We're only trying to + # pick apart the first here, to extract the module and class names. + if data[0] in (0x80, # Py3k indexes bytes -> int + b'\x80' # Python2 indexes bytes -> bytes + ): # protocol marker, protocol > 1 + data = data[2:] + if data.startswith(b'(c'): # pickle MARK GLOBAL opcode sequence + global_prefix = 2 + elif data.startswith(b'c'): # pickle GLOBAL opcode + global_prefix = 1 + else: + global_prefix = 0 + + if global_prefix: + # Formats 1 and 2. 
+ # Don't actually unpickle a class, because it will attempt to + # load the class. Just break open the pickle and get the + # module and class from it. The module and class names are given by + # newline-terminated strings following the GLOBAL opcode. + modname, classname, rest = data.split(b'\n', 2) + modname = modname[global_prefix:] # strip GLOBAL opcode + return modname.decode(), classname.decode() + + # Else there are a bunch of other possible formats. + f = BytesIO(data) + u = Unpickler(f) + try: + class_info = u.load() + except Exception as err: + return '', '' + if isinstance(class_info, tuple): + if isinstance(class_info[0], tuple): + # Formats 3 and 4. + modname, classname = class_info[0] + else: + # Formats 5 and 6 (probably) end up here. + modname, classname = class_info + else: + # This isn't a known format. + modname = repr(class_info) + classname = '' + return modname, classname + +def mktemp(dir=None, prefix='tmp'): + """Create a temp file, known by name, in a semi-secure manner.""" + handle, filename = mkstemp(dir=dir, prefix=prefix) + os.close(handle) + return filename + +def check_precondition(precondition): + if not precondition(): + raise AssertionError( + "Failed precondition: ", + precondition.__doc__.strip()) + +class Locked(object): + + def __init__(self, func, inst=None, class_=None, preconditions=()): + self.__func__ = func + self.__self__ = inst + self.__self_class__ = class_ + self.preconditions = preconditions + + def __get__(self, inst, class_): + return self.__class__( + self.__func__, inst, class_, self.preconditions) + + def __call__(self, *args, **kw): + inst = self.__self__ + if inst is None: + inst = args[0] + func = self.__func__.__get__(self.__self__, self.__self_class__) + + with inst._lock: + for precondition in self.preconditions: + if not precondition(inst): + raise AssertionError( + "Failed precondition: ", + precondition.__doc__.strip()) + + return func(*args, **kw) + +class locked(object): + + def __init__(self, 
*preconditions): + self.preconditions = preconditions + + def __get__(self, inst, class_): + # We didn't get any preconditions, so we have a single "precondition", + # which is actually the function to call. + func, = self.preconditions + return Locked(func, inst, class_) + + def __call__(self, func): + return Locked(func, preconditions=self.preconditions) + + +if os.environ.get('DEBUG_LOCKING'): # pragma: no cover + # NOTE: This only works on Python 3. + class Lock(object): + + lock_class = threading.Lock + + def __init__(self): + self._lock = self.lock_class() + + def pr(self, name, a=None, kw=None): + f = sys._getframe(2) + if f.f_code.co_filename.endswith('ZODB/utils.py'): + f = sys._getframe(3) + f = '%s:%s' % (f.f_code.co_filename, f.f_lineno) + print(id(self), self._lock, threading.get_ident(), f, name, + a if a else '', kw if kw else '') + + def acquire(self, *a, **kw): + self.pr('acquire', a, kw) + return self._lock.acquire(*a, **kw) + + def release(self): + self.pr('release') + return self._lock.release() + + def __enter__(self): + self.pr('acquire') + return self._lock.acquire() + + def __exit__(self, *ignored): + self.pr('release') + return self._lock.release() + + class RLock(Lock): + + lock_class = threading.RLock + + class Condition(Lock): + + lock_class = threading.Condition + + def wait(self, *a, **kw): + self.pr('wait', a, kw) + return self._lock.wait(*a, **kw) + + def wait_for(self, *a, **kw): + self.pr('wait_for', a, kw) + return self._lock.wait_for(*a, **kw) + + def notify(self, *a, **kw): + self.pr('notify', a, kw) + return self._lock.notify(*a, **kw) + + def notify_all(self): + self.pr('notify_all') + return self._lock.notify_all() + + notifyAll = notify_all + +else: + + from threading import Condition, Lock, RLock + + +import ZODB.POSException + +def load_current(storage, oid, version=''): + """Load the most recent revision of an object by calling loadBefore + + Starting in ZODB 5, it's no longer necessary for storages to + provide a load 
method. + + This function is mainly intended to facilitate transitioning from + load to loadBefore. It's mainly useful for tests that are meant + to test storages, but do so by calling load on the storages. + + This function will likely become unnecessary and be deprecated + some time in the future. + """ + assert not version + r = storage.loadBefore(oid, maxtid) + if r is None: + raise ZODB.POSException.POSKeyError(oid) + assert r[2] is None + return r[:2] diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/utils.txt b/thesisenv/lib/python3.6/site-packages/ZODB/utils.txt new file mode 100644 index 0000000..542fecc --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/utils.txt @@ -0,0 +1,204 @@ +ZODB Utilits Module +=================== + +The ZODB.utils module provides a number of helpful, somewhat random +:), utility functions. + + >>> import ZODB.utils + +This document documents a few of them. Over time, it may document +more. + +64-bit integers and strings +--------------------------------- + +ZODB uses 64-bit transaction ids that are typically represented as +strings, but are sometimes manipulated as integers. Object ids are +strings too and it is common to ise 64-bit strings that are just +packed integers. + +Functions p64 and u64 pack and unpack integers as strings: + + >>> ZODB.utils.p64(250347764455111456) + '\x03yi\xf7"\xa8\xfb ' + + >>> print(ZODB.utils.u64(b'\x03yi\xf7"\xa8\xfb ')) + 250347764455111456 + +The contant z64 has zero packed as a 64-bit string: + + >>> ZODB.utils.z64 + '\x00\x00\x00\x00\x00\x00\x00\x00' + +Transaction id generation +------------------------- + +Storages assign transaction ids as transactions are committed. These +are based on UTC time, but must be strictly increasing. The +newTid function akes this pretty easy. 
+ +To see this work (in a predictable way), we'll first hack time.time: + + >>> import time + >>> old_time = time.time + >>> time_value = 1224825068.12 + >>> faux_time = lambda: time_value + >>> if isinstance(time,type): + ... time.time = staticmethod(faux_time) # Jython + ... else: + ... time.time = faux_time + +Now, if we ask for a new time stamp, we'll get one based on our faux +time: + + >>> tid = ZODB.utils.newTid(None) + >>> tid + '\x03yi\xf7"\xa54\x88' + +newTid requires an old tid as an argument. The old tid may be None, if +we don't have a previous transaction id. + +This time is based on the current time, which we can see by converting +it to a time stamp. + + >>> import ZODB.TimeStamp + >>> print(ZODB.TimeStamp.TimeStamp(tid)) + 2008-10-24 05:11:08.120000 + +To assure that we get a new tid that is later than the old, we can +pass an existing tid. Let's pass the tid we just got. + + >>> tid2 = ZODB.utils.newTid(tid) + >>> ZODB.utils.u64(tid), ZODB.utils.u64(tid2) + (250347764454864008, 250347764454864009) + +Here, since we called it at the same time, we got a time stamp that +was only slightly larger than the previos one. Of course, at a later +time, the time stamp we get will be based on the time: + + >>> time_value = 1224825069.12 + >>> tid = ZODB.utils.newTid(tid2) + >>> print(ZODB.TimeStamp.TimeStamp(tid)) + 2008-10-24 05:11:09.120000 + + + >>> time.time = old_time + + +Locking support +--------------- + +Storages are required to be thread safe. The locking descriptor helps +automate that. It arranges for a lock to be acquired when a function +is called and released when a function exits. To demonstrate this, +we'll create a "lock" type that simply prints when it is called: + + >>> class Lock: + ... def acquire(self): + ... print('acquire') + ... def release(self): + ... print('release') + ... def __enter__(self): + ... return self.acquire() + ... def __exit__(self, *ignored): + ... 
return self.release() + +Now we'll demonstrate the descriptor: + + >>> class C: + ... _lock = Lock() + ... _lock_acquire = _lock.acquire + ... _lock_release = _lock.release + ... + ... @ZODB.utils.locked + ... def meth(self, *args, **kw): + ... print('meth %r %r' %(args, kw)) + +The descriptor expects the instance it wraps to have a '_lock +attribute. + + >>> C().meth(1, 2, a=3) + acquire + meth (1, 2) {'a': 3} + release + +.. Edge cases + + We can get the method from the class: + + >>> C.meth # doctest: +ELLIPSIS + + + >>> C.meth(C()) + acquire + meth () {} + release + + >>> class C2: + ... _lock = Lock() + ... _lock_acquire = _lock.acquire + ... _lock_release = _lock.release + + # XXX: Py3: Pytohn 3 does not have the concept of an unbound method. + #>>> C.meth(C2()) # doctest: +NORMALIZE_WHITESPACE + #Traceback (most recent call last): + #... + #TypeError: unbound method meth() must be called with C instance + #as first argument (got C2 instance instead) + +Preconditions +------------- + +Often, we want to supply method preconditions. The locking descriptor +supports optional method preconditions [1]_. + + >>> class C: + ... def __init__(self): + ... self._lock = Lock() + ... self._opened = True + ... self._transaction = None + ... + ... def opened(self): + ... """The object is open + ... """ + ... print('checking if open') + ... return self._opened + ... + ... def not_in_transaction(self): + ... """The object is not in a transaction + ... """ + ... print('checking if in a transaction') + ... return self._transaction is None + ... + ... @ZODB.utils.locked(opened, not_in_transaction) + ... def meth(self, *args, **kw): + ... print('meth %r %r' % (args, kw)) + + >>> c = C() + >>> c.meth(1, 2, a=3) + acquire + checking if open + checking if in a transaction + meth (1, 2) {'a': 3} + release + + >>> c._transaction = 1 + >>> c.meth(1, 2, a=3) # doctest: +NORMALIZE_WHITESPACE + Traceback (most recent call last): + ... 
+ AssertionError: + ('Failed precondition: ', 'The object is not in a transaction') + + >>> c._opened = False + >>> c.meth(1, 2, a=3) # doctest: +NORMALIZE_WHITESPACE + Traceback (most recent call last): + ... + AssertionError: ('Failed precondition: ', 'The object is open') + + +.. [1] Arguably, preconditions should be handled via separate + descriptors, but for ZODB storages, almost all methods need to be + locked. Combining preconditions with locking provides both + efficiency and concise expressions. A more general-purpose + facility would almost certainly provide separate descriptors for + preconditions. diff --git a/thesisenv/lib/python3.6/site-packages/ZODB/valuedoc.py b/thesisenv/lib/python3.6/site-packages/ZODB/valuedoc.py new file mode 100644 index 0000000..62e5959 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB/valuedoc.py @@ -0,0 +1,13 @@ +"""Work around an issue with defining class attribute documentation. + +See http://stackoverflow.com/questions/9153473/sphinx-values-for-attributes-reported-as-none/39276413 +""" + +class ValueDoc(object): + + def __init__(self, text): + self.text = text + + def __repr__(self): + return self.text + diff --git a/thesisenv/lib/python3.6/site-packages/ZODB3-3.11.0-py3.6.egg-info/PKG-INFO b/thesisenv/lib/python3.6/site-packages/ZODB3-3.11.0-py3.6.egg-info/PKG-INFO new file mode 100644 index 0000000..b69734a --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB3-3.11.0-py3.6.egg-info/PKG-INFO @@ -0,0 +1,81 @@ +Metadata-Version: 2.1 +Name: ZODB3 +Version: 3.11.0 +Summary: ZODB3 - Meta release for ZODB, persistent, BTrees and ZEO +Home-page: UNKNOWN +Maintainer: Zope Foundation and Contributors +Maintainer-email: zodb-dev@zope.org +License: ZPL 2.1 +Description: ========================================================= + ZODB3 - Meta release for ZODB, persistent, BTrees and ZEO + ========================================================= + + The ZODB3 distribution is a "meta" distribution that requires 
projects: + ZODB, persistent, BTrees and ZEO, which, in the past, were included in + the ZODB 3 project. + + For more information on ZODB, persistent, BTrees, and ZEO, see the + respective project pages in PyPI: + + - http://pypi.python.org/pypi/ZODB + + - http://pypi.python.org/pypi/persistent + + - http://pypi.python.org/pypi/BTrees + + - http://pypi.python.org/pypi/ZEO + + and http://zodb.org. + + + ============== + Change History + ============== + + 3.11.0 (2014-05-22) + ===================== + + No changes from ZODB3 3.11.0a3: this release just makes the meta-package + version of ZODB3 installable by default under buldout 2.x. + + 3.11.0a3 (2013-03-02) + ===================== + + Made the setup.py file Python 3 compatible. Actual Python 3 compatibility + depends on the porting status of the BTrees, persistent, ZEO and ZODB + projects. + + 3.11.0a2 (2012-12-02) + ===================== + + Fixed: The ZODB3 "test" extra was inadvertently removed. + + Note that the text extra exists solely to allow other packages + to use ``ZODB [test]`` in their test dependencies. 
+ + 3.11.0a1 (2012-12-01) + ===================== + + ZODB3 depends on: + + - persistent 4.0 + - BTrees 4.0 + - ZODB 4.0 + - ZEO 4.0 + +Platform: any +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: Zope Public License +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.6 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.2 +Classifier: Programming Language :: Python :: 3.3 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Topic :: Database +Classifier: Operating System :: Microsoft :: Windows +Classifier: Operating System :: Unix +Classifier: Framework :: ZODB +Provides-Extra: test diff --git a/thesisenv/lib/python3.6/site-packages/ZODB3-3.11.0-py3.6.egg-info/SOURCES.txt b/thesisenv/lib/python3.6/site-packages/ZODB3-3.11.0-py3.6.egg-info/SOURCES.txt new file mode 100644 index 0000000..b967509 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB3-3.11.0-py3.6.egg-info/SOURCES.txt @@ -0,0 +1,16 @@ +.gitignore +.travis.yml +CHANGES.txt +HISTORY.txt +MANIFEST.in +README.txt +buildout.cfg +pip-delete-this-directory.txt +setup.cfg +setup.py +ZODB3.egg-info/PKG-INFO +ZODB3.egg-info/SOURCES.txt +ZODB3.egg-info/dependency_links.txt +ZODB3.egg-info/not-zip-safe +ZODB3.egg-info/requires.txt +ZODB3.egg-info/top_level.txt \ No newline at end of file diff --git a/thesisenv/lib/python3.6/site-packages/ZODB3-3.11.0-py3.6.egg-info/dependency_links.txt b/thesisenv/lib/python3.6/site-packages/ZODB3-3.11.0-py3.6.egg-info/dependency_links.txt new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB3-3.11.0-py3.6.egg-info/dependency_links.txt @@ -0,0 +1 @@ + diff --git a/thesisenv/lib/python3.6/site-packages/ZODB3-3.11.0-py3.6.egg-info/installed-files.txt 
b/thesisenv/lib/python3.6/site-packages/ZODB3-3.11.0-py3.6.egg-info/installed-files.txt new file mode 100644 index 0000000..b1be01d --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB3-3.11.0-py3.6.egg-info/installed-files.txt @@ -0,0 +1,6 @@ +PKG-INFO +SOURCES.txt +dependency_links.txt +not-zip-safe +requires.txt +top_level.txt diff --git a/thesisenv/lib/python3.6/site-packages/ZODB3-3.11.0-py3.6.egg-info/not-zip-safe b/thesisenv/lib/python3.6/site-packages/ZODB3-3.11.0-py3.6.egg-info/not-zip-safe new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB3-3.11.0-py3.6.egg-info/not-zip-safe @@ -0,0 +1 @@ + diff --git a/thesisenv/lib/python3.6/site-packages/ZODB3-3.11.0-py3.6.egg-info/requires.txt b/thesisenv/lib/python3.6/site-packages/ZODB3-3.11.0-py3.6.egg-info/requires.txt new file mode 100644 index 0000000..8358d9c --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB3-3.11.0-py3.6.egg-info/requires.txt @@ -0,0 +1,11 @@ +ZEO>=4.0.0dev +ZODB>=4.0.0dev +persistent>=4.0.0dev +BTrees>=4.0.0dev +transaction + +[test] +ZEO[test] +ZODB[test] +BTrees[test] +persistent[test] diff --git a/thesisenv/lib/python3.6/site-packages/ZODB3-3.11.0-py3.6.egg-info/top_level.txt b/thesisenv/lib/python3.6/site-packages/ZODB3-3.11.0-py3.6.egg-info/top_level.txt new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ZODB3-3.11.0-py3.6.egg-info/top_level.txt @@ -0,0 +1 @@ + diff --git a/thesisenv/lib/python3.6/site-packages/card_me/__init__.py b/thesisenv/lib/python3.6/site-packages/card_me/__init__.py new file mode 100644 index 0000000..30ba3f0 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/card_me/__init__.py @@ -0,0 +1,86 @@ +""" +VObject Overview +================ + vobject parses vCard or vCalendar files, returning a tree of Python objects. + It also provids an API to create vCard or vCalendar data structures which + can then be serialized. 
+ + Parsing existing streams + ------------------------ + Streams containing one or many L{Component}s can be + parsed using L{readComponents}. As each Component + is parsed, vobject will attempt to give it a L{Behavior}. + If an appropriate Behavior is found, any base64, quoted-printable, or + backslash escaped data will automatically be decoded. Dates and datetimes + will be transformed to datetime.date or datetime.datetime instances. + Components containing recurrence information will have a special rruleset + attribute (a dateutil.rrule.rruleset instance). + + Validation + ---------- + L{Behavior} classes implement validation for + L{Component}s. To validate, an object must have all + required children. There (TODO: will be) a toggle to raise an exception or + just log unrecognized, non-experimental children and parameters. + + Creating objects programatically + -------------------------------- + A L{Component} can be created from scratch. No encoding + is necessary, serialization will encode data automatically. Factory + functions (TODO: will be) available to create standard objects. 
+ + Serializing objects + ------------------- + Serialization: + - Looks for missing required children that can be automatically generated, + like a UID or a PRODID, and adds them + - Encodes all values that can be automatically encoded + - Checks to make sure the object is valid (unless this behavior is + explicitly disabled) + - Appends the serialized object to a buffer, or fills a new + buffer and returns it + + Examples + -------- + + >>> import datetime + >>> import dateutil.rrule as rrule + >>> x = iCalendar() + >>> x.add('vevent') + + >>> x + ]> + >>> v = x.vevent + >>> utc = icalendar.utc + >>> v.add('dtstart').value = datetime.datetime(2004, 12, 15, 14, tzinfo = utc) + >>> v + ]> + >>> x + ]>]> + >>> newrule = rrule.rruleset() + >>> newrule.rrule(rrule.rrule(rrule.WEEKLY, count=2, dtstart=v.dtstart.value)) + >>> v.rruleset = newrule + >>> list(v.rruleset) + [datetime.datetime(2004, 12, 15, 14, 0, tzinfo=tzutc()), datetime.datetime(2004, 12, 22, 14, 0, tzinfo=tzutc())] + >>> v.add('uid').value = "randomuid@MYHOSTNAME" + >>> print x.serialize() + BEGIN:VCALENDAR + VERSION:2.0 + PRODID:-//PYVOBJECT//NONSGML Version 1//EN + BEGIN:VEVENT + UID:randomuid@MYHOSTNAME + DTSTART:20041215T140000Z + RRULE:FREQ=WEEKLY;COUNT=2 + END:VEVENT + END:VCALENDAR + +""" + +from .base import newFromBehavior, readOne, readComponents +from . 
import icalendar, vcard + +def iCalendar(): + return newFromBehavior('vcalendar', '2.0') + +def vCard(): + return newFromBehavior('vcard', '3.0') diff --git a/thesisenv/lib/python3.6/site-packages/card_me/base.py b/thesisenv/lib/python3.6/site-packages/card_me/base.py new file mode 100644 index 0000000..841c5fb --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/card_me/base.py @@ -0,0 +1,1160 @@ +"""card_me/vobject module for reading vCard and vCalendar files.""" + +from __future__ import print_function + +import copy +import logging +import re +import six +import sys + +#------------------------------------ Python 2/3 compatibility challenges ----- +# Python 3 no longer has a basestring type, so.... +try: + basestring = basestring +except NameError: + basestring = (str,bytes) + +## One more problem ... in python2 the str operator breaks on unicode +## objects containing non-ascii characters +try: + unicode + def str_(s): + if type(s) == unicode: + return s.encode('utf-8') + else: + return str(s) +except NameError: + def str_(s): + return s + +#------------------------------------ Logging ---------------------------------- +logger = logging.getLogger(__name__) +if not logging.getLogger().handlers: + handler = logging.StreamHandler() + formatter = logging.Formatter('%(name)s %(levelname)s %(message)s') + handler.setFormatter(formatter) + logger.addHandler(handler) +logger.setLevel(logging.ERROR) # Log errors +DEBUG = False # Don't waste time on debug calls + +#----------------------------------- Constants --------------------------------- +CR = '\r' +LF = '\n' +CRLF = CR + LF +SPACE = ' ' +TAB = '\t' +SPACEORTAB = SPACE + TAB + +#--------------------------------- Main classes -------------------------------- + + +class VBase(object): + """ + Base class for ContentLine and Component. + + @ivar behavior: + The Behavior class associated with this object, which controls + validation, transformations, and encoding. 
+ @ivar parentBehavior: + The object's parent's behavior, or None if no behaviored parent exists. + @ivar isNative: + Boolean describing whether this component is a Native instance. + @ivar group: + An optional group prefix, should be used only to indicate sort order in + vCards, according to spec. + + Current spec: 4.0 (http://tools.ietf.org/html/rfc6350) + """ + def __init__(self, group=None, *args, **kwds): + super(VBase, self).__init__(*args, **kwds) + self.group = group + self.behavior = None + self.parentBehavior = None + self.isNative = False + + def copy(self, copyit): + self.group = copyit.group + self.behavior = copyit.behavior + self.parentBehavior = copyit.parentBehavior + self.isNative = copyit.isNative + + def validate(self, *args, **kwds): + """ + Call the behavior's validate method, or return True. + """ + if self.behavior: + return self.behavior.validate(self, *args, **kwds) + return True + + def getChildren(self): + """ + Return an iterable containing the contents of the object. + """ + return [] + + def clearBehavior(self, cascade=True): + """ + Set behavior to None. Do for all descendants if cascading. + """ + self.behavior=None + if cascade: + self.transformChildrenFromNative() + + def autoBehavior(self, cascade=False): + """ + Set behavior if name is in self.parentBehavior.knownChildren. + + If cascade is True, unset behavior and parentBehavior for all + descendants, then recalculate behavior and parentBehavior. 
+ + """ + parentBehavior = self.parentBehavior + if parentBehavior is not None: + knownChildTup = parentBehavior.knownChildren.get(self.name, None) + if knownChildTup is not None: + behavior = getBehavior(self.name, knownChildTup[2]) + if behavior is not None: + self.setBehavior(behavior, cascade) + if isinstance(self, ContentLine) and self.encoded: + self.behavior.decode(self) + elif isinstance(self, ContentLine): + self.behavior = parentBehavior.defaultBehavior + if self.encoded and self.behavior: + self.behavior.decode(self) + + def setBehavior(self, behavior, cascade=True): + """ + Set behavior. If cascade is True, autoBehavior all descendants. + """ + self.behavior = behavior + if cascade: + for obj in self.getChildren(): + obj.parentBehavior = behavior + obj.autoBehavior(True) + + def transformToNative(self): + """ + Transform this object into a custom VBase subclass. + + transformToNative should always return a representation of this object. + It may do so by modifying self in place then returning self, or by + creating a new object. + + """ + if self.isNative or not self.behavior or not self.behavior.hasNative: + return self + else: + try: + return self.behavior.transformToNative(self) + except Exception as e: + # wrap errors in transformation in a ParseError + lineNumber = getattr(self, 'lineNumber', None) + + if isinstance(e, ParseError): + if lineNumber is not None: + e.lineNumber = lineNumber + raise + else: + msg = "In transformToNative, unhandled exception on line %s: %s: %s" + msg = msg % (lineNumber, sys.exc_info()[0], sys.exc_info()[1]) + raise ParseError(msg, lineNumber) + + def transformFromNative(self): + """ + Return self transformed into a ContentLine or Component if needed. + + May have side effects. If it does, transformFromNative and + transformToNative MUST have perfectly inverse side effects. Allowing + such side effects is convenient for objects whose transformations only + change a few attributes. 
+ + Note that it isn't always possible for transformFromNative to be a + perfect inverse of transformToNative, in such cases transformFromNative + should return a new object, not self after modifications. + + """ + if self.isNative and self.behavior and self.behavior.hasNative: + try: + return self.behavior.transformFromNative(self) + except Exception as e: + # wrap errors in transformation in a NativeError + lineNumber = getattr(self, 'lineNumber', None) + if isinstance(e, NativeError): + if lineNumber is not None: + e.lineNumber = lineNumber + raise + else: + msg = "In transformFromNative, unhandled exception on line %s %s: %s" + msg = msg % (lineNumber, sys.exc_info()[0], sys.exc_info()[1]) + raise NativeError(msg, lineNumber) + else: + return self + + def transformChildrenToNative(self): + """Recursively replace children with their native representation.""" + pass + + def transformChildrenFromNative(self, clearBehavior=True): + """Recursively transform native children to vanilla representations.""" + pass + + def serialize(self, buf=None, lineLength=75, validate=True, behavior=None): + """ + Serialize to buf if it exists, otherwise return a string. + + Use self.behavior.serialize if behavior exists. + + """ + if not behavior: + behavior = self.behavior + + if behavior: + #print("serializing %s with behavior %s" % (self.name, behavior)) + if DEBUG: + logger.debug("serializing %s with behavior %s" % (self.name, behavior)) + return behavior.serialize(self, buf, lineLength, validate) + else: + if DEBUG: + logger.debug("serializing %s without behavior" % self.name) + return defaultSerialize(self, buf, lineLength) + + +def toVName(name, stripNum = 0, upper = False): + """ + Turn a Python name into an iCalendar style name, + optionally uppercase and with characters stripped off. 
+ """ + if upper: + name = name.upper() + if stripNum != 0: + name = name[:-stripNum] + return name.replace('_', '-') + + +class ContentLine(VBase): + """ + Holds one content line for formats like vCard and vCalendar. + + For example:: + + + @ivar name: + The uppercased name of the contentline. + @ivar params: + A dictionary of parameters and associated lists of values (the list may + be empty for empty parameters). + @ivar value: + The value of the contentline. + @ivar singletonparams: + A list of parameters for which it's unclear if the string represents the + parameter name or the parameter value. In vCard 2.1, "The value string + can be specified alone in those cases where the value is unambiguous". + This is crazy, but we have to deal with it. + @ivar encoded: + A boolean describing whether the data in the content line is encoded. + Generally, text read from a serialized vCard or vCalendar should be + considered encoded. Data added programmatically should not be encoded. + @ivar lineNumber: + An optional line number associated with the contentline. + """ + def __init__(self, name, params, value, group=None, encoded=False, + isNative=False, lineNumber = None, *args, **kwds): + """ + Take output from parseLine, convert params list to dictionary. 
+ + Group is used as a positional argument to match parseLine's return + + """ + super(ContentLine, self).__init__(group, *args, **kwds) + + self.name = name.upper() + self.encoded = encoded + self.params = {} + self.singletonparams = [] + self.isNative = isNative + self.lineNumber = lineNumber + self.value = value + + def updateTable(x): + if len(x) == 1: + self.singletonparams += x + else: + paramlist = self.params.setdefault(x[0].upper(), []) + paramlist.extend(x[1:]) + + list(map(updateTable, params)) + + qp = False + if 'ENCODING' in self.params: + if 'QUOTED-PRINTABLE' in self.params['ENCODING']: + qp = True + self.params['ENCODING'].remove('QUOTED-PRINTABLE') + if 0==len(self.params['ENCODING']): + del self.params['ENCODING'] + if 'QUOTED-PRINTABLE' in self.singletonparams: + qp = True + self.singletonparams.remove('QUOTED-PRINTABLE') + if qp: + self.value = self.value.decode('quoted-printable') + + @classmethod + def duplicate(clz, copyit): + newcopy = clz('', {}, '') + newcopy.copy(copyit) + return newcopy + + def copy(self, copyit): + super(ContentLine, self).copy(copyit) + self.name = copyit.name + self.value = copy.copy(copyit.value) + self.encoded = self.encoded + self.params = copy.copy(copyit.params) + for k, v in self.params.items(): + self.params[k] = copy.copy(v) + self.singletonparams = copy.copy(copyit.singletonparams) + self.lineNumber = copyit.lineNumber + + def __eq__(self, other): + try: + return (self.name == other.name) and (self.params == other.params) and (self.value == other.value) + except Exception: + return False + + def __getattr__(self, name): + """ + Make params accessible via self.foo_param or self.foo_paramlist. + + Underscores, legal in python variable names, are converted to dashes, + which are legal in IANA tokens. 
+ + """ + try: + if name.endswith('_param'): + return self.params[toVName(name, 6, True)][0] + elif name.endswith('_paramlist'): + return self.params[toVName(name, 10, True)] + else: + raise AttributeError(name) + except KeyError: + raise AttributeError(name) + + def __setattr__(self, name, value): + """ + Make params accessible via self.foo_param or self.foo_paramlist. + + Underscores, legal in python variable names, are converted to dashes, + which are legal in IANA tokens. + + """ + if name.endswith('_param'): + if type(value) == list: + self.params[toVName(name, 6, True)] = value + else: + self.params[toVName(name, 6, True)] = [value] + elif name.endswith('_paramlist'): + if type(value) == list: + self.params[toVName(name, 10, True)] = value + else: + raise VObjectError("Parameter list set to a non-list") + else: + prop = getattr(self.__class__, name, None) + if isinstance(prop, property): + prop.fset(self, value) + else: + object.__setattr__(self, name, value) + + def __delattr__(self, name): + try: + if name.endswith('_param'): + del self.params[toVName(name, 6, True)] + elif name.endswith('_paramlist'): + del self.params[toVName(name, 10, True)] + else: + object.__delattr__(self, name) + except KeyError: + raise AttributeError(name) + + def valueRepr( self ): + """ + Transform the representation of the value + according to the behavior, if any. + """ + v = self.value + if self.behavior: + v = self.behavior.valueRepr( self ) + return v + + def __str__(self): + return "<%s%s%s>" % (self.name, self.params, self.valueRepr()) + + def __repr__(self): + return self.__str__() + + def prettyPrint(self, level = 0, tabwidth=3): + pre = ' ' * level * tabwidth + print(pre, self.name + ":", self.valueRepr()) + if self.params: + print(pre, "params for ", self.name + ':') + for k in self.params.keys(): + print(pre + ' ' * tabwidth, k, self.params[k]) + + +class Component(VBase): + """ + A complex property that can contain multiple ContentLines. 
+ + For our purposes, a component must start with a BEGIN:xxxx line and end with + END:xxxx, or have a PROFILE:xxx line if a top-level component. + + @ivar contents: + A dictionary of lists of Component or ContentLine instances. The keys + are the lowercased names of child ContentLines or Components. + Note that BEGIN and END ContentLines are not included in contents. + @ivar name: + Uppercase string used to represent this Component, i.e VCARD if the + serialized object starts with BEGIN:VCARD. + @ivar useBegin: + A boolean flag determining whether BEGIN: and END: lines should + be serialized. + + """ + def __init__(self, name=None, *args, **kwds): + super(Component, self).__init__(*args, **kwds) + self.contents = {} + if name: + self.name=name.upper() + self.useBegin = True + else: + self.name = '' + self.useBegin = False + + self.autoBehavior() + + @classmethod + def duplicate(clz, copyit): + newcopy = clz() + newcopy.copy(copyit) + return newcopy + + def copy(self, copyit): + super(Component, self).copy(copyit) + + # deep copy of contents + self.contents = {} + for key, lvalue in copyit.contents.items(): + newvalue = [] + for value in lvalue: + newitem = value.duplicate(value) + newvalue.append(newitem) + self.contents[key] = newvalue + + self.name = copyit.name + self.useBegin = copyit.useBegin + + def setProfile(self, name): + """ + Assign a PROFILE to this unnamed component. + + Used by vCard, not by vCalendar. + + """ + if self.name or self.useBegin: + if self.name == name: + return + raise VObjectError("This component already has a PROFILE or uses BEGIN.") + self.name = name.upper() + + def __getattr__(self, name): + """ + For convenience, make self.contents directly accessible. + + Underscores, legal in python variable names, are converted to dashes, + which are legal in IANA tokens. 
+ + """ + # if the object is being re-created by pickle, self.contents may not + # be set, don't get into an infinite loop over the issue + if name == 'contents': + return object.__getattribute__(self, name) + try: + if name.endswith('_list'): + return self.contents[toVName(name, 5)] + else: + return self.contents[toVName(name)][0] + except KeyError: + raise AttributeError(name) + + normal_attributes = ['contents','name','behavior','parentBehavior','group'] + def __setattr__(self, name, value): + """ + For convenience, make self.contents directly accessible. + + Underscores, legal in python variable names, are converted to dashes, + which are legal in IANA tokens. + + """ + if name not in self.normal_attributes and name.lower()==name: + if type(value) == list: + if name.endswith('_list'): + name = name[:-5] + self.contents[toVName(name)] = value + elif name.endswith('_list'): + raise VObjectError("Component list set to a non-list") + else: + self.contents[toVName(name)] = [value] + else: + prop = getattr(self.__class__, name, None) + if isinstance(prop, property): + prop.fset(self, value) + else: + object.__setattr__(self, name, value) + + def __delattr__(self, name): + try: + if name not in self.normal_attributes and name.lower()==name: + if name.endswith('_list'): + del self.contents[toVName(name, 5)] + else: + del self.contents[toVName(name)] + else: + object.__delattr__(self, name) + except KeyError: + raise AttributeError(name) + + def getChildValue(self, childName, default = None, childNumber = 0): + """ + Return a child's value (the first, by default), or None. + """ + child = self.contents.get(toVName(childName)) + if child is None: + return default + else: + return child[childNumber].value + + def add(self, objOrName, group = None): + """ + Add objOrName to contents, set behavior if it can be inferred. + + If objOrName is a string, create an empty component or line based on + behavior. If no behavior is found for the object, add a ContentLine. 
+ + group is an optional prefix to the name of the object (see RFC 2425). + """ + if isinstance(objOrName, VBase): + obj = objOrName + if self.behavior: + obj.parentBehavior = self.behavior + obj.autoBehavior(True) + else: + name = objOrName.upper() + try: + id=self.behavior.knownChildren[name][2] + behavior = getBehavior(name, id) + if behavior.isComponent: + obj = Component(name) + else: + obj = ContentLine(name, [], '', group) + obj.parentBehavior = self.behavior + obj.behavior = behavior + obj = obj.transformToNative() + except (KeyError, AttributeError): + obj = ContentLine(objOrName, [], '', group) + if obj.behavior is None and self.behavior is not None: + if isinstance(obj, ContentLine): + obj.behavior = self.behavior.defaultBehavior + self.contents.setdefault(obj.name.lower(), []).append(obj) + return obj + + def remove(self, obj): + """Remove obj from contents.""" + named = self.contents.get(obj.name.lower()) + if named: + try: + named.remove(obj) + if len(named) == 0: + del self.contents[obj.name.lower()] + except ValueError: + pass; + + def getChildren(self): + """Return an iterable of all children.""" + for objList in self.contents.values(): + for obj in objList: yield obj + + def components(self): + """Return an iterable of all Component children.""" + return (i for i in self.getChildren() if isinstance(i, Component)) + + def lines(self): + """Return an iterable of all ContentLine children.""" + return (i for i in self.getChildren() if isinstance(i, ContentLine)) + + def sortChildKeys(self): + try: + first = [s for s in self.behavior.sortFirst if s in self.contents] + except Exception: + first = [] + return first + sorted(k for k in self.contents.keys() if k not in first) + + def getSortedChildren(self): + return [obj for k in self.sortChildKeys() for obj in self.contents[k]] + + def setBehaviorFromVersionLine(self, versionLine): + """Set behavior if one matches name, versionLine.value.""" + v = getBehavior(self.name, versionLine.value) + if v: + 
self.setBehavior(v) + + def transformChildrenToNative(self): + """ + Recursively replace children with their native representation. + + Sort to get dependency order right, like vtimezone before vevent. + + """ + for childArray in (self.contents[k] for k in self.sortChildKeys()): + for child in childArray: + child = child.transformToNative() + child.transformChildrenToNative() + + def transformChildrenFromNative(self, clearBehavior=True): + """ + Recursively transform native children to vanilla representations. + """ + for childArray in self.contents.values(): + for child in childArray: + child = child.transformFromNative() + child.transformChildrenFromNative(clearBehavior) + if clearBehavior: + child.behavior = None + child.parentBehavior = None + + def __str__(self): + if self.name: + return "<%s| %s>" % (self.name, self.getSortedChildren()) + else: + return u'<*unnamed*| {}>'.format(self.getSortedChildren()) + + def __repr__(self): + return self.__str__() + + def prettyPrint(self, level = 0, tabwidth=3): + pre = ' ' * level * tabwidth + print(pre, self.name) + if isinstance(self, Component): + for line in self.getChildren(): + line.prettyPrint(level + 1, tabwidth) + + +class VObjectError(Exception): + def __init__(self, msg, lineNumber=None): + self.msg = msg + if lineNumber is not None: + self.lineNumber = lineNumber + + def __str__(self): + if hasattr(self, 'lineNumber'): + return "At line %s: %s" % (self.lineNumber, self.msg) + else: + return repr(self.msg) + + +class ParseError(VObjectError): + pass + + +class ValidateError(VObjectError): + pass + + +class NativeError(VObjectError): + pass + + +#--------- Parsing functions and parseLine regular expressions ------------------ + +patterns = {} + +# Note that underscore is not legal for names, it's included because +# Lotus Notes uses it +patterns['name'] = '[a-zA-Z0-9\-_]+' +patterns['safe_char'] = '[^";:,]' +patterns['qsafe_char'] = '[^"]' + +# the combined Python string replacement and regex syntax is a 
little confusing; +# remember that %(foobar)s is replaced with patterns['foobar'], so for instance +# param_value is any number of safe_chars or any number of qsaf_chars surrounded +# by double quotes. + +patterns['param_value'] = ' "%(qsafe_char)s * " | %(safe_char)s * ' % patterns + + +# get a tuple of two elements, one will be empty, the other will have the value +patterns['param_value_grouped'] = """ +" ( %(qsafe_char)s * )" | ( %(safe_char)s + ) +""" % patterns + +# get a parameter and its values, without any saved groups +patterns['param'] = r""" +; (?: %(name)s ) # parameter name +(?: + (?: = (?: %(param_value)s ) )? # 0 or more parameter values, multiple + (?: , (?: %(param_value)s ) )* # parameters are comma separated +)* +""" % patterns + +# get a parameter, saving groups for name and value (value still needs parsing) +patterns['params_grouped'] = r""" +; ( %(name)s ) + +(?: = + ( + (?: (?: %(param_value)s ) )? # 0 or more parameter values, multiple + (?: , (?: %(param_value)s ) )* # parameters are comma separated + ) +)? 
+""" % patterns + +# get a full content line, break it up into group, name, parameters, and value +patterns['line'] = r""" +^ ((?P %(name)s)\.)?(?P %(name)s) # name group + (?P (?: %(param)s )* ) # params group (may be empty) +: (?P .* )$ # value group +""" % patterns + +' "%(qsafe_char)s*" | %(safe_char)s* ' + +param_values_re = re.compile(patterns['param_value_grouped'], re.VERBOSE) +params_re = re.compile(patterns['params_grouped'], re.VERBOSE) +line_re = re.compile(patterns['line'], re.DOTALL | re.VERBOSE) +begin_re = re.compile('BEGIN', re.IGNORECASE) + + +def parseParams(string): + all = params_re.findall(string) + allParameters = [] + for tup in all: + paramList = [tup[0]] # tup looks like (name, valuesString) + for pair in param_values_re.findall(tup[1]): + # pair looks like ('', value) or (value, '') + if pair[0] != '': + paramList.append(pair[0]) + else: + paramList.append(pair[1]) + allParameters.append(paramList) + return allParameters + + +def parseLine(line, lineNumber = None): + match = line_re.match(line) + if match is None: + raise ParseError("Failed to parse line: %s" % line, lineNumber) + # Underscores are replaced with dash to work around Lotus Notes + return (match.group('name').replace('_','-'), + parseParams(match.group('params')), + match.group('value'), match.group('group')) + +# logical line regular expressions + +patterns['lineend'] = r'(?:\r\n|\r|\n|$)' +patterns['wrap'] = r'%(lineend)s [\t ]' % patterns +patterns['logicallines'] = r""" +( + (?: [^\r\n] | %(wrap)s )* + %(lineend)s +) +""" % patterns + +patterns['wraporend'] = r'(%(wrap)s | %(lineend)s )' % patterns + +wrap_re = re.compile(patterns['wraporend'], re.VERBOSE) +logical_lines_re = re.compile(patterns['logicallines'], re.VERBOSE) + +testLines=""" +Line 0 text + , Line 0 continued. +Line 1;encoding=quoted-printable:this is an evil= + evil= + format. +Line 2 is a new line, it does not start with whitespace. 
+""" + +def getLogicalLines(fp, allowQP=True, findBegin=False): + """ + Iterate through a stream, yielding one logical line at a time. + + Because many applications still use vCard 2.1, we have to deal with the + quoted-printable encoding for long lines, as well as the vCard 3.0 and + vCalendar line folding technique, a whitespace character at the start + of the line. + + Quoted-printable data will be decoded in the Behavior decoding phase. + + # We're leaving this test in for awhile, because the unittest was ugly and dumb. + >>> from six import StringIO + >>> f=StringIO(testLines) + >>> for n, l in enumerate(getLogicalLines(f)): + ... print("Line %s: %s" % (n, l[0])) + ... + Line 0: Line 0 text, Line 0 continued. + Line 1: Line 1;encoding=quoted-printable:this is an evil= + evil= + format. + Line 2: Line 2 is a new line, it does not start with whitespace. + + """ + if not allowQP: + val = fp.read(-1) + + #Shouldn't need this anymore... + """ + if len(val) > 0: + if not findBegin: + val = val.decode('utf-8') + else: + for encoding in 'utf-8', 'utf-16-LE', 'utf-16-BE', 'iso-8859-1': + try: + val = val.decode(encoding) + if begin_re.search(val) is not None: + break + except UnicodeDecodeError: + pass + else: + raise ParseError('Could not find BEGIN when trying to determine encoding') + """ + # strip off any UTF8 BOMs which Python's UTF8 decoder leaves + #val = val.lstrip( unicode( codecs.BOM_UTF8, "utf8" ) ) + + lineNumber = 1 + for match in logical_lines_re.finditer(val): + line, n = wrap_re.subn('', match.group()) + if line != '': + yield line, lineNumber + lineNumber += n + + else: + quotedPrintable = False + newbuffer = six.StringIO + logicalLine = newbuffer() + lineNumber = 0 + lineStartNumber = 0 + while True: + line = fp.readline() + if line == '': + break + else: + line = line.rstrip(CRLF) + lineNumber += 1 + if line.rstrip() == '': + if logicalLine.tell() > 0: + yield logicalLine.getvalue(), lineStartNumber + lineStartNumber = lineNumber + logicalLine = 
newbuffer() + quotedPrintable = False + continue + + if quotedPrintable and allowQP: + logicalLine.write('\n') + logicalLine.write(line) + quotedPrintable = False + elif line[0] in SPACEORTAB: + logicalLine.write(line[1:]) + elif logicalLine.tell() > 0: + yield logicalLine.getvalue(), lineStartNumber + lineStartNumber = lineNumber + logicalLine = newbuffer() + logicalLine.write(line) + else: + logicalLine = newbuffer() + logicalLine.write(line) + + # vCard 2.1 allows parameters to be encoded without a parameter name. + # False positives are unlikely, but possible. + val = logicalLine.getvalue() + if val[-1]=='=' and val.lower().find('quoted-printable') >= 0: + quotedPrintable=True + + if logicalLine.tell() > 0: + yield logicalLine.getvalue(), lineStartNumber + + +def textLineToContentLine(text, n=None): + return ContentLine(*parseLine(text, n), **{'encoded':True, 'lineNumber' : n}) + + +def dquoteEscape(param): + """ + Return param, or "param" if ',' or ';' or ':' is in param. + """ + if param.find('"') >= 0: + raise VObjectError("Double quotes aren't allowed in parameter values.") + for char in ',;:': + if param.find(char) >= 0: + return '"'+ param + '"' + return param + +def foldOneLine(outbuf, input, lineLength = 75): + """ + Folding line procedure that ensures multi-byte utf-8 sequences are not broken across lines + + TO-DO: This all seems odd. Is it still needed, especially in python3? 
+ """ + + if len(input) < lineLength: + # Optimize for unfolded line case + try: + outbuf.write(bytes(input, 'UTF-8')) + except Exception: + # fall back on py2 syntax + outbuf.write(input) + + else: + # Look for valid utf8 range and write that out + start = 0 + written = 0 + while written < len(input): + # Start max length -1 chars on from where we are + offset = start + lineLength - 1 + if offset >= len(input): + line = input[start:] + try: + outbuf.write(bytes(line, 'UTF-8')) + except Exception: + # fall back on py2 syntax + outbuf.write(line) + written = len(input) + else: + # Check whether next char is valid utf8 lead byte + # while (input[offset] > 0x7F) and ((ord(input[offset]) & 0xC0) == 0x80): + # # Step back until we have a valid char + # offset -= 1 + + line = input[start:offset] + try: + outbuf.write(bytes(line, 'UTF-8')) + outbuf.write(bytes("\r\n ", 'UTF-8')) + except Exception: + # fall back on py2 syntax + outbuf.write(line) + outbuf.write("\r\n ") + written += offset - start + start = offset + try: + outbuf.write(bytes("\r\n", 'UTF-8')) + except Exception: + # fall back on py2 syntax + outbuf.write("\r\n") + + +def defaultSerialize(obj, buf, lineLength): + """ + Encode and fold obj and its children, write to buf or return a string. + """ + + outbuf = buf or six.StringIO() + + if isinstance(obj, Component): + if obj.group is None: + groupString = '' + else: + groupString = obj.group + '.' 
+ if obj.useBegin: + foldOneLine(outbuf, "{0}BEGIN:{1}".format(groupString, obj.name), lineLength) + for child in obj.getSortedChildren(): + # validate is recursive, we only need to validate once + child.serialize(outbuf, lineLength, validate=False) + # print('child serialized', str(child)) + if obj.useBegin: + foldOneLine(outbuf, "{0}END:{1}".format(groupString, obj.name), lineLength) + + elif isinstance(obj, ContentLine): + startedEncoded = obj.encoded + if obj.behavior and not startedEncoded: + obj.behavior.encode(obj) + + #s = codecs.getwriter('utf-8')(six.StringIO()) #unfolded buffer + s = six.StringIO() + + if obj.group is not None: + s.write(obj.group + '.') + s.write(obj.name.upper()) + keys = sorted(obj.params.keys()) + for key in keys: + paramstr = ','.join(dquoteEscape(p) for p in obj.params[key]) + s.write(";{}={}".format(key, paramstr)) + s.write(":{}".format(str_(obj.value))) + if obj.behavior and not startedEncoded: + obj.behavior.decode(obj) + foldOneLine(outbuf, s.getvalue(), lineLength) + + return buf or outbuf.getvalue() + + +class Stack: + def __init__(self): + self.stack = [] + def __len__(self): + return len(self.stack) + def top(self): + if len(self) == 0: return None + else: return self.stack[-1] + def topName(self): + if len(self) == 0: return None + else: return self.stack[-1].name + def modifyTop(self, item): + top = self.top() + if top: + top.add(item) + else: + new = Component() + self.push(new) + new.add(item) # add sets behavior for item and children + + def push(self, obj): + self.stack.append(obj) + + def pop(self): + return self.stack.pop() + + +def readComponents(streamOrString, validate=False, transform=True, + findBegin=True, ignoreUnreadable=False, allowQP=False): + """ + Generate one Component at a time from a stream. 
+ """ + if isinstance(streamOrString, basestring): + stream = six.StringIO(str_(streamOrString)) + else: + stream = streamOrString + + try: + stack = Stack() + versionLine = None + n = 0 + for line, n in getLogicalLines(stream, allowQP, findBegin): + if ignoreUnreadable: + try: + vline = textLineToContentLine(line, n) + except VObjectError as e: + if e.lineNumber is not None: + msg = "Skipped line %(lineNumber)s, message: %(msg)s" + else: + msg = "Skipped a line, message: %(msg)s" + logger.error(msg % {'lineNumber' : e.lineNumber, 'msg' : str(e)}) + continue + else: + vline = textLineToContentLine(line, n) + if vline.name == "VERSION": + versionLine = vline + stack.modifyTop(vline) + elif vline.name == "BEGIN": + stack.push(Component(vline.value, group=vline.group)) + elif vline.name == "PROFILE": + if not stack.top(): + stack.push(Component()) + stack.top().setProfile(vline.value) + elif vline.name == "END": + if len(stack) == 0: + err = "Attempted to end the %s component but it was never opened" % vline.value + raise ParseError(err, n) + + if vline.value.upper() == stack.topName(): # START matches END + if len(stack) == 1: + component = stack.pop() + if versionLine is not None: + component.setBehaviorFromVersionLine(versionLine) + else: + behavior = getBehavior(component.name) + if behavior: + component.setBehavior(behavior) + if validate: + component.validate(raiseException=True) + if transform: + component.transformChildrenToNative() + yield component # EXIT POINT + else: + stack.modifyTop(stack.pop()) + else: + err = "%s component wasn't closed" + raise ParseError(err % stack.topName(), n) + else: + stack.modifyTop(vline) # not a START or END line + if stack.top(): + if stack.topName() is None: + logger.warning("Top level component was never named") + elif stack.top().useBegin: + raise ParseError("Component %s was never closed" % (stack.topName()), n) + yield stack.pop() + + except ParseError as e: + e.input = streamOrString + raise + + +def readOne(stream, 
validate=False, transform=True, findBegin=True, ignoreUnreadable=False, allowQP=False): + """ + Return the first component from stream. + """ + return next(readComponents(stream, validate, transform, findBegin, ignoreUnreadable, allowQP)) + + +#--------------------------- version registry ---------------------------------- +__behaviorRegistry={} + +def registerBehavior(behavior, name=None, default=False, id=None): + """Register the given behavior. + + If default is True (or if this is the first version registered with this + name), the version will be the default if no id is given. + + """ + if not name: + name=behavior.name.upper() + if id is None: + id=behavior.versionString + if name in __behaviorRegistry: + if default: + __behaviorRegistry[name].insert(0, (id, behavior)) + else: + __behaviorRegistry[name].append((id, behavior)) + else: + __behaviorRegistry[name]=[(id, behavior)] + +def getBehavior(name, id=None): + """Return a matching behavior if it exists, or None. + + If id is None, return the default for name. + + """ + name=name.upper() + if name in __behaviorRegistry: + if id: + for n, behavior in __behaviorRegistry[name]: + if n==id: + return behavior + + return __behaviorRegistry[name][0][1] + return None + +def newFromBehavior(name, id=None): + """ + Given a name, return a behaviored ContentLine or Component. 
+ """ + name = name.upper() + behavior = getBehavior(name, id) + if behavior is None: + raise VObjectError("No behavior found named %s" % name) + if behavior.isComponent: + obj = Component(name) + else: + obj = ContentLine(name, [], '') + obj.behavior = behavior + obj.isNative = False + return obj + + +#--------------------------- Helper function ----------------------------------- +def backslashEscape(s): + s = s.replace("\\","\\\\").replace(";","\;").replace(",","\,") + return s.replace("\r\n", "\\n").replace("\n","\\n").replace("\r","\\n") diff --git a/thesisenv/lib/python3.6/site-packages/card_me/behavior.py b/thesisenv/lib/python3.6/site-packages/card_me/behavior.py new file mode 100644 index 0000000..451cd82 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/card_me/behavior.py @@ -0,0 +1,170 @@ +from . import base + +#------------------------ Abstract class for behavior -------------------------- +class Behavior(object): + """ + Behavior (validation, encoding, and transformations) for vobjects. + + Abstract class to describe vobject options, requirements and encodings. + + Behaviors are used for root components like VCALENDAR, for subcomponents + like VEVENT, and for individual lines in components. + + Behavior subclasses are not meant to be instantiated, all methods should + be classmethods. + + @cvar name: + The uppercase name of the object described by the class, or a generic + name if the class defines behavior for many objects. + @cvar description: + A brief excerpt from the RFC explaining the function of the component or + line. + @cvar versionString: + The string associated with the component, for instance, 2.0 if there's a + line like VERSION:2.0, an empty string otherwise. + @cvar knownChildren: + A dictionary with uppercased component/property names as keys and a + tuple (min, max, id) as value, where id is the id used by + L{registerBehavior}, min and max are the limits on how many of this child + must occur. 
None is used to denote no max or no id. + @cvar quotedPrintable: + A boolean describing whether the object should be encoded and decoded + using quoted printable line folding and character escaping. + @cvar defaultBehavior: + Behavior to apply to ContentLine children when no behavior is found. + @cvar hasNative: + A boolean describing whether the object can be transformed into a more + Pythonic object. + @cvar isComponent: + A boolean, True if the object should be a Component. + @cvar sortFirst: + The lower-case list of children which should come first when sorting. + @cvar allowGroup: + Whether or not vCard style group prefixes are allowed. + """ + name='' + description='' + versionString='' + knownChildren = {} + quotedPrintable = False + defaultBehavior = None + hasNative= False + isComponent = False + allowGroup = False + forceUTC = False + sortFirst = [] + + def __init__(self): + err="Behavior subclasses are not meant to be instantiated" + raise base.VObjectError(err) + + @classmethod + def validate(cls, obj, raiseException=False, complainUnrecognized=False): + """Check if the object satisfies this behavior's requirements. + + @param obj: + The L{ContentLine} or + L{Component} to be validated. + @param raiseException: + If True, raise a L{base.ValidateError} on validation failure. + Otherwise return a boolean. + @param complainUnrecognized: + If True, fail to validate if an uncrecognized parameter or child is + found. Otherwise log the lack of recognition. 
+ + """ + if not cls.allowGroup and obj.group is not None: + err = "{0} has a group, but this object doesn't support groups".format(obj) + raise base.VObjectError(err) + if isinstance(obj, base.ContentLine): + return cls.lineValidate(obj, raiseException, complainUnrecognized) + elif isinstance(obj, base.Component): + count = {} + for child in obj.getChildren(): + if not child.validate(raiseException, complainUnrecognized): + return False + name=child.name.upper() + count[name] = count.get(name, 0) + 1 + for key, val in cls.knownChildren.items(): + if count.get(key,0) < val[0]: + if raiseException: + m = "%s components must contain at least %i %s" + raise base.ValidateError(m % (cls.name, val[0], key)) + return False + if val[1] and count.get(key,0) > val[1]: + if raiseException: + m = "%s components cannot contain more than %i %s" + raise base.ValidateError(m % (cls.name, val[1], key)) + return False + return True + else: + err = "{0} is not a Component or Contentline".format(obj) + raise base.VObjectError(err) + + @classmethod + def lineValidate(cls, line, raiseException, complainUnrecognized): + """Examine a line's parameters and values, return True if valid.""" + return True + + @classmethod + def decode(cls, line): + if line.encoded: line.encoded=0 + + @classmethod + def encode(cls, line): + if not line.encoded: line.encoded=1 + + @classmethod + def transformToNative(cls, obj): + """ + Turn a ContentLine or Component into a Python-native representation. + + If appropriate, turn dates or datetime strings into Python objects. + Components containing VTIMEZONEs turn into VtimezoneComponents. + + """ + return obj + + @classmethod + def transformFromNative(cls, obj): + """ + Inverse of transformToNative. 
+ """ + raise base.NativeError("No transformFromNative defined") + + @classmethod + def generateImplicitParameters(cls, obj): + """Generate any required information that don't yet exist.""" + pass + + @classmethod + def serialize(cls, obj, buf, lineLength, validate=True): + """ + Set implicit parameters, do encoding, return unicode string. + + If validate is True, raise VObjectError if the line doesn't validate + after implicit parameters are generated. + + Default is to call base.defaultSerialize. + + """ + + cls.generateImplicitParameters(obj) + if validate: cls.validate(obj, raiseException=True) + + if obj.isNative: + transformed = obj.transformFromNative() + undoTransform = True + else: + transformed = obj + undoTransform = False + + out = base.defaultSerialize(transformed, buf, lineLength) + if undoTransform: + obj.transformToNative() + return out + + @classmethod + def valueRepr( cls, line ): + """return the representation of the given content line value""" + return line.value diff --git a/thesisenv/lib/python3.6/site-packages/card_me/change_tz.py b/thesisenv/lib/python3.6/site-packages/card_me/change_tz.py new file mode 100644 index 0000000..573dc97 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/card_me/change_tz.py @@ -0,0 +1,99 @@ +"""Translate an ics file's events to a different timezone.""" + +import sys + +from optparse import OptionParser +from . import icalendar, base + +try: + import PyICU +except: + PyICU = None + +from datetime import datetime + +def change_tz(cal, new_timezone, default, utc_only=False, utc_tz=icalendar.utc): + """Change the timezone of the specified component. 
+ + Args: + cal (Component): the component to change + new_timezone (tzinfo): the timezone to change to + default (tzinfo): a timezone to assume if the dtstart or dtend in cal + doesn't have an existing timezone + utc_only (bool): only convert dates that are in utc + utc_tz (tzinfo): the tzinfo to compare to for UTC when processing + utc_only=True + """ + + for vevent in getattr(cal, 'vevent_list', []): + start = getattr(vevent, 'dtstart', None) + end = getattr(vevent, 'dtend', None) + for node in (start, end): + if node: + dt = node.value + if (isinstance(dt, datetime) and + (not utc_only or dt.tzinfo == utc_tz)): + if dt.tzinfo is None: + dt = dt.replace(tzinfo = default) + node.value = dt.astimezone(new_timezone) + +def main(): + options, args = get_options() + if PyICU is None: + print("Failure. change_tz requires PyICU, exiting") + elif options.list: + for tz_string in PyICU.TimeZone.createEnumeration(): + print(tz_string) + elif args: + utc_only = options.utc + if utc_only: + which = "only UTC" + else: + which = "all" + print("Converting %s events" % which) + ics_file = args[0] + if len(args) > 1: + timezone = PyICU.ICUtzinfo.getInstance(args[1]) + else: + timezone = PyICU.ICUtzinfo.default + print("... Reading %s" % ics_file) + cal = base.readOne(open(ics_file)) + change_tz(cal, timezone, PyICU.ICUtzinfo.default, utc_only) + + out_name = ics_file + '.converted' + print("... Writing %s" % out_name) + + out = file(out_name, 'wb') + cal.serialize(out) + print("Done") + + +version = "0.1" + +def get_options(): + ##### Configuration options ##### + + usage = """usage: %prog [options] ics_file [timezone]""" + parser = OptionParser(usage=usage, version=version) + parser.set_description("change_tz will convert the timezones in an ics file. 
") + + parser.add_option("-u", "--only-utc", dest="utc", action="store_true", + default=False, help="Only change UTC events.") + parser.add_option("-l", "--list", dest="list", action="store_true", + default=False, help="List available timezones") + + + (cmdline_options, args) = parser.parse_args() + if not args and not cmdline_options.list: + print("error: too few arguments given") + print() + print(parser.format_help()) + return False, False + + return cmdline_options, args + +if __name__ == "__main__": + try: + main() + except KeyboardInterrupt: + print("Aborted") diff --git a/thesisenv/lib/python3.6/site-packages/card_me/hcalendar.py b/thesisenv/lib/python3.6/site-packages/card_me/hcalendar.py new file mode 100644 index 0000000..1520f42 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/card_me/hcalendar.py @@ -0,0 +1,128 @@ +""" +hCalendar: A microformat for serializing iCalendar data + (http://microformats.org/wiki/hcalendar) + +Here is a sample event in an iCalendar: + +BEGIN:VCALENDAR +PRODID:-//XYZproduct//EN +VERSION:2.0 +BEGIN:VEVENT +URL:http://www.web2con.com/ +DTSTART:20051005 +DTEND:20051008 +SUMMARY:Web 2.0 Conference +LOCATION:Argent Hotel\, San Francisco\, CA +END:VEVENT +END:VCALENDAR + +and an equivalent event in hCalendar format with various elements optimized appropriately. 
+ + + + Web 2.0 Conference: + October 5- + 7, + at the Argent Hotel, San Francisco, CA + + +""" + +import six + +from datetime import date, datetime, timedelta + +from .base import CRLF, registerBehavior +from .icalendar import VCalendar2_0 + + +class HCalendar(VCalendar2_0): + name = 'HCALENDAR' + + @classmethod + def serialize(cls, obj, buf=None, lineLength=None, validate=True): + """ + Serialize iCalendar to HTML using the hCalendar microformat (http://microformats.org/wiki/hcalendar) + """ + + outbuf = buf or six.StringIO() + level = 0 # holds current indentation level + tabwidth = 3 + + def indent(): + return ' ' * level * tabwidth + + def out(s): + outbuf.write(indent()) + outbuf.write(s) + + # not serializing optional vcalendar wrapper + + vevents = obj.vevent_list + + for event in vevents: + out('' + CRLF) + level += 1 + + # URL + url = event.getChildValue("url") + if url: + out('' + CRLF) + level += 1 + # SUMMARY + summary = event.getChildValue("summary") + if summary: + out('' + summary + ':' + CRLF) + + # DTSTART + dtstart = event.getChildValue("dtstart") + if dtstart: + if type(dtstart) == date: + timeformat = "%A, %B %e" + machine = "%Y%m%d" + elif type(dtstart) == datetime: + timeformat = "%A, %B %e, %H:%M" + machine = "%Y%m%dT%H%M%S%z" + + #TODO: Handle non-datetime formats? + #TODO: Spec says we should handle when dtstart isn't included + + out('%s\r\n' % + (dtstart.strftime(machine), dtstart.strftime(timeformat))) + + # DTEND + dtend = event.getChildValue("dtend") + if not dtend: + duration = event.getChildValue("duration") + if duration: + dtend = duration + dtstart + # TODO: If lacking dtend & duration? 
+ + if dtend: + human = dtend + # TODO: Human readable part could be smarter, excluding repeated data + if type(dtend) == date: + human = dtend - timedelta(days=1) + + out('- %s\r\n' % + (dtend.strftime(machine), human.strftime(timeformat))) + + # LOCATION + location = event.getChildValue("location") + if location: + out('at ' + location + '' + CRLF) + + description = event.getChildValue("description") + if description: + out('
' + description + '
' + CRLF) + + if url: + level -= 1 + out('
' + CRLF) + + level -= 1 + out('
' + CRLF) # close vevent + + return buf or outbuf.getvalue() + +registerBehavior(HCalendar) diff --git a/thesisenv/lib/python3.6/site-packages/card_me/icalendar.py b/thesisenv/lib/python3.6/site-packages/card_me/icalendar.py new file mode 100644 index 0000000..f29297a --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/card_me/icalendar.py @@ -0,0 +1,1958 @@ +"""Definitions and behavior for iCalendar, also known as vCalendar 2.0""" + +from __future__ import print_function + +import datetime +import random # for generating a UID +import six +import socket +import string + +from dateutil import rrule, tz + +from . import behavior +from .base import ( + VObjectError, NativeError, ValidateError, ParseError, + Component, ContentLine, logger, registerBehavior, + backslashEscape, foldOneLine, str_ +) + + +#------------------------------- Constants ------------------------------------- +DATENAMES = ("rdate", "exdate") +RULENAMES = ("exrule", "rrule") +DATESANDRULES = ("exrule", "rrule", "rdate", "exdate") +PRODID = u"-//PYVOBJECT//NONSGML Version 1//EN" + +WEEKDAYS = "MO", "TU", "WE", "TH", "FR", "SA", "SU" +FREQUENCIES = ('YEARLY', 'MONTHLY', 'WEEKLY', 'DAILY', 'HOURLY', 'MINUTELY', + 'SECONDLY') + +zeroDelta = datetime.timedelta(0) +twoHours = datetime.timedelta(hours=2) + + +#---------------------------- TZID registry ------------------------------------ +__tzidMap = {} + + +def toUnicode(s): + """Take a string or unicode, turn it into unicode, decoding as utf-8""" + if isinstance(s, six.binary_type): + s = s.decode('utf-8') + return s + + +def registerTzid(tzid, tzinfo): + """Register a tzid -> tzinfo mapping.""" + __tzidMap[toUnicode(tzid)] = tzinfo + + +def getTzid(tzid, smart=True): + """Return the tzid if it exists, or None.""" + tz = __tzidMap.get(toUnicode(tzid), None) + if smart and tzid and not tz: + try: + from pytz import timezone, UnknownTimeZoneError + try: + tz = timezone(tzid) + registerTzid(toUnicode(tzid), tz) + except UnknownTimeZoneError: + pass 
+ except ImportError: + pass + return tz + +utc = tz.tzutc() +registerTzid("UTC", utc) + + +#-------------------- Helper subclasses ---------------------------------------- + + +class TimezoneComponent(Component): + """A VTIMEZONE object. + + VTIMEZONEs are parsed by tz.tzical, the resulting datetime.tzinfo + subclass is stored in self.tzinfo, self.tzid stores the TZID associated + with this timezone. + + @ivar name: + The uppercased name of the object, in this case always 'VTIMEZONE'. + @ivar tzinfo: + A datetime.tzinfo subclass representing this timezone. + @ivar tzid: + The string used to refer to this timezone. + + """ + def __init__(self, tzinfo=None, *args, **kwds): + """Accept an existing Component or a tzinfo class.""" + super(TimezoneComponent, self).__init__(*args, **kwds) + self.isNative = True + # hack to make sure a behavior is assigned + if self.behavior is None: + self.behavior = VTimezone + if tzinfo is not None: + self.tzinfo = tzinfo + if not hasattr(self, 'name') or self.name == '': + self.name = 'VTIMEZONE' + self.useBegin = True + + @classmethod + def registerTzinfo(obj, tzinfo): + """Register tzinfo if it's not already registered, return its tzid.""" + tzid = obj.pickTzid(tzinfo) + if tzid and not getTzid(tzid, False): + registerTzid(tzid, tzinfo) + return tzid + + def gettzinfo(self): + # workaround for dateutil failing to parse some experimental properties + good_lines = ('rdate', 'rrule', 'dtstart', 'tzname', 'tzoffsetfrom', + 'tzoffsetto', 'tzid') + # serialize encodes as utf-8, cStringIO will leave utf-8 alone + buffer = six.StringIO() + # allow empty VTIMEZONEs + if len(self.contents) == 0: + return None + + def customSerialize(obj): + if isinstance(obj, Component): + foldOneLine(buffer, u"BEGIN:" + obj.name) + for child in obj.lines(): + if child.name.lower() in good_lines: + child.serialize(buffer, 75, validate=False) + for comp in obj.components(): + customSerialize(comp) + foldOneLine(buffer, u"END:" + obj.name) + 
customSerialize(self) + buffer.seek(0) # tzical wants to read a stream + return tz.tzical(buffer).get() + + def settzinfo(self, tzinfo, start=2000, end=2030): + """Create appropriate objects in self to represent tzinfo. + + Collapse DST transitions to rrules as much as possible. + + Assumptions: + - DST <-> Standard transitions occur on the hour + - never within a month of one another + - twice or fewer times a year + - never in the month of December + - DST always moves offset exactly one hour later + - tzinfo classes dst method always treats times that could be in either + offset as being in the later regime + + """ + def fromLastWeek(dt): + """How many weeks from the end of the month dt is, starting from 1.""" + weekDelta = datetime.timedelta(weeks=1) + n = 1 + current = dt + weekDelta + while current.month == dt.month: + n += 1 + current += weekDelta + return n + + # lists of dictionaries defining rules which are no longer in effect + completed = {'daylight': [], 'standard': []} + + # dictionary defining rules which are currently in effect + working = {'daylight': None, 'standard': None} + + # rule may be based on the nth week of the month or the nth from the last + for year in range(start, end + 1): + newyear = datetime.datetime(year, 1, 1) + for transitionTo in 'daylight', 'standard': + transition = getTransition(transitionTo, year, tzinfo) + oldrule = working[transitionTo] + + if transition == newyear: + # transitionTo is in effect for the whole year + rule = {'end': None, + 'start': newyear, + 'month': 1, + 'weekday': None, + 'hour': None, + 'plus': None, + 'minus': None, + 'name': tzinfo.tzname(newyear), + 'offset': tzinfo.utcoffset(newyear), + 'offsetfrom': tzinfo.utcoffset(newyear)} + if oldrule is None: + # transitionTo was not yet in effect + working[transitionTo] = rule + else: + # transitionTo was already in effect + if (oldrule['offset'] != tzinfo.utcoffset(newyear)): + # old rule was different, it shouldn't continue + oldrule['end'] = year - 1 + 
completed[transitionTo].append(oldrule) + working[transitionTo] = rule + elif transition is None: + # transitionTo is not in effect + if oldrule is not None: + # transitionTo used to be in effect + oldrule['end'] = year - 1 + completed[transitionTo].append(oldrule) + working[transitionTo] = None + else: + # an offset transition was found + # Note plus must be coerced to int to account for Py2/3 differences + old_offset = tzinfo.utcoffset(transition - twoHours) + rule = {'end': None, # None, or an integer year + 'start': transition, # the datetime of transition + 'month': transition.month, + 'weekday': transition.weekday(), + 'hour': transition.hour, + 'name': tzinfo.tzname(transition), + 'plus': int((transition.day - 1) / 7 + 1), # nth week of the month + 'minus': fromLastWeek(transition), # nth from last week + 'offset': tzinfo.utcoffset(transition), + 'offsetfrom': old_offset} + + if oldrule is None: + working[transitionTo] = rule + else: + plusMatch = rule['plus'] == oldrule['plus'] + minusMatch = rule['minus'] == oldrule['minus'] + truth = plusMatch or minusMatch + for key in 'month', 'weekday', 'hour', 'offset': + truth = truth and rule[key] == oldrule[key] + if truth: + # the old rule is still true, limit to plus or minus + if not plusMatch: + oldrule['plus'] = None + if not minusMatch: + oldrule['minus'] = None + else: + # the new rule did not match the old + oldrule['end'] = year - 1 + completed[transitionTo].append(oldrule) + working[transitionTo] = rule + + for transitionTo in 'daylight', 'standard': + if working[transitionTo] is not None: + completed[transitionTo].append(working[transitionTo]) + + self.tzid = [] + self.daylight = [] + self.standard = [] + + self.add('tzid').value = self.pickTzid(tzinfo, True) + + # old = None # unused? 
+ for transitionTo in 'daylight', 'standard': + for rule in completed[transitionTo]: + comp = self.add(transitionTo) + dtstart = comp.add('dtstart') + dtstart.value = rule['start'] + if rule['name'] is not None: + comp.add('tzname').value = rule['name'] + line = comp.add('tzoffsetto') + line.value = deltaToOffset(rule['offset']) + line = comp.add('tzoffsetfrom') + line.value = deltaToOffset(rule['offsetfrom']) + + if rule['plus'] is not None: + num = rule['plus'] + elif rule['minus'] is not None: + num = -1 * rule['minus'] + else: + num = None + if num is not None: + dayString = ";BYDAY=" + str(num) + WEEKDAYS[rule['weekday']] + else: + dayString = "" + if rule['end'] is not None: + if rule['hour'] is None: + # all year offset, with no rule + endDate = datetime.datetime(rule['end'], 1, 1) + else: + weekday = rrule.weekday(rule['weekday'], num) + du_rule = rrule.rrule( + rrule.YEARLY, + bymonth=rule['month'], + byweekday=weekday, + dtstart=datetime.datetime( + rule['end'], 1, 1, rule['hour'] + ) + ) + endDate = du_rule[0] + endDate = endDate.replace(tzinfo=utc) - rule['offsetfrom'] + endString = ";UNTIL=" + dateTimeToString(endDate) + else: + endString = '' + new_rule = "FREQ=YEARLY%s;BYMONTH=%s%s" % (dayString, rule['month'], endString) + + comp.add('rrule').value = new_rule + + tzinfo = property(gettzinfo, settzinfo) + # prevent Component's __setattr__ from overriding the tzinfo property + normal_attributes = Component.normal_attributes + ['tzinfo'] + + @staticmethod + def pickTzid(tzinfo, allowUTC=False): + """ + Given a tzinfo class, use known APIs to determine TZID, or use tzname. 
+ """ + if tzinfo is None or (not allowUTC and tzinfo_eq(tzinfo, utc)): + #If tzinfo is UTC, we don't need a TZID + return None + # try PyICU's tzid key + if hasattr(tzinfo, 'tzid'): + return toUnicode(tzinfo.tzid) + + # try pytz zone key + if hasattr(tzinfo, 'zone'): + return toUnicode(tzinfo.zone) + + # try tzical's tzid key + elif hasattr(tzinfo, '_tzid'): + return toUnicode(tzinfo._tzid) + else: + # return tzname for standard (non-DST) time + notDST = datetime.timedelta(0) + for month in range(1, 13): + dt = datetime.datetime(2000, month, 1) + if tzinfo.dst(dt) == notDST: + return toUnicode(tzinfo.tzname(dt)) + # there was no standard time in 2000! + raise VObjectError("Unable to guess TZID for tzinfo %s" % tzinfo) + + def __str__(self): + return "" % getattr(self, 'tzid', 'No TZID') + + def __repr__(self): + return self.__str__() + + def prettyPrint(self, level, tabwidth): + pre = ' ' * level * tabwidth + print(pre, self.name) + print(pre, "TZID:", self.tzid) + print('') + + +class RecurringComponent(Component): + """ + A vCalendar component like VEVENT or VTODO which may recur. + + Any recurring component can have one or multiple RRULE, RDATE, + EXRULE, or EXDATE lines, and one or zero DTSTART lines. It can also have a + variety of children that don't have any recurrence information. + + In the example below, note that dtstart is included in the rruleset. + This is not the default behavior for dateutil's rrule implementation unless + dtstart would already have been a member of the recurrence rule, and as a + result, COUNT is wrong. This can be worked around when getting rruleset by + adjusting count down by one if an rrule has a count and dtstart isn't in its + result set, but by default, the rruleset property doesn't do this work + around, to access it getrruleset must be called with addRDate set True. + + @ivar rruleset: + A U{rruleset}. 
+ """ + def __init__(self, *args, **kwds): + super(RecurringComponent, self).__init__(*args, **kwds) + + self.isNative = True + + def getrruleset(self, addRDate=False): + """ + Get an rruleset created from self. + + If addRDate is True, add an RDATE for dtstart if it's not included in + an RRULE, and count is decremented if it exists. + + Note that for rules which don't match DTSTART, DTSTART may not appear + in list(rruleset), although it should. By default, an RDATE is not + created in these cases, and count isn't updated, so dateutil may list + a spurious occurrence. + + """ + rruleset = None + for name in DATESANDRULES: + addfunc = None + for line in self.contents.get(name, ()): + # don't bother creating a rruleset unless there's a rule + if rruleset is None: + rruleset = rrule.rruleset() + if addfunc is None: + addfunc = getattr(rruleset, name) + + if name in DATENAMES: + if type(line.value[0]) == datetime.datetime: + map(addfunc, line.value) + elif type(line.value[0]) == datetime.date: + for dt in line.value: + addfunc(datetime.datetime(dt.year, dt.month, dt.day)) + else: + # ignore RDATEs with PERIOD values for now + pass + elif name in RULENAMES: + try: + dtstart = self.dtstart.value + except (AttributeError, KeyError): + # Special for VTODO - try DUE property instead + try: + if self.name == "VTODO": + dtstart = self.due.value + else: + # if there's no dtstart, just return None + print('failed to get dtstart with VTODO') + return None + except (AttributeError, KeyError): + # if there's no due, just return None + print('failed to find DUE at all.') + return None + + # a Ruby iCalendar library escapes semi-colons in rrules, + # so also remove any backslashes + value = str_(line.value).replace('\\', '') + rule = rrule.rrulestr(value, dtstart=dtstart) + until = rule._until + + if until is not None and isinstance(dtstart, datetime.datetime) and \ + (until.tzinfo != dtstart.tzinfo): + # dateutil converts the UNTIL date to a datetime, + # check to see if the 
UNTIL parameter value was a date + vals = dict(pair.split('=') for pair in + line.value.upper().split(';')) + if len(vals.get('UNTIL', '')) == 8: + until = datetime.datetime.combine(until.date(), dtstart.time()) + # While RFC2445 says UNTIL MUST be UTC, Chandler allows + # floating recurring events, and uses floating UNTIL values. + # Also, some odd floating UNTIL but timezoned DTSTART values + # have shown up in the wild, so put floating UNTIL values + # DTSTART's timezone + if until.tzinfo is None: + until = until.replace(tzinfo=dtstart.tzinfo) + + if dtstart.tzinfo is not None: + until = until.astimezone(dtstart.tzinfo) + + # RFC2445 actually states that UNTIL must be a UTC value. Whilst the + # changes above work OK, one problem case is if DTSTART is floating but + # UNTIL is properly specified as UTC (or with a TZID). In that case dateutil + # will fail datetime comparisons. There is no easy solution to this as + # there is no obvious timezone (at this point) to do proper floating time + # offset compisons. The best we can do is treat the UNTIL value as floating. + # This could mean incorrect determination of the last instance. The better + # solution here is to encourage clients to use COUNT rather than UNTIL + # when DTSTART is floating. 
+ if dtstart.tzinfo is None: + until = until.replace(tzinfo=None) + + rule._until = until + + # add the rrule or exrule to the rruleset + addfunc(rule) + + if name == 'rrule' and addRDate: + try: + # dateutils does not work with all-day (datetime.date) items + # so we need to convert to a datetime.datetime + # (which is what dateutils does internally) + if not isinstance(dtstart, datetime.datetime): + adddtstart = datetime.datetime.fromordinal(dtstart.toordinal()) + else: + adddtstart = dtstart + if rruleset._rrule[-1][0] != adddtstart: + rruleset.rdate(adddtstart) + added = True + else: + added = False + except IndexError: + # it's conceivable that an rrule might have 0 datetimes + added = False + if added and rruleset._rrule[-1]._count is not None: + rruleset._rrule[-1]._count -= 1 + return rruleset + + def setrruleset(self, rruleset): + + # Get DTSTART from component (or DUE if no DTSTART in a VTODO) + try: + dtstart = self.dtstart.value + except (AttributeError, KeyError): + if self.name == "VTODO": + dtstart = self.due.value + else: + raise + + isDate = datetime.date == type(dtstart) + if isDate: + dtstart = datetime.datetime(dtstart.year, dtstart.month, dtstart.day) + untilSerialize = dateToString + else: + # make sure to convert time zones to UTC + untilSerialize = lambda x: dateTimeToString(x, True) + + for name in DATESANDRULES: + if name in self.contents: + del self.contents[name] + setlist = getattr(rruleset, '_' + name) + if name in DATENAMES: + setlist = list(setlist) # make a copy of the list + if name == 'rdate' and dtstart in setlist: + setlist.remove(dtstart) + if isDate: + setlist = [dt.date() for dt in setlist] + if len(setlist) > 0: + self.add(name).value = setlist + elif name in RULENAMES: + for rule in setlist: + buf = six.StringIO() + buf.write('FREQ=') + buf.write(FREQUENCIES[rule._freq]) + + values = {} + + if rule._interval != 1: + values['INTERVAL'] = [str(rule._interval)] + if rule._wkst != 0: # wkst defaults to Monday + values['WKST'] = 
[WEEKDAYS[rule._wkst]] + if rule._bysetpos is not None: + values['BYSETPOS'] = [str(i) for i in rule._bysetpos] + + if rule._count is not None: + values['COUNT'] = [str(rule._count)] + elif rule._until is not None: + values['UNTIL'] = [untilSerialize(rule._until)] + + days = [] + if ( + rule._byweekday is not None and ( + rrule.WEEKLY != rule._freq or + len(rule._byweekday) != 1 or + rule._dtstart.weekday() != rule._byweekday[0] + ) + ): + # ignore byweekday if freq is WEEKLY and day correlates + # with dtstart because it was automatically set by dateutil + days.extend(WEEKDAYS[n] for n in rule._byweekday) + + if rule._bynweekday is not None: + days.extend(n + WEEKDAYS[day] for day, n in rule._bynweekday) + + if len(days) > 0: + values['BYDAY'] = days + + if rule._bymonthday is not None and len(rule._bymonthday) > 0: + if not (rule._freq <= rrule.MONTHLY and + len(rule._bymonthday) == 1 and + rule._bymonthday[0] == rule._dtstart.day): + # ignore bymonthday if it's generated by dateutil + values['BYMONTHDAY'] = [str(n) for n in rule._bymonthday] + + if rule._bynmonthday is not None and len(rule._bynmonthday) > 0: + values.setdefault('BYMONTHDAY', []).extend( + str(n) for n in rule._bynmonthday + ) + + if rule._bymonth is not None and len(rule._bymonth) > 0: + if (rule._byweekday is not None or + len(rule._bynweekday or ()) > 0 or + not (rule._freq == rrule.YEARLY and + len(rule._bymonth) == 1 and + rule._bymonth[0] == rule._dtstart.month)): + # ignore bymonth if it's generated by dateutil + values['BYMONTH'] = [str(n) for n in rule._bymonth] + + if rule._byyearday is not None: + values['BYYEARDAY'] = [str(n) for n in rule._byyearday] + if rule._byweekno is not None: + values['BYWEEKNO'] = [str(n) for n in rule._byweekno] + + # byhour, byminute, bysecond are always ignored for now + + for key, paramvals in values.items(): + buf.write(';') + buf.write(key) + buf.write('=') + buf.write(','.join(paramvals)) + + self.add(name).value = buf.getvalue() + + rruleset = 
property(getrruleset, setrruleset) + + def __setattr__(self, name, value): + """For convenience, make self.contents directly accessible.""" + if name == 'rruleset': + self.setrruleset(value) + else: + super(RecurringComponent, self).__setattr__(name, value) + + +class TextBehavior(behavior.Behavior): + """Provide backslash escape encoding/decoding for single valued properties. + + TextBehavior also deals with base64 encoding if the ENCODING parameter is + explicitly set to BASE64. + + """ + base64string = 'BASE64' # vCard uses B + + @classmethod + def decode(cls, line): + """Remove backslash escaping from line.value.""" + if line.encoded: + encoding = getattr(line, 'encoding_param', None) + if encoding and encoding.upper() == cls.base64string: + line.value = line.value.decode('base64') + else: + line.value = stringToTextValues(line.value)[0] + line.encoded = False + + @classmethod + def encode(cls, line): + """Backslash escape line.value.""" + if not line.encoded: + encoding = getattr(line, 'encoding_param', None) + if encoding and encoding.upper() == cls.base64string: + line.value = line.value.encode('base64').replace('\n', '') + else: + line.value = backslashEscape(str_(line.value)) + line.encoded = True + + +class VCalendarComponentBehavior(behavior.Behavior): + defaultBehavior = TextBehavior + isComponent = True + + +class RecurringBehavior(VCalendarComponentBehavior): + """ + Parent Behavior for components which should be RecurringComponents. + """ + hasNative = True + + @staticmethod + def transformToNative(obj): + """ + Turn a recurring Component into a RecurringComponent. + """ + if not obj.isNative: + object.__setattr__(obj, '__class__', RecurringComponent) + obj.isNative = True + return obj + + @staticmethod + def transformFromNative(obj): + if obj.isNative: + object.__setattr__(obj, '__class__', Component) + obj.isNative = False + return obj + + @staticmethod + def generateImplicitParameters(obj): + """ + Generate a UID if one does not exist. 
+ + This is just a dummy implementation, for now. + + """ + if not hasattr(obj, 'uid'): + rand = int(random.random() * 100000) + now = datetime.datetime.now(utc) + now = dateTimeToString(now) + host = socket.gethostname() + obj.add(ContentLine('UID', [], "{0} - {1}@{2}".format(now, rand, host))) + + +class DateTimeBehavior(behavior.Behavior): + """Parent Behavior for ContentLines containing one DATE-TIME.""" + hasNative = True + + @staticmethod + def transformToNative(obj): + """Turn obj.value into a datetime. + + RFC2445 allows times without time zone information, "floating times" + in some properties. Mostly, this isn't what you want, but when parsing + a file, real floating times are noted by setting to 'TRUE' the + X-VOBJ-FLOATINGTIME-ALLOWED parameter. + + """ + if obj.isNative: + return obj + obj.isNative = True + if obj.value == '': + return obj + obj.value = obj.value + #we're cheating a little here, parseDtstart allows DATE + obj.value = parseDtstart(obj) + if obj.value.tzinfo is None: + obj.params['X-VOBJ-FLOATINGTIME-ALLOWED'] = ['TRUE'] + if obj.params.get('TZID'): + # Keep a copy of the original TZID around + obj.params['X-VOBJ-ORIGINAL-TZID'] = [obj.params['TZID']] + del obj.params['TZID'] + return obj + + @classmethod + def transformFromNative(cls, obj): + """Replace the datetime in obj.value with an ISO 8601 string.""" + # print('transforming from native') + if obj.isNative: + obj.isNative = False + tzid = TimezoneComponent.registerTzinfo(obj.value.tzinfo) + obj.value = dateTimeToString(obj.value, cls.forceUTC) + if not cls.forceUTC and tzid is not None: + obj.tzid_param = tzid + if obj.params.get('X-VOBJ-ORIGINAL-TZID'): + if not hasattr(obj, 'tzid_param'): + obj.tzid_param = obj.x_vobj_original_tzid_param + del obj.params['X-VOBJ-ORIGINAL-TZID'] + + return obj + + +class UTCDateTimeBehavior(DateTimeBehavior): + """A value which must be specified in UTC.""" + forceUTC = True + + +class DateOrDateTimeBehavior(behavior.Behavior): + """Parent Behavior 
for ContentLines containing one DATE or DATE-TIME.""" + hasNative = True + + @staticmethod + def transformToNative(obj): + """Turn obj.value into a date or datetime.""" + if obj.isNative: + return obj + obj.isNative = True + if obj.value == '': + return obj + obj.value = obj.value + obj.value = parseDtstart(obj, allowSignatureMismatch=True) + if getattr(obj, 'value_param', 'DATE-TIME').upper() == 'DATE-TIME': + if hasattr(obj, 'tzid_param'): + # Keep a copy of the original TZID around + obj.params['X-VOBJ-ORIGINAL-TZID'] = [obj.tzid_param] + del obj.tzid_param + return obj + + @staticmethod + def transformFromNative(obj): + """Replace the date or datetime in obj.value with an ISO 8601 string.""" + if type(obj.value) == datetime.date: + obj.isNative = False + obj.value_param = 'DATE' + obj.value = dateToString(obj.value) + return obj + else: + return DateTimeBehavior.transformFromNative(obj) + + +class MultiDateBehavior(behavior.Behavior): + """ + Parent Behavior for ContentLines containing one or more DATE, DATE-TIME, or + PERIOD. + + """ + hasNative = True + + @staticmethod + def transformToNative(obj): + """ + Turn obj.value into a list of dates, datetimes, or + (datetime, timedelta) tuples. + + """ + if obj.isNative: + return obj + obj.isNative = True + if obj.value == '': + obj.value = [] + return obj + tzinfo = getTzid(getattr(obj, 'tzid_param', None)) + valueParam = getattr(obj, 'value_param', "DATE-TIME").upper() + valTexts = obj.value.split(",") + if valueParam == "DATE": + obj.value = [stringToDate(x) for x in valTexts] + elif valueParam == "DATE-TIME": + obj.value = [stringToDateTime(x, tzinfo) for x in valTexts] + elif valueParam == "PERIOD": + obj.value = [stringToPeriod(x, tzinfo) for x in valTexts] + return obj + + @staticmethod + def transformFromNative(obj): + """ + Replace the date, datetime or period tuples in obj.value with + appropriate strings. 
+ + """ + if obj.value and type(obj.value[0]) == datetime.date: + obj.isNative = False + obj.value_param = 'DATE' + obj.value = ','.join([dateToString(val) for val in obj.value]) + return obj + # Fixme: handle PERIOD case + else: + if obj.isNative: + obj.isNative = False + transformed = [] + tzid = None + for val in obj.value: + if tzid is None and type(val) == datetime.datetime: + tzid = TimezoneComponent.registerTzinfo(val.tzinfo) + if tzid is not None: + obj.tzid_param = tzid + transformed.append(dateTimeToString(val)) + obj.value = ','.join(transformed) + return obj + + +class MultiTextBehavior(behavior.Behavior): + """Provide backslash escape encoding/decoding of each of several values. + + After transformation, value is a list of strings. + + """ + listSeparator = "," + + @classmethod + def decode(cls, line): + """Remove backslash escaping from line.value, then split on commas.""" + if line.encoded: + line.value = stringToTextValues(line.value, listSeparator=cls.listSeparator) + line.encoded = False + + @classmethod + def encode(cls, line): + """Backslash escape line.value.""" + if not line.encoded: + line.value = cls.listSeparator.join(backslashEscape(val) for val in line.value) + line.encoded = True + + +class SemicolonMultiTextBehavior(MultiTextBehavior): + listSeparator = ";" + + +#------------------------ Registered Behavior subclasses ----------------------- + + +class VCalendar2_0(VCalendarComponentBehavior): + """vCalendar 2.0 behavior. With added VAVAILABILITY support.""" + name = 'VCALENDAR' + description = 'vCalendar 2.0, also known as iCalendar.' 
+ versionString = '2.0' + sortFirst = ('version', 'calscale', 'method', 'prodid', 'vtimezone') + knownChildren = { + 'CALSCALE': (0, 1, None), # min, max, behaviorRegistry id + 'METHOD': (0, 1, None), + 'VERSION': (0, 1, None), # required, but auto-generated + 'PRODID': (1, 1, None), + 'VTIMEZONE': (0, None, None), + 'VEVENT': (0, None, None), + 'VTODO': (0, None, None), + 'VJOURNAL': (0, None, None), + 'VFREEBUSY': (0, None, None), + 'VAVAILABILITY': (0, None, None), + } + + @classmethod + def generateImplicitParameters(cls, obj): + """Create PRODID, VERSION, and VTIMEZONEs if needed. + + VTIMEZONEs will need to exist whenever TZID parameters exist or when + datetimes with tzinfo exist. + + """ + for comp in obj.components(): + if comp.behavior is not None: + comp.behavior.generateImplicitParameters(comp) + if not hasattr(obj, 'prodid'): + obj.add(ContentLine('PRODID', [], PRODID)) + if not hasattr(obj, 'version'): + obj.add(ContentLine('VERSION', [], cls.versionString)) + tzidsUsed = {} + + def findTzids(obj, table): + if isinstance(obj, ContentLine) and (obj.behavior is None or + not obj.behavior.forceUTC): + if getattr(obj, 'tzid_param', None): + table[obj.tzid_param] = 1 + else: + if type(obj.value) == list: + for item in obj.value: + tzinfo = getattr(obj.value, 'tzinfo', None) + tzid = TimezoneComponent.registerTzinfo(tzinfo) + if tzid: + table[tzid] = 1 + else: + tzinfo = getattr(obj.value, 'tzinfo', None) + tzid = TimezoneComponent.registerTzinfo(tzinfo) + if tzid: + table[tzid] = 1 + for child in obj.getChildren(): + if obj.name != 'VTIMEZONE': + findTzids(child, table) + + findTzids(obj, tzidsUsed) + oldtzids = [toUnicode(x.tzid.value) for x in getattr(obj, 'vtimezone_list', [])] + for tzid in tzidsUsed.keys(): + tzid = toUnicode(tzid) + if tzid != u'UTC' and tzid not in oldtzids: + obj.add(TimezoneComponent(tzinfo=getTzid(tzid))) +registerBehavior(VCalendar2_0) + + +class VTimezone(VCalendarComponentBehavior): + """Timezone behavior.""" + name = 
'VTIMEZONE' + hasNative = True + description = 'A grouping of component properties that defines a time zone.' + sortFirst = ('tzid', 'last-modified', 'tzurl', 'standard', 'daylight') + knownChildren = { + 'TZID': (1, 1, None), # min, max, behaviorRegistry id + 'LAST-MODIFIED': (0, 1, None), + 'TZURL': (0, 1, None), + 'STANDARD': (0, None, None), # NOTE: One of Standard or + 'DAYLIGHT': (0, None, None) # Daylight must appear + } + + @classmethod + def validate(cls, obj, raiseException, *args): + if not hasattr(obj, 'tzid') or obj.tzid.value is None: + if raiseException: + m = "VTIMEZONE components must contain a valid TZID" + raise ValidateError(m) + return False + if 'standard' in obj.contents or 'daylight' in obj.contents: + return super(VTimezone, cls).validate(obj, raiseException, *args) + else: + if raiseException: + m = "VTIMEZONE components must contain a STANDARD or a DAYLIGHT\ + component" + raise ValidateError(m) + return False + + @staticmethod + def transformToNative(obj): + if not obj.isNative: + object.__setattr__(obj, '__class__', TimezoneComponent) + obj.isNative = True + obj.registerTzinfo(obj.tzinfo) + return obj + + @staticmethod + def transformFromNative(obj): + return obj +registerBehavior(VTimezone) + + +class TZID(behavior.Behavior): + """Don't use TextBehavior for TZID. + + RFC2445 only allows TZID lines to be paramtext, so they shouldn't need any + encoding or decoding. Unfortunately, some Microsoft products use commas + in TZIDs which should NOT be treated as a multi-valued text property, nor + do we want to escape them. Leaving them alone works for Microsoft's breakage, + and doesn't affect compliant iCalendar streams. 
+ """ +registerBehavior(TZID) + + +class DaylightOrStandard(VCalendarComponentBehavior): + hasNative = False + knownChildren = {'DTSTART': (1, 1, None), # min, max, behaviorRegistry id + 'RRULE': (0, 1, None)} + +registerBehavior(DaylightOrStandard, 'STANDARD') +registerBehavior(DaylightOrStandard, 'DAYLIGHT') + + +class VEvent(RecurringBehavior): + """Event behavior.""" + name = 'VEVENT' + sortFirst = ('uid', 'recurrence-id', 'dtstart', 'duration', 'dtend') + + description = """A grouping of component properties, and possibly including + "VALARM" calendar components, that represents a scheduled + amount of time on a calendar.""" + knownChildren = { + 'DTSTART': (0, 1, None), # min, max, behaviorRegistry id + 'CLASS': (0, 1, None), + 'CREATED': (0, 1, None), + 'DESCRIPTION': (0, 1, None), + 'GEO': (0, 1, None), + 'LAST-MODIFIED': (0, 1, None), + 'LOCATION': (0, 1, None), + 'ORGANIZER': (0, 1, None), + 'PRIORITY': (0, 1, None), + 'DTSTAMP': (0, 1, None), + 'SEQUENCE': (0, 1, None), + 'STATUS': (0, 1, None), + 'SUMMARY': (0, 1, None), + 'TRANSP': (0, 1, None), + 'UID': (1, 1, None), + 'URL': (0, 1, None), + 'RECURRENCE-ID': (0, 1, None), + 'DTEND': (0, 1, None), # NOTE: Only one of DtEnd or + 'DURATION': (0, 1, None), # Duration can appear + 'ATTACH': (0, None, None), + 'ATTENDEE': (0, None, None), + 'CATEGORIES': (0, None, None), + 'COMMENT': (0, None, None), + 'CONTACT': (0, None, None), + 'EXDATE': (0, None, None), + 'EXRULE': (0, None, None), + 'REQUEST-STATUS': (0, None, None), + 'RELATED-TO': (0, None, None), + 'RESOURCES': (0, None, None), + 'RDATE': (0, None, None), + 'RRULE': (0, None, None), + 'VALARM': (0, None, None) + } + + @classmethod + def validate(cls, obj, raiseException, *args): + if 'dtend' in obj.contents and 'duration' in obj.contents: + if raiseException: + m = "VEVENT components cannot contain both DTEND and DURATION\ + components" + raise ValidateError(m) + return False + else: + return super(VEvent, cls).validate(obj, raiseException, *args) 
registerBehavior(VEvent)


class VTodo(RecurringBehavior):
    """To-do behavior."""
    name = 'VTODO'
    description = """A grouping of component properties and possibly "VALARM"
                   calendar components that represent an action-item or assignment."""
    # Child-property occurrence constraints: (min, max, behaviorRegistry id);
    # a max of None means the property may repeat without limit.
    knownChildren = {
        'DTSTART': (0, 1, None),  # min, max, behaviorRegistry id
        'CLASS': (0, 1, None),
        'COMPLETED': (0, 1, None),
        'CREATED': (0, 1, None),
        'DESCRIPTION': (0, 1, None),
        'GEO': (0, 1, None),
        'LAST-MODIFIED': (0, 1, None),
        'LOCATION': (0, 1, None),
        'ORGANIZER': (0, 1, None),
        'PERCENT': (0, 1, None),
        'PRIORITY': (0, 1, None),
        'DTSTAMP': (0, 1, None),
        'SEQUENCE': (0, 1, None),
        'STATUS': (0, 1, None),
        'SUMMARY': (0, 1, None),
        'UID': (0, 1, None),
        'URL': (0, 1, None),
        'RECURRENCE-ID': (0, 1, None),
        'DUE': (0, 1, None),  # NOTE: Only one of Due or
        'DURATION': (0, 1, None),  # Duration can appear
        'ATTACH': (0, None, None),
        'ATTENDEE': (0, None, None),
        'CATEGORIES': (0, None, None),
        'COMMENT': (0, None, None),
        'CONTACT': (0, None, None),
        'EXDATE': (0, None, None),
        'EXRULE': (0, None, None),
        'REQUEST-STATUS': (0, None, None),
        'RELATED-TO': (0, None, None),
        'RESOURCES': (0, None, None),
        'RDATE': (0, None, None),
        'RRULE': (0, None, None),
        'VALARM': (0, None, None)
    }

    @classmethod
    def validate(cls, obj, raiseException, *args):
        # DUE and DURATION are mutually exclusive on a VTODO.
        if 'due' in obj.contents and 'duration' in obj.contents:
            if raiseException:
                m = "VTODO components cannot contain both DUE and DURATION\
                     components"
                raise ValidateError(m)
            return False
        else:
            return super(VTodo, cls).validate(obj, raiseException, *args)

registerBehavior(VTodo)


class VJournal(RecurringBehavior):
    """Journal entry behavior."""
    name = 'VJOURNAL'
    knownChildren = {
        'DTSTART': (0, 1, None),  # min, max, behaviorRegistry id
        'CLASS': (0, 1, None),
        'CREATED': (0, 1, None),
        'DESCRIPTION': (0, 1, None),
        'LAST-MODIFIED': (0, 1, None),
        'ORGANIZER': (0, 1, None),
        'DTSTAMP': (0, 1, 
None), + 'SEQUENCE': (0, 1, None), + 'STATUS': (0, 1, None), + 'SUMMARY': (0, 1, None), + 'UID': (0, 1, None), + 'URL': (0, 1, None), + 'RECURRENCE-ID': (0, 1, None), + 'ATTACH': (0, None, None), + 'ATTENDEE': (0, None, None), + 'CATEGORIES': (0, None, None), + 'COMMENT': (0, None, None), + 'CONTACT': (0, None, None), + 'EXDATE': (0, None, None), + 'EXRULE': (0, None, None), + 'REQUEST-STATUS': (0, None, None), + 'RELATED-TO': (0, None, None), + 'RDATE': (0, None, None), + 'RRULE': (0, None, None) + } +registerBehavior(VJournal) + + +class VFreeBusy(VCalendarComponentBehavior): + """ + Free/busy state behavior. + """ + name = 'VFREEBUSY' + description = """A grouping of component properties that describe either a + request for free/busy time, describe a response to a request + for free/busy time or describe a published set of busy time.'""" + sortFirst = ('uid', 'dtstart', 'duration', 'dtend') + knownChildren = { + 'DTSTART': (0, 1, None), # min, max, behaviorRegistry id + 'CONTACT': (0, 1, None), + 'DTEND': (0, 1, None), + 'DURATION': (0, 1, None), + 'ORGANIZER': (0, 1, None), + 'DTSTAMP': (0, 1, None), + 'UID': (0, 1, None), + 'URL': (0, 1, None), + 'ATTENDEE': (0, None, None), + 'COMMENT': (0, None, None), + 'FREEBUSY': (0, None, None), + 'REQUEST-STATUS': (0, None, None) + } +registerBehavior(VFreeBusy) + + +class VAlarm(VCalendarComponentBehavior): + """Alarm behavior.""" + name = 'VALARM' + description = 'Alarms describe when and how to provide alerts about events and to-dos.' 
+ knownChildren = { + 'ACTION': (1, 1, None), # min, max, behaviorRegistry id + 'TRIGGER': (1, 1, None), + 'DURATION': (0, 1, None), + 'REPEAT': (0, 1, None), + 'DESCRIPTION': (0, 1, None) + } + + @staticmethod + def generateImplicitParameters(obj): + """Create default ACTION and TRIGGER if they're not set.""" + try: + obj.action + except AttributeError: + obj.add('action').value = 'AUDIO' + try: + obj.trigger + except AttributeError: + obj.add('trigger').value = datetime.timedelta(0) + + @classmethod + def validate(cls, obj, raiseException, *args): + """ + #TODO + audioprop = 2*( + + ; 'action' and 'trigger' are both REQUIRED, + ; but MUST NOT occur more than once + + action / trigger / + + ; 'duration' and 'repeat' are both optional, + ; and MUST NOT occur more than once each, + ; but if one occurs, so MUST the other + + duration / repeat / + + ; the following is optional, + ; but MUST NOT occur more than once + + attach / + + dispprop = 3*( + + ; the following are all REQUIRED, + ; but MUST NOT occur more than once + + action / description / trigger / + + ; 'duration' and 'repeat' are both optional, + ; and MUST NOT occur more than once each, + ; but if one occurs, so MUST the other + + duration / repeat / + + emailprop = 5*( + + ; the following are all REQUIRED, + ; but MUST NOT occur more than once + + action / description / trigger / summary + + ; the following is REQUIRED, + ; and MAY occur more than once + + attendee / + + ; 'duration' and 'repeat' are both optional, + ; and MUST NOT occur more than once each, + ; but if one occurs, so MUST the other + + duration / repeat / + + procprop = 3*( + + ; the following are all REQUIRED, + ; but MUST NOT occur more than once + + action / attach / trigger / + + ; 'duration' and 'repeat' are both optional, + ; and MUST NOT occur more than once each, + ; but if one occurs, so MUST the other + + duration / repeat / + + ; 'description' is optional, + ; and MUST NOT occur more than once + + description / + if 
obj.contents.has_key('dtend') and obj.contents.has_key('duration'): + if raiseException: + m = "VEVENT components cannot contain both DTEND and DURATION\ + components" + raise ValidateError(m) + return False + else: + return super(VEvent, cls).validate(obj, raiseException, *args) + """ + return True +registerBehavior(VAlarm) + + +class VAvailability(VCalendarComponentBehavior): + """ + Availability state behavior. + + Used to represent user's available time slots. + + """ + name = 'VAVAILABILITY' + description = 'A component used to represent a user\'s available time slots.' + sortFirst = ('uid', 'dtstart', 'duration', 'dtend') + knownChildren = { + 'UID': (1, 1, None), # min, max, behaviorRegistry id + 'DTSTAMP': (1, 1, None), + 'BUSYTYPE': (0, 1, None), + 'CREATED': (0, 1, None), + 'DTSTART': (0, 1, None), + 'LAST-MODIFIED': (0, 1, None), + 'ORGANIZER': (0, 1, None), + 'SEQUENCE': (0, 1, None), + 'SUMMARY': (0, 1, None), + 'URL': (0, 1, None), + 'DTEND': (0, 1, None), + 'DURATION': (0, 1, None), + 'CATEGORIES': (0, None, None), + 'COMMENT': (0, None, None), + 'CONTACT': (0, None, None), + 'AVAILABLE': (0, None, None), + } + + @classmethod + def validate(cls, obj, raiseException, *args): + if 'dtend' in obj.contents and 'duration' in obj.contents: + if raiseException: + m = "VAVAILABILITY components cannot contain both DTEND and DURATION components" + raise ValidateError(m) + return False + else: + return super(VAvailability, cls).validate(obj, raiseException, *args) +registerBehavior(VAvailability) + + +class Available(RecurringBehavior): + """ + Event behavior. + """ + name = 'AVAILABLE' + sortFirst = ('uid', 'recurrence-id', 'dtstart', 'duration', 'dtend') + + description = 'Defines a period of time in which a user is normally available.' 
+ knownChildren = { + 'DTSTAMP': (1, 1, None), # min, max, behaviorRegistry id + 'DTSTART': (1, 1, None), + 'UID': (1, 1, None), + 'DTEND': (0, 1, None), # NOTE: One of DtEnd or + 'DURATION': (0, 1, None), # Duration must appear, but not both + 'CREATED': (0, 1, None), + 'LAST-MODIFIED': (0, 1, None), + 'RECURRENCE-ID': (0, 1, None), + 'RRULE': (0, 1, None), + 'SUMMARY': (0, 1, None), + 'CATEGORIES': (0, None, None), + 'COMMENT': (0, None, None), + 'CONTACT': (0, None, None), + 'EXDATE': (0, None, None), + 'RDATE': (0, None, None), + } + + @classmethod + def validate(cls, obj, raiseException, *args): + has_dtend = 'dtend' in obj.contents + has_duration = 'duration' in obj.contents + if has_dtend and has_duration: + if raiseException: + m = "AVAILABLE components cannot contain both DTEND and DURATION\ + properties" + raise ValidateError(m) + return False + elif not (has_dtend or has_duration): + if raiseException: + m = "AVAILABLE components must contain one of DTEND or DURATION\ + properties" + raise ValidateError(m) + return False + else: + return super(Available, cls).validate(obj, raiseException, *args) +registerBehavior(Available) + + +class Duration(behavior.Behavior): + """Behavior for Duration ContentLines. Transform to datetime.timedelta.""" + name = 'DURATION' + hasNative = True + + @staticmethod + def transformToNative(obj): + """Turn obj.value into a datetime.timedelta.""" + if obj.isNative: + return obj + obj.isNative = True + obj.value = obj.value + if obj.value == '': + return obj + else: + deltalist = stringToDurations(obj.value) + #When can DURATION have multiple durations? For now: + if len(deltalist) == 1: + obj.value = deltalist[0] + return obj + else: + raise ParseError("DURATION must have a single duration string.") + + @staticmethod + def transformFromNative(obj): + """Replace the datetime.timedelta in obj.value with an RFC2445 string. 
+ """ + if not obj.isNative: + return obj + obj.isNative = False + obj.value = timedeltaToString(obj.value) + return obj +registerBehavior(Duration) + + +class Trigger(behavior.Behavior): + """DATE-TIME or DURATION""" + name = 'TRIGGER' + description = 'This property specifies when an alarm will trigger.' + hasNative = True + forceUTC = True + + @staticmethod + def transformToNative(obj): + """Turn obj.value into a timedelta or datetime.""" + if obj.isNative: + return obj + value = getattr(obj, 'value_param', 'DURATION').upper() + if hasattr(obj, 'value_param'): + del obj.value_param + if obj.value == '': + obj.isNative = True + return obj + elif value == 'DURATION': + try: + return Duration.transformToNative(obj) + except ParseError: + logger.warning( + "TRIGGER not recognized as DURATION, trying " + "DATE-TIME, because iCal sometimes exports " + "DATE-TIMEs without setting VALUE=DATE-TIME" + ) + try: + obj.isNative = False + dt = DateTimeBehavior.transformToNative(obj) + return dt + except: + msg = "TRIGGER with no VALUE not recognized as DURATION or as DATE-TIME" + raise ParseError(msg) + elif value == 'DATE-TIME': + #TRIGGERs with DATE-TIME values must be in UTC, we could validate + #that fact, for now we take it on faith. + return DateTimeBehavior.transformToNative(obj) + else: + raise ParseError("VALUE must be DURATION or DATE-TIME") + + @staticmethod + def transformFromNative(obj): + if type(obj.value) == datetime.datetime: + obj.value_param = 'DATE-TIME' + return UTCDateTimeBehavior.transformFromNative(obj) + elif type(obj.value) == datetime.timedelta: + return Duration.transformFromNative(obj) + else: + raise NativeError("Native TRIGGER values must be timedelta or datetime") +registerBehavior(Trigger) + + +class PeriodBehavior(behavior.Behavior): + """ + A list of (date-time, timedelta) tuples. + + """ + hasNative = True + + @staticmethod + def transformToNative(obj): + """ + Convert comma separated periods into tuples. 
+ """ + if obj.isNative: + return obj + obj.isNative = True + if obj.value == '': + obj.value = [] + return obj + tzinfo = getTzid(getattr(obj, 'tzid_param', None)) + obj.value = [stringToPeriod(x, tzinfo) for x in obj.value.split(",")] + return obj + + @classmethod + def transformFromNative(cls, obj): + """Convert the list of tuples in obj.value to strings.""" + if obj.isNative: + obj.isNative = False + transformed = [] + for tup in obj.value: + transformed.append(periodToString(tup, cls.forceUTC)) + if len(transformed) > 0: + tzid = TimezoneComponent.registerTzinfo(tup[0].tzinfo) + if not cls.forceUTC and tzid is not None: + obj.tzid_param = tzid + + obj.value = ','.join(transformed) + + return obj + + +class FreeBusy(PeriodBehavior): + """Free or busy period of time, must be specified in UTC.""" + name = 'FREEBUSY' + forceUTC = True +registerBehavior(FreeBusy, 'FREEBUSY') + + +class RRule(behavior.Behavior): + """ + Dummy behavior to avoid having RRULEs being treated as text lines (and thus + having semi-colons inaccurately escaped). 
+ """ +registerBehavior(RRule, 'RRULE') +registerBehavior(RRule, 'EXRULE') + + +#------------------------ Registration of common classes ----------------------- + + +utcDateTimeList = ['LAST-MODIFIED', 'CREATED', 'COMPLETED', 'DTSTAMP'] +list(map(lambda x: registerBehavior(UTCDateTimeBehavior, x), utcDateTimeList)) + +dateTimeOrDateList = ['DTEND', 'DTSTART', 'DUE', 'RECURRENCE-ID'] +list(map(lambda x: registerBehavior(DateOrDateTimeBehavior, x), dateTimeOrDateList)) + +registerBehavior(MultiDateBehavior, 'RDATE') +registerBehavior(MultiDateBehavior, 'EXDATE') + + +textList = ['CALSCALE', 'METHOD', 'PRODID', 'CLASS', 'COMMENT', 'DESCRIPTION', 'LOCATION', + 'STATUS', 'SUMMARY', 'TRANSP', 'CONTACT', 'RELATED-TO', 'UID', 'ACTION', 'BUSYTYPE'] +list(map(lambda x: registerBehavior(TextBehavior, x), textList)) + +list(map(lambda x: registerBehavior(MultiTextBehavior, x), ['CATEGORIES', 'RESOURCES'])) +registerBehavior(SemicolonMultiTextBehavior, 'REQUEST-STATUS') + + +#------------------------ Serializing helper functions ------------------------- + +def numToDigits(num, places): + """Helper, for converting numbers to textual digits.""" + s = str(num) + if len(s) < places: + return ("0" * (places - len(s))) + s + elif len(s) > places: + return s[len(s) - places:] + else: + return s + + +def timedeltaToString(delta): + """ + Convert timedelta to an ical DURATION. 
+ """ + if delta.days == 0: + sign = 1 + else: + sign = delta.days / abs(delta.days) + delta = abs(delta) + days = delta.days + hours = int(delta.seconds / 3600) + minutes = int((delta.seconds % 3600) / 60) + seconds = int(delta.seconds % 60) + + output = '' + if sign == -1: + output += '-' + output += 'P' + if days: + output += '{}D'.format(days) + if hours or minutes or seconds: + output += 'T' + elif not days: # Deal with zero duration + output += 'T0S' + if hours: + output += '{}H'.format(hours) + if minutes: + output += '{}M'.format(minutes) + if seconds: + output += '{}S'.format(seconds) + return output + + +def timeToString(dateOrDateTime): + """ + Wraps dateToString and dateTimeToString, returning the results + of either based on the type of the argument + """ + if hasattr(dateOrDateTime, 'hour'): + return dateTimeToString(dateOrDateTime) + return dateToString(dateOrDateTime) + + +def dateToString(date): + year = numToDigits(date.year, 4) + month = numToDigits(date.month, 2) + day = numToDigits(date.day, 2) + return year + month + day + + +def dateTimeToString(dateTime, convertToUTC=False): + """ + Ignore tzinfo unless convertToUTC. Output string. 
+ """ + if dateTime.tzinfo and convertToUTC: + dateTime = dateTime.astimezone(utc) + + datestr = "{}{}{}T{}{}{}".format( + numToDigits(dateTime.year, 4), + numToDigits(dateTime.month, 2), + numToDigits(dateTime.day, 2), + numToDigits(dateTime.hour, 2), + numToDigits(dateTime.minute, 2), + numToDigits(dateTime.second, 2), + ) + if tzinfo_eq(dateTime.tzinfo, utc): + datestr += "Z" + return datestr + + +def deltaToOffset(delta): + absDelta = abs(delta) + hours = int(absDelta.seconds / 3600) + hoursString = numToDigits(hours, 2) + minutesString = '00' + if absDelta == delta: + signString = "+" + else: + signString = "-" + return signString + hoursString + minutesString + + +def periodToString(period, convertToUTC=False): + txtstart = dateTimeToString(period[0], convertToUTC) + if isinstance(period[1], datetime.timedelta): + txtend = timedeltaToString(period[1]) + else: + txtend = dateTimeToString(period[1], convertToUTC) + return txtstart + "/" + txtend + + +#----------------------- Parsing functions ------------------------------------- + + +def isDuration(s): + s = s.upper() + return (s.find("P") != -1) and (s.find("P") < 2) + + +def stringToDate(s): + year = int(s[0:4]) + month = int(s[4:6]) + day = int(s[6:8]) + return datetime.date(year, month, day) + + +def stringToDateTime(s, tzinfo=None): + """Returns datetime.datetime object.""" + try: + year = int(s[0:4]) + month = int(s[4:6]) + day = int(s[6:8]) + hour = int(s[9:11]) + minute = int(s[11:13]) + second = int(s[13:15]) + if len(s) > 15: + if s[15] == 'Z': + tzinfo = utc + except: + raise ParseError("'%s' is not a valid DATE-TIME" % s) + year = year and year or 2000 + return datetime.datetime(year, month, day, hour, minute, second, 0, tzinfo) + + +# DQUOTE included to work around iCal's penchant for backslash escaping it, +# although it isn't actually supposed to be escaped according to rfc2445 TEXT +escapableCharList = '\\;,Nn"' + + +def stringToTextValues(s, listSeparator=',', charList=None, strict=False): + 
"""Returns list of strings.""" + + if charList is None: + charList = escapableCharList + + def escapableChar(c): + return c in charList + + def error(msg): + if strict: + raise ParseError(msg) + else: + #logger.error(msg) + print(msg) + + #vars which control state machine + charIterator = enumerate(s) + state = "read normal" + + current = [] + results = [] + + while True: + try: + charIndex, char = next(charIterator) + except: + char = "eof" + + if state == "read normal": + if char == '\\': + state = "read escaped char" + elif char == listSeparator: + state = "read normal" + current = "".join(current) + results.append(current) + current = [] + elif char == "eof": + state = "end" + else: + state = "read normal" + current.append(char) + + elif state == "read escaped char": + if escapableChar(char): + state = "read normal" + if char in 'nN': + current.append('\n') + else: + current.append(char) + else: + state = "read normal" + # leave unrecognized escaped characters for later passes + current.append('\\' + char) + + elif state == "end": # an end state + if len(current) or len(results) == 0: + current = "".join(current) + results.append(current) + return results + + elif state == "error": # an end state + return results + + else: + state = "error" + error("error: unknown state: '%s' reached in %s" % (state, s)) + + +def stringToDurations(s, strict=False): + """Returns list of timedelta objects.""" + def makeTimedelta(sign, week, day, hour, minute, sec): + if sign == "-": + sign = -1 + else: + sign = 1 + week = int(week) + day = int(day) + hour = int(hour) + minute = int(minute) + sec = int(sec) + return sign * datetime.timedelta( + weeks=week, + days=day, + hours=hour, + minutes=minute, + seconds=sec + ) + + def error(msg): + if strict: + raise ParseError(msg) + else: + raise ParseError(msg) + #logger.error(msg) + + #vars which control state machine + charIterator = enumerate(s) + state = "start" + + durations = [] + current = "" + sign = None + week = 0 + day = 0 + 
hour = 0 + minute = 0 + sec = 0 + + while True: + try: + charIndex, char = next(charIterator) + except: + charIndex += 1 + char = "eof" + + if state == "start": + if char == '+': + state = "start" + sign = char + elif char == '-': + state = "start" + sign = char + elif char.upper() == 'P': + state = "read field" + elif char == "eof": + state = "error" + error("got end-of-line while reading in duration: " + s) + elif char in string.digits: + state = "read field" + current = current + char # update this part when updating "read field" + else: + state = "error" + # print("got unexpected character {} reading in duration: {}".format(char, s)) + error("got unexpected character {} reading in duration: {}".format(char, s)) + + elif state == "read field": + if (char in string.digits): + state = "read field" + current = current + char # update part above when updating "read field" + elif char.upper() == 'T': + state = "read field" + elif char.upper() == 'W': + state = "read field" + week = current + current = "" + elif char.upper() == 'D': + state = "read field" + day = current + current = "" + elif char.upper() == 'H': + state = "read field" + hour = current + current = "" + elif char.upper() == 'M': + state = "read field" + minute = current + current = "" + elif char.upper() == 'S': + state = "read field" + sec = current + current = "" + elif char == ",": + state = "start" + durations.append(makeTimedelta(sign, week, day, hour, minute, sec)) + current = "" + sign = None + week = None + day = None + hour = None + minute = None + sec = None + elif char == "eof": + state = "end" + else: + state = "error" + error("got unexpected character reading in duration: " + s) + + elif state == "end": # an end state + if (sign or week or day or hour or minute or sec): + durations.append(makeTimedelta(sign, week, day, hour, minute, sec)) + return durations + + elif state == "error": # an end state + error("in error state") + return durations + + else: + state = "error" + error("error: 
unknown state: '%s' reached in %s" % (state, s)) + + +def parseDtstart(contentline, allowSignatureMismatch=False): + """ + Convert a contentline's value into a date or date-time. + + A variety of clients don't serialize dates with the appropriate VALUE + parameter, so rather than failing on these (technically invalid) lines, + if allowSignatureMismatch is True, try to parse both varieties. + + """ + tzinfo = getTzid(getattr(contentline, 'tzid_param', None)) + valueParam = getattr(contentline, 'value_param', 'DATE-TIME').upper() + if valueParam == "DATE": + return stringToDate(contentline.value) + elif valueParam == "DATE-TIME": + try: + return stringToDateTime(contentline.value, tzinfo) + except: + if allowSignatureMismatch: + return stringToDate(contentline.value) + else: + raise + + +def stringToPeriod(s, tzinfo=None): + values = s.split("/") + start = stringToDateTime(values[0], tzinfo) + valEnd = values[1] + if isDuration(valEnd): # period-start = date-time "/" dur-value + delta = stringToDurations(valEnd)[0] + return (start, delta) + else: + return (start, stringToDateTime(valEnd, tzinfo)) + + +def getTransition(transitionTo, year, tzinfo): + """Return the datetime of the transition to/from DST, or None.""" + + def firstTransition(iterDates, test): + """ + Return the last date not matching test, or None if all tests matched. 
+ """ + success = None + for dt in iterDates: + if not test(dt): + success = dt + else: + if success is not None: + return success + return success # may be None + + def generateDates(year, month=None, day=None): + """Iterate over possible dates with unspecified values.""" + months = range(1, 13) + days = range(1, 32) + hours = range(0, 24) + if month is None: + for month in months: + yield datetime.datetime(year, month, 1) + elif day is None: + for day in days: + try: + yield datetime.datetime(year, month, day) + except ValueError: + pass + else: + for hour in hours: + yield datetime.datetime(year, month, day, hour) + + assert transitionTo in ('daylight', 'standard') + if transitionTo == 'daylight': + def test(dt): + return tzinfo.dst(dt) != zeroDelta + elif transitionTo == 'standard': + def test(dt): + return tzinfo.dst(dt) == zeroDelta + newyear = datetime.datetime(year, 1, 1) + monthDt = firstTransition(generateDates(year), test) + if monthDt is None: + return newyear + elif monthDt.month == 12: + return None + else: + # there was a good transition somewhere in a non-December month + month = monthDt.month + day = firstTransition(generateDates(year, month), test).day + uncorrected = firstTransition(generateDates(year, month, day), test) + if transitionTo == 'standard': + # assuming tzinfo.dst returns a new offset for the first + # possible hour, we need to add one hour for the offset change + # and another hour because firstTransition returns the hour + # before the transition + return uncorrected + datetime.timedelta(hours=2) + else: + return uncorrected + datetime.timedelta(hours=1) + + +def tzinfo_eq(tzinfo1, tzinfo2, startYear=2000, endYear=2020): + """ + Compare offsets and DST transitions from startYear to endYear. 
+ """ + if tzinfo1 == tzinfo2: + return True + elif tzinfo1 is None or tzinfo2 is None: + return False + + def dt_test(dt): + if dt is None: + return True + return tzinfo1.utcoffset(dt) == tzinfo2.utcoffset(dt) + + if not dt_test(datetime.datetime(startYear, 1, 1)): + return False + for year in range(startYear, endYear): + for transitionTo in 'daylight', 'standard': + t1 = getTransition(transitionTo, year, tzinfo1) + t2 = getTransition(transitionTo, year, tzinfo2) + if t1 != t2 or not dt_test(t1): + return False + return True + + +#------------------- Testing and running functions ----------------------------- +if __name__ == '__main__': + import tests + tests._test() diff --git a/thesisenv/lib/python3.6/site-packages/card_me/ics_diff.py b/thesisenv/lib/python3.6/site-packages/card_me/ics_diff.py new file mode 100644 index 0000000..be91570 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/card_me/ics_diff.py @@ -0,0 +1,221 @@ +from __future__ import print_function + +from optparse import OptionParser + +from .base import Component, getBehavior, newFromBehavior, readOne + +""" +Compare VTODOs and VEVENTs in two iCalendar sources. +""" + +def getSortKey(component): + def getUID(component): + return component.getChildValue('uid', '') + + # it's not quite as simple as getUID, need to account for recurrenceID and + # sequence + + def getSequence(component): + sequence = component.getChildValue('sequence', 0) + return "%05d" % int(sequence) + + def getRecurrenceID(component): + recurrence_id = component.getChildValue('recurrence_id', None) + if recurrence_id is None: + return '0000-00-00' + else: + return recurrence_id.isoformat() + + return getUID(component) + getSequence(component) + getRecurrenceID(component) + +def sortByUID(components): + return sorted(components, key=getSortKey) + +def deleteExtraneous(component, ignore_dtstamp=False): + """ + Recursively walk the component's children, deleting extraneous details like + X-VOBJ-ORIGINAL-TZID. 
+ """ + for comp in component.components(): + deleteExtraneous(comp, ignore_dtstamp) + for line in component.lines(): + if line.params.has_key('X-VOBJ-ORIGINAL-TZID'): + del line.params['X-VOBJ-ORIGINAL-TZID'] + if ignore_dtstamp and hasattr(component, 'dtstamp_list'): + del component.dtstamp_list + +def diff(left, right): + """ + Take two VCALENDAR components, compare VEVENTs and VTODOs in them, + return a list of object pairs containing just UID and the bits + that didn't match, using None for objects that weren't present in one + version or the other. + + When there are multiple ContentLines in one VEVENT, for instance many + DESCRIPTION lines, such lines original order is assumed to be + meaningful. Order is also preserved when comparing (the unlikely case + of) multiple parameters of the same type in a ContentLine + + """ + + def processComponentLists(leftList, rightList): + output = [] + rightIndex = 0 + rightListSize = len(rightList) + + for comp in leftList: + if rightIndex >= rightListSize: + output.append((comp, None)) + else: + leftKey = getSortKey(comp) + rightComp = rightList[rightIndex] + rightKey = getSortKey(rightComp) + while leftKey > rightKey: + output.append((None, rightComp)) + rightIndex += 1 + if rightIndex >= rightListSize: + output.append((comp, None)) + break + else: + rightComp = rightList[rightIndex] + rightKey = getSortKey(rightComp) + + if leftKey < rightKey: + output.append((comp, None)) + elif leftKey == rightKey: + rightIndex += 1 + matchResult = processComponentPair(comp, rightComp) + if matchResult is not None: + output.append(matchResult) + + return output + + def newComponent(name, body): + if body is None: + return None + else: + c = Component(name) + c.behavior = getBehavior(name) + c.isNative = True + return c + + def processComponentPair(leftComp, rightComp): + """ + Return None if a match, or a pair of components including UIDs and + any differing children. 
+ + """ + leftChildKeys = leftComp.contents.keys() + rightChildKeys = rightComp.contents.keys() + + differentContentLines = [] + differentComponents = {} + + for key in leftChildKeys: + rightList = rightComp.contents.get(key, []) + if isinstance(leftComp.contents[key][0], Component): + compDifference = processComponentLists(leftComp.contents[key], + rightList) + if len(compDifference) > 0: + differentComponents[key] = compDifference + + elif leftComp.contents[key] != rightList: + differentContentLines.append((leftComp.contents[key], + rightList)) + + for key in rightChildKeys: + if key not in leftChildKeys: + if isinstance(rightComp.contents[key][0], Component): + differentComponents[key] = ([], rightComp.contents[key]) + else: + differentContentLines.append(([], rightComp.contents[key])) + + if len(differentContentLines) == 0 and len(differentComponents) == 0: + return None + else: + left = newFromBehavior(leftComp.name) + right = newFromBehavior(leftComp.name) + # add a UID, if one existed, despite the fact that they'll always be + # the same + uid = leftComp.getChildValue('uid') + if uid is not None: + left.add( 'uid').value = uid + right.add('uid').value = uid + + for name, childPairList in differentComponents.items(): + leftComponents, rightComponents = zip(*childPairList) + if len(leftComponents) > 0: + # filter out None + left.contents[name] = filter(None, leftComponents) + if len(rightComponents) > 0: + # filter out None + right.contents[name] = filter(None, rightComponents) + + for leftChildLine, rightChildLine in differentContentLines: + nonEmpty = leftChildLine or rightChildLine + name = nonEmpty[0].name + if leftChildLine is not None: + left.contents[name] = leftChildLine + if rightChildLine is not None: + right.contents[name] = rightChildLine + + return left, right + + + vevents = processComponentLists(sortByUID(getattr(left, 'vevent_list', [])), + sortByUID(getattr(right, 'vevent_list', []))) + + vtodos = processComponentLists(sortByUID(getattr(left, 
'vtodo_list', [])), + sortByUID(getattr(right, 'vtodo_list', []))) + + return vevents + vtodos + +def prettyDiff(leftObj, rightObj): + for left, right in diff(leftObj, rightObj): + print("<<<<<<<<<<<<<<<") + if left is not None: + left.prettyPrint() + print("===============") + if right is not None: + right.prettyPrint() + print(">>>>>>>>>>>>>>>") + print + + +def main(): + options, args = getOptions() + if args: + ignore_dtstamp = options.ignore + ics_file1, ics_file2 = args + cal1 = readOne(file(ics_file1)) + cal2 = readOne(file(ics_file2)) + deleteExtraneous(cal1, ignore_dtstamp=ignore_dtstamp) + deleteExtraneous(cal2, ignore_dtstamp=ignore_dtstamp) + prettyDiff(cal1, cal2) + +version = "0.1" + +def getOptions(): + ##### Configuration options ##### + + usage = "usage: %prog [options] ics_file1 ics_file2" + parser = OptionParser(usage=usage, version=version) + parser.set_description("ics_diff will print a comparison of two iCalendar files ") + + parser.add_option("-i", "--ignore-dtstamp", dest="ignore", action="store_true", + default=False, help="ignore DTSTAMP lines [default: False]") + + (cmdline_options, args) = parser.parse_args() + if len(args) < 2: + print("error: too few arguments given") + print + print(parser.format_help()) + return False, False + + return cmdline_options, args + +if __name__ == "__main__": + try: + main() + except KeyboardInterrupt: + print("Aborted") diff --git a/thesisenv/lib/python3.6/site-packages/card_me/vcard.py b/thesisenv/lib/python3.6/site-packages/card_me/vcard.py new file mode 100644 index 0000000..6fd2ee1 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/card_me/vcard.py @@ -0,0 +1,325 @@ +"""Definitions and behavior for vCard 3.0""" + +from . import behavior + +from .base import ContentLine, registerBehavior, backslashEscape +from .icalendar import stringToTextValues + + +# Python 3 no longer has a basestring type, so.... 
+try: + basestring = basestring +except NameError: + basestring = (str,bytes) + +#------------------------ vCard structs ---------------------------------------- + +class Name(object): + def __init__(self, family = '', given = '', additional = '', prefix = '', + suffix = ''): + """Each name attribute can be a string or a list of strings.""" + self.family = family + self.given = given + self.additional = additional + self.prefix = prefix + self.suffix = suffix + + @staticmethod + def toString(val): + """Turn a string or array value into a string.""" + if type(val) in (list, tuple): + return ' '.join(val) + return val + + def __str__(self): + eng_order = ('prefix', 'given', 'additional', 'family', 'suffix') + out = ' '.join(self.toString(getattr(self, val)) for val in eng_order) + return out + + def __repr__(self): + return "" % self.__str__() + + def __eq__(self, other): + try: + return (self.family == other.family and + self.given == other.given and + self.additional == other.additional and + self.prefix == other.prefix and + self.suffix == other.suffix) + except: + return False + + +class Address(object): + def __init__(self, street = '', city = '', region = '', code = '', + country = '', box = '', extended = ''): + """ + Each name attribute can be a string or a list of strings. + """ + self.box = box + self.extended = extended + self.street = street + self.city = city + self.region = region + self.code = code + self.country = country + + @staticmethod + def toString(val, join_char='\n'): + """ + Turn a string or array value into a string. 
+ """ + if type(val) in (list, tuple): + return join_char.join(val) + return val + + lines = ('box', 'extended', 'street') + one_line = ('city', 'region', 'code') + + def __str__(self): + lines = '\n'.join(self.toString(getattr(self, val)) for val in self.lines if getattr(self, val)) + one_line = tuple(self.toString(getattr(self, val), ' ') for val in self.one_line) + lines += "\n%s, %s %s" % one_line + if self.country: + lines += '\n' + self.toString(self.country) + return lines + + def __repr__(self): + return "" % self + + def __eq__(self, other): + try: + return (self.box == other.box and + self.extended == other.extended and + self.street == other.street and + self.city == other.city and + self.region == other.region and + self.code == other.code and + self.country == other.country) + except: + False + + +#------------------------ Registered Behavior subclasses ----------------------- + +class VCardTextBehavior(behavior.Behavior): + """Provide backslash escape encoding/decoding for single valued properties. + + TextBehavior also deals with base64 encoding if the ENCODING parameter is + explicitly set to BASE64. + + """ + allowGroup = True + base64string = 'B' + + @classmethod + def decode(cls, line): + """Remove backslash escaping from line.valueDecode line, either to remove + backslash espacing, or to decode base64 encoding. The content line should + contain a ENCODING=b for base64 encoding, but Apple Addressbook seems to + export a singleton parameter of 'BASE64', which does not match the 3.0 + vCard spec. 
If we encouter that, then we transform the parameter to + ENCODING=b""" + if line.encoded: + if 'BASE64' in line.singletonparams: + line.singletonparams.remove('BASE64') + line.encoding_param = cls.base64string + encoding = getattr(line, 'encoding_param', None) + if encoding: + line.value = line.value.decode('base64') + else: + line.value = stringToTextValues(line.value)[0] + line.encoded=False + + @classmethod + def encode(cls, line): + """Backslash escape line.value.""" + if not line.encoded: + encoding = getattr(line, 'encoding_param', None) + if encoding and encoding.upper() == cls.base64string: + line.value = line.value.encode('base64').replace('\n', '') + else: + line.value = backslashEscape(line.value) + line.encoded=True + + +class VCardBehavior(behavior.Behavior): + allowGroup = True + defaultBehavior = VCardTextBehavior + + +class VCard3_0(VCardBehavior): + """ + vCard 3.0 behavior. + """ + name = 'VCARD' + description = 'vCard 3.0, defined in rfc2426' + versionString = '3.0' + isComponent = True + sortFirst = ('version', 'prodid', 'uid') + knownChildren = {'N': (1, 1, None),#min, max, behaviorRegistry id + 'FN': (1, 1, None), + 'VERSION': (1, 1, None),#required, auto-generated + 'PRODID': (0, 1, None), + 'LABEL': (0, None, None), + 'UID': (0, None, None), + 'ADR': (0, None, None), + 'ORG': (0, None, None), + 'PHOTO': (0, None, None), + 'CATEGORIES':(0, None, None) + } + + @classmethod + def generateImplicitParameters(cls, obj): + """ + Create PRODID, VERSION, and VTIMEZONEs if needed. + + VTIMEZONEs will need to exist whenever TZID parameters exist or when + datetimes with tzinfo exist. 
+ + """ + if not hasattr(obj, 'version'): + obj.add(ContentLine('VERSION', [], cls.versionString)) +registerBehavior(VCard3_0, default=True) + + +class FN(VCardTextBehavior): + name = "FN" + description = 'Formatted name' +registerBehavior(FN) + +class Label(VCardTextBehavior): + name = "Label" + description = 'Formatted address' +registerBehavior(Label) + +wacky_apple_photo_serialize = True +REALLY_LARGE = 1E50 + + +class Photo(VCardTextBehavior): + name = "Photo" + description = 'Photograph' + @classmethod + def valueRepr( cls, line ): + return " (BINARY PHOTO DATA at 0x%s) " % id( line.value ) + + @classmethod + def serialize(cls, obj, buf, lineLength, validate): + """Apple's Address Book is *really* weird with images, it expects + base64 data to have very specific whitespace. It seems Address Book + can handle PHOTO if it's not wrapped, so don't wrap it.""" + if wacky_apple_photo_serialize: + lineLength = REALLY_LARGE + VCardTextBehavior.serialize(obj, buf, lineLength, validate) + +registerBehavior(Photo) + +def toListOrString(string): + stringList = stringToTextValues(string) + if len(stringList) == 1: + return stringList[0] + else: + return stringList + +def splitFields(string): + """Return a list of strings or lists from a Name or Address.""" + return [toListOrString(i) for i in + stringToTextValues(string, listSeparator=';', charList=';')] + +def toList(stringOrList): + if isinstance(stringOrList, basestring): + return [stringOrList] + return stringOrList + +def serializeFields(obj, order=None): + """Turn an object's fields into a ';' and ',' seperated string. + + If order is None, obj should be a list, backslash escape each field and + return a ';' separated string. 
+ """ + fields = [] + if order is None: + fields = [backslashEscape(val) for val in obj] + else: + for field in order: + escapedValueList = [backslashEscape(val) for val in + toList(getattr(obj, field))] + fields.append(','.join(escapedValueList)) + return ';'.join(fields) + + +NAME_ORDER = ('family', 'given', 'additional', 'prefix', 'suffix') +ADDRESS_ORDER = ('box', 'extended', 'street', 'city', 'region', 'code', 'country') + + +class NameBehavior(VCardBehavior): + """A structured name.""" + hasNative = True + + @staticmethod + def transformToNative(obj): + """Turn obj.value into a Name.""" + if obj.isNative: return obj + obj.isNative = True + obj.value = Name(**dict(zip(NAME_ORDER, splitFields(obj.value)))) + return obj + + @staticmethod + def transformFromNative(obj): + """Replace the Name in obj.value with a string.""" + obj.isNative = False + obj.value = serializeFields(obj.value, NAME_ORDER) + return obj +registerBehavior(NameBehavior, 'N') + + +class AddressBehavior(VCardBehavior): + """ + A structured address. + """ + hasNative = True + + @staticmethod + def transformToNative(obj): + """ + Turn obj.value into an Address. + """ + if obj.isNative: + return obj + obj.isNative = True + obj.value = Address(**dict(zip(ADDRESS_ORDER, splitFields(obj.value)))) + return obj + + @staticmethod + def transformFromNative(obj): + """ + Replace the Address in obj.value with a string. 
+ """ + obj.isNative = False + obj.value = serializeFields(obj.value, ADDRESS_ORDER) + return obj +registerBehavior(AddressBehavior, 'ADR') + + +class OrgBehavior(VCardBehavior): + """A list of organization values and sub-organization values.""" + hasNative = True + + @staticmethod + def transformToNative(obj): + """Turn obj.value into a list.""" + if obj.isNative: return obj + obj.isNative = True + # obj.value = splitFields(obj.value) + return obj + + @staticmethod + def transformFromNative(obj): + """Replace the list in obj.value with a string.""" + if not obj.isNative: return obj + obj.isNative = False + obj.value = serializeFields(obj.value) + return obj +registerBehavior(OrgBehavior, 'ORG') + diff --git a/thesisenv/lib/python3.6/site-packages/card_me/win32tz.py b/thesisenv/lib/python3.6/site-packages/card_me/win32tz.py new file mode 100644 index 0000000..35f997b --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/card_me/win32tz.py @@ -0,0 +1,156 @@ +import _winreg +import struct +import datetime + +handle=_winreg.ConnectRegistry(None, _winreg.HKEY_LOCAL_MACHINE) +tzparent=_winreg.OpenKey(handle, + "SOFTWARE\\Microsoft\\Windows NT\\CurrentVersion\\Time Zones") +parentsize=_winreg.QueryInfoKey(tzparent)[0] + +localkey=_winreg.OpenKey(handle, + "SYSTEM\\CurrentControlSet\\Control\\TimeZoneInformation") +WEEKS=datetime.timedelta(7) + +def list_timezones(): + """Return a list of all time zones known to the system.""" + l=[] + for i in xrange(parentsize): + l.append(_winreg.EnumKey(tzparent, i)) + return l + +class win32tz(datetime.tzinfo): + """tzinfo class based on win32's timezones available in the registry. + + >>> local = win32tz('Central Standard Time') + >>> oct1 = datetime.datetime(month=10, year=2004, day=1, tzinfo=local) + >>> dec1 = datetime.datetime(month=12, year=2004, day=1, tzinfo=local) + >>> oct1.dst() + datetime.timedelta(0, 3600) + >>> dec1.dst() + datetime.timedelta(0) + >>> braz = win32tz('E. 
South America Standard Time') + >>> braz.dst(oct1) + datetime.timedelta(0) + >>> braz.dst(dec1) + datetime.timedelta(0, 3600) + + """ + def __init__(self, name): + self.data=win32tz_data(name) + + def utcoffset(self, dt): + if self._isdst(dt): + return datetime.timedelta(minutes=self.data.dstoffset) + else: + return datetime.timedelta(minutes=self.data.stdoffset) + + def dst(self, dt): + if self._isdst(dt): + minutes = self.data.dstoffset - self.data.stdoffset + return datetime.timedelta(minutes=minutes) + else: + return datetime.timedelta(0) + + def tzname(self, dt): + if self._isdst(dt): return self.data.dstname + else: return self.data.stdname + + def _isdst(self, dt): + dat=self.data + dston = pickNthWeekday(dt.year, dat.dstmonth, dat.dstdayofweek, + dat.dsthour, dat.dstminute, dat.dstweeknumber) + dstoff = pickNthWeekday(dt.year, dat.stdmonth, dat.stddayofweek, + dat.stdhour, dat.stdminute, dat.stdweeknumber) + if dston < dstoff: + if dston <= dt.replace(tzinfo=None) < dstoff: return True + else: return False + else: + if dstoff <= dt.replace(tzinfo=None) < dston: return False + else: return True + + def __repr__(self): + return "" % self.data.display + +def pickNthWeekday(year, month, dayofweek, hour, minute, whichweek): + """dayofweek == 0 means Sunday, whichweek > 4 means last instance""" + first = datetime.datetime(year=year, month=month, hour=hour, minute=minute, + day=1) + weekdayone = first.replace(day=((dayofweek - first.isoweekday()) % 7 + 1)) + for n in xrange(whichweek - 1, -1, -1): + dt=weekdayone + n * WEEKS + if dt.month == month: return dt + + +class win32tz_data(object): + """Read a registry key for a timezone, expose its contents.""" + + def __init__(self, path): + """Load path, or if path is empty, load local time.""" + if path: + keydict=valuesToDict(_winreg.OpenKey(tzparent, path)) + self.display = keydict['Display'] + self.dstname = keydict['Dlt'] + self.stdname = keydict['Std'] + + #see http://ww_winreg.jsiinc.com/SUBA/tip0300/rh0398.htm 
+ tup = struct.unpack('=3l16h', keydict['TZI']) + self.stdoffset = -tup[0]-tup[1] #Bias + StandardBias * -1 + self.dstoffset = self.stdoffset - tup[2] # + DaylightBias * -1 + + offset=3 + self.stdmonth = tup[1 + offset] + self.stddayofweek = tup[2 + offset] #Sunday=0 + self.stdweeknumber = tup[3 + offset] #Last = 5 + self.stdhour = tup[4 + offset] + self.stdminute = tup[5 + offset] + + offset=11 + self.dstmonth = tup[1 + offset] + self.dstdayofweek = tup[2 + offset] #Sunday=0 + self.dstweeknumber = tup[3 + offset] #Last = 5 + self.dsthour = tup[4 + offset] + self.dstminute = tup[5 + offset] + + else: + keydict=valuesToDict(localkey) + + self.stdname = keydict['StandardName'] + self.dstname = keydict['DaylightName'] + + sourcekey=_winreg.OpenKey(tzparent, self.stdname) + self.display = valuesToDict(sourcekey)['Display'] + + self.stdoffset = -keydict['Bias']-keydict['StandardBias'] + self.dstoffset = self.stdoffset - keydict['DaylightBias'] + + #see http://ww_winreg.jsiinc.com/SUBA/tip0300/rh0398.htm + tup = struct.unpack('=8h', keydict['StandardStart']) + + offset=0 + self.stdmonth = tup[1 + offset] + self.stddayofweek = tup[2 + offset] #Sunday=0 + self.stdweeknumber = tup[3 + offset] #Last = 5 + self.stdhour = tup[4 + offset] + self.stdminute = tup[5 + offset] + + tup = struct.unpack('=8h', keydict['DaylightStart']) + self.dstmonth = tup[1 + offset] + self.dstdayofweek = tup[2 + offset] #Sunday=0 + self.dstweeknumber = tup[3 + offset] #Last = 5 + self.dsthour = tup[4 + offset] + self.dstminute = tup[5 + offset] + +def valuesToDict(key): + """Convert a registry key's values to a dictionary.""" + dict={} + size=_winreg.QueryInfoKey(key)[1] + for i in xrange(size): + dict[_winreg.EnumValue(key, i)[0]]=_winreg.EnumValue(key, i)[1] + return dict + +def _test(): + import win32tz, doctest + doctest.testmod(win32tz, verbose=0) + +if __name__ == '__main__': + _test() \ No newline at end of file diff --git 
a/thesisenv/lib/python3.6/site-packages/chardet-3.0.4.dist-info/DESCRIPTION.rst b/thesisenv/lib/python3.6/site-packages/chardet-3.0.4.dist-info/DESCRIPTION.rst new file mode 100644 index 0000000..c0f044d --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/chardet-3.0.4.dist-info/DESCRIPTION.rst @@ -0,0 +1,70 @@ +Chardet: The Universal Character Encoding Detector +-------------------------------------------------- + +.. image:: https://img.shields.io/travis/chardet/chardet/stable.svg + :alt: Build status + :target: https://travis-ci.org/chardet/chardet + +.. image:: https://img.shields.io/coveralls/chardet/chardet/stable.svg + :target: https://coveralls.io/r/chardet/chardet + +.. image:: https://img.shields.io/pypi/v/chardet.svg + :target: https://warehouse.python.org/project/chardet/ + :alt: Latest version on PyPI + +.. image:: https://img.shields.io/pypi/l/chardet.svg + :alt: License + + +Detects + - ASCII, UTF-8, UTF-16 (2 variants), UTF-32 (4 variants) + - Big5, GB2312, EUC-TW, HZ-GB-2312, ISO-2022-CN (Traditional and Simplified Chinese) + - EUC-JP, SHIFT_JIS, CP932, ISO-2022-JP (Japanese) + - EUC-KR, ISO-2022-KR (Korean) + - KOI8-R, MacCyrillic, IBM855, IBM866, ISO-8859-5, windows-1251 (Cyrillic) + - ISO-8859-5, windows-1251 (Bulgarian) + - ISO-8859-1, windows-1252 (Western European languages) + - ISO-8859-7, windows-1253 (Greek) + - ISO-8859-8, windows-1255 (Visual and Logical Hebrew) + - TIS-620 (Thai) + +.. note:: + Our ISO-8859-2 and windows-1250 (Hungarian) probers have been temporarily + disabled until we can retrain the models. + +Requires Python 2.6, 2.7, or 3.3+. + +Installation +------------ + +Install from `PyPI `_:: + + pip install chardet + +Documentation +------------- + +For users, docs are now available at https://chardet.readthedocs.io/. 
+ +Command-line Tool +----------------- + +chardet comes with a command-line script which reports on the encodings of one +or more files:: + + % chardetect somefile someotherfile + somefile: windows-1252 with confidence 0.5 + someotherfile: ascii with confidence 1.0 + +About +----- + +This is a continuation of Mark Pilgrim's excellent chardet. Previously, two +versions needed to be maintained: one that supported python 2.x and one that +supported python 3.x. We've recently merged with `Ian Cordasco `_'s +`charade `_ fork, so now we have one +coherent version that works for Python 2.6+. + +:maintainer: Dan Blanchard + + diff --git a/thesisenv/lib/python3.6/site-packages/chardet-3.0.4.dist-info/INSTALLER b/thesisenv/lib/python3.6/site-packages/chardet-3.0.4.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/chardet-3.0.4.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/thesisenv/lib/python3.6/site-packages/chardet-3.0.4.dist-info/METADATA b/thesisenv/lib/python3.6/site-packages/chardet-3.0.4.dist-info/METADATA new file mode 100644 index 0000000..1427867 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/chardet-3.0.4.dist-info/METADATA @@ -0,0 +1,96 @@ +Metadata-Version: 2.0 +Name: chardet +Version: 3.0.4 +Summary: Universal encoding detector for Python 2 and 3 +Home-page: https://github.com/chardet/chardet +Author: Daniel Blanchard +Author-email: dan.blanchard@gmail.com +License: LGPL +Keywords: encoding,i18n,xml +Platform: UNKNOWN +Classifier: Development Status :: 4 - Beta +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL) +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.6 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: 
Python :: 3 +Classifier: Programming Language :: Python :: 3.3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Classifier: Topic :: Text Processing :: Linguistic + +Chardet: The Universal Character Encoding Detector +-------------------------------------------------- + +.. image:: https://img.shields.io/travis/chardet/chardet/stable.svg + :alt: Build status + :target: https://travis-ci.org/chardet/chardet + +.. image:: https://img.shields.io/coveralls/chardet/chardet/stable.svg + :target: https://coveralls.io/r/chardet/chardet + +.. image:: https://img.shields.io/pypi/v/chardet.svg + :target: https://warehouse.python.org/project/chardet/ + :alt: Latest version on PyPI + +.. image:: https://img.shields.io/pypi/l/chardet.svg + :alt: License + + +Detects + - ASCII, UTF-8, UTF-16 (2 variants), UTF-32 (4 variants) + - Big5, GB2312, EUC-TW, HZ-GB-2312, ISO-2022-CN (Traditional and Simplified Chinese) + - EUC-JP, SHIFT_JIS, CP932, ISO-2022-JP (Japanese) + - EUC-KR, ISO-2022-KR (Korean) + - KOI8-R, MacCyrillic, IBM855, IBM866, ISO-8859-5, windows-1251 (Cyrillic) + - ISO-8859-5, windows-1251 (Bulgarian) + - ISO-8859-1, windows-1252 (Western European languages) + - ISO-8859-7, windows-1253 (Greek) + - ISO-8859-8, windows-1255 (Visual and Logical Hebrew) + - TIS-620 (Thai) + +.. note:: + Our ISO-8859-2 and windows-1250 (Hungarian) probers have been temporarily + disabled until we can retrain the models. + +Requires Python 2.6, 2.7, or 3.3+. + +Installation +------------ + +Install from `PyPI `_:: + + pip install chardet + +Documentation +------------- + +For users, docs are now available at https://chardet.readthedocs.io/. 
+ +Command-line Tool +----------------- + +chardet comes with a command-line script which reports on the encodings of one +or more files:: + + % chardetect somefile someotherfile + somefile: windows-1252 with confidence 0.5 + someotherfile: ascii with confidence 1.0 + +About +----- + +This is a continuation of Mark Pilgrim's excellent chardet. Previously, two +versions needed to be maintained: one that supported python 2.x and one that +supported python 3.x. We've recently merged with `Ian Cordasco `_'s +`charade `_ fork, so now we have one +coherent version that works for Python 2.6+. + +:maintainer: Dan Blanchard + + diff --git a/thesisenv/lib/python3.6/site-packages/chardet-3.0.4.dist-info/RECORD b/thesisenv/lib/python3.6/site-packages/chardet-3.0.4.dist-info/RECORD new file mode 100644 index 0000000..5965619 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/chardet-3.0.4.dist-info/RECORD @@ -0,0 +1,91 @@ +../../../bin/chardetect,sha256=kgFiZzwaiKxVcnUwdry2JuEJ9652cbKluDIcwcrnRnU,253 +chardet-3.0.4.dist-info/DESCRIPTION.rst,sha256=PQ4sBsMyKFZkjC6QpmbpLn0UtCNyeb-ZqvCGEgyZMGk,2174 +chardet-3.0.4.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +chardet-3.0.4.dist-info/METADATA,sha256=RV_2I4B1Z586DL8oVO5Kp7X5bUdQ5EuKAvNoAEF8wSw,3239 +chardet-3.0.4.dist-info/RECORD,, +chardet-3.0.4.dist-info/WHEEL,sha256=o2k-Qa-RMNIJmUdIc7KU6VWR_ErNRbWNlxDIpl7lm34,110 +chardet-3.0.4.dist-info/entry_points.txt,sha256=fAMmhu5eJ-zAJ-smfqQwRClQ3-nozOCmvJ6-E8lgGJo,60 +chardet-3.0.4.dist-info/metadata.json,sha256=0htbRM18ujyGZDdfowgAqj6Hq2eQtwzwyhaEveKntgo,1375 +chardet-3.0.4.dist-info/top_level.txt,sha256=AowzBbZy4x8EirABDdJSLJZMkJ_53iIag8xfKR6D7kI,8 +chardet/__init__.py,sha256=YsP5wQlsHJ2auF1RZJfypiSrCA7_bQiRm3ES_NI76-Y,1559 +chardet/__pycache__/__init__.cpython-36.pyc,, +chardet/__pycache__/big5freq.cpython-36.pyc,, +chardet/__pycache__/big5prober.cpython-36.pyc,, +chardet/__pycache__/chardistribution.cpython-36.pyc,, 
+chardet/__pycache__/charsetgroupprober.cpython-36.pyc,, +chardet/__pycache__/charsetprober.cpython-36.pyc,, +chardet/__pycache__/codingstatemachine.cpython-36.pyc,, +chardet/__pycache__/compat.cpython-36.pyc,, +chardet/__pycache__/cp949prober.cpython-36.pyc,, +chardet/__pycache__/enums.cpython-36.pyc,, +chardet/__pycache__/escprober.cpython-36.pyc,, +chardet/__pycache__/escsm.cpython-36.pyc,, +chardet/__pycache__/eucjpprober.cpython-36.pyc,, +chardet/__pycache__/euckrfreq.cpython-36.pyc,, +chardet/__pycache__/euckrprober.cpython-36.pyc,, +chardet/__pycache__/euctwfreq.cpython-36.pyc,, +chardet/__pycache__/euctwprober.cpython-36.pyc,, +chardet/__pycache__/gb2312freq.cpython-36.pyc,, +chardet/__pycache__/gb2312prober.cpython-36.pyc,, +chardet/__pycache__/hebrewprober.cpython-36.pyc,, +chardet/__pycache__/jisfreq.cpython-36.pyc,, +chardet/__pycache__/jpcntx.cpython-36.pyc,, +chardet/__pycache__/langbulgarianmodel.cpython-36.pyc,, +chardet/__pycache__/langcyrillicmodel.cpython-36.pyc,, +chardet/__pycache__/langgreekmodel.cpython-36.pyc,, +chardet/__pycache__/langhebrewmodel.cpython-36.pyc,, +chardet/__pycache__/langhungarianmodel.cpython-36.pyc,, +chardet/__pycache__/langthaimodel.cpython-36.pyc,, +chardet/__pycache__/langturkishmodel.cpython-36.pyc,, +chardet/__pycache__/latin1prober.cpython-36.pyc,, +chardet/__pycache__/mbcharsetprober.cpython-36.pyc,, +chardet/__pycache__/mbcsgroupprober.cpython-36.pyc,, +chardet/__pycache__/mbcssm.cpython-36.pyc,, +chardet/__pycache__/sbcharsetprober.cpython-36.pyc,, +chardet/__pycache__/sbcsgroupprober.cpython-36.pyc,, +chardet/__pycache__/sjisprober.cpython-36.pyc,, +chardet/__pycache__/universaldetector.cpython-36.pyc,, +chardet/__pycache__/utf8prober.cpython-36.pyc,, +chardet/__pycache__/version.cpython-36.pyc,, +chardet/big5freq.py,sha256=D_zK5GyzoVsRes0HkLJziltFQX0bKCLOrFe9_xDvO_8,31254 +chardet/big5prober.py,sha256=kBxHbdetBpPe7xrlb-e990iot64g_eGSLd32lB7_h3M,1757 
+chardet/chardistribution.py,sha256=3woWS62KrGooKyqz4zQSnjFbJpa6V7g02daAibTwcl8,9411 +chardet/charsetgroupprober.py,sha256=6bDu8YIiRuScX4ca9Igb0U69TA2PGXXDej6Cc4_9kO4,3787 +chardet/charsetprober.py,sha256=KSmwJErjypyj0bRZmC5F5eM7c8YQgLYIjZXintZNstg,5110 +chardet/cli/__init__.py,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1 +chardet/cli/__pycache__/__init__.cpython-36.pyc,, +chardet/cli/__pycache__/chardetect.cpython-36.pyc,, +chardet/cli/chardetect.py,sha256=YBO8L4mXo0WR6_-Fjh_8QxPBoEBNqB9oNxNrdc54AQs,2738 +chardet/codingstatemachine.py,sha256=VYp_6cyyki5sHgXDSZnXW4q1oelHc3cu9AyQTX7uug8,3590 +chardet/compat.py,sha256=PKTzHkSbtbHDqS9PyujMbX74q1a8mMpeQTDVsQhZMRw,1134 +chardet/cp949prober.py,sha256=TZ434QX8zzBsnUvL_8wm4AQVTZ2ZkqEEQL_lNw9f9ow,1855 +chardet/enums.py,sha256=Aimwdb9as1dJKZaFNUH2OhWIVBVd6ZkJJ_WK5sNY8cU,1661 +chardet/escprober.py,sha256=kkyqVg1Yw3DIOAMJ2bdlyQgUFQhuHAW8dUGskToNWSc,3950 +chardet/escsm.py,sha256=RuXlgNvTIDarndvllNCk5WZBIpdCxQ0kcd9EAuxUh84,10510 +chardet/eucjpprober.py,sha256=iD8Jdp0ISRjgjiVN7f0e8xGeQJ5GM2oeZ1dA8nbSeUw,3749 +chardet/euckrfreq.py,sha256=-7GdmvgWez4-eO4SuXpa7tBiDi5vRXQ8WvdFAzVaSfo,13546 +chardet/euckrprober.py,sha256=MqFMTQXxW4HbzIpZ9lKDHB3GN8SP4yiHenTmf8g_PxY,1748 +chardet/euctwfreq.py,sha256=No1WyduFOgB5VITUA7PLyC5oJRNzRyMbBxaKI1l16MA,31621 +chardet/euctwprober.py,sha256=13p6EP4yRaxqnP4iHtxHOJ6R2zxHq1_m8hTRjzVZ95c,1747 +chardet/gb2312freq.py,sha256=JX8lsweKLmnCwmk8UHEQsLgkr_rP_kEbvivC4qPOrlc,20715 +chardet/gb2312prober.py,sha256=gGvIWi9WhDjE-xQXHvNIyrnLvEbMAYgyUSZ65HUfylw,1754 +chardet/hebrewprober.py,sha256=c3SZ-K7hvyzGY6JRAZxJgwJ_sUS9k0WYkvMY00YBYFo,13838 +chardet/jisfreq.py,sha256=vpmJv2Bu0J8gnMVRPHMFefTRvo_ha1mryLig8CBwgOg,25777 +chardet/jpcntx.py,sha256=PYlNqRUQT8LM3cT5FmHGP0iiscFlTWED92MALvBungo,19643 +chardet/langbulgarianmodel.py,sha256=1HqQS9Pbtnj1xQgxitJMvw8X6kKr5OockNCZWfEQrPE,12839 +chardet/langcyrillicmodel.py,sha256=LODajvsetH87yYDDQKA2CULXUH87tI223dhfjh9Zx9c,17948 
+chardet/langgreekmodel.py,sha256=8YAW7bU8YwSJap0kIJSbPMw1BEqzGjWzqcqf0WgUKAA,12688 +chardet/langhebrewmodel.py,sha256=JSnqmE5E62tDLTPTvLpQsg5gOMO4PbdWRvV7Avkc0HA,11345 +chardet/langhungarianmodel.py,sha256=RhapYSG5l0ZaO-VV4Fan5sW0WRGQqhwBM61yx3yxyOA,12592 +chardet/langthaimodel.py,sha256=8l0173Gu_W6G8mxmQOTEF4ls2YdE7FxWf3QkSxEGXJQ,11290 +chardet/langturkishmodel.py,sha256=W22eRNJsqI6uWAfwXSKVWWnCerYqrI8dZQTm_M0lRFk,11102 +chardet/latin1prober.py,sha256=S2IoORhFk39FEFOlSFWtgVybRiP6h7BlLldHVclNkU8,5370 +chardet/mbcharsetprober.py,sha256=AR95eFH9vuqSfvLQZN-L5ijea25NOBCoXqw8s5O9xLQ,3413 +chardet/mbcsgroupprober.py,sha256=h6TRnnYq2OxG1WdD5JOyxcdVpn7dG0q-vB8nWr5mbh4,2012 +chardet/mbcssm.py,sha256=SY32wVIF3HzcjY3BaEspy9metbNSKxIIB0RKPn7tjpI,25481 +chardet/sbcharsetprober.py,sha256=LDSpCldDCFlYwUkGkwD2oFxLlPWIWXT09akH_2PiY74,5657 +chardet/sbcsgroupprober.py,sha256=1IprcCB_k1qfmnxGC6MBbxELlKqD3scW6S8YIwdeyXA,3546 +chardet/sjisprober.py,sha256=IIt-lZj0WJqK4rmUZzKZP4GJlE8KUEtFYVuY96ek5MQ,3774 +chardet/universaldetector.py,sha256=qL0174lSZE442eB21nnktT9_VcAye07laFWUeUrjttY,12485 +chardet/utf8prober.py,sha256=IdD8v3zWOsB8OLiyPi-y_fqwipRFxV9Nc1eKBLSuIEw,2766 +chardet/version.py,sha256=sp3B08mrDXB-pf3K9fqJ_zeDHOCLC8RrngQyDFap_7g,242 diff --git a/thesisenv/lib/python3.6/site-packages/chardet-3.0.4.dist-info/WHEEL b/thesisenv/lib/python3.6/site-packages/chardet-3.0.4.dist-info/WHEEL new file mode 100644 index 0000000..8b6dd1b --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/chardet-3.0.4.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.29.0) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/thesisenv/lib/python3.6/site-packages/chardet-3.0.4.dist-info/entry_points.txt b/thesisenv/lib/python3.6/site-packages/chardet-3.0.4.dist-info/entry_points.txt new file mode 100644 index 0000000..a884269 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/chardet-3.0.4.dist-info/entry_points.txt @@ -0,0 +1,3 @@ 
+[console_scripts] +chardetect = chardet.cli.chardetect:main + diff --git a/thesisenv/lib/python3.6/site-packages/chardet-3.0.4.dist-info/metadata.json b/thesisenv/lib/python3.6/site-packages/chardet-3.0.4.dist-info/metadata.json new file mode 100644 index 0000000..8cdf025 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/chardet-3.0.4.dist-info/metadata.json @@ -0,0 +1 @@ +{"classifiers": ["Development Status :: 4 - Beta", "Intended Audience :: Developers", "License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL)", "Operating System :: OS Independent", "Programming Language :: Python", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6", "Topic :: Software Development :: Libraries :: Python Modules", "Topic :: Text Processing :: Linguistic"], "extensions": {"python.commands": {"wrap_console": {"chardetect": "chardet.cli.chardetect:main"}}, "python.details": {"contacts": [{"email": "dan.blanchard@gmail.com", "name": "Daniel Blanchard", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst"}, "project_urls": {"Home": "https://github.com/chardet/chardet"}}, "python.exports": {"console_scripts": {"chardetect": "chardet.cli.chardetect:main"}}}, "generator": "bdist_wheel (0.29.0)", "keywords": ["encoding", "i18n", "xml"], "license": "LGPL", "metadata_version": "2.0", "name": "chardet", "summary": "Universal encoding detector for Python 2 and 3", "test_requires": [{"requires": ["hypothesis", "pytest"]}], "version": "3.0.4"} \ No newline at end of file diff --git a/thesisenv/lib/python3.6/site-packages/chardet-3.0.4.dist-info/top_level.txt b/thesisenv/lib/python3.6/site-packages/chardet-3.0.4.dist-info/top_level.txt new file mode 100644 index 0000000..79236f2 
--- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/chardet-3.0.4.dist-info/top_level.txt @@ -0,0 +1 @@ +chardet diff --git a/thesisenv/lib/python3.6/site-packages/chardet/__init__.py b/thesisenv/lib/python3.6/site-packages/chardet/__init__.py new file mode 100644 index 0000000..0f9f820 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/chardet/__init__.py @@ -0,0 +1,39 @@ +######################## BEGIN LICENSE BLOCK ######################## +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + + +from .compat import PY2, PY3 +from .universaldetector import UniversalDetector +from .version import __version__, VERSION + + +def detect(byte_str): + """ + Detect the encoding of the given byte string. + + :param byte_str: The byte sequence to examine. 
+ :type byte_str: ``bytes`` or ``bytearray`` + """ + if not isinstance(byte_str, bytearray): + if not isinstance(byte_str, bytes): + raise TypeError('Expected object of type bytes or bytearray, got: ' + '{0}'.format(type(byte_str))) + else: + byte_str = bytearray(byte_str) + detector = UniversalDetector() + detector.feed(byte_str) + return detector.close() diff --git a/thesisenv/lib/python3.6/site-packages/chardet/big5freq.py b/thesisenv/lib/python3.6/site-packages/chardet/big5freq.py new file mode 100644 index 0000000..38f3251 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/chardet/big5freq.py @@ -0,0 +1,386 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +# Big5 frequency table +# by Taiwan's Mandarin Promotion Council +# +# +# 128 --> 0.42261 +# 256 --> 0.57851 +# 512 --> 0.74851 +# 1024 --> 0.89384 +# 2048 --> 0.97583 +# +# Ideal Distribution Ratio = 0.74851/(1-0.74851) =2.98 +# Random Distribution Ration = 512/(5401-512)=0.105 +# +# Typical Distribution Ratio about 25% of Ideal one, still much higher than RDR + +BIG5_TYPICAL_DISTRIBUTION_RATIO = 0.75 + +#Char to FreqOrder table +BIG5_TABLE_SIZE = 5376 + +BIG5_CHAR_TO_FREQ_ORDER = ( + 1,1801,1506, 255,1431, 198, 9, 82, 6,5008, 177, 202,3681,1256,2821, 110, # 16 +3814, 33,3274, 261, 76, 44,2114, 16,2946,2187,1176, 659,3971, 26,3451,2653, # 32 +1198,3972,3350,4202, 410,2215, 302, 590, 361,1964, 8, 204, 58,4510,5009,1932, # 48 + 63,5010,5011, 317,1614, 75, 222, 159,4203,2417,1480,5012,3555,3091, 224,2822, # 64 +3682, 3, 10,3973,1471, 29,2787,1135,2866,1940, 873, 130,3275,1123, 312,5013, # 80 +4511,2052, 507, 252, 682,5014, 142,1915, 124, 206,2947, 34,3556,3204, 64, 604, # 96 +5015,2501,1977,1978, 155,1991, 645, 641,1606,5016,3452, 337, 72, 406,5017, 80, # 112 + 630, 238,3205,1509, 263, 939,1092,2654, 756,1440,1094,3453, 449, 69,2987, 591, # 128 + 179,2096, 471, 115,2035,1844, 60, 50,2988, 134, 806,1869, 734,2036,3454, 180, # 144 + 995,1607, 156, 537,2907, 688,5018, 319,1305, 779,2145, 514,2379, 298,4512, 359, # 160 +2502, 90,2716,1338, 663, 11, 906,1099,2553, 20,2441, 182, 532,1716,5019, 732, # 176 +1376,4204,1311,1420,3206, 25,2317,1056, 113, 399, 382,1950, 242,3455,2474, 529, # 192 +3276, 475,1447,3683,5020, 117, 21, 656, 810,1297,2300,2334,3557,5021, 126,4205, # 208 + 706, 456, 150, 613,4513, 71,1118,2037,4206, 145,3092, 85, 835, 486,2115,1246, # 224 +1426, 428, 
727,1285,1015, 800, 106, 623, 303,1281,5022,2128,2359, 347,3815, 221, # 240 +3558,3135,5023,1956,1153,4207, 83, 296,1199,3093, 192, 624, 93,5024, 822,1898, # 256 +2823,3136, 795,2065, 991,1554,1542,1592, 27, 43,2867, 859, 139,1456, 860,4514, # 272 + 437, 712,3974, 164,2397,3137, 695, 211,3037,2097, 195,3975,1608,3559,3560,3684, # 288 +3976, 234, 811,2989,2098,3977,2233,1441,3561,1615,2380, 668,2077,1638, 305, 228, # 304 +1664,4515, 467, 415,5025, 262,2099,1593, 239, 108, 300, 200,1033, 512,1247,2078, # 320 +5026,5027,2176,3207,3685,2682, 593, 845,1062,3277, 88,1723,2038,3978,1951, 212, # 336 + 266, 152, 149, 468,1899,4208,4516, 77, 187,5028,3038, 37, 5,2990,5029,3979, # 352 +5030,5031, 39,2524,4517,2908,3208,2079, 55, 148, 74,4518, 545, 483,1474,1029, # 368 +1665, 217,1870,1531,3138,1104,2655,4209, 24, 172,3562, 900,3980,3563,3564,4519, # 384 + 32,1408,2824,1312, 329, 487,2360,2251,2717, 784,2683, 4,3039,3351,1427,1789, # 400 + 188, 109, 499,5032,3686,1717,1790, 888,1217,3040,4520,5033,3565,5034,3352,1520, # 416 +3687,3981, 196,1034, 775,5035,5036, 929,1816, 249, 439, 38,5037,1063,5038, 794, # 432 +3982,1435,2301, 46, 178,3278,2066,5039,2381,5040, 214,1709,4521, 804, 35, 707, # 448 + 324,3688,1601,2554, 140, 459,4210,5041,5042,1365, 839, 272, 978,2262,2580,3456, # 464 +2129,1363,3689,1423, 697, 100,3094, 48, 70,1231, 495,3139,2196,5043,1294,5044, # 480 +2080, 462, 586,1042,3279, 853, 256, 988, 185,2382,3457,1698, 434,1084,5045,3458, # 496 + 314,2625,2788,4522,2335,2336, 569,2285, 637,1817,2525, 757,1162,1879,1616,3459, # 512 + 287,1577,2116, 768,4523,1671,2868,3566,2526,1321,3816, 909,2418,5046,4211, 933, # 528 +3817,4212,2053,2361,1222,4524, 765,2419,1322, 786,4525,5047,1920,1462,1677,2909, # 544 +1699,5048,4526,1424,2442,3140,3690,2600,3353,1775,1941,3460,3983,4213, 309,1369, # 560 +1130,2825, 364,2234,1653,1299,3984,3567,3985,3986,2656, 525,1085,3041, 902,2001, # 576 +1475, 964,4527, 421,1845,1415,1057,2286, 940,1364,3141, 376,4528,4529,1381, 7, # 592 +2527, 
983,2383, 336,1710,2684,1846, 321,3461, 559,1131,3042,2752,1809,1132,1313, # 608 + 265,1481,1858,5049, 352,1203,2826,3280, 167,1089, 420,2827, 776, 792,1724,3568, # 624 +4214,2443,3281,5050,4215,5051, 446, 229, 333,2753, 901,3818,1200,1557,4530,2657, # 640 +1921, 395,2754,2685,3819,4216,1836, 125, 916,3209,2626,4531,5052,5053,3820,5054, # 656 +5055,5056,4532,3142,3691,1133,2555,1757,3462,1510,2318,1409,3569,5057,2146, 438, # 672 +2601,2910,2384,3354,1068, 958,3043, 461, 311,2869,2686,4217,1916,3210,4218,1979, # 688 + 383, 750,2755,2627,4219, 274, 539, 385,1278,1442,5058,1154,1965, 384, 561, 210, # 704 + 98,1295,2556,3570,5059,1711,2420,1482,3463,3987,2911,1257, 129,5060,3821, 642, # 720 + 523,2789,2790,2658,5061, 141,2235,1333, 68, 176, 441, 876, 907,4220, 603,2602, # 736 + 710, 171,3464, 404, 549, 18,3143,2398,1410,3692,1666,5062,3571,4533,2912,4534, # 752 +5063,2991, 368,5064, 146, 366, 99, 871,3693,1543, 748, 807,1586,1185, 22,2263, # 768 + 379,3822,3211,5065,3212, 505,1942,2628,1992,1382,2319,5066, 380,2362, 218, 702, # 784 +1818,1248,3465,3044,3572,3355,3282,5067,2992,3694, 930,3283,3823,5068, 59,5069, # 800 + 585, 601,4221, 497,3466,1112,1314,4535,1802,5070,1223,1472,2177,5071, 749,1837, # 816 + 690,1900,3824,1773,3988,1476, 429,1043,1791,2236,2117, 917,4222, 447,1086,1629, # 832 +5072, 556,5073,5074,2021,1654, 844,1090, 105, 550, 966,1758,2828,1008,1783, 686, # 848 +1095,5075,2287, 793,1602,5076,3573,2603,4536,4223,2948,2302,4537,3825, 980,2503, # 864 + 544, 353, 527,4538, 908,2687,2913,5077, 381,2629,1943,1348,5078,1341,1252, 560, # 880 +3095,5079,3467,2870,5080,2054, 973, 886,2081, 143,4539,5081,5082, 157,3989, 496, # 896 +4224, 57, 840, 540,2039,4540,4541,3468,2118,1445, 970,2264,1748,1966,2082,4225, # 912 +3144,1234,1776,3284,2829,3695, 773,1206,2130,1066,2040,1326,3990,1738,1725,4226, # 928 + 279,3145, 51,1544,2604, 423,1578,2131,2067, 173,4542,1880,5083,5084,1583, 264, # 944 + 610,3696,4543,2444, 280, 154,5085,5086,5087,1739, 338,1282,3096, 
693,2871,1411, # 960 +1074,3826,2445,5088,4544,5089,5090,1240, 952,2399,5091,2914,1538,2688, 685,1483, # 976 +4227,2475,1436, 953,4228,2055,4545, 671,2400, 79,4229,2446,3285, 608, 567,2689, # 992 +3469,4230,4231,1691, 393,1261,1792,2401,5092,4546,5093,5094,5095,5096,1383,1672, # 1008 +3827,3213,1464, 522,1119, 661,1150, 216, 675,4547,3991,1432,3574, 609,4548,2690, # 1024 +2402,5097,5098,5099,4232,3045, 0,5100,2476, 315, 231,2447, 301,3356,4549,2385, # 1040 +5101, 233,4233,3697,1819,4550,4551,5102, 96,1777,1315,2083,5103, 257,5104,1810, # 1056 +3698,2718,1139,1820,4234,2022,1124,2164,2791,1778,2659,5105,3097, 363,1655,3214, # 1072 +5106,2993,5107,5108,5109,3992,1567,3993, 718, 103,3215, 849,1443, 341,3357,2949, # 1088 +1484,5110,1712, 127, 67, 339,4235,2403, 679,1412, 821,5111,5112, 834, 738, 351, # 1104 +2994,2147, 846, 235,1497,1881, 418,1993,3828,2719, 186,1100,2148,2756,3575,1545, # 1120 +1355,2950,2872,1377, 583,3994,4236,2581,2995,5113,1298,3699,1078,2557,3700,2363, # 1136 + 78,3829,3830, 267,1289,2100,2002,1594,4237, 348, 369,1274,2197,2178,1838,4552, # 1152 +1821,2830,3701,2757,2288,2003,4553,2951,2758, 144,3358, 882,4554,3995,2759,3470, # 1168 +4555,2915,5114,4238,1726, 320,5115,3996,3046, 788,2996,5116,2831,1774,1327,2873, # 1184 +3997,2832,5117,1306,4556,2004,1700,3831,3576,2364,2660, 787,2023, 506, 824,3702, # 1200 + 534, 323,4557,1044,3359,2024,1901, 946,3471,5118,1779,1500,1678,5119,1882,4558, # 1216 + 165, 243,4559,3703,2528, 123, 683,4239, 764,4560, 36,3998,1793, 589,2916, 816, # 1232 + 626,1667,3047,2237,1639,1555,1622,3832,3999,5120,4000,2874,1370,1228,1933, 891, # 1248 +2084,2917, 304,4240,5121, 292,2997,2720,3577, 691,2101,4241,1115,4561, 118, 662, # 1264 +5122, 611,1156, 854,2386,1316,2875, 2, 386, 515,2918,5123,5124,3286, 868,2238, # 1280 +1486, 855,2661, 785,2216,3048,5125,1040,3216,3578,5126,3146, 448,5127,1525,5128, # 1296 +2165,4562,5129,3833,5130,4242,2833,3579,3147, 503, 818,4001,3148,1568, 814, 676, # 1312 +1444, 
306,1749,5131,3834,1416,1030, 197,1428, 805,2834,1501,4563,5132,5133,5134, # 1328 +1994,5135,4564,5136,5137,2198, 13,2792,3704,2998,3149,1229,1917,5138,3835,2132, # 1344 +5139,4243,4565,2404,3580,5140,2217,1511,1727,1120,5141,5142, 646,3836,2448, 307, # 1360 +5143,5144,1595,3217,5145,5146,5147,3705,1113,1356,4002,1465,2529,2530,5148, 519, # 1376 +5149, 128,2133, 92,2289,1980,5150,4003,1512, 342,3150,2199,5151,2793,2218,1981, # 1392 +3360,4244, 290,1656,1317, 789, 827,2365,5152,3837,4566, 562, 581,4004,5153, 401, # 1408 +4567,2252, 94,4568,5154,1399,2794,5155,1463,2025,4569,3218,1944,5156, 828,1105, # 1424 +4245,1262,1394,5157,4246, 605,4570,5158,1784,2876,5159,2835, 819,2102, 578,2200, # 1440 +2952,5160,1502, 436,3287,4247,3288,2836,4005,2919,3472,3473,5161,2721,2320,5162, # 1456 +5163,2337,2068, 23,4571, 193, 826,3838,2103, 699,1630,4248,3098, 390,1794,1064, # 1472 +3581,5164,1579,3099,3100,1400,5165,4249,1839,1640,2877,5166,4572,4573, 137,4250, # 1488 + 598,3101,1967, 780, 104, 974,2953,5167, 278, 899, 253, 402, 572, 504, 493,1339, # 1504 +5168,4006,1275,4574,2582,2558,5169,3706,3049,3102,2253, 565,1334,2722, 863, 41, # 1520 +5170,5171,4575,5172,1657,2338, 19, 463,2760,4251, 606,5173,2999,3289,1087,2085, # 1536 +1323,2662,3000,5174,1631,1623,1750,4252,2691,5175,2878, 791,2723,2663,2339, 232, # 1552 +2421,5176,3001,1498,5177,2664,2630, 755,1366,3707,3290,3151,2026,1609, 119,1918, # 1568 +3474, 862,1026,4253,5178,4007,3839,4576,4008,4577,2265,1952,2477,5179,1125, 817, # 1584 +4254,4255,4009,1513,1766,2041,1487,4256,3050,3291,2837,3840,3152,5180,5181,1507, # 1600 +5182,2692, 733, 40,1632,1106,2879, 345,4257, 841,2531, 230,4578,3002,1847,3292, # 1616 +3475,5183,1263, 986,3476,5184, 735, 879, 254,1137, 857, 622,1300,1180,1388,1562, # 1632 +4010,4011,2954, 967,2761,2665,1349, 592,2134,1692,3361,3003,1995,4258,1679,4012, # 1648 +1902,2188,5185, 739,3708,2724,1296,1290,5186,4259,2201,2202,1922,1563,2605,2559, # 1664 +1871,2762,3004,5187, 435,5188, 343,1108, 596, 
17,1751,4579,2239,3477,3709,5189, # 1680 +4580, 294,3582,2955,1693, 477, 979, 281,2042,3583, 643,2043,3710,2631,2795,2266, # 1696 +1031,2340,2135,2303,3584,4581, 367,1249,2560,5190,3585,5191,4582,1283,3362,2005, # 1712 + 240,1762,3363,4583,4584, 836,1069,3153, 474,5192,2149,2532, 268,3586,5193,3219, # 1728 +1521,1284,5194,1658,1546,4260,5195,3587,3588,5196,4261,3364,2693,1685,4262, 961, # 1744 +1673,2632, 190,2006,2203,3841,4585,4586,5197, 570,2504,3711,1490,5198,4587,2633, # 1760 +3293,1957,4588, 584,1514, 396,1045,1945,5199,4589,1968,2449,5200,5201,4590,4013, # 1776 + 619,5202,3154,3294, 215,2007,2796,2561,3220,4591,3221,4592, 763,4263,3842,4593, # 1792 +5203,5204,1958,1767,2956,3365,3712,1174, 452,1477,4594,3366,3155,5205,2838,1253, # 1808 +2387,2189,1091,2290,4264, 492,5206, 638,1169,1825,2136,1752,4014, 648, 926,1021, # 1824 +1324,4595, 520,4596, 997, 847,1007, 892,4597,3843,2267,1872,3713,2405,1785,4598, # 1840 +1953,2957,3103,3222,1728,4265,2044,3714,4599,2008,1701,3156,1551, 30,2268,4266, # 1856 +5207,2027,4600,3589,5208, 501,5209,4267, 594,3478,2166,1822,3590,3479,3591,3223, # 1872 + 829,2839,4268,5210,1680,3157,1225,4269,5211,3295,4601,4270,3158,2341,5212,4602, # 1888 +4271,5213,4015,4016,5214,1848,2388,2606,3367,5215,4603, 374,4017, 652,4272,4273, # 1904 + 375,1140, 798,5216,5217,5218,2366,4604,2269, 546,1659, 138,3051,2450,4605,5219, # 1920 +2254, 612,1849, 910, 796,3844,1740,1371, 825,3845,3846,5220,2920,2562,5221, 692, # 1936 + 444,3052,2634, 801,4606,4274,5222,1491, 244,1053,3053,4275,4276, 340,5223,4018, # 1952 +1041,3005, 293,1168, 87,1357,5224,1539, 959,5225,2240, 721, 694,4277,3847, 219, # 1968 +1478, 644,1417,3368,2666,1413,1401,1335,1389,4019,5226,5227,3006,2367,3159,1826, # 1984 + 730,1515, 184,2840, 66,4607,5228,1660,2958, 246,3369, 378,1457, 226,3480, 975, # 2000 +4020,2959,1264,3592, 674, 696,5229, 163,5230,1141,2422,2167, 713,3593,3370,4608, # 2016 +4021,5231,5232,1186, 15,5233,1079,1070,5234,1522,3224,3594, 276,1050,2725, 758, # 2032 
+1126, 653,2960,3296,5235,2342, 889,3595,4022,3104,3007, 903,1250,4609,4023,3481, # 2048 +3596,1342,1681,1718, 766,3297, 286, 89,2961,3715,5236,1713,5237,2607,3371,3008, # 2064 +5238,2962,2219,3225,2880,5239,4610,2505,2533, 181, 387,1075,4024, 731,2190,3372, # 2080 +5240,3298, 310, 313,3482,2304, 770,4278, 54,3054, 189,4611,3105,3848,4025,5241, # 2096 +1230,1617,1850, 355,3597,4279,4612,3373, 111,4280,3716,1350,3160,3483,3055,4281, # 2112 +2150,3299,3598,5242,2797,4026,4027,3009, 722,2009,5243,1071, 247,1207,2343,2478, # 2128 +1378,4613,2010, 864,1437,1214,4614, 373,3849,1142,2220, 667,4615, 442,2763,2563, # 2144 +3850,4028,1969,4282,3300,1840, 837, 170,1107, 934,1336,1883,5244,5245,2119,4283, # 2160 +2841, 743,1569,5246,4616,4284, 582,2389,1418,3484,5247,1803,5248, 357,1395,1729, # 2176 +3717,3301,2423,1564,2241,5249,3106,3851,1633,4617,1114,2086,4285,1532,5250, 482, # 2192 +2451,4618,5251,5252,1492, 833,1466,5253,2726,3599,1641,2842,5254,1526,1272,3718, # 2208 +4286,1686,1795, 416,2564,1903,1954,1804,5255,3852,2798,3853,1159,2321,5256,2881, # 2224 +4619,1610,1584,3056,2424,2764, 443,3302,1163,3161,5257,5258,4029,5259,4287,2506, # 2240 +3057,4620,4030,3162,2104,1647,3600,2011,1873,4288,5260,4289, 431,3485,5261, 250, # 2256 + 97, 81,4290,5262,1648,1851,1558, 160, 848,5263, 866, 740,1694,5264,2204,2843, # 2272 +3226,4291,4621,3719,1687, 950,2479, 426, 469,3227,3720,3721,4031,5265,5266,1188, # 2288 + 424,1996, 861,3601,4292,3854,2205,2694, 168,1235,3602,4293,5267,2087,1674,4622, # 2304 +3374,3303, 220,2565,1009,5268,3855, 670,3010, 332,1208, 717,5269,5270,3603,2452, # 2320 +4032,3375,5271, 513,5272,1209,2882,3376,3163,4623,1080,5273,5274,5275,5276,2534, # 2336 +3722,3604, 815,1587,4033,4034,5277,3605,3486,3856,1254,4624,1328,3058,1390,4035, # 2352 +1741,4036,3857,4037,5278, 236,3858,2453,3304,5279,5280,3723,3859,1273,3860,4625, # 2368 +5281, 308,5282,4626, 245,4627,1852,2480,1307,2583, 430, 715,2137,2454,5283, 270, # 2384 + 199,2883,4038,5284,3606,2727,1753, 
761,1754, 725,1661,1841,4628,3487,3724,5285, # 2400 +5286, 587, 14,3305, 227,2608, 326, 480,2270, 943,2765,3607, 291, 650,1884,5287, # 2416 +1702,1226, 102,1547, 62,3488, 904,4629,3489,1164,4294,5288,5289,1224,1548,2766, # 2432 + 391, 498,1493,5290,1386,1419,5291,2056,1177,4630, 813, 880,1081,2368, 566,1145, # 2448 +4631,2291,1001,1035,2566,2609,2242, 394,1286,5292,5293,2069,5294, 86,1494,1730, # 2464 +4039, 491,1588, 745, 897,2963, 843,3377,4040,2767,2884,3306,1768, 998,2221,2070, # 2480 + 397,1827,1195,1970,3725,3011,3378, 284,5295,3861,2507,2138,2120,1904,5296,4041, # 2496 +2151,4042,4295,1036,3490,1905, 114,2567,4296, 209,1527,5297,5298,2964,2844,2635, # 2512 +2390,2728,3164, 812,2568,5299,3307,5300,1559, 737,1885,3726,1210, 885, 28,2695, # 2528 +3608,3862,5301,4297,1004,1780,4632,5302, 346,1982,2222,2696,4633,3863,1742, 797, # 2544 +1642,4043,1934,1072,1384,2152, 896,4044,3308,3727,3228,2885,3609,5303,2569,1959, # 2560 +4634,2455,1786,5304,5305,5306,4045,4298,1005,1308,3728,4299,2729,4635,4636,1528, # 2576 +2610, 161,1178,4300,1983, 987,4637,1101,4301, 631,4046,1157,3229,2425,1343,1241, # 2592 +1016,2243,2570, 372, 877,2344,2508,1160, 555,1935, 911,4047,5307, 466,1170, 169, # 2608 +1051,2921,2697,3729,2481,3012,1182,2012,2571,1251,2636,5308, 992,2345,3491,1540, # 2624 +2730,1201,2071,2406,1997,2482,5309,4638, 528,1923,2191,1503,1874,1570,2369,3379, # 2640 +3309,5310, 557,1073,5311,1828,3492,2088,2271,3165,3059,3107, 767,3108,2799,4639, # 2656 +1006,4302,4640,2346,1267,2179,3730,3230, 778,4048,3231,2731,1597,2667,5312,4641, # 2672 +5313,3493,5314,5315,5316,3310,2698,1433,3311, 131, 95,1504,4049, 723,4303,3166, # 2688 +1842,3610,2768,2192,4050,2028,2105,3731,5317,3013,4051,1218,5318,3380,3232,4052, # 2704 +4304,2584, 248,1634,3864, 912,5319,2845,3732,3060,3865, 654, 53,5320,3014,5321, # 2720 +1688,4642, 777,3494,1032,4053,1425,5322, 191, 820,2121,2846, 971,4643, 931,3233, # 2736 + 135, 664, 783,3866,1998, 772,2922,1936,4054,3867,4644,2923,3234, 282,2732, 640, # 
2752 +1372,3495,1127, 922, 325,3381,5323,5324, 711,2045,5325,5326,4055,2223,2800,1937, # 2768 +4056,3382,2224,2255,3868,2305,5327,4645,3869,1258,3312,4057,3235,2139,2965,4058, # 2784 +4059,5328,2225, 258,3236,4646, 101,1227,5329,3313,1755,5330,1391,3314,5331,2924, # 2800 +2057, 893,5332,5333,5334,1402,4305,2347,5335,5336,3237,3611,5337,5338, 878,1325, # 2816 +1781,2801,4647, 259,1385,2585, 744,1183,2272,4648,5339,4060,2509,5340, 684,1024, # 2832 +4306,5341, 472,3612,3496,1165,3315,4061,4062, 322,2153, 881, 455,1695,1152,1340, # 2848 + 660, 554,2154,4649,1058,4650,4307, 830,1065,3383,4063,4651,1924,5342,1703,1919, # 2864 +5343, 932,2273, 122,5344,4652, 947, 677,5345,3870,2637, 297,1906,1925,2274,4653, # 2880 +2322,3316,5346,5347,4308,5348,4309, 84,4310, 112, 989,5349, 547,1059,4064, 701, # 2896 +3613,1019,5350,4311,5351,3497, 942, 639, 457,2306,2456, 993,2966, 407, 851, 494, # 2912 +4654,3384, 927,5352,1237,5353,2426,3385, 573,4312, 680, 921,2925,1279,1875, 285, # 2928 + 790,1448,1984, 719,2168,5354,5355,4655,4065,4066,1649,5356,1541, 563,5357,1077, # 2944 +5358,3386,3061,3498, 511,3015,4067,4068,3733,4069,1268,2572,3387,3238,4656,4657, # 2960 +5359, 535,1048,1276,1189,2926,2029,3167,1438,1373,2847,2967,1134,2013,5360,4313, # 2976 +1238,2586,3109,1259,5361, 700,5362,2968,3168,3734,4314,5363,4315,1146,1876,1907, # 2992 +4658,2611,4070, 781,2427, 132,1589, 203, 147, 273,2802,2407, 898,1787,2155,4071, # 3008 +4072,5364,3871,2803,5365,5366,4659,4660,5367,3239,5368,1635,3872, 965,5369,1805, # 3024 +2699,1516,3614,1121,1082,1329,3317,4073,1449,3873, 65,1128,2848,2927,2769,1590, # 3040 +3874,5370,5371, 12,2668, 45, 976,2587,3169,4661, 517,2535,1013,1037,3240,5372, # 3056 +3875,2849,5373,3876,5374,3499,5375,2612, 614,1999,2323,3877,3110,2733,2638,5376, # 3072 +2588,4316, 599,1269,5377,1811,3735,5378,2700,3111, 759,1060, 489,1806,3388,3318, # 3088 +1358,5379,5380,2391,1387,1215,2639,2256, 490,5381,5382,4317,1759,2392,2348,5383, # 3104 
+4662,3878,1908,4074,2640,1807,3241,4663,3500,3319,2770,2349, 874,5384,5385,3501, # 3120 +3736,1859, 91,2928,3737,3062,3879,4664,5386,3170,4075,2669,5387,3502,1202,1403, # 3136 +3880,2969,2536,1517,2510,4665,3503,2511,5388,4666,5389,2701,1886,1495,1731,4076, # 3152 +2370,4667,5390,2030,5391,5392,4077,2702,1216, 237,2589,4318,2324,4078,3881,4668, # 3168 +4669,2703,3615,3504, 445,4670,5393,5394,5395,5396,2771, 61,4079,3738,1823,4080, # 3184 +5397, 687,2046, 935, 925, 405,2670, 703,1096,1860,2734,4671,4081,1877,1367,2704, # 3200 +3389, 918,2106,1782,2483, 334,3320,1611,1093,4672, 564,3171,3505,3739,3390, 945, # 3216 +2641,2058,4673,5398,1926, 872,4319,5399,3506,2705,3112, 349,4320,3740,4082,4674, # 3232 +3882,4321,3741,2156,4083,4675,4676,4322,4677,2408,2047, 782,4084, 400, 251,4323, # 3248 +1624,5400,5401, 277,3742, 299,1265, 476,1191,3883,2122,4324,4325,1109, 205,5402, # 3264 +2590,1000,2157,3616,1861,5403,5404,5405,4678,5406,4679,2573, 107,2484,2158,4085, # 3280 +3507,3172,5407,1533, 541,1301, 158, 753,4326,2886,3617,5408,1696, 370,1088,4327, # 3296 +4680,3618, 579, 327, 440, 162,2244, 269,1938,1374,3508, 968,3063, 56,1396,3113, # 3312 +2107,3321,3391,5409,1927,2159,4681,3016,5410,3619,5411,5412,3743,4682,2485,5413, # 3328 +2804,5414,1650,4683,5415,2613,5416,5417,4086,2671,3392,1149,3393,4087,3884,4088, # 3344 +5418,1076, 49,5419, 951,3242,3322,3323, 450,2850, 920,5420,1812,2805,2371,4328, # 3360 +1909,1138,2372,3885,3509,5421,3243,4684,1910,1147,1518,2428,4685,3886,5422,4686, # 3376 +2393,2614, 260,1796,3244,5423,5424,3887,3324, 708,5425,3620,1704,5426,3621,1351, # 3392 +1618,3394,3017,1887, 944,4329,3395,4330,3064,3396,4331,5427,3744, 422, 413,1714, # 3408 +3325, 500,2059,2350,4332,2486,5428,1344,1911, 954,5429,1668,5430,5431,4089,2409, # 3424 +4333,3622,3888,4334,5432,2307,1318,2512,3114, 133,3115,2887,4687, 629, 31,2851, # 3440 +2706,3889,4688, 850, 949,4689,4090,2970,1732,2089,4335,1496,1853,5433,4091, 620, # 3456 +3245, 
981,1242,3745,3397,1619,3746,1643,3326,2140,2457,1971,1719,3510,2169,5434, # 3472 +3246,5435,5436,3398,1829,5437,1277,4690,1565,2048,5438,1636,3623,3116,5439, 869, # 3488 +2852, 655,3890,3891,3117,4092,3018,3892,1310,3624,4691,5440,5441,5442,1733, 558, # 3504 +4692,3747, 335,1549,3065,1756,4336,3748,1946,3511,1830,1291,1192, 470,2735,2108, # 3520 +2806, 913,1054,4093,5443,1027,5444,3066,4094,4693, 982,2672,3399,3173,3512,3247, # 3536 +3248,1947,2807,5445, 571,4694,5446,1831,5447,3625,2591,1523,2429,5448,2090, 984, # 3552 +4695,3749,1960,5449,3750, 852, 923,2808,3513,3751, 969,1519, 999,2049,2325,1705, # 3568 +5450,3118, 615,1662, 151, 597,4095,2410,2326,1049, 275,4696,3752,4337, 568,3753, # 3584 +3626,2487,4338,3754,5451,2430,2275, 409,3249,5452,1566,2888,3514,1002, 769,2853, # 3600 + 194,2091,3174,3755,2226,3327,4339, 628,1505,5453,5454,1763,2180,3019,4096, 521, # 3616 +1161,2592,1788,2206,2411,4697,4097,1625,4340,4341, 412, 42,3119, 464,5455,2642, # 3632 +4698,3400,1760,1571,2889,3515,2537,1219,2207,3893,2643,2141,2373,4699,4700,3328, # 3648 +1651,3401,3627,5456,5457,3628,2488,3516,5458,3756,5459,5460,2276,2092, 460,5461, # 3664 +4701,5462,3020, 962, 588,3629, 289,3250,2644,1116, 52,5463,3067,1797,5464,5465, # 3680 +5466,1467,5467,1598,1143,3757,4342,1985,1734,1067,4702,1280,3402, 465,4703,1572, # 3696 + 510,5468,1928,2245,1813,1644,3630,5469,4704,3758,5470,5471,2673,1573,1534,5472, # 3712 +5473, 536,1808,1761,3517,3894,3175,2645,5474,5475,5476,4705,3518,2929,1912,2809, # 3728 +5477,3329,1122, 377,3251,5478, 360,5479,5480,4343,1529, 551,5481,2060,3759,1769, # 3744 +2431,5482,2930,4344,3330,3120,2327,2109,2031,4706,1404, 136,1468,1479, 672,1171, # 3760 +3252,2308, 271,3176,5483,2772,5484,2050, 678,2736, 865,1948,4707,5485,2014,4098, # 3776 +2971,5486,2737,2227,1397,3068,3760,4708,4709,1735,2931,3403,3631,5487,3895, 509, # 3792 +2854,2458,2890,3896,5488,5489,3177,3178,4710,4345,2538,4711,2309,1166,1010, 552, # 3808 + 
681,1888,5490,5491,2972,2973,4099,1287,1596,1862,3179, 358, 453, 736, 175, 478, # 3824 +1117, 905,1167,1097,5492,1854,1530,5493,1706,5494,2181,3519,2292,3761,3520,3632, # 3840 +4346,2093,4347,5495,3404,1193,2489,4348,1458,2193,2208,1863,1889,1421,3331,2932, # 3856 +3069,2182,3521, 595,2123,5496,4100,5497,5498,4349,1707,2646, 223,3762,1359, 751, # 3872 +3121, 183,3522,5499,2810,3021, 419,2374, 633, 704,3897,2394, 241,5500,5501,5502, # 3888 + 838,3022,3763,2277,2773,2459,3898,1939,2051,4101,1309,3122,2246,1181,5503,1136, # 3904 +2209,3899,2375,1446,4350,2310,4712,5504,5505,4351,1055,2615, 484,3764,5506,4102, # 3920 + 625,4352,2278,3405,1499,4353,4103,5507,4104,4354,3253,2279,2280,3523,5508,5509, # 3936 +2774, 808,2616,3765,3406,4105,4355,3123,2539, 526,3407,3900,4356, 955,5510,1620, # 3952 +4357,2647,2432,5511,1429,3766,1669,1832, 994, 928,5512,3633,1260,5513,5514,5515, # 3968 +1949,2293, 741,2933,1626,4358,2738,2460, 867,1184, 362,3408,1392,5516,5517,4106, # 3984 +4359,1770,1736,3254,2934,4713,4714,1929,2707,1459,1158,5518,3070,3409,2891,1292, # 4000 +1930,2513,2855,3767,1986,1187,2072,2015,2617,4360,5519,2574,2514,2170,3768,2490, # 4016 +3332,5520,3769,4715,5521,5522, 666,1003,3023,1022,3634,4361,5523,4716,1814,2257, # 4032 + 574,3901,1603, 295,1535, 705,3902,4362, 283, 858, 417,5524,5525,3255,4717,4718, # 4048 +3071,1220,1890,1046,2281,2461,4107,1393,1599, 689,2575, 388,4363,5526,2491, 802, # 4064 +5527,2811,3903,2061,1405,2258,5528,4719,3904,2110,1052,1345,3256,1585,5529, 809, # 4080 +5530,5531,5532, 575,2739,3524, 956,1552,1469,1144,2328,5533,2329,1560,2462,3635, # 4096 +3257,4108, 616,2210,4364,3180,2183,2294,5534,1833,5535,3525,4720,5536,1319,3770, # 4112 +3771,1211,3636,1023,3258,1293,2812,5537,5538,5539,3905, 607,2311,3906, 762,2892, # 4128 +1439,4365,1360,4721,1485,3072,5540,4722,1038,4366,1450,2062,2648,4367,1379,4723, # 4144 +2593,5541,5542,4368,1352,1414,2330,2935,1172,5543,5544,3907,3908,4724,1798,1451, # 4160 
+5545,5546,5547,5548,2936,4109,4110,2492,2351, 411,4111,4112,3637,3333,3124,4725, # 4176 +1561,2674,1452,4113,1375,5549,5550, 47,2974, 316,5551,1406,1591,2937,3181,5552, # 4192 +1025,2142,3125,3182, 354,2740, 884,2228,4369,2412, 508,3772, 726,3638, 996,2433, # 4208 +3639, 729,5553, 392,2194,1453,4114,4726,3773,5554,5555,2463,3640,2618,1675,2813, # 4224 + 919,2352,2975,2353,1270,4727,4115, 73,5556,5557, 647,5558,3259,2856,2259,1550, # 4240 +1346,3024,5559,1332, 883,3526,5560,5561,5562,5563,3334,2775,5564,1212, 831,1347, # 4256 +4370,4728,2331,3909,1864,3073, 720,3910,4729,4730,3911,5565,4371,5566,5567,4731, # 4272 +5568,5569,1799,4732,3774,2619,4733,3641,1645,2376,4734,5570,2938, 669,2211,2675, # 4288 +2434,5571,2893,5572,5573,1028,3260,5574,4372,2413,5575,2260,1353,5576,5577,4735, # 4304 +3183, 518,5578,4116,5579,4373,1961,5580,2143,4374,5581,5582,3025,2354,2355,3912, # 4320 + 516,1834,1454,4117,2708,4375,4736,2229,2620,1972,1129,3642,5583,2776,5584,2976, # 4336 +1422, 577,1470,3026,1524,3410,5585,5586, 432,4376,3074,3527,5587,2594,1455,2515, # 4352 +2230,1973,1175,5588,1020,2741,4118,3528,4737,5589,2742,5590,1743,1361,3075,3529, # 4368 +2649,4119,4377,4738,2295, 895, 924,4378,2171, 331,2247,3076, 166,1627,3077,1098, # 4384 +5591,1232,2894,2231,3411,4739, 657, 403,1196,2377, 542,3775,3412,1600,4379,3530, # 4400 +5592,4740,2777,3261, 576, 530,1362,4741,4742,2540,2676,3776,4120,5593, 842,3913, # 4416 +5594,2814,2032,1014,4121, 213,2709,3413, 665, 621,4380,5595,3777,2939,2435,5596, # 4432 +2436,3335,3643,3414,4743,4381,2541,4382,4744,3644,1682,4383,3531,1380,5597, 724, # 4448 +2282, 600,1670,5598,1337,1233,4745,3126,2248,5599,1621,4746,5600, 651,4384,5601, # 4464 +1612,4385,2621,5602,2857,5603,2743,2312,3078,5604, 716,2464,3079, 174,1255,2710, # 4480 +4122,3645, 548,1320,1398, 728,4123,1574,5605,1891,1197,3080,4124,5606,3081,3082, # 4496 +3778,3646,3779, 747,5607, 635,4386,4747,5608,5609,5610,4387,5611,5612,4748,5613, # 4512 +3415,4749,2437, 
451,5614,3780,2542,2073,4388,2744,4389,4125,5615,1764,4750,5616, # 4528 +4390, 350,4751,2283,2395,2493,5617,4391,4126,2249,1434,4127, 488,4752, 458,4392, # 4544 +4128,3781, 771,1330,2396,3914,2576,3184,2160,2414,1553,2677,3185,4393,5618,2494, # 4560 +2895,2622,1720,2711,4394,3416,4753,5619,2543,4395,5620,3262,4396,2778,5621,2016, # 4576 +2745,5622,1155,1017,3782,3915,5623,3336,2313, 201,1865,4397,1430,5624,4129,5625, # 4592 +5626,5627,5628,5629,4398,1604,5630, 414,1866, 371,2595,4754,4755,3532,2017,3127, # 4608 +4756,1708, 960,4399, 887, 389,2172,1536,1663,1721,5631,2232,4130,2356,2940,1580, # 4624 +5632,5633,1744,4757,2544,4758,4759,5634,4760,5635,2074,5636,4761,3647,3417,2896, # 4640 +4400,5637,4401,2650,3418,2815, 673,2712,2465, 709,3533,4131,3648,4402,5638,1148, # 4656 + 502, 634,5639,5640,1204,4762,3649,1575,4763,2623,3783,5641,3784,3128, 948,3263, # 4672 + 121,1745,3916,1110,5642,4403,3083,2516,3027,4132,3785,1151,1771,3917,1488,4133, # 4688 +1987,5643,2438,3534,5644,5645,2094,5646,4404,3918,1213,1407,2816, 531,2746,2545, # 4704 +3264,1011,1537,4764,2779,4405,3129,1061,5647,3786,3787,1867,2897,5648,2018, 120, # 4720 +4406,4407,2063,3650,3265,2314,3919,2678,3419,1955,4765,4134,5649,3535,1047,2713, # 4736 +1266,5650,1368,4766,2858, 649,3420,3920,2546,2747,1102,2859,2679,5651,5652,2000, # 4752 +5653,1111,3651,2977,5654,2495,3921,3652,2817,1855,3421,3788,5655,5656,3422,2415, # 4768 +2898,3337,3266,3653,5657,2577,5658,3654,2818,4135,1460, 856,5659,3655,5660,2899, # 4784 +2978,5661,2900,3922,5662,4408, 632,2517, 875,3923,1697,3924,2296,5663,5664,4767, # 4800 +3028,1239, 580,4768,4409,5665, 914, 936,2075,1190,4136,1039,2124,5666,5667,5668, # 4816 +5669,3423,1473,5670,1354,4410,3925,4769,2173,3084,4137, 915,3338,4411,4412,3339, # 4832 +1605,1835,5671,2748, 398,3656,4413,3926,4138, 328,1913,2860,4139,3927,1331,4414, # 4848 +3029, 937,4415,5672,3657,4140,4141,3424,2161,4770,3425, 524, 742, 538,3085,1012, # 4864 +5673,5674,3928,2466,5675, 658,1103, 
225,3929,5676,5677,4771,5678,4772,5679,3267, # 4880 +1243,5680,4142, 963,2250,4773,5681,2714,3658,3186,5682,5683,2596,2332,5684,4774, # 4896 +5685,5686,5687,3536, 957,3426,2547,2033,1931,2941,2467, 870,2019,3659,1746,2780, # 4912 +2781,2439,2468,5688,3930,5689,3789,3130,3790,3537,3427,3791,5690,1179,3086,5691, # 4928 +3187,2378,4416,3792,2548,3188,3131,2749,4143,5692,3428,1556,2549,2297, 977,2901, # 4944 +2034,4144,1205,3429,5693,1765,3430,3189,2125,1271, 714,1689,4775,3538,5694,2333, # 4960 +3931, 533,4417,3660,2184, 617,5695,2469,3340,3539,2315,5696,5697,3190,5698,5699, # 4976 +3932,1988, 618, 427,2651,3540,3431,5700,5701,1244,1690,5702,2819,4418,4776,5703, # 4992 +3541,4777,5704,2284,1576, 473,3661,4419,3432, 972,5705,3662,5706,3087,5707,5708, # 5008 +4778,4779,5709,3793,4145,4146,5710, 153,4780, 356,5711,1892,2902,4420,2144, 408, # 5024 + 803,2357,5712,3933,5713,4421,1646,2578,2518,4781,4782,3934,5714,3935,4422,5715, # 5040 +2416,3433, 752,5716,5717,1962,3341,2979,5718, 746,3030,2470,4783,4423,3794, 698, # 5056 +4784,1893,4424,3663,2550,4785,3664,3936,5719,3191,3434,5720,1824,1302,4147,2715, # 5072 +3937,1974,4425,5721,4426,3192, 823,1303,1288,1236,2861,3542,4148,3435, 774,3938, # 5088 +5722,1581,4786,1304,2862,3939,4787,5723,2440,2162,1083,3268,4427,4149,4428, 344, # 5104 +1173, 288,2316, 454,1683,5724,5725,1461,4788,4150,2597,5726,5727,4789, 985, 894, # 5120 +5728,3436,3193,5729,1914,2942,3795,1989,5730,2111,1975,5731,4151,5732,2579,1194, # 5136 + 425,5733,4790,3194,1245,3796,4429,5734,5735,2863,5736, 636,4791,1856,3940, 760, # 5152 +1800,5737,4430,2212,1508,4792,4152,1894,1684,2298,5738,5739,4793,4431,4432,2213, # 5168 + 479,5740,5741, 832,5742,4153,2496,5743,2980,2497,3797, 990,3132, 627,1815,2652, # 5184 +4433,1582,4434,2126,2112,3543,4794,5744, 799,4435,3195,5745,4795,2113,1737,3031, # 5200 +1018, 543, 754,4436,3342,1676,4796,4797,4154,4798,1489,5746,3544,5747,2624,2903, # 5216 
+4155,5748,5749,2981,5750,5751,5752,5753,3196,4799,4800,2185,1722,5754,3269,3270, # 5232 +1843,3665,1715, 481, 365,1976,1857,5755,5756,1963,2498,4801,5757,2127,3666,3271, # 5248 + 433,1895,2064,2076,5758, 602,2750,5759,5760,5761,5762,5763,3032,1628,3437,5764, # 5264 +3197,4802,4156,2904,4803,2519,5765,2551,2782,5766,5767,5768,3343,4804,2905,5769, # 5280 +4805,5770,2864,4806,4807,1221,2982,4157,2520,5771,5772,5773,1868,1990,5774,5775, # 5296 +5776,1896,5777,5778,4808,1897,4158, 318,5779,2095,4159,4437,5780,5781, 485,5782, # 5312 + 938,3941, 553,2680, 116,5783,3942,3667,5784,3545,2681,2783,3438,3344,2820,5785, # 5328 +3668,2943,4160,1747,2944,2983,5786,5787, 207,5788,4809,5789,4810,2521,5790,3033, # 5344 + 890,3669,3943,5791,1878,3798,3439,5792,2186,2358,3440,1652,5793,5794,5795, 941, # 5360 +2299, 208,3546,4161,2020, 330,4438,3944,2906,2499,3799,4439,4811,5796,5797,5798, # 5376 +) + diff --git a/thesisenv/lib/python3.6/site-packages/chardet/big5prober.py b/thesisenv/lib/python3.6/site-packages/chardet/big5prober.py new file mode 100644 index 0000000..98f9970 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/chardet/big5prober.py @@ -0,0 +1,47 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. 
+# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .mbcharsetprober import MultiByteCharSetProber +from .codingstatemachine import CodingStateMachine +from .chardistribution import Big5DistributionAnalysis +from .mbcssm import BIG5_SM_MODEL + + +class Big5Prober(MultiByteCharSetProber): + def __init__(self): + super(Big5Prober, self).__init__() + self.coding_sm = CodingStateMachine(BIG5_SM_MODEL) + self.distribution_analyzer = Big5DistributionAnalysis() + self.reset() + + @property + def charset_name(self): + return "Big5" + + @property + def language(self): + return "Chinese" diff --git a/thesisenv/lib/python3.6/site-packages/chardet/chardistribution.py b/thesisenv/lib/python3.6/site-packages/chardet/chardistribution.py new file mode 100644 index 0000000..c0395f4 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/chardet/chardistribution.py @@ -0,0 +1,233 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. 
+# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .euctwfreq import (EUCTW_CHAR_TO_FREQ_ORDER, EUCTW_TABLE_SIZE, + EUCTW_TYPICAL_DISTRIBUTION_RATIO) +from .euckrfreq import (EUCKR_CHAR_TO_FREQ_ORDER, EUCKR_TABLE_SIZE, + EUCKR_TYPICAL_DISTRIBUTION_RATIO) +from .gb2312freq import (GB2312_CHAR_TO_FREQ_ORDER, GB2312_TABLE_SIZE, + GB2312_TYPICAL_DISTRIBUTION_RATIO) +from .big5freq import (BIG5_CHAR_TO_FREQ_ORDER, BIG5_TABLE_SIZE, + BIG5_TYPICAL_DISTRIBUTION_RATIO) +from .jisfreq import (JIS_CHAR_TO_FREQ_ORDER, JIS_TABLE_SIZE, + JIS_TYPICAL_DISTRIBUTION_RATIO) + + +class CharDistributionAnalysis(object): + ENOUGH_DATA_THRESHOLD = 1024 + SURE_YES = 0.99 + SURE_NO = 0.01 + MINIMUM_DATA_THRESHOLD = 3 + + def __init__(self): + # Mapping table to get frequency order from char order (get from + # GetOrder()) + self._char_to_freq_order = None + self._table_size = None # Size of above table + # This is a constant value which varies from language to language, + # used in calculating confidence. See + # http://www.mozilla.org/projects/intl/UniversalCharsetDetection.html + # for further detail. 
+ self.typical_distribution_ratio = None + self._done = None + self._total_chars = None + self._freq_chars = None + self.reset() + + def reset(self): + """reset analyser, clear any state""" + # If this flag is set to True, detection is done and conclusion has + # been made + self._done = False + self._total_chars = 0 # Total characters encountered + # The number of characters whose frequency order is less than 512 + self._freq_chars = 0 + + def feed(self, char, char_len): + """feed a character with known length""" + if char_len == 2: + # we only care about 2-bytes character in our distribution analysis + order = self.get_order(char) + else: + order = -1 + if order >= 0: + self._total_chars += 1 + # order is valid + if order < self._table_size: + if 512 > self._char_to_freq_order[order]: + self._freq_chars += 1 + + def get_confidence(self): + """return confidence based on existing data""" + # if we didn't receive any character in our consideration range, + # return negative answer + if self._total_chars <= 0 or self._freq_chars <= self.MINIMUM_DATA_THRESHOLD: + return self.SURE_NO + + if self._total_chars != self._freq_chars: + r = (self._freq_chars / ((self._total_chars - self._freq_chars) + * self.typical_distribution_ratio)) + if r < self.SURE_YES: + return r + + # normalize confidence (we don't want to be 100% sure) + return self.SURE_YES + + def got_enough_data(self): + # It is not necessary to receive all data to draw conclusion. + # For charset detection, certain amount of data is enough + return self._total_chars > self.ENOUGH_DATA_THRESHOLD + + def get_order(self, byte_str): + # We do not handle characters based on the original encoding string, + # but convert this encoding string to a number, here called order. + # This allows multiple encodings of a language to share one frequency + # table. 
+ return -1 + + +class EUCTWDistributionAnalysis(CharDistributionAnalysis): + def __init__(self): + super(EUCTWDistributionAnalysis, self).__init__() + self._char_to_freq_order = EUCTW_CHAR_TO_FREQ_ORDER + self._table_size = EUCTW_TABLE_SIZE + self.typical_distribution_ratio = EUCTW_TYPICAL_DISTRIBUTION_RATIO + + def get_order(self, byte_str): + # for euc-TW encoding, we are interested + # first byte range: 0xc4 -- 0xfe + # second byte range: 0xa1 -- 0xfe + # no validation needed here. State machine has done that + first_char = byte_str[0] + if first_char >= 0xC4: + return 94 * (first_char - 0xC4) + byte_str[1] - 0xA1 + else: + return -1 + + +class EUCKRDistributionAnalysis(CharDistributionAnalysis): + def __init__(self): + super(EUCKRDistributionAnalysis, self).__init__() + self._char_to_freq_order = EUCKR_CHAR_TO_FREQ_ORDER + self._table_size = EUCKR_TABLE_SIZE + self.typical_distribution_ratio = EUCKR_TYPICAL_DISTRIBUTION_RATIO + + def get_order(self, byte_str): + # for euc-KR encoding, we are interested + # first byte range: 0xb0 -- 0xfe + # second byte range: 0xa1 -- 0xfe + # no validation needed here. State machine has done that + first_char = byte_str[0] + if first_char >= 0xB0: + return 94 * (first_char - 0xB0) + byte_str[1] - 0xA1 + else: + return -1 + + +class GB2312DistributionAnalysis(CharDistributionAnalysis): + def __init__(self): + super(GB2312DistributionAnalysis, self).__init__() + self._char_to_freq_order = GB2312_CHAR_TO_FREQ_ORDER + self._table_size = GB2312_TABLE_SIZE + self.typical_distribution_ratio = GB2312_TYPICAL_DISTRIBUTION_RATIO + + def get_order(self, byte_str): + # for GB2312 encoding, we are interested + # first byte range: 0xb0 -- 0xfe + # second byte range: 0xa1 -- 0xfe + # no validation needed here. 
State machine has done that + first_char, second_char = byte_str[0], byte_str[1] + if (first_char >= 0xB0) and (second_char >= 0xA1): + return 94 * (first_char - 0xB0) + second_char - 0xA1 + else: + return -1 + + +class Big5DistributionAnalysis(CharDistributionAnalysis): + def __init__(self): + super(Big5DistributionAnalysis, self).__init__() + self._char_to_freq_order = BIG5_CHAR_TO_FREQ_ORDER + self._table_size = BIG5_TABLE_SIZE + self.typical_distribution_ratio = BIG5_TYPICAL_DISTRIBUTION_RATIO + + def get_order(self, byte_str): + # for big5 encoding, we are interested + # first byte range: 0xa4 -- 0xfe + # second byte range: 0x40 -- 0x7e , 0xa1 -- 0xfe + # no validation needed here. State machine has done that + first_char, second_char = byte_str[0], byte_str[1] + if first_char >= 0xA4: + if second_char >= 0xA1: + return 157 * (first_char - 0xA4) + second_char - 0xA1 + 63 + else: + return 157 * (first_char - 0xA4) + second_char - 0x40 + else: + return -1 + + +class SJISDistributionAnalysis(CharDistributionAnalysis): + def __init__(self): + super(SJISDistributionAnalysis, self).__init__() + self._char_to_freq_order = JIS_CHAR_TO_FREQ_ORDER + self._table_size = JIS_TABLE_SIZE + self.typical_distribution_ratio = JIS_TYPICAL_DISTRIBUTION_RATIO + + def get_order(self, byte_str): + # for sjis encoding, we are interested + # first byte range: 0x81 -- 0x9f , 0xe0 -- 0xfe + # second byte range: 0x40 -- 0x7e, 0x81 -- oxfe + # no validation needed here. 
State machine has done that + first_char, second_char = byte_str[0], byte_str[1] + if (first_char >= 0x81) and (first_char <= 0x9F): + order = 188 * (first_char - 0x81) + elif (first_char >= 0xE0) and (first_char <= 0xEF): + order = 188 * (first_char - 0xE0 + 31) + else: + return -1 + order = order + second_char - 0x40 + if second_char > 0x7F: + order = -1 + return order + + +class EUCJPDistributionAnalysis(CharDistributionAnalysis): + def __init__(self): + super(EUCJPDistributionAnalysis, self).__init__() + self._char_to_freq_order = JIS_CHAR_TO_FREQ_ORDER + self._table_size = JIS_TABLE_SIZE + self.typical_distribution_ratio = JIS_TYPICAL_DISTRIBUTION_RATIO + + def get_order(self, byte_str): + # for euc-JP encoding, we are interested + # first byte range: 0xa0 -- 0xfe + # second byte range: 0xa1 -- 0xfe + # no validation needed here. State machine has done that + char = byte_str[0] + if char >= 0xA0: + return 94 * (char - 0xA1) + byte_str[1] - 0xa1 + else: + return -1 diff --git a/thesisenv/lib/python3.6/site-packages/chardet/charsetgroupprober.py b/thesisenv/lib/python3.6/site-packages/chardet/charsetgroupprober.py new file mode 100644 index 0000000..8b3738e --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/chardet/charsetgroupprober.py @@ -0,0 +1,106 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. 
+# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .enums import ProbingState +from .charsetprober import CharSetProber + + +class CharSetGroupProber(CharSetProber): + def __init__(self, lang_filter=None): + super(CharSetGroupProber, self).__init__(lang_filter=lang_filter) + self._active_num = 0 + self.probers = [] + self._best_guess_prober = None + + def reset(self): + super(CharSetGroupProber, self).reset() + self._active_num = 0 + for prober in self.probers: + if prober: + prober.reset() + prober.active = True + self._active_num += 1 + self._best_guess_prober = None + + @property + def charset_name(self): + if not self._best_guess_prober: + self.get_confidence() + if not self._best_guess_prober: + return None + return self._best_guess_prober.charset_name + + @property + def language(self): + if not self._best_guess_prober: + self.get_confidence() + if not self._best_guess_prober: + return None + return self._best_guess_prober.language + + def feed(self, byte_str): + for prober in self.probers: + if not prober: + continue + if not prober.active: + continue + state = prober.feed(byte_str) + if not state: + continue + if state == ProbingState.FOUND_IT: + self._best_guess_prober = prober + return self.state + elif state == ProbingState.NOT_ME: + prober.active = False + self._active_num -= 1 + if self._active_num <= 0: + self._state = ProbingState.NOT_ME + return self.state + return self.state + + def get_confidence(self): + state = self.state + if state == 
ProbingState.FOUND_IT: + return 0.99 + elif state == ProbingState.NOT_ME: + return 0.01 + best_conf = 0.0 + self._best_guess_prober = None + for prober in self.probers: + if not prober: + continue + if not prober.active: + self.logger.debug('%s not active', prober.charset_name) + continue + conf = prober.get_confidence() + self.logger.debug('%s %s confidence = %s', prober.charset_name, prober.language, conf) + if best_conf < conf: + best_conf = conf + self._best_guess_prober = prober + if not self._best_guess_prober: + return 0.0 + return best_conf diff --git a/thesisenv/lib/python3.6/site-packages/chardet/charsetprober.py b/thesisenv/lib/python3.6/site-packages/chardet/charsetprober.py new file mode 100644 index 0000000..eac4e59 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/chardet/charsetprober.py @@ -0,0 +1,145 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Universal charset detector code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 2001 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# Shy Shalom - original C code +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +import logging +import re + +from .enums import ProbingState + + +class CharSetProber(object): + + SHORTCUT_THRESHOLD = 0.95 + + def __init__(self, lang_filter=None): + self._state = None + self.lang_filter = lang_filter + self.logger = logging.getLogger(__name__) + + def reset(self): + self._state = ProbingState.DETECTING + + @property + def charset_name(self): + return None + + def feed(self, buf): + pass + + @property + def state(self): + return self._state + + def get_confidence(self): + return 0.0 + + @staticmethod + def filter_high_byte_only(buf): + buf = re.sub(b'([\x00-\x7F])+', b' ', buf) + return buf + + @staticmethod + def filter_international_words(buf): + """ + We define three types of bytes: + alphabet: english alphabets [a-zA-Z] + international: international characters [\x80-\xFF] + marker: everything else [^a-zA-Z\x80-\xFF] + + The input buffer can be thought to contain a series of words delimited + by markers. This function works to filter all words that contain at + least one international character. All contiguous sequences of markers + are replaced by a single space ascii character. + + This filter applies to all scripts which do not use English characters. + """ + filtered = bytearray() + + # This regex expression filters out only words that have at-least one + # international character. The word may include one marker character at + # the end. 
+ words = re.findall(b'[a-zA-Z]*[\x80-\xFF]+[a-zA-Z]*[^a-zA-Z\x80-\xFF]?', + buf) + + for word in words: + filtered.extend(word[:-1]) + + # If the last character in the word is a marker, replace it with a + # space as markers shouldn't affect our analysis (they are used + # similarly across all languages and may thus have similar + # frequencies). + last_char = word[-1:] + if not last_char.isalpha() and last_char < b'\x80': + last_char = b' ' + filtered.extend(last_char) + + return filtered + + @staticmethod + def filter_with_english_letters(buf): + """ + Returns a copy of ``buf`` that retains only the sequences of English + alphabet and high byte characters that are not between <> characters. + Also retains English alphabet and high byte characters immediately + before occurrences of >. + + This filter can be applied to all scripts which contain both English + characters and extended ASCII characters, but is currently only used by + ``Latin1Prober``. + """ + filtered = bytearray() + in_tag = False + prev = 0 + + for curr in range(len(buf)): + # Slice here to get bytes instead of an int with Python 3 + buf_char = buf[curr:curr + 1] + # Check if we're coming out of or entering an HTML tag + if buf_char == b'>': + in_tag = False + elif buf_char == b'<': + in_tag = True + + # If current character is not extended-ASCII and not alphabetic... + if buf_char < b'\x80' and not buf_char.isalpha(): + # ...and we're not in a tag + if curr > prev and not in_tag: + # Keep everything after last non-extended-ASCII, + # non-alphabetic character + filtered.extend(buf[prev:curr]) + # Output a space to delimit stretch we kept + filtered.extend(b' ') + prev = curr + 1 + + # If we're not in a tag... 
+ if not in_tag: + # Keep everything after last non-extended-ASCII, non-alphabetic + # character + filtered.extend(buf[prev:]) + + return filtered diff --git a/thesisenv/lib/python3.6/site-packages/chardet/cli/__init__.py b/thesisenv/lib/python3.6/site-packages/chardet/cli/__init__.py new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/chardet/cli/__init__.py @@ -0,0 +1 @@ + diff --git a/thesisenv/lib/python3.6/site-packages/chardet/cli/chardetect.py b/thesisenv/lib/python3.6/site-packages/chardet/cli/chardetect.py new file mode 100644 index 0000000..f0a4cc5 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/chardet/cli/chardetect.py @@ -0,0 +1,85 @@ +#!/usr/bin/env python +""" +Script which takes one or more file paths and reports on their detected +encodings + +Example:: + + % chardetect somefile someotherfile + somefile: windows-1252 with confidence 0.5 + someotherfile: ascii with confidence 1.0 + +If no paths are provided, it takes its input from stdin. + +""" + +from __future__ import absolute_import, print_function, unicode_literals + +import argparse +import sys + +from chardet import __version__ +from chardet.compat import PY2 +from chardet.universaldetector import UniversalDetector + + +def description_of(lines, name='stdin'): + """ + Return a string describing the probable encoding of a file or + list of strings. + + :param lines: The lines to get the encoding of. + :type lines: Iterable of bytes + :param name: Name of file or collection of lines + :type name: str + """ + u = UniversalDetector() + for line in lines: + line = bytearray(line) + u.feed(line) + # shortcut out of the loop to save reading further - particularly useful if we read a BOM. 
+ if u.done: + break + u.close() + result = u.result + if PY2: + name = name.decode(sys.getfilesystemencoding(), 'ignore') + if result['encoding']: + return '{0}: {1} with confidence {2}'.format(name, result['encoding'], + result['confidence']) + else: + return '{0}: no result'.format(name) + + +def main(argv=None): + """ + Handles command line arguments and gets things started. + + :param argv: List of arguments, as if specified on the command-line. + If None, ``sys.argv[1:]`` is used instead. + :type argv: list of str + """ + # Get command line arguments + parser = argparse.ArgumentParser( + description="Takes one or more file paths and reports their detected \ + encodings") + parser.add_argument('input', + help='File whose encoding we would like to determine. \ + (default: stdin)', + type=argparse.FileType('rb'), nargs='*', + default=[sys.stdin if PY2 else sys.stdin.buffer]) + parser.add_argument('--version', action='version', + version='%(prog)s {0}'.format(__version__)) + args = parser.parse_args(argv) + + for f in args.input: + if f.isatty(): + print("You are running chardetect interactively. Press " + + "CTRL-D twice at the start of a blank line to signal the " + + "end of your input. If you want help, run chardetect " + + "--help\n", file=sys.stderr) + print(description_of(f, f.name)) + + +if __name__ == '__main__': + main() diff --git a/thesisenv/lib/python3.6/site-packages/chardet/codingstatemachine.py b/thesisenv/lib/python3.6/site-packages/chardet/codingstatemachine.py new file mode 100644 index 0000000..68fba44 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/chardet/codingstatemachine.py @@ -0,0 +1,88 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. 
+# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +import logging + +from .enums import MachineState + + +class CodingStateMachine(object): + """ + A state machine to verify a byte sequence for a particular encoding. For + each byte the detector receives, it will feed that byte to every active + state machine available, one byte at a time. The state machine changes its + state based on its previous state and the byte it receives. There are 3 + states in a state machine that are of interest to an auto-detector: + + START state: This is the state to start with, or a legal byte sequence + (i.e. a valid code point) for character has been identified. + + ME state: This indicates that the state machine identified a byte sequence + that is specific to the charset it is designed for and that + there is no other possible encoding which can contain this byte + sequence. This will to lead to an immediate positive answer for + the detector. + + ERROR state: This indicates the state machine identified an illegal byte + sequence for that encoding. This will lead to an immediate + negative answer for this encoding. 
Detector will exclude this + encoding from consideration from here on. + """ + def __init__(self, sm): + self._model = sm + self._curr_byte_pos = 0 + self._curr_char_len = 0 + self._curr_state = None + self.logger = logging.getLogger(__name__) + self.reset() + + def reset(self): + self._curr_state = MachineState.START + + def next_state(self, c): + # for each byte we get its class + # if it is first byte, we also get byte length + byte_class = self._model['class_table'][c] + if self._curr_state == MachineState.START: + self._curr_byte_pos = 0 + self._curr_char_len = self._model['char_len_table'][byte_class] + # from byte's class and state_table, we get its next state + curr_state = (self._curr_state * self._model['class_factor'] + + byte_class) + self._curr_state = self._model['state_table'][curr_state] + self._curr_byte_pos += 1 + return self._curr_state + + def get_current_charlen(self): + return self._curr_char_len + + def get_coding_state_machine(self): + return self._model['name'] + + @property + def language(self): + return self._model['language'] diff --git a/thesisenv/lib/python3.6/site-packages/chardet/compat.py b/thesisenv/lib/python3.6/site-packages/chardet/compat.py new file mode 100644 index 0000000..ddd7468 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/chardet/compat.py @@ -0,0 +1,34 @@ +######################## BEGIN LICENSE BLOCK ######################## +# Contributor(s): +# Dan Blanchard +# Ian Cordasco +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +import sys + + +if sys.version_info < (3, 0): + PY2 = True + PY3 = False + base_str = (str, unicode) + text_type = unicode +else: + PY2 = False + PY3 = True + base_str = (bytes, str) + text_type = str diff --git a/thesisenv/lib/python3.6/site-packages/chardet/cp949prober.py b/thesisenv/lib/python3.6/site-packages/chardet/cp949prober.py new file mode 100644 index 0000000..efd793a --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/chardet/cp949prober.py @@ -0,0 +1,49 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .chardistribution import EUCKRDistributionAnalysis +from .codingstatemachine import CodingStateMachine +from .mbcharsetprober import MultiByteCharSetProber +from .mbcssm import CP949_SM_MODEL + + +class CP949Prober(MultiByteCharSetProber): + def __init__(self): + super(CP949Prober, self).__init__() + self.coding_sm = CodingStateMachine(CP949_SM_MODEL) + # NOTE: CP949 is a superset of EUC-KR, so the distribution should be + # not different. + self.distribution_analyzer = EUCKRDistributionAnalysis() + self.reset() + + @property + def charset_name(self): + return "CP949" + + @property + def language(self): + return "Korean" diff --git a/thesisenv/lib/python3.6/site-packages/chardet/enums.py b/thesisenv/lib/python3.6/site-packages/chardet/enums.py new file mode 100644 index 0000000..0451207 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/chardet/enums.py @@ -0,0 +1,76 @@ +""" +All of the Enums that are used throughout the chardet package. + +:author: Dan Blanchard (dan.blanchard@gmail.com) +""" + + +class InputState(object): + """ + This enum represents the different states a universal detector can be in. + """ + PURE_ASCII = 0 + ESC_ASCII = 1 + HIGH_BYTE = 2 + + +class LanguageFilter(object): + """ + This enum represents the different language filters we can apply to a + ``UniversalDetector``. + """ + CHINESE_SIMPLIFIED = 0x01 + CHINESE_TRADITIONAL = 0x02 + JAPANESE = 0x04 + KOREAN = 0x08 + NON_CJK = 0x10 + ALL = 0x1F + CHINESE = CHINESE_SIMPLIFIED | CHINESE_TRADITIONAL + CJK = CHINESE | JAPANESE | KOREAN + + +class ProbingState(object): + """ + This enum represents the different states a prober can be in. 
+ """ + DETECTING = 0 + FOUND_IT = 1 + NOT_ME = 2 + + +class MachineState(object): + """ + This enum represents the different states a state machine can be in. + """ + START = 0 + ERROR = 1 + ITS_ME = 2 + + +class SequenceLikelihood(object): + """ + This enum represents the likelihood of a character following the previous one. + """ + NEGATIVE = 0 + UNLIKELY = 1 + LIKELY = 2 + POSITIVE = 3 + + @classmethod + def get_num_categories(cls): + """:returns: The number of likelihood categories in the enum.""" + return 4 + + +class CharacterCategory(object): + """ + This enum represents the different categories language models for + ``SingleByteCharsetProber`` put characters into. + + Anything less than CONTROL is considered a letter. + """ + UNDEFINED = 255 + LINE_BREAK = 254 + SYMBOL = 253 + DIGIT = 252 + CONTROL = 251 diff --git a/thesisenv/lib/python3.6/site-packages/chardet/escprober.py b/thesisenv/lib/python3.6/site-packages/chardet/escprober.py new file mode 100644 index 0000000..c70493f --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/chardet/escprober.py @@ -0,0 +1,101 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .charsetprober import CharSetProber +from .codingstatemachine import CodingStateMachine +from .enums import LanguageFilter, ProbingState, MachineState +from .escsm import (HZ_SM_MODEL, ISO2022CN_SM_MODEL, ISO2022JP_SM_MODEL, + ISO2022KR_SM_MODEL) + + +class EscCharSetProber(CharSetProber): + """ + This CharSetProber uses a "code scheme" approach for detecting encodings, + whereby easily recognizable escape or shift sequences are relied on to + identify these encodings. + """ + + def __init__(self, lang_filter=None): + super(EscCharSetProber, self).__init__(lang_filter=lang_filter) + self.coding_sm = [] + if self.lang_filter & LanguageFilter.CHINESE_SIMPLIFIED: + self.coding_sm.append(CodingStateMachine(HZ_SM_MODEL)) + self.coding_sm.append(CodingStateMachine(ISO2022CN_SM_MODEL)) + if self.lang_filter & LanguageFilter.JAPANESE: + self.coding_sm.append(CodingStateMachine(ISO2022JP_SM_MODEL)) + if self.lang_filter & LanguageFilter.KOREAN: + self.coding_sm.append(CodingStateMachine(ISO2022KR_SM_MODEL)) + self.active_sm_count = None + self._detected_charset = None + self._detected_language = None + self._state = None + self.reset() + + def reset(self): + super(EscCharSetProber, self).reset() + for coding_sm in self.coding_sm: + if not coding_sm: + continue + coding_sm.active = True + coding_sm.reset() + self.active_sm_count = len(self.coding_sm) + self._detected_charset = None + self._detected_language = None + + @property + def charset_name(self): + return self._detected_charset + + @property + def language(self): + return self._detected_language + + def get_confidence(self): + if self._detected_charset: + return 0.99 + else: + return 0.00 + + def feed(self, byte_str): + for 
c in byte_str: + for coding_sm in self.coding_sm: + if not coding_sm or not coding_sm.active: + continue + coding_state = coding_sm.next_state(c) + if coding_state == MachineState.ERROR: + coding_sm.active = False + self.active_sm_count -= 1 + if self.active_sm_count <= 0: + self._state = ProbingState.NOT_ME + return self.state + elif coding_state == MachineState.ITS_ME: + self._state = ProbingState.FOUND_IT + self._detected_charset = coding_sm.get_coding_state_machine() + self._detected_language = coding_sm.language + return self.state + + return self.state diff --git a/thesisenv/lib/python3.6/site-packages/chardet/escsm.py b/thesisenv/lib/python3.6/site-packages/chardet/escsm.py new file mode 100644 index 0000000..0069523 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/chardet/escsm.py @@ -0,0 +1,246 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .enums import MachineState + +HZ_CLS = ( +1,0,0,0,0,0,0,0, # 00 - 07 +0,0,0,0,0,0,0,0, # 08 - 0f +0,0,0,0,0,0,0,0, # 10 - 17 +0,0,0,1,0,0,0,0, # 18 - 1f +0,0,0,0,0,0,0,0, # 20 - 27 +0,0,0,0,0,0,0,0, # 28 - 2f +0,0,0,0,0,0,0,0, # 30 - 37 +0,0,0,0,0,0,0,0, # 38 - 3f +0,0,0,0,0,0,0,0, # 40 - 47 +0,0,0,0,0,0,0,0, # 48 - 4f +0,0,0,0,0,0,0,0, # 50 - 57 +0,0,0,0,0,0,0,0, # 58 - 5f +0,0,0,0,0,0,0,0, # 60 - 67 +0,0,0,0,0,0,0,0, # 68 - 6f +0,0,0,0,0,0,0,0, # 70 - 77 +0,0,0,4,0,5,2,0, # 78 - 7f +1,1,1,1,1,1,1,1, # 80 - 87 +1,1,1,1,1,1,1,1, # 88 - 8f +1,1,1,1,1,1,1,1, # 90 - 97 +1,1,1,1,1,1,1,1, # 98 - 9f +1,1,1,1,1,1,1,1, # a0 - a7 +1,1,1,1,1,1,1,1, # a8 - af +1,1,1,1,1,1,1,1, # b0 - b7 +1,1,1,1,1,1,1,1, # b8 - bf +1,1,1,1,1,1,1,1, # c0 - c7 +1,1,1,1,1,1,1,1, # c8 - cf +1,1,1,1,1,1,1,1, # d0 - d7 +1,1,1,1,1,1,1,1, # d8 - df +1,1,1,1,1,1,1,1, # e0 - e7 +1,1,1,1,1,1,1,1, # e8 - ef +1,1,1,1,1,1,1,1, # f0 - f7 +1,1,1,1,1,1,1,1, # f8 - ff +) + +HZ_ST = ( +MachineState.START,MachineState.ERROR, 3,MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,# 00-07 +MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,# 08-0f +MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START, 4,MachineState.ERROR,# 10-17 + 5,MachineState.ERROR, 6,MachineState.ERROR, 5, 5, 4,MachineState.ERROR,# 18-1f + 4,MachineState.ERROR, 4, 4, 4,MachineState.ERROR, 4,MachineState.ERROR,# 20-27 + 
4,MachineState.ITS_ME,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,# 28-2f +) + +HZ_CHAR_LEN_TABLE = (0, 0, 0, 0, 0, 0) + +HZ_SM_MODEL = {'class_table': HZ_CLS, + 'class_factor': 6, + 'state_table': HZ_ST, + 'char_len_table': HZ_CHAR_LEN_TABLE, + 'name': "HZ-GB-2312", + 'language': 'Chinese'} + +ISO2022CN_CLS = ( +2,0,0,0,0,0,0,0, # 00 - 07 +0,0,0,0,0,0,0,0, # 08 - 0f +0,0,0,0,0,0,0,0, # 10 - 17 +0,0,0,1,0,0,0,0, # 18 - 1f +0,0,0,0,0,0,0,0, # 20 - 27 +0,3,0,0,0,0,0,0, # 28 - 2f +0,0,0,0,0,0,0,0, # 30 - 37 +0,0,0,0,0,0,0,0, # 38 - 3f +0,0,0,4,0,0,0,0, # 40 - 47 +0,0,0,0,0,0,0,0, # 48 - 4f +0,0,0,0,0,0,0,0, # 50 - 57 +0,0,0,0,0,0,0,0, # 58 - 5f +0,0,0,0,0,0,0,0, # 60 - 67 +0,0,0,0,0,0,0,0, # 68 - 6f +0,0,0,0,0,0,0,0, # 70 - 77 +0,0,0,0,0,0,0,0, # 78 - 7f +2,2,2,2,2,2,2,2, # 80 - 87 +2,2,2,2,2,2,2,2, # 88 - 8f +2,2,2,2,2,2,2,2, # 90 - 97 +2,2,2,2,2,2,2,2, # 98 - 9f +2,2,2,2,2,2,2,2, # a0 - a7 +2,2,2,2,2,2,2,2, # a8 - af +2,2,2,2,2,2,2,2, # b0 - b7 +2,2,2,2,2,2,2,2, # b8 - bf +2,2,2,2,2,2,2,2, # c0 - c7 +2,2,2,2,2,2,2,2, # c8 - cf +2,2,2,2,2,2,2,2, # d0 - d7 +2,2,2,2,2,2,2,2, # d8 - df +2,2,2,2,2,2,2,2, # e0 - e7 +2,2,2,2,2,2,2,2, # e8 - ef +2,2,2,2,2,2,2,2, # f0 - f7 +2,2,2,2,2,2,2,2, # f8 - ff +) + +ISO2022CN_ST = ( +MachineState.START, 3,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,# 00-07 +MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 08-0f +MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,# 10-17 +MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 4,MachineState.ERROR,# 18-1f 
+MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 20-27 + 5, 6,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 28-2f +MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 30-37 +MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,MachineState.START,# 38-3f +) + +ISO2022CN_CHAR_LEN_TABLE = (0, 0, 0, 0, 0, 0, 0, 0, 0) + +ISO2022CN_SM_MODEL = {'class_table': ISO2022CN_CLS, + 'class_factor': 9, + 'state_table': ISO2022CN_ST, + 'char_len_table': ISO2022CN_CHAR_LEN_TABLE, + 'name': "ISO-2022-CN", + 'language': 'Chinese'} + +ISO2022JP_CLS = ( +2,0,0,0,0,0,0,0, # 00 - 07 +0,0,0,0,0,0,2,2, # 08 - 0f +0,0,0,0,0,0,0,0, # 10 - 17 +0,0,0,1,0,0,0,0, # 18 - 1f +0,0,0,0,7,0,0,0, # 20 - 27 +3,0,0,0,0,0,0,0, # 28 - 2f +0,0,0,0,0,0,0,0, # 30 - 37 +0,0,0,0,0,0,0,0, # 38 - 3f +6,0,4,0,8,0,0,0, # 40 - 47 +0,9,5,0,0,0,0,0, # 48 - 4f +0,0,0,0,0,0,0,0, # 50 - 57 +0,0,0,0,0,0,0,0, # 58 - 5f +0,0,0,0,0,0,0,0, # 60 - 67 +0,0,0,0,0,0,0,0, # 68 - 6f +0,0,0,0,0,0,0,0, # 70 - 77 +0,0,0,0,0,0,0,0, # 78 - 7f +2,2,2,2,2,2,2,2, # 80 - 87 +2,2,2,2,2,2,2,2, # 88 - 8f +2,2,2,2,2,2,2,2, # 90 - 97 +2,2,2,2,2,2,2,2, # 98 - 9f +2,2,2,2,2,2,2,2, # a0 - a7 +2,2,2,2,2,2,2,2, # a8 - af +2,2,2,2,2,2,2,2, # b0 - b7 +2,2,2,2,2,2,2,2, # b8 - bf +2,2,2,2,2,2,2,2, # c0 - c7 +2,2,2,2,2,2,2,2, # c8 - cf +2,2,2,2,2,2,2,2, # d0 - d7 +2,2,2,2,2,2,2,2, # d8 - df +2,2,2,2,2,2,2,2, # e0 - e7 +2,2,2,2,2,2,2,2, # e8 - ef +2,2,2,2,2,2,2,2, # f0 - f7 +2,2,2,2,2,2,2,2, # f8 - ff +) + +ISO2022JP_ST = ( +MachineState.START, 3,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,# 00-07 
+MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 08-0f +MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,# 10-17 +MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,# 18-1f +MachineState.ERROR, 5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 4,MachineState.ERROR,MachineState.ERROR,# 20-27 +MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 6,MachineState.ITS_ME,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,# 28-2f +MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,# 30-37 +MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 38-3f +MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,MachineState.START,MachineState.START,# 40-47 +) + +ISO2022JP_CHAR_LEN_TABLE = (0, 0, 0, 0, 0, 0, 0, 0, 0, 0) + +ISO2022JP_SM_MODEL = {'class_table': ISO2022JP_CLS, + 'class_factor': 10, + 'state_table': ISO2022JP_ST, + 'char_len_table': ISO2022JP_CHAR_LEN_TABLE, + 'name': "ISO-2022-JP", + 'language': 'Japanese'} + +ISO2022KR_CLS = ( +2,0,0,0,0,0,0,0, # 00 - 07 +0,0,0,0,0,0,0,0, # 08 - 0f +0,0,0,0,0,0,0,0, # 10 - 17 +0,0,0,1,0,0,0,0, # 18 - 1f +0,0,0,0,3,0,0,0, # 20 - 27 +0,4,0,0,0,0,0,0, # 28 - 2f +0,0,0,0,0,0,0,0, # 30 - 37 +0,0,0,0,0,0,0,0, # 38 - 3f +0,0,0,5,0,0,0,0, # 40 - 47 +0,0,0,0,0,0,0,0, # 48 - 4f +0,0,0,0,0,0,0,0, # 50 - 57 +0,0,0,0,0,0,0,0, # 58 - 5f +0,0,0,0,0,0,0,0, # 60 - 67 +0,0,0,0,0,0,0,0, # 68 - 6f +0,0,0,0,0,0,0,0, # 70 - 77 +0,0,0,0,0,0,0,0, # 78 - 7f 
+2,2,2,2,2,2,2,2, # 80 - 87 +2,2,2,2,2,2,2,2, # 88 - 8f +2,2,2,2,2,2,2,2, # 90 - 97 +2,2,2,2,2,2,2,2, # 98 - 9f +2,2,2,2,2,2,2,2, # a0 - a7 +2,2,2,2,2,2,2,2, # a8 - af +2,2,2,2,2,2,2,2, # b0 - b7 +2,2,2,2,2,2,2,2, # b8 - bf +2,2,2,2,2,2,2,2, # c0 - c7 +2,2,2,2,2,2,2,2, # c8 - cf +2,2,2,2,2,2,2,2, # d0 - d7 +2,2,2,2,2,2,2,2, # d8 - df +2,2,2,2,2,2,2,2, # e0 - e7 +2,2,2,2,2,2,2,2, # e8 - ef +2,2,2,2,2,2,2,2, # f0 - f7 +2,2,2,2,2,2,2,2, # f8 - ff +) + +ISO2022KR_ST = ( +MachineState.START, 3,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,# 00-07 +MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,# 08-0f +MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 4,MachineState.ERROR,MachineState.ERROR,# 10-17 +MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 18-1f +MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.START,MachineState.START,MachineState.START,MachineState.START,# 20-27 +) + +ISO2022KR_CHAR_LEN_TABLE = (0, 0, 0, 0, 0, 0) + +ISO2022KR_SM_MODEL = {'class_table': ISO2022KR_CLS, + 'class_factor': 6, + 'state_table': ISO2022KR_ST, + 'char_len_table': ISO2022KR_CHAR_LEN_TABLE, + 'name': "ISO-2022-KR", + 'language': 'Korean'} + + diff --git a/thesisenv/lib/python3.6/site-packages/chardet/eucjpprober.py b/thesisenv/lib/python3.6/site-packages/chardet/eucjpprober.py new file mode 100644 index 0000000..20ce8f7 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/chardet/eucjpprober.py @@ -0,0 +1,92 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. 
+# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .enums import ProbingState, MachineState +from .mbcharsetprober import MultiByteCharSetProber +from .codingstatemachine import CodingStateMachine +from .chardistribution import EUCJPDistributionAnalysis +from .jpcntx import EUCJPContextAnalysis +from .mbcssm import EUCJP_SM_MODEL + + +class EUCJPProber(MultiByteCharSetProber): + def __init__(self): + super(EUCJPProber, self).__init__() + self.coding_sm = CodingStateMachine(EUCJP_SM_MODEL) + self.distribution_analyzer = EUCJPDistributionAnalysis() + self.context_analyzer = EUCJPContextAnalysis() + self.reset() + + def reset(self): + super(EUCJPProber, self).reset() + self.context_analyzer.reset() + + @property + def charset_name(self): + return "EUC-JP" + + @property + def language(self): + return "Japanese" + + def feed(self, byte_str): + for i in range(len(byte_str)): + # PY3K: byte_str is a byte array, so byte_str[i] is an int, not a byte + coding_state = self.coding_sm.next_state(byte_str[i]) + if coding_state == MachineState.ERROR: + 
self.logger.debug('%s %s prober hit error at byte %s', + self.charset_name, self.language, i) + self._state = ProbingState.NOT_ME + break + elif coding_state == MachineState.ITS_ME: + self._state = ProbingState.FOUND_IT + break + elif coding_state == MachineState.START: + char_len = self.coding_sm.get_current_charlen() + if i == 0: + self._last_char[1] = byte_str[0] + self.context_analyzer.feed(self._last_char, char_len) + self.distribution_analyzer.feed(self._last_char, char_len) + else: + self.context_analyzer.feed(byte_str[i - 1:i + 1], + char_len) + self.distribution_analyzer.feed(byte_str[i - 1:i + 1], + char_len) + + self._last_char[0] = byte_str[-1] + + if self.state == ProbingState.DETECTING: + if (self.context_analyzer.got_enough_data() and + (self.get_confidence() > self.SHORTCUT_THRESHOLD)): + self._state = ProbingState.FOUND_IT + + return self.state + + def get_confidence(self): + context_conf = self.context_analyzer.get_confidence() + distrib_conf = self.distribution_analyzer.get_confidence() + return max(context_conf, distrib_conf) diff --git a/thesisenv/lib/python3.6/site-packages/chardet/euckrfreq.py b/thesisenv/lib/python3.6/site-packages/chardet/euckrfreq.py new file mode 100644 index 0000000..b68078c --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/chardet/euckrfreq.py @@ -0,0 +1,195 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. 
+# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +# Sampling from about 20M text materials include literature and computer technology + +# 128 --> 0.79 +# 256 --> 0.92 +# 512 --> 0.986 +# 1024 --> 0.99944 +# 2048 --> 0.99999 +# +# Idea Distribution Ratio = 0.98653 / (1-0.98653) = 73.24 +# Random Distribution Ration = 512 / (2350-512) = 0.279. +# +# Typical Distribution Ratio + +EUCKR_TYPICAL_DISTRIBUTION_RATIO = 6.0 + +EUCKR_TABLE_SIZE = 2352 + +# Char to FreqOrder table , +EUCKR_CHAR_TO_FREQ_ORDER = ( + 13, 130, 120,1396, 481,1719,1720, 328, 609, 212,1721, 707, 400, 299,1722, 87, +1397,1723, 104, 536,1117,1203,1724,1267, 685,1268, 508,1725,1726,1727,1728,1398, +1399,1729,1730,1731, 141, 621, 326,1057, 368,1732, 267, 488, 20,1733,1269,1734, + 945,1400,1735, 47, 904,1270,1736,1737, 773, 248,1738, 409, 313, 786, 429,1739, + 116, 987, 813,1401, 683, 75,1204, 145,1740,1741,1742,1743, 16, 847, 667, 622, + 708,1744,1745,1746, 966, 787, 304, 129,1747, 60, 820, 123, 676,1748,1749,1750, +1751, 617,1752, 626,1753,1754,1755,1756, 653,1757,1758,1759,1760,1761,1762, 856, + 344,1763,1764,1765,1766, 89, 401, 418, 806, 905, 848,1767,1768,1769, 946,1205, + 709,1770,1118,1771, 241,1772,1773,1774,1271,1775, 569,1776, 999,1777,1778,1779, +1780, 337, 751,1058, 28, 628, 254,1781, 177, 906, 270, 349, 891,1079,1782, 19, +1783, 379,1784, 315,1785, 629, 754,1402, 559,1786, 636, 203,1206,1787, 710, 567, +1788, 935, 814,1789,1790,1207, 766, 528,1791,1792,1208,1793,1794,1795,1796,1797, +1403,1798,1799, 
533,1059,1404,1405,1156,1406, 936, 884,1080,1800, 351,1801,1802, +1803,1804,1805, 801,1806,1807,1808,1119,1809,1157, 714, 474,1407,1810, 298, 899, + 885,1811,1120, 802,1158,1812, 892,1813,1814,1408, 659,1815,1816,1121,1817,1818, +1819,1820,1821,1822, 319,1823, 594, 545,1824, 815, 937,1209,1825,1826, 573,1409, +1022,1827,1210,1828,1829,1830,1831,1832,1833, 556, 722, 807,1122,1060,1834, 697, +1835, 900, 557, 715,1836,1410, 540,1411, 752,1159, 294, 597,1211, 976, 803, 770, +1412,1837,1838, 39, 794,1413, 358,1839, 371, 925,1840, 453, 661, 788, 531, 723, + 544,1023,1081, 869, 91,1841, 392, 430, 790, 602,1414, 677,1082, 457,1415,1416, +1842,1843, 475, 327,1024,1417, 795, 121,1844, 733, 403,1418,1845,1846,1847, 300, + 119, 711,1212, 627,1848,1272, 207,1849,1850, 796,1213, 382,1851, 519,1852,1083, + 893,1853,1854,1855, 367, 809, 487, 671,1856, 663,1857,1858, 956, 471, 306, 857, +1859,1860,1160,1084,1861,1862,1863,1864,1865,1061,1866,1867,1868,1869,1870,1871, + 282, 96, 574,1872, 502,1085,1873,1214,1874, 907,1875,1876, 827, 977,1419,1420, +1421, 268,1877,1422,1878,1879,1880, 308,1881, 2, 537,1882,1883,1215,1884,1885, + 127, 791,1886,1273,1423,1887, 34, 336, 404, 643,1888, 571, 654, 894, 840,1889, + 0, 886,1274, 122, 575, 260, 908, 938,1890,1275, 410, 316,1891,1892, 100,1893, +1894,1123, 48,1161,1124,1025,1895, 633, 901,1276,1896,1897, 115, 816,1898, 317, +1899, 694,1900, 909, 734,1424, 572, 866,1425, 691, 85, 524,1010, 543, 394, 841, +1901,1902,1903,1026,1904,1905,1906,1907,1908,1909, 30, 451, 651, 988, 310,1910, +1911,1426, 810,1216, 93,1912,1913,1277,1217,1914, 858, 759, 45, 58, 181, 610, + 269,1915,1916, 131,1062, 551, 443,1000, 821,1427, 957, 895,1086,1917,1918, 375, +1919, 359,1920, 687,1921, 822,1922, 293,1923,1924, 40, 662, 118, 692, 29, 939, + 887, 640, 482, 174,1925, 69,1162, 728,1428, 910,1926,1278,1218,1279, 386, 870, + 217, 854,1163, 823,1927,1928,1929,1930, 834,1931, 78,1932, 859,1933,1063,1934, +1935,1936,1937, 438,1164, 208, 
595,1938,1939,1940,1941,1219,1125,1942, 280, 888, +1429,1430,1220,1431,1943,1944,1945,1946,1947,1280, 150, 510,1432,1948,1949,1950, +1951,1952,1953,1954,1011,1087,1955,1433,1043,1956, 881,1957, 614, 958,1064,1065, +1221,1958, 638,1001, 860, 967, 896,1434, 989, 492, 553,1281,1165,1959,1282,1002, +1283,1222,1960,1961,1962,1963, 36, 383, 228, 753, 247, 454,1964, 876, 678,1965, +1966,1284, 126, 464, 490, 835, 136, 672, 529, 940,1088,1435, 473,1967,1968, 467, + 50, 390, 227, 587, 279, 378, 598, 792, 968, 240, 151, 160, 849, 882,1126,1285, + 639,1044, 133, 140, 288, 360, 811, 563,1027, 561, 142, 523,1969,1970,1971, 7, + 103, 296, 439, 407, 506, 634, 990,1972,1973,1974,1975, 645,1976,1977,1978,1979, +1980,1981, 236,1982,1436,1983,1984,1089, 192, 828, 618, 518,1166, 333,1127,1985, + 818,1223,1986,1987,1988,1989,1990,1991,1992,1993, 342,1128,1286, 746, 842,1994, +1995, 560, 223,1287, 98, 8, 189, 650, 978,1288,1996,1437,1997, 17, 345, 250, + 423, 277, 234, 512, 226, 97, 289, 42, 167,1998, 201,1999,2000, 843, 836, 824, + 532, 338, 783,1090, 182, 576, 436,1438,1439, 527, 500,2001, 947, 889,2002,2003, +2004,2005, 262, 600, 314, 447,2006, 547,2007, 693, 738,1129,2008, 71,1440, 745, + 619, 688,2009, 829,2010,2011, 147,2012, 33, 948,2013,2014, 74, 224,2015, 61, + 191, 918, 399, 637,2016,1028,1130, 257, 902,2017,2018,2019,2020,2021,2022,2023, +2024,2025,2026, 837,2027,2028,2029,2030, 179, 874, 591, 52, 724, 246,2031,2032, +2033,2034,1167, 969,2035,1289, 630, 605, 911,1091,1168,2036,2037,2038,1441, 912, +2039, 623,2040,2041, 253,1169,1290,2042,1442, 146, 620, 611, 577, 433,2043,1224, + 719,1170, 959, 440, 437, 534, 84, 388, 480,1131, 159, 220, 198, 679,2044,1012, + 819,1066,1443, 113,1225, 194, 318,1003,1029,2045,2046,2047,2048,1067,2049,2050, +2051,2052,2053, 59, 913, 112,2054, 632,2055, 455, 144, 739,1291,2056, 273, 681, + 499,2057, 448,2058,2059, 760,2060,2061, 970, 384, 169, 245,1132,2062,2063, 414, +1444,2064,2065, 41, 235,2066, 157, 252, 877, 568, 919, 789, 580,2067, 725,2068, 
+2069,1292,2070,2071,1445,2072,1446,2073,2074, 55, 588, 66,1447, 271,1092,2075, +1226,2076, 960,1013, 372,2077,2078,2079,2080,2081,1293,2082,2083,2084,2085, 850, +2086,2087,2088,2089,2090, 186,2091,1068, 180,2092,2093,2094, 109,1227, 522, 606, +2095, 867,1448,1093, 991,1171, 926, 353,1133,2096, 581,2097,2098,2099,1294,1449, +1450,2100, 596,1172,1014,1228,2101,1451,1295,1173,1229,2102,2103,1296,1134,1452, + 949,1135,2104,2105,1094,1453,1454,1455,2106,1095,2107,2108,2109,2110,2111,2112, +2113,2114,2115,2116,2117, 804,2118,2119,1230,1231, 805,1456, 405,1136,2120,2121, +2122,2123,2124, 720, 701,1297, 992,1457, 927,1004,2125,2126,2127,2128,2129,2130, + 22, 417,2131, 303,2132, 385,2133, 971, 520, 513,2134,1174, 73,1096, 231, 274, + 962,1458, 673,2135,1459,2136, 152,1137,2137,2138,2139,2140,1005,1138,1460,1139, +2141,2142,2143,2144, 11, 374, 844,2145, 154,1232, 46,1461,2146, 838, 830, 721, +1233, 106,2147, 90, 428, 462, 578, 566,1175, 352,2148,2149, 538,1234, 124,1298, +2150,1462, 761, 565,2151, 686,2152, 649,2153, 72, 173,2154, 460, 415,2155,1463, +2156,1235, 305,2157,2158,2159,2160,2161,2162, 579,2163,2164,2165,2166,2167, 747, +2168,2169,2170,2171,1464, 669,2172,2173,2174,2175,2176,1465,2177, 23, 530, 285, +2178, 335, 729,2179, 397,2180,2181,2182,1030,2183,2184, 698,2185,2186, 325,2187, +2188, 369,2189, 799,1097,1015, 348,2190,1069, 680,2191, 851,1466,2192,2193, 10, +2194, 613, 424,2195, 979, 108, 449, 589, 27, 172, 81,1031, 80, 774, 281, 350, +1032, 525, 301, 582,1176,2196, 674,1045,2197,2198,1467, 730, 762,2199,2200,2201, +2202,1468,2203, 993,2204,2205, 266,1070, 963,1140,2206,2207,2208, 664,1098, 972, +2209,2210,2211,1177,1469,1470, 871,2212,2213,2214,2215,2216,1471,2217,2218,2219, +2220,2221,2222,2223,2224,2225,2226,2227,1472,1236,2228,2229,2230,2231,2232,2233, +2234,2235,1299,2236,2237, 200,2238, 477, 373,2239,2240, 731, 825, 777,2241,2242, +2243, 521, 486, 548,2244,2245,2246,1473,1300, 53, 549, 137, 875, 76, 158,2247, +1301,1474, 469, 396,1016, 278, 712,2248, 321, 
442, 503, 767, 744, 941,1237,1178, +1475,2249, 82, 178,1141,1179, 973,2250,1302,2251, 297,2252,2253, 570,2254,2255, +2256, 18, 450, 206,2257, 290, 292,1142,2258, 511, 162, 99, 346, 164, 735,2259, +1476,1477, 4, 554, 343, 798,1099,2260,1100,2261, 43, 171,1303, 139, 215,2262, +2263, 717, 775,2264,1033, 322, 216,2265, 831,2266, 149,2267,1304,2268,2269, 702, +1238, 135, 845, 347, 309,2270, 484,2271, 878, 655, 238,1006,1478,2272, 67,2273, + 295,2274,2275, 461,2276, 478, 942, 412,2277,1034,2278,2279,2280, 265,2281, 541, +2282,2283,2284,2285,2286, 70, 852,1071,2287,2288,2289,2290, 21, 56, 509, 117, + 432,2291,2292, 331, 980, 552,1101, 148, 284, 105, 393,1180,1239, 755,2293, 187, +2294,1046,1479,2295, 340,2296, 63,1047, 230,2297,2298,1305, 763,1306, 101, 800, + 808, 494,2299,2300,2301, 903,2302, 37,1072, 14, 5,2303, 79, 675,2304, 312, +2305,2306,2307,2308,2309,1480, 6,1307,2310,2311,2312, 1, 470, 35, 24, 229, +2313, 695, 210, 86, 778, 15, 784, 592, 779, 32, 77, 855, 964,2314, 259,2315, + 501, 380,2316,2317, 83, 981, 153, 689,1308,1481,1482,1483,2318,2319, 716,1484, +2320,2321,2322,2323,2324,2325,1485,2326,2327, 128, 57, 68, 261,1048, 211, 170, +1240, 31,2328, 51, 435, 742,2329,2330,2331, 635,2332, 264, 456,2333,2334,2335, + 425,2336,1486, 143, 507, 263, 943,2337, 363, 920,1487, 256,1488,1102, 243, 601, +1489,2338,2339,2340,2341,2342,2343,2344, 861,2345,2346,2347,2348,2349,2350, 395, +2351,1490,1491, 62, 535, 166, 225,2352,2353, 668, 419,1241, 138, 604, 928,2354, +1181,2355,1492,1493,2356,2357,2358,1143,2359, 696,2360, 387, 307,1309, 682, 476, +2361,2362, 332, 12, 222, 156,2363, 232,2364, 641, 276, 656, 517,1494,1495,1035, + 416, 736,1496,2365,1017, 586,2366,2367,2368,1497,2369, 242,2370,2371,2372,1498, +2373, 965, 713,2374,2375,2376,2377, 740, 982,1499, 944,1500,1007,2378,2379,1310, +1501,2380,2381,2382, 785, 329,2383,2384,1502,2385,2386,2387, 932,2388,1503,2389, +2390,2391,2392,1242,2393,2394,2395,2396,2397, 994, 950,2398,2399,2400,2401,1504, 
+1311,2402,2403,2404,2405,1049, 749,2406,2407, 853, 718,1144,1312,2408,1182,1505, +2409,2410, 255, 516, 479, 564, 550, 214,1506,1507,1313, 413, 239, 444, 339,1145, +1036,1508,1509,1314,1037,1510,1315,2411,1511,2412,2413,2414, 176, 703, 497, 624, + 593, 921, 302,2415, 341, 165,1103,1512,2416,1513,2417,2418,2419, 376,2420, 700, +2421,2422,2423, 258, 768,1316,2424,1183,2425, 995, 608,2426,2427,2428,2429, 221, +2430,2431,2432,2433,2434,2435,2436,2437, 195, 323, 726, 188, 897, 983,1317, 377, + 644,1050, 879,2438, 452,2439,2440,2441,2442,2443,2444, 914,2445,2446,2447,2448, + 915, 489,2449,1514,1184,2450,2451, 515, 64, 427, 495,2452, 583,2453, 483, 485, +1038, 562, 213,1515, 748, 666,2454,2455,2456,2457, 334,2458, 780, 996,1008, 705, +1243,2459,2460,2461,2462,2463, 114,2464, 493,1146, 366, 163,1516, 961,1104,2465, + 291,2466,1318,1105,2467,1517, 365,2468, 355, 951,1244,2469,1319,2470, 631,2471, +2472, 218,1320, 364, 320, 756,1518,1519,1321,1520,1322,2473,2474,2475,2476, 997, +2477,2478,2479,2480, 665,1185,2481, 916,1521,2482,2483,2484, 584, 684,2485,2486, + 797,2487,1051,1186,2488,2489,2490,1522,2491,2492, 370,2493,1039,1187, 65,2494, + 434, 205, 463,1188,2495, 125, 812, 391, 402, 826, 699, 286, 398, 155, 781, 771, + 585,2496, 590, 505,1073,2497, 599, 244, 219, 917,1018, 952, 646,1523,2498,1323, +2499,2500, 49, 984, 354, 741,2501, 625,2502,1324,2503,1019, 190, 357, 757, 491, + 95, 782, 868,2504,2505,2506,2507,2508,2509, 134,1524,1074, 422,1525, 898,2510, + 161,2511,2512,2513,2514, 769,2515,1526,2516,2517, 411,1325,2518, 472,1527,2519, +2520,2521,2522,2523,2524, 985,2525,2526,2527,2528,2529,2530, 764,2531,1245,2532, +2533, 25, 204, 311,2534, 496,2535,1052,2536,2537,2538,2539,2540,2541,2542, 199, + 704, 504, 468, 758, 657,1528, 196, 44, 839,1246, 272, 750,2543, 765, 862,2544, +2545,1326,2546, 132, 615, 933,2547, 732,2548,2549,2550,1189,1529,2551, 283,1247, +1053, 607, 929,2552,2553,2554, 930, 183, 872, 616,1040,1147,2555,1148,1020, 441, + 249,1075,2556,2557,2558, 466, 
743,2559,2560,2561, 92, 514, 426, 420, 526,2562, +2563,2564,2565,2566,2567,2568, 185,2569,2570,2571,2572, 776,1530, 658,2573, 362, +2574, 361, 922,1076, 793,2575,2576,2577,2578,2579,2580,1531, 251,2581,2582,2583, +2584,1532, 54, 612, 237,1327,2585,2586, 275, 408, 647, 111,2587,1533,1106, 465, + 3, 458, 9, 38,2588, 107, 110, 890, 209, 26, 737, 498,2589,1534,2590, 431, + 202, 88,1535, 356, 287,1107, 660,1149,2591, 381,1536, 986,1150, 445,1248,1151, + 974,2592,2593, 846,2594, 446, 953, 184,1249,1250, 727,2595, 923, 193, 883,2596, +2597,2598, 102, 324, 539, 817,2599, 421,1041,2600, 832,2601, 94, 175, 197, 406, +2602, 459,2603,2604,2605,2606,2607, 330, 555,2608,2609,2610, 706,1108, 389,2611, +2612,2613,2614, 233,2615, 833, 558, 931, 954,1251,2616,2617,1537, 546,2618,2619, +1009,2620,2621,2622,1538, 690,1328,2623, 955,2624,1539,2625,2626, 772,2627,2628, +2629,2630,2631, 924, 648, 863, 603,2632,2633, 934,1540, 864, 865,2634, 642,1042, + 670,1190,2635,2636,2637,2638, 168,2639, 652, 873, 542,1054,1541,2640,2641,2642, # 512, 256 +) + diff --git a/thesisenv/lib/python3.6/site-packages/chardet/euckrprober.py b/thesisenv/lib/python3.6/site-packages/chardet/euckrprober.py new file mode 100644 index 0000000..345a060 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/chardet/euckrprober.py @@ -0,0 +1,47 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. 
+# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .mbcharsetprober import MultiByteCharSetProber +from .codingstatemachine import CodingStateMachine +from .chardistribution import EUCKRDistributionAnalysis +from .mbcssm import EUCKR_SM_MODEL + + +class EUCKRProber(MultiByteCharSetProber): + def __init__(self): + super(EUCKRProber, self).__init__() + self.coding_sm = CodingStateMachine(EUCKR_SM_MODEL) + self.distribution_analyzer = EUCKRDistributionAnalysis() + self.reset() + + @property + def charset_name(self): + return "EUC-KR" + + @property + def language(self): + return "Korean" diff --git a/thesisenv/lib/python3.6/site-packages/chardet/euctwfreq.py b/thesisenv/lib/python3.6/site-packages/chardet/euctwfreq.py new file mode 100644 index 0000000..ed7a995 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/chardet/euctwfreq.py @@ -0,0 +1,387 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. 
+# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +# EUCTW frequency table +# Converted from big5 work +# by Taiwan's Mandarin Promotion Council +# + +# 128 --> 0.42261 +# 256 --> 0.57851 +# 512 --> 0.74851 +# 1024 --> 0.89384 +# 2048 --> 0.97583 +# +# Idea Distribution Ratio = 0.74851/(1-0.74851) =2.98 +# Random Distribution Ration = 512/(5401-512)=0.105 +# +# Typical Distribution Ratio about 25% of Ideal one, still much higher than RDR + +EUCTW_TYPICAL_DISTRIBUTION_RATIO = 0.75 + +# Char to FreqOrder table , +EUCTW_TABLE_SIZE = 5376 + +EUCTW_CHAR_TO_FREQ_ORDER = ( + 1,1800,1506, 255,1431, 198, 9, 82, 6,7310, 177, 202,3615,1256,2808, 110, # 2742 +3735, 33,3241, 261, 76, 44,2113, 16,2931,2184,1176, 659,3868, 26,3404,2643, # 2758 +1198,3869,3313,4060, 410,2211, 302, 590, 361,1963, 8, 204, 58,4296,7311,1931, # 2774 + 63,7312,7313, 317,1614, 75, 222, 159,4061,2412,1480,7314,3500,3068, 224,2809, # 2790 +3616, 3, 10,3870,1471, 29,2774,1135,2852,1939, 873, 130,3242,1123, 312,7315, # 2806 +4297,2051, 507, 252, 682,7316, 142,1914, 124, 206,2932, 34,3501,3173, 64, 604, # 2822 +7317,2494,1976,1977, 155,1990, 645, 641,1606,7318,3405, 337, 72, 406,7319, 80, # 2838 + 630, 
238,3174,1509, 263, 939,1092,2644, 756,1440,1094,3406, 449, 69,2969, 591, # 2854 + 179,2095, 471, 115,2034,1843, 60, 50,2970, 134, 806,1868, 734,2035,3407, 180, # 2870 + 995,1607, 156, 537,2893, 688,7320, 319,1305, 779,2144, 514,2374, 298,4298, 359, # 2886 +2495, 90,2707,1338, 663, 11, 906,1099,2545, 20,2436, 182, 532,1716,7321, 732, # 2902 +1376,4062,1311,1420,3175, 25,2312,1056, 113, 399, 382,1949, 242,3408,2467, 529, # 2918 +3243, 475,1447,3617,7322, 117, 21, 656, 810,1297,2295,2329,3502,7323, 126,4063, # 2934 + 706, 456, 150, 613,4299, 71,1118,2036,4064, 145,3069, 85, 835, 486,2114,1246, # 2950 +1426, 428, 727,1285,1015, 800, 106, 623, 303,1281,7324,2127,2354, 347,3736, 221, # 2966 +3503,3110,7325,1955,1153,4065, 83, 296,1199,3070, 192, 624, 93,7326, 822,1897, # 2982 +2810,3111, 795,2064, 991,1554,1542,1592, 27, 43,2853, 859, 139,1456, 860,4300, # 2998 + 437, 712,3871, 164,2392,3112, 695, 211,3017,2096, 195,3872,1608,3504,3505,3618, # 3014 +3873, 234, 811,2971,2097,3874,2229,1441,3506,1615,2375, 668,2076,1638, 305, 228, # 3030 +1664,4301, 467, 415,7327, 262,2098,1593, 239, 108, 300, 200,1033, 512,1247,2077, # 3046 +7328,7329,2173,3176,3619,2673, 593, 845,1062,3244, 88,1723,2037,3875,1950, 212, # 3062 + 266, 152, 149, 468,1898,4066,4302, 77, 187,7330,3018, 37, 5,2972,7331,3876, # 3078 +7332,7333, 39,2517,4303,2894,3177,2078, 55, 148, 74,4304, 545, 483,1474,1029, # 3094 +1665, 217,1869,1531,3113,1104,2645,4067, 24, 172,3507, 900,3877,3508,3509,4305, # 3110 + 32,1408,2811,1312, 329, 487,2355,2247,2708, 784,2674, 4,3019,3314,1427,1788, # 3126 + 188, 109, 499,7334,3620,1717,1789, 888,1217,3020,4306,7335,3510,7336,3315,1520, # 3142 +3621,3878, 196,1034, 775,7337,7338, 929,1815, 249, 439, 38,7339,1063,7340, 794, # 3158 +3879,1435,2296, 46, 178,3245,2065,7341,2376,7342, 214,1709,4307, 804, 35, 707, # 3174 + 324,3622,1601,2546, 140, 459,4068,7343,7344,1365, 839, 272, 978,2257,2572,3409, # 3190 +2128,1363,3623,1423, 697, 100,3071, 48, 70,1231, 
495,3114,2193,7345,1294,7346, # 3206 +2079, 462, 586,1042,3246, 853, 256, 988, 185,2377,3410,1698, 434,1084,7347,3411, # 3222 + 314,2615,2775,4308,2330,2331, 569,2280, 637,1816,2518, 757,1162,1878,1616,3412, # 3238 + 287,1577,2115, 768,4309,1671,2854,3511,2519,1321,3737, 909,2413,7348,4069, 933, # 3254 +3738,7349,2052,2356,1222,4310, 765,2414,1322, 786,4311,7350,1919,1462,1677,2895, # 3270 +1699,7351,4312,1424,2437,3115,3624,2590,3316,1774,1940,3413,3880,4070, 309,1369, # 3286 +1130,2812, 364,2230,1653,1299,3881,3512,3882,3883,2646, 525,1085,3021, 902,2000, # 3302 +1475, 964,4313, 421,1844,1415,1057,2281, 940,1364,3116, 376,4314,4315,1381, 7, # 3318 +2520, 983,2378, 336,1710,2675,1845, 321,3414, 559,1131,3022,2742,1808,1132,1313, # 3334 + 265,1481,1857,7352, 352,1203,2813,3247, 167,1089, 420,2814, 776, 792,1724,3513, # 3350 +4071,2438,3248,7353,4072,7354, 446, 229, 333,2743, 901,3739,1200,1557,4316,2647, # 3366 +1920, 395,2744,2676,3740,4073,1835, 125, 916,3178,2616,4317,7355,7356,3741,7357, # 3382 +7358,7359,4318,3117,3625,1133,2547,1757,3415,1510,2313,1409,3514,7360,2145, 438, # 3398 +2591,2896,2379,3317,1068, 958,3023, 461, 311,2855,2677,4074,1915,3179,4075,1978, # 3414 + 383, 750,2745,2617,4076, 274, 539, 385,1278,1442,7361,1154,1964, 384, 561, 210, # 3430 + 98,1295,2548,3515,7362,1711,2415,1482,3416,3884,2897,1257, 129,7363,3742, 642, # 3446 + 523,2776,2777,2648,7364, 141,2231,1333, 68, 176, 441, 876, 907,4077, 603,2592, # 3462 + 710, 171,3417, 404, 549, 18,3118,2393,1410,3626,1666,7365,3516,4319,2898,4320, # 3478 +7366,2973, 368,7367, 146, 366, 99, 871,3627,1543, 748, 807,1586,1185, 22,2258, # 3494 + 379,3743,3180,7368,3181, 505,1941,2618,1991,1382,2314,7369, 380,2357, 218, 702, # 3510 +1817,1248,3418,3024,3517,3318,3249,7370,2974,3628, 930,3250,3744,7371, 59,7372, # 3526 + 585, 601,4078, 497,3419,1112,1314,4321,1801,7373,1223,1472,2174,7374, 749,1836, # 3542 + 690,1899,3745,1772,3885,1476, 429,1043,1790,2232,2116, 917,4079, 447,1086,1629, # 3558 +7375, 
556,7376,7377,2020,1654, 844,1090, 105, 550, 966,1758,2815,1008,1782, 686, # 3574 +1095,7378,2282, 793,1602,7379,3518,2593,4322,4080,2933,2297,4323,3746, 980,2496, # 3590 + 544, 353, 527,4324, 908,2678,2899,7380, 381,2619,1942,1348,7381,1341,1252, 560, # 3606 +3072,7382,3420,2856,7383,2053, 973, 886,2080, 143,4325,7384,7385, 157,3886, 496, # 3622 +4081, 57, 840, 540,2038,4326,4327,3421,2117,1445, 970,2259,1748,1965,2081,4082, # 3638 +3119,1234,1775,3251,2816,3629, 773,1206,2129,1066,2039,1326,3887,1738,1725,4083, # 3654 + 279,3120, 51,1544,2594, 423,1578,2130,2066, 173,4328,1879,7386,7387,1583, 264, # 3670 + 610,3630,4329,2439, 280, 154,7388,7389,7390,1739, 338,1282,3073, 693,2857,1411, # 3686 +1074,3747,2440,7391,4330,7392,7393,1240, 952,2394,7394,2900,1538,2679, 685,1483, # 3702 +4084,2468,1436, 953,4085,2054,4331, 671,2395, 79,4086,2441,3252, 608, 567,2680, # 3718 +3422,4087,4088,1691, 393,1261,1791,2396,7395,4332,7396,7397,7398,7399,1383,1672, # 3734 +3748,3182,1464, 522,1119, 661,1150, 216, 675,4333,3888,1432,3519, 609,4334,2681, # 3750 +2397,7400,7401,7402,4089,3025, 0,7403,2469, 315, 231,2442, 301,3319,4335,2380, # 3766 +7404, 233,4090,3631,1818,4336,4337,7405, 96,1776,1315,2082,7406, 257,7407,1809, # 3782 +3632,2709,1139,1819,4091,2021,1124,2163,2778,1777,2649,7408,3074, 363,1655,3183, # 3798 +7409,2975,7410,7411,7412,3889,1567,3890, 718, 103,3184, 849,1443, 341,3320,2934, # 3814 +1484,7413,1712, 127, 67, 339,4092,2398, 679,1412, 821,7414,7415, 834, 738, 351, # 3830 +2976,2146, 846, 235,1497,1880, 418,1992,3749,2710, 186,1100,2147,2746,3520,1545, # 3846 +1355,2935,2858,1377, 583,3891,4093,2573,2977,7416,1298,3633,1078,2549,3634,2358, # 3862 + 78,3750,3751, 267,1289,2099,2001,1594,4094, 348, 369,1274,2194,2175,1837,4338, # 3878 +1820,2817,3635,2747,2283,2002,4339,2936,2748, 144,3321, 882,4340,3892,2749,3423, # 3894 +4341,2901,7417,4095,1726, 320,7418,3893,3026, 788,2978,7419,2818,1773,1327,2859, # 3910 +3894,2819,7420,1306,4342,2003,1700,3752,3521,2359,2650, 
787,2022, 506, 824,3636, # 3926 + 534, 323,4343,1044,3322,2023,1900, 946,3424,7421,1778,1500,1678,7422,1881,4344, # 3942 + 165, 243,4345,3637,2521, 123, 683,4096, 764,4346, 36,3895,1792, 589,2902, 816, # 3958 + 626,1667,3027,2233,1639,1555,1622,3753,3896,7423,3897,2860,1370,1228,1932, 891, # 3974 +2083,2903, 304,4097,7424, 292,2979,2711,3522, 691,2100,4098,1115,4347, 118, 662, # 3990 +7425, 611,1156, 854,2381,1316,2861, 2, 386, 515,2904,7426,7427,3253, 868,2234, # 4006 +1486, 855,2651, 785,2212,3028,7428,1040,3185,3523,7429,3121, 448,7430,1525,7431, # 4022 +2164,4348,7432,3754,7433,4099,2820,3524,3122, 503, 818,3898,3123,1568, 814, 676, # 4038 +1444, 306,1749,7434,3755,1416,1030, 197,1428, 805,2821,1501,4349,7435,7436,7437, # 4054 +1993,7438,4350,7439,7440,2195, 13,2779,3638,2980,3124,1229,1916,7441,3756,2131, # 4070 +7442,4100,4351,2399,3525,7443,2213,1511,1727,1120,7444,7445, 646,3757,2443, 307, # 4086 +7446,7447,1595,3186,7448,7449,7450,3639,1113,1356,3899,1465,2522,2523,7451, 519, # 4102 +7452, 128,2132, 92,2284,1979,7453,3900,1512, 342,3125,2196,7454,2780,2214,1980, # 4118 +3323,7455, 290,1656,1317, 789, 827,2360,7456,3758,4352, 562, 581,3901,7457, 401, # 4134 +4353,2248, 94,4354,1399,2781,7458,1463,2024,4355,3187,1943,7459, 828,1105,4101, # 4150 +1262,1394,7460,4102, 605,4356,7461,1783,2862,7462,2822, 819,2101, 578,2197,2937, # 4166 +7463,1502, 436,3254,4103,3255,2823,3902,2905,3425,3426,7464,2712,2315,7465,7466, # 4182 +2332,2067, 23,4357, 193, 826,3759,2102, 699,1630,4104,3075, 390,1793,1064,3526, # 4198 +7467,1579,3076,3077,1400,7468,4105,1838,1640,2863,7469,4358,4359, 137,4106, 598, # 4214 +3078,1966, 780, 104, 974,2938,7470, 278, 899, 253, 402, 572, 504, 493,1339,7471, # 4230 +3903,1275,4360,2574,2550,7472,3640,3029,3079,2249, 565,1334,2713, 863, 41,7473, # 4246 +7474,4361,7475,1657,2333, 19, 463,2750,4107, 606,7476,2981,3256,1087,2084,1323, # 4262 +2652,2982,7477,1631,1623,1750,4108,2682,7478,2864, 791,2714,2653,2334, 232,2416, # 4278 
+7479,2983,1498,7480,2654,2620, 755,1366,3641,3257,3126,2025,1609, 119,1917,3427, # 4294 + 862,1026,4109,7481,3904,3760,4362,3905,4363,2260,1951,2470,7482,1125, 817,4110, # 4310 +4111,3906,1513,1766,2040,1487,4112,3030,3258,2824,3761,3127,7483,7484,1507,7485, # 4326 +2683, 733, 40,1632,1106,2865, 345,4113, 841,2524, 230,4364,2984,1846,3259,3428, # 4342 +7486,1263, 986,3429,7487, 735, 879, 254,1137, 857, 622,1300,1180,1388,1562,3907, # 4358 +3908,2939, 967,2751,2655,1349, 592,2133,1692,3324,2985,1994,4114,1679,3909,1901, # 4374 +2185,7488, 739,3642,2715,1296,1290,7489,4115,2198,2199,1921,1563,2595,2551,1870, # 4390 +2752,2986,7490, 435,7491, 343,1108, 596, 17,1751,4365,2235,3430,3643,7492,4366, # 4406 + 294,3527,2940,1693, 477, 979, 281,2041,3528, 643,2042,3644,2621,2782,2261,1031, # 4422 +2335,2134,2298,3529,4367, 367,1249,2552,7493,3530,7494,4368,1283,3325,2004, 240, # 4438 +1762,3326,4369,4370, 836,1069,3128, 474,7495,2148,2525, 268,3531,7496,3188,1521, # 4454 +1284,7497,1658,1546,4116,7498,3532,3533,7499,4117,3327,2684,1685,4118, 961,1673, # 4470 +2622, 190,2005,2200,3762,4371,4372,7500, 570,2497,3645,1490,7501,4373,2623,3260, # 4486 +1956,4374, 584,1514, 396,1045,1944,7502,4375,1967,2444,7503,7504,4376,3910, 619, # 4502 +7505,3129,3261, 215,2006,2783,2553,3189,4377,3190,4378, 763,4119,3763,4379,7506, # 4518 +7507,1957,1767,2941,3328,3646,1174, 452,1477,4380,3329,3130,7508,2825,1253,2382, # 4534 +2186,1091,2285,4120, 492,7509, 638,1169,1824,2135,1752,3911, 648, 926,1021,1324, # 4550 +4381, 520,4382, 997, 847,1007, 892,4383,3764,2262,1871,3647,7510,2400,1784,4384, # 4566 +1952,2942,3080,3191,1728,4121,2043,3648,4385,2007,1701,3131,1551, 30,2263,4122, # 4582 +7511,2026,4386,3534,7512, 501,7513,4123, 594,3431,2165,1821,3535,3432,3536,3192, # 4598 + 829,2826,4124,7514,1680,3132,1225,4125,7515,3262,4387,4126,3133,2336,7516,4388, # 4614 +4127,7517,3912,3913,7518,1847,2383,2596,3330,7519,4389, 374,3914, 652,4128,4129, # 4630 + 375,1140, 
798,7520,7521,7522,2361,4390,2264, 546,1659, 138,3031,2445,4391,7523, # 4646 +2250, 612,1848, 910, 796,3765,1740,1371, 825,3766,3767,7524,2906,2554,7525, 692, # 4662 + 444,3032,2624, 801,4392,4130,7526,1491, 244,1053,3033,4131,4132, 340,7527,3915, # 4678 +1041,2987, 293,1168, 87,1357,7528,1539, 959,7529,2236, 721, 694,4133,3768, 219, # 4694 +1478, 644,1417,3331,2656,1413,1401,1335,1389,3916,7530,7531,2988,2362,3134,1825, # 4710 + 730,1515, 184,2827, 66,4393,7532,1660,2943, 246,3332, 378,1457, 226,3433, 975, # 4726 +3917,2944,1264,3537, 674, 696,7533, 163,7534,1141,2417,2166, 713,3538,3333,4394, # 4742 +3918,7535,7536,1186, 15,7537,1079,1070,7538,1522,3193,3539, 276,1050,2716, 758, # 4758 +1126, 653,2945,3263,7539,2337, 889,3540,3919,3081,2989, 903,1250,4395,3920,3434, # 4774 +3541,1342,1681,1718, 766,3264, 286, 89,2946,3649,7540,1713,7541,2597,3334,2990, # 4790 +7542,2947,2215,3194,2866,7543,4396,2498,2526, 181, 387,1075,3921, 731,2187,3335, # 4806 +7544,3265, 310, 313,3435,2299, 770,4134, 54,3034, 189,4397,3082,3769,3922,7545, # 4822 +1230,1617,1849, 355,3542,4135,4398,3336, 111,4136,3650,1350,3135,3436,3035,4137, # 4838 +2149,3266,3543,7546,2784,3923,3924,2991, 722,2008,7547,1071, 247,1207,2338,2471, # 4854 +1378,4399,2009, 864,1437,1214,4400, 373,3770,1142,2216, 667,4401, 442,2753,2555, # 4870 +3771,3925,1968,4138,3267,1839, 837, 170,1107, 934,1336,1882,7548,7549,2118,4139, # 4886 +2828, 743,1569,7550,4402,4140, 582,2384,1418,3437,7551,1802,7552, 357,1395,1729, # 4902 +3651,3268,2418,1564,2237,7553,3083,3772,1633,4403,1114,2085,4141,1532,7554, 482, # 4918 +2446,4404,7555,7556,1492, 833,1466,7557,2717,3544,1641,2829,7558,1526,1272,3652, # 4934 +4142,1686,1794, 416,2556,1902,1953,1803,7559,3773,2785,3774,1159,2316,7560,2867, # 4950 +4405,1610,1584,3036,2419,2754, 443,3269,1163,3136,7561,7562,3926,7563,4143,2499, # 4966 +3037,4406,3927,3137,2103,1647,3545,2010,1872,4144,7564,4145, 431,3438,7565, 250, # 4982 + 97, 81,4146,7566,1648,1850,1558, 160, 848,7567, 866, 
740,1694,7568,2201,2830, # 4998 +3195,4147,4407,3653,1687, 950,2472, 426, 469,3196,3654,3655,3928,7569,7570,1188, # 5014 + 424,1995, 861,3546,4148,3775,2202,2685, 168,1235,3547,4149,7571,2086,1674,4408, # 5030 +3337,3270, 220,2557,1009,7572,3776, 670,2992, 332,1208, 717,7573,7574,3548,2447, # 5046 +3929,3338,7575, 513,7576,1209,2868,3339,3138,4409,1080,7577,7578,7579,7580,2527, # 5062 +3656,3549, 815,1587,3930,3931,7581,3550,3439,3777,1254,4410,1328,3038,1390,3932, # 5078 +1741,3933,3778,3934,7582, 236,3779,2448,3271,7583,7584,3657,3780,1273,3781,4411, # 5094 +7585, 308,7586,4412, 245,4413,1851,2473,1307,2575, 430, 715,2136,2449,7587, 270, # 5110 + 199,2869,3935,7588,3551,2718,1753, 761,1754, 725,1661,1840,4414,3440,3658,7589, # 5126 +7590, 587, 14,3272, 227,2598, 326, 480,2265, 943,2755,3552, 291, 650,1883,7591, # 5142 +1702,1226, 102,1547, 62,3441, 904,4415,3442,1164,4150,7592,7593,1224,1548,2756, # 5158 + 391, 498,1493,7594,1386,1419,7595,2055,1177,4416, 813, 880,1081,2363, 566,1145, # 5174 +4417,2286,1001,1035,2558,2599,2238, 394,1286,7596,7597,2068,7598, 86,1494,1730, # 5190 +3936, 491,1588, 745, 897,2948, 843,3340,3937,2757,2870,3273,1768, 998,2217,2069, # 5206 + 397,1826,1195,1969,3659,2993,3341, 284,7599,3782,2500,2137,2119,1903,7600,3938, # 5222 +2150,3939,4151,1036,3443,1904, 114,2559,4152, 209,1527,7601,7602,2949,2831,2625, # 5238 +2385,2719,3139, 812,2560,7603,3274,7604,1559, 737,1884,3660,1210, 885, 28,2686, # 5254 +3553,3783,7605,4153,1004,1779,4418,7606, 346,1981,2218,2687,4419,3784,1742, 797, # 5270 +1642,3940,1933,1072,1384,2151, 896,3941,3275,3661,3197,2871,3554,7607,2561,1958, # 5286 +4420,2450,1785,7608,7609,7610,3942,4154,1005,1308,3662,4155,2720,4421,4422,1528, # 5302 +2600, 161,1178,4156,1982, 987,4423,1101,4157, 631,3943,1157,3198,2420,1343,1241, # 5318 +1016,2239,2562, 372, 877,2339,2501,1160, 555,1934, 911,3944,7611, 466,1170, 169, # 5334 +1051,2907,2688,3663,2474,2994,1182,2011,2563,1251,2626,7612, 992,2340,3444,1540, # 5350 
+2721,1201,2070,2401,1996,2475,7613,4424, 528,1922,2188,1503,1873,1570,2364,3342, # 5366 +3276,7614, 557,1073,7615,1827,3445,2087,2266,3140,3039,3084, 767,3085,2786,4425, # 5382 +1006,4158,4426,2341,1267,2176,3664,3199, 778,3945,3200,2722,1597,2657,7616,4427, # 5398 +7617,3446,7618,7619,7620,3277,2689,1433,3278, 131, 95,1504,3946, 723,4159,3141, # 5414 +1841,3555,2758,2189,3947,2027,2104,3665,7621,2995,3948,1218,7622,3343,3201,3949, # 5430 +4160,2576, 248,1634,3785, 912,7623,2832,3666,3040,3786, 654, 53,7624,2996,7625, # 5446 +1688,4428, 777,3447,1032,3950,1425,7626, 191, 820,2120,2833, 971,4429, 931,3202, # 5462 + 135, 664, 783,3787,1997, 772,2908,1935,3951,3788,4430,2909,3203, 282,2723, 640, # 5478 +1372,3448,1127, 922, 325,3344,7627,7628, 711,2044,7629,7630,3952,2219,2787,1936, # 5494 +3953,3345,2220,2251,3789,2300,7631,4431,3790,1258,3279,3954,3204,2138,2950,3955, # 5510 +3956,7632,2221, 258,3205,4432, 101,1227,7633,3280,1755,7634,1391,3281,7635,2910, # 5526 +2056, 893,7636,7637,7638,1402,4161,2342,7639,7640,3206,3556,7641,7642, 878,1325, # 5542 +1780,2788,4433, 259,1385,2577, 744,1183,2267,4434,7643,3957,2502,7644, 684,1024, # 5558 +4162,7645, 472,3557,3449,1165,3282,3958,3959, 322,2152, 881, 455,1695,1152,1340, # 5574 + 660, 554,2153,4435,1058,4436,4163, 830,1065,3346,3960,4437,1923,7646,1703,1918, # 5590 +7647, 932,2268, 122,7648,4438, 947, 677,7649,3791,2627, 297,1905,1924,2269,4439, # 5606 +2317,3283,7650,7651,4164,7652,4165, 84,4166, 112, 989,7653, 547,1059,3961, 701, # 5622 +3558,1019,7654,4167,7655,3450, 942, 639, 457,2301,2451, 993,2951, 407, 851, 494, # 5638 +4440,3347, 927,7656,1237,7657,2421,3348, 573,4168, 680, 921,2911,1279,1874, 285, # 5654 + 790,1448,1983, 719,2167,7658,7659,4441,3962,3963,1649,7660,1541, 563,7661,1077, # 5670 +7662,3349,3041,3451, 511,2997,3964,3965,3667,3966,1268,2564,3350,3207,4442,4443, # 5686 +7663, 535,1048,1276,1189,2912,2028,3142,1438,1373,2834,2952,1134,2012,7664,4169, # 5702 +1238,2578,3086,1259,7665, 
700,7666,2953,3143,3668,4170,7667,4171,1146,1875,1906, # 5718 +4444,2601,3967, 781,2422, 132,1589, 203, 147, 273,2789,2402, 898,1786,2154,3968, # 5734 +3969,7668,3792,2790,7669,7670,4445,4446,7671,3208,7672,1635,3793, 965,7673,1804, # 5750 +2690,1516,3559,1121,1082,1329,3284,3970,1449,3794, 65,1128,2835,2913,2759,1590, # 5766 +3795,7674,7675, 12,2658, 45, 976,2579,3144,4447, 517,2528,1013,1037,3209,7676, # 5782 +3796,2836,7677,3797,7678,3452,7679,2602, 614,1998,2318,3798,3087,2724,2628,7680, # 5798 +2580,4172, 599,1269,7681,1810,3669,7682,2691,3088, 759,1060, 489,1805,3351,3285, # 5814 +1358,7683,7684,2386,1387,1215,2629,2252, 490,7685,7686,4173,1759,2387,2343,7687, # 5830 +4448,3799,1907,3971,2630,1806,3210,4449,3453,3286,2760,2344, 874,7688,7689,3454, # 5846 +3670,1858, 91,2914,3671,3042,3800,4450,7690,3145,3972,2659,7691,3455,1202,1403, # 5862 +3801,2954,2529,1517,2503,4451,3456,2504,7692,4452,7693,2692,1885,1495,1731,3973, # 5878 +2365,4453,7694,2029,7695,7696,3974,2693,1216, 237,2581,4174,2319,3975,3802,4454, # 5894 +4455,2694,3560,3457, 445,4456,7697,7698,7699,7700,2761, 61,3976,3672,1822,3977, # 5910 +7701, 687,2045, 935, 925, 405,2660, 703,1096,1859,2725,4457,3978,1876,1367,2695, # 5926 +3352, 918,2105,1781,2476, 334,3287,1611,1093,4458, 564,3146,3458,3673,3353, 945, # 5942 +2631,2057,4459,7702,1925, 872,4175,7703,3459,2696,3089, 349,4176,3674,3979,4460, # 5958 +3803,4177,3675,2155,3980,4461,4462,4178,4463,2403,2046, 782,3981, 400, 251,4179, # 5974 +1624,7704,7705, 277,3676, 299,1265, 476,1191,3804,2121,4180,4181,1109, 205,7706, # 5990 +2582,1000,2156,3561,1860,7707,7708,7709,4464,7710,4465,2565, 107,2477,2157,3982, # 6006 +3460,3147,7711,1533, 541,1301, 158, 753,4182,2872,3562,7712,1696, 370,1088,4183, # 6022 +4466,3563, 579, 327, 440, 162,2240, 269,1937,1374,3461, 968,3043, 56,1396,3090, # 6038 +2106,3288,3354,7713,1926,2158,4467,2998,7714,3564,7715,7716,3677,4468,2478,7717, # 6054 
+2791,7718,1650,4469,7719,2603,7720,7721,3983,2661,3355,1149,3356,3984,3805,3985, # 6070 +7722,1076, 49,7723, 951,3211,3289,3290, 450,2837, 920,7724,1811,2792,2366,4184, # 6086 +1908,1138,2367,3806,3462,7725,3212,4470,1909,1147,1518,2423,4471,3807,7726,4472, # 6102 +2388,2604, 260,1795,3213,7727,7728,3808,3291, 708,7729,3565,1704,7730,3566,1351, # 6118 +1618,3357,2999,1886, 944,4185,3358,4186,3044,3359,4187,7731,3678, 422, 413,1714, # 6134 +3292, 500,2058,2345,4188,2479,7732,1344,1910, 954,7733,1668,7734,7735,3986,2404, # 6150 +4189,3567,3809,4190,7736,2302,1318,2505,3091, 133,3092,2873,4473, 629, 31,2838, # 6166 +2697,3810,4474, 850, 949,4475,3987,2955,1732,2088,4191,1496,1852,7737,3988, 620, # 6182 +3214, 981,1242,3679,3360,1619,3680,1643,3293,2139,2452,1970,1719,3463,2168,7738, # 6198 +3215,7739,7740,3361,1828,7741,1277,4476,1565,2047,7742,1636,3568,3093,7743, 869, # 6214 +2839, 655,3811,3812,3094,3989,3000,3813,1310,3569,4477,7744,7745,7746,1733, 558, # 6230 +4478,3681, 335,1549,3045,1756,4192,3682,1945,3464,1829,1291,1192, 470,2726,2107, # 6246 +2793, 913,1054,3990,7747,1027,7748,3046,3991,4479, 982,2662,3362,3148,3465,3216, # 6262 +3217,1946,2794,7749, 571,4480,7750,1830,7751,3570,2583,1523,2424,7752,2089, 984, # 6278 +4481,3683,1959,7753,3684, 852, 923,2795,3466,3685, 969,1519, 999,2048,2320,1705, # 6294 +7754,3095, 615,1662, 151, 597,3992,2405,2321,1049, 275,4482,3686,4193, 568,3687, # 6310 +3571,2480,4194,3688,7755,2425,2270, 409,3218,7756,1566,2874,3467,1002, 769,2840, # 6326 + 194,2090,3149,3689,2222,3294,4195, 628,1505,7757,7758,1763,2177,3001,3993, 521, # 6342 +1161,2584,1787,2203,2406,4483,3994,1625,4196,4197, 412, 42,3096, 464,7759,2632, # 6358 +4484,3363,1760,1571,2875,3468,2530,1219,2204,3814,2633,2140,2368,4485,4486,3295, # 6374 +1651,3364,3572,7760,7761,3573,2481,3469,7762,3690,7763,7764,2271,2091, 460,7765, # 6390 +4487,7766,3002, 962, 588,3574, 289,3219,2634,1116, 52,7767,3047,1796,7768,7769, # 6406 
+7770,1467,7771,1598,1143,3691,4198,1984,1734,1067,4488,1280,3365, 465,4489,1572, # 6422 + 510,7772,1927,2241,1812,1644,3575,7773,4490,3692,7774,7775,2663,1573,1534,7776, # 6438 +7777,4199, 536,1807,1761,3470,3815,3150,2635,7778,7779,7780,4491,3471,2915,1911, # 6454 +2796,7781,3296,1122, 377,3220,7782, 360,7783,7784,4200,1529, 551,7785,2059,3693, # 6470 +1769,2426,7786,2916,4201,3297,3097,2322,2108,2030,4492,1404, 136,1468,1479, 672, # 6486 +1171,3221,2303, 271,3151,7787,2762,7788,2049, 678,2727, 865,1947,4493,7789,2013, # 6502 +3995,2956,7790,2728,2223,1397,3048,3694,4494,4495,1735,2917,3366,3576,7791,3816, # 6518 + 509,2841,2453,2876,3817,7792,7793,3152,3153,4496,4202,2531,4497,2304,1166,1010, # 6534 + 552, 681,1887,7794,7795,2957,2958,3996,1287,1596,1861,3154, 358, 453, 736, 175, # 6550 + 478,1117, 905,1167,1097,7796,1853,1530,7797,1706,7798,2178,3472,2287,3695,3473, # 6566 +3577,4203,2092,4204,7799,3367,1193,2482,4205,1458,2190,2205,1862,1888,1421,3298, # 6582 +2918,3049,2179,3474, 595,2122,7800,3997,7801,7802,4206,1707,2636, 223,3696,1359, # 6598 + 751,3098, 183,3475,7803,2797,3003, 419,2369, 633, 704,3818,2389, 241,7804,7805, # 6614 +7806, 838,3004,3697,2272,2763,2454,3819,1938,2050,3998,1309,3099,2242,1181,7807, # 6630 +1136,2206,3820,2370,1446,4207,2305,4498,7808,7809,4208,1055,2605, 484,3698,7810, # 6646 +3999, 625,4209,2273,3368,1499,4210,4000,7811,4001,4211,3222,2274,2275,3476,7812, # 6662 +7813,2764, 808,2606,3699,3369,4002,4212,3100,2532, 526,3370,3821,4213, 955,7814, # 6678 +1620,4214,2637,2427,7815,1429,3700,1669,1831, 994, 928,7816,3578,1260,7817,7818, # 6694 +7819,1948,2288, 741,2919,1626,4215,2729,2455, 867,1184, 362,3371,1392,7820,7821, # 6710 +4003,4216,1770,1736,3223,2920,4499,4500,1928,2698,1459,1158,7822,3050,3372,2877, # 6726 +1292,1929,2506,2842,3701,1985,1187,2071,2014,2607,4217,7823,2566,2507,2169,3702, # 6742 +2483,3299,7824,3703,4501,7825,7826, 666,1003,3005,1022,3579,4218,7827,4502,1813, # 6758 +2253, 574,3822,1603, 295,1535, 
705,3823,4219, 283, 858, 417,7828,7829,3224,4503, # 6774 +4504,3051,1220,1889,1046,2276,2456,4004,1393,1599, 689,2567, 388,4220,7830,2484, # 6790 + 802,7831,2798,3824,2060,1405,2254,7832,4505,3825,2109,1052,1345,3225,1585,7833, # 6806 + 809,7834,7835,7836, 575,2730,3477, 956,1552,1469,1144,2323,7837,2324,1560,2457, # 6822 +3580,3226,4005, 616,2207,3155,2180,2289,7838,1832,7839,3478,4506,7840,1319,3704, # 6838 +3705,1211,3581,1023,3227,1293,2799,7841,7842,7843,3826, 607,2306,3827, 762,2878, # 6854 +1439,4221,1360,7844,1485,3052,7845,4507,1038,4222,1450,2061,2638,4223,1379,4508, # 6870 +2585,7846,7847,4224,1352,1414,2325,2921,1172,7848,7849,3828,3829,7850,1797,1451, # 6886 +7851,7852,7853,7854,2922,4006,4007,2485,2346, 411,4008,4009,3582,3300,3101,4509, # 6902 +1561,2664,1452,4010,1375,7855,7856, 47,2959, 316,7857,1406,1591,2923,3156,7858, # 6918 +1025,2141,3102,3157, 354,2731, 884,2224,4225,2407, 508,3706, 726,3583, 996,2428, # 6934 +3584, 729,7859, 392,2191,1453,4011,4510,3707,7860,7861,2458,3585,2608,1675,2800, # 6950 + 919,2347,2960,2348,1270,4511,4012, 73,7862,7863, 647,7864,3228,2843,2255,1550, # 6966 +1346,3006,7865,1332, 883,3479,7866,7867,7868,7869,3301,2765,7870,1212, 831,1347, # 6982 +4226,4512,2326,3830,1863,3053, 720,3831,4513,4514,3832,7871,4227,7872,7873,4515, # 6998 +7874,7875,1798,4516,3708,2609,4517,3586,1645,2371,7876,7877,2924, 669,2208,2665, # 7014 +2429,7878,2879,7879,7880,1028,3229,7881,4228,2408,7882,2256,1353,7883,7884,4518, # 7030 +3158, 518,7885,4013,7886,4229,1960,7887,2142,4230,7888,7889,3007,2349,2350,3833, # 7046 + 516,1833,1454,4014,2699,4231,4519,2225,2610,1971,1129,3587,7890,2766,7891,2961, # 7062 +1422, 577,1470,3008,1524,3373,7892,7893, 432,4232,3054,3480,7894,2586,1455,2508, # 7078 +2226,1972,1175,7895,1020,2732,4015,3481,4520,7896,2733,7897,1743,1361,3055,3482, # 7094 +2639,4016,4233,4521,2290, 895, 924,4234,2170, 331,2243,3056, 166,1627,3057,1098, # 7110 +7898,1232,2880,2227,3374,4522, 657, 403,1196,2372, 
542,3709,3375,1600,4235,3483, # 7126 +7899,4523,2767,3230, 576, 530,1362,7900,4524,2533,2666,3710,4017,7901, 842,3834, # 7142 +7902,2801,2031,1014,4018, 213,2700,3376, 665, 621,4236,7903,3711,2925,2430,7904, # 7158 +2431,3302,3588,3377,7905,4237,2534,4238,4525,3589,1682,4239,3484,1380,7906, 724, # 7174 +2277, 600,1670,7907,1337,1233,4526,3103,2244,7908,1621,4527,7909, 651,4240,7910, # 7190 +1612,4241,2611,7911,2844,7912,2734,2307,3058,7913, 716,2459,3059, 174,1255,2701, # 7206 +4019,3590, 548,1320,1398, 728,4020,1574,7914,1890,1197,3060,4021,7915,3061,3062, # 7222 +3712,3591,3713, 747,7916, 635,4242,4528,7917,7918,7919,4243,7920,7921,4529,7922, # 7238 +3378,4530,2432, 451,7923,3714,2535,2072,4244,2735,4245,4022,7924,1764,4531,7925, # 7254 +4246, 350,7926,2278,2390,2486,7927,4247,4023,2245,1434,4024, 488,4532, 458,4248, # 7270 +4025,3715, 771,1330,2391,3835,2568,3159,2159,2409,1553,2667,3160,4249,7928,2487, # 7286 +2881,2612,1720,2702,4250,3379,4533,7929,2536,4251,7930,3231,4252,2768,7931,2015, # 7302 +2736,7932,1155,1017,3716,3836,7933,3303,2308, 201,1864,4253,1430,7934,4026,7935, # 7318 +7936,7937,7938,7939,4254,1604,7940, 414,1865, 371,2587,4534,4535,3485,2016,3104, # 7334 +4536,1708, 960,4255, 887, 389,2171,1536,1663,1721,7941,2228,4027,2351,2926,1580, # 7350 +7942,7943,7944,1744,7945,2537,4537,4538,7946,4539,7947,2073,7948,7949,3592,3380, # 7366 +2882,4256,7950,4257,2640,3381,2802, 673,2703,2460, 709,3486,4028,3593,4258,7951, # 7382 +1148, 502, 634,7952,7953,1204,4540,3594,1575,4541,2613,3717,7954,3718,3105, 948, # 7398 +3232, 121,1745,3837,1110,7955,4259,3063,2509,3009,4029,3719,1151,1771,3838,1488, # 7414 +4030,1986,7956,2433,3487,7957,7958,2093,7959,4260,3839,1213,1407,2803, 531,2737, # 7430 +2538,3233,1011,1537,7960,2769,4261,3106,1061,7961,3720,3721,1866,2883,7962,2017, # 7446 + 120,4262,4263,2062,3595,3234,2309,3840,2668,3382,1954,4542,7963,7964,3488,1047, # 7462 +2704,1266,7965,1368,4543,2845, 649,3383,3841,2539,2738,1102,2846,2669,7966,7967, # 7478 
+1999,7968,1111,3596,2962,7969,2488,3842,3597,2804,1854,3384,3722,7970,7971,3385, # 7494 +2410,2884,3304,3235,3598,7972,2569,7973,3599,2805,4031,1460, 856,7974,3600,7975, # 7510 +2885,2963,7976,2886,3843,7977,4264, 632,2510, 875,3844,1697,3845,2291,7978,7979, # 7526 +4544,3010,1239, 580,4545,4265,7980, 914, 936,2074,1190,4032,1039,2123,7981,7982, # 7542 +7983,3386,1473,7984,1354,4266,3846,7985,2172,3064,4033, 915,3305,4267,4268,3306, # 7558 +1605,1834,7986,2739, 398,3601,4269,3847,4034, 328,1912,2847,4035,3848,1331,4270, # 7574 +3011, 937,4271,7987,3602,4036,4037,3387,2160,4546,3388, 524, 742, 538,3065,1012, # 7590 +7988,7989,3849,2461,7990, 658,1103, 225,3850,7991,7992,4547,7993,4548,7994,3236, # 7606 +1243,7995,4038, 963,2246,4549,7996,2705,3603,3161,7997,7998,2588,2327,7999,4550, # 7622 +8000,8001,8002,3489,3307, 957,3389,2540,2032,1930,2927,2462, 870,2018,3604,1746, # 7638 +2770,2771,2434,2463,8003,3851,8004,3723,3107,3724,3490,3390,3725,8005,1179,3066, # 7654 +8006,3162,2373,4272,3726,2541,3163,3108,2740,4039,8007,3391,1556,2542,2292, 977, # 7670 +2887,2033,4040,1205,3392,8008,1765,3393,3164,2124,1271,1689, 714,4551,3491,8009, # 7686 +2328,3852, 533,4273,3605,2181, 617,8010,2464,3308,3492,2310,8011,8012,3165,8013, # 7702 +8014,3853,1987, 618, 427,2641,3493,3394,8015,8016,1244,1690,8017,2806,4274,4552, # 7718 +8018,3494,8019,8020,2279,1576, 473,3606,4275,3395, 972,8021,3607,8022,3067,8023, # 7734 +8024,4553,4554,8025,3727,4041,4042,8026, 153,4555, 356,8027,1891,2888,4276,2143, # 7750 + 408, 803,2352,8028,3854,8029,4277,1646,2570,2511,4556,4557,3855,8030,3856,4278, # 7766 +8031,2411,3396, 752,8032,8033,1961,2964,8034, 746,3012,2465,8035,4279,3728, 698, # 7782 +4558,1892,4280,3608,2543,4559,3609,3857,8036,3166,3397,8037,1823,1302,4043,2706, # 7798 +3858,1973,4281,8038,4282,3167, 823,1303,1288,1236,2848,3495,4044,3398, 774,3859, # 7814 +8039,1581,4560,1304,2849,3860,4561,8040,2435,2161,1083,3237,4283,4045,4284, 344, # 7830 +1173, 288,2311, 
454,1683,8041,8042,1461,4562,4046,2589,8043,8044,4563, 985, 894, # 7846 +8045,3399,3168,8046,1913,2928,3729,1988,8047,2110,1974,8048,4047,8049,2571,1194, # 7862 + 425,8050,4564,3169,1245,3730,4285,8051,8052,2850,8053, 636,4565,1855,3861, 760, # 7878 +1799,8054,4286,2209,1508,4566,4048,1893,1684,2293,8055,8056,8057,4287,4288,2210, # 7894 + 479,8058,8059, 832,8060,4049,2489,8061,2965,2490,3731, 990,3109, 627,1814,2642, # 7910 +4289,1582,4290,2125,2111,3496,4567,8062, 799,4291,3170,8063,4568,2112,1737,3013, # 7926 +1018, 543, 754,4292,3309,1676,4569,4570,4050,8064,1489,8065,3497,8066,2614,2889, # 7942 +4051,8067,8068,2966,8069,8070,8071,8072,3171,4571,4572,2182,1722,8073,3238,3239, # 7958 +1842,3610,1715, 481, 365,1975,1856,8074,8075,1962,2491,4573,8076,2126,3611,3240, # 7974 + 433,1894,2063,2075,8077, 602,2741,8078,8079,8080,8081,8082,3014,1628,3400,8083, # 7990 +3172,4574,4052,2890,4575,2512,8084,2544,2772,8085,8086,8087,3310,4576,2891,8088, # 8006 +4577,8089,2851,4578,4579,1221,2967,4053,2513,8090,8091,8092,1867,1989,8093,8094, # 8022 +8095,1895,8096,8097,4580,1896,4054, 318,8098,2094,4055,4293,8099,8100, 485,8101, # 8038 + 938,3862, 553,2670, 116,8102,3863,3612,8103,3498,2671,2773,3401,3311,2807,8104, # 8054 +3613,2929,4056,1747,2930,2968,8105,8106, 207,8107,8108,2672,4581,2514,8109,3015, # 8070 + 890,3614,3864,8110,1877,3732,3402,8111,2183,2353,3403,1652,8112,8113,8114, 941, # 8086 +2294, 208,3499,4057,2019, 330,4294,3865,2892,2492,3733,4295,8115,8116,8117,8118, # 8102 +) + diff --git a/thesisenv/lib/python3.6/site-packages/chardet/euctwprober.py b/thesisenv/lib/python3.6/site-packages/chardet/euctwprober.py new file mode 100644 index 0000000..35669cc --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/chardet/euctwprober.py @@ -0,0 +1,46 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. 
+# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .mbcharsetprober import MultiByteCharSetProber +from .codingstatemachine import CodingStateMachine +from .chardistribution import EUCTWDistributionAnalysis +from .mbcssm import EUCTW_SM_MODEL + +class EUCTWProber(MultiByteCharSetProber): + def __init__(self): + super(EUCTWProber, self).__init__() + self.coding_sm = CodingStateMachine(EUCTW_SM_MODEL) + self.distribution_analyzer = EUCTWDistributionAnalysis() + self.reset() + + @property + def charset_name(self): + return "EUC-TW" + + @property + def language(self): + return "Taiwan" diff --git a/thesisenv/lib/python3.6/site-packages/chardet/gb2312freq.py b/thesisenv/lib/python3.6/site-packages/chardet/gb2312freq.py new file mode 100644 index 0000000..697837b --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/chardet/gb2312freq.py @@ -0,0 +1,283 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. 
+# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +# GB2312 most frequently used character table +# +# Char to FreqOrder table , from hz6763 + +# 512 --> 0.79 -- 0.79 +# 1024 --> 0.92 -- 0.13 +# 2048 --> 0.98 -- 0.06 +# 6768 --> 1.00 -- 0.02 +# +# Ideal Distribution Ratio = 0.79135/(1-0.79135) = 3.79 +# Random Distribution Ration = 512 / (3755 - 512) = 0.157 +# +# Typical Distribution Ratio about 25% of Ideal one, still much higher that RDR + +GB2312_TYPICAL_DISTRIBUTION_RATIO = 0.9 + +GB2312_TABLE_SIZE = 3760 + +GB2312_CHAR_TO_FREQ_ORDER = ( +1671, 749,1443,2364,3924,3807,2330,3921,1704,3463,2691,1511,1515, 572,3191,2205, +2361, 224,2558, 479,1711, 963,3162, 440,4060,1905,2966,2947,3580,2647,3961,3842, +2204, 869,4207, 970,2678,5626,2944,2956,1479,4048, 514,3595, 588,1346,2820,3409, + 249,4088,1746,1873,2047,1774, 581,1813, 358,1174,3590,1014,1561,4844,2245, 670, +1636,3112, 889,1286, 953, 556,2327,3060,1290,3141, 613, 185,3477,1367, 850,3820, 
+1715,2428,2642,2303,2732,3041,2562,2648,3566,3946,1349, 388,3098,2091,1360,3585, + 152,1687,1539, 738,1559, 59,1232,2925,2267,1388,1249,1741,1679,2960, 151,1566, +1125,1352,4271, 924,4296, 385,3166,4459, 310,1245,2850, 70,3285,2729,3534,3575, +2398,3298,3466,1960,2265, 217,3647, 864,1909,2084,4401,2773,1010,3269,5152, 853, +3051,3121,1244,4251,1895, 364,1499,1540,2313,1180,3655,2268, 562, 715,2417,3061, + 544, 336,3768,2380,1752,4075, 950, 280,2425,4382, 183,2759,3272, 333,4297,2155, +1688,2356,1444,1039,4540, 736,1177,3349,2443,2368,2144,2225, 565, 196,1482,3406, + 927,1335,4147, 692, 878,1311,1653,3911,3622,1378,4200,1840,2969,3149,2126,1816, +2534,1546,2393,2760, 737,2494, 13, 447, 245,2747, 38,2765,2129,2589,1079, 606, + 360, 471,3755,2890, 404, 848, 699,1785,1236, 370,2221,1023,3746,2074,2026,2023, +2388,1581,2119, 812,1141,3091,2536,1519, 804,2053, 406,1596,1090, 784, 548,4414, +1806,2264,2936,1100, 343,4114,5096, 622,3358, 743,3668,1510,1626,5020,3567,2513, +3195,4115,5627,2489,2991, 24,2065,2697,1087,2719, 48,1634, 315, 68, 985,2052, + 198,2239,1347,1107,1439, 597,2366,2172, 871,3307, 919,2487,2790,1867, 236,2570, +1413,3794, 906,3365,3381,1701,1982,1818,1524,2924,1205, 616,2586,2072,2004, 575, + 253,3099, 32,1365,1182, 197,1714,2454,1201, 554,3388,3224,2748, 756,2587, 250, +2567,1507,1517,3529,1922,2761,2337,3416,1961,1677,2452,2238,3153, 615, 911,1506, +1474,2495,1265,1906,2749,3756,3280,2161, 898,2714,1759,3450,2243,2444, 563, 26, +3286,2266,3769,3344,2707,3677, 611,1402, 531,1028,2871,4548,1375, 261,2948, 835, +1190,4134, 353, 840,2684,1900,3082,1435,2109,1207,1674, 329,1872,2781,4055,2686, +2104, 608,3318,2423,2957,2768,1108,3739,3512,3271,3985,2203,1771,3520,1418,2054, +1681,1153, 225,1627,2929, 162,2050,2511,3687,1954, 124,1859,2431,1684,3032,2894, + 585,4805,3969,2869,2704,2088,2032,2095,3656,2635,4362,2209, 256, 518,2042,2105, +3777,3657, 643,2298,1148,1779, 190, 989,3544, 414, 11,2135,2063,2979,1471, 403, +3678, 126, 770,1563, 671,2499,3216,2877, 
600,1179, 307,2805,4937,1268,1297,2694, + 252,4032,1448,1494,1331,1394, 127,2256, 222,1647,1035,1481,3056,1915,1048, 873, +3651, 210, 33,1608,2516, 200,1520, 415, 102, 0,3389,1287, 817, 91,3299,2940, + 836,1814, 549,2197,1396,1669,2987,3582,2297,2848,4528,1070, 687, 20,1819, 121, +1552,1364,1461,1968,2617,3540,2824,2083, 177, 948,4938,2291, 110,4549,2066, 648, +3359,1755,2110,2114,4642,4845,1693,3937,3308,1257,1869,2123, 208,1804,3159,2992, +2531,2549,3361,2418,1350,2347,2800,2568,1291,2036,2680, 72, 842,1990, 212,1233, +1154,1586, 75,2027,3410,4900,1823,1337,2710,2676, 728,2810,1522,3026,4995, 157, + 755,1050,4022, 710, 785,1936,2194,2085,1406,2777,2400, 150,1250,4049,1206, 807, +1910, 534, 529,3309,1721,1660, 274, 39,2827, 661,2670,1578, 925,3248,3815,1094, +4278,4901,4252, 41,1150,3747,2572,2227,4501,3658,4902,3813,3357,3617,2884,2258, + 887, 538,4187,3199,1294,2439,3042,2329,2343,2497,1255, 107, 543,1527, 521,3478, +3568, 194,5062, 15, 961,3870,1241,1192,2664, 66,5215,3260,2111,1295,1127,2152, +3805,4135, 901,1164,1976, 398,1278, 530,1460, 748, 904,1054,1966,1426, 53,2909, + 509, 523,2279,1534, 536,1019, 239,1685, 460,2353, 673,1065,2401,3600,4298,2272, +1272,2363, 284,1753,3679,4064,1695, 81, 815,2677,2757,2731,1386, 859, 500,4221, +2190,2566, 757,1006,2519,2068,1166,1455, 337,2654,3203,1863,1682,1914,3025,1252, +1409,1366, 847, 714,2834,2038,3209, 964,2970,1901, 885,2553,1078,1756,3049, 301, +1572,3326, 688,2130,1996,2429,1805,1648,2930,3421,2750,3652,3088, 262,1158,1254, + 389,1641,1812, 526,1719, 923,2073,1073,1902, 468, 489,4625,1140, 857,2375,3070, +3319,2863, 380, 116,1328,2693,1161,2244, 273,1212,1884,2769,3011,1775,1142, 461, +3066,1200,2147,2212, 790, 702,2695,4222,1601,1058, 434,2338,5153,3640, 67,2360, +4099,2502, 618,3472,1329, 416,1132, 830,2782,1807,2653,3211,3510,1662, 192,2124, + 296,3979,1739,1611,3684, 23, 118, 324, 446,1239,1225, 293,2520,3814,3795,2535, +3116, 17,1074, 467,2692,2201, 387,2922, 45,1326,3055,1645,3659,2817, 958, 243, 
+1903,2320,1339,2825,1784,3289, 356, 576, 865,2315,2381,3377,3916,1088,3122,1713, +1655, 935, 628,4689,1034,1327, 441, 800, 720, 894,1979,2183,1528,5289,2702,1071, +4046,3572,2399,1571,3281, 79, 761,1103, 327, 134, 758,1899,1371,1615, 879, 442, + 215,2605,2579, 173,2048,2485,1057,2975,3317,1097,2253,3801,4263,1403,1650,2946, + 814,4968,3487,1548,2644,1567,1285, 2, 295,2636, 97, 946,3576, 832, 141,4257, +3273, 760,3821,3521,3156,2607, 949,1024,1733,1516,1803,1920,2125,2283,2665,3180, +1501,2064,3560,2171,1592, 803,3518,1416, 732,3897,4258,1363,1362,2458, 119,1427, + 602,1525,2608,1605,1639,3175, 694,3064, 10, 465, 76,2000,4846,4208, 444,3781, +1619,3353,2206,1273,3796, 740,2483, 320,1723,2377,3660,2619,1359,1137,1762,1724, +2345,2842,1850,1862, 912, 821,1866, 612,2625,1735,2573,3369,1093, 844, 89, 937, + 930,1424,3564,2413,2972,1004,3046,3019,2011, 711,3171,1452,4178, 428, 801,1943, + 432, 445,2811, 206,4136,1472, 730, 349, 73, 397,2802,2547, 998,1637,1167, 789, + 396,3217, 154,1218, 716,1120,1780,2819,4826,1931,3334,3762,2139,1215,2627, 552, +3664,3628,3232,1405,2383,3111,1356,2652,3577,3320,3101,1703, 640,1045,1370,1246, +4996, 371,1575,2436,1621,2210, 984,4033,1734,2638, 16,4529, 663,2755,3255,1451, +3917,2257,1253,1955,2234,1263,2951, 214,1229, 617, 485, 359,1831,1969, 473,2310, + 750,2058, 165, 80,2864,2419, 361,4344,2416,2479,1134, 796,3726,1266,2943, 860, +2715, 938, 390,2734,1313,1384, 248, 202, 877,1064,2854, 522,3907, 279,1602, 297, +2357, 395,3740, 137,2075, 944,4089,2584,1267,3802, 62,1533,2285, 178, 176, 780, +2440, 201,3707, 590, 478,1560,4354,2117,1075, 30, 74,4643,4004,1635,1441,2745, + 776,2596, 238,1077,1692,1912,2844, 605, 499,1742,3947, 241,3053, 980,1749, 936, +2640,4511,2582, 515,1543,2162,5322,2892,2993, 890,2148,1924, 665,1827,3581,1032, + 968,3163, 339,1044,1896, 270, 583,1791,1720,4367,1194,3488,3669, 43,2523,1657, + 163,2167, 290,1209,1622,3378, 550, 634,2508,2510, 695,2634,2384,2512,1476,1414, + 
220,1469,2341,2138,2852,3183,2900,4939,2865,3502,1211,3680, 854,3227,1299,2976, +3172, 186,2998,1459, 443,1067,3251,1495, 321,1932,3054, 909, 753,1410,1828, 436, +2441,1119,1587,3164,2186,1258, 227, 231,1425,1890,3200,3942, 247, 959, 725,5254, +2741, 577,2158,2079, 929, 120, 174, 838,2813, 591,1115, 417,2024, 40,3240,1536, +1037, 291,4151,2354, 632,1298,2406,2500,3535,1825,1846,3451, 205,1171, 345,4238, + 18,1163, 811, 685,2208,1217, 425,1312,1508,1175,4308,2552,1033, 587,1381,3059, +2984,3482, 340,1316,4023,3972, 792,3176, 519, 777,4690, 918, 933,4130,2981,3741, + 90,3360,2911,2200,5184,4550, 609,3079,2030, 272,3379,2736, 363,3881,1130,1447, + 286, 779, 357,1169,3350,3137,1630,1220,2687,2391, 747,1277,3688,2618,2682,2601, +1156,3196,5290,4034,3102,1689,3596,3128, 874, 219,2783, 798, 508,1843,2461, 269, +1658,1776,1392,1913,2983,3287,2866,2159,2372, 829,4076, 46,4253,2873,1889,1894, + 915,1834,1631,2181,2318, 298, 664,2818,3555,2735, 954,3228,3117, 527,3511,2173, + 681,2712,3033,2247,2346,3467,1652, 155,2164,3382, 113,1994, 450, 899, 494, 994, +1237,2958,1875,2336,1926,3727, 545,1577,1550, 633,3473, 204,1305,3072,2410,1956, +2471, 707,2134, 841,2195,2196,2663,3843,1026,4940, 990,3252,4997, 368,1092, 437, +3212,3258,1933,1829, 675,2977,2893, 412, 943,3723,4644,3294,3283,2230,2373,5154, +2389,2241,2661,2323,1404,2524, 593, 787, 677,3008,1275,2059, 438,2709,2609,2240, +2269,2246,1446, 36,1568,1373,3892,1574,2301,1456,3962, 693,2276,5216,2035,1143, +2720,1919,1797,1811,2763,4137,2597,1830,1699,1488,1198,2090, 424,1694, 312,3634, +3390,4179,3335,2252,1214, 561,1059,3243,2295,2561, 975,5155,2321,2751,3772, 472, +1537,3282,3398,1047,2077,2348,2878,1323,3340,3076, 690,2906, 51, 369, 170,3541, +1060,2187,2688,3670,2541,1083,1683, 928,3918, 459, 109,4427, 599,3744,4286, 143, +2101,2730,2490, 82,1588,3036,2121, 281,1860, 477,4035,1238,2812,3020,2716,3312, +1530,2188,2055,1317, 843, 636,1808,1173,3495, 649, 181,1002, 147,3641,1159,2414, +3750,2289,2795, 
813,3123,2610,1136,4368, 5,3391,4541,2174, 420, 429,1728, 754, +1228,2115,2219, 347,2223,2733, 735,1518,3003,2355,3134,1764,3948,3329,1888,2424, +1001,1234,1972,3321,3363,1672,1021,1450,1584, 226, 765, 655,2526,3404,3244,2302, +3665, 731, 594,2184, 319,1576, 621, 658,2656,4299,2099,3864,1279,2071,2598,2739, + 795,3086,3699,3908,1707,2352,2402,1382,3136,2475,1465,4847,3496,3865,1085,3004, +2591,1084, 213,2287,1963,3565,2250, 822, 793,4574,3187,1772,1789,3050, 595,1484, +1959,2770,1080,2650, 456, 422,2996, 940,3322,4328,4345,3092,2742, 965,2784, 739, +4124, 952,1358,2498,2949,2565, 332,2698,2378, 660,2260,2473,4194,3856,2919, 535, +1260,2651,1208,1428,1300,1949,1303,2942, 433,2455,2450,1251,1946, 614,1269, 641, +1306,1810,2737,3078,2912, 564,2365,1419,1415,1497,4460,2367,2185,1379,3005,1307, +3218,2175,1897,3063, 682,1157,4040,4005,1712,1160,1941,1399, 394, 402,2952,1573, +1151,2986,2404, 862, 299,2033,1489,3006, 346, 171,2886,3401,1726,2932, 168,2533, + 47,2507,1030,3735,1145,3370,1395,1318,1579,3609,4560,2857,4116,1457,2529,1965, + 504,1036,2690,2988,2405, 745,5871, 849,2397,2056,3081, 863,2359,3857,2096, 99, +1397,1769,2300,4428,1643,3455,1978,1757,3718,1440, 35,4879,3742,1296,4228,2280, + 160,5063,1599,2013, 166, 520,3479,1646,3345,3012, 490,1937,1545,1264,2182,2505, +1096,1188,1369,1436,2421,1667,2792,2460,1270,2122, 727,3167,2143, 806,1706,1012, +1800,3037, 960,2218,1882, 805, 139,2456,1139,1521, 851,1052,3093,3089, 342,2039, + 744,5097,1468,1502,1585,2087, 223, 939, 326,2140,2577, 892,2481,1623,4077, 982, +3708, 135,2131, 87,2503,3114,2326,1106, 876,1616, 547,2997,2831,2093,3441,4530, +4314, 9,3256,4229,4148, 659,1462,1986,1710,2046,2913,2231,4090,4880,5255,3392, +3274,1368,3689,4645,1477, 705,3384,3635,1068,1529,2941,1458,3782,1509, 100,1656, +2548, 718,2339, 408,1590,2780,3548,1838,4117,3719,1345,3530, 717,3442,2778,3220, +2898,1892,4590,3614,3371,2043,1998,1224,3483, 891, 635, 584,2559,3355, 733,1766, +1729,1172,3789,1891,2307, 
781,2982,2271,1957,1580,5773,2633,2005,4195,3097,1535, +3213,1189,1934,5693,3262, 586,3118,1324,1598, 517,1564,2217,1868,1893,4445,3728, +2703,3139,1526,1787,1992,3882,2875,1549,1199,1056,2224,1904,2711,5098,4287, 338, +1993,3129,3489,2689,1809,2815,1997, 957,1855,3898,2550,3275,3057,1105,1319, 627, +1505,1911,1883,3526, 698,3629,3456,1833,1431, 746, 77,1261,2017,2296,1977,1885, + 125,1334,1600, 525,1798,1109,2222,1470,1945, 559,2236,1186,3443,2476,1929,1411, +2411,3135,1777,3372,2621,1841,1613,3229, 668,1430,1839,2643,2916, 195,1989,2671, +2358,1387, 629,3205,2293,5256,4439, 123,1310, 888,1879,4300,3021,3605,1003,1162, +3192,2910,2010, 140,2395,2859, 55,1082,2012,2901, 662, 419,2081,1438, 680,2774, +4654,3912,1620,1731,1625,5035,4065,2328, 512,1344, 802,5443,2163,2311,2537, 524, +3399, 98,1155,2103,1918,2606,3925,2816,1393,2465,1504,3773,2177,3963,1478,4346, + 180,1113,4655,3461,2028,1698, 833,2696,1235,1322,1594,4408,3623,3013,3225,2040, +3022, 541,2881, 607,3632,2029,1665,1219, 639,1385,1686,1099,2803,3231,1938,3188, +2858, 427, 676,2772,1168,2025, 454,3253,2486,3556, 230,1950, 580, 791,1991,1280, +1086,1974,2034, 630, 257,3338,2788,4903,1017, 86,4790, 966,2789,1995,1696,1131, + 259,3095,4188,1308, 179,1463,5257, 289,4107,1248, 42,3413,1725,2288, 896,1947, + 774,4474,4254, 604,3430,4264, 392,2514,2588, 452, 237,1408,3018, 988,4531,1970, +3034,3310, 540,2370,1562,1288,2990, 502,4765,1147, 4,1853,2708, 207, 294,2814, +4078,2902,2509, 684, 34,3105,3532,2551, 644, 709,2801,2344, 573,1727,3573,3557, +2021,1081,3100,4315,2100,3681, 199,2263,1837,2385, 146,3484,1195,2776,3949, 997, +1939,3973,1008,1091,1202,1962,1847,1149,4209,5444,1076, 493, 117,5400,2521, 972, +1490,2934,1796,4542,2374,1512,2933,2657, 413,2888,1135,2762,2314,2156,1355,2369, + 766,2007,2527,2170,3124,2491,2593,2632,4757,2437, 234,3125,3591,1898,1750,1376, +1942,3468,3138, 570,2127,2145,3276,4131, 962, 132,1445,4196, 19, 941,3624,3480, +3366,1973,1374,4461,3431,2629, 283,2415,2275, 
808,2887,3620,2112,2563,1353,3610, + 955,1089,3103,1053, 96, 88,4097, 823,3808,1583, 399, 292,4091,3313, 421,1128, + 642,4006, 903,2539,1877,2082, 596, 29,4066,1790, 722,2157, 130, 995,1569, 769, +1485, 464, 513,2213, 288,1923,1101,2453,4316, 133, 486,2445, 50, 625, 487,2207, + 57, 423, 481,2962, 159,3729,1558, 491, 303, 482, 501, 240,2837, 112,3648,2392, +1783, 362, 8,3433,3422, 610,2793,3277,1390,1284,1654, 21,3823, 734, 367, 623, + 193, 287, 374,1009,1483, 816, 476, 313,2255,2340,1262,2150,2899,1146,2581, 782, +2116,1659,2018,1880, 255,3586,3314,1110,2867,2137,2564, 986,2767,5185,2006, 650, + 158, 926, 762, 881,3157,2717,2362,3587, 306,3690,3245,1542,3077,2427,1691,2478, +2118,2985,3490,2438, 539,2305, 983, 129,1754, 355,4201,2386, 827,2923, 104,1773, +2838,2771, 411,2905,3919, 376, 767, 122,1114, 828,2422,1817,3506, 266,3460,1007, +1609,4998, 945,2612,4429,2274, 726,1247,1964,2914,2199,2070,4002,4108, 657,3323, +1422, 579, 455,2764,4737,1222,2895,1670, 824,1223,1487,2525, 558, 861,3080, 598, +2659,2515,1967, 752,2583,2376,2214,4180, 977, 704,2464,4999,2622,4109,1210,2961, + 819,1541, 142,2284, 44, 418, 457,1126,3730,4347,4626,1644,1876,3671,1864, 302, +1063,5694, 624, 723,1984,3745,1314,1676,2488,1610,1449,3558,3569,2166,2098, 409, +1011,2325,3704,2306, 818,1732,1383,1824,1844,3757, 999,2705,3497,1216,1423,2683, +2426,2954,2501,2726,2229,1475,2554,5064,1971,1794,1666,2014,1343, 783, 724, 191, +2434,1354,2220,5065,1763,2752,2472,4152, 131, 175,2885,3434, 92,1466,4920,2616, +3871,3872,3866, 128,1551,1632, 669,1854,3682,4691,4125,1230, 188,2973,3290,1302, +1213, 560,3266, 917, 763,3909,3249,1760, 868,1958, 764,1782,2097, 145,2277,3774, +4462, 64,1491,3062, 971,2132,3606,2442, 221,1226,1617, 218, 323,1185,3207,3147, + 571, 619,1473,1005,1744,2281, 449,1887,2396,3685, 275, 375,3816,1743,3844,3731, + 845,1983,2350,4210,1377, 773, 967,3499,3052,3743,2725,4007,1697,1022,3943,1464, +3264,2855,2722,1952,1029,2839,2467, 84,4383,2215, 820,1391,2015,2448,3672, 377, 
+1948,2168, 797,2545,3536,2578,2645, 94,2874,1678, 405,1259,3071, 771, 546,1315, + 470,1243,3083, 895,2468, 981, 969,2037, 846,4181, 653,1276,2928, 14,2594, 557, +3007,2474, 156, 902,1338,1740,2574, 537,2518, 973,2282,2216,2433,1928, 138,2903, +1293,2631,1612, 646,3457, 839,2935, 111, 496,2191,2847, 589,3186, 149,3994,2060, +4031,2641,4067,3145,1870, 37,3597,2136,1025,2051,3009,3383,3549,1121,1016,3261, +1301, 251,2446,2599,2153, 872,3246, 637, 334,3705, 831, 884, 921,3065,3140,4092, +2198,1944, 246,2964, 108,2045,1152,1921,2308,1031, 203,3173,4170,1907,3890, 810, +1401,2003,1690, 506, 647,1242,2828,1761,1649,3208,2249,1589,3709,2931,5156,1708, + 498, 666,2613, 834,3817,1231, 184,2851,1124, 883,3197,2261,3710,1765,1553,2658, +1178,2639,2351, 93,1193, 942,2538,2141,4402, 235,1821, 870,1591,2192,1709,1871, +3341,1618,4126,2595,2334, 603, 651, 69, 701, 268,2662,3411,2555,1380,1606, 503, + 448, 254,2371,2646, 574,1187,2309,1770, 322,2235,1292,1801, 305, 566,1133, 229, +2067,2057, 706, 167, 483,2002,2672,3295,1820,3561,3067, 316, 378,2746,3452,1112, + 136,1981, 507,1651,2917,1117, 285,4591, 182,2580,3522,1304, 335,3303,1835,2504, +1795,1792,2248, 674,1018,2106,2449,1857,2292,2845, 976,3047,1781,2600,2727,1389, +1281, 52,3152, 153, 265,3950, 672,3485,3951,4463, 430,1183, 365, 278,2169, 27, +1407,1336,2304, 209,1340,1730,2202,1852,2403,2883, 979,1737,1062, 631,2829,2542, +3876,2592, 825,2086,2226,3048,3625, 352,1417,3724, 542, 991, 431,1351,3938,1861, +2294, 826,1361,2927,3142,3503,1738, 463,2462,2723, 582,1916,1595,2808, 400,3845, +3891,2868,3621,2254, 58,2492,1123, 910,2160,2614,1372,1603,1196,1072,3385,1700, +3267,1980, 696, 480,2430, 920, 799,1570,2920,1951,2041,4047,2540,1321,4223,2469, +3562,2228,1271,2602, 401,2833,3351,2575,5157, 907,2312,1256, 410, 263,3507,1582, + 996, 678,1849,2316,1480, 908,3545,2237, 703,2322, 667,1826,2849,1531,2604,2999, +2407,3146,2151,2630,1786,3711, 469,3542, 497,3899,2409, 858, 837,4446,3393,1274, + 786, 620,1845,2001,3311, 484, 
308,3367,1204,1815,3691,2332,1532,2557,1842,2020, +2724,1927,2333,4440, 567, 22,1673,2728,4475,1987,1858,1144,1597, 101,1832,3601, + 12, 974,3783,4391, 951,1412, 1,3720, 453,4608,4041, 528,1041,1027,3230,2628, +1129, 875,1051,3291,1203,2262,1069,2860,2799,2149,2615,3278, 144,1758,3040, 31, + 475,1680, 366,2685,3184, 311,1642,4008,2466,5036,1593,1493,2809, 216,1420,1668, + 233, 304,2128,3284, 232,1429,1768,1040,2008,3407,2740,2967,2543, 242,2133, 778, +1565,2022,2620, 505,2189,2756,1098,2273, 372,1614, 708, 553,2846,2094,2278, 169, +3626,2835,4161, 228,2674,3165, 809,1454,1309, 466,1705,1095, 900,3423, 880,2667, +3751,5258,2317,3109,2571,4317,2766,1503,1342, 866,4447,1118, 63,2076, 314,1881, +1348,1061, 172, 978,3515,1747, 532, 511,3970, 6, 601, 905,2699,3300,1751, 276, +1467,3725,2668, 65,4239,2544,2779,2556,1604, 578,2451,1802, 992,2331,2624,1320, +3446, 713,1513,1013, 103,2786,2447,1661, 886,1702, 916, 654,3574,2031,1556, 751, +2178,2821,2179,1498,1538,2176, 271, 914,2251,2080,1325, 638,1953,2937,3877,2432, +2754, 95,3265,1716, 260,1227,4083, 775, 106,1357,3254, 426,1607, 555,2480, 772, +1985, 244,2546, 474, 495,1046,2611,1851,2061, 71,2089,1675,2590, 742,3758,2843, +3222,1433, 267,2180,2576,2826,2233,2092,3913,2435, 956,1745,3075, 856,2113,1116, + 451, 3,1988,2896,1398, 993,2463,1878,2049,1341,2718,2721,2870,2108, 712,2904, +4363,2753,2324, 277,2872,2349,2649, 384, 987, 435, 691,3000, 922, 164,3939, 652, +1500,1184,4153,2482,3373,2165,4848,2335,3775,3508,3154,2806,2830,1554,2102,1664, +2530,1434,2408, 893,1547,2623,3447,2832,2242,2532,3169,2856,3223,2078, 49,3770, +3469, 462, 318, 656,2259,3250,3069, 679,1629,2758, 344,1138,1104,3120,1836,1283, +3115,2154,1437,4448, 934, 759,1999, 794,2862,1038, 533,2560,1722,2342, 855,2626, +1197,1663,4476,3127, 85,4240,2528, 25,1111,1181,3673, 407,3470,4561,2679,2713, + 768,1925,2841,3986,1544,1165, 932, 373,1240,2146,1930,2673, 721,4766, 354,4333, + 391,2963, 187, 61,3364,1442,1102, 330,1940,1767, 341,3809,4118, 
393,2496,2062, +2211, 105, 331, 300, 439, 913,1332, 626, 379,3304,1557, 328, 689,3952, 309,1555, + 931, 317,2517,3027, 325, 569, 686,2107,3084, 60,1042,1333,2794, 264,3177,4014, +1628, 258,3712, 7,4464,1176,1043,1778, 683, 114,1975, 78,1492, 383,1886, 510, + 386, 645,5291,2891,2069,3305,4138,3867,2939,2603,2493,1935,1066,1848,3588,1015, +1282,1289,4609, 697,1453,3044,2666,3611,1856,2412, 54, 719,1330, 568,3778,2459, +1748, 788, 492, 551,1191,1000, 488,3394,3763, 282,1799, 348,2016,1523,3155,2390, +1049, 382,2019,1788,1170, 729,2968,3523, 897,3926,2785,2938,3292, 350,2319,3238, +1718,1717,2655,3453,3143,4465, 161,2889,2980,2009,1421, 56,1908,1640,2387,2232, +1917,1874,2477,4921, 148, 83,3438, 592,4245,2882,1822,1055, 741, 115,1496,1624, + 381,1638,4592,1020, 516,3214, 458, 947,4575,1432, 211,1514,2926,1865,2142, 189, + 852,1221,1400,1486, 882,2299,4036, 351, 28,1122, 700,6479,6480,6481,6482,6483, #last 512 +) + diff --git a/thesisenv/lib/python3.6/site-packages/chardet/gb2312prober.py b/thesisenv/lib/python3.6/site-packages/chardet/gb2312prober.py new file mode 100644 index 0000000..8446d2d --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/chardet/gb2312prober.py @@ -0,0 +1,46 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. 
+# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .mbcharsetprober import MultiByteCharSetProber +from .codingstatemachine import CodingStateMachine +from .chardistribution import GB2312DistributionAnalysis +from .mbcssm import GB2312_SM_MODEL + +class GB2312Prober(MultiByteCharSetProber): + def __init__(self): + super(GB2312Prober, self).__init__() + self.coding_sm = CodingStateMachine(GB2312_SM_MODEL) + self.distribution_analyzer = GB2312DistributionAnalysis() + self.reset() + + @property + def charset_name(self): + return "GB2312" + + @property + def language(self): + return "Chinese" diff --git a/thesisenv/lib/python3.6/site-packages/chardet/hebrewprober.py b/thesisenv/lib/python3.6/site-packages/chardet/hebrewprober.py new file mode 100644 index 0000000..b0e1bf4 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/chardet/hebrewprober.py @@ -0,0 +1,292 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Universal charset detector code. +# +# The Initial Developer of the Original Code is +# Shy Shalom +# Portions created by the Initial Developer are Copyright (C) 2005 +# the Initial Developer. All Rights Reserved. 
+# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .charsetprober import CharSetProber +from .enums import ProbingState + +# This prober doesn't actually recognize a language or a charset. +# It is a helper prober for the use of the Hebrew model probers + +### General ideas of the Hebrew charset recognition ### +# +# Four main charsets exist in Hebrew: +# "ISO-8859-8" - Visual Hebrew +# "windows-1255" - Logical Hebrew +# "ISO-8859-8-I" - Logical Hebrew +# "x-mac-hebrew" - ?? Logical Hebrew ?? +# +# Both "ISO" charsets use a completely identical set of code points, whereas +# "windows-1255" and "x-mac-hebrew" are two different proper supersets of +# these code points. windows-1255 defines additional characters in the range +# 0x80-0x9F as some misc punctuation marks as well as some Hebrew-specific +# diacritics and additional 'Yiddish' ligature letters in the range 0xc0-0xd6. +# x-mac-hebrew defines similar additional code points but with a different +# mapping. +# +# As far as an average Hebrew text with no diacritics is concerned, all four +# charsets are identical with respect to code points. 
Meaning that for the +# main Hebrew alphabet, all four map the same values to all 27 Hebrew letters +# (including final letters). +# +# The dominant difference between these charsets is their directionality. +# "Visual" directionality means that the text is ordered as if the renderer is +# not aware of a BIDI rendering algorithm. The renderer sees the text and +# draws it from left to right. The text itself when ordered naturally is read +# backwards. A buffer of Visual Hebrew generally looks like so: +# "[last word of first line spelled backwards] [whole line ordered backwards +# and spelled backwards] [first word of first line spelled backwards] +# [end of line] [last word of second line] ... etc' " +# adding punctuation marks, numbers and English text to visual text is +# naturally also "visual" and from left to right. +# +# "Logical" directionality means the text is ordered "naturally" according to +# the order it is read. It is the responsibility of the renderer to display +# the text from right to left. A BIDI algorithm is used to place general +# punctuation marks, numbers and English text in the text. +# +# Texts in x-mac-hebrew are almost impossible to find on the Internet. From +# what little evidence I could find, it seems that its general directionality +# is Logical. +# +# To sum up all of the above, the Hebrew probing mechanism knows about two +# charsets: +# Visual Hebrew - "ISO-8859-8" - backwards text - Words and sentences are +# backwards while line order is natural. For charset recognition purposes +# the line order is unimportant (In fact, for this implementation, even +# word order is unimportant). +# Logical Hebrew - "windows-1255" - normal, naturally ordered text. +# +# "ISO-8859-8-I" is a subset of windows-1255 and doesn't need to be +# specifically identified. +# "x-mac-hebrew" is also identified as windows-1255. 
A text in x-mac-hebrew +# that contain special punctuation marks or diacritics is displayed with +# some unconverted characters showing as question marks. This problem might +# be corrected using another model prober for x-mac-hebrew. Due to the fact +# that x-mac-hebrew texts are so rare, writing another model prober isn't +# worth the effort and performance hit. +# +#### The Prober #### +# +# The prober is divided between two SBCharSetProbers and a HebrewProber, +# all of which are managed, created, fed data, inquired and deleted by the +# SBCSGroupProber. The two SBCharSetProbers identify that the text is in +# fact some kind of Hebrew, Logical or Visual. The final decision about which +# one is it is made by the HebrewProber by combining final-letter scores +# with the scores of the two SBCharSetProbers to produce a final answer. +# +# The SBCSGroupProber is responsible for stripping the original text of HTML +# tags, English characters, numbers, low-ASCII punctuation characters, spaces +# and new lines. It reduces any sequence of such characters to a single space. +# The buffer fed to each prober in the SBCS group prober is pure text in +# high-ASCII. +# The two SBCharSetProbers (model probers) share the same language model: +# Win1255Model. +# The first SBCharSetProber uses the model normally as any other +# SBCharSetProber does, to recognize windows-1255, upon which this model was +# built. The second SBCharSetProber is told to make the pair-of-letter +# lookup in the language model backwards. This in practice exactly simulates +# a visual Hebrew model using the windows-1255 logical Hebrew model. +# +# The HebrewProber is not using any language model. All it does is look for +# final-letter evidence suggesting the text is either logical Hebrew or visual +# Hebrew. Disjointed from the model probers, the results of the HebrewProber +# alone are meaningless. HebrewProber always returns 0.00 as confidence +# since it never identifies a charset by itself. 
Instead, the pointer to the +# HebrewProber is passed to the model probers as a helper "Name Prober". +# When the Group prober receives a positive identification from any prober, +# it asks for the name of the charset identified. If the prober queried is a +# Hebrew model prober, the model prober forwards the call to the +# HebrewProber to make the final decision. In the HebrewProber, the +# decision is made according to the final-letters scores maintained and Both +# model probers scores. The answer is returned in the form of the name of the +# charset identified, either "windows-1255" or "ISO-8859-8". + +class HebrewProber(CharSetProber): + # windows-1255 / ISO-8859-8 code points of interest + FINAL_KAF = 0xea + NORMAL_KAF = 0xeb + FINAL_MEM = 0xed + NORMAL_MEM = 0xee + FINAL_NUN = 0xef + NORMAL_NUN = 0xf0 + FINAL_PE = 0xf3 + NORMAL_PE = 0xf4 + FINAL_TSADI = 0xf5 + NORMAL_TSADI = 0xf6 + + # Minimum Visual vs Logical final letter score difference. + # If the difference is below this, don't rely solely on the final letter score + # distance. + MIN_FINAL_CHAR_DISTANCE = 5 + + # Minimum Visual vs Logical model score difference. + # If the difference is below this, don't rely at all on the model score + # distance. + MIN_MODEL_DISTANCE = 0.01 + + VISUAL_HEBREW_NAME = "ISO-8859-8" + LOGICAL_HEBREW_NAME = "windows-1255" + + def __init__(self): + super(HebrewProber, self).__init__() + self._final_char_logical_score = None + self._final_char_visual_score = None + self._prev = None + self._before_prev = None + self._logical_prober = None + self._visual_prober = None + self.reset() + + def reset(self): + self._final_char_logical_score = 0 + self._final_char_visual_score = 0 + # The two last characters seen in the previous buffer, + # mPrev and mBeforePrev are initialized to space in order to simulate + # a word delimiter at the beginning of the data + self._prev = ' ' + self._before_prev = ' ' + # These probers are owned by the group prober. 
+ + def set_model_probers(self, logicalProber, visualProber): + self._logical_prober = logicalProber + self._visual_prober = visualProber + + def is_final(self, c): + return c in [self.FINAL_KAF, self.FINAL_MEM, self.FINAL_NUN, + self.FINAL_PE, self.FINAL_TSADI] + + def is_non_final(self, c): + # The normal Tsadi is not a good Non-Final letter due to words like + # 'lechotet' (to chat) containing an apostrophe after the tsadi. This + # apostrophe is converted to a space in FilterWithoutEnglishLetters + # causing the Non-Final tsadi to appear at an end of a word even + # though this is not the case in the original text. + # The letters Pe and Kaf rarely display a related behavior of not being + # a good Non-Final letter. Words like 'Pop', 'Winamp' and 'Mubarak' + # for example legally end with a Non-Final Pe or Kaf. However, the + # benefit of these letters as Non-Final letters outweighs the damage + # since these words are quite rare. + return c in [self.NORMAL_KAF, self.NORMAL_MEM, + self.NORMAL_NUN, self.NORMAL_PE] + + def feed(self, byte_str): + # Final letter analysis for logical-visual decision. + # Look for evidence that the received buffer is either logical Hebrew + # or visual Hebrew. + # The following cases are checked: + # 1) A word longer than 1 letter, ending with a final letter. This is + # an indication that the text is laid out "naturally" since the + # final letter really appears at the end. +1 for logical score. + # 2) A word longer than 1 letter, ending with a Non-Final letter. In + # normal Hebrew, words ending with Kaf, Mem, Nun, Pe or Tsadi, + # should not end with the Non-Final form of that letter. Exceptions + # to this rule are mentioned above in isNonFinal(). This is an + # indication that the text is laid out backwards. +1 for visual + # score + # 3) A word longer than 1 letter, starting with a final letter. Final + # letters should not appear at the beginning of a word. This is an + # indication that the text is laid out backwards. 
+1 for visual + # score. + # + # The visual score and logical score are accumulated throughout the + # text and are finally checked against each other in GetCharSetName(). + # No checking for final letters in the middle of words is done since + # that case is not an indication for either Logical or Visual text. + # + # We automatically filter out all 7-bit characters (replace them with + # spaces) so the word boundary detection works properly. [MAP] + + if self.state == ProbingState.NOT_ME: + # Both model probers say it's not them. No reason to continue. + return ProbingState.NOT_ME + + byte_str = self.filter_high_byte_only(byte_str) + + for cur in byte_str: + if cur == ' ': + # We stand on a space - a word just ended + if self._before_prev != ' ': + # next-to-last char was not a space so self._prev is not a + # 1 letter word + if self.is_final(self._prev): + # case (1) [-2:not space][-1:final letter][cur:space] + self._final_char_logical_score += 1 + elif self.is_non_final(self._prev): + # case (2) [-2:not space][-1:Non-Final letter][ + # cur:space] + self._final_char_visual_score += 1 + else: + # Not standing on a space + if ((self._before_prev == ' ') and + (self.is_final(self._prev)) and (cur != ' ')): + # case (3) [-2:space][-1:final letter][cur:not space] + self._final_char_visual_score += 1 + self._before_prev = self._prev + self._prev = cur + + # Forever detecting, till the end or until both model probers return + # ProbingState.NOT_ME (handled above) + return ProbingState.DETECTING + + @property + def charset_name(self): + # Make the decision: is it Logical or Visual? + # If the final letter score distance is dominant enough, rely on it. + finalsub = self._final_char_logical_score - self._final_char_visual_score + if finalsub >= self.MIN_FINAL_CHAR_DISTANCE: + return self.LOGICAL_HEBREW_NAME + if finalsub <= -self.MIN_FINAL_CHAR_DISTANCE: + return self.VISUAL_HEBREW_NAME + + # It's not dominant enough, try to rely on the model scores instead. 
+ modelsub = (self._logical_prober.get_confidence() + - self._visual_prober.get_confidence()) + if modelsub > self.MIN_MODEL_DISTANCE: + return self.LOGICAL_HEBREW_NAME + if modelsub < -self.MIN_MODEL_DISTANCE: + return self.VISUAL_HEBREW_NAME + + # Still no good, back to final letter distance, maybe it'll save the + # day. + if finalsub < 0.0: + return self.VISUAL_HEBREW_NAME + + # (finalsub > 0 - Logical) or (don't know what to do) default to + # Logical. + return self.LOGICAL_HEBREW_NAME + + @property + def language(self): + return 'Hebrew' + + @property + def state(self): + # Remain active as long as any of the model probers are active. + if (self._logical_prober.state == ProbingState.NOT_ME) and \ + (self._visual_prober.state == ProbingState.NOT_ME): + return ProbingState.NOT_ME + return ProbingState.DETECTING diff --git a/thesisenv/lib/python3.6/site-packages/chardet/jisfreq.py b/thesisenv/lib/python3.6/site-packages/chardet/jisfreq.py new file mode 100644 index 0000000..83fc082 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/chardet/jisfreq.py @@ -0,0 +1,325 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +# Sampling from about 20M text materials include literature and computer technology +# +# Japanese frequency table, applied to both S-JIS and EUC-JP +# They are sorted in order. + +# 128 --> 0.77094 +# 256 --> 0.85710 +# 512 --> 0.92635 +# 1024 --> 0.97130 +# 2048 --> 0.99431 +# +# Ideal Distribution Ratio = 0.92635 / (1-0.92635) = 12.58 +# Random Distribution Ration = 512 / (2965+62+83+86-512) = 0.191 +# +# Typical Distribution Ratio, 25% of IDR + +JIS_TYPICAL_DISTRIBUTION_RATIO = 3.0 + +# Char to FreqOrder table , +JIS_TABLE_SIZE = 4368 + +JIS_CHAR_TO_FREQ_ORDER = ( + 40, 1, 6, 182, 152, 180, 295,2127, 285, 381,3295,4304,3068,4606,3165,3510, # 16 +3511,1822,2785,4607,1193,2226,5070,4608, 171,2996,1247, 18, 179,5071, 856,1661, # 32 +1262,5072, 619, 127,3431,3512,3230,1899,1700, 232, 228,1294,1298, 284, 283,2041, # 48 +2042,1061,1062, 48, 49, 44, 45, 433, 434,1040,1041, 996, 787,2997,1255,4305, # 64 +2108,4609,1684,1648,5073,5074,5075,5076,5077,5078,3687,5079,4610,5080,3927,3928, # 80 +5081,3296,3432, 290,2285,1471,2187,5082,2580,2825,1303,2140,1739,1445,2691,3375, # 96 +1691,3297,4306,4307,4611, 452,3376,1182,2713,3688,3069,4308,5083,5084,5085,5086, # 112 +5087,5088,5089,5090,5091,5092,5093,5094,5095,5096,5097,5098,5099,5100,5101,5102, # 128 +5103,5104,5105,5106,5107,5108,5109,5110,5111,5112,4097,5113,5114,5115,5116,5117, # 144 +5118,5119,5120,5121,5122,5123,5124,5125,5126,5127,5128,5129,5130,5131,5132,5133, # 160 +5134,5135,5136,5137,5138,5139,5140,5141,5142,5143,5144,5145,5146,5147,5148,5149, # 176 +5150,5151,5152,4612,5153,5154,5155,5156,5157,5158,5159,5160,5161,5162,5163,5164, # 192 
+5165,5166,5167,5168,5169,5170,5171,5172,5173,5174,5175,1472, 598, 618, 820,1205, # 208 +1309,1412,1858,1307,1692,5176,5177,5178,5179,5180,5181,5182,1142,1452,1234,1172, # 224 +1875,2043,2149,1793,1382,2973, 925,2404,1067,1241, 960,1377,2935,1491, 919,1217, # 240 +1865,2030,1406,1499,2749,4098,5183,5184,5185,5186,5187,5188,2561,4099,3117,1804, # 256 +2049,3689,4309,3513,1663,5189,3166,3118,3298,1587,1561,3433,5190,3119,1625,2998, # 272 +3299,4613,1766,3690,2786,4614,5191,5192,5193,5194,2161, 26,3377, 2,3929, 20, # 288 +3691, 47,4100, 50, 17, 16, 35, 268, 27, 243, 42, 155, 24, 154, 29, 184, # 304 + 4, 91, 14, 92, 53, 396, 33, 289, 9, 37, 64, 620, 21, 39, 321, 5, # 320 + 12, 11, 52, 13, 3, 208, 138, 0, 7, 60, 526, 141, 151,1069, 181, 275, # 336 +1591, 83, 132,1475, 126, 331, 829, 15, 69, 160, 59, 22, 157, 55,1079, 312, # 352 + 109, 38, 23, 25, 10, 19, 79,5195, 61, 382,1124, 8, 30,5196,5197,5198, # 368 +5199,5200,5201,5202,5203,5204,5205,5206, 89, 62, 74, 34,2416, 112, 139, 196, # 384 + 271, 149, 84, 607, 131, 765, 46, 88, 153, 683, 76, 874, 101, 258, 57, 80, # 400 + 32, 364, 121,1508, 169,1547, 68, 235, 145,2999, 41, 360,3027, 70, 63, 31, # 416 + 43, 259, 262,1383, 99, 533, 194, 66, 93, 846, 217, 192, 56, 106, 58, 565, # 432 + 280, 272, 311, 256, 146, 82, 308, 71, 100, 128, 214, 655, 110, 261, 104,1140, # 448 + 54, 51, 36, 87, 67,3070, 185,2618,2936,2020, 28,1066,2390,2059,5207,5208, # 464 +5209,5210,5211,5212,5213,5214,5215,5216,4615,5217,5218,5219,5220,5221,5222,5223, # 480 +5224,5225,5226,5227,5228,5229,5230,5231,5232,5233,5234,5235,5236,3514,5237,5238, # 496 +5239,5240,5241,5242,5243,5244,2297,2031,4616,4310,3692,5245,3071,5246,3598,5247, # 512 +4617,3231,3515,5248,4101,4311,4618,3808,4312,4102,5249,4103,4104,3599,5250,5251, # 528 +5252,5253,5254,5255,5256,5257,5258,5259,5260,5261,5262,5263,5264,5265,5266,5267, # 544 +5268,5269,5270,5271,5272,5273,5274,5275,5276,5277,5278,5279,5280,5281,5282,5283, # 560 
+5284,5285,5286,5287,5288,5289,5290,5291,5292,5293,5294,5295,5296,5297,5298,5299, # 576 +5300,5301,5302,5303,5304,5305,5306,5307,5308,5309,5310,5311,5312,5313,5314,5315, # 592 +5316,5317,5318,5319,5320,5321,5322,5323,5324,5325,5326,5327,5328,5329,5330,5331, # 608 +5332,5333,5334,5335,5336,5337,5338,5339,5340,5341,5342,5343,5344,5345,5346,5347, # 624 +5348,5349,5350,5351,5352,5353,5354,5355,5356,5357,5358,5359,5360,5361,5362,5363, # 640 +5364,5365,5366,5367,5368,5369,5370,5371,5372,5373,5374,5375,5376,5377,5378,5379, # 656 +5380,5381, 363, 642,2787,2878,2788,2789,2316,3232,2317,3434,2011, 165,1942,3930, # 672 +3931,3932,3933,5382,4619,5383,4620,5384,5385,5386,5387,5388,5389,5390,5391,5392, # 688 +5393,5394,5395,5396,5397,5398,5399,5400,5401,5402,5403,5404,5405,5406,5407,5408, # 704 +5409,5410,5411,5412,5413,5414,5415,5416,5417,5418,5419,5420,5421,5422,5423,5424, # 720 +5425,5426,5427,5428,5429,5430,5431,5432,5433,5434,5435,5436,5437,5438,5439,5440, # 736 +5441,5442,5443,5444,5445,5446,5447,5448,5449,5450,5451,5452,5453,5454,5455,5456, # 752 +5457,5458,5459,5460,5461,5462,5463,5464,5465,5466,5467,5468,5469,5470,5471,5472, # 768 +5473,5474,5475,5476,5477,5478,5479,5480,5481,5482,5483,5484,5485,5486,5487,5488, # 784 +5489,5490,5491,5492,5493,5494,5495,5496,5497,5498,5499,5500,5501,5502,5503,5504, # 800 +5505,5506,5507,5508,5509,5510,5511,5512,5513,5514,5515,5516,5517,5518,5519,5520, # 816 +5521,5522,5523,5524,5525,5526,5527,5528,5529,5530,5531,5532,5533,5534,5535,5536, # 832 +5537,5538,5539,5540,5541,5542,5543,5544,5545,5546,5547,5548,5549,5550,5551,5552, # 848 +5553,5554,5555,5556,5557,5558,5559,5560,5561,5562,5563,5564,5565,5566,5567,5568, # 864 +5569,5570,5571,5572,5573,5574,5575,5576,5577,5578,5579,5580,5581,5582,5583,5584, # 880 +5585,5586,5587,5588,5589,5590,5591,5592,5593,5594,5595,5596,5597,5598,5599,5600, # 896 +5601,5602,5603,5604,5605,5606,5607,5608,5609,5610,5611,5612,5613,5614,5615,5616, # 912 
+5617,5618,5619,5620,5621,5622,5623,5624,5625,5626,5627,5628,5629,5630,5631,5632, # 928 +5633,5634,5635,5636,5637,5638,5639,5640,5641,5642,5643,5644,5645,5646,5647,5648, # 944 +5649,5650,5651,5652,5653,5654,5655,5656,5657,5658,5659,5660,5661,5662,5663,5664, # 960 +5665,5666,5667,5668,5669,5670,5671,5672,5673,5674,5675,5676,5677,5678,5679,5680, # 976 +5681,5682,5683,5684,5685,5686,5687,5688,5689,5690,5691,5692,5693,5694,5695,5696, # 992 +5697,5698,5699,5700,5701,5702,5703,5704,5705,5706,5707,5708,5709,5710,5711,5712, # 1008 +5713,5714,5715,5716,5717,5718,5719,5720,5721,5722,5723,5724,5725,5726,5727,5728, # 1024 +5729,5730,5731,5732,5733,5734,5735,5736,5737,5738,5739,5740,5741,5742,5743,5744, # 1040 +5745,5746,5747,5748,5749,5750,5751,5752,5753,5754,5755,5756,5757,5758,5759,5760, # 1056 +5761,5762,5763,5764,5765,5766,5767,5768,5769,5770,5771,5772,5773,5774,5775,5776, # 1072 +5777,5778,5779,5780,5781,5782,5783,5784,5785,5786,5787,5788,5789,5790,5791,5792, # 1088 +5793,5794,5795,5796,5797,5798,5799,5800,5801,5802,5803,5804,5805,5806,5807,5808, # 1104 +5809,5810,5811,5812,5813,5814,5815,5816,5817,5818,5819,5820,5821,5822,5823,5824, # 1120 +5825,5826,5827,5828,5829,5830,5831,5832,5833,5834,5835,5836,5837,5838,5839,5840, # 1136 +5841,5842,5843,5844,5845,5846,5847,5848,5849,5850,5851,5852,5853,5854,5855,5856, # 1152 +5857,5858,5859,5860,5861,5862,5863,5864,5865,5866,5867,5868,5869,5870,5871,5872, # 1168 +5873,5874,5875,5876,5877,5878,5879,5880,5881,5882,5883,5884,5885,5886,5887,5888, # 1184 +5889,5890,5891,5892,5893,5894,5895,5896,5897,5898,5899,5900,5901,5902,5903,5904, # 1200 +5905,5906,5907,5908,5909,5910,5911,5912,5913,5914,5915,5916,5917,5918,5919,5920, # 1216 +5921,5922,5923,5924,5925,5926,5927,5928,5929,5930,5931,5932,5933,5934,5935,5936, # 1232 +5937,5938,5939,5940,5941,5942,5943,5944,5945,5946,5947,5948,5949,5950,5951,5952, # 1248 +5953,5954,5955,5956,5957,5958,5959,5960,5961,5962,5963,5964,5965,5966,5967,5968, # 1264 
+5969,5970,5971,5972,5973,5974,5975,5976,5977,5978,5979,5980,5981,5982,5983,5984, # 1280 +5985,5986,5987,5988,5989,5990,5991,5992,5993,5994,5995,5996,5997,5998,5999,6000, # 1296 +6001,6002,6003,6004,6005,6006,6007,6008,6009,6010,6011,6012,6013,6014,6015,6016, # 1312 +6017,6018,6019,6020,6021,6022,6023,6024,6025,6026,6027,6028,6029,6030,6031,6032, # 1328 +6033,6034,6035,6036,6037,6038,6039,6040,6041,6042,6043,6044,6045,6046,6047,6048, # 1344 +6049,6050,6051,6052,6053,6054,6055,6056,6057,6058,6059,6060,6061,6062,6063,6064, # 1360 +6065,6066,6067,6068,6069,6070,6071,6072,6073,6074,6075,6076,6077,6078,6079,6080, # 1376 +6081,6082,6083,6084,6085,6086,6087,6088,6089,6090,6091,6092,6093,6094,6095,6096, # 1392 +6097,6098,6099,6100,6101,6102,6103,6104,6105,6106,6107,6108,6109,6110,6111,6112, # 1408 +6113,6114,2044,2060,4621, 997,1235, 473,1186,4622, 920,3378,6115,6116, 379,1108, # 1424 +4313,2657,2735,3934,6117,3809, 636,3233, 573,1026,3693,3435,2974,3300,2298,4105, # 1440 + 854,2937,2463, 393,2581,2417, 539, 752,1280,2750,2480, 140,1161, 440, 708,1569, # 1456 + 665,2497,1746,1291,1523,3000, 164,1603, 847,1331, 537,1997, 486, 508,1693,2418, # 1472 +1970,2227, 878,1220, 299,1030, 969, 652,2751, 624,1137,3301,2619, 65,3302,2045, # 1488 +1761,1859,3120,1930,3694,3516, 663,1767, 852, 835,3695, 269, 767,2826,2339,1305, # 1504 + 896,1150, 770,1616,6118, 506,1502,2075,1012,2519, 775,2520,2975,2340,2938,4314, # 1520 +3028,2086,1224,1943,2286,6119,3072,4315,2240,1273,1987,3935,1557, 175, 597, 985, # 1536 +3517,2419,2521,1416,3029, 585, 938,1931,1007,1052,1932,1685,6120,3379,4316,4623, # 1552 + 804, 599,3121,1333,2128,2539,1159,1554,2032,3810, 687,2033,2904, 952, 675,1467, # 1568 +3436,6121,2241,1096,1786,2440,1543,1924, 980,1813,2228, 781,2692,1879, 728,1918, # 1584 +3696,4624, 548,1950,4625,1809,1088,1356,3303,2522,1944, 502, 972, 373, 513,2827, # 1600 + 586,2377,2391,1003,1976,1631,6122,2464,1084, 648,1776,4626,2141, 324, 962,2012, # 1616 +2177,2076,1384, 742,2178,1448,1173,1810, 
222, 102, 301, 445, 125,2420, 662,2498, # 1632 + 277, 200,1476,1165,1068, 224,2562,1378,1446, 450,1880, 659, 791, 582,4627,2939, # 1648 +3936,1516,1274, 555,2099,3697,1020,1389,1526,3380,1762,1723,1787,2229, 412,2114, # 1664 +1900,2392,3518, 512,2597, 427,1925,2341,3122,1653,1686,2465,2499, 697, 330, 273, # 1680 + 380,2162, 951, 832, 780, 991,1301,3073, 965,2270,3519, 668,2523,2636,1286, 535, # 1696 +1407, 518, 671, 957,2658,2378, 267, 611,2197,3030,6123, 248,2299, 967,1799,2356, # 1712 + 850,1418,3437,1876,1256,1480,2828,1718,6124,6125,1755,1664,2405,6126,4628,2879, # 1728 +2829, 499,2179, 676,4629, 557,2329,2214,2090, 325,3234, 464, 811,3001, 992,2342, # 1744 +2481,1232,1469, 303,2242, 466,1070,2163, 603,1777,2091,4630,2752,4631,2714, 322, # 1760 +2659,1964,1768, 481,2188,1463,2330,2857,3600,2092,3031,2421,4632,2318,2070,1849, # 1776 +2598,4633,1302,2254,1668,1701,2422,3811,2905,3032,3123,2046,4106,1763,1694,4634, # 1792 +1604, 943,1724,1454, 917, 868,2215,1169,2940, 552,1145,1800,1228,1823,1955, 316, # 1808 +1080,2510, 361,1807,2830,4107,2660,3381,1346,1423,1134,4108,6127, 541,1263,1229, # 1824 +1148,2540, 545, 465,1833,2880,3438,1901,3074,2482, 816,3937, 713,1788,2500, 122, # 1840 +1575, 195,1451,2501,1111,6128, 859, 374,1225,2243,2483,4317, 390,1033,3439,3075, # 1856 +2524,1687, 266, 793,1440,2599, 946, 779, 802, 507, 897,1081, 528,2189,1292, 711, # 1872 +1866,1725,1167,1640, 753, 398,2661,1053, 246, 348,4318, 137,1024,3440,1600,2077, # 1888 +2129, 825,4319, 698, 238, 521, 187,2300,1157,2423,1641,1605,1464,1610,1097,2541, # 1904 +1260,1436, 759,2255,1814,2150, 705,3235, 409,2563,3304, 561,3033,2005,2564, 726, # 1920 +1956,2343,3698,4109, 949,3812,3813,3520,1669, 653,1379,2525, 881,2198, 632,2256, # 1936 +1027, 778,1074, 733,1957, 514,1481,2466, 554,2180, 702,3938,1606,1017,1398,6129, # 1952 +1380,3521, 921, 993,1313, 594, 449,1489,1617,1166, 768,1426,1360, 495,1794,3601, # 1968 +1177,3602,1170,4320,2344, 476, 425,3167,4635,3168,1424, 401,2662,1171,3382,1998, # 
1984 +1089,4110, 477,3169, 474,6130,1909, 596,2831,1842, 494, 693,1051,1028,1207,3076, # 2000 + 606,2115, 727,2790,1473,1115, 743,3522, 630, 805,1532,4321,2021, 366,1057, 838, # 2016 + 684,1114,2142,4322,2050,1492,1892,1808,2271,3814,2424,1971,1447,1373,3305,1090, # 2032 +1536,3939,3523,3306,1455,2199, 336, 369,2331,1035, 584,2393, 902, 718,2600,6131, # 2048 +2753, 463,2151,1149,1611,2467, 715,1308,3124,1268, 343,1413,3236,1517,1347,2663, # 2064 +2093,3940,2022,1131,1553,2100,2941,1427,3441,2942,1323,2484,6132,1980, 872,2368, # 2080 +2441,2943, 320,2369,2116,1082, 679,1933,3941,2791,3815, 625,1143,2023, 422,2200, # 2096 +3816,6133, 730,1695, 356,2257,1626,2301,2858,2637,1627,1778, 937, 883,2906,2693, # 2112 +3002,1769,1086, 400,1063,1325,3307,2792,4111,3077, 456,2345,1046, 747,6134,1524, # 2128 + 884,1094,3383,1474,2164,1059, 974,1688,2181,2258,1047, 345,1665,1187, 358, 875, # 2144 +3170, 305, 660,3524,2190,1334,1135,3171,1540,1649,2542,1527, 927, 968,2793, 885, # 2160 +1972,1850, 482, 500,2638,1218,1109,1085,2543,1654,2034, 876, 78,2287,1482,1277, # 2176 + 861,1675,1083,1779, 724,2754, 454, 397,1132,1612,2332, 893, 672,1237, 257,2259, # 2192 +2370, 135,3384, 337,2244, 547, 352, 340, 709,2485,1400, 788,1138,2511, 540, 772, # 2208 +1682,2260,2272,2544,2013,1843,1902,4636,1999,1562,2288,4637,2201,1403,1533, 407, # 2224 + 576,3308,1254,2071, 978,3385, 170, 136,1201,3125,2664,3172,2394, 213, 912, 873, # 2240 +3603,1713,2202, 699,3604,3699, 813,3442, 493, 531,1054, 468,2907,1483, 304, 281, # 2256 +4112,1726,1252,2094, 339,2319,2130,2639, 756,1563,2944, 748, 571,2976,1588,2425, # 2272 +2715,1851,1460,2426,1528,1392,1973,3237, 288,3309, 685,3386, 296, 892,2716,2216, # 2288 +1570,2245, 722,1747,2217, 905,3238,1103,6135,1893,1441,1965, 251,1805,2371,3700, # 2304 +2601,1919,1078, 75,2182,1509,1592,1270,2640,4638,2152,6136,3310,3817, 524, 706, # 2320 +1075, 292,3818,1756,2602, 317, 98,3173,3605,3525,1844,2218,3819,2502, 814, 567, # 2336 + 385,2908,1534,6137, 534,1642,3239, 
797,6138,1670,1529, 953,4323, 188,1071, 538, # 2352 + 178, 729,3240,2109,1226,1374,2000,2357,2977, 731,2468,1116,2014,2051,6139,1261, # 2368 +1593, 803,2859,2736,3443, 556, 682, 823,1541,6140,1369,2289,1706,2794, 845, 462, # 2384 +2603,2665,1361, 387, 162,2358,1740, 739,1770,1720,1304,1401,3241,1049, 627,1571, # 2400 +2427,3526,1877,3942,1852,1500, 431,1910,1503, 677, 297,2795, 286,1433,1038,1198, # 2416 +2290,1133,1596,4113,4639,2469,1510,1484,3943,6141,2442, 108, 712,4640,2372, 866, # 2432 +3701,2755,3242,1348, 834,1945,1408,3527,2395,3243,1811, 824, 994,1179,2110,1548, # 2448 +1453, 790,3003, 690,4324,4325,2832,2909,3820,1860,3821, 225,1748, 310, 346,1780, # 2464 +2470, 821,1993,2717,2796, 828, 877,3528,2860,2471,1702,2165,2910,2486,1789, 453, # 2480 + 359,2291,1676, 73,1164,1461,1127,3311, 421, 604, 314,1037, 589, 116,2487, 737, # 2496 + 837,1180, 111, 244, 735,6142,2261,1861,1362, 986, 523, 418, 581,2666,3822, 103, # 2512 + 855, 503,1414,1867,2488,1091, 657,1597, 979, 605,1316,4641,1021,2443,2078,2001, # 2528 +1209, 96, 587,2166,1032, 260,1072,2153, 173, 94, 226,3244, 819,2006,4642,4114, # 2544 +2203, 231,1744, 782, 97,2667, 786,3387, 887, 391, 442,2219,4326,1425,6143,2694, # 2560 + 633,1544,1202, 483,2015, 592,2052,1958,2472,1655, 419, 129,4327,3444,3312,1714, # 2576 +1257,3078,4328,1518,1098, 865,1310,1019,1885,1512,1734, 469,2444, 148, 773, 436, # 2592 +1815,1868,1128,1055,4329,1245,2756,3445,2154,1934,1039,4643, 579,1238, 932,2320, # 2608 + 353, 205, 801, 115,2428, 944,2321,1881, 399,2565,1211, 678, 766,3944, 335,2101, # 2624 +1459,1781,1402,3945,2737,2131,1010, 844, 981,1326,1013, 550,1816,1545,2620,1335, # 2640 +1008, 371,2881, 936,1419,1613,3529,1456,1395,2273,1834,2604,1317,2738,2503, 416, # 2656 +1643,4330, 806,1126, 229, 591,3946,1314,1981,1576,1837,1666, 347,1790, 977,3313, # 2672 + 764,2861,1853, 688,2429,1920,1462, 77, 595, 415,2002,3034, 798,1192,4115,6144, # 2688 +2978,4331,3035,2695,2582,2072,2566, 430,2430,1727, 842,1396,3947,3702, 613, 377, # 
2704 + 278, 236,1417,3388,3314,3174, 757,1869, 107,3530,6145,1194, 623,2262, 207,1253, # 2720 +2167,3446,3948, 492,1117,1935, 536,1838,2757,1246,4332, 696,2095,2406,1393,1572, # 2736 +3175,1782, 583, 190, 253,1390,2230, 830,3126,3389, 934,3245,1703,1749,2979,1870, # 2752 +2545,1656,2204, 869,2346,4116,3176,1817, 496,1764,4644, 942,1504, 404,1903,1122, # 2768 +1580,3606,2945,1022, 515, 372,1735, 955,2431,3036,6146,2797,1110,2302,2798, 617, # 2784 +6147, 441, 762,1771,3447,3607,3608,1904, 840,3037, 86, 939,1385, 572,1370,2445, # 2800 +1336, 114,3703, 898, 294, 203,3315, 703,1583,2274, 429, 961,4333,1854,1951,3390, # 2816 +2373,3704,4334,1318,1381, 966,1911,2322,1006,1155, 309, 989, 458,2718,1795,1372, # 2832 +1203, 252,1689,1363,3177, 517,1936, 168,1490, 562, 193,3823,1042,4117,1835, 551, # 2848 + 470,4645, 395, 489,3448,1871,1465,2583,2641, 417,1493, 279,1295, 511,1236,1119, # 2864 + 72,1231,1982,1812,3004, 871,1564, 984,3449,1667,2696,2096,4646,2347,2833,1673, # 2880 +3609, 695,3246,2668, 807,1183,4647, 890, 388,2333,1801,1457,2911,1765,1477,1031, # 2896 +3316,3317,1278,3391,2799,2292,2526, 163,3450,4335,2669,1404,1802,6148,2323,2407, # 2912 +1584,1728,1494,1824,1269, 298, 909,3318,1034,1632, 375, 776,1683,2061, 291, 210, # 2928 +1123, 809,1249,1002,2642,3038, 206,1011,2132, 144, 975, 882,1565, 342, 667, 754, # 2944 +1442,2143,1299,2303,2062, 447, 626,2205,1221,2739,2912,1144,1214,2206,2584, 760, # 2960 +1715, 614, 950,1281,2670,2621, 810, 577,1287,2546,4648, 242,2168, 250,2643, 691, # 2976 + 123,2644, 647, 313,1029, 689,1357,2946,1650, 216, 771,1339,1306, 808,2063, 549, # 2992 + 913,1371,2913,2914,6149,1466,1092,1174,1196,1311,2605,2396,1783,1796,3079, 406, # 3008 +2671,2117,3949,4649, 487,1825,2220,6150,2915, 448,2348,1073,6151,2397,1707, 130, # 3024 + 900,1598, 329, 176,1959,2527,1620,6152,2275,4336,3319,1983,2191,3705,3610,2155, # 3040 +3706,1912,1513,1614,6153,1988, 646, 392,2304,1589,3320,3039,1826,1239,1352,1340, # 3056 +2916, 505,2567,1709,1437,2408,2547, 
906,6154,2672, 384,1458,1594,1100,1329, 710, # 3072 + 423,3531,2064,2231,2622,1989,2673,1087,1882, 333, 841,3005,1296,2882,2379, 580, # 3088 +1937,1827,1293,2585, 601, 574, 249,1772,4118,2079,1120, 645, 901,1176,1690, 795, # 3104 +2207, 478,1434, 516,1190,1530, 761,2080, 930,1264, 355, 435,1552, 644,1791, 987, # 3120 + 220,1364,1163,1121,1538, 306,2169,1327,1222, 546,2645, 218, 241, 610,1704,3321, # 3136 +1984,1839,1966,2528, 451,6155,2586,3707,2568, 907,3178, 254,2947, 186,1845,4650, # 3152 + 745, 432,1757, 428,1633, 888,2246,2221,2489,3611,2118,1258,1265, 956,3127,1784, # 3168 +4337,2490, 319, 510, 119, 457,3612, 274,2035,2007,4651,1409,3128, 970,2758, 590, # 3184 +2800, 661,2247,4652,2008,3950,1420,1549,3080,3322,3951,1651,1375,2111, 485,2491, # 3200 +1429,1156,6156,2548,2183,1495, 831,1840,2529,2446, 501,1657, 307,1894,3247,1341, # 3216 + 666, 899,2156,1539,2549,1559, 886, 349,2208,3081,2305,1736,3824,2170,2759,1014, # 3232 +1913,1386, 542,1397,2948, 490, 368, 716, 362, 159, 282,2569,1129,1658,1288,1750, # 3248 +2674, 276, 649,2016, 751,1496, 658,1818,1284,1862,2209,2087,2512,3451, 622,2834, # 3264 + 376, 117,1060,2053,1208,1721,1101,1443, 247,1250,3179,1792,3952,2760,2398,3953, # 3280 +6157,2144,3708, 446,2432,1151,2570,3452,2447,2761,2835,1210,2448,3082, 424,2222, # 3296 +1251,2449,2119,2836, 504,1581,4338, 602, 817, 857,3825,2349,2306, 357,3826,1470, # 3312 +1883,2883, 255, 958, 929,2917,3248, 302,4653,1050,1271,1751,2307,1952,1430,2697, # 3328 +2719,2359, 354,3180, 777, 158,2036,4339,1659,4340,4654,2308,2949,2248,1146,2232, # 3344 +3532,2720,1696,2623,3827,6158,3129,1550,2698,1485,1297,1428, 637, 931,2721,2145, # 3360 + 914,2550,2587, 81,2450, 612, 827,2646,1242,4655,1118,2884, 472,1855,3181,3533, # 3376 +3534, 569,1353,2699,1244,1758,2588,4119,2009,2762,2171,3709,1312,1531,6159,1152, # 3392 +1938, 134,1830, 471,3710,2276,1112,1535,3323,3453,3535, 982,1337,2950, 488, 826, # 3408 + 674,1058,1628,4120,2017, 522,2399, 211, 568,1367,3454, 350, 
293,1872,1139,3249, # 3424 +1399,1946,3006,1300,2360,3324, 588, 736,6160,2606, 744, 669,3536,3828,6161,1358, # 3440 + 199, 723, 848, 933, 851,1939,1505,1514,1338,1618,1831,4656,1634,3613, 443,2740, # 3456 +3829, 717,1947, 491,1914,6162,2551,1542,4121,1025,6163,1099,1223, 198,3040,2722, # 3472 + 370, 410,1905,2589, 998,1248,3182,2380, 519,1449,4122,1710, 947, 928,1153,4341, # 3488 +2277, 344,2624,1511, 615, 105, 161,1212,1076,1960,3130,2054,1926,1175,1906,2473, # 3504 + 414,1873,2801,6164,2309, 315,1319,3325, 318,2018,2146,2157, 963, 631, 223,4342, # 3520 +4343,2675, 479,3711,1197,2625,3712,2676,2361,6165,4344,4123,6166,2451,3183,1886, # 3536 +2184,1674,1330,1711,1635,1506, 799, 219,3250,3083,3954,1677,3713,3326,2081,3614, # 3552 +1652,2073,4657,1147,3041,1752, 643,1961, 147,1974,3955,6167,1716,2037, 918,3007, # 3568 +1994, 120,1537, 118, 609,3184,4345, 740,3455,1219, 332,1615,3830,6168,1621,2980, # 3584 +1582, 783, 212, 553,2350,3714,1349,2433,2082,4124, 889,6169,2310,1275,1410, 973, # 3600 + 166,1320,3456,1797,1215,3185,2885,1846,2590,2763,4658, 629, 822,3008, 763, 940, # 3616 +1990,2862, 439,2409,1566,1240,1622, 926,1282,1907,2764, 654,2210,1607, 327,1130, # 3632 +3956,1678,1623,6170,2434,2192, 686, 608,3831,3715, 903,3957,3042,6171,2741,1522, # 3648 +1915,1105,1555,2552,1359, 323,3251,4346,3457, 738,1354,2553,2311,2334,1828,2003, # 3664 +3832,1753,2351,1227,6172,1887,4125,1478,6173,2410,1874,1712,1847, 520,1204,2607, # 3680 + 264,4659, 836,2677,2102, 600,4660,3833,2278,3084,6174,4347,3615,1342, 640, 532, # 3696 + 543,2608,1888,2400,2591,1009,4348,1497, 341,1737,3616,2723,1394, 529,3252,1321, # 3712 + 983,4661,1515,2120, 971,2592, 924, 287,1662,3186,4349,2700,4350,1519, 908,1948, # 3728 +2452, 156, 796,1629,1486,2223,2055, 694,4126,1259,1036,3392,1213,2249,2742,1889, # 3744 +1230,3958,1015, 910, 408, 559,3617,4662, 746, 725, 935,4663,3959,3009,1289, 563, # 3760 + 867,4664,3960,1567,2981,2038,2626, 988,2263,2381,4351, 143,2374, 704,1895,6175, # 3776 
+1188,3716,2088, 673,3085,2362,4352, 484,1608,1921,2765,2918, 215, 904,3618,3537, # 3792 + 894, 509, 976,3043,2701,3961,4353,2837,2982, 498,6176,6177,1102,3538,1332,3393, # 3808 +1487,1636,1637, 233, 245,3962, 383, 650, 995,3044, 460,1520,1206,2352, 749,3327, # 3824 + 530, 700, 389,1438,1560,1773,3963,2264, 719,2951,2724,3834, 870,1832,1644,1000, # 3840 + 839,2474,3717, 197,1630,3394, 365,2886,3964,1285,2133, 734, 922, 818,1106, 732, # 3856 + 480,2083,1774,3458, 923,2279,1350, 221,3086, 85,2233,2234,3835,1585,3010,2147, # 3872 +1387,1705,2382,1619,2475, 133, 239,2802,1991,1016,2084,2383, 411,2838,1113, 651, # 3888 +1985,1160,3328, 990,1863,3087,1048,1276,2647, 265,2627,1599,3253,2056, 150, 638, # 3904 +2019, 656, 853, 326,1479, 680,1439,4354,1001,1759, 413,3459,3395,2492,1431, 459, # 3920 +4355,1125,3329,2265,1953,1450,2065,2863, 849, 351,2678,3131,3254,3255,1104,1577, # 3936 + 227,1351,1645,2453,2193,1421,2887, 812,2121, 634, 95,2435, 201,2312,4665,1646, # 3952 +1671,2743,1601,2554,2702,2648,2280,1315,1366,2089,3132,1573,3718,3965,1729,1189, # 3968 + 328,2679,1077,1940,1136, 558,1283, 964,1195, 621,2074,1199,1743,3460,3619,1896, # 3984 +1916,1890,3836,2952,1154,2112,1064, 862, 378,3011,2066,2113,2803,1568,2839,6178, # 4000 +3088,2919,1941,1660,2004,1992,2194, 142, 707,1590,1708,1624,1922,1023,1836,1233, # 4016 +1004,2313, 789, 741,3620,6179,1609,2411,1200,4127,3719,3720,4666,2057,3721, 593, # 4032 +2840, 367,2920,1878,6180,3461,1521, 628,1168, 692,2211,2649, 300, 720,2067,2571, # 4048 +2953,3396, 959,2504,3966,3539,3462,1977, 701,6181, 954,1043, 800, 681, 183,3722, # 4064 +1803,1730,3540,4128,2103, 815,2314, 174, 467, 230,2454,1093,2134, 755,3541,3397, # 4080 +1141,1162,6182,1738,2039, 270,3256,2513,1005,1647,2185,3837, 858,1679,1897,1719, # 4096 +2954,2324,1806, 402, 670, 167,4129,1498,2158,2104, 750,6183, 915, 189,1680,1551, # 4112 + 455,4356,1501,2455, 405,1095,2955, 338,1586,1266,1819, 570, 641,1324, 237,1556, # 4128 
+2650,1388,3723,6184,1368,2384,1343,1978,3089,2436, 879,3724, 792,1191, 758,3012, # 4144 +1411,2135,1322,4357, 240,4667,1848,3725,1574,6185, 420,3045,1546,1391, 714,4358, # 4160 +1967, 941,1864, 863, 664, 426, 560,1731,2680,1785,2864,1949,2363, 403,3330,1415, # 4176 +1279,2136,1697,2335, 204, 721,2097,3838, 90,6186,2085,2505, 191,3967, 124,2148, # 4192 +1376,1798,1178,1107,1898,1405, 860,4359,1243,1272,2375,2983,1558,2456,1638, 113, # 4208 +3621, 578,1923,2609, 880, 386,4130, 784,2186,2266,1422,2956,2172,1722, 497, 263, # 4224 +2514,1267,2412,2610, 177,2703,3542, 774,1927,1344, 616,1432,1595,1018, 172,4360, # 4240 +2325, 911,4361, 438,1468,3622, 794,3968,2024,2173,1681,1829,2957, 945, 895,3090, # 4256 + 575,2212,2476, 475,2401,2681, 785,2744,1745,2293,2555,1975,3133,2865, 394,4668, # 4272 +3839, 635,4131, 639, 202,1507,2195,2766,1345,1435,2572,3726,1908,1184,1181,2457, # 4288 +3727,3134,4362, 843,2611, 437, 916,4669, 234, 769,1884,3046,3047,3623, 833,6187, # 4304 +1639,2250,2402,1355,1185,2010,2047, 999, 525,1732,1290,1488,2612, 948,1578,3728, # 4320 +2413,2477,1216,2725,2159, 334,3840,1328,3624,2921,1525,4132, 564,1056, 891,4363, # 4336 +1444,1698,2385,2251,3729,1365,2281,2235,1717,6188, 864,3841,2515, 444, 527,2767, # 4352 +2922,3625, 544, 461,6189, 566, 209,2437,3398,2098,1065,2068,3331,3626,3257,2137, # 4368 #last 512 +) + + diff --git a/thesisenv/lib/python3.6/site-packages/chardet/jpcntx.py b/thesisenv/lib/python3.6/site-packages/chardet/jpcntx.py new file mode 100644 index 0000000..20044e4 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/chardet/jpcntx.py @@ -0,0 +1,233 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. 
+# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + + +# This is hiragana 2-char sequence table, the number in each cell represents its frequency category +jp2CharContext = ( +(0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1), +(2,4,0,4,0,3,0,4,0,3,4,4,4,2,4,3,3,4,3,2,3,3,4,2,3,3,3,2,4,1,4,3,3,1,5,4,3,4,3,4,3,5,3,0,3,5,4,2,0,3,1,0,3,3,0,3,3,0,1,1,0,4,3,0,3,3,0,4,0,2,0,3,5,5,5,5,4,0,4,1,0,3,4), +(0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2), +(0,4,0,5,0,5,0,4,0,4,5,4,4,3,5,3,5,1,5,3,4,3,4,4,3,4,3,3,4,3,5,4,4,3,5,5,3,5,5,5,3,5,5,3,4,5,5,3,1,3,2,0,3,4,0,4,2,0,4,2,1,5,3,2,3,5,0,4,0,2,0,5,4,4,5,4,5,0,4,0,0,4,4), +(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0), 
+(0,3,0,4,0,3,0,3,0,4,5,4,3,3,3,3,4,3,5,4,4,3,5,4,4,3,4,3,4,4,4,4,5,3,4,4,3,4,5,5,4,5,5,1,4,5,4,3,0,3,3,1,3,3,0,4,4,0,3,3,1,5,3,3,3,5,0,4,0,3,0,4,4,3,4,3,3,0,4,1,1,3,4), +(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0), +(0,4,0,3,0,3,0,4,0,3,4,4,3,2,2,1,2,1,3,1,3,3,3,3,3,4,3,1,3,3,5,3,3,0,4,3,0,5,4,3,3,5,4,4,3,4,4,5,0,1,2,0,1,2,0,2,2,0,1,0,0,5,2,2,1,4,0,3,0,1,0,4,4,3,5,4,3,0,2,1,0,4,3), +(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0), +(0,3,0,5,0,4,0,2,1,4,4,2,4,1,4,2,4,2,4,3,3,3,4,3,3,3,3,1,4,2,3,3,3,1,4,4,1,1,1,4,3,3,2,0,2,4,3,2,0,3,3,0,3,1,1,0,0,0,3,3,0,4,2,2,3,4,0,4,0,3,0,4,4,5,3,4,4,0,3,0,0,1,4), +(1,4,0,4,0,4,0,4,0,3,5,4,4,3,4,3,5,4,3,3,4,3,5,4,4,4,4,3,4,2,4,3,3,1,5,4,3,2,4,5,4,5,5,4,4,5,4,4,0,3,2,2,3,3,0,4,3,1,3,2,1,4,3,3,4,5,0,3,0,2,0,4,5,5,4,5,4,0,4,0,0,5,4), +(0,5,0,5,0,4,0,3,0,4,4,3,4,3,3,3,4,0,4,4,4,3,4,3,4,3,3,1,4,2,4,3,4,0,5,4,1,4,5,4,4,5,3,2,4,3,4,3,2,4,1,3,3,3,2,3,2,0,4,3,3,4,3,3,3,4,0,4,0,3,0,4,5,4,4,4,3,0,4,1,0,1,3), +(0,3,1,4,0,3,0,2,0,3,4,4,3,1,4,2,3,3,4,3,4,3,4,3,4,4,3,2,3,1,5,4,4,1,4,4,3,5,4,4,3,5,5,4,3,4,4,3,1,2,3,1,2,2,0,3,2,0,3,1,0,5,3,3,3,4,3,3,3,3,4,4,4,4,5,4,2,0,3,3,2,4,3), +(0,2,0,3,0,1,0,1,0,0,3,2,0,0,2,0,1,0,2,1,3,3,3,1,2,3,1,0,1,0,4,2,1,1,3,3,0,4,3,3,1,4,3,3,0,3,3,2,0,0,0,0,1,0,0,2,0,0,0,0,0,4,1,0,2,3,2,2,2,1,3,3,3,4,4,3,2,0,3,1,0,3,3), +(0,4,0,4,0,3,0,3,0,4,4,4,3,3,3,3,3,3,4,3,4,2,4,3,4,3,3,2,4,3,4,5,4,1,4,5,3,5,4,5,3,5,4,0,3,5,5,3,1,3,3,2,2,3,0,3,4,1,3,3,2,4,3,3,3,4,0,4,0,3,0,4,5,4,4,5,3,0,4,1,0,3,4), +(0,2,0,3,0,3,0,0,0,2,2,2,1,0,1,0,0,0,3,0,3,0,3,0,1,3,1,0,3,1,3,3,3,1,3,3,3,0,1,3,1,3,4,0,0,3,1,1,0,3,2,0,0,0,0,1,3,0,1,0,0,3,3,2,0,3,0,0,0,0,0,3,4,3,4,3,3,0,3,0,0,2,3), 
+(2,3,0,3,0,2,0,1,0,3,3,4,3,1,3,1,1,1,3,1,4,3,4,3,3,3,0,0,3,1,5,4,3,1,4,3,2,5,5,4,4,4,4,3,3,4,4,4,0,2,1,1,3,2,0,1,2,0,0,1,0,4,1,3,3,3,0,3,0,1,0,4,4,4,5,5,3,0,2,0,0,4,4), +(0,2,0,1,0,3,1,3,0,2,3,3,3,0,3,1,0,0,3,0,3,2,3,1,3,2,1,1,0,0,4,2,1,0,2,3,1,4,3,2,0,4,4,3,1,3,1,3,0,1,0,0,1,0,0,0,1,0,0,0,0,4,1,1,1,2,0,3,0,0,0,3,4,2,4,3,2,0,1,0,0,3,3), +(0,1,0,4,0,5,0,4,0,2,4,4,2,3,3,2,3,3,5,3,3,3,4,3,4,2,3,0,4,3,3,3,4,1,4,3,2,1,5,5,3,4,5,1,3,5,4,2,0,3,3,0,1,3,0,4,2,0,1,3,1,4,3,3,3,3,0,3,0,1,0,3,4,4,4,5,5,0,3,0,1,4,5), +(0,2,0,3,0,3,0,0,0,2,3,1,3,0,4,0,1,1,3,0,3,4,3,2,3,1,0,3,3,2,3,1,3,0,2,3,0,2,1,4,1,2,2,0,0,3,3,0,0,2,0,0,0,1,0,0,0,0,2,2,0,3,2,1,3,3,0,2,0,2,0,0,3,3,1,2,4,0,3,0,2,2,3), +(2,4,0,5,0,4,0,4,0,2,4,4,4,3,4,3,3,3,1,2,4,3,4,3,4,4,5,0,3,3,3,3,2,0,4,3,1,4,3,4,1,4,4,3,3,4,4,3,1,2,3,0,4,2,0,4,1,0,3,3,0,4,3,3,3,4,0,4,0,2,0,3,5,3,4,5,2,0,3,0,0,4,5), +(0,3,0,4,0,1,0,1,0,1,3,2,2,1,3,0,3,0,2,0,2,0,3,0,2,0,0,0,1,0,1,1,0,0,3,1,0,0,0,4,0,3,1,0,2,1,3,0,0,0,0,0,0,3,0,0,0,0,0,0,0,4,2,2,3,1,0,3,0,0,0,1,4,4,4,3,0,0,4,0,0,1,4), +(1,4,1,5,0,3,0,3,0,4,5,4,4,3,5,3,3,4,4,3,4,1,3,3,3,3,2,1,4,1,5,4,3,1,4,4,3,5,4,4,3,5,4,3,3,4,4,4,0,3,3,1,2,3,0,3,1,0,3,3,0,5,4,4,4,4,4,4,3,3,5,4,4,3,3,5,4,0,3,2,0,4,4), +(0,2,0,3,0,1,0,0,0,1,3,3,3,2,4,1,3,0,3,1,3,0,2,2,1,1,0,0,2,0,4,3,1,0,4,3,0,4,4,4,1,4,3,1,1,3,3,1,0,2,0,0,1,3,0,0,0,0,2,0,0,4,3,2,4,3,5,4,3,3,3,4,3,3,4,3,3,0,2,1,0,3,3), +(0,2,0,4,0,3,0,2,0,2,5,5,3,4,4,4,4,1,4,3,3,0,4,3,4,3,1,3,3,2,4,3,0,3,4,3,0,3,4,4,2,4,4,0,4,5,3,3,2,2,1,1,1,2,0,1,5,0,3,3,2,4,3,3,3,4,0,3,0,2,0,4,4,3,5,5,0,0,3,0,2,3,3), +(0,3,0,4,0,3,0,1,0,3,4,3,3,1,3,3,3,0,3,1,3,0,4,3,3,1,1,0,3,0,3,3,0,0,4,4,0,1,5,4,3,3,5,0,3,3,4,3,0,2,0,1,1,1,0,1,3,0,1,2,1,3,3,2,3,3,0,3,0,1,0,1,3,3,4,4,1,0,1,2,2,1,3), +(0,1,0,4,0,4,0,3,0,1,3,3,3,2,3,1,1,0,3,0,3,3,4,3,2,4,2,0,1,0,4,3,2,0,4,3,0,5,3,3,2,4,4,4,3,3,3,4,0,1,3,0,0,1,0,0,1,0,0,0,0,4,2,3,3,3,0,3,0,0,0,4,4,4,5,3,2,0,3,3,0,3,5), 
+(0,2,0,3,0,0,0,3,0,1,3,0,2,0,0,0,1,0,3,1,1,3,3,0,0,3,0,0,3,0,2,3,1,0,3,1,0,3,3,2,0,4,2,2,0,2,0,0,0,4,0,0,0,0,0,0,0,0,0,0,0,2,1,2,0,1,0,1,0,0,0,1,3,1,2,0,0,0,1,0,0,1,4), +(0,3,0,3,0,5,0,1,0,2,4,3,1,3,3,2,1,1,5,2,1,0,5,1,2,0,0,0,3,3,2,2,3,2,4,3,0,0,3,3,1,3,3,0,2,5,3,4,0,3,3,0,1,2,0,2,2,0,3,2,0,2,2,3,3,3,0,2,0,1,0,3,4,4,2,5,4,0,3,0,0,3,5), +(0,3,0,3,0,3,0,1,0,3,3,3,3,0,3,0,2,0,2,1,1,0,2,0,1,0,0,0,2,1,0,0,1,0,3,2,0,0,3,3,1,2,3,1,0,3,3,0,0,1,0,0,0,0,0,2,0,0,0,0,0,2,3,1,2,3,0,3,0,1,0,3,2,1,0,4,3,0,1,1,0,3,3), +(0,4,0,5,0,3,0,3,0,4,5,5,4,3,5,3,4,3,5,3,3,2,5,3,4,4,4,3,4,3,4,5,5,3,4,4,3,4,4,5,4,4,4,3,4,5,5,4,2,3,4,2,3,4,0,3,3,1,4,3,2,4,3,3,5,5,0,3,0,3,0,5,5,5,5,4,4,0,4,0,1,4,4), +(0,4,0,4,0,3,0,3,0,3,5,4,4,2,3,2,5,1,3,2,5,1,4,2,3,2,3,3,4,3,3,3,3,2,5,4,1,3,3,5,3,4,4,0,4,4,3,1,1,3,1,0,2,3,0,2,3,0,3,0,0,4,3,1,3,4,0,3,0,2,0,4,4,4,3,4,5,0,4,0,0,3,4), +(0,3,0,3,0,3,1,2,0,3,4,4,3,3,3,0,2,2,4,3,3,1,3,3,3,1,1,0,3,1,4,3,2,3,4,4,2,4,4,4,3,4,4,3,2,4,4,3,1,3,3,1,3,3,0,4,1,0,2,2,1,4,3,2,3,3,5,4,3,3,5,4,4,3,3,0,4,0,3,2,2,4,4), +(0,2,0,1,0,0,0,0,0,1,2,1,3,0,0,0,0,0,2,0,1,2,1,0,0,1,0,0,0,0,3,0,0,1,0,1,1,3,1,0,0,0,1,1,0,1,1,0,0,0,0,0,2,0,0,0,0,0,0,0,0,1,1,2,2,0,3,4,0,0,0,1,1,0,0,1,0,0,0,0,0,1,1), +(0,1,0,0,0,1,0,0,0,0,4,0,4,1,4,0,3,0,4,0,3,0,4,0,3,0,3,0,4,1,5,1,4,0,0,3,0,5,0,5,2,0,1,0,0,0,2,1,4,0,1,3,0,0,3,0,0,3,1,1,4,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0), +(1,4,0,5,0,3,0,2,0,3,5,4,4,3,4,3,5,3,4,3,3,0,4,3,3,3,3,3,3,2,4,4,3,1,3,4,4,5,4,4,3,4,4,1,3,5,4,3,3,3,1,2,2,3,3,1,3,1,3,3,3,5,3,3,4,5,0,3,0,3,0,3,4,3,4,4,3,0,3,0,2,4,3), +(0,1,0,4,0,0,0,0,0,1,4,0,4,1,4,2,4,0,3,0,1,0,1,0,0,0,0,0,2,0,3,1,1,1,0,3,0,0,0,1,2,1,0,0,1,1,1,1,0,1,0,0,0,1,0,0,3,0,0,0,0,3,2,0,2,2,0,1,0,0,0,2,3,2,3,3,0,0,0,0,2,1,0), +(0,5,1,5,0,3,0,3,0,5,4,4,5,1,5,3,3,0,4,3,4,3,5,3,4,3,3,2,4,3,4,3,3,0,3,3,1,4,4,3,4,4,4,3,4,5,5,3,2,3,1,1,3,3,1,3,1,1,3,3,2,4,5,3,3,5,0,4,0,3,0,4,4,3,5,3,3,0,3,4,0,4,3), 
+(0,5,0,5,0,3,0,2,0,4,4,3,5,2,4,3,3,3,4,4,4,3,5,3,5,3,3,1,4,0,4,3,3,0,3,3,0,4,4,4,4,5,4,3,3,5,5,3,2,3,1,2,3,2,0,1,0,0,3,2,2,4,4,3,1,5,0,4,0,3,0,4,3,1,3,2,1,0,3,3,0,3,3), +(0,4,0,5,0,5,0,4,0,4,5,5,5,3,4,3,3,2,5,4,4,3,5,3,5,3,4,0,4,3,4,4,3,2,4,4,3,4,5,4,4,5,5,0,3,5,5,4,1,3,3,2,3,3,1,3,1,0,4,3,1,4,4,3,4,5,0,4,0,2,0,4,3,4,4,3,3,0,4,0,0,5,5), +(0,4,0,4,0,5,0,1,1,3,3,4,4,3,4,1,3,0,5,1,3,0,3,1,3,1,1,0,3,0,3,3,4,0,4,3,0,4,4,4,3,4,4,0,3,5,4,1,0,3,0,0,2,3,0,3,1,0,3,1,0,3,2,1,3,5,0,3,0,1,0,3,2,3,3,4,4,0,2,2,0,4,4), +(2,4,0,5,0,4,0,3,0,4,5,5,4,3,5,3,5,3,5,3,5,2,5,3,4,3,3,4,3,4,5,3,2,1,5,4,3,2,3,4,5,3,4,1,2,5,4,3,0,3,3,0,3,2,0,2,3,0,4,1,0,3,4,3,3,5,0,3,0,1,0,4,5,5,5,4,3,0,4,2,0,3,5), +(0,5,0,4,0,4,0,2,0,5,4,3,4,3,4,3,3,3,4,3,4,2,5,3,5,3,4,1,4,3,4,4,4,0,3,5,0,4,4,4,4,5,3,1,3,4,5,3,3,3,3,3,3,3,0,2,2,0,3,3,2,4,3,3,3,5,3,4,1,3,3,5,3,2,0,0,0,0,4,3,1,3,3), +(0,1,0,3,0,3,0,1,0,1,3,3,3,2,3,3,3,0,3,0,0,0,3,1,3,0,0,0,2,2,2,3,0,0,3,2,0,1,2,4,1,3,3,0,0,3,3,3,0,1,0,0,2,1,0,0,3,0,3,1,0,3,0,0,1,3,0,2,0,1,0,3,3,1,3,3,0,0,1,1,0,3,3), +(0,2,0,3,0,2,1,4,0,2,2,3,1,1,3,1,1,0,2,0,3,1,2,3,1,3,0,0,1,0,4,3,2,3,3,3,1,4,2,3,3,3,3,1,0,3,1,4,0,1,1,0,1,2,0,1,1,0,1,1,0,3,1,3,2,2,0,1,0,0,0,2,3,3,3,1,0,0,0,0,0,2,3), +(0,5,0,4,0,5,0,2,0,4,5,5,3,3,4,3,3,1,5,4,4,2,4,4,4,3,4,2,4,3,5,5,4,3,3,4,3,3,5,5,4,5,5,1,3,4,5,3,1,4,3,1,3,3,0,3,3,1,4,3,1,4,5,3,3,5,0,4,0,3,0,5,3,3,1,4,3,0,4,0,1,5,3), +(0,5,0,5,0,4,0,2,0,4,4,3,4,3,3,3,3,3,5,4,4,4,4,4,4,5,3,3,5,2,4,4,4,3,4,4,3,3,4,4,5,5,3,3,4,3,4,3,3,4,3,3,3,3,1,2,2,1,4,3,3,5,4,4,3,4,0,4,0,3,0,4,4,4,4,4,1,0,4,2,0,2,4), +(0,4,0,4,0,3,0,1,0,3,5,2,3,0,3,0,2,1,4,2,3,3,4,1,4,3,3,2,4,1,3,3,3,0,3,3,0,0,3,3,3,5,3,3,3,3,3,2,0,2,0,0,2,0,0,2,0,0,1,0,0,3,1,2,2,3,0,3,0,2,0,4,4,3,3,4,1,0,3,0,0,2,4), +(0,0,0,4,0,0,0,0,0,0,1,0,1,0,2,0,0,0,0,0,1,0,2,0,1,0,0,0,0,0,3,1,3,0,3,2,0,0,0,1,0,3,2,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,4,0,2,0,0,0,0,0,0,2), 
+(0,2,1,3,0,2,0,2,0,3,3,3,3,1,3,1,3,3,3,3,3,3,4,2,2,1,2,1,4,0,4,3,1,3,3,3,2,4,3,5,4,3,3,3,3,3,3,3,0,1,3,0,2,0,0,1,0,0,1,0,0,4,2,0,2,3,0,3,3,0,3,3,4,2,3,1,4,0,1,2,0,2,3), +(0,3,0,3,0,1,0,3,0,2,3,3,3,0,3,1,2,0,3,3,2,3,3,2,3,2,3,1,3,0,4,3,2,0,3,3,1,4,3,3,2,3,4,3,1,3,3,1,1,0,1,1,0,1,0,1,0,1,0,0,0,4,1,1,0,3,0,3,1,0,2,3,3,3,3,3,1,0,0,2,0,3,3), +(0,0,0,0,0,0,0,0,0,0,3,0,2,0,3,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,3,0,3,0,3,1,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,2,0,2,3,0,0,0,0,0,0,0,0,3), +(0,2,0,3,1,3,0,3,0,2,3,3,3,1,3,1,3,1,3,1,3,3,3,1,3,0,2,3,1,1,4,3,3,2,3,3,1,2,2,4,1,3,3,0,1,4,2,3,0,1,3,0,3,0,0,1,3,0,2,0,0,3,3,2,1,3,0,3,0,2,0,3,4,4,4,3,1,0,3,0,0,3,3), +(0,2,0,1,0,2,0,0,0,1,3,2,2,1,3,0,1,1,3,0,3,2,3,1,2,0,2,0,1,1,3,3,3,0,3,3,1,1,2,3,2,3,3,1,2,3,2,0,0,1,0,0,0,0,0,0,3,0,1,0,0,2,1,2,1,3,0,3,0,0,0,3,4,4,4,3,2,0,2,0,0,2,4), +(0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,2,2,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,3,1,0,0,0,0,0,0,0,3), +(0,3,0,3,0,2,0,3,0,3,3,3,2,3,2,2,2,0,3,1,3,3,3,2,3,3,0,0,3,0,3,2,2,0,2,3,1,4,3,4,3,3,2,3,1,5,4,4,0,3,1,2,1,3,0,3,1,1,2,0,2,3,1,3,1,3,0,3,0,1,0,3,3,4,4,2,1,0,2,1,0,2,4), +(0,1,0,3,0,1,0,2,0,1,4,2,5,1,4,0,2,0,2,1,3,1,4,0,2,1,0,0,2,1,4,1,1,0,3,3,0,5,1,3,2,3,3,1,0,3,2,3,0,1,0,0,0,0,0,0,1,0,0,0,0,4,0,1,0,3,0,2,0,1,0,3,3,3,4,3,3,0,0,0,0,2,3), +(0,0,0,1,0,0,0,0,0,0,2,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,1,0,0,1,0,0,0,0,0,3), +(0,1,0,3,0,4,0,3,0,2,4,3,1,0,3,2,2,1,3,1,2,2,3,1,1,1,2,1,3,0,1,2,0,1,3,2,1,3,0,5,5,1,0,0,1,3,2,1,0,3,0,0,1,0,0,0,0,0,3,4,0,1,1,1,3,2,0,2,0,1,0,2,3,3,1,2,3,0,1,0,1,0,4), +(0,0,0,1,0,3,0,3,0,2,2,1,0,0,4,0,3,0,3,1,3,0,3,0,3,0,1,0,3,0,3,1,3,0,3,3,0,0,1,2,1,1,1,0,1,2,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,2,2,1,2,0,0,2,0,0,0,0,2,3,3,3,3,0,0,0,0,1,4), 
+(0,0,0,3,0,3,0,0,0,0,3,1,1,0,3,0,1,0,2,0,1,0,0,0,0,0,0,0,1,0,3,0,2,0,2,3,0,0,2,2,3,1,2,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,2,0,0,0,0,2,3), +(2,4,0,5,0,5,0,4,0,3,4,3,3,3,4,3,3,3,4,3,4,4,5,4,5,5,5,2,3,0,5,5,4,1,5,4,3,1,5,4,3,4,4,3,3,4,3,3,0,3,2,0,2,3,0,3,0,0,3,3,0,5,3,2,3,3,0,3,0,3,0,3,4,5,4,5,3,0,4,3,0,3,4), +(0,3,0,3,0,3,0,3,0,3,3,4,3,2,3,2,3,0,4,3,3,3,3,3,3,3,3,0,3,2,4,3,3,1,3,4,3,4,4,4,3,4,4,3,2,4,4,1,0,2,0,0,1,1,0,2,0,0,3,1,0,5,3,2,1,3,0,3,0,1,2,4,3,2,4,3,3,0,3,2,0,4,4), +(0,3,0,3,0,1,0,0,0,1,4,3,3,2,3,1,3,1,4,2,3,2,4,2,3,4,3,0,2,2,3,3,3,0,3,3,3,0,3,4,1,3,3,0,3,4,3,3,0,1,1,0,1,0,0,0,4,0,3,0,0,3,1,2,1,3,0,4,0,1,0,4,3,3,4,3,3,0,2,0,0,3,3), +(0,3,0,4,0,1,0,3,0,3,4,3,3,0,3,3,3,1,3,1,3,3,4,3,3,3,0,0,3,1,5,3,3,1,3,3,2,5,4,3,3,4,5,3,2,5,3,4,0,1,0,0,0,0,0,2,0,0,1,1,0,4,2,2,1,3,0,3,0,2,0,4,4,3,5,3,2,0,1,1,0,3,4), +(0,5,0,4,0,5,0,2,0,4,4,3,3,2,3,3,3,1,4,3,4,1,5,3,4,3,4,0,4,2,4,3,4,1,5,4,0,4,4,4,4,5,4,1,3,5,4,2,1,4,1,1,3,2,0,3,1,0,3,2,1,4,3,3,3,4,0,4,0,3,0,4,4,4,3,3,3,0,4,2,0,3,4), +(1,4,0,4,0,3,0,1,0,3,3,3,1,1,3,3,2,2,3,3,1,0,3,2,2,1,2,0,3,1,2,1,2,0,3,2,0,2,2,3,3,4,3,0,3,3,1,2,0,1,1,3,1,2,0,0,3,0,1,1,0,3,2,2,3,3,0,3,0,0,0,2,3,3,4,3,3,0,1,0,0,1,4), +(0,4,0,4,0,4,0,0,0,3,4,4,3,1,4,2,3,2,3,3,3,1,4,3,4,0,3,0,4,2,3,3,2,2,5,4,2,1,3,4,3,4,3,1,3,3,4,2,0,2,1,0,3,3,0,0,2,0,3,1,0,4,4,3,4,3,0,4,0,1,0,2,4,4,4,4,4,0,3,2,0,3,3), +(0,0,0,1,0,4,0,0,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,3,2,0,0,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,2), +(0,2,0,3,0,4,0,4,0,1,3,3,3,0,4,0,2,1,2,1,1,1,2,0,3,1,1,0,1,0,3,1,0,0,3,3,2,0,1,1,0,0,0,0,0,1,0,2,0,2,2,0,3,1,0,0,1,0,1,1,0,1,2,0,3,0,0,0,0,1,0,0,3,3,4,3,1,0,1,0,3,0,2), +(0,0,0,3,0,5,0,0,0,0,1,0,2,0,3,1,0,1,3,0,0,0,2,0,0,0,1,0,0,0,1,1,0,0,4,0,0,0,2,3,0,1,4,1,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,1,0,0,0,0,0,0,0,2,0,0,3,0,0,0,0,0,3), 
+(0,2,0,5,0,5,0,1,0,2,4,3,3,2,5,1,3,2,3,3,3,0,4,1,2,0,3,0,4,0,2,2,1,1,5,3,0,0,1,4,2,3,2,0,3,3,3,2,0,2,4,1,1,2,0,1,1,0,3,1,0,1,3,1,2,3,0,2,0,0,0,1,3,5,4,4,4,0,3,0,0,1,3), +(0,4,0,5,0,4,0,4,0,4,5,4,3,3,4,3,3,3,4,3,4,4,5,3,4,5,4,2,4,2,3,4,3,1,4,4,1,3,5,4,4,5,5,4,4,5,5,5,2,3,3,1,4,3,1,3,3,0,3,3,1,4,3,4,4,4,0,3,0,4,0,3,3,4,4,5,0,0,4,3,0,4,5), +(0,4,0,4,0,3,0,3,0,3,4,4,4,3,3,2,4,3,4,3,4,3,5,3,4,3,2,1,4,2,4,4,3,1,3,4,2,4,5,5,3,4,5,4,1,5,4,3,0,3,2,2,3,2,1,3,1,0,3,3,3,5,3,3,3,5,4,4,2,3,3,4,3,3,3,2,1,0,3,2,1,4,3), +(0,4,0,5,0,4,0,3,0,3,5,5,3,2,4,3,4,0,5,4,4,1,4,4,4,3,3,3,4,3,5,5,2,3,3,4,1,2,5,5,3,5,5,2,3,5,5,4,0,3,2,0,3,3,1,1,5,1,4,1,0,4,3,2,3,5,0,4,0,3,0,5,4,3,4,3,0,0,4,1,0,4,4), +(1,3,0,4,0,2,0,2,0,2,5,5,3,3,3,3,3,0,4,2,3,4,4,4,3,4,0,0,3,4,5,4,3,3,3,3,2,5,5,4,5,5,5,4,3,5,5,5,1,3,1,0,1,0,0,3,2,0,4,2,0,5,2,3,2,4,1,3,0,3,0,4,5,4,5,4,3,0,4,2,0,5,4), +(0,3,0,4,0,5,0,3,0,3,4,4,3,2,3,2,3,3,3,3,3,2,4,3,3,2,2,0,3,3,3,3,3,1,3,3,3,0,4,4,3,4,4,1,1,4,4,2,0,3,1,0,1,1,0,4,1,0,2,3,1,3,3,1,3,4,0,3,0,1,0,3,1,3,0,0,1,0,2,0,0,4,4), +(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0), +(0,3,0,3,0,2,0,3,0,1,5,4,3,3,3,1,4,2,1,2,3,4,4,2,4,4,5,0,3,1,4,3,4,0,4,3,3,3,2,3,2,5,3,4,3,2,2,3,0,0,3,0,2,1,0,1,2,0,0,0,0,2,1,1,3,1,0,2,0,4,0,3,4,4,4,5,2,0,2,0,0,1,3), +(0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,1,1,0,0,1,1,0,0,0,4,2,1,1,0,1,0,3,2,0,0,3,1,1,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,1,0,0,0,2,0,0,0,1,4,0,4,2,1,0,0,0,0,0,1), +(0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,0,0,0,1,0,1,0,0,0,0,3,1,0,0,0,2,0,2,1,0,0,1,2,1,0,1,1,0,0,3,0,0,0,0,0,0,0,0,0,0,0,1,3,1,0,0,0,0,0,1,0,0,2,1,0,0,0,0,0,0,0,0,2), +(0,4,0,4,0,4,0,3,0,4,4,3,4,2,4,3,2,0,4,4,4,3,5,3,5,3,3,2,4,2,4,3,4,3,1,4,0,2,3,4,4,4,3,3,3,4,4,4,3,4,1,3,4,3,2,1,2,1,3,3,3,4,4,3,3,5,0,4,0,3,0,4,3,3,3,2,1,0,3,0,0,3,3), 
+(0,4,0,3,0,3,0,3,0,3,5,5,3,3,3,3,4,3,4,3,3,3,4,4,4,3,3,3,3,4,3,5,3,3,1,3,2,4,5,5,5,5,4,3,4,5,5,3,2,2,3,3,3,3,2,3,3,1,2,3,2,4,3,3,3,4,0,4,0,2,0,4,3,2,2,1,2,0,3,0,0,4,1), +) + +class JapaneseContextAnalysis(object): + NUM_OF_CATEGORY = 6 + DONT_KNOW = -1 + ENOUGH_REL_THRESHOLD = 100 + MAX_REL_THRESHOLD = 1000 + MINIMUM_DATA_THRESHOLD = 4 + + def __init__(self): + self._total_rel = None + self._rel_sample = None + self._need_to_skip_char_num = None + self._last_char_order = None + self._done = None + self.reset() + + def reset(self): + self._total_rel = 0 # total sequence received + # category counters, each integer counts sequence in its category + self._rel_sample = [0] * self.NUM_OF_CATEGORY + # if last byte in current buffer is not the last byte of a character, + # we need to know how many bytes to skip in next buffer + self._need_to_skip_char_num = 0 + self._last_char_order = -1 # The order of previous char + # If this flag is set to True, detection is done and conclusion has + # been made + self._done = False + + def feed(self, byte_str, num_bytes): + if self._done: + return + + # The buffer we got is byte oriented, and a character may span in more than one + # buffers. In case the last one or two byte in last buffer is not + # complete, we record how many byte needed to complete that character + # and skip these bytes here. We can choose to record those bytes as + # well and analyse the character once it is complete, but since a + # character will not make much difference, by simply skipping + # this character will simply our logic and improve performance. 
+ i = self._need_to_skip_char_num + while i < num_bytes: + order, char_len = self.get_order(byte_str[i:i + 2]) + i += char_len + if i > num_bytes: + self._need_to_skip_char_num = i - num_bytes + self._last_char_order = -1 + else: + if (order != -1) and (self._last_char_order != -1): + self._total_rel += 1 + if self._total_rel > self.MAX_REL_THRESHOLD: + self._done = True + break + self._rel_sample[jp2CharContext[self._last_char_order][order]] += 1 + self._last_char_order = order + + def got_enough_data(self): + return self._total_rel > self.ENOUGH_REL_THRESHOLD + + def get_confidence(self): + # This is just one way to calculate confidence. It works well for me. + if self._total_rel > self.MINIMUM_DATA_THRESHOLD: + return (self._total_rel - self._rel_sample[0]) / self._total_rel + else: + return self.DONT_KNOW + + def get_order(self, byte_str): + return -1, 1 + +class SJISContextAnalysis(JapaneseContextAnalysis): + def __init__(self): + super(SJISContextAnalysis, self).__init__() + self._charset_name = "SHIFT_JIS" + + @property + def charset_name(self): + return self._charset_name + + def get_order(self, byte_str): + if not byte_str: + return -1, 1 + # find out current char's byte length + first_char = byte_str[0] + if (0x81 <= first_char <= 0x9F) or (0xE0 <= first_char <= 0xFC): + char_len = 2 + if (first_char == 0x87) or (0xFA <= first_char <= 0xFC): + self._charset_name = "CP932" + else: + char_len = 1 + + # return its order if it is hiragana + if len(byte_str) > 1: + second_char = byte_str[1] + if (first_char == 202) and (0x9F <= second_char <= 0xF1): + return second_char - 0x9F, char_len + + return -1, char_len + +class EUCJPContextAnalysis(JapaneseContextAnalysis): + def get_order(self, byte_str): + if not byte_str: + return -1, 1 + # find out current char's byte length + first_char = byte_str[0] + if (first_char == 0x8E) or (0xA1 <= first_char <= 0xFE): + char_len = 2 + elif first_char == 0x8F: + char_len = 3 + else: + char_len = 1 + + # return its order if 
it is hiragana + if len(byte_str) > 1: + second_char = byte_str[1] + if (first_char == 0xA4) and (0xA1 <= second_char <= 0xF3): + return second_char - 0xA1, char_len + + return -1, char_len + + diff --git a/thesisenv/lib/python3.6/site-packages/chardet/langbulgarianmodel.py b/thesisenv/lib/python3.6/site-packages/chardet/langbulgarianmodel.py new file mode 100644 index 0000000..2aa4fb2 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/chardet/langbulgarianmodel.py @@ -0,0 +1,228 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +# 255: Control characters that usually does not exist in any text +# 254: Carriage/Return +# 253: symbol (punctuation) that does not belong to word +# 252: 0 - 9 + +# Character Mapping Table: +# this table is modified base on win1251BulgarianCharToOrderMap, so +# only number <64 is sure valid + +Latin5_BulgarianCharToOrderMap = ( +255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 +253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 +252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 +253, 77, 90, 99,100, 72,109,107,101, 79,185, 81,102, 76, 94, 82, # 40 +110,186,108, 91, 74,119, 84, 96,111,187,115,253,253,253,253,253, # 50 +253, 65, 69, 70, 66, 63, 68,112,103, 92,194,104, 95, 86, 87, 71, # 60 +116,195, 85, 93, 97,113,196,197,198,199,200,253,253,253,253,253, # 70 +194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209, # 80 +210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225, # 90 + 81,226,227,228,229,230,105,231,232,233,234,235,236, 45,237,238, # a0 + 31, 32, 35, 43, 37, 44, 55, 47, 40, 59, 33, 46, 38, 36, 41, 30, # b0 + 39, 28, 34, 51, 48, 49, 53, 50, 54, 57, 61,239, 67,240, 60, 56, # c0 + 1, 18, 9, 20, 11, 3, 23, 15, 2, 26, 12, 10, 14, 6, 4, 13, # d0 + 7, 8, 5, 19, 29, 25, 22, 21, 27, 24, 17, 75, 52,241, 42, 16, # e0 + 62,242,243,244, 58,245, 98,246,247,248,249,250,251, 91,252,253, # f0 +) + +win1251BulgarianCharToOrderMap = ( +255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 
+253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 +252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 +253, 77, 90, 99,100, 72,109,107,101, 79,185, 81,102, 76, 94, 82, # 40 +110,186,108, 91, 74,119, 84, 96,111,187,115,253,253,253,253,253, # 50 +253, 65, 69, 70, 66, 63, 68,112,103, 92,194,104, 95, 86, 87, 71, # 60 +116,195, 85, 93, 97,113,196,197,198,199,200,253,253,253,253,253, # 70 +206,207,208,209,210,211,212,213,120,214,215,216,217,218,219,220, # 80 +221, 78, 64, 83,121, 98,117,105,222,223,224,225,226,227,228,229, # 90 + 88,230,231,232,233,122, 89,106,234,235,236,237,238, 45,239,240, # a0 + 73, 80,118,114,241,242,243,244,245, 62, 58,246,247,248,249,250, # b0 + 31, 32, 35, 43, 37, 44, 55, 47, 40, 59, 33, 46, 38, 36, 41, 30, # c0 + 39, 28, 34, 51, 48, 49, 53, 50, 54, 57, 61,251, 67,252, 60, 56, # d0 + 1, 18, 9, 20, 11, 3, 23, 15, 2, 26, 12, 10, 14, 6, 4, 13, # e0 + 7, 8, 5, 19, 29, 25, 22, 21, 27, 24, 17, 75, 52,253, 42, 16, # f0 +) + +# Model Table: +# total sequences: 100% +# first 512 sequences: 96.9392% +# first 1024 sequences:3.0618% +# rest sequences: 0.2992% +# negative sequences: 0.0020% +BulgarianLangModel = ( +0,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,2,3,3,3,3,3, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,0,3,3,3,2,2,3,2,2,1,2,2, +3,1,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,0,3,3,3,3,3,3,3,3,3,3,0,3,0,1, +0,0,0,0,0,0,0,0,0,0,1,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,2,3,3,3,3,3,3,3,3,0,3,1,0, +0,1,0,0,0,0,0,0,0,0,1,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, +3,2,2,2,3,3,3,3,3,3,3,3,3,3,3,3,3,1,3,2,3,3,3,3,3,3,3,3,0,3,0,0, +0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,2,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,1,3,2,3,3,3,3,3,3,3,3,0,3,0,0, +0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,3,3,3,3,2,3,2,2,1,3,3,3,3,2,2,2,1,1,2,0,1,0,1,0,0, +0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1, 
+3,3,3,3,3,3,3,2,3,2,2,3,3,1,1,2,3,3,2,3,3,3,3,2,1,2,0,2,0,3,0,0, +0,0,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1, +3,3,3,3,3,3,3,1,3,3,3,3,3,2,3,2,3,3,3,3,3,2,3,3,1,3,0,3,0,2,0,0, +0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, +3,3,3,3,3,3,3,3,1,3,3,2,3,3,3,1,3,3,2,3,2,2,2,0,0,2,0,2,0,2,0,0, +0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1, +3,3,3,3,3,3,3,3,3,0,3,3,3,2,2,3,3,3,1,2,2,3,2,1,1,2,0,2,0,0,0,0, +1,0,0,0,0,0,0,0,0,0,2,0,0,1,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, +3,3,3,3,3,3,3,2,3,3,1,2,3,2,2,2,3,3,3,3,3,2,2,3,1,2,0,2,1,2,0,0, +0,0,0,0,0,0,0,0,0,0,3,0,0,1,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1, +3,3,3,3,3,1,3,3,3,3,3,2,3,3,3,2,3,3,2,3,2,2,2,3,1,2,0,1,0,1,0,0, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, +3,3,3,3,3,3,3,3,3,3,3,1,1,1,2,2,1,3,1,3,2,2,3,0,0,1,0,1,0,1,0,0, +0,0,0,1,0,0,0,0,1,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, +3,3,3,3,3,2,2,3,2,2,3,1,2,1,1,1,2,3,1,3,1,2,2,0,1,1,1,1,0,1,0,0, +0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, +3,3,3,3,3,1,3,2,2,3,3,1,2,3,1,1,3,3,3,3,1,2,2,1,1,1,0,2,0,2,0,1, +0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,2,2,3,3,3,2,2,1,1,2,0,2,0,1,0,0, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, +3,0,1,2,1,3,3,2,3,3,3,3,3,2,3,2,1,0,3,1,2,1,2,1,2,3,2,1,0,1,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,1,1,2,3,3,3,3,3,3,3,3,3,3,3,3,0,0,3,1,3,3,2,3,3,2,2,2,0,1,0,0, +0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,3,3,3,3,0,3,3,3,3,3,2,1,1,2,1,3,3,0,3,1,1,1,1,3,2,0,1,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, +3,3,2,2,2,3,3,3,3,3,3,3,3,3,3,3,1,1,3,1,3,3,2,3,2,2,2,3,0,2,0,0, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,2,3,3,2,2,3,2,1,1,1,1,1,3,1,3,1,1,0,0,0,1,0,0,0,1,0,0, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0, 
+3,3,3,3,3,2,3,2,0,3,2,0,3,0,2,0,0,2,1,3,1,0,0,1,0,0,0,1,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, +3,3,3,3,2,1,1,1,1,2,1,1,2,1,1,1,2,2,1,2,1,1,1,0,1,1,0,1,0,1,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, +3,3,3,3,2,1,3,1,1,2,1,3,2,1,1,0,1,2,3,2,1,1,1,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,3,3,3,3,2,2,1,0,1,0,0,1,0,0,0,2,1,0,3,0,0,1,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, +3,3,3,2,3,2,3,3,1,3,2,1,1,1,2,1,1,2,1,3,0,1,0,0,0,1,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,1,1,2,2,3,3,2,3,2,2,2,3,1,2,2,1,1,2,1,1,2,2,0,1,1,0,1,0,2,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,2,1,3,1,0,2,2,1,3,2,1,0,0,2,0,2,0,1,0,0,0,0,0,0,0,1,0,0, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, +3,3,3,3,3,3,1,2,0,2,3,1,2,3,2,0,1,3,1,2,1,1,1,0,0,1,0,0,2,2,2,3, +2,2,2,2,1,2,1,1,2,2,1,1,2,0,1,1,1,0,0,1,1,0,0,1,1,0,0,0,1,1,0,1, +3,3,3,3,3,2,1,2,2,1,2,0,2,0,1,0,1,2,1,2,1,1,0,0,0,1,0,1,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1, +3,3,2,3,3,1,1,3,1,0,3,2,1,0,0,0,1,2,0,2,0,1,0,0,0,1,0,1,2,1,2,2, +1,1,1,1,1,1,1,2,2,2,1,1,1,1,1,1,1,0,1,2,1,1,1,0,0,0,0,0,1,1,0,0, +3,1,0,1,0,2,3,2,2,2,3,2,2,2,2,2,1,0,2,1,2,1,1,1,0,1,2,1,2,2,2,1, +1,1,2,2,2,2,1,2,1,1,0,1,2,1,2,2,2,1,1,1,0,1,1,1,1,2,0,1,0,0,0,0, +2,3,2,3,3,0,0,2,1,0,2,1,0,0,0,0,2,3,0,2,0,0,0,0,0,1,0,0,2,0,1,2, +2,1,2,1,2,2,1,1,1,2,1,1,1,0,1,2,2,1,1,1,1,1,0,1,1,1,0,0,1,2,0,0, +3,3,2,2,3,0,2,3,1,1,2,0,0,0,1,0,0,2,0,2,0,0,0,1,0,1,0,1,2,0,2,2, +1,1,1,1,2,1,0,1,2,2,2,1,1,1,1,1,1,1,0,1,1,1,0,0,0,0,0,0,1,1,0,0, +2,3,2,3,3,0,0,3,0,1,1,0,1,0,0,0,2,2,1,2,0,0,0,0,0,0,0,0,2,0,1,2, +2,2,1,1,1,1,1,2,2,2,1,0,2,0,1,0,1,0,0,1,0,1,0,0,1,0,0,0,0,1,0,0, +3,3,3,3,2,2,2,2,2,0,2,1,1,1,1,2,1,2,1,1,0,2,0,1,0,1,0,0,2,0,1,2, +1,1,1,1,1,1,1,2,2,1,1,0,2,0,1,0,2,0,0,1,1,1,0,0,2,0,0,0,1,1,0,0, 
+2,3,3,3,3,1,0,0,0,0,0,0,0,0,0,0,2,0,0,1,1,0,0,0,0,0,0,1,2,0,1,2, +2,2,2,1,1,2,1,1,2,2,2,1,2,0,1,1,1,1,1,1,0,1,1,1,1,0,0,1,1,1,0,0, +2,3,3,3,3,0,2,2,0,2,1,0,0,0,1,1,1,2,0,2,0,0,0,3,0,0,0,0,2,0,2,2, +1,1,1,2,1,2,1,1,2,2,2,1,2,0,1,1,1,0,1,1,1,1,0,2,1,0,0,0,1,1,0,0, +2,3,3,3,3,0,2,1,0,0,2,0,0,0,0,0,1,2,0,2,0,0,0,0,0,0,0,0,2,0,1,2, +1,1,1,2,1,1,1,1,2,2,2,0,1,0,1,1,1,0,0,1,1,1,0,0,1,0,0,0,0,1,0,0, +3,3,2,2,3,0,1,0,1,0,0,0,0,0,0,0,1,1,0,3,0,0,0,0,0,0,0,0,1,0,2,2, +1,1,1,1,1,2,1,1,2,2,1,2,2,1,0,1,1,1,1,1,0,1,0,0,1,0,0,0,1,1,0,0, +3,1,0,1,0,2,2,2,2,3,2,1,1,1,2,3,0,0,1,0,2,1,1,0,1,1,1,1,2,1,1,1, +1,2,2,1,2,1,2,2,1,1,0,1,2,1,2,2,1,1,1,0,0,1,1,1,2,1,0,1,0,0,0,0, +2,1,0,1,0,3,1,2,2,2,2,1,2,2,1,1,1,0,2,1,2,2,1,1,2,1,1,0,2,1,1,1, +1,2,2,2,2,2,2,2,1,2,0,1,1,0,2,1,1,1,1,1,0,0,1,1,1,1,0,1,0,0,0,0, +2,1,1,1,1,2,2,2,2,1,2,2,2,1,2,2,1,1,2,1,2,3,2,2,1,1,1,1,0,1,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,2,2,3,2,0,1,2,0,1,2,1,1,0,1,0,1,2,1,2,0,0,0,1,1,0,0,0,1,0,0,2, +1,1,0,0,1,1,0,1,1,1,1,0,2,0,1,1,1,0,0,1,1,0,0,0,0,1,0,0,0,1,0,0, +2,0,0,0,0,1,2,2,2,2,2,2,2,1,2,1,1,1,1,1,1,1,0,1,1,1,1,1,2,1,1,1, +1,2,2,2,2,1,1,2,1,2,1,1,1,0,2,1,2,1,1,1,0,2,1,1,1,1,0,1,0,0,0,0, +3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0, +1,1,0,1,0,1,1,1,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,2,2,3,2,0,0,0,0,1,0,0,0,0,0,0,1,1,0,2,0,0,0,0,0,0,0,0,1,0,1,2, +1,1,1,1,1,1,0,0,2,2,2,2,2,0,1,1,0,1,1,1,1,1,0,0,1,0,0,0,1,1,0,1, +2,3,1,2,1,0,1,1,0,2,2,2,0,0,1,0,0,1,1,1,1,0,0,0,0,0,0,0,1,0,1,2, +1,1,1,1,2,1,1,1,1,1,1,1,1,0,1,1,0,1,0,1,0,1,0,0,1,0,0,0,0,1,0,0, +2,2,2,2,2,0,0,2,0,0,2,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,2,0,2,2, +1,1,1,1,1,0,0,1,2,1,1,0,1,0,1,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0, +1,2,2,2,2,0,0,2,0,1,1,0,0,0,1,0,0,2,0,2,0,0,0,0,0,0,0,0,0,0,1,1, +0,0,0,1,1,1,1,1,1,1,1,1,1,0,1,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0, +1,2,2,3,2,0,0,1,0,0,1,0,0,0,0,0,0,1,0,2,0,0,0,1,0,0,0,0,0,0,0,2, +1,1,0,0,1,0,0,0,1,1,0,0,1,0,1,1,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0, 
+2,1,2,2,2,1,2,1,2,2,1,1,2,1,1,1,0,1,1,1,1,2,0,1,0,1,1,1,1,0,1,1, +1,1,2,1,1,1,1,1,1,0,0,1,2,1,1,1,1,1,1,0,0,1,1,1,0,0,0,0,0,0,0,0, +1,0,0,1,3,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,2,2,2,1,0,0,1,0,2,0,0,0,0,0,1,1,1,0,1,0,0,0,0,0,0,0,0,2,0,0,1, +0,2,0,1,0,0,1,1,2,0,1,0,1,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0, +1,2,2,2,2,0,1,1,0,2,1,0,1,1,1,0,0,1,0,2,0,1,0,0,0,0,0,0,0,0,0,1, +0,1,0,0,1,0,0,0,1,1,0,0,1,0,0,1,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0, +2,2,2,2,2,0,0,1,0,0,0,1,0,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,1, +0,1,0,1,1,1,0,0,1,1,1,0,1,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0, +2,0,1,0,0,1,2,1,1,1,1,1,1,2,2,1,0,0,1,0,1,0,0,0,0,1,1,1,1,0,0,0, +1,1,2,1,1,1,1,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,2,1,2,1,0,0,1,0,0,0,0,0,0,0,0,1,1,0,1,0,0,0,0,0,0,0,0,0,0,0,1, +0,0,0,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,0,0,1,2,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0, +0,1,1,0,1,1,1,0,0,1,0,0,1,0,1,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0, +1,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,2,0,0,2,0,1,0,0,1,0,0,1, +1,1,0,0,1,1,0,1,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0, +1,1,1,1,1,1,1,2,0,0,0,0,0,0,2,1,0,1,1,0,0,1,1,1,0,1,0,0,0,0,0,0, +2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,0,1,1,0,1,1,1,1,1,0,1,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, +) + +Latin5BulgarianModel = { + 'char_to_order_map': Latin5_BulgarianCharToOrderMap, + 'precedence_matrix': BulgarianLangModel, + 'typical_positive_ratio': 0.969392, + 'keep_english_letter': False, + 'charset_name': "ISO-8859-5", + 'language': 'Bulgairan', +} + 
+Win1251BulgarianModel = { + 'char_to_order_map': win1251BulgarianCharToOrderMap, + 'precedence_matrix': BulgarianLangModel, + 'typical_positive_ratio': 0.969392, + 'keep_english_letter': False, + 'charset_name': "windows-1251", + 'language': 'Bulgarian', +} diff --git a/thesisenv/lib/python3.6/site-packages/chardet/langcyrillicmodel.py b/thesisenv/lib/python3.6/site-packages/chardet/langcyrillicmodel.py new file mode 100644 index 0000000..e5f9a1f --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/chardet/langcyrillicmodel.py @@ -0,0 +1,333 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +# KOI8-R language model +# Character Mapping Table: +KOI8R_char_to_order_map = ( +255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 +253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 +252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 +253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40 +155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50 +253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60 + 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70 +191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206, # 80 +207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222, # 90 +223,224,225, 68,226,227,228,229,230,231,232,233,234,235,236,237, # a0 +238,239,240,241,242,243,244,245,246,247,248,249,250,251,252,253, # b0 + 27, 3, 21, 28, 13, 2, 39, 19, 26, 4, 23, 11, 8, 12, 5, 1, # c0 + 15, 16, 9, 7, 6, 14, 24, 10, 17, 18, 20, 25, 30, 29, 22, 54, # d0 + 59, 37, 44, 58, 41, 48, 53, 46, 55, 42, 60, 36, 49, 38, 31, 34, # e0 + 35, 43, 45, 32, 40, 52, 56, 33, 61, 62, 51, 57, 47, 63, 50, 70, # f0 +) + +win1251_char_to_order_map = ( +255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 +253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 +252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 +253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40 +155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50 +253, 71,172, 66,173, 65,174, 
76,175, 64,176,177, 77, 72,178, 69, # 60 + 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70 +191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206, +207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222, +223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238, +239,240,241,242,243,244,245,246, 68,247,248,249,250,251,252,253, + 37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35, + 45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43, + 3, 21, 10, 19, 13, 2, 24, 20, 4, 23, 11, 8, 12, 5, 1, 15, + 9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27, 16, +) + +latin5_char_to_order_map = ( +255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 +253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 +252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 +253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40 +155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50 +253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60 + 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70 +191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206, +207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222, +223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238, + 37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35, + 45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43, + 3, 21, 10, 19, 13, 2, 24, 20, 4, 23, 11, 8, 12, 5, 1, 15, + 9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27, 16, +239, 68,240,241,242,243,244,245,246,247,248,249,250,251,252,255, +) + +macCyrillic_char_to_order_map = ( +255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 
+253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 +252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 +253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40 +155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50 +253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60 + 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70 + 37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35, + 45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43, +191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206, +207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222, +223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238, +239,240,241,242,243,244,245,246,247,248,249,250,251,252, 68, 16, + 3, 21, 10, 19, 13, 2, 24, 20, 4, 23, 11, 8, 12, 5, 1, 15, + 9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27,255, +) + +IBM855_char_to_order_map = ( +255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 +253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 +252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 +253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40 +155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50 +253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60 + 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70 +191,192,193,194, 68,195,196,197,198,199,200,201,202,203,204,205, +206,207,208,209,210,211,212,213,214,215,216,217, 27, 59, 54, 70, + 3, 37, 21, 44, 28, 58, 13, 41, 2, 48, 39, 53, 19, 46,218,219, +220,221,222,223,224, 26, 55, 4, 42,225,226,227,228, 23, 60,229, +230,231,232,233,234,235, 11, 36,236,237,238,239,240,241,242,243, + 8, 49, 12, 38, 5, 31, 1, 34, 15,244,245,246,247, 35, 16,248, + 43, 9, 45, 7, 32, 6, 40, 14, 52, 24, 56, 10, 33, 17, 
61,249, +250, 18, 62, 20, 51, 25, 57, 30, 47, 29, 63, 22, 50,251,252,255, +) + +IBM866_char_to_order_map = ( +255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 +253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 +252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 +253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40 +155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50 +253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60 + 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70 + 37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35, + 45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43, + 3, 21, 10, 19, 13, 2, 24, 20, 4, 23, 11, 8, 12, 5, 1, 15, +191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206, +207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222, +223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238, + 9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27, 16, +239, 68,240,241,242,243,244,245,246,247,248,249,250,251,252,255, +) + +# Model Table: +# total sequences: 100% +# first 512 sequences: 97.6601% +# first 1024 sequences: 2.3389% +# rest sequences: 0.1237% +# negative sequences: 0.0009% +RussianLangModel = ( +0,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,1,3,3,3,3,1,3,3,3,2,3,2,3,3, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,0,3,2,2,2,2,2,0,0,2, +3,3,3,2,3,3,3,3,3,3,3,3,3,3,2,3,3,0,0,3,3,3,3,3,3,3,3,3,2,3,2,0, +0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,2,2,3,3,3,3,3,3,3,3,3,2,3,3,0,0,3,3,3,3,3,3,3,3,2,3,3,1,0, +0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,2,3,2,3,3,3,3,3,3,3,3,3,3,3,3,3,0,0,3,3,3,3,3,3,3,3,3,3,3,2,1, +0,0,0,0,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,0,0,3,3,3,3,3,3,3,3,3,3,3,2,1, 
+0,0,0,0,0,1,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,3,2,2,2,3,1,3,3,1,3,3,3,3,2,2,3,0,2,2,2,3,3,2,1,0, +0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,2,3,3,3,3,3,2,2,3,2,3,3,3,2,1,2,2,0,1,2,2,2,2,2,2,0, +0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2,3,0,2,2,3,3,2,1,2,0, +0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,2,3,3,1,2,3,2,2,3,2,3,3,3,3,2,2,3,0,3,2,2,3,1,1,1,0, +0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,3,2,2,3,3,3,3,3,2,3,3,3,3,2,2,2,0,3,3,3,2,2,2,2,0, +0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,3,3,3,2,3,2,3,3,3,3,3,3,2,3,2,2,0,1,3,2,1,2,2,1,0, +0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,3,3,3,3,2,1,1,3,0,1,1,1,1,2,1,1,0,2,2,2,1,2,0,1,0, +0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,2,3,3,2,2,2,2,1,3,2,3,2,3,2,1,2,2,0,1,1,2,1,2,1,2,0, +0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,3,3,3,3,3,2,2,3,2,3,3,3,2,2,2,2,0,2,2,2,2,3,1,1,0, +0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, +3,2,3,2,2,3,3,3,3,3,3,3,3,3,1,3,2,0,0,3,3,3,3,2,3,3,3,3,2,3,2,0, +0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,3,3,3,3,3,2,2,3,3,0,2,1,0,3,2,3,2,3,0,0,1,2,0,0,1,0,1,2,1,1,0, +0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,0,3,0,2,3,3,3,3,2,3,3,3,3,1,2,2,0,0,2,3,2,2,2,3,2,3,2,2,3,0,0, +0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,2,3,0,2,3,2,3,0,1,2,3,3,2,0,2,3,0,0,2,3,2,2,0,1,3,1,3,2,2,1,0, +0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,1,3,0,2,3,3,3,3,3,3,3,3,2,1,3,2,0,0,2,2,3,3,3,2,3,3,0,2,2,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,2,2,3,3,2,2,2,3,3,0,0,1,1,1,1,1,2,0,0,1,1,1,1,0,1,0, 
+0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,2,2,3,3,3,3,3,3,3,0,3,2,3,3,2,3,2,0,2,1,0,1,1,0,1,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,2,3,3,3,2,2,2,2,3,1,3,2,3,1,1,2,1,0,2,2,2,2,1,3,1,0, +0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, +2,2,3,3,3,3,3,1,2,2,1,3,1,0,3,0,0,3,0,0,0,1,1,0,1,2,1,0,0,0,0,0, +0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,2,2,1,1,3,3,3,2,2,1,2,2,3,1,1,2,0,0,2,2,1,3,0,0,2,1,1,2,1,1,0, +0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,2,3,3,3,3,1,2,2,2,1,2,1,3,3,1,1,2,1,2,1,2,2,0,2,0,0,1,1,0,1,0, +0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,3,3,3,3,3,2,1,3,2,2,3,2,0,3,2,0,3,0,1,0,1,1,0,0,1,1,1,1,0,1,0, +0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,2,3,3,3,2,2,2,3,3,1,2,1,2,1,0,1,0,1,1,0,1,0,0,2,1,1,1,0,1,0, +0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0, +3,1,1,2,1,2,3,3,2,2,1,2,2,3,0,2,1,0,0,2,2,3,2,1,2,2,2,2,2,3,1,0, +0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,1,1,0,1,1,2,2,1,1,3,0,0,1,3,1,1,1,0,0,0,1,0,1,1,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,1,3,3,3,2,0,0,0,2,1,0,1,0,2,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,0,1,0,0,2,3,2,2,2,1,2,2,2,1,2,1,0,0,1,1,1,0,2,0,1,1,1,0,0,1,1, +1,0,0,0,0,0,1,2,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0, +2,3,3,3,3,0,0,0,0,1,0,0,0,0,3,0,1,2,1,0,0,0,0,0,0,0,1,1,0,0,1,1, +1,0,1,0,1,2,0,0,1,1,2,1,0,1,1,1,1,0,1,1,1,1,0,1,0,0,1,0,0,1,1,0, +2,2,3,2,2,2,3,1,2,2,2,2,2,2,2,2,1,1,1,1,1,1,1,0,1,0,1,1,1,0,2,1, +1,1,1,1,1,1,1,1,2,1,1,1,1,1,1,1,1,1,1,0,1,0,1,1,0,1,1,1,0,1,1,0, +3,3,3,2,2,2,2,3,2,2,1,1,2,2,2,2,1,1,3,1,2,1,2,0,0,1,1,0,1,0,2,1, +1,1,1,1,1,2,1,0,1,1,1,1,0,1,0,0,1,1,0,0,1,0,1,0,0,1,0,0,0,1,1,0, +2,0,0,1,0,3,2,2,2,2,1,2,1,2,1,2,0,0,0,2,1,2,2,1,1,2,2,0,1,1,0,2, 
+1,1,1,1,1,0,1,1,1,2,1,1,1,2,1,0,1,2,1,1,1,1,0,1,1,1,0,0,1,0,0,1, +1,3,2,2,2,1,1,1,2,3,0,0,0,0,2,0,2,2,1,0,0,0,0,0,0,1,0,0,0,0,1,1, +1,0,1,1,0,1,0,1,1,0,1,1,0,2,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,1,1,0, +2,3,2,3,2,1,2,2,2,2,1,0,0,0,2,0,0,1,1,0,0,0,0,0,0,0,1,1,0,0,2,1, +1,1,2,1,0,2,0,0,1,0,1,0,0,1,0,0,1,1,0,1,1,0,0,0,0,0,1,0,0,0,0,0, +3,0,0,1,0,2,2,2,3,2,2,2,2,2,2,2,0,0,0,2,1,2,1,1,1,2,2,0,0,0,1,2, +1,1,1,1,1,0,1,2,1,1,1,1,1,1,1,0,1,1,1,1,1,1,0,1,1,1,1,1,1,0,0,1, +2,3,2,3,3,2,0,1,1,1,0,0,1,0,2,0,1,1,3,1,0,0,0,0,0,0,0,1,0,0,2,1, +1,1,1,1,1,1,1,0,1,0,1,1,1,1,0,1,1,1,0,0,1,1,0,1,0,0,0,0,0,0,1,0, +2,3,3,3,3,1,2,2,2,2,0,1,1,0,2,1,1,1,2,1,0,1,1,0,0,1,0,1,0,0,2,0, +0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,3,3,3,2,0,0,1,1,2,2,1,0,0,2,0,1,1,3,0,0,1,0,0,0,0,0,1,0,1,2,1, +1,1,2,0,1,1,1,0,1,0,1,1,0,1,0,1,1,1,1,0,1,0,0,0,0,0,0,1,0,1,1,0, +1,3,2,3,2,1,0,0,2,2,2,0,1,0,2,0,1,1,1,0,1,0,0,0,3,0,1,1,0,0,2,1, +1,1,1,0,1,1,0,0,0,0,1,1,0,1,0,0,2,1,1,0,1,0,0,0,1,0,1,0,0,1,1,0, +3,1,2,1,1,2,2,2,2,2,2,1,2,2,1,1,0,0,0,2,2,2,0,0,0,1,2,1,0,1,0,1, +2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,2,1,1,1,0,1,0,1,1,0,1,1,1,0,0,1, +3,0,0,0,0,2,0,1,1,1,1,1,1,1,0,1,0,0,0,1,1,1,0,1,0,1,1,0,0,1,0,1, +1,1,0,0,1,0,0,0,1,0,1,1,0,0,1,0,1,0,1,0,0,0,0,1,0,0,0,1,0,0,0,1, +1,3,3,2,2,0,0,0,2,2,0,0,0,1,2,0,1,1,2,0,0,0,0,0,0,0,0,1,0,0,2,1, +0,1,1,0,0,1,1,0,0,0,1,1,0,1,1,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,1,0, +2,3,2,3,2,0,0,0,0,1,1,0,0,0,2,0,2,0,2,0,0,0,0,0,1,0,0,1,0,0,1,1, +1,1,2,0,1,2,1,0,1,1,2,1,1,1,1,1,2,1,1,0,1,0,0,1,1,1,1,1,0,1,1,0, +1,3,2,2,2,1,0,0,2,2,1,0,1,2,2,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,1,1, +0,0,1,1,0,1,1,0,0,1,1,0,1,1,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0, +1,0,0,1,0,2,3,1,2,2,2,2,2,2,1,1,0,0,0,1,0,1,0,2,1,1,1,0,0,0,0,1, +1,1,0,1,1,0,1,1,1,1,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0, +2,0,2,0,0,1,0,3,2,1,2,1,2,2,0,1,0,0,0,2,1,0,0,2,1,1,1,1,0,2,0,2, +2,1,1,1,1,1,1,1,1,1,1,1,1,2,1,0,1,1,1,1,0,0,0,1,1,1,1,0,1,0,0,1, +1,2,2,2,2,1,0,0,1,0,0,0,0,0,2,0,1,1,1,1,0,0,0,0,1,0,1,2,0,0,2,0, 
+1,0,1,1,1,2,1,0,1,0,1,1,0,0,1,0,1,1,1,0,1,0,0,0,1,0,0,1,0,1,1,0, +2,1,2,2,2,0,3,0,1,1,0,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1, +0,0,0,1,1,1,0,0,1,0,1,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0, +1,2,2,3,2,2,0,0,1,1,2,0,1,2,1,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,1, +0,1,1,0,0,1,1,0,0,1,1,0,0,1,1,0,1,1,0,0,1,0,0,0,0,0,0,0,0,1,1,0, +2,2,1,1,2,1,2,2,2,2,2,1,2,2,0,1,0,0,0,1,2,2,2,1,2,1,1,1,1,1,2,1, +1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,0,1,1,1,0,0,0,0,1,1,1,0,1,1,0,0,1, +1,2,2,2,2,0,1,0,2,2,0,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,2,0, +0,0,1,0,0,1,0,0,0,0,1,0,1,1,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0, +0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,2,2,2,2,0,0,0,2,2,2,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,1, +0,1,1,0,0,1,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,2,2,2,2,0,0,0,0,1,0,0,1,1,2,0,0,0,0,1,0,1,0,0,1,0,0,2,0,0,0,1, +0,0,1,0,0,1,0,0,0,1,1,0,0,0,0,0,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0, +1,2,2,2,1,1,2,0,2,1,1,1,1,0,2,2,0,0,0,0,0,0,0,0,0,1,1,0,0,0,1,1, +0,0,1,0,1,1,0,0,0,0,1,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0, +1,0,2,1,2,0,0,0,0,0,1,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0, +0,0,1,0,1,1,0,0,0,0,1,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0, +1,0,0,0,0,2,0,1,2,1,0,1,1,1,0,1,0,0,0,1,0,1,0,0,1,0,1,0,0,0,0,1, +0,0,0,0,0,1,0,0,1,1,0,0,1,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1, +2,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, +1,0,0,0,1,0,0,0,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,1,0,0,0,0,0, +2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, +1,1,1,0,1,0,1,0,0,1,1,1,1,0,0,0,1,0,0,0,0,1,0,0,0,1,0,1,0,0,0,0, +1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, +1,1,0,1,1,0,1,0,1,0,0,0,0,1,1,0,1,1,0,0,0,0,0,1,0,1,1,0,1,0,0,0, +0,1,1,1,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0, +) + +Koi8rModel = { + 'char_to_order_map': KOI8R_char_to_order_map, + 
'precedence_matrix': RussianLangModel, + 'typical_positive_ratio': 0.976601, + 'keep_english_letter': False, + 'charset_name': "KOI8-R", + 'language': 'Russian', +} + +Win1251CyrillicModel = { + 'char_to_order_map': win1251_char_to_order_map, + 'precedence_matrix': RussianLangModel, + 'typical_positive_ratio': 0.976601, + 'keep_english_letter': False, + 'charset_name': "windows-1251", + 'language': 'Russian', +} + +Latin5CyrillicModel = { + 'char_to_order_map': latin5_char_to_order_map, + 'precedence_matrix': RussianLangModel, + 'typical_positive_ratio': 0.976601, + 'keep_english_letter': False, + 'charset_name': "ISO-8859-5", + 'language': 'Russian', +} + +MacCyrillicModel = { + 'char_to_order_map': macCyrillic_char_to_order_map, + 'precedence_matrix': RussianLangModel, + 'typical_positive_ratio': 0.976601, + 'keep_english_letter': False, + 'charset_name': "MacCyrillic", + 'language': 'Russian', +} + +Ibm866Model = { + 'char_to_order_map': IBM866_char_to_order_map, + 'precedence_matrix': RussianLangModel, + 'typical_positive_ratio': 0.976601, + 'keep_english_letter': False, + 'charset_name': "IBM866", + 'language': 'Russian', +} + +Ibm855Model = { + 'char_to_order_map': IBM855_char_to_order_map, + 'precedence_matrix': RussianLangModel, + 'typical_positive_ratio': 0.976601, + 'keep_english_letter': False, + 'charset_name': "IBM855", + 'language': 'Russian', +} diff --git a/thesisenv/lib/python3.6/site-packages/chardet/langgreekmodel.py b/thesisenv/lib/python3.6/site-packages/chardet/langgreekmodel.py new file mode 100644 index 0000000..5332221 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/chardet/langgreekmodel.py @@ -0,0 +1,225 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. 
All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +# 255: Control characters that usually does not exist in any text +# 254: Carriage/Return +# 253: symbol (punctuation) that does not belong to word +# 252: 0 - 9 + +# Character Mapping Table: +Latin7_char_to_order_map = ( +255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 +253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 +252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 +253, 82,100,104, 94, 98,101,116,102,111,187,117, 92, 88,113, 85, # 40 + 79,118,105, 83, 67,114,119, 95, 99,109,188,253,253,253,253,253, # 50 +253, 72, 70, 80, 81, 60, 96, 93, 89, 68,120, 97, 77, 86, 69, 55, # 60 + 78,115, 65, 66, 58, 76,106,103, 87,107,112,253,253,253,253,253, # 70 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 80 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 90 +253,233, 90,253,253,253,253,253,253,253,253,253,253, 74,253,253, # a0 +253,253,253,253,247,248, 61, 36, 46, 71, 73,253, 54,253,108,123, # b0 +110, 31, 51, 43, 41, 34, 91, 40, 
52, 47, 44, 53, 38, 49, 59, 39, # c0 + 35, 48,250, 37, 33, 45, 56, 50, 84, 57,120,121, 17, 18, 22, 15, # d0 +124, 1, 29, 20, 21, 3, 32, 13, 25, 5, 11, 16, 10, 6, 30, 4, # e0 + 9, 8, 14, 7, 2, 12, 28, 23, 42, 24, 64, 75, 19, 26, 27,253, # f0 +) + +win1253_char_to_order_map = ( +255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 +253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 +252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 +253, 82,100,104, 94, 98,101,116,102,111,187,117, 92, 88,113, 85, # 40 + 79,118,105, 83, 67,114,119, 95, 99,109,188,253,253,253,253,253, # 50 +253, 72, 70, 80, 81, 60, 96, 93, 89, 68,120, 97, 77, 86, 69, 55, # 60 + 78,115, 65, 66, 58, 76,106,103, 87,107,112,253,253,253,253,253, # 70 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 80 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 90 +253,233, 61,253,253,253,253,253,253,253,253,253,253, 74,253,253, # a0 +253,253,253,253,247,253,253, 36, 46, 71, 73,253, 54,253,108,123, # b0 +110, 31, 51, 43, 41, 34, 91, 40, 52, 47, 44, 53, 38, 49, 59, 39, # c0 + 35, 48,250, 37, 33, 45, 56, 50, 84, 57,120,121, 17, 18, 22, 15, # d0 +124, 1, 29, 20, 21, 3, 32, 13, 25, 5, 11, 16, 10, 6, 30, 4, # e0 + 9, 8, 14, 7, 2, 12, 28, 23, 42, 24, 64, 75, 19, 26, 27,253, # f0 +) + +# Model Table: +# total sequences: 100% +# first 512 sequences: 98.2851% +# first 1024 sequences:1.7001% +# rest sequences: 0.0359% +# negative sequences: 0.0148% +GreekLangModel = ( +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,3,2,2,3,3,3,3,3,3,3,3,1,3,3,3,0,2,2,3,3,0,3,0,3,2,0,3,3,3,0, +3,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,3,3,3,3,0,3,3,0,3,2,3,3,0,3,2,3,3,3,0,0,3,0,3,0,3,3,2,0,0,0, +2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0, 
+0,2,3,2,2,3,3,3,3,3,3,3,3,0,3,3,3,3,0,2,3,3,0,3,3,3,3,2,3,3,3,0, +2,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,2,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,0,2,1,3,3,3,3,2,3,3,2,3,3,2,0, +0,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,3,3,3,0,3,3,3,3,3,3,0,3,3,0,3,3,3,3,3,3,3,3,3,3,0,3,2,3,3,0, +2,0,1,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0, +0,3,3,3,3,3,2,3,0,0,0,0,3,3,0,3,1,3,3,3,0,3,3,0,3,3,3,3,0,0,0,0, +2,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,3,3,3,3,0,3,0,3,3,3,3,3,0,3,2,2,2,3,0,2,3,3,3,3,3,2,3,3,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,3,3,3,3,3,2,2,2,3,3,3,3,0,3,1,3,3,3,3,2,3,3,3,3,3,3,3,2,2,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,3,3,3,3,2,0,3,0,0,0,3,3,2,3,3,3,3,3,0,0,3,2,3,0,2,3,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,0,3,3,3,3,0,0,3,3,0,2,3,0,3,0,3,3,3,0,0,3,0,3,0,2,2,3,3,0,0, +0,0,1,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,3,3,3,3,2,0,3,2,3,3,3,3,0,3,3,3,3,3,0,3,3,2,3,2,3,3,2,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,3,2,3,2,3,3,3,3,3,3,0,2,3,2,3,2,2,2,3,2,3,3,2,3,0,2,2,2,3,0, +2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,3,0,0,0,3,3,3,2,3,3,0,0,3,0,3,0,0,0,3,2,0,3,0,3,0,0,2,0,2,0, +0,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,3,3,3,0,3,3,3,3,3,3,0,3,3,0,3,0,0,0,3,3,0,3,3,3,0,0,1,2,3,0, +3,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,3,3,3,3,2,0,0,3,2,2,3,3,0,3,3,3,3,3,2,1,3,0,3,2,3,3,2,1,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,3,3,0,2,3,3,3,3,3,3,0,0,3,0,3,0,0,0,3,3,0,3,2,3,0,0,3,3,3,0, +3,0,0,0,2,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 
+0,3,3,3,3,0,3,3,3,3,3,3,0,0,3,0,3,0,0,0,3,2,0,3,2,3,0,0,3,2,3,0, +2,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,3,1,2,2,3,3,3,3,3,3,0,2,3,0,3,0,0,0,3,3,0,3,0,2,0,0,2,3,1,0, +2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,0,3,3,3,3,0,3,0,3,3,2,3,0,3,3,3,3,3,3,0,3,3,3,0,2,3,0,0,3,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,0,3,3,3,0,0,3,0,0,0,3,3,0,3,0,2,3,3,0,0,3,0,3,0,3,3,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,3,0,0,0,3,3,3,3,3,3,0,0,3,0,2,0,0,0,3,3,0,3,0,3,0,0,2,0,2,0, +0,0,0,0,1,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,3,3,3,3,3,0,3,0,2,0,3,2,0,3,2,3,2,3,0,0,3,2,3,2,3,3,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,3,0,0,2,3,3,3,3,3,0,0,0,3,0,2,1,0,0,3,2,2,2,0,3,0,0,2,2,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,0,3,3,3,2,0,3,0,3,0,3,3,0,2,1,2,3,3,0,0,3,0,3,0,3,3,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,2,3,3,3,0,3,3,3,3,3,3,0,2,3,0,3,0,0,0,2,1,0,2,2,3,0,0,2,2,2,0, +0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,3,0,0,2,3,3,3,2,3,0,0,1,3,0,2,0,0,0,0,3,0,1,0,2,0,0,1,1,1,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,3,3,3,3,1,0,3,0,0,0,3,2,0,3,2,3,3,3,0,0,3,0,3,2,2,2,1,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,0,3,3,3,0,0,3,0,0,0,0,2,0,2,3,3,2,2,2,2,3,0,2,0,2,2,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,3,3,3,2,0,0,0,0,0,0,2,3,0,2,0,2,3,2,0,0,3,0,3,0,3,1,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,3,2,3,3,2,2,3,0,2,0,3,0,0,0,2,0,0,0,0,1,2,0,2,0,2,0, +0,2,0,2,0,2,2,0,0,1,0,2,2,2,0,2,2,2,0,2,2,2,0,0,2,0,0,1,0,0,0,0, +0,2,0,3,3,2,0,0,0,0,0,0,1,3,0,2,0,2,2,2,0,0,2,0,3,0,0,2,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 
+0,3,0,2,3,2,0,2,2,0,2,0,2,2,0,2,0,2,2,2,0,0,0,0,0,0,2,3,0,0,0,2, +0,1,2,0,0,0,0,2,2,0,0,0,2,1,0,2,2,0,0,0,0,0,0,1,0,2,0,0,0,0,0,0, +0,0,2,1,0,2,3,2,2,3,2,3,2,0,0,3,3,3,0,0,3,2,0,0,0,1,1,0,2,0,2,2, +0,2,0,2,0,2,2,0,0,2,0,2,2,2,0,2,2,2,2,0,0,2,0,0,0,2,0,1,0,0,0,0, +0,3,0,3,3,2,2,0,3,0,0,0,2,2,0,2,2,2,1,2,0,0,1,2,2,0,0,3,0,0,0,2, +0,1,2,0,0,0,1,2,0,0,0,0,0,0,0,2,2,0,1,0,0,2,0,0,0,2,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,2,3,3,2,2,0,0,0,2,0,2,3,3,0,2,0,0,0,0,0,0,2,2,2,0,2,2,0,2,0,2, +0,2,2,0,0,2,2,2,2,1,0,0,2,2,0,2,0,0,2,0,0,0,0,0,0,2,0,0,0,0,0,0, +0,2,0,3,2,3,0,0,0,3,0,0,2,2,0,2,0,2,2,2,0,0,2,0,0,0,0,0,0,0,0,2, +0,0,2,2,0,0,2,2,2,0,0,0,0,0,0,2,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,2,0,0,3,2,0,2,2,2,2,2,0,0,0,2,0,0,0,0,2,0,1,0,0,2,0,1,0,0,0, +0,2,2,2,0,2,2,0,1,2,0,2,2,2,0,2,2,2,2,1,2,2,0,0,2,0,0,0,0,0,0,0, +0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0, +0,2,0,2,0,2,2,0,0,0,0,1,2,1,0,0,2,2,0,0,2,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,3,2,3,0,0,2,0,0,0,2,2,0,2,0,0,0,1,0,0,2,0,2,0,2,2,0,0,0,0, +0,0,2,0,0,0,0,2,2,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0, +0,2,2,3,2,2,0,0,0,0,0,0,1,3,0,2,0,2,2,0,0,0,1,0,2,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,2,0,2,0,3,2,0,2,0,0,0,0,0,0,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, +0,0,2,0,0,0,0,1,1,0,0,2,1,2,0,2,2,0,1,0,0,1,0,0,0,2,0,0,0,0,0,0, +0,3,0,2,2,2,0,0,2,0,0,0,2,0,0,0,2,3,0,2,0,0,0,0,0,0,2,2,0,0,0,2, +0,1,2,0,0,0,1,2,2,1,0,0,0,2,0,0,2,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,2,1,2,0,2,2,0,2,0,0,2,0,0,0,0,1,2,1,0,2,1,0,0,0,0,0,0,0,0,0,0, +0,0,2,0,0,0,3,1,2,2,0,2,0,0,0,0,2,0,0,0,2,0,0,3,0,0,0,0,2,2,2,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,2,1,0,2,0,1,2,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,1,0,0,0,0,0,0,2, +0,2,2,0,0,2,2,2,2,2,0,1,2,0,0,0,2,2,0,1,0,2,0,0,2,2,0,0,0,0,0,0, 
+0,0,0,0,1,0,0,0,0,0,0,0,3,0,0,2,0,0,0,0,0,0,0,0,2,0,2,0,0,0,0,2, +0,1,2,0,0,0,0,2,2,1,0,1,0,1,0,2,2,2,1,0,0,0,0,0,0,1,0,0,0,0,0,0, +0,2,0,1,2,0,0,0,0,0,0,0,0,0,0,2,0,0,2,2,0,0,0,0,1,0,0,0,0,0,0,2, +0,2,2,0,0,0,0,2,2,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,2,0,0,0, +0,2,2,2,2,0,0,0,3,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,2,0,0,0,0,0,0,1, +0,0,2,0,0,0,0,1,2,0,0,0,0,0,0,2,2,1,1,0,0,0,0,0,0,1,0,0,0,0,0,0, +0,2,0,2,2,2,0,0,2,0,0,0,0,0,0,0,2,2,2,0,0,0,2,0,0,0,0,0,0,0,0,2, +0,0,1,0,0,0,0,2,1,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0, +0,3,0,2,0,0,0,0,0,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,2,0,0,0,0,2, +0,0,2,0,0,0,0,2,2,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,2,0,2,2,1,0,0,0,0,0,0,2,0,0,2,0,2,2,2,0,0,0,0,0,0,2,0,0,0,0,2, +0,0,2,0,0,2,0,2,2,0,0,0,0,2,0,2,0,0,0,0,0,2,0,0,0,2,0,0,0,0,0,0, +0,0,3,0,0,0,2,2,0,2,2,0,0,0,0,0,2,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,2,0,0,0,0,0, +0,2,2,2,2,2,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,1, +0,0,0,0,0,0,0,2,1,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,2,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0, +0,2,0,0,0,2,0,0,0,0,0,1,0,0,0,0,2,2,0,0,0,1,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,2,0,0,0, +0,2,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,1,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0,2,0,2,0,0,0, +0,0,0,0,0,0,0,0,2,1,0,0,0,0,0,0,2,0,0,0,1,2,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +) + +Latin7GreekModel = { + 'char_to_order_map': Latin7_char_to_order_map, + 'precedence_matrix': GreekLangModel, + 'typical_positive_ratio': 0.982851, + 'keep_english_letter': False, + 'charset_name': "ISO-8859-7", + 'language': 'Greek', +} + +Win1253GreekModel = { + 'char_to_order_map': win1253_char_to_order_map, + 'precedence_matrix': GreekLangModel, + 'typical_positive_ratio': 0.982851, + 'keep_english_letter': False, + 'charset_name': "windows-1253", + 'language': 'Greek', +} diff --git a/thesisenv/lib/python3.6/site-packages/chardet/langhebrewmodel.py b/thesisenv/lib/python3.6/site-packages/chardet/langhebrewmodel.py new file mode 100644 index 0000000..58f4c87 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/chardet/langhebrewmodel.py @@ -0,0 +1,200 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Universal charset detector code. +# +# The Initial Developer of the Original Code is +# Simon Montagu +# Portions created by the Initial Developer are Copyright (C) 2005 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# Shy Shalom - original C code +# Shoshannah Forbes - original C code (?) +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +# 255: Control characters that usually does not exist in any text +# 254: Carriage/Return +# 253: symbol (punctuation) that does not belong to word +# 252: 0 - 9 + +# Windows-1255 language model +# Character Mapping Table: +WIN1255_CHAR_TO_ORDER_MAP = ( +255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 +253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 +252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 +253, 69, 91, 79, 80, 92, 89, 97, 90, 68,111,112, 82, 73, 95, 85, # 40 + 78,121, 86, 71, 67,102,107, 84,114,103,115,253,253,253,253,253, # 50 +253, 50, 74, 60, 61, 42, 76, 70, 64, 53,105, 93, 56, 65, 54, 49, # 60 + 66,110, 51, 43, 44, 63, 81, 77, 98, 75,108,253,253,253,253,253, # 70 +124,202,203,204,205, 40, 58,206,207,208,209,210,211,212,213,214, +215, 83, 52, 47, 46, 72, 32, 94,216,113,217,109,218,219,220,221, + 34,116,222,118,100,223,224,117,119,104,125,225,226, 87, 99,227, +106,122,123,228, 55,229,230,101,231,232,120,233, 48, 39, 57,234, + 30, 59, 41, 88, 33, 37, 36, 31, 29, 35,235, 62, 28,236,126,237, +238, 38, 45,239,240,241,242,243,127,244,245,246,247,248,249,250, + 9, 8, 20, 16, 3, 2, 24, 14, 22, 1, 25, 15, 4, 11, 6, 23, + 12, 19, 13, 26, 18, 27, 21, 17, 7, 10, 5,251,252,128, 96,253, +) + +# Model Table: +# total sequences: 100% +# first 512 sequences: 98.4004% +# first 1024 sequences: 1.5981% +# rest sequences: 0.087% +# negative sequences: 0.0015% +HEBREW_LANG_MODEL = ( +0,3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,2,3,2,1,2,0,1,0,0, +3,0,3,1,0,0,1,3,2,0,1,1,2,0,2,2,2,1,1,1,1,2,1,1,1,2,0,0,2,2,0,1, 
+3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2,2, +1,2,1,2,1,2,0,0,2,0,0,0,0,0,1,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2, +1,2,1,3,1,1,0,0,2,0,0,0,1,0,1,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,0,1,2,2,1,3, +1,2,1,1,2,2,0,0,2,2,0,0,0,0,1,0,1,0,0,0,1,0,0,0,0,0,0,1,0,1,1,0, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,2,2,2,2,3,2, +1,2,1,2,2,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,2,3,2,2,3,2,2,2,1,2,2,2,2, +1,2,1,1,2,2,0,1,2,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,0,2,2,2,2,2, +0,2,0,2,2,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,0,2,2,2, +0,2,1,2,2,2,0,0,2,1,0,0,0,0,1,0,1,0,0,0,0,0,0,2,0,0,0,0,0,0,1,0, +3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,3,2,1,2,3,2,2,2, +1,2,1,2,2,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,1,0, +3,3,3,3,3,3,3,3,3,2,3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,3,1,0,2,0,2, +0,2,1,2,2,2,0,0,1,2,0,0,0,0,1,0,1,0,0,0,0,0,0,1,0,0,0,2,0,0,1,0, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,2,3,2,2,3,2,1,2,1,1,1, +0,1,1,1,1,1,3,0,1,0,0,0,0,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, +3,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,0,1,1,0,0,1,0,0,1,0,0,0,0, +0,0,1,0,0,0,0,0,2,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2,2,2,2,2, +0,2,0,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, +3,3,3,3,3,3,3,3,3,2,3,3,3,2,1,2,3,3,2,3,3,3,3,2,3,2,1,2,0,2,1,2, +0,2,0,2,2,2,0,0,1,2,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,1,0, +3,3,3,3,3,3,3,3,3,2,3,3,3,1,2,2,3,3,2,3,2,3,2,2,3,1,2,2,0,2,2,2, +0,2,1,2,2,2,0,0,1,2,0,0,0,0,1,0,0,0,0,0,1,0,0,1,0,0,0,1,0,0,1,0, +3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,2,3,3,2,2,2,3,3,3,3,1,3,2,2,2, +0,2,0,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, 
+3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,3,3,3,2,3,2,2,2,1,2,2,0,2,2,2,2, +0,2,0,2,2,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, +3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,1,3,2,3,3,2,3,3,2,2,1,2,2,2,2,2,2, +0,2,1,2,1,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,1,0, +3,3,3,3,3,3,2,3,2,3,3,2,3,3,3,3,2,3,2,3,3,3,3,3,2,2,2,2,2,2,2,1, +0,2,0,1,2,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0, +3,3,3,3,3,3,3,3,3,2,1,2,3,3,3,3,3,3,3,2,3,2,3,2,1,2,3,0,2,1,2,2, +0,2,1,1,2,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,2,0, +3,3,3,3,3,3,3,3,3,2,3,3,3,3,2,1,3,1,2,2,2,1,2,3,3,1,2,1,2,2,2,2, +0,1,1,1,1,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,3,3,3,0,2,3,3,3,1,3,3,3,1,2,2,2,2,1,1,2,2,2,2,2,2, +0,2,0,1,1,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0, +3,3,3,3,3,3,2,3,3,3,2,2,3,3,3,2,1,2,3,2,3,2,2,2,2,1,2,1,1,1,2,2, +0,2,1,1,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, +3,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,0,0,0,1,0,0,0,0,0, +1,0,1,0,0,0,0,0,2,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,2,3,3,2,3,1,2,2,2,2,3,2,3,1,1,2,2,1,2,2,1,1,0,2,2,2,2, +0,1,0,1,2,2,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0, +3,0,0,1,1,0,1,0,0,1,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,2,2,0, +0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,0,1,0,1,0,1,1,0,1,1,0,0,0,1,1,0,1,1,1,0,0,0,0,0,0,1,0,0,0,0,0, +0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,0,0,0,1,1,0,1,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0, +3,2,2,1,2,2,2,2,2,2,2,1,2,2,1,2,2,1,1,1,1,1,1,1,1,2,1,1,0,3,3,3, +0,3,0,2,2,2,2,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, +2,2,2,3,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,1,2,2,1,2,2,2,1,1,1,2,0,1, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,2,2,2,2,2,2,2,2,2,2,1,2,2,2,2,2,2,2,2,2,2,2,0,2,2,0,0,0,0,0,0, +0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 
+2,3,1,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,1,2,1,0,2,1,0, +0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0, +0,3,1,1,2,2,2,2,2,1,2,2,2,1,1,2,2,2,2,2,2,2,1,2,2,1,0,1,1,1,1,0, +0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,2,1,1,1,1,2,1,1,2,1,0,1,1,1,1,1,1,1,1,1,1,1,0,1,0,0,0,0,0,0,0, +0,0,2,0,0,0,0,0,0,0,0,1,1,0,0,0,0,1,1,0,0,1,1,0,0,0,0,0,0,1,0,0, +2,1,1,2,2,2,2,2,2,2,2,2,2,2,1,2,2,2,2,2,1,2,1,2,1,1,1,1,0,0,0,0, +0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,2,1,2,2,2,2,2,2,2,2,2,2,1,2,1,2,1,1,2,1,1,1,2,1,2,1,2,0,1,0,1, +0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,3,1,2,2,2,1,2,2,2,2,2,2,2,2,1,2,1,1,1,1,1,1,2,1,2,1,1,0,1,0,1, +0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,1,2,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,2,2, +0,2,0,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, +3,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,1,1,1,1,1,1,1,0,1,1,0,1,0,0,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,2,0,1,1,1,0,1,0,0,0,1,1,0,1,1,0,0,0,0,0,1,1,0,0, +0,1,1,1,2,1,2,2,2,0,2,0,2,0,1,1,2,1,1,1,1,2,1,0,1,1,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,0,1,0,0,0,0,0,1,0,1,2,2,0,1,0,0,1,1,2,2,1,2,0,2,0,0,0,1,2,0,1, +2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,2,0,2,1,2,0,2,0,0,1,1,1,1,1,1,0,1,0,0,0,1,0,0,1, +2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,1,0,0,0,0,0,1,0,2,1,1,0,1,0,0,1,1,1,2,2,0,0,1,0,0,0,1,0,0,1, +1,1,2,1,0,1,1,1,0,1,0,1,1,1,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,2,2,1, +0,2,0,1,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 
+2,1,0,0,1,0,1,1,1,1,0,0,0,0,0,1,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,1,1,1,1,1,1,1,1,2,1,0,1,1,1,1,1,1,1,1,1,1,1,0,1,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,1,1,1,0,0,0,0,1,1,1,0,1,1,0,1,0,0,0,1,1,0,1, +2,0,1,0,1,0,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,1,0,1,1,1,0,1,0,0,1,1,2,1,1,2,0,1,0,0,0,1,1,0,1, +1,0,0,1,0,0,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,1,0,1,1,2,0,1,0,0,0,0,2,1,1,2,0,2,0,0,0,1,1,0,1, +1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,1,0,2,1,1,0,1,0,0,2,2,1,2,1,1,0,1,0,0,0,1,1,0,1, +2,0,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,1,2,2,0,0,0,0,0,1,1,0,1,0,0,1,0,0,0,0,1,0,1, +1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,1,2,2,0,0,0,0,2,1,1,1,0,2,1,1,0,0,0,2,1,0,1, +1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,1,0,1,1,2,0,1,0,0,1,1,0,2,1,1,0,1,0,0,0,1,1,0,1, +2,2,1,1,1,0,1,1,0,1,1,0,1,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,1,0,2,1,1,0,1,0,0,1,1,0,1,2,1,0,2,0,0,0,1,1,0,1, +2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0, +0,1,0,0,2,0,2,1,1,0,1,0,1,0,0,1,0,0,0,0,1,0,0,0,1,0,0,0,0,0,1,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,1,0,1,1,2,0,1,0,0,1,1,1,0,1,0,0,1,0,0,0,1,0,0,1, 
+1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,0,0,0,0,0,0,0,1,0,1,1,0,0,1,0,0,2,1,1,1,1,1,0,1,0,0,0,0,1,0,1, +0,1,1,1,2,1,1,1,1,0,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,1,2,1,0,0,0,0,0,1,1,1,1,1,0,1,0,0,0,1,1,0,0, +) + +Win1255HebrewModel = { + 'char_to_order_map': WIN1255_CHAR_TO_ORDER_MAP, + 'precedence_matrix': HEBREW_LANG_MODEL, + 'typical_positive_ratio': 0.984004, + 'keep_english_letter': False, + 'charset_name': "windows-1255", + 'language': 'Hebrew', +} diff --git a/thesisenv/lib/python3.6/site-packages/chardet/langhungarianmodel.py b/thesisenv/lib/python3.6/site-packages/chardet/langhungarianmodel.py new file mode 100644 index 0000000..bb7c095 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/chardet/langhungarianmodel.py @@ -0,0 +1,225 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +# 255: Control characters that usually does not exist in any text +# 254: Carriage/Return +# 253: symbol (punctuation) that does not belong to word +# 252: 0 - 9 + +# Character Mapping Table: +Latin2_HungarianCharToOrderMap = ( +255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 +253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 +252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 +253, 28, 40, 54, 45, 32, 50, 49, 38, 39, 53, 36, 41, 34, 35, 47, + 46, 71, 43, 33, 37, 57, 48, 64, 68, 55, 52,253,253,253,253,253, +253, 2, 18, 26, 17, 1, 27, 12, 20, 9, 22, 7, 6, 13, 4, 8, + 23, 67, 10, 5, 3, 21, 19, 65, 62, 16, 11,253,253,253,253,253, +159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174, +175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190, +191,192,193,194,195,196,197, 75,198,199,200,201,202,203,204,205, + 79,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220, +221, 51, 81,222, 78,223,224,225,226, 44,227,228,229, 61,230,231, +232,233,234, 58,235, 66, 59,236,237,238, 60, 69, 63,239,240,241, + 82, 14, 74,242, 70, 80,243, 72,244, 15, 83, 77, 84, 30, 76, 85, +245,246,247, 25, 73, 42, 24,248,249,250, 31, 56, 29,251,252,253, +) + +win1250HungarianCharToOrderMap = ( +255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 +253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 +252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 +253, 28, 40, 54, 45, 32, 50, 49, 38, 39, 53, 36, 41, 34, 35, 47, + 46, 72, 43, 33, 
37, 57, 48, 64, 68, 55, 52,253,253,253,253,253, +253, 2, 18, 26, 17, 1, 27, 12, 20, 9, 22, 7, 6, 13, 4, 8, + 23, 67, 10, 5, 3, 21, 19, 65, 62, 16, 11,253,253,253,253,253, +161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176, +177,178,179,180, 78,181, 69,182,183,184,185,186,187,188,189,190, +191,192,193,194,195,196,197, 76,198,199,200,201,202,203,204,205, + 81,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220, +221, 51, 83,222, 80,223,224,225,226, 44,227,228,229, 61,230,231, +232,233,234, 58,235, 66, 59,236,237,238, 60, 70, 63,239,240,241, + 84, 14, 75,242, 71, 82,243, 73,244, 15, 85, 79, 86, 30, 77, 87, +245,246,247, 25, 74, 42, 24,248,249,250, 31, 56, 29,251,252,253, +) + +# Model Table: +# total sequences: 100% +# first 512 sequences: 94.7368% +# first 1024 sequences:5.2623% +# rest sequences: 0.8894% +# negative sequences: 0.0009% +HungarianLangModel = ( +0,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3, +3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,2,2,3,3,1,1,2,2,2,2,2,1,2, +3,2,2,3,3,3,3,3,2,3,3,3,3,3,3,1,2,3,3,3,3,2,3,3,1,1,3,3,0,1,1,1, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0, +3,2,1,3,3,3,3,3,2,3,3,3,3,3,1,1,2,3,3,3,3,3,3,3,1,1,3,2,0,1,1,1, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, +3,3,3,3,3,3,3,3,3,3,3,1,1,2,3,3,3,1,3,3,3,3,3,1,3,3,2,2,0,3,2,3, +0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0, +3,3,3,3,3,3,2,3,3,3,2,3,3,2,3,3,3,3,3,2,3,3,2,2,3,2,3,2,0,3,2,2, +0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0, +3,3,3,3,3,3,2,3,3,3,3,3,2,3,3,3,1,2,3,2,2,3,1,2,3,3,2,2,0,3,3,3, +0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,3,3,3,2,2,3,3,3,3,3,3,2,3,3,3,3,2,3,3,3,3,0,2,3,2, +0,0,0,1,1,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,3,3,3,3,1,1,1,3,3,2,1,3,2,2,3,2,1,3,2,2,1,0,3,3,1, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, 
+3,2,2,3,3,3,3,3,1,2,3,3,3,3,1,2,1,3,3,3,3,2,2,3,1,1,3,2,0,1,1,1, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, +3,3,3,3,3,3,3,3,2,2,3,3,3,3,3,2,1,3,3,3,3,3,2,2,1,3,3,3,0,1,1,2, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,2,3,3,2,3,3,3,2,0,3,2,3, +0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,1,0, +3,3,3,3,3,3,2,3,3,3,2,3,2,3,3,3,1,3,2,2,2,3,1,1,3,3,1,1,0,3,3,2, +0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,2,3,3,3,2,3,2,3,3,3,2,3,3,3,3,3,1,2,3,2,2,0,2,2,2, +0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, +3,3,3,2,2,2,3,1,3,3,2,2,1,3,3,3,1,1,3,1,2,3,2,3,2,2,2,1,0,2,2,2, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0, +3,1,1,3,3,3,3,3,1,2,3,3,3,3,1,2,1,3,3,3,2,2,3,2,1,0,3,2,0,1,1,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,1,1,3,3,3,3,3,1,2,3,3,3,3,1,1,0,3,3,3,3,0,2,3,0,0,2,1,0,1,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,2,2,3,3,2,2,2,2,3,3,0,1,2,3,2,3,2,2,3,2,1,2,0,2,2,2, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0, +3,3,3,3,3,3,1,2,3,3,3,2,1,2,3,3,2,2,2,3,2,3,3,1,3,3,1,1,0,2,3,2, +0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, +3,3,3,1,2,2,2,2,3,3,3,1,1,1,3,3,1,1,3,1,1,3,2,1,2,3,1,1,0,2,2,2, +0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, +3,3,3,2,1,2,1,1,3,3,1,1,1,1,3,3,1,1,2,2,1,2,1,1,2,2,1,1,0,2,2,1, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, +3,3,3,1,1,2,1,1,3,3,1,0,1,1,3,3,2,0,1,1,2,3,1,0,2,2,1,0,0,1,3,2, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, +3,2,1,3,3,3,3,3,1,2,3,2,3,3,2,1,1,3,2,3,2,1,2,2,0,1,2,1,0,0,1,1, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, +3,3,3,3,2,2,2,2,3,1,2,2,1,1,3,3,0,3,2,1,2,3,2,1,3,3,1,1,0,2,1,3, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, 
+3,3,3,2,2,2,3,2,3,3,3,2,1,1,3,3,1,1,1,2,2,3,2,3,2,2,2,1,0,2,2,1, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, +1,0,0,3,3,3,3,3,0,0,3,3,2,3,0,0,0,2,3,3,1,0,1,2,0,0,1,1,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,1,2,3,3,3,3,3,1,2,3,3,2,2,1,1,0,3,3,2,2,1,2,2,1,0,2,2,0,1,1,1, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,2,2,1,3,1,2,3,3,2,2,1,1,2,2,1,1,1,1,3,2,1,1,1,1,2,1,0,1,2,1, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0, +2,3,3,1,1,1,1,1,3,3,3,0,1,1,3,3,1,1,1,1,1,2,2,0,3,1,1,2,0,2,1,1, +0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, +3,1,0,1,2,1,2,2,0,1,2,3,1,2,0,0,0,2,1,1,1,1,1,2,0,0,1,1,0,0,0,0, +1,2,1,2,2,2,1,2,1,2,0,2,0,2,2,1,1,2,1,1,2,1,1,1,0,1,0,0,0,1,1,0, +1,1,1,2,3,2,3,3,0,1,2,2,3,1,0,1,0,2,1,2,2,0,1,1,0,0,1,1,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,0,0,3,3,2,2,1,0,0,3,2,3,2,0,0,0,1,1,3,0,0,1,1,0,0,2,1,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,1,1,2,2,3,3,1,0,1,3,2,3,1,1,1,0,1,1,1,1,1,3,1,0,0,2,2,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,1,1,1,2,2,2,1,0,1,2,3,3,2,0,0,0,2,1,1,1,2,1,1,1,0,1,1,1,0,0,0, +1,2,2,2,2,2,1,1,1,2,0,2,1,1,1,1,1,2,1,1,1,1,1,1,0,1,1,1,0,0,1,1, +3,2,2,1,0,0,1,1,2,2,0,3,0,1,2,1,1,0,0,1,1,1,0,1,1,1,1,0,2,1,1,1, +2,2,1,1,1,2,1,2,1,1,1,1,1,1,1,2,1,1,1,2,3,1,1,1,1,1,1,1,1,1,0,1, +2,3,3,0,1,0,0,0,3,3,1,0,0,1,2,2,1,0,0,0,0,2,0,0,1,1,1,0,2,1,1,1, +2,1,1,1,1,1,1,2,1,1,0,1,1,0,1,1,1,0,1,2,1,1,0,1,1,1,1,1,1,1,0,1, +2,3,3,0,1,0,0,0,2,2,0,0,0,0,1,2,2,0,0,0,0,1,0,0,1,1,0,0,2,0,1,0, +2,1,1,1,1,2,1,1,1,1,1,1,1,2,1,1,1,1,1,1,1,1,1,2,0,1,1,1,1,1,0,1, +3,2,2,0,1,0,1,0,2,3,2,0,0,1,2,2,1,0,0,1,1,1,0,0,2,1,0,1,2,2,1,1, +2,1,1,1,1,1,1,2,1,1,1,1,1,1,0,2,1,0,1,1,0,1,1,1,0,1,1,2,1,1,0,1, +2,2,2,0,0,1,0,0,2,2,1,1,0,0,2,1,1,0,0,0,1,2,0,0,2,1,0,0,2,1,1,1, +2,1,1,1,1,2,1,2,1,1,1,2,2,1,1,2,1,1,1,2,1,1,1,1,1,1,1,1,1,1,0,1, 
+1,2,3,0,0,0,1,0,3,2,1,0,0,1,2,1,1,0,0,0,0,2,1,0,1,1,0,0,2,1,2,1, +1,1,0,0,0,1,0,1,1,1,1,1,2,0,0,1,0,0,0,2,0,0,1,1,1,1,1,1,1,1,0,1, +3,0,0,2,1,2,2,1,0,0,2,1,2,2,0,0,0,2,1,1,1,0,1,1,0,0,1,1,2,0,0,0, +1,2,1,2,2,1,1,2,1,2,0,1,1,1,1,1,1,1,1,1,2,1,1,0,0,1,1,1,1,0,0,1, +1,3,2,0,0,0,1,0,2,2,2,0,0,0,2,2,1,0,0,0,0,3,1,1,1,1,0,0,2,1,1,1, +2,1,0,1,1,1,0,1,1,1,1,1,1,1,0,2,1,0,0,1,0,1,1,0,1,1,1,1,1,1,0,1, +2,3,2,0,0,0,1,0,2,2,0,0,0,0,2,1,1,0,0,0,0,2,1,0,1,1,0,0,2,1,1,0, +2,1,1,1,1,2,1,2,1,2,0,1,1,1,0,2,1,1,1,2,1,1,1,1,0,1,1,1,1,1,0,1, +3,1,1,2,2,2,3,2,1,1,2,2,1,1,0,1,0,2,2,1,1,1,1,1,0,0,1,1,0,1,1,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,2,2,0,0,0,0,0,2,2,0,0,0,0,2,2,1,0,0,0,1,1,0,0,1,2,0,0,2,1,1,1, +2,2,1,1,1,2,1,2,1,1,0,1,1,1,1,2,1,1,1,2,1,1,1,1,0,1,2,1,1,1,0,1, +1,0,0,1,2,3,2,1,0,0,2,0,1,1,0,0,0,1,1,1,1,0,1,1,0,0,1,0,0,0,0,0, +1,2,1,2,1,2,1,1,1,2,0,2,1,1,1,0,1,2,0,0,1,1,1,0,0,0,0,0,0,0,0,0, +2,3,2,0,0,0,0,0,1,1,2,1,0,0,1,1,1,0,0,0,0,2,0,0,1,1,0,0,2,1,1,1, +2,1,1,1,1,1,1,2,1,0,1,1,1,1,0,2,1,1,1,1,1,1,0,1,0,1,1,1,1,1,0,1, +1,2,2,0,1,1,1,0,2,2,2,0,0,0,3,2,1,0,0,0,1,1,0,0,1,1,0,1,1,1,0,0, +1,1,0,1,1,1,1,1,1,1,1,2,1,1,1,1,1,1,1,2,1,1,1,0,0,1,1,1,0,1,0,1, +2,1,0,2,1,1,2,2,1,1,2,1,1,1,0,0,0,1,1,0,1,1,1,1,0,0,1,1,1,0,0,0, +1,2,2,2,2,2,1,1,1,2,0,2,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,0,1,0, +1,2,3,0,0,0,1,0,2,2,0,0,0,0,2,2,0,0,0,0,0,1,0,0,1,0,0,0,2,0,1,0, +2,1,1,1,1,1,0,2,0,0,0,1,2,1,1,1,1,0,1,2,0,1,0,1,0,1,1,1,0,1,0,1, +2,2,2,0,0,0,1,0,2,1,2,0,0,0,1,1,2,0,0,0,0,1,0,0,1,1,0,0,2,1,0,1, +2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,0,1,1,1,1,1,0,1, +1,2,2,0,0,0,1,0,2,2,2,0,0,0,1,1,0,0,0,0,0,1,1,0,2,0,0,1,1,1,0,1, +1,0,1,1,1,1,1,1,0,1,1,1,1,0,0,1,0,0,1,1,0,1,0,1,1,1,1,1,0,0,0,1, +1,0,0,1,0,1,2,1,0,0,1,1,1,2,0,0,0,1,1,0,1,0,1,1,0,0,1,0,0,0,0,0, +0,2,1,2,1,1,1,1,1,2,0,2,0,1,1,0,1,2,1,0,1,1,1,0,0,0,0,0,0,1,0,0, +2,1,1,0,1,2,0,0,1,1,1,0,0,0,1,1,0,0,0,0,0,1,0,0,1,0,0,0,2,1,0,1, +2,2,1,1,1,1,1,2,1,1,0,1,1,1,1,2,1,1,1,2,1,1,0,1,0,1,1,1,1,1,0,1, 
+1,2,2,0,0,0,0,0,1,1,0,0,0,0,2,1,0,0,0,0,0,2,0,0,2,2,0,0,2,0,0,1, +2,1,1,1,1,1,1,1,0,1,1,0,1,1,0,1,0,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1, +1,1,2,0,0,3,1,0,2,1,1,1,0,0,1,1,1,0,0,0,1,1,0,0,0,1,0,0,1,0,1,0, +1,2,1,0,1,1,1,2,1,1,0,1,1,1,1,1,0,0,0,1,1,1,1,1,0,1,0,0,0,1,0,0, +2,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,1,0,0,0,1,0,0,0,0,2,0,0,0, +2,1,1,1,1,1,1,1,1,1,0,1,1,1,1,1,1,1,1,1,2,1,1,0,0,1,1,1,1,1,0,1, +2,1,1,1,2,1,1,1,0,1,1,2,1,0,0,0,0,1,1,1,1,0,1,0,0,0,0,1,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,1,0,1,1,1,1,1,0,0,1,1,2,1,0,0,0,1,1,0,0,0,1,1,0,0,1,0,1,0,0,0, +1,2,1,1,1,1,1,1,1,1,0,1,0,1,1,1,1,1,1,0,1,1,1,0,0,0,0,0,0,1,0,0, +2,0,0,0,1,1,1,1,0,0,1,1,0,0,0,0,0,1,1,1,2,0,0,1,0,0,1,0,1,0,0,0, +0,1,1,1,1,1,1,1,1,2,0,1,1,1,1,0,1,1,1,0,1,1,1,0,0,0,0,0,0,0,0,0, +1,0,0,1,1,1,1,1,0,0,2,1,0,1,0,0,0,1,0,1,0,0,0,0,0,0,1,0,0,0,0,0, +0,1,1,1,1,1,1,0,1,1,0,1,0,1,1,0,1,1,0,0,1,1,1,0,0,0,0,0,0,0,0,0, +1,0,0,1,1,1,0,0,0,0,1,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, +0,1,1,1,1,1,0,0,1,1,0,1,0,1,0,0,1,1,1,0,1,1,1,0,0,0,0,0,0,0,0,0, +0,0,0,1,0,0,0,0,0,0,1,1,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,1,1,1,0,1,0,0,1,1,0,1,0,1,1,0,1,1,1,0,1,1,1,0,0,0,0,0,0,0,0,0, +2,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,0,0,1,0,0,1,0,1,0,1,1,1,0,0,1,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,0,0,1,1,1,1,0,0,0,1,1,1,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0, +0,1,1,1,1,1,1,0,1,1,0,1,0,1,0,0,1,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0, +) + +Latin2HungarianModel = { + 'char_to_order_map': Latin2_HungarianCharToOrderMap, + 'precedence_matrix': HungarianLangModel, + 'typical_positive_ratio': 0.947368, + 'keep_english_letter': True, + 'charset_name': "ISO-8859-2", + 'language': 'Hungarian', +} + +Win1250HungarianModel = { + 'char_to_order_map': win1250HungarianCharToOrderMap, + 'precedence_matrix': HungarianLangModel, + 'typical_positive_ratio': 0.947368, + 'keep_english_letter': True, + 'charset_name': "windows-1250", + 'language': 'Hungarian', +} diff --git 
a/thesisenv/lib/python3.6/site-packages/chardet/langthaimodel.py b/thesisenv/lib/python3.6/site-packages/chardet/langthaimodel.py new file mode 100644 index 0000000..15f94c2 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/chardet/langthaimodel.py @@ -0,0 +1,199 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +# 255: Control characters that usually does not exist in any text +# 254: Carriage/Return +# 253: symbol (punctuation) that does not belong to word +# 252: 0 - 9 + +# The following result for thai was collected from a limited sample (1M). 
+ +# Character Mapping Table: +TIS620CharToOrderMap = ( +255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 +253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 +252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 +253,182,106,107,100,183,184,185,101, 94,186,187,108,109,110,111, # 40 +188,189,190, 89, 95,112,113,191,192,193,194,253,253,253,253,253, # 50 +253, 64, 72, 73,114, 74,115,116,102, 81,201,117, 90,103, 78, 82, # 60 + 96,202, 91, 79, 84,104,105, 97, 98, 92,203,253,253,253,253,253, # 70 +209,210,211,212,213, 88,214,215,216,217,218,219,220,118,221,222, +223,224, 99, 85, 83,225,226,227,228,229,230,231,232,233,234,235, +236, 5, 30,237, 24,238, 75, 8, 26, 52, 34, 51,119, 47, 58, 57, + 49, 53, 55, 43, 20, 19, 44, 14, 48, 3, 17, 25, 39, 62, 31, 54, + 45, 9, 16, 2, 61, 15,239, 12, 42, 46, 18, 21, 76, 4, 66, 63, + 22, 10, 1, 36, 23, 13, 40, 27, 32, 35, 86,240,241,242,243,244, + 11, 28, 41, 29, 33,245, 50, 37, 6, 7, 67, 77, 38, 93,246,247, + 68, 56, 59, 65, 69, 60, 70, 80, 71, 87,248,249,250,251,252,253, +) + +# Model Table: +# total sequences: 100% +# first 512 sequences: 92.6386% +# first 1024 sequences:7.3177% +# rest sequences: 1.0230% +# negative sequences: 0.0436% +ThaiLangModel = ( +0,1,3,3,3,3,0,0,3,3,0,3,3,0,3,3,3,3,3,3,3,3,0,0,3,3,3,0,3,3,3,3, +0,3,3,0,0,0,1,3,0,3,3,2,3,3,0,1,2,3,3,3,3,0,2,0,2,0,0,3,2,1,2,2, +3,0,3,3,2,3,0,0,3,3,0,3,3,0,3,3,3,3,3,3,3,3,3,0,3,2,3,0,2,2,2,3, +0,2,3,0,0,0,0,1,0,1,2,3,1,1,3,2,2,0,1,1,0,0,1,0,0,0,0,0,0,0,1,1, +3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,2,2,2,2,2,2,2,3,3,2,3,2,3,3,2,2,2, +3,1,2,3,0,3,3,2,2,1,2,3,3,1,2,0,1,3,0,1,0,0,1,0,0,0,0,0,0,0,1,1, +3,3,2,2,3,3,3,3,1,2,3,3,3,3,3,2,2,2,2,3,3,2,2,3,3,2,2,3,2,3,2,2, +3,3,1,2,3,1,2,2,3,3,1,0,2,1,0,0,3,1,2,1,0,0,1,0,0,0,0,0,0,1,0,1, +3,3,3,3,3,3,2,2,3,3,3,3,2,3,2,2,3,3,2,2,3,2,2,2,2,1,1,3,1,2,1,1, +3,2,1,0,2,1,0,1,0,1,1,0,1,1,0,0,1,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0, 
+3,3,3,2,3,2,3,3,2,2,3,2,3,3,2,3,1,1,2,3,2,2,2,3,2,2,2,2,2,1,2,1, +2,2,1,1,3,3,2,1,0,1,2,2,0,1,3,0,0,0,1,1,0,0,0,0,0,2,3,0,0,2,1,1, +3,3,2,3,3,2,0,0,3,3,0,3,3,0,2,2,3,1,2,2,1,1,1,0,2,2,2,0,2,2,1,1, +0,2,1,0,2,0,0,2,0,1,0,0,1,0,0,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0,1,0, +3,3,2,3,3,2,0,0,3,3,0,2,3,0,2,1,2,2,2,2,1,2,0,0,2,2,2,0,2,2,1,1, +0,2,1,0,2,0,0,2,0,1,1,0,1,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0, +3,3,2,3,2,3,2,0,2,2,1,3,2,1,3,2,1,2,3,2,2,3,0,2,3,2,2,1,2,2,2,2, +1,2,2,0,0,0,0,2,0,1,2,0,1,1,1,0,1,0,3,1,1,0,0,0,0,0,0,0,0,0,1,0, +3,3,2,3,3,2,3,2,2,2,3,2,2,3,2,2,1,2,3,2,2,3,1,3,2,2,2,3,2,2,2,3, +3,2,1,3,0,1,1,1,0,2,1,1,1,1,1,0,1,0,1,1,0,0,0,0,0,0,0,0,0,2,0,0, +1,0,0,3,0,3,3,3,3,3,0,0,3,0,2,2,3,3,3,3,3,0,0,0,1,1,3,0,0,0,0,2, +0,0,1,0,0,0,0,0,0,0,2,3,0,0,0,3,0,2,0,0,0,0,0,3,0,0,0,0,0,0,0,0, +2,0,3,3,3,3,0,0,2,3,0,0,3,0,3,3,2,3,3,3,3,3,0,0,3,3,3,0,0,0,3,3, +0,0,3,0,0,0,0,2,0,0,2,1,1,3,0,0,1,0,0,2,3,0,1,0,0,0,0,0,0,0,1,0, +3,3,3,3,2,3,3,3,3,3,3,3,1,2,1,3,3,2,2,1,2,2,2,3,1,1,2,0,2,1,2,1, +2,2,1,0,0,0,1,1,0,1,0,1,1,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0, +3,0,2,1,2,3,3,3,0,2,0,2,2,0,2,1,3,2,2,1,2,1,0,0,2,2,1,0,2,1,2,2, +0,1,1,0,0,0,0,1,0,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,2,1,3,3,1,1,3,0,2,3,1,1,3,2,1,1,2,0,2,2,3,2,1,1,1,1,1,2, +3,0,0,1,3,1,2,1,2,0,3,0,0,0,1,0,3,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0, +3,3,1,1,3,2,3,3,3,1,3,2,1,3,2,1,3,2,2,2,2,1,3,3,1,2,1,3,1,2,3,0, +2,1,1,3,2,2,2,1,2,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2, +3,3,2,3,2,3,3,2,3,2,3,2,3,3,2,1,0,3,2,2,2,1,2,2,2,1,2,2,1,2,1,1, +2,2,2,3,0,1,3,1,1,1,1,0,1,1,0,2,1,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,2,3,2,2,1,1,3,2,3,2,3,2,0,3,2,2,1,2,0,2,2,2,1,2,2,2,2,1, +3,2,1,2,2,1,0,2,0,1,0,0,1,1,0,0,0,0,0,1,1,0,1,0,0,0,0,0,0,0,0,1, +3,3,3,3,3,2,3,1,2,3,3,2,2,3,0,1,1,2,0,3,3,2,2,3,0,1,1,3,0,0,0,0, +3,1,0,3,3,0,2,0,2,1,0,0,3,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,2,3,2,3,3,0,1,3,1,1,2,1,2,1,1,3,1,1,0,2,3,1,1,1,1,1,1,1,1, +3,1,1,2,2,2,2,1,1,1,0,0,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, 
+3,2,2,1,1,2,1,3,3,2,3,2,2,3,2,2,3,1,2,2,1,2,0,3,2,1,2,2,2,2,2,1, +3,2,1,2,2,2,1,1,1,1,0,0,1,1,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,3,1,3,3,0,2,1,0,3,2,0,0,3,1,0,1,1,0,1,0,0,0,0,0,1, +1,0,0,1,0,3,2,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,0,2,2,2,3,0,0,1,3,0,3,2,0,3,2,2,3,3,3,3,3,1,0,2,2,2,0,2,2,1,2, +0,2,3,0,0,0,0,1,0,1,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, +3,0,2,3,1,3,3,2,3,3,0,3,3,0,3,2,2,3,2,3,3,3,0,0,2,2,3,0,1,1,1,3, +0,0,3,0,0,0,2,2,0,1,3,0,1,2,2,2,3,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1, +3,2,3,3,2,0,3,3,2,2,3,1,3,2,1,3,2,0,1,2,2,0,2,3,2,1,0,3,0,0,0,0, +3,0,0,2,3,1,3,0,0,3,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,1,3,2,2,2,1,2,0,1,3,1,1,3,1,3,0,0,2,1,1,1,1,2,1,1,1,0,2,1,0,1, +1,2,0,0,0,3,1,1,0,0,0,0,1,0,1,0,0,1,0,1,0,0,0,0,0,3,1,0,0,0,1,0, +3,3,3,3,2,2,2,2,2,1,3,1,1,1,2,0,1,1,2,1,2,1,3,2,0,0,3,1,1,1,1,1, +3,1,0,2,3,0,0,0,3,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,2,3,0,3,3,0,2,0,0,0,0,0,0,0,3,0,0,1,0,0,0,0,0,0,0,0,0,0,0, +0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,2,3,1,3,0,0,1,2,0,0,2,0,3,3,2,3,3,3,2,3,0,0,2,2,2,0,0,0,2,2, +0,0,1,0,0,0,0,3,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, +0,0,0,3,0,2,0,0,0,0,0,0,0,0,0,0,1,2,3,1,3,3,0,0,1,0,3,0,0,0,0,0, +0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,1,2,3,1,2,3,1,0,3,0,2,2,1,0,2,1,1,2,0,1,0,0,1,1,1,1,0,1,0,0, +1,0,0,0,0,1,1,0,3,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,2,1,0,1,1,1,3,1,2,2,2,2,2,2,1,1,1,1,0,3,1,0,1,3,1,1,1,1, +1,1,0,2,0,1,3,1,1,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,2,0,1, +3,0,2,2,1,3,3,2,3,3,0,1,1,0,2,2,1,2,1,3,3,1,0,0,3,2,0,0,0,0,2,1, +0,1,0,0,0,0,1,2,0,1,1,3,1,1,2,2,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0, +0,0,3,0,0,1,0,0,0,3,0,0,3,0,3,1,0,1,1,1,3,2,0,0,0,3,0,0,0,0,2,0, +0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0, +3,3,1,3,2,1,3,3,1,2,2,0,1,2,1,0,1,2,0,0,0,0,0,3,0,0,0,3,0,0,0,0, +3,0,0,1,1,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 
+3,0,1,2,0,3,3,3,2,2,0,1,1,0,1,3,0,0,0,2,2,0,0,0,0,3,1,0,1,0,0,0, +0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,0,2,3,1,2,0,0,2,1,0,3,1,0,1,2,0,1,1,1,1,3,0,0,3,1,1,0,2,2,1,1, +0,2,0,0,0,0,0,1,0,1,0,0,1,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,0,0,3,1,2,0,0,2,2,0,1,2,0,1,0,1,3,1,2,1,0,0,0,2,0,3,0,0,0,1,0, +0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,0,1,1,2,2,0,0,0,2,0,2,1,0,1,1,0,1,1,1,2,1,0,0,1,1,1,0,2,1,1,1, +0,1,1,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,1, +0,0,0,2,0,1,3,1,1,1,1,0,0,0,0,3,2,0,1,0,0,0,1,2,0,0,0,1,0,0,0,0, +0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,3,3,3,3,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,0,2,3,2,2,0,0,0,1,0,0,0,0,2,3,2,1,2,2,3,0,0,0,2,3,1,0,0,0,1,1, +0,0,1,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,1,1,0,1,0,0,0,0,0,0,0,0,0, +3,3,2,2,0,1,0,0,0,0,2,0,2,0,1,0,0,0,1,1,0,0,0,2,1,0,1,0,1,1,0,0, +0,1,0,2,0,0,1,0,3,0,1,0,0,0,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,1,0,0,1,0,0,0,0,0,1,1,2,0,0,0,0,1,0,0,1,3,1,0,0,0,0,1,1,0,0, +0,1,0,0,0,0,3,0,0,0,0,0,0,3,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0, +3,3,1,1,1,1,2,3,0,0,2,1,1,1,1,1,0,2,1,1,0,0,0,2,1,0,1,2,1,1,0,1, +2,1,0,3,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,3,1,0,0,0,0,0,0,0,3,0,0,0,3,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,1, +0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,2,0,0,0,0,0,0,1,2,1,0,1,1,0,2,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,2,0,0,0,1,3,0,1,0,0,0,2,0,0,0,0,0,0,0,1,2,0,0,0,0,0, +3,3,0,0,1,1,2,0,0,1,2,1,0,1,1,1,0,1,1,0,0,2,1,1,0,1,0,0,1,1,1,0, +0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,2,2,1,0,0,0,0,1,0,0,0,0,3,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0, +2,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,3,0,0,1,1,0,0,0,2,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 
+3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +1,1,0,1,2,0,1,2,0,0,1,1,0,2,0,1,0,0,1,0,0,0,0,1,0,0,0,2,0,0,0,0, +1,0,0,1,0,1,1,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,1,0,0,0,0,0,0,0,1,1,0,1,1,0,2,1,3,0,0,0,0,1,1,0,0,0,0,0,0,0,3, +1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,0,1,0,1,0,0,2,0,0,2,0,0,1,1,2,0,0,1,1,0,0,0,1,0,0,0,1,1,0,0,0, +1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, +1,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,1,1,0,0,0, +2,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,0,0,0,0,2,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,1,3,0,0,0, +2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,1,0,0,0,0, +1,0,0,0,0,0,0,0,0,1,0,0,0,0,2,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,1,1,0,0,2,1,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +) + +TIS620ThaiModel = { + 'char_to_order_map': TIS620CharToOrderMap, + 'precedence_matrix': ThaiLangModel, + 'typical_positive_ratio': 0.926386, + 
'keep_english_letter': False, + 'charset_name': "TIS-620", + 'language': 'Thai', +} diff --git a/thesisenv/lib/python3.6/site-packages/chardet/langturkishmodel.py b/thesisenv/lib/python3.6/site-packages/chardet/langturkishmodel.py new file mode 100644 index 0000000..a427a45 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/chardet/langturkishmodel.py @@ -0,0 +1,193 @@ +# -*- coding: utf-8 -*- +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Communicator client code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# Özgür Baskın - Turkish Language Model +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +# 255: Control characters that usually does not exist in any text +# 254: Carriage/Return +# 253: symbol (punctuation) that does not belong to word +# 252: 0 - 9 + +# Character Mapping Table: +Latin5_TurkishCharToOrderMap = ( +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, +255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, +255, 23, 37, 47, 39, 29, 52, 36, 45, 53, 60, 16, 49, 20, 46, 42, + 48, 69, 44, 35, 31, 51, 38, 62, 65, 43, 56,255,255,255,255,255, +255, 1, 21, 28, 12, 2, 18, 27, 25, 3, 24, 10, 5, 13, 4, 15, + 26, 64, 7, 8, 9, 14, 32, 57, 58, 11, 22,255,255,255,255,255, +180,179,178,177,176,175,174,173,172,171,170,169,168,167,166,165, +164,163,162,161,160,159,101,158,157,156,155,154,153,152,151,106, +150,149,148,147,146,145,144,100,143,142,141,140,139,138,137,136, + 94, 80, 93,135,105,134,133, 63,132,131,130,129,128,127,126,125, +124,104, 73, 99, 79, 85,123, 54,122, 98, 92,121,120, 91,103,119, + 68,118,117, 97,116,115, 50, 90,114,113,112,111, 55, 41, 40, 86, + 89, 70, 59, 78, 71, 82, 88, 33, 77, 66, 84, 83,110, 75, 61, 96, + 30, 67,109, 74, 87,102, 34, 95, 81,108, 76, 72, 17, 6, 19,107, +) + +TurkishLangModel = ( +3,2,3,3,3,1,3,3,3,3,3,3,3,3,2,1,1,3,3,1,3,3,0,3,3,3,3,3,0,3,1,3, +3,2,1,0,0,1,1,0,0,0,1,0,0,1,1,1,1,0,0,0,0,0,0,0,2,2,0,0,1,0,0,1, +3,2,2,3,3,0,3,3,3,3,3,3,3,2,3,1,0,3,3,1,3,3,0,3,3,3,3,3,0,3,0,3, +3,1,1,0,1,0,1,0,0,0,0,0,0,1,1,1,1,0,0,0,0,0,0,0,2,2,0,0,0,1,0,1, +3,3,2,3,3,0,3,3,3,3,3,3,3,2,3,1,1,3,3,0,3,3,1,2,3,3,3,3,0,3,0,3, +3,1,1,0,0,0,1,0,0,0,0,1,1,0,1,2,1,0,0,0,1,0,0,0,0,2,0,0,0,0,0,1, 
+3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,1,3,3,2,0,3,2,1,2,2,1,3,3,0,0,0,2, +2,2,0,1,0,0,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,1,0,1,0,0,1, +3,3,3,2,3,3,1,2,3,3,3,3,3,3,3,1,3,2,1,0,3,2,0,1,2,3,3,2,1,0,0,2, +2,1,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,2,0,2,0,0,0, +1,0,1,3,3,1,3,3,3,3,3,3,3,1,2,0,0,2,3,0,2,3,0,0,2,2,2,3,0,3,0,1, +2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,0,3,3,3,0,3,2,0,2,3,2,3,3,1,0,0,2, +3,2,0,0,1,0,0,0,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,1,1,1,0,2,0,0,1, +3,3,3,2,3,3,2,3,3,3,3,2,3,3,3,0,3,3,0,0,2,1,0,0,2,3,2,2,0,0,0,2, +2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,1,0,1,0,2,0,0,1, +3,3,3,2,3,3,3,3,3,3,3,2,3,3,3,0,3,2,0,1,3,2,1,1,3,2,3,2,1,0,0,2, +2,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0, +3,3,3,2,3,3,3,3,3,3,3,2,3,3,3,0,3,2,2,0,2,3,0,0,2,2,2,2,0,0,0,2, +3,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,2,0,1,0,0,0, +3,3,3,3,3,3,3,2,2,2,2,3,2,3,3,0,3,3,1,1,2,2,0,0,2,2,3,2,0,0,1,3, +0,3,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,1, +3,3,3,2,3,3,3,2,1,2,2,3,2,3,3,0,3,2,0,0,1,1,0,1,1,2,1,2,0,0,0,1, +0,3,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,1,0,0,0, +3,3,3,2,3,3,2,3,2,2,2,3,3,3,3,1,3,1,1,0,3,2,1,1,3,3,2,3,1,0,0,1, +1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,2,0,0,1, +3,2,2,3,3,0,3,3,3,3,3,3,3,2,2,1,0,3,3,1,3,3,0,1,3,3,2,3,0,3,0,3, +2,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0, +2,2,2,3,3,0,3,3,3,3,3,3,3,3,3,0,0,3,2,0,3,3,0,3,2,3,3,3,0,3,1,3, +2,0,0,0,0,0,0,0,0,0,0,1,0,1,2,0,1,0,0,0,0,0,0,0,2,2,0,0,1,0,0,1, +3,3,3,1,2,3,3,1,0,0,1,0,0,3,3,2,3,0,0,2,0,0,2,0,2,0,0,0,2,0,2,0, +0,3,1,0,1,0,0,0,2,2,1,0,1,1,2,1,2,2,2,0,2,1,1,0,0,0,2,0,0,0,0,0, +1,2,1,3,3,0,3,3,3,3,3,2,3,0,0,0,0,2,3,0,2,3,1,0,2,3,1,3,0,3,0,2, +3,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,1,3,3,2,2,3,2,2,0,1,2,3,0,1,2,1,0,1,0,0,0,1,0,2,2,0,0,0,1, +1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,1,0,0,1,0,0,0, 
+3,3,3,1,3,3,1,1,3,3,1,1,3,3,1,0,2,1,2,0,2,1,0,0,1,1,2,1,0,0,0,2, +2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,1,0,2,1,3,0,0,2,0,0,3,3,0,3,0,0,1,0,1,2,0,0,1,1,2,2,0,1,0, +0,1,2,1,1,0,1,0,1,1,1,1,1,0,1,1,1,2,2,1,2,0,1,0,0,0,0,0,0,1,0,0, +3,3,3,2,3,2,3,3,0,2,2,2,3,3,3,0,3,0,0,0,2,2,0,1,2,1,1,1,0,0,0,1, +0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0, +3,3,3,3,3,3,2,1,2,2,3,3,3,3,2,0,2,0,0,0,2,2,0,0,2,1,3,3,0,0,1,1, +1,1,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0, +1,1,2,3,3,0,3,3,3,3,3,3,2,2,0,2,0,2,3,2,3,2,2,2,2,2,2,2,1,3,2,3, +2,0,2,1,2,2,2,2,1,1,2,2,1,2,2,1,2,0,0,2,1,1,0,2,1,0,0,1,0,0,0,1, +2,3,3,1,1,1,0,1,1,1,2,3,2,1,1,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0, +0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,2,2,2,3,2,3,2,2,1,3,3,3,0,2,1,2,0,2,1,0,0,1,1,1,1,1,0,0,1, +2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,2,0,1,0,0,0, +3,3,3,2,3,3,3,3,3,2,3,1,2,3,3,1,2,0,0,0,0,0,0,0,3,2,1,1,0,0,0,0, +2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0, +3,3,3,2,2,3,3,2,1,1,1,1,1,3,3,0,3,1,0,0,1,1,0,0,3,1,2,1,0,0,0,0, +0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0, +3,3,3,2,2,3,2,2,2,3,2,1,1,3,3,0,3,0,0,0,0,1,0,0,3,1,1,2,0,0,0,1, +1,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, +1,1,1,3,3,0,3,3,3,3,3,2,2,2,1,2,0,2,1,2,2,1,1,0,1,2,2,2,2,2,2,2, +0,0,2,1,2,1,2,1,0,1,1,3,1,2,1,1,2,0,0,2,0,1,0,1,0,1,0,0,0,1,0,1, +3,3,3,1,3,3,3,0,1,1,0,2,2,3,1,0,3,0,0,0,1,0,0,0,1,0,0,1,0,1,0,0, +1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,2,0,0,2,2,1,0,0,1,0,0,3,3,1,3,0,0,1,1,0,2,0,3,0,0,0,2,0,1,1, +0,1,2,0,1,2,2,0,2,2,2,2,1,0,2,1,1,0,2,0,2,1,2,0,0,0,0,0,0,0,0,0, +3,3,3,1,3,2,3,2,0,2,2,2,1,3,2,0,2,1,2,0,1,2,0,0,1,0,2,2,0,0,0,2, +1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,1,0,0,0, +3,3,3,0,3,3,1,1,2,3,1,0,3,2,3,0,3,0,0,0,1,0,0,0,1,0,1,0,0,0,0,0, +1,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 
+0,0,0,3,3,0,3,3,2,3,3,2,2,0,0,0,0,1,2,0,1,3,0,0,0,3,1,1,0,3,0,2, +2,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,1,2,2,1,0,3,1,1,1,1,3,3,2,3,0,0,1,0,1,2,0,2,2,0,2,2,0,2,1, +0,2,2,1,1,1,1,0,2,1,1,0,1,1,1,1,2,1,2,1,2,0,1,0,1,0,0,0,0,0,0,0, +3,3,3,0,1,1,3,0,0,1,1,0,0,2,2,0,3,0,0,1,1,0,1,0,0,0,0,0,2,0,0,0, +0,3,1,0,1,0,1,0,2,0,0,1,0,1,0,1,1,1,2,1,1,0,2,0,0,0,0,0,0,0,0,0, +3,3,3,0,2,0,2,0,1,1,1,0,0,3,3,0,2,0,0,1,0,0,2,1,1,0,1,0,1,0,1,0, +0,2,0,1,2,0,2,0,2,1,1,0,1,0,2,1,1,0,2,1,1,0,1,0,0,0,1,1,0,0,0,0, +3,2,3,0,1,0,0,0,0,0,0,0,0,1,2,0,1,0,0,1,0,0,1,0,0,0,0,0,2,0,0,0, +0,0,1,1,0,0,1,0,1,0,0,1,0,0,0,2,1,0,1,0,2,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,0,0,2,3,0,0,1,0,1,0,2,3,2,3,0,0,1,3,0,2,1,0,0,0,0,2,0,1,0, +0,2,1,0,0,1,1,0,2,1,0,0,1,0,0,1,1,0,1,1,2,0,1,0,0,0,0,1,0,0,0,0, +3,2,2,0,0,1,1,0,0,0,0,0,0,3,1,1,1,0,0,0,0,0,1,0,0,0,0,0,2,0,1,0, +0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0, +0,0,0,3,3,0,2,3,2,2,1,2,2,1,1,2,0,1,3,2,2,2,0,0,2,2,0,0,0,1,2,1, +3,0,2,1,1,0,1,1,1,0,1,2,2,2,1,1,2,0,0,0,0,1,0,1,1,0,0,0,0,0,0,0, +0,1,1,2,3,0,3,3,3,2,2,2,2,1,0,1,0,1,0,1,2,2,0,0,2,2,1,3,1,1,2,1, +0,0,1,1,2,0,1,1,0,0,1,2,0,2,1,1,2,0,0,1,0,0,0,1,0,1,0,1,0,0,0,0, +3,3,2,0,0,3,1,0,0,0,0,0,0,3,2,1,2,0,0,1,0,0,2,0,0,0,0,0,2,0,1,0, +0,2,1,1,0,0,1,0,1,2,0,0,1,1,0,0,2,1,1,1,1,0,2,0,0,0,0,0,0,0,0,0, +3,3,2,0,0,1,0,0,0,0,1,0,0,3,3,2,2,0,0,1,0,0,2,0,1,0,0,0,2,0,1,0, +0,0,1,1,0,0,2,0,2,1,0,0,1,1,2,1,2,0,2,1,2,1,1,1,0,0,1,1,0,0,0,0, +3,3,2,0,0,2,2,0,0,0,1,1,0,2,2,1,3,1,0,1,0,1,2,0,0,0,0,0,1,0,1,0, +0,1,1,0,0,0,0,0,1,0,0,1,0,0,0,1,1,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,2,0,0,0,1,0,0,1,0,0,2,3,1,2,0,0,1,0,0,2,0,0,0,1,0,2,0,2,0, +0,1,1,2,2,1,2,0,2,1,1,0,0,1,1,0,1,1,1,1,2,1,1,0,0,0,0,0,0,0,0,0, +3,3,3,0,2,1,2,1,0,0,1,1,0,3,3,1,2,0,0,1,0,0,2,0,2,0,1,1,2,0,0,0, +0,0,1,1,1,1,2,0,1,1,0,1,1,1,1,0,0,0,1,1,1,0,1,0,0,0,1,0,0,0,0,0, +3,3,3,0,2,2,3,2,0,0,1,0,0,2,3,1,0,0,0,0,0,0,2,0,2,0,0,0,2,0,0,0, +0,1,1,0,0,0,1,0,0,1,0,1,1,0,1,0,1,1,1,0,1,0,0,0,0,0,0,0,0,0,0,0, 
+3,2,3,0,0,0,0,0,0,0,1,0,0,2,2,2,2,0,0,1,0,0,2,0,0,0,0,0,2,0,1,0, +0,0,2,1,1,0,1,0,2,1,1,0,0,1,1,2,1,0,2,0,2,0,1,0,0,0,2,0,0,0,0,0, +0,0,0,2,2,0,2,1,1,1,1,2,2,0,0,1,0,1,0,0,1,3,0,0,0,0,1,0,0,2,1,0, +0,0,1,0,1,0,0,0,0,0,2,1,0,1,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0, +2,0,0,2,3,0,2,3,1,2,2,0,2,0,0,2,0,2,1,1,1,2,1,0,0,1,2,1,1,2,1,0, +1,0,2,0,1,0,1,1,0,0,2,2,1,2,1,1,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0, +3,3,3,0,2,1,2,0,0,0,1,0,0,3,2,0,1,0,0,1,0,0,2,0,0,0,1,2,1,0,1,0, +0,0,0,0,1,0,1,0,0,1,0,0,0,0,1,0,1,0,1,1,1,0,1,0,0,0,0,0,0,0,0,0, +0,0,0,2,2,0,2,2,1,1,0,1,1,1,1,1,0,0,1,2,1,1,1,0,1,0,0,0,1,1,1,1, +0,0,2,1,0,1,1,1,0,1,1,2,1,2,1,1,2,0,1,1,2,1,0,2,0,0,0,0,0,0,0,0, +3,2,2,0,0,2,0,0,0,0,0,0,0,2,2,0,2,0,0,1,0,0,2,0,0,0,0,0,2,0,0,0, +0,2,1,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0, +0,0,0,3,2,0,2,2,0,1,1,0,1,0,0,1,0,0,0,1,0,1,0,0,0,0,0,1,0,0,0,0, +2,0,1,0,1,0,1,1,0,0,1,2,0,1,0,1,1,0,0,1,0,1,0,2,0,0,0,0,0,0,0,0, +2,2,2,0,1,1,0,0,0,1,0,0,0,1,2,0,1,0,0,1,0,0,1,0,0,0,0,1,2,0,1,0, +0,0,1,0,0,0,1,0,0,1,0,0,0,0,0,0,1,0,1,0,2,0,0,0,0,0,0,0,0,0,0,0, +2,2,2,2,1,0,1,1,1,0,0,0,0,1,2,0,0,1,0,0,0,1,0,0,1,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0, +1,1,2,0,1,0,0,0,1,0,1,0,0,0,1,0,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,2,0,0,0,0,0,1, +0,0,1,2,2,0,2,1,2,1,1,2,2,0,0,0,0,1,0,0,1,1,0,0,2,0,0,0,0,1,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0, +2,2,2,0,0,0,1,0,0,0,0,0,0,2,2,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0, +0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,1,1,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +2,2,2,0,1,0,1,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 
+0,0,1,0,0,0,0,0,0,0,0,0,0,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, +) + +Latin5TurkishModel = { + 'char_to_order_map': Latin5_TurkishCharToOrderMap, + 'precedence_matrix': TurkishLangModel, + 'typical_positive_ratio': 0.970290, + 'keep_english_letter': True, + 'charset_name': "ISO-8859-9", + 'language': 'Turkish', +} diff --git a/thesisenv/lib/python3.6/site-packages/chardet/latin1prober.py b/thesisenv/lib/python3.6/site-packages/chardet/latin1prober.py new file mode 100644 index 0000000..7d1e8c2 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/chardet/latin1prober.py @@ -0,0 +1,145 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Universal charset detector code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 2001 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# Shy Shalom - original C code +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .charsetprober import CharSetProber +from .enums import ProbingState + +FREQ_CAT_NUM = 4 + +UDF = 0 # undefined +OTH = 1 # other +ASC = 2 # ascii capital letter +ASS = 3 # ascii small letter +ACV = 4 # accent capital vowel +ACO = 5 # accent capital other +ASV = 6 # accent small vowel +ASO = 7 # accent small other +CLASS_NUM = 8 # total classes + +Latin1_CharToClass = ( + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 00 - 07 + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 08 - 0F + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 10 - 17 + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 18 - 1F + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 20 - 27 + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 28 - 2F + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 30 - 37 + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 38 - 3F + OTH, ASC, ASC, ASC, ASC, ASC, ASC, ASC, # 40 - 47 + ASC, ASC, ASC, ASC, ASC, ASC, ASC, ASC, # 48 - 4F + ASC, ASC, ASC, ASC, ASC, ASC, ASC, ASC, # 50 - 57 + ASC, ASC, ASC, OTH, OTH, OTH, OTH, OTH, # 58 - 5F + OTH, ASS, ASS, ASS, ASS, ASS, ASS, ASS, # 60 - 67 + ASS, ASS, ASS, ASS, ASS, ASS, ASS, ASS, # 68 - 6F + ASS, ASS, ASS, ASS, ASS, ASS, ASS, ASS, # 70 - 77 + ASS, ASS, ASS, OTH, OTH, OTH, OTH, OTH, # 78 - 7F + OTH, UDF, OTH, ASO, OTH, OTH, OTH, OTH, # 80 - 87 + OTH, OTH, ACO, OTH, ACO, UDF, ACO, UDF, # 88 - 8F + UDF, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 90 - 97 + OTH, OTH, ASO, OTH, ASO, UDF, ASO, ACO, # 98 - 9F + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # A0 - A7 + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # A8 - AF + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # B0 - B7 + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # B8 - BF + ACV, ACV, ACV, ACV, ACV, ACV, ACO, ACO, # C0 - C7 + ACV, ACV, ACV, ACV, ACV, 
ACV, ACV, ACV, # C8 - CF + ACO, ACO, ACV, ACV, ACV, ACV, ACV, OTH, # D0 - D7 + ACV, ACV, ACV, ACV, ACV, ACO, ACO, ACO, # D8 - DF + ASV, ASV, ASV, ASV, ASV, ASV, ASO, ASO, # E0 - E7 + ASV, ASV, ASV, ASV, ASV, ASV, ASV, ASV, # E8 - EF + ASO, ASO, ASV, ASV, ASV, ASV, ASV, OTH, # F0 - F7 + ASV, ASV, ASV, ASV, ASV, ASO, ASO, ASO, # F8 - FF +) + +# 0 : illegal +# 1 : very unlikely +# 2 : normal +# 3 : very likely +Latin1ClassModel = ( +# UDF OTH ASC ASS ACV ACO ASV ASO + 0, 0, 0, 0, 0, 0, 0, 0, # UDF + 0, 3, 3, 3, 3, 3, 3, 3, # OTH + 0, 3, 3, 3, 3, 3, 3, 3, # ASC + 0, 3, 3, 3, 1, 1, 3, 3, # ASS + 0, 3, 3, 3, 1, 2, 1, 2, # ACV + 0, 3, 3, 3, 3, 3, 3, 3, # ACO + 0, 3, 1, 3, 1, 1, 1, 3, # ASV + 0, 3, 1, 3, 1, 1, 3, 3, # ASO +) + + +class Latin1Prober(CharSetProber): + def __init__(self): + super(Latin1Prober, self).__init__() + self._last_char_class = None + self._freq_counter = None + self.reset() + + def reset(self): + self._last_char_class = OTH + self._freq_counter = [0] * FREQ_CAT_NUM + CharSetProber.reset(self) + + @property + def charset_name(self): + return "ISO-8859-1" + + @property + def language(self): + return "" + + def feed(self, byte_str): + byte_str = self.filter_with_english_letters(byte_str) + for c in byte_str: + char_class = Latin1_CharToClass[c] + freq = Latin1ClassModel[(self._last_char_class * CLASS_NUM) + + char_class] + if freq == 0: + self._state = ProbingState.NOT_ME + break + self._freq_counter[freq] += 1 + self._last_char_class = char_class + + return self.state + + def get_confidence(self): + if self.state == ProbingState.NOT_ME: + return 0.01 + + total = sum(self._freq_counter) + if total < 0.01: + confidence = 0.0 + else: + confidence = ((self._freq_counter[3] - self._freq_counter[1] * 20.0) + / total) + if confidence < 0.0: + confidence = 0.0 + # lower the confidence of latin1 so that other more accurate + # detector can take priority. 
+ confidence = confidence * 0.73 + return confidence diff --git a/thesisenv/lib/python3.6/site-packages/chardet/mbcharsetprober.py b/thesisenv/lib/python3.6/site-packages/chardet/mbcharsetprober.py new file mode 100644 index 0000000..6256ecf --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/chardet/mbcharsetprober.py @@ -0,0 +1,91 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Universal charset detector code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 2001 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# Shy Shalom - original C code +# Proofpoint, Inc. +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .charsetprober import CharSetProber +from .enums import ProbingState, MachineState + + +class MultiByteCharSetProber(CharSetProber): + """ + MultiByteCharSetProber + """ + + def __init__(self, lang_filter=None): + super(MultiByteCharSetProber, self).__init__(lang_filter=lang_filter) + self.distribution_analyzer = None + self.coding_sm = None + self._last_char = [0, 0] + + def reset(self): + super(MultiByteCharSetProber, self).reset() + if self.coding_sm: + self.coding_sm.reset() + if self.distribution_analyzer: + self.distribution_analyzer.reset() + self._last_char = [0, 0] + + @property + def charset_name(self): + raise NotImplementedError + + @property + def language(self): + raise NotImplementedError + + def feed(self, byte_str): + for i in range(len(byte_str)): + coding_state = self.coding_sm.next_state(byte_str[i]) + if coding_state == MachineState.ERROR: + self.logger.debug('%s %s prober hit error at byte %s', + self.charset_name, self.language, i) + self._state = ProbingState.NOT_ME + break + elif coding_state == MachineState.ITS_ME: + self._state = ProbingState.FOUND_IT + break + elif coding_state == MachineState.START: + char_len = self.coding_sm.get_current_charlen() + if i == 0: + self._last_char[1] = byte_str[0] + self.distribution_analyzer.feed(self._last_char, char_len) + else: + self.distribution_analyzer.feed(byte_str[i - 1:i + 1], + char_len) + + self._last_char[0] = byte_str[-1] + + if self.state == ProbingState.DETECTING: + if (self.distribution_analyzer.got_enough_data() and + (self.get_confidence() > self.SHORTCUT_THRESHOLD)): + self._state = ProbingState.FOUND_IT + + return self.state + + def get_confidence(self): + return 
self.distribution_analyzer.get_confidence() diff --git a/thesisenv/lib/python3.6/site-packages/chardet/mbcsgroupprober.py b/thesisenv/lib/python3.6/site-packages/chardet/mbcsgroupprober.py new file mode 100644 index 0000000..530abe7 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/chardet/mbcsgroupprober.py @@ -0,0 +1,54 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Universal charset detector code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 2001 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# Shy Shalom - original C code +# Proofpoint, Inc. +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .charsetgroupprober import CharSetGroupProber +from .utf8prober import UTF8Prober +from .sjisprober import SJISProber +from .eucjpprober import EUCJPProber +from .gb2312prober import GB2312Prober +from .euckrprober import EUCKRProber +from .cp949prober import CP949Prober +from .big5prober import Big5Prober +from .euctwprober import EUCTWProber + + +class MBCSGroupProber(CharSetGroupProber): + def __init__(self, lang_filter=None): + super(MBCSGroupProber, self).__init__(lang_filter=lang_filter) + self.probers = [ + UTF8Prober(), + SJISProber(), + EUCJPProber(), + GB2312Prober(), + EUCKRProber(), + CP949Prober(), + Big5Prober(), + EUCTWProber() + ] + self.reset() diff --git a/thesisenv/lib/python3.6/site-packages/chardet/mbcssm.py b/thesisenv/lib/python3.6/site-packages/chardet/mbcssm.py new file mode 100644 index 0000000..8360d0f --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/chardet/mbcssm.py @@ -0,0 +1,572 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. 
+# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .enums import MachineState + +# BIG5 + +BIG5_CLS = ( + 1,1,1,1,1,1,1,1, # 00 - 07 #allow 0x00 as legal value + 1,1,1,1,1,1,0,0, # 08 - 0f + 1,1,1,1,1,1,1,1, # 10 - 17 + 1,1,1,0,1,1,1,1, # 18 - 1f + 1,1,1,1,1,1,1,1, # 20 - 27 + 1,1,1,1,1,1,1,1, # 28 - 2f + 1,1,1,1,1,1,1,1, # 30 - 37 + 1,1,1,1,1,1,1,1, # 38 - 3f + 2,2,2,2,2,2,2,2, # 40 - 47 + 2,2,2,2,2,2,2,2, # 48 - 4f + 2,2,2,2,2,2,2,2, # 50 - 57 + 2,2,2,2,2,2,2,2, # 58 - 5f + 2,2,2,2,2,2,2,2, # 60 - 67 + 2,2,2,2,2,2,2,2, # 68 - 6f + 2,2,2,2,2,2,2,2, # 70 - 77 + 2,2,2,2,2,2,2,1, # 78 - 7f + 4,4,4,4,4,4,4,4, # 80 - 87 + 4,4,4,4,4,4,4,4, # 88 - 8f + 4,4,4,4,4,4,4,4, # 90 - 97 + 4,4,4,4,4,4,4,4, # 98 - 9f + 4,3,3,3,3,3,3,3, # a0 - a7 + 3,3,3,3,3,3,3,3, # a8 - af + 3,3,3,3,3,3,3,3, # b0 - b7 + 3,3,3,3,3,3,3,3, # b8 - bf + 3,3,3,3,3,3,3,3, # c0 - c7 + 3,3,3,3,3,3,3,3, # c8 - cf + 3,3,3,3,3,3,3,3, # d0 - d7 + 3,3,3,3,3,3,3,3, # d8 - df + 3,3,3,3,3,3,3,3, # e0 - e7 + 3,3,3,3,3,3,3,3, # e8 - ef + 3,3,3,3,3,3,3,3, # f0 - f7 + 3,3,3,3,3,3,3,0 # f8 - ff +) + +BIG5_ST = ( + MachineState.ERROR,MachineState.START,MachineState.START, 3,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#00-07 + MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,#08-0f + 
MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START#10-17 +) + +BIG5_CHAR_LEN_TABLE = (0, 1, 1, 2, 0) + +BIG5_SM_MODEL = {'class_table': BIG5_CLS, + 'class_factor': 5, + 'state_table': BIG5_ST, + 'char_len_table': BIG5_CHAR_LEN_TABLE, + 'name': 'Big5'} + +# CP949 + +CP949_CLS = ( + 1,1,1,1,1,1,1,1, 1,1,1,1,1,1,0,0, # 00 - 0f + 1,1,1,1,1,1,1,1, 1,1,1,0,1,1,1,1, # 10 - 1f + 1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1, # 20 - 2f + 1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1, # 30 - 3f + 1,4,4,4,4,4,4,4, 4,4,4,4,4,4,4,4, # 40 - 4f + 4,4,5,5,5,5,5,5, 5,5,5,1,1,1,1,1, # 50 - 5f + 1,5,5,5,5,5,5,5, 5,5,5,5,5,5,5,5, # 60 - 6f + 5,5,5,5,5,5,5,5, 5,5,5,1,1,1,1,1, # 70 - 7f + 0,6,6,6,6,6,6,6, 6,6,6,6,6,6,6,6, # 80 - 8f + 6,6,6,6,6,6,6,6, 6,6,6,6,6,6,6,6, # 90 - 9f + 6,7,7,7,7,7,7,7, 7,7,7,7,7,8,8,8, # a0 - af + 7,7,7,7,7,7,7,7, 7,7,7,7,7,7,7,7, # b0 - bf + 7,7,7,7,7,7,9,2, 2,3,2,2,2,2,2,2, # c0 - cf + 2,2,2,2,2,2,2,2, 2,2,2,2,2,2,2,2, # d0 - df + 2,2,2,2,2,2,2,2, 2,2,2,2,2,2,2,2, # e0 - ef + 2,2,2,2,2,2,2,2, 2,2,2,2,2,2,2,0, # f0 - ff +) + +CP949_ST = ( +#cls= 0 1 2 3 4 5 6 7 8 9 # previous state = + MachineState.ERROR,MachineState.START, 3,MachineState.ERROR,MachineState.START,MachineState.START, 4, 5,MachineState.ERROR, 6, # MachineState.START + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, # MachineState.ERROR + MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME, # MachineState.ITS_ME + MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START, # 3 + 
MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START, # 4 + MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START, # 5 + MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START, # 6 +) + +CP949_CHAR_LEN_TABLE = (0, 1, 2, 0, 1, 1, 2, 2, 0, 2) + +CP949_SM_MODEL = {'class_table': CP949_CLS, + 'class_factor': 10, + 'state_table': CP949_ST, + 'char_len_table': CP949_CHAR_LEN_TABLE, + 'name': 'CP949'} + +# EUC-JP + +EUCJP_CLS = ( + 4,4,4,4,4,4,4,4, # 00 - 07 + 4,4,4,4,4,4,5,5, # 08 - 0f + 4,4,4,4,4,4,4,4, # 10 - 17 + 4,4,4,5,4,4,4,4, # 18 - 1f + 4,4,4,4,4,4,4,4, # 20 - 27 + 4,4,4,4,4,4,4,4, # 28 - 2f + 4,4,4,4,4,4,4,4, # 30 - 37 + 4,4,4,4,4,4,4,4, # 38 - 3f + 4,4,4,4,4,4,4,4, # 40 - 47 + 4,4,4,4,4,4,4,4, # 48 - 4f + 4,4,4,4,4,4,4,4, # 50 - 57 + 4,4,4,4,4,4,4,4, # 58 - 5f + 4,4,4,4,4,4,4,4, # 60 - 67 + 4,4,4,4,4,4,4,4, # 68 - 6f + 4,4,4,4,4,4,4,4, # 70 - 77 + 4,4,4,4,4,4,4,4, # 78 - 7f + 5,5,5,5,5,5,5,5, # 80 - 87 + 5,5,5,5,5,5,1,3, # 88 - 8f + 5,5,5,5,5,5,5,5, # 90 - 97 + 5,5,5,5,5,5,5,5, # 98 - 9f + 5,2,2,2,2,2,2,2, # a0 - a7 + 2,2,2,2,2,2,2,2, # a8 - af + 2,2,2,2,2,2,2,2, # b0 - b7 + 2,2,2,2,2,2,2,2, # b8 - bf + 2,2,2,2,2,2,2,2, # c0 - c7 + 2,2,2,2,2,2,2,2, # c8 - cf + 2,2,2,2,2,2,2,2, # d0 - d7 + 2,2,2,2,2,2,2,2, # d8 - df + 0,0,0,0,0,0,0,0, # e0 - e7 + 0,0,0,0,0,0,0,0, # e8 - ef + 0,0,0,0,0,0,0,0, # f0 - f7 + 0,0,0,0,0,0,0,5 # f8 - ff +) + +EUCJP_ST = ( + 3, 4, 3, 5,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#00-07 + 
MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f + MachineState.ITS_ME,MachineState.ITS_ME,MachineState.START,MachineState.ERROR,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#10-17 + MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 3,MachineState.ERROR,#18-1f + 3,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START#20-27 +) + +EUCJP_CHAR_LEN_TABLE = (2, 2, 2, 3, 1, 0) + +EUCJP_SM_MODEL = {'class_table': EUCJP_CLS, + 'class_factor': 6, + 'state_table': EUCJP_ST, + 'char_len_table': EUCJP_CHAR_LEN_TABLE, + 'name': 'EUC-JP'} + +# EUC-KR + +EUCKR_CLS = ( + 1,1,1,1,1,1,1,1, # 00 - 07 + 1,1,1,1,1,1,0,0, # 08 - 0f + 1,1,1,1,1,1,1,1, # 10 - 17 + 1,1,1,0,1,1,1,1, # 18 - 1f + 1,1,1,1,1,1,1,1, # 20 - 27 + 1,1,1,1,1,1,1,1, # 28 - 2f + 1,1,1,1,1,1,1,1, # 30 - 37 + 1,1,1,1,1,1,1,1, # 38 - 3f + 1,1,1,1,1,1,1,1, # 40 - 47 + 1,1,1,1,1,1,1,1, # 48 - 4f + 1,1,1,1,1,1,1,1, # 50 - 57 + 1,1,1,1,1,1,1,1, # 58 - 5f + 1,1,1,1,1,1,1,1, # 60 - 67 + 1,1,1,1,1,1,1,1, # 68 - 6f + 1,1,1,1,1,1,1,1, # 70 - 77 + 1,1,1,1,1,1,1,1, # 78 - 7f + 0,0,0,0,0,0,0,0, # 80 - 87 + 0,0,0,0,0,0,0,0, # 88 - 8f + 0,0,0,0,0,0,0,0, # 90 - 97 + 0,0,0,0,0,0,0,0, # 98 - 9f + 0,2,2,2,2,2,2,2, # a0 - a7 + 2,2,2,2,2,3,3,3, # a8 - af + 2,2,2,2,2,2,2,2, # b0 - b7 + 2,2,2,2,2,2,2,2, # b8 - bf + 2,2,2,2,2,2,2,2, # c0 - c7 + 2,3,2,2,2,2,2,2, # c8 - cf + 2,2,2,2,2,2,2,2, # d0 - d7 + 2,2,2,2,2,2,2,2, # d8 - df + 2,2,2,2,2,2,2,2, # e0 - e7 + 2,2,2,2,2,2,2,2, # e8 - ef + 2,2,2,2,2,2,2,2, # f0 - f7 + 2,2,2,2,2,2,2,0 # f8 - ff +) + +EUCKR_ST = ( + MachineState.ERROR,MachineState.START, 3,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#00-07 + 
MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START #08-0f +) + +EUCKR_CHAR_LEN_TABLE = (0, 1, 2, 0) + +EUCKR_SM_MODEL = {'class_table': EUCKR_CLS, + 'class_factor': 4, + 'state_table': EUCKR_ST, + 'char_len_table': EUCKR_CHAR_LEN_TABLE, + 'name': 'EUC-KR'} + +# EUC-TW + +EUCTW_CLS = ( + 2,2,2,2,2,2,2,2, # 00 - 07 + 2,2,2,2,2,2,0,0, # 08 - 0f + 2,2,2,2,2,2,2,2, # 10 - 17 + 2,2,2,0,2,2,2,2, # 18 - 1f + 2,2,2,2,2,2,2,2, # 20 - 27 + 2,2,2,2,2,2,2,2, # 28 - 2f + 2,2,2,2,2,2,2,2, # 30 - 37 + 2,2,2,2,2,2,2,2, # 38 - 3f + 2,2,2,2,2,2,2,2, # 40 - 47 + 2,2,2,2,2,2,2,2, # 48 - 4f + 2,2,2,2,2,2,2,2, # 50 - 57 + 2,2,2,2,2,2,2,2, # 58 - 5f + 2,2,2,2,2,2,2,2, # 60 - 67 + 2,2,2,2,2,2,2,2, # 68 - 6f + 2,2,2,2,2,2,2,2, # 70 - 77 + 2,2,2,2,2,2,2,2, # 78 - 7f + 0,0,0,0,0,0,0,0, # 80 - 87 + 0,0,0,0,0,0,6,0, # 88 - 8f + 0,0,0,0,0,0,0,0, # 90 - 97 + 0,0,0,0,0,0,0,0, # 98 - 9f + 0,3,4,4,4,4,4,4, # a0 - a7 + 5,5,1,1,1,1,1,1, # a8 - af + 1,1,1,1,1,1,1,1, # b0 - b7 + 1,1,1,1,1,1,1,1, # b8 - bf + 1,1,3,1,3,3,3,3, # c0 - c7 + 3,3,3,3,3,3,3,3, # c8 - cf + 3,3,3,3,3,3,3,3, # d0 - d7 + 3,3,3,3,3,3,3,3, # d8 - df + 3,3,3,3,3,3,3,3, # e0 - e7 + 3,3,3,3,3,3,3,3, # e8 - ef + 3,3,3,3,3,3,3,3, # f0 - f7 + 3,3,3,3,3,3,3,0 # f8 - ff +) + +EUCTW_ST = ( + MachineState.ERROR,MachineState.ERROR,MachineState.START, 3, 3, 3, 4,MachineState.ERROR,#00-07 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f + MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.START,MachineState.ERROR,#10-17 + MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#18-1f + 
5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.ERROR,MachineState.START,MachineState.START,#20-27 + MachineState.START,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START #28-2f +) + +EUCTW_CHAR_LEN_TABLE = (0, 0, 1, 2, 2, 2, 3) + +EUCTW_SM_MODEL = {'class_table': EUCTW_CLS, + 'class_factor': 7, + 'state_table': EUCTW_ST, + 'char_len_table': EUCTW_CHAR_LEN_TABLE, + 'name': 'x-euc-tw'} + +# GB2312 + +GB2312_CLS = ( + 1,1,1,1,1,1,1,1, # 00 - 07 + 1,1,1,1,1,1,0,0, # 08 - 0f + 1,1,1,1,1,1,1,1, # 10 - 17 + 1,1,1,0,1,1,1,1, # 18 - 1f + 1,1,1,1,1,1,1,1, # 20 - 27 + 1,1,1,1,1,1,1,1, # 28 - 2f + 3,3,3,3,3,3,3,3, # 30 - 37 + 3,3,1,1,1,1,1,1, # 38 - 3f + 2,2,2,2,2,2,2,2, # 40 - 47 + 2,2,2,2,2,2,2,2, # 48 - 4f + 2,2,2,2,2,2,2,2, # 50 - 57 + 2,2,2,2,2,2,2,2, # 58 - 5f + 2,2,2,2,2,2,2,2, # 60 - 67 + 2,2,2,2,2,2,2,2, # 68 - 6f + 2,2,2,2,2,2,2,2, # 70 - 77 + 2,2,2,2,2,2,2,4, # 78 - 7f + 5,6,6,6,6,6,6,6, # 80 - 87 + 6,6,6,6,6,6,6,6, # 88 - 8f + 6,6,6,6,6,6,6,6, # 90 - 97 + 6,6,6,6,6,6,6,6, # 98 - 9f + 6,6,6,6,6,6,6,6, # a0 - a7 + 6,6,6,6,6,6,6,6, # a8 - af + 6,6,6,6,6,6,6,6, # b0 - b7 + 6,6,6,6,6,6,6,6, # b8 - bf + 6,6,6,6,6,6,6,6, # c0 - c7 + 6,6,6,6,6,6,6,6, # c8 - cf + 6,6,6,6,6,6,6,6, # d0 - d7 + 6,6,6,6,6,6,6,6, # d8 - df + 6,6,6,6,6,6,6,6, # e0 - e7 + 6,6,6,6,6,6,6,6, # e8 - ef + 6,6,6,6,6,6,6,6, # f0 - f7 + 6,6,6,6,6,6,6,0 # f8 - ff +) + +GB2312_ST = ( + MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START, 3,MachineState.ERROR,#00-07 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f + MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.START,#10-17 + 
4,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#18-1f + MachineState.ERROR,MachineState.ERROR, 5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,#20-27 + MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START #28-2f +) + +# To be accurate, the length of class 6 can be either 2 or 4. +# But it is not necessary to discriminate between the two since +# it is used for frequency analysis only, and we are validating +# each code range there as well. So it is safe to set it to be +# 2 here. +GB2312_CHAR_LEN_TABLE = (0, 1, 1, 1, 1, 1, 2) + +GB2312_SM_MODEL = {'class_table': GB2312_CLS, + 'class_factor': 7, + 'state_table': GB2312_ST, + 'char_len_table': GB2312_CHAR_LEN_TABLE, + 'name': 'GB2312'} + +# Shift_JIS + +SJIS_CLS = ( + 1,1,1,1,1,1,1,1, # 00 - 07 + 1,1,1,1,1,1,0,0, # 08 - 0f + 1,1,1,1,1,1,1,1, # 10 - 17 + 1,1,1,0,1,1,1,1, # 18 - 1f + 1,1,1,1,1,1,1,1, # 20 - 27 + 1,1,1,1,1,1,1,1, # 28 - 2f + 1,1,1,1,1,1,1,1, # 30 - 37 + 1,1,1,1,1,1,1,1, # 38 - 3f + 2,2,2,2,2,2,2,2, # 40 - 47 + 2,2,2,2,2,2,2,2, # 48 - 4f + 2,2,2,2,2,2,2,2, # 50 - 57 + 2,2,2,2,2,2,2,2, # 58 - 5f + 2,2,2,2,2,2,2,2, # 60 - 67 + 2,2,2,2,2,2,2,2, # 68 - 6f + 2,2,2,2,2,2,2,2, # 70 - 77 + 2,2,2,2,2,2,2,1, # 78 - 7f + 3,3,3,3,3,2,2,3, # 80 - 87 + 3,3,3,3,3,3,3,3, # 88 - 8f + 3,3,3,3,3,3,3,3, # 90 - 97 + 3,3,3,3,3,3,3,3, # 98 - 9f + #0xa0 is illegal in sjis encoding, but some pages does + #contain such byte. We need to be more error forgiven. 
+ 2,2,2,2,2,2,2,2, # a0 - a7 + 2,2,2,2,2,2,2,2, # a8 - af + 2,2,2,2,2,2,2,2, # b0 - b7 + 2,2,2,2,2,2,2,2, # b8 - bf + 2,2,2,2,2,2,2,2, # c0 - c7 + 2,2,2,2,2,2,2,2, # c8 - cf + 2,2,2,2,2,2,2,2, # d0 - d7 + 2,2,2,2,2,2,2,2, # d8 - df + 3,3,3,3,3,3,3,3, # e0 - e7 + 3,3,3,3,3,4,4,4, # e8 - ef + 3,3,3,3,3,3,3,3, # f0 - f7 + 3,3,3,3,3,0,0,0) # f8 - ff + + +SJIS_ST = ( + MachineState.ERROR,MachineState.START,MachineState.START, 3,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#00-07 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f + MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START #10-17 +) + +SJIS_CHAR_LEN_TABLE = (0, 1, 1, 2, 0, 0) + +SJIS_SM_MODEL = {'class_table': SJIS_CLS, + 'class_factor': 6, + 'state_table': SJIS_ST, + 'char_len_table': SJIS_CHAR_LEN_TABLE, + 'name': 'Shift_JIS'} + +# UCS2-BE + +UCS2BE_CLS = ( + 0,0,0,0,0,0,0,0, # 00 - 07 + 0,0,1,0,0,2,0,0, # 08 - 0f + 0,0,0,0,0,0,0,0, # 10 - 17 + 0,0,0,3,0,0,0,0, # 18 - 1f + 0,0,0,0,0,0,0,0, # 20 - 27 + 0,3,3,3,3,3,0,0, # 28 - 2f + 0,0,0,0,0,0,0,0, # 30 - 37 + 0,0,0,0,0,0,0,0, # 38 - 3f + 0,0,0,0,0,0,0,0, # 40 - 47 + 0,0,0,0,0,0,0,0, # 48 - 4f + 0,0,0,0,0,0,0,0, # 50 - 57 + 0,0,0,0,0,0,0,0, # 58 - 5f + 0,0,0,0,0,0,0,0, # 60 - 67 + 0,0,0,0,0,0,0,0, # 68 - 6f + 0,0,0,0,0,0,0,0, # 70 - 77 + 0,0,0,0,0,0,0,0, # 78 - 7f + 0,0,0,0,0,0,0,0, # 80 - 87 + 0,0,0,0,0,0,0,0, # 88 - 8f + 0,0,0,0,0,0,0,0, # 90 - 97 + 0,0,0,0,0,0,0,0, # 98 - 9f + 0,0,0,0,0,0,0,0, # a0 - a7 + 0,0,0,0,0,0,0,0, # a8 - af + 0,0,0,0,0,0,0,0, # b0 - b7 + 0,0,0,0,0,0,0,0, # b8 - bf + 0,0,0,0,0,0,0,0, # c0 - c7 + 0,0,0,0,0,0,0,0, # c8 - cf + 0,0,0,0,0,0,0,0, # d0 - d7 + 0,0,0,0,0,0,0,0, # d8 - df + 0,0,0,0,0,0,0,0, # e0 - e7 + 0,0,0,0,0,0,0,0, # e8 - ef + 0,0,0,0,0,0,0,0, # f0 - f7 + 0,0,0,0,0,0,4,5 # f8 - ff 
+) + +UCS2BE_ST = ( + 5, 7, 7,MachineState.ERROR, 4, 3,MachineState.ERROR,MachineState.ERROR,#00-07 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f + MachineState.ITS_ME,MachineState.ITS_ME, 6, 6, 6, 6,MachineState.ERROR,MachineState.ERROR,#10-17 + 6, 6, 6, 6, 6,MachineState.ITS_ME, 6, 6,#18-1f + 6, 6, 6, 6, 5, 7, 7,MachineState.ERROR,#20-27 + 5, 8, 6, 6,MachineState.ERROR, 6, 6, 6,#28-2f + 6, 6, 6, 6,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START #30-37 +) + +UCS2BE_CHAR_LEN_TABLE = (2, 2, 2, 0, 2, 2) + +UCS2BE_SM_MODEL = {'class_table': UCS2BE_CLS, + 'class_factor': 6, + 'state_table': UCS2BE_ST, + 'char_len_table': UCS2BE_CHAR_LEN_TABLE, + 'name': 'UTF-16BE'} + +# UCS2-LE + +UCS2LE_CLS = ( + 0,0,0,0,0,0,0,0, # 00 - 07 + 0,0,1,0,0,2,0,0, # 08 - 0f + 0,0,0,0,0,0,0,0, # 10 - 17 + 0,0,0,3,0,0,0,0, # 18 - 1f + 0,0,0,0,0,0,0,0, # 20 - 27 + 0,3,3,3,3,3,0,0, # 28 - 2f + 0,0,0,0,0,0,0,0, # 30 - 37 + 0,0,0,0,0,0,0,0, # 38 - 3f + 0,0,0,0,0,0,0,0, # 40 - 47 + 0,0,0,0,0,0,0,0, # 48 - 4f + 0,0,0,0,0,0,0,0, # 50 - 57 + 0,0,0,0,0,0,0,0, # 58 - 5f + 0,0,0,0,0,0,0,0, # 60 - 67 + 0,0,0,0,0,0,0,0, # 68 - 6f + 0,0,0,0,0,0,0,0, # 70 - 77 + 0,0,0,0,0,0,0,0, # 78 - 7f + 0,0,0,0,0,0,0,0, # 80 - 87 + 0,0,0,0,0,0,0,0, # 88 - 8f + 0,0,0,0,0,0,0,0, # 90 - 97 + 0,0,0,0,0,0,0,0, # 98 - 9f + 0,0,0,0,0,0,0,0, # a0 - a7 + 0,0,0,0,0,0,0,0, # a8 - af + 0,0,0,0,0,0,0,0, # b0 - b7 + 0,0,0,0,0,0,0,0, # b8 - bf + 0,0,0,0,0,0,0,0, # c0 - c7 + 0,0,0,0,0,0,0,0, # c8 - cf + 0,0,0,0,0,0,0,0, # d0 - d7 + 0,0,0,0,0,0,0,0, # d8 - df + 0,0,0,0,0,0,0,0, # e0 - e7 + 0,0,0,0,0,0,0,0, # e8 - ef + 0,0,0,0,0,0,0,0, # f0 - f7 + 0,0,0,0,0,0,4,5 # f8 - ff +) + +UCS2LE_ST = ( + 6, 6, 7, 6, 4, 3,MachineState.ERROR,MachineState.ERROR,#00-07 + 
MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f + MachineState.ITS_ME,MachineState.ITS_ME, 5, 5, 5,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,#10-17 + 5, 5, 5,MachineState.ERROR, 5,MachineState.ERROR, 6, 6,#18-1f + 7, 6, 8, 8, 5, 5, 5,MachineState.ERROR,#20-27 + 5, 5, 5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 5, 5,#28-2f + 5, 5, 5,MachineState.ERROR, 5,MachineState.ERROR,MachineState.START,MachineState.START #30-37 +) + +UCS2LE_CHAR_LEN_TABLE = (2, 2, 2, 2, 2, 2) + +UCS2LE_SM_MODEL = {'class_table': UCS2LE_CLS, + 'class_factor': 6, + 'state_table': UCS2LE_ST, + 'char_len_table': UCS2LE_CHAR_LEN_TABLE, + 'name': 'UTF-16LE'} + +# UTF-8 + +UTF8_CLS = ( + 1,1,1,1,1,1,1,1, # 00 - 07 #allow 0x00 as a legal value + 1,1,1,1,1,1,0,0, # 08 - 0f + 1,1,1,1,1,1,1,1, # 10 - 17 + 1,1,1,0,1,1,1,1, # 18 - 1f + 1,1,1,1,1,1,1,1, # 20 - 27 + 1,1,1,1,1,1,1,1, # 28 - 2f + 1,1,1,1,1,1,1,1, # 30 - 37 + 1,1,1,1,1,1,1,1, # 38 - 3f + 1,1,1,1,1,1,1,1, # 40 - 47 + 1,1,1,1,1,1,1,1, # 48 - 4f + 1,1,1,1,1,1,1,1, # 50 - 57 + 1,1,1,1,1,1,1,1, # 58 - 5f + 1,1,1,1,1,1,1,1, # 60 - 67 + 1,1,1,1,1,1,1,1, # 68 - 6f + 1,1,1,1,1,1,1,1, # 70 - 77 + 1,1,1,1,1,1,1,1, # 78 - 7f + 2,2,2,2,3,3,3,3, # 80 - 87 + 4,4,4,4,4,4,4,4, # 88 - 8f + 4,4,4,4,4,4,4,4, # 90 - 97 + 4,4,4,4,4,4,4,4, # 98 - 9f + 5,5,5,5,5,5,5,5, # a0 - a7 + 5,5,5,5,5,5,5,5, # a8 - af + 5,5,5,5,5,5,5,5, # b0 - b7 + 5,5,5,5,5,5,5,5, # b8 - bf + 0,0,6,6,6,6,6,6, # c0 - c7 + 6,6,6,6,6,6,6,6, # c8 - cf + 6,6,6,6,6,6,6,6, # d0 - d7 + 6,6,6,6,6,6,6,6, # d8 - df + 7,8,8,8,8,8,8,8, # e0 - e7 + 8,8,8,8,8,9,8,8, # e8 - ef + 10,11,11,11,11,11,11,11, # f0 - f7 + 12,13,13,13,14,15,0,0 # f8 - ff +) + +UTF8_ST = ( + MachineState.ERROR,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 12, 10,#00-07 + 9, 11, 8, 7, 6, 5, 4, 3,#08-0f + 
MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#10-17 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#18-1f + MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#20-27 + MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#28-2f + MachineState.ERROR,MachineState.ERROR, 5, 5, 5, 5,MachineState.ERROR,MachineState.ERROR,#30-37 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#38-3f + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 5, 5, 5,MachineState.ERROR,MachineState.ERROR,#40-47 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#48-4f + MachineState.ERROR,MachineState.ERROR, 7, 7, 7, 7,MachineState.ERROR,MachineState.ERROR,#50-57 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#58-5f + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 7, 7,MachineState.ERROR,MachineState.ERROR,#60-67 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#68-6f + MachineState.ERROR,MachineState.ERROR, 9, 9, 9, 9,MachineState.ERROR,MachineState.ERROR,#70-77 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#78-7f + 
MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 9,MachineState.ERROR,MachineState.ERROR,#80-87 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#88-8f + MachineState.ERROR,MachineState.ERROR, 12, 12, 12, 12,MachineState.ERROR,MachineState.ERROR,#90-97 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#98-9f + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 12,MachineState.ERROR,MachineState.ERROR,#a0-a7 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#a8-af + MachineState.ERROR,MachineState.ERROR, 12, 12, 12,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#b0-b7 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#b8-bf + MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,#c0-c7 + MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR #c8-cf +) + +UTF8_CHAR_LEN_TABLE = (0, 1, 0, 0, 0, 0, 2, 3, 3, 3, 4, 4, 5, 5, 6, 6) + +UTF8_SM_MODEL = {'class_table': UTF8_CLS, + 'class_factor': 16, + 'state_table': UTF8_ST, + 'char_len_table': UTF8_CHAR_LEN_TABLE, + 'name': 'UTF-8'} diff --git a/thesisenv/lib/python3.6/site-packages/chardet/sbcharsetprober.py b/thesisenv/lib/python3.6/site-packages/chardet/sbcharsetprober.py new file mode 100644 index 0000000..0adb51d --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/chardet/sbcharsetprober.py @@ -0,0 +1,132 @@ 
+######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Universal charset detector code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 2001 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# Shy Shalom - original C code +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .charsetprober import CharSetProber +from .enums import CharacterCategory, ProbingState, SequenceLikelihood + + +class SingleByteCharSetProber(CharSetProber): + SAMPLE_SIZE = 64 + SB_ENOUGH_REL_THRESHOLD = 1024 # 0.25 * SAMPLE_SIZE^2 + POSITIVE_SHORTCUT_THRESHOLD = 0.95 + NEGATIVE_SHORTCUT_THRESHOLD = 0.05 + + def __init__(self, model, reversed=False, name_prober=None): + super(SingleByteCharSetProber, self).__init__() + self._model = model + # TRUE if we need to reverse every pair in the model lookup + self._reversed = reversed + # Optional auxiliary prober for name decision + self._name_prober = name_prober + self._last_order = None + self._seq_counters = None + self._total_seqs = None + self._total_char = None + self._freq_char = None + 
self.reset() + + def reset(self): + super(SingleByteCharSetProber, self).reset() + # char order of last character + self._last_order = 255 + self._seq_counters = [0] * SequenceLikelihood.get_num_categories() + self._total_seqs = 0 + self._total_char = 0 + # characters that fall in our sampling range + self._freq_char = 0 + + @property + def charset_name(self): + if self._name_prober: + return self._name_prober.charset_name + else: + return self._model['charset_name'] + + @property + def language(self): + if self._name_prober: + return self._name_prober.language + else: + return self._model.get('language') + + def feed(self, byte_str): + if not self._model['keep_english_letter']: + byte_str = self.filter_international_words(byte_str) + if not byte_str: + return self.state + char_to_order_map = self._model['char_to_order_map'] + for i, c in enumerate(byte_str): + # XXX: Order is in range 1-64, so one would think we want 0-63 here, + # but that leads to 27 more test failures than before. + order = char_to_order_map[c] + # XXX: This was SYMBOL_CAT_ORDER before, with a value of 250, but + # CharacterCategory.SYMBOL is actually 253, so we use CONTROL + # to make it closer to the original intent. The only difference + # is whether or not we count digits and control characters for + # _total_char purposes. 
+ if order < CharacterCategory.CONTROL: + self._total_char += 1 + if order < self.SAMPLE_SIZE: + self._freq_char += 1 + if self._last_order < self.SAMPLE_SIZE: + self._total_seqs += 1 + if not self._reversed: + i = (self._last_order * self.SAMPLE_SIZE) + order + model = self._model['precedence_matrix'][i] + else: # reverse the order of the letters in the lookup + i = (order * self.SAMPLE_SIZE) + self._last_order + model = self._model['precedence_matrix'][i] + self._seq_counters[model] += 1 + self._last_order = order + + charset_name = self._model['charset_name'] + if self.state == ProbingState.DETECTING: + if self._total_seqs > self.SB_ENOUGH_REL_THRESHOLD: + confidence = self.get_confidence() + if confidence > self.POSITIVE_SHORTCUT_THRESHOLD: + self.logger.debug('%s confidence = %s, we have a winner', + charset_name, confidence) + self._state = ProbingState.FOUND_IT + elif confidence < self.NEGATIVE_SHORTCUT_THRESHOLD: + self.logger.debug('%s confidence = %s, below negative ' + 'shortcut threshhold %s', charset_name, + confidence, + self.NEGATIVE_SHORTCUT_THRESHOLD) + self._state = ProbingState.NOT_ME + + return self.state + + def get_confidence(self): + r = 0.01 + if self._total_seqs > 0: + r = ((1.0 * self._seq_counters[SequenceLikelihood.POSITIVE]) / + self._total_seqs / self._model['typical_positive_ratio']) + r = r * self._freq_char / self._total_char + if r >= 1.0: + r = 0.99 + return r diff --git a/thesisenv/lib/python3.6/site-packages/chardet/sbcsgroupprober.py b/thesisenv/lib/python3.6/site-packages/chardet/sbcsgroupprober.py new file mode 100644 index 0000000..98e95dc --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/chardet/sbcsgroupprober.py @@ -0,0 +1,73 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Universal charset detector code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. 
+# Portions created by the Initial Developer are Copyright (C) 2001 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# Shy Shalom - original C code +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .charsetgroupprober import CharSetGroupProber +from .sbcharsetprober import SingleByteCharSetProber +from .langcyrillicmodel import (Win1251CyrillicModel, Koi8rModel, + Latin5CyrillicModel, MacCyrillicModel, + Ibm866Model, Ibm855Model) +from .langgreekmodel import Latin7GreekModel, Win1253GreekModel +from .langbulgarianmodel import Latin5BulgarianModel, Win1251BulgarianModel +# from .langhungarianmodel import Latin2HungarianModel, Win1250HungarianModel +from .langthaimodel import TIS620ThaiModel +from .langhebrewmodel import Win1255HebrewModel +from .hebrewprober import HebrewProber +from .langturkishmodel import Latin5TurkishModel + + +class SBCSGroupProber(CharSetGroupProber): + def __init__(self): + super(SBCSGroupProber, self).__init__() + self.probers = [ + SingleByteCharSetProber(Win1251CyrillicModel), + SingleByteCharSetProber(Koi8rModel), + SingleByteCharSetProber(Latin5CyrillicModel), + SingleByteCharSetProber(MacCyrillicModel), + 
SingleByteCharSetProber(Ibm866Model), + SingleByteCharSetProber(Ibm855Model), + SingleByteCharSetProber(Latin7GreekModel), + SingleByteCharSetProber(Win1253GreekModel), + SingleByteCharSetProber(Latin5BulgarianModel), + SingleByteCharSetProber(Win1251BulgarianModel), + # TODO: Restore Hungarian encodings (iso-8859-2 and windows-1250) + # after we retrain model. + # SingleByteCharSetProber(Latin2HungarianModel), + # SingleByteCharSetProber(Win1250HungarianModel), + SingleByteCharSetProber(TIS620ThaiModel), + SingleByteCharSetProber(Latin5TurkishModel), + ] + hebrew_prober = HebrewProber() + logical_hebrew_prober = SingleByteCharSetProber(Win1255HebrewModel, + False, hebrew_prober) + visual_hebrew_prober = SingleByteCharSetProber(Win1255HebrewModel, True, + hebrew_prober) + hebrew_prober.set_model_probers(logical_hebrew_prober, visual_hebrew_prober) + self.probers.extend([hebrew_prober, logical_hebrew_prober, + visual_hebrew_prober]) + + self.reset() diff --git a/thesisenv/lib/python3.6/site-packages/chardet/sjisprober.py b/thesisenv/lib/python3.6/site-packages/chardet/sjisprober.py new file mode 100644 index 0000000..9e29623 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/chardet/sjisprober.py @@ -0,0 +1,92 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. 
+# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .mbcharsetprober import MultiByteCharSetProber +from .codingstatemachine import CodingStateMachine +from .chardistribution import SJISDistributionAnalysis +from .jpcntx import SJISContextAnalysis +from .mbcssm import SJIS_SM_MODEL +from .enums import ProbingState, MachineState + + +class SJISProber(MultiByteCharSetProber): + def __init__(self): + super(SJISProber, self).__init__() + self.coding_sm = CodingStateMachine(SJIS_SM_MODEL) + self.distribution_analyzer = SJISDistributionAnalysis() + self.context_analyzer = SJISContextAnalysis() + self.reset() + + def reset(self): + super(SJISProber, self).reset() + self.context_analyzer.reset() + + @property + def charset_name(self): + return self.context_analyzer.charset_name + + @property + def language(self): + return "Japanese" + + def feed(self, byte_str): + for i in range(len(byte_str)): + coding_state = self.coding_sm.next_state(byte_str[i]) + if coding_state == MachineState.ERROR: + self.logger.debug('%s %s prober hit error at byte %s', + self.charset_name, self.language, i) + self._state = ProbingState.NOT_ME + break + elif coding_state == MachineState.ITS_ME: + self._state = ProbingState.FOUND_IT + break + elif coding_state == MachineState.START: + char_len = self.coding_sm.get_current_charlen() + if i == 0: + self._last_char[1] = byte_str[0] + self.context_analyzer.feed(self._last_char[2 - char_len:], + char_len) + self.distribution_analyzer.feed(self._last_char, 
char_len) + else: + self.context_analyzer.feed(byte_str[i + 1 - char_len:i + 3 + - char_len], char_len) + self.distribution_analyzer.feed(byte_str[i - 1:i + 1], + char_len) + + self._last_char[0] = byte_str[-1] + + if self.state == ProbingState.DETECTING: + if (self.context_analyzer.got_enough_data() and + (self.get_confidence() > self.SHORTCUT_THRESHOLD)): + self._state = ProbingState.FOUND_IT + + return self.state + + def get_confidence(self): + context_conf = self.context_analyzer.get_confidence() + distrib_conf = self.distribution_analyzer.get_confidence() + return max(context_conf, distrib_conf) diff --git a/thesisenv/lib/python3.6/site-packages/chardet/universaldetector.py b/thesisenv/lib/python3.6/site-packages/chardet/universaldetector.py new file mode 100644 index 0000000..7b4e92d --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/chardet/universaldetector.py @@ -0,0 +1,286 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is Mozilla Universal charset detector code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 2001 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# Shy Shalom - original C code +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### +""" +Module containing the UniversalDetector detector class, which is the primary +class a user of ``chardet`` should use. + +:author: Mark Pilgrim (initial port to Python) +:author: Shy Shalom (original C code) +:author: Dan Blanchard (major refactoring for 3.0) +:author: Ian Cordasco +""" + + +import codecs +import logging +import re + +from .charsetgroupprober import CharSetGroupProber +from .enums import InputState, LanguageFilter, ProbingState +from .escprober import EscCharSetProber +from .latin1prober import Latin1Prober +from .mbcsgroupprober import MBCSGroupProber +from .sbcsgroupprober import SBCSGroupProber + + +class UniversalDetector(object): + """ + The ``UniversalDetector`` class underlies the ``chardet.detect`` function + and coordinates all of the different charset probers. + + To get a ``dict`` containing an encoding and its confidence, you can simply + run: + + .. 
code:: + + u = UniversalDetector() + u.feed(some_bytes) + u.close() + detected = u.result + + """ + + MINIMUM_THRESHOLD = 0.20 + HIGH_BYTE_DETECTOR = re.compile(b'[\x80-\xFF]') + ESC_DETECTOR = re.compile(b'(\033|~{)') + WIN_BYTE_DETECTOR = re.compile(b'[\x80-\x9F]') + ISO_WIN_MAP = {'iso-8859-1': 'Windows-1252', + 'iso-8859-2': 'Windows-1250', + 'iso-8859-5': 'Windows-1251', + 'iso-8859-6': 'Windows-1256', + 'iso-8859-7': 'Windows-1253', + 'iso-8859-8': 'Windows-1255', + 'iso-8859-9': 'Windows-1254', + 'iso-8859-13': 'Windows-1257'} + + def __init__(self, lang_filter=LanguageFilter.ALL): + self._esc_charset_prober = None + self._charset_probers = [] + self.result = None + self.done = None + self._got_data = None + self._input_state = None + self._last_char = None + self.lang_filter = lang_filter + self.logger = logging.getLogger(__name__) + self._has_win_bytes = None + self.reset() + + def reset(self): + """ + Reset the UniversalDetector and all of its probers back to their + initial states. This is called by ``__init__``, so you only need to + call this directly in between analyses of different documents. + """ + self.result = {'encoding': None, 'confidence': 0.0, 'language': None} + self.done = False + self._got_data = False + self._has_win_bytes = False + self._input_state = InputState.PURE_ASCII + self._last_char = b'' + if self._esc_charset_prober: + self._esc_charset_prober.reset() + for prober in self._charset_probers: + prober.reset() + + def feed(self, byte_str): + """ + Takes a chunk of a document and feeds it through all of the relevant + charset probers. + + After calling ``feed``, you can check the value of the ``done`` + attribute to see if you need to continue feeding the + ``UniversalDetector`` more data, or if it has made a prediction + (in the ``result`` attribute). + + .. note:: + You should always call ``close`` when you're done feeding in your + document if ``done`` is not already ``True``. 
+ """ + if self.done: + return + + if not len(byte_str): + return + + if not isinstance(byte_str, bytearray): + byte_str = bytearray(byte_str) + + # First check for known BOMs, since these are guaranteed to be correct + if not self._got_data: + # If the data starts with BOM, we know it is UTF + if byte_str.startswith(codecs.BOM_UTF8): + # EF BB BF UTF-8 with BOM + self.result = {'encoding': "UTF-8-SIG", + 'confidence': 1.0, + 'language': ''} + elif byte_str.startswith((codecs.BOM_UTF32_LE, + codecs.BOM_UTF32_BE)): + # FF FE 00 00 UTF-32, little-endian BOM + # 00 00 FE FF UTF-32, big-endian BOM + self.result = {'encoding': "UTF-32", + 'confidence': 1.0, + 'language': ''} + elif byte_str.startswith(b'\xFE\xFF\x00\x00'): + # FE FF 00 00 UCS-4, unusual octet order BOM (3412) + self.result = {'encoding': "X-ISO-10646-UCS-4-3412", + 'confidence': 1.0, + 'language': ''} + elif byte_str.startswith(b'\x00\x00\xFF\xFE'): + # 00 00 FF FE UCS-4, unusual octet order BOM (2143) + self.result = {'encoding': "X-ISO-10646-UCS-4-2143", + 'confidence': 1.0, + 'language': ''} + elif byte_str.startswith((codecs.BOM_LE, codecs.BOM_BE)): + # FF FE UTF-16, little endian BOM + # FE FF UTF-16, big endian BOM + self.result = {'encoding': "UTF-16", + 'confidence': 1.0, + 'language': ''} + + self._got_data = True + if self.result['encoding'] is not None: + self.done = True + return + + # If none of those matched and we've only see ASCII so far, check + # for high bytes and escape sequences + if self._input_state == InputState.PURE_ASCII: + if self.HIGH_BYTE_DETECTOR.search(byte_str): + self._input_state = InputState.HIGH_BYTE + elif self._input_state == InputState.PURE_ASCII and \ + self.ESC_DETECTOR.search(self._last_char + byte_str): + self._input_state = InputState.ESC_ASCII + + self._last_char = byte_str[-1:] + + # If we've seen escape sequences, use the EscCharSetProber, which + # uses a simple state machine to check for known escape sequences in + # HZ and ISO-2022 encodings, since those 
are the only encodings that + # use such sequences. + if self._input_state == InputState.ESC_ASCII: + if not self._esc_charset_prober: + self._esc_charset_prober = EscCharSetProber(self.lang_filter) + if self._esc_charset_prober.feed(byte_str) == ProbingState.FOUND_IT: + self.result = {'encoding': + self._esc_charset_prober.charset_name, + 'confidence': + self._esc_charset_prober.get_confidence(), + 'language': + self._esc_charset_prober.language} + self.done = True + # If we've seen high bytes (i.e., those with values greater than 127), + # we need to do more complicated checks using all our multi-byte and + # single-byte probers that are left. The single-byte probers + # use character bigram distributions to determine the encoding, whereas + # the multi-byte probers use a combination of character unigram and + # bigram distributions. + elif self._input_state == InputState.HIGH_BYTE: + if not self._charset_probers: + self._charset_probers = [MBCSGroupProber(self.lang_filter)] + # If we're checking non-CJK encodings, use single-byte prober + if self.lang_filter & LanguageFilter.NON_CJK: + self._charset_probers.append(SBCSGroupProber()) + self._charset_probers.append(Latin1Prober()) + for prober in self._charset_probers: + if prober.feed(byte_str) == ProbingState.FOUND_IT: + self.result = {'encoding': prober.charset_name, + 'confidence': prober.get_confidence(), + 'language': prober.language} + self.done = True + break + if self.WIN_BYTE_DETECTOR.search(byte_str): + self._has_win_bytes = True + + def close(self): + """ + Stop analyzing the current document and come up with a final + prediction. + + :returns: The ``result`` attribute, a ``dict`` with the keys + `encoding`, `confidence`, and `language`. 
+ """ + # Don't bother with checks if we're already done + if self.done: + return self.result + self.done = True + + if not self._got_data: + self.logger.debug('no data received!') + + # Default to ASCII if it is all we've seen so far + elif self._input_state == InputState.PURE_ASCII: + self.result = {'encoding': 'ascii', + 'confidence': 1.0, + 'language': ''} + + # If we have seen non-ASCII, return the best that met MINIMUM_THRESHOLD + elif self._input_state == InputState.HIGH_BYTE: + prober_confidence = None + max_prober_confidence = 0.0 + max_prober = None + for prober in self._charset_probers: + if not prober: + continue + prober_confidence = prober.get_confidence() + if prober_confidence > max_prober_confidence: + max_prober_confidence = prober_confidence + max_prober = prober + if max_prober and (max_prober_confidence > self.MINIMUM_THRESHOLD): + charset_name = max_prober.charset_name + lower_charset_name = max_prober.charset_name.lower() + confidence = max_prober.get_confidence() + # Use Windows encoding name instead of ISO-8859 if we saw any + # extra Windows-specific bytes + if lower_charset_name.startswith('iso-8859'): + if self._has_win_bytes: + charset_name = self.ISO_WIN_MAP.get(lower_charset_name, + charset_name) + self.result = {'encoding': charset_name, + 'confidence': confidence, + 'language': max_prober.language} + + # Log all prober confidences if none met MINIMUM_THRESHOLD + if self.logger.getEffectiveLevel() == logging.DEBUG: + if self.result['encoding'] is None: + self.logger.debug('no probers hit minimum threshold') + for group_prober in self._charset_probers: + if not group_prober: + continue + if isinstance(group_prober, CharSetGroupProber): + for prober in group_prober.probers: + self.logger.debug('%s %s confidence = %s', + prober.charset_name, + prober.language, + prober.get_confidence()) + else: + self.logger.debug('%s %s confidence = %s', + prober.charset_name, + prober.language, + prober.get_confidence()) + return self.result diff 
--git a/thesisenv/lib/python3.6/site-packages/chardet/utf8prober.py b/thesisenv/lib/python3.6/site-packages/chardet/utf8prober.py new file mode 100644 index 0000000..6c3196c --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/chardet/utf8prober.py @@ -0,0 +1,82 @@ +######################## BEGIN LICENSE BLOCK ######################## +# The Original Code is mozilla.org code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1998 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Mark Pilgrim - port to Python +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from .charsetprober import CharSetProber +from .enums import ProbingState, MachineState +from .codingstatemachine import CodingStateMachine +from .mbcssm import UTF8_SM_MODEL + + + +class UTF8Prober(CharSetProber): + ONE_CHAR_PROB = 0.5 + + def __init__(self): + super(UTF8Prober, self).__init__() + self.coding_sm = CodingStateMachine(UTF8_SM_MODEL) + self._num_mb_chars = None + self.reset() + + def reset(self): + super(UTF8Prober, self).reset() + self.coding_sm.reset() + self._num_mb_chars = 0 + + @property + def charset_name(self): + return "utf-8" + + @property + def language(self): + return "" + + def feed(self, byte_str): + for c in byte_str: + coding_state = self.coding_sm.next_state(c) + if coding_state == MachineState.ERROR: + self._state = ProbingState.NOT_ME + break + elif coding_state == MachineState.ITS_ME: + self._state = ProbingState.FOUND_IT + break + elif coding_state == MachineState.START: + if self.coding_sm.get_current_charlen() >= 2: + self._num_mb_chars += 1 + + if self.state == ProbingState.DETECTING: + if self.get_confidence() > self.SHORTCUT_THRESHOLD: + self._state = ProbingState.FOUND_IT + + return self.state + + def get_confidence(self): + unlike = 0.99 + if self._num_mb_chars < 6: + unlike *= self.ONE_CHAR_PROB ** self._num_mb_chars + return 1.0 - unlike + else: + return unlike diff --git a/thesisenv/lib/python3.6/site-packages/chardet/version.py b/thesisenv/lib/python3.6/site-packages/chardet/version.py new file mode 100644 index 0000000..bb2a34a --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/chardet/version.py @@ -0,0 +1,9 @@ +""" +This module exists only to simplify retrieving the version number of chardet +from within setup.py and 
from chardet subpackages. + +:author: Dan Blanchard (dan.blanchard@gmail.com) +""" + +__version__ = "3.0.4" +VERSION = __version__.split('.') diff --git a/thesisenv/lib/python3.6/site-packages/decorator-4.3.0.dist-info/DESCRIPTION.rst b/thesisenv/lib/python3.6/site-packages/decorator-4.3.0.dist-info/DESCRIPTION.rst new file mode 100644 index 0000000..a138685 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/decorator-4.3.0.dist-info/DESCRIPTION.rst @@ -0,0 +1,59 @@ +Decorator module +================= + +:Author: Michele Simionato +:E-mail: michele.simionato@gmail.com +:Requires: Python from 2.6 to 3.6 +:Download page: http://pypi.python.org/pypi/decorator +:Installation: ``pip install decorator`` +:License: BSD license + +Installation +------------- + +If you are lazy, just perform + + `$ pip install decorator` + +which will install just the module on your system. + +If you prefer to install the full distribution from source, including +the documentation, clone the `GitHub repo`_ or download the tarball_, unpack it and run + + `$ pip install .` + +in the main directory, possibly as superuser. + +.. _tarball: http://pypi.python.org/pypi/decorator +.. _GitHub repo: https://github.com/micheles/decorator + +Testing +-------- + +If you have the source code installation you can run the tests with + + `$ python src/tests/test.py -v` + +or (if you have setuptools installed) + + `$ python setup.py test` + +Notice that you may run into trouble if in your system there +is an older version of the decorator module; in such a case remove the +old version. It is safe even to copy the module `decorator.py` over +an existing one, since we kept backward-compatibility for a long time. + +Repository +--------------- + +The project is hosted on GitHub. 
You can look at the source here: + + https://github.com/micheles/decorator + +Documentation +--------------- + +The documentation has been moved to http://decorator.readthedocs.io/en/latest/ +You can download a PDF version of it from http://media.readthedocs.org/pdf/decorator/latest/decorator.pdf + + diff --git a/thesisenv/lib/python3.6/site-packages/decorator-4.3.0.dist-info/INSTALLER b/thesisenv/lib/python3.6/site-packages/decorator-4.3.0.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/decorator-4.3.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/thesisenv/lib/python3.6/site-packages/decorator-4.3.0.dist-info/METADATA b/thesisenv/lib/python3.6/site-packages/decorator-4.3.0.dist-info/METADATA new file mode 100644 index 0000000..9f0926e --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/decorator-4.3.0.dist-info/METADATA @@ -0,0 +1,88 @@ +Metadata-Version: 2.0 +Name: decorator +Version: 4.3.0 +Summary: Better living through Python with decorators +Home-page: https://github.com/micheles/decorator +Author: Michele Simionato +Author-email: michele.simionato@gmail.com +License: new BSD License +Keywords: decorators generic utility +Platform: All +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: BSD License +Classifier: Natural Language :: English +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.6 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.2 +Classifier: Programming Language :: Python :: 3.3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 
+Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Topic :: Software Development :: Libraries +Classifier: Topic :: Utilities + +Decorator module +================= + +:Author: Michele Simionato +:E-mail: michele.simionato@gmail.com +:Requires: Python from 2.6 to 3.6 +:Download page: http://pypi.python.org/pypi/decorator +:Installation: ``pip install decorator`` +:License: BSD license + +Installation +------------- + +If you are lazy, just perform + + `$ pip install decorator` + +which will install just the module on your system. + +If you prefer to install the full distribution from source, including +the documentation, clone the `GitHub repo`_ or download the tarball_, unpack it and run + + `$ pip install .` + +in the main directory, possibly as superuser. + +.. _tarball: http://pypi.python.org/pypi/decorator +.. _GitHub repo: https://github.com/micheles/decorator + +Testing +-------- + +If you have the source code installation you can run the tests with + + `$ python src/tests/test.py -v` + +or (if you have setuptools installed) + + `$ python setup.py test` + +Notice that you may run into trouble if in your system there +is an older version of the decorator module; in such a case remove the +old version. It is safe even to copy the module `decorator.py` over +an existing one, since we kept backward-compatibility for a long time. + +Repository +--------------- + +The project is hosted on GitHub. 
You can look at the source here: + + https://github.com/micheles/decorator + +Documentation +--------------- + +The documentation has been moved to http://decorator.readthedocs.io/en/latest/ +You can download a PDF version of it from http://media.readthedocs.org/pdf/decorator/latest/decorator.pdf + + diff --git a/thesisenv/lib/python3.6/site-packages/decorator-4.3.0.dist-info/RECORD b/thesisenv/lib/python3.6/site-packages/decorator-4.3.0.dist-info/RECORD new file mode 100644 index 0000000..821c4a3 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/decorator-4.3.0.dist-info/RECORD @@ -0,0 +1,10 @@ +__pycache__/decorator.cpython-36.pyc,, +decorator-4.3.0.dist-info/DESCRIPTION.rst,sha256=eywbIQHHl8KGO3WYQrZNCllgPHb8Gvzl5LXhoJnr7Gs,1536 +decorator-4.3.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +decorator-4.3.0.dist-info/METADATA,sha256=9XUqIdazEArlifx6qJcsvcyAJqhGL9JeovuWwojYXoQ,2722 +decorator-4.3.0.dist-info/RECORD,, +decorator-4.3.0.dist-info/WHEEL,sha256=kdsN-5OJAZIiHN-iO4Rhl82KyS0bDWf4uBwMbkNafr8,110 +decorator-4.3.0.dist-info/metadata.json,sha256=4tqrOOToRQnzL3QuvR4C437g4E-oG_GLkWQVSh8FjFE,1263 +decorator-4.3.0.dist-info/pbr.json,sha256=AL84oUUWQHwkd8OCPhLRo2NJjU5MDdmXMqRHv-posqs,47 +decorator-4.3.0.dist-info/top_level.txt,sha256=Kn6eQjo83ctWxXVyBMOYt0_YpjRjBznKYVuNyuC_DSI,10 +decorator.py,sha256=dCGfrfgaTJk9WPgLl979P-FE85vpJhWmo2PF7UWSEvg,16298 diff --git a/thesisenv/lib/python3.6/site-packages/decorator-4.3.0.dist-info/WHEEL b/thesisenv/lib/python3.6/site-packages/decorator-4.3.0.dist-info/WHEEL new file mode 100644 index 0000000..7332a41 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/decorator-4.3.0.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.30.0) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/thesisenv/lib/python3.6/site-packages/decorator-4.3.0.dist-info/metadata.json 
b/thesisenv/lib/python3.6/site-packages/decorator-4.3.0.dist-info/metadata.json new file mode 100644 index 0000000..a9ba188 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/decorator-4.3.0.dist-info/metadata.json @@ -0,0 +1 @@ +{"classifiers": ["Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", "License :: OSI Approved :: BSD License", "Natural Language :: English", "Operating System :: OS Independent", "Programming Language :: Python", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.2", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6", "Programming Language :: Python :: Implementation :: CPython", "Topic :: Software Development :: Libraries", "Topic :: Utilities"], "extensions": {"python.details": {"contacts": [{"email": "michele.simionato@gmail.com", "name": "Michele Simionato", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst"}, "project_urls": {"Home": "https://github.com/micheles/decorator"}}}, "generator": "bdist_wheel (0.30.0)", "keywords": ["decorators", "generic", "utility"], "license": "new BSD License", "metadata_version": "2.0", "name": "decorator", "platform": "All", "summary": "Better living through Python with decorators", "version": "4.3.0"} \ No newline at end of file diff --git a/thesisenv/lib/python3.6/site-packages/decorator-4.3.0.dist-info/pbr.json b/thesisenv/lib/python3.6/site-packages/decorator-4.3.0.dist-info/pbr.json new file mode 100644 index 0000000..cd04599 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/decorator-4.3.0.dist-info/pbr.json @@ -0,0 +1 @@ +{"is_release": false, "git_version": "8608a46"} \ No newline at end of file diff --git 
a/thesisenv/lib/python3.6/site-packages/decorator-4.3.0.dist-info/top_level.txt b/thesisenv/lib/python3.6/site-packages/decorator-4.3.0.dist-info/top_level.txt new file mode 100644 index 0000000..3fe18a4 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/decorator-4.3.0.dist-info/top_level.txt @@ -0,0 +1 @@ +decorator diff --git a/thesisenv/lib/python3.6/site-packages/decorator.py b/thesisenv/lib/python3.6/site-packages/decorator.py new file mode 100644 index 0000000..44303ee --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/decorator.py @@ -0,0 +1,432 @@ +# ######################### LICENSE ############################ # + +# Copyright (c) 2005-2018, Michele Simionato +# All rights reserved. + +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: + +# Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# Redistributions in bytecode form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. + +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +# HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS +# OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR +# TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE +# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH +# DAMAGE. + +""" +Decorator module, see http://pypi.python.org/pypi/decorator +for the documentation. +""" +from __future__ import print_function + +import re +import sys +import inspect +import operator +import itertools +import collections + +__version__ = '4.3.0' + +if sys.version >= '3': + from inspect import getfullargspec + + def get_init(cls): + return cls.__init__ +else: + FullArgSpec = collections.namedtuple( + 'FullArgSpec', 'args varargs varkw defaults ' + 'kwonlyargs kwonlydefaults annotations') + + def getfullargspec(f): + "A quick and dirty replacement for getfullargspec for Python 2.X" + return FullArgSpec._make(inspect.getargspec(f) + ([], None, {})) + + def get_init(cls): + return cls.__init__.__func__ + +try: + iscoroutinefunction = inspect.iscoroutinefunction +except AttributeError: + # let's assume there are no coroutine functions in old Python + def iscoroutinefunction(f): + return False + + +DEF = re.compile(r'\s*def\s*([_\w][_\w\d]*)\s*\(') + + +# basic functionality +class FunctionMaker(object): + """ + An object with the ability to create functions with a given signature. + It has attributes name, doc, module, signature, defaults, dict and + methods update and make. 
+ """ + + # Atomic get-and-increment provided by the GIL + _compile_count = itertools.count() + + # make pylint happy + args = varargs = varkw = defaults = kwonlyargs = kwonlydefaults = () + + def __init__(self, func=None, name=None, signature=None, + defaults=None, doc=None, module=None, funcdict=None): + self.shortsignature = signature + if func: + # func can be a class or a callable, but not an instance method + self.name = func.__name__ + if self.name == '': # small hack for lambda functions + self.name = '_lambda_' + self.doc = func.__doc__ + self.module = func.__module__ + if inspect.isfunction(func): + argspec = getfullargspec(func) + self.annotations = getattr(func, '__annotations__', {}) + for a in ('args', 'varargs', 'varkw', 'defaults', 'kwonlyargs', + 'kwonlydefaults'): + setattr(self, a, getattr(argspec, a)) + for i, arg in enumerate(self.args): + setattr(self, 'arg%d' % i, arg) + allargs = list(self.args) + allshortargs = list(self.args) + if self.varargs: + allargs.append('*' + self.varargs) + allshortargs.append('*' + self.varargs) + elif self.kwonlyargs: + allargs.append('*') # single star syntax + for a in self.kwonlyargs: + allargs.append('%s=None' % a) + allshortargs.append('%s=%s' % (a, a)) + if self.varkw: + allargs.append('**' + self.varkw) + allshortargs.append('**' + self.varkw) + self.signature = ', '.join(allargs) + self.shortsignature = ', '.join(allshortargs) + self.dict = func.__dict__.copy() + # func=None happens when decorating a caller + if name: + self.name = name + if signature is not None: + self.signature = signature + if defaults: + self.defaults = defaults + if doc: + self.doc = doc + if module: + self.module = module + if funcdict: + self.dict = funcdict + # check existence required attributes + assert hasattr(self, 'name') + if not hasattr(self, 'signature'): + raise TypeError('You are decorating a non function: %s' % func) + + def update(self, func, **kw): + "Update the signature of func with the data in self" + 
func.__name__ = self.name + func.__doc__ = getattr(self, 'doc', None) + func.__dict__ = getattr(self, 'dict', {}) + func.__defaults__ = self.defaults + func.__kwdefaults__ = self.kwonlydefaults or None + func.__annotations__ = getattr(self, 'annotations', None) + try: + frame = sys._getframe(3) + except AttributeError: # for IronPython and similar implementations + callermodule = '?' + else: + callermodule = frame.f_globals.get('__name__', '?') + func.__module__ = getattr(self, 'module', callermodule) + func.__dict__.update(kw) + + def make(self, src_templ, evaldict=None, addsource=False, **attrs): + "Make a new function from a given template and update the signature" + src = src_templ % vars(self) # expand name and signature + evaldict = evaldict or {} + mo = DEF.search(src) + if mo is None: + raise SyntaxError('not a valid function template\n%s' % src) + name = mo.group(1) # extract the function name + names = set([name] + [arg.strip(' *') for arg in + self.shortsignature.split(',')]) + for n in names: + if n in ('_func_', '_call_'): + raise NameError('%s is overridden in\n%s' % (n, src)) + + if not src.endswith('\n'): # add a newline for old Pythons + src += '\n' + + # Ensure each generated function has a unique filename for profilers + # (such as cProfile) that depend on the tuple of (, + # , ) being unique. + filename = '' % (next(self._compile_count),) + try: + code = compile(src, filename, 'single') + exec(code, evaldict) + except Exception: + print('Error in generated code:', file=sys.stderr) + print(src, file=sys.stderr) + raise + func = evaldict[name] + if addsource: + attrs['__source__'] = src + self.update(func, **attrs) + return func + + @classmethod + def create(cls, obj, body, evaldict, defaults=None, + doc=None, module=None, addsource=True, **attrs): + """ + Create a function from the strings name, signature and body. + evaldict is the evaluation dictionary. If addsource is true an + attribute __source__ is added to the result. 
The attributes attrs + are added, if any. + """ + if isinstance(obj, str): # "name(signature)" + name, rest = obj.strip().split('(', 1) + signature = rest[:-1] # strip a right parens + func = None + else: # a function + name = None + signature = None + func = obj + self = cls(func, name, signature, defaults, doc, module) + ibody = '\n'.join(' ' + line for line in body.splitlines()) + caller = evaldict.get('_call_') # when called from `decorate` + if caller and iscoroutinefunction(caller): + body = ('async def %(name)s(%(signature)s):\n' + ibody).replace( + 'return', 'return await') + else: + body = 'def %(name)s(%(signature)s):\n' + ibody + return self.make(body, evaldict, addsource, **attrs) + + +def decorate(func, caller, extras=()): + """ + decorate(func, caller) decorates a function using a caller. + """ + evaldict = dict(_call_=caller, _func_=func) + es = '' + for i, extra in enumerate(extras): + ex = '_e%d_' % i + evaldict[ex] = extra + es += ex + ', ' + fun = FunctionMaker.create( + func, "return _call_(_func_, %s%%(shortsignature)s)" % es, + evaldict, __wrapped__=func) + if hasattr(func, '__qualname__'): + fun.__qualname__ = func.__qualname__ + return fun + + +def decorator(caller, _func=None): + """decorator(caller) converts a caller function into a decorator""" + if _func is not None: # return a decorated function + # this is obsolete behavior; you should use decorate instead + return decorate(_func, caller) + # else return a decorator function + defaultargs, defaults = '', () + if inspect.isclass(caller): + name = caller.__name__.lower() + doc = 'decorator(%s) converts functions/generators into ' \ + 'factories of %s objects' % (caller.__name__, caller.__name__) + elif inspect.isfunction(caller): + if caller.__name__ == '': + name = '_lambda_' + else: + name = caller.__name__ + doc = caller.__doc__ + nargs = caller.__code__.co_argcount + ndefs = len(caller.__defaults__ or ()) + defaultargs = ', '.join(caller.__code__.co_varnames[nargs-ndefs:nargs]) + if 
defaultargs: + defaultargs += ',' + defaults = caller.__defaults__ + else: # assume caller is an object with a __call__ method + name = caller.__class__.__name__.lower() + doc = caller.__call__.__doc__ + evaldict = dict(_call=caller, _decorate_=decorate) + dec = FunctionMaker.create( + '%s(%s func)' % (name, defaultargs), + 'if func is None: return lambda func: _decorate_(func, _call, (%s))\n' + 'return _decorate_(func, _call, (%s))' % (defaultargs, defaultargs), + evaldict, doc=doc, module=caller.__module__, __wrapped__=caller) + if defaults: + dec.__defaults__ = defaults + (None,) + return dec + + +# ####################### contextmanager ####################### # + +try: # Python >= 3.2 + from contextlib import _GeneratorContextManager +except ImportError: # Python >= 2.5 + from contextlib import GeneratorContextManager as _GeneratorContextManager + + +class ContextManager(_GeneratorContextManager): + def __call__(self, func): + """Context manager decorator""" + return FunctionMaker.create( + func, "with _self_: return _func_(%(shortsignature)s)", + dict(_self_=self, _func_=func), __wrapped__=func) + + +init = getfullargspec(_GeneratorContextManager.__init__) +n_args = len(init.args) +if n_args == 2 and not init.varargs: # (self, genobj) Python 2.7 + def __init__(self, g, *a, **k): + return _GeneratorContextManager.__init__(self, g(*a, **k)) + ContextManager.__init__ = __init__ +elif n_args == 2 and init.varargs: # (self, gen, *a, **k) Python 3.4 + pass +elif n_args == 4: # (self, gen, args, kwds) Python 3.5 + def __init__(self, g, *a, **k): + return _GeneratorContextManager.__init__(self, g, a, k) + ContextManager.__init__ = __init__ + +_contextmanager = decorator(ContextManager) + + +def contextmanager(func): + # Enable Pylint config: contextmanager-decorators=decorator.contextmanager + return _contextmanager(func) + + +# ############################ dispatch_on ############################ # + +def append(a, vancestors): + """ + Append ``a`` to the list of 
the virtual ancestors, unless it is already + included. + """ + add = True + for j, va in enumerate(vancestors): + if issubclass(va, a): + add = False + break + if issubclass(a, va): + vancestors[j] = a + add = False + if add: + vancestors.append(a) + + +# inspired from simplegeneric by P.J. Eby and functools.singledispatch +def dispatch_on(*dispatch_args): + """ + Factory of decorators turning a function into a generic function + dispatching on the given arguments. + """ + assert dispatch_args, 'No dispatch args passed' + dispatch_str = '(%s,)' % ', '.join(dispatch_args) + + def check(arguments, wrong=operator.ne, msg=''): + """Make sure one passes the expected number of arguments""" + if wrong(len(arguments), len(dispatch_args)): + raise TypeError('Expected %d arguments, got %d%s' % + (len(dispatch_args), len(arguments), msg)) + + def gen_func_dec(func): + """Decorator turning a function into a generic function""" + + # first check the dispatch arguments + argset = set(getfullargspec(func).args) + if not set(dispatch_args) <= argset: + raise NameError('Unknown dispatch arguments %s' % dispatch_str) + + typemap = {} + + def vancestors(*types): + """ + Get a list of sets of virtual ancestors for the given types + """ + check(types) + ras = [[] for _ in range(len(dispatch_args))] + for types_ in typemap: + for t, type_, ra in zip(types, types_, ras): + if issubclass(t, type_) and type_ not in t.mro(): + append(type_, ra) + return [set(ra) for ra in ras] + + def ancestors(*types): + """ + Get a list of virtual MROs, one for each type + """ + check(types) + lists = [] + for t, vas in zip(types, vancestors(*types)): + n_vas = len(vas) + if n_vas > 1: + raise RuntimeError( + 'Ambiguous dispatch for %s: %s' % (t, vas)) + elif n_vas == 1: + va, = vas + mro = type('t', (t, va), {}).mro()[1:] + else: + mro = t.mro() + lists.append(mro[:-1]) # discard t and object + return lists + + def register(*types): + """ + Decorator to register an implementation for the given types + 
""" + check(types) + + def dec(f): + check(getfullargspec(f).args, operator.lt, ' in ' + f.__name__) + typemap[types] = f + return f + return dec + + def dispatch_info(*types): + """ + An utility to introspect the dispatch algorithm + """ + check(types) + lst = [] + for anc in itertools.product(*ancestors(*types)): + lst.append(tuple(a.__name__ for a in anc)) + return lst + + def _dispatch(dispatch_args, *args, **kw): + types = tuple(type(arg) for arg in dispatch_args) + try: # fast path + f = typemap[types] + except KeyError: + pass + else: + return f(*args, **kw) + combinations = itertools.product(*ancestors(*types)) + next(combinations) # the first one has been already tried + for types_ in combinations: + f = typemap.get(types_) + if f is not None: + return f(*args, **kw) + + # else call the default implementation + return func(*args, **kw) + + return FunctionMaker.create( + func, 'return _f_(%s, %%(shortsignature)s)' % dispatch_str, + dict(_f_=_dispatch), register=register, default=func, + typemap=typemap, vancestors=vancestors, ancestors=ancestors, + dispatch_info=dispatch_info, __wrapped__=func) + + gen_func_dec.__name__ = 'dispatch_on' + dispatch_str + return gen_func_dec diff --git a/thesisenv/lib/python3.6/site-packages/django_newsletter-0.7.dist-info/INSTALLER b/thesisenv/lib/python3.6/site-packages/django_newsletter-0.7.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/django_newsletter-0.7.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/thesisenv/lib/python3.6/site-packages/django_newsletter-0.7.dist-info/METADATA b/thesisenv/lib/python3.6/site-packages/django_newsletter-0.7.dist-info/METADATA new file mode 100644 index 0000000..fe7340a --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/django_newsletter-0.7.dist-info/METADATA @@ -0,0 +1,277 @@ +Metadata-Version: 2.1 +Name: django-newsletter +Version: 0.7 +Summary: Django app for managing multiple mass-mailing lists with 
both plaintext as well as HTML templates (and pluggable WYSIWYG editors for messages), images and a smart queueing system all right from the admin interface. +Home-page: http://github.com/dokterbob/django-newsletter/ +Author: Mathijs de Bruin +Author-email: mathijs@mathijsfietst.nl +License: AGPL +Platform: UNKNOWN +Classifier: Development Status :: 6 - Mature +Classifier: Environment :: Web Environment +Classifier: Framework :: Django +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: GNU Affero General Public License v3 +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Topic :: Utilities +Requires-Dist: Django (>=1.8.19) +Requires-Dist: python-card-me (<1.0) +Requires-Dist: ldif3 (<3.2) +Requires-Dist: chardet +Requires-Dist: surlex (>=0.2.0) +Requires-Dist: sorl-thumbnail (>=11.12.1b) +Requires-Dist: six +Requires-Dist: unicodecsv (<0.15) +Requires-Dist: Pillow + +################# +django-newsletter +################# + +.. image:: https://img.shields.io/pypi/v/django-newsletter.svg + :target: https://pypi.python.org/pypi/django-newsletter + +.. image:: https://img.shields.io/travis/dokterbob/django-newsletter/master.svg + :target: http://travis-ci.org/dokterbob/django-newsletter + +.. image:: https://coveralls.io/repos/dokterbob/django-newsletter/badge.svg?branch=master&service=github + :target: https://coveralls.io/github/dokterbob/django-newsletter?branch=master + +.. image:: https://landscape.io/github/dokterbob/django-newsletter/master/landscape.svg?style=flat + :target: https://landscape.io/github/dokterbob/django-newsletter/master + :alt: Code Health + +Newsletter application for the Django web framework. + +What is it? 
+=========== +Django app for managing multiple mass-mailing lists with both plaintext as +well as HTML templates with rich text widget integration, images and a smart +queueing system all right from the admin interface. + +Status +====== +We are currently using this package in several large to medium scale production +environments, but it should be considered a permanent work in progress. + +Documentation +============= +Extended documentation is available on +`Read the Docs `_. + +Translations +============ +All strings have been translated to German, French, English, Russian, Polish, Dutch, Italian, Arabic, Brazilian Portuguese, Icelandic and Czech with more languages on their way. + +Contributions to translations are welcome through `Transifex `_. Strings will be included as +soon as near-full coverage is reached. + +.. image:: https://www.transifex.com/projects/p/django-newsletter/resource/django/chart/image_png + :target: http://www.transifex.net/projects/p/django-newsletter/ + +Compatibility +============= +Currently, django-newsletter is officially supported for Django 1.11 and Django 2.0 with +Django 2.1 support pending. We support Python +2.7, 3.4, 3.5, 3.6 with 3.7 untested (ref #256). + +Requirements +============ +Please refer to `requirements.txt `_ +for an updated list of required packages. + +Tests +========== +Fairly extensive tests are available for internal frameworks, web +(un)subscription and mail sending. Sending a newsletter to large groups of recipients +(+15k) has been confirmed to work in multiple production environments. Tests +for pull req's and the master branch are automatically run through +`Travis CI `_. + +Contributing +============= +.. image:: https://badge.waffle.io/dokterbob/django-newsletter.png?label=ready&title=Ready + :target: https://waffle.io/dokterbob/django-newsletter + :alt: 'Stories in Ready' + +.. 
image:: https://badge.waffle.io/dokterbob/django-newsletter.png?label=in%20progress&title=Progress + :target: https://waffle.io/dokterbob/django-newsletter + :alt: 'Stories in Progress' + +.. image:: https://badge.waffle.io/dokterbob/django-newsletter.png?label=under%20review&title=Review + :target: https://waffle.io/dokterbob/django-newsletter + :alt: 'Stories Under Review' + +Should you wish to contribute, great! Please have a look at the `waffle.io board `_. Issues in the 'Ready' column are ready for implementation, just drag the issue to 'In Progress' and then to 'Review'. Issues in the backlog require some further discussion concering the scope and methods of implementation, please feel free to mingle in discussions. Lastly, should you see an issue with the 'Review' status, feel free to help out other contributors with your feedback. + +Feedback +======== +If you find any bugs or have feature request for django-newsletter, don't hesitate to +open up an issue on `GitHub `_ +(but please make sure your issue hasn't been noticed before, finding duplicates is a +waste of time). When modifying or adding features to django-newsletter in a fork, be +sure to let me know what you're building and how you're building it. That way we can +coordinate whether, when and how it will end up in the main fork and (eventually) an +official release. + +In general: thanks for the support, feedback, patches and code that's been flowing in +over the years! Django has a truly great community. <3 + +License +======= +This application is released +under the GNU Affero General Public License version 3. + + +Changes +======= + +0.7 (13-07-2018) +------------------ + +- Fixed security issue allowing subscription without confirmation (#108). +- Updated locales (ar, de). +- Proper default Newsletter for Message, fixes #224. +- Instructions for embedding a sign-up form (#250). +- Migration issue with Django 2.0 (#254). +- Miscellaneous documentation improvements. 
+ +0.7b2 (25-03-2018) +------------------ + +- Drop support for deprecated Django 1.10. +- Introduce `submit_newsletter` management command, deprecating cron job and + dropping `django-extensions` dependency. + +- Fix for encoding of non-ASCII recipient names for Django < 1.9 (#244). +- Allow programmatic access Article and Submission save() methods (#246). + +0.7b1 (16-11-2017) +------------------ + +- Support for Django 1.10, 1.11 and tentative support for 2.0. +- Drop support for Django 1.9. +- Added support for Python 3.6. + +- Isolated the send_message process in anticipation of dropping of + django-extensions dependency (#39). +- Custom ArticleFormSet for improved Article sortorder, hidden + by default. (#194) +- Move tests to separate directory, exclude from binaries and use + Django's native test runner. (#206) +- Cleanup of form validation. (#209) +- Settings for delays between emails, batches and the size of batches. (#223) +- Add missing translatable strings in templates. (#220) +- Added translations for es, el_GR. +- Updated translations for fa, fr, nl. + +Security fixes +^^^^^^^^^^^^^^ + +- Don’t leak username in unsubscribe form. +- Use Django’s crypto code to generate random code. + +Small fixes +^^^^^^^^^^^ + +- Add MySQL contrib to export list of subscribers. +- Add note about EMAIL_* settings in installation docs. +- Added test for `Message.__str__`. +- Warnings when files cannot be read in setup.py. +- Move test requirements to their approriate place. Closes (#190) +- Note on upgrading from <0.5. +- Added documentation on premailers. Closes (#178) +- Display email on import confirmation page. +- Fix broken links in requirements. (#205) +- Move Pillow to requirements, fixes (#202). +- Add a second subscription for mailing tests. +- Require Django 1.8.18 (latest point release). +- HTML5 doctype for default templates. + +0.6 (2-2-2016) +-------------- + +- Added support for Django 1.8 and 1.9, and dropped support for older versions. 
+- Added support for native Django migrations, replacing South migrations. +- Added Python 3.4/3.5 support and dropped Python 2.6 support. +- Replaced IPAddressField by GenericIPAddressField (#131). +- Fixed addresses serialization with JSON-based sessions (#104). +- Add List-Unsubscribe header to sent messages (#169). +- Added Polish and Brazilian Portuguese translations. +- Significantly improved test coverage. + +Small fixes +^^^^^^^^^^^ + +- Submission admin always takes last message (#170). +- Check that user has "add_subscription" permission when importing subscriptions (#128). +- Fix for Submission.publish_date default value (#125). +- Change subscription status in admin to radio field (#122). +- Make the Submissions list display the Publish date and time with respect to the server's timezone (#112). +- Several smaller issues: #107, #121, #123 + +0.5.2 (1-5-2014) +---------------- + +- Additional locale support: Arabic, Czech, French and Islandic +- Run tests on Django 1.7 beta and Python 3.3 (but allow failure) + +0.5.1 (21-11-2013) +------------------ + +- Added Italian translation, thanks to azanibellato. +- Support for pluggable/custom user models (#101). +- Proper Sphinx documentation with autodoc on Read the Docs (#90). +- Compatibility with Django 1.6 thanks to @jnss (#97). +- Include default message templates in package (#95). +- Fix database to template file migration for non-ASCII characters (#94). +- Fix small issues with vCard imports (mainly mimetype-related). + +0.5 (03-10-2013) +---------------- + +- Added proxy for app-specific settings. +- Optional skipping of email confirmation in views (`CONFIRM_EMAIL_`). +- Russian translation (contributed by ak3n). +- Added explicit HTML toogle to Newsletter model. +- Fix JavaScript submit link on "Add submission", ported to use jQuery. +- Replacement of remaining function based views with class based equivalents. +- Move message templates from database to files. 
+ +0.4.1 (15-04-2013) +------------------ + +- Started keeping a decent history file. (Finally...) +- Support Django 1.5; make use of class based generic views +- Drop Django 1.3 and Python 2.5 support. +- 100% test coverage for views +- Farsi translations (contributed by rohamn) +- French translations (contributed by smalter) +- Admin actions for subscribing/unsubscribing (contributed by jnns) +- Introduced django-webtest for some tests +- Exempt previews from XFrame protection (fixes #54) + +0.4 (20-11-2012) +---------------- + +- Major code cleanup; PEP8, imports, restructuring, removal of legacy code +- Improved testing throgh Travis and better test coverage +- South migrations +- Added German translation (contributed by jnns) +- WYSIWYG editor is now optional and pluggable, Imperavi and TinyMCE supported +- Timezone-aware date-times when Django 1.4 is used +- Ue of Django 1.3's messages framework +- Many small bugfixes (see GitHub issues) +- Drop support for Django 1.2 +- Automatic detection of charset, encoding and dialect for CSV import +- Much cleaner log messages with proper message substitution +- Use Django's staticfiles contrib for static assets in admin interface +- Use surlex for more readable URL templates +- Use sorl-thumbnail for article images and default templates + + diff --git a/thesisenv/lib/python3.6/site-packages/django_newsletter-0.7.dist-info/RECORD b/thesisenv/lib/python3.6/site-packages/django_newsletter-0.7.dist-info/RECORD new file mode 100644 index 0000000..eeb4bfc --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/django_newsletter-0.7.dist-info/RECORD @@ -0,0 +1,165 @@ +django_newsletter-0.7.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +django_newsletter-0.7.dist-info/METADATA,sha256=mStyKdzgXBKnvypayYE5eO6Cr4P1HDtxl6VC5kqEEQg,11270 +django_newsletter-0.7.dist-info/RECORD,, +django_newsletter-0.7.dist-info/WHEEL,sha256=gduuPyBvFJQSQ0zdyxF7k0zynDXbIbvg5ZBHoXum5uk,110 
+django_newsletter-0.7.dist-info/pbr.json,sha256=sYj6OB_-w7_PdUKUoRS-B9H22pcFB6vCcUbAjHg45V0,47 +django_newsletter-0.7.dist-info/top_level.txt,sha256=vxkSml-YFcIsSfUBUJHl37wLi64TqLZElH0dimomm3k,11 +newsletter/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +newsletter/__pycache__/__init__.cpython-36.pyc,, +newsletter/__pycache__/admin.cpython-36.pyc,, +newsletter/__pycache__/admin_forms.cpython-36.pyc,, +newsletter/__pycache__/admin_utils.cpython-36.pyc,, +newsletter/__pycache__/compat.cpython-36.pyc,, +newsletter/__pycache__/forms.cpython-36.pyc,, +newsletter/__pycache__/models.cpython-36.pyc,, +newsletter/__pycache__/settings.cpython-36.pyc,, +newsletter/__pycache__/urls.cpython-36.pyc,, +newsletter/__pycache__/utils.cpython-36.pyc,, +newsletter/__pycache__/validators.cpython-36.pyc,, +newsletter/__pycache__/views.cpython-36.pyc,, +newsletter/addressimport/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +newsletter/addressimport/__pycache__/__init__.cpython-36.pyc,, +newsletter/addressimport/__pycache__/parsers.cpython-36.pyc,, +newsletter/addressimport/parsers.py,sha256=RPQ6IEo8-94Ioz7MNHkJNvb0KQM8Yl57t7a7NlrnaLY,9585 +newsletter/admin.py,sha256=LtuGTCP7c5YzBIwvkv5d_MAk1woKvIf5Gx20JvjnPMI,18041 +newsletter/admin_forms.py,sha256=yqhZ607Uw47_EZ8iyWU0mltRiIIlg3rmqz_aL-p0kbM,5984 +newsletter/admin_utils.py,sha256=nzdO7G5CPjaP1dqdaxu-Y9x8dwW8UuXSQSj0V-dbX74,1698 +newsletter/compat.py,sha256=kHFyLQSHPqp4PHRA2sMhNQzxsqem3gs3e52ST22Rxo8,457 +newsletter/forms.py,sha256=rz6vKByN8H4rxWMRPS7XWg0Vlh3Bu_bgj6yKitFOa6c,4527 +newsletter/jobs/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +newsletter/jobs/__pycache__/__init__.cpython-36.pyc,, +newsletter/jobs/daily/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +newsletter/jobs/daily/__pycache__/__init__.cpython-36.pyc,, +newsletter/jobs/hourly/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 
+newsletter/jobs/hourly/__pycache__/__init__.cpython-36.pyc,, +newsletter/jobs/hourly/__pycache__/submit.cpython-36.pyc,, +newsletter/jobs/hourly/submit.py,sha256=u6he9y0UGaad7G07asvd2y8YI3NUvOowsim0WeV1zx0,421 +newsletter/jobs/monthly/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +newsletter/jobs/monthly/__pycache__/__init__.cpython-36.pyc,, +newsletter/jobs/weekly/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +newsletter/jobs/weekly/__pycache__/__init__.cpython-36.pyc,, +newsletter/locale/ar/LC_MESSAGES/django.mo,sha256=yR1kIAgIaVsTa8jj6PG1JmLIZUAcdCUwjg3Kp2KntVQ,17202 +newsletter/locale/ar/LC_MESSAGES/django.po,sha256=egiX_Fx-dUhsTEsYssMQe-8lxFX56Qv5fmQ6lw1fbuI,27292 +newsletter/locale/ar/LC_MESSAGES/djangojs.mo,sha256=s6FLUv_5I7D1_hAROtADseBTIqR5vNNxbFyYLj2SMRo,957 +newsletter/locale/ar/LC_MESSAGES/djangojs.po,sha256=VZU88a-4LF0DtLnIkmyEqrvsI9l6isTJSCStelQ6dtY,1241 +newsletter/locale/cs_CZ/LC_MESSAGES/django.mo,sha256=lKvlNjL3QfHm0DfUPdtDq-e8BonoB01WvA3PuFzQ6hw,14956 +newsletter/locale/cs_CZ/LC_MESSAGES/django.po,sha256=pLumagtvDf6SLkmEVMPZZa5WnZbVIyxq4qdrTP56MpI,23837 +newsletter/locale/cs_CZ/LC_MESSAGES/djangojs.mo,sha256=9qllaGAqam_5OzZ9RKjpEsJUxn8LgBsAWViV85H-v9M,891 +newsletter/locale/cs_CZ/LC_MESSAGES/djangojs.po,sha256=l-X-x6RoU2fT17Fq3umJHIEk2OeYhX7LfVdaldnbYXg,1152 +newsletter/locale/de/LC_MESSAGES/django.mo,sha256=0u8I-FQu3aXi0jQLM_Taybeyav9mT_mTnTQ46k54cHY,14614 +newsletter/locale/de/LC_MESSAGES/django.po,sha256=t5ve3fE5Lsem9uerD3VERdlprYgYwJFPYKrqK8NVGBI,26110 +newsletter/locale/de/LC_MESSAGES/djangojs.mo,sha256=G65N9Y2_SSpSDLnMAIrdIub6cGaVq23cH3XpF6Dlz2s,806 +newsletter/locale/de/LC_MESSAGES/djangojs.po,sha256=LcFY-ivVX5MUzAcfESxWDcvFJnXQxIAWZEPOnVjUJl4,1055 +newsletter/locale/el_GR/.DS_Store,sha256=yDnCBtJ-cFuX96-D05SPwlTgp-eFOM2FKgLK4CNLJRk,6148 +newsletter/locale/el_GR/LC_MESSAGES/.DS_Store,sha256=1lFlJ5EFymdzGAUAaI30vcaaLHt3F1LwpG7xILf9jsM,6148 
+newsletter/locale/el_GR/LC_MESSAGES/django.mo,sha256=VZMTWr1ZjWF0bYhiRlu7cBVYnQPW1YEJ5Ag7Tr9dWWo,17836 +newsletter/locale/el_GR/LC_MESSAGES/django.po,sha256=Mkbjcz_r4U0qkuwItlcPbWF46GXiXZvLrzA0Ed-vdqI,28686 +newsletter/locale/el_GR/LC_MESSAGES/djangojs.mo,sha256=iWoVuJvdIkDSWgp_pxwj0M5j24-B65q5LHbF7KVRNw8,1035 +newsletter/locale/el_GR/LC_MESSAGES/djangojs.po,sha256=U39E_SQ1BTQgglLp05UTQwCdpU-r8yyMvBfDi68x1d4,1321 +newsletter/locale/en/LC_MESSAGES/django.mo,sha256=x7-bK5XTe5RQgSLm5G8gqRbNr4_YvAeFwMlic4vFtaM,378 +newsletter/locale/en/LC_MESSAGES/django.po,sha256=wQkrg8YQYXMyLPGmnqd_9R-YtlHUMtouCIjEswbgYJc,19510 +newsletter/locale/en/LC_MESSAGES/djangojs.mo,sha256=XYybQBZaTej6_T0V2osObuIGPJ6kccgLacEYzfsMV-4,367 +newsletter/locale/en/LC_MESSAGES/djangojs.po,sha256=xp3dx_E_HSgEqwTUJfKIl_8e4GVGY60tpK9n3xU8RbE,785 +newsletter/locale/es/LC_MESSAGES/django.mo,sha256=LCezL8A5M--YMjrIYch_Nm0hw_G8bgM7wOEQ7i3B1H0,16228 +newsletter/locale/es/LC_MESSAGES/django.po,sha256=an2Rql-srog0XGjwFyqKYTO0cgppJB5WP4QopzJ0sWY,26059 +newsletter/locale/es/LC_MESSAGES/djangojs.mo,sha256=OhSNt4dv68xovseA_MzUzR-r1WlZ8sb0UZ8RRmLvHt0,833 +newsletter/locale/es/LC_MESSAGES/djangojs.po,sha256=IWK9AJdEn9_ZdZ2tAv56Y5iFNl6KmvKxANaGbvnw8ZE,1107 +newsletter/locale/fa/LC_MESSAGES/django.mo,sha256=A7JgGf2MwW0ZAe-A8iyNbCq0zblLUbHVnYgmjHJ_D-4,16046 +newsletter/locale/fa/LC_MESSAGES/django.po,sha256=huQiecvpy1AkHZxJb9m8QrSKuoW0w6ESFIPpZThFZjo,25150 +newsletter/locale/fa/LC_MESSAGES/djangojs.mo,sha256=NKkgD652Ah-hCD3ae9la71zg5Go1EGyNImudtV1NJK8,869 +newsletter/locale/fa/LC_MESSAGES/djangojs.po,sha256=VDgOBTDk2f0mF7LliDVzEI998OofHqgojKZGr8UaRcM,1137 +newsletter/locale/fr/LC_MESSAGES/django.mo,sha256=HfghN3mXZa3vaOKABWieJyKapL6L-1OCvHP4Vnx78mg,16535 +newsletter/locale/fr/LC_MESSAGES/django.po,sha256=AIqi9plgAxocHmXT3OtKgaZruyi6xLoM7XTxgRED9q8,26269 +newsletter/locale/fr/LC_MESSAGES/djangojs.mo,sha256=ZPGLJf4w5JFezNz0yg0paCKilOxfjfcsYA-OF4UDMdk,835 
+newsletter/locale/fr/LC_MESSAGES/djangojs.po,sha256=t6VHraruQ44GKR0rsAoksgNmybDdq5gHjucR2BW2ZjA,1106 +newsletter/locale/is_IS/LC_MESSAGES/django.mo,sha256=TjtkZBNGBVd0s-rGpzbUbtzlclylA2kVMdQxpFAXKBo,13686 +newsletter/locale/is_IS/LC_MESSAGES/django.po,sha256=KixmHIxUGm-xQpZgWk_oLK7Hp2TKvvGIgJsGwAv0q-k,22544 +newsletter/locale/is_IS/LC_MESSAGES/djangojs.mo,sha256=iisfU7K4UV1UbXMIZ1spm50roPBTAe16j8yEnmSdt4w,888 +newsletter/locale/is_IS/LC_MESSAGES/djangojs.po,sha256=RY0k5mdEQReVCxQ3d1Ks0y4dtbw2GYSBEVGbw4FCVFc,1154 +newsletter/locale/it/LC_MESSAGES/django.mo,sha256=6d-jX9qF9KlsX3YMgsgx1NkTdsV4UTeSLF5qNRn0nzI,14304 +newsletter/locale/it/LC_MESSAGES/django.po,sha256=Bo1RSx9hSy4FkfFwJEtTBLpVgoSGkd9huLtepkZaG2M,23168 +newsletter/locale/it/LC_MESSAGES/djangojs.mo,sha256=k_vidANJ3hJDizmIuLpUkPmUps75R7nJFZP3Y-50XGQ,830 +newsletter/locale/it/LC_MESSAGES/djangojs.po,sha256=EIm8zQuyLDfJzkFnK80KGp7m8VU4AjMyJi0VaprMsD4,1104 +newsletter/locale/nl/LC_MESSAGES/django.mo,sha256=4SgnMYl096UWvzBaBqi2Ahkp1dJTvdyYzVEBqbelSOs,13736 +newsletter/locale/nl/LC_MESSAGES/django.po,sha256=pHfdakhAa8amyDvTU_1jN_U1HiVWIcZVW2euSmgfURQ,22208 +newsletter/locale/nl/LC_MESSAGES/djangojs.mo,sha256=M8rB1kQmBIU8xDMqM3xjMD8NWSi1t0vTYOWaw9oiRy8,810 +newsletter/locale/nl/LC_MESSAGES/djangojs.po,sha256=SyrPtlNf6Vb7X3k9qCChl5TV5WkGS7SYwwK2x5RoRgM,1038 +newsletter/locale/pl/LC_MESSAGES/django.mo,sha256=kIZylwTLb8qMAxivhunvGjF3jazG2XcgDHufDRtuX34,14442 +newsletter/locale/pl/LC_MESSAGES/django.po,sha256=kB2WQ_EgQV9X7nWuMHa043b2K2jXmvMVAPRz6ANcLX8,23418 +newsletter/locale/pl/LC_MESSAGES/djangojs.mo,sha256=zsv-9s4q6ANF5e5I81KG2bkTsL_6DJ177slbc-uYhEw,857 +newsletter/locale/pl/LC_MESSAGES/djangojs.po,sha256=QRMlxxCjUeSwRZXGHwHVXaW4grFvqnCvYXTPNJ3rFcI,1123 +newsletter/locale/pt_BR/LC_MESSAGES/django.mo,sha256=0Z1PdM8OGg14_Tr7nEpT2uOSqhI210z5Kn5OFr4wEjU,14233 +newsletter/locale/pt_BR/LC_MESSAGES/django.po,sha256=aMyffqjzdPtkNT5k8AQLnAkJpBMlS-mKIIe7ah8eJx8,23366 
+newsletter/locale/pt_BR/LC_MESSAGES/djangojs.mo,sha256=n-7PjEtGNV03lWanA496nh6Qcv9FLtiKvdXc4q4mL24,812 +newsletter/locale/pt_BR/LC_MESSAGES/djangojs.po,sha256=KH2Pi1i1QBwXFvp205cVO7sf2w0o3fA4bed56Aaxo90,1076 +newsletter/locale/pt_PT/LC_MESSAGES/djangojs.mo,sha256=44X_sE0AAPo7BG_SYN4cZZ-w9xhPmKZu71a5n0gGj0w,808 +newsletter/locale/pt_PT/LC_MESSAGES/djangojs.po,sha256=ax2eySeV02DCKP6WH4CExOIuIoAOEccwKNzgnC7pbR0,1078 +newsletter/locale/ru/LC_MESSAGES/django.mo,sha256=cDYBPbPL2jICtN9O_6nAXmAKTL21gw5avvsOudY-IMA,18264 +newsletter/locale/ru/LC_MESSAGES/django.po,sha256=QKP9gK4ETtMRZM_khqFAXWCN557t6yEyqD59CBF5KHQ,27214 +newsletter/locale/ru/LC_MESSAGES/djangojs.mo,sha256=vEiRE-kK2rMI-pRCwjHZ1cif8X4IDmdBzTuuiXrAakY,1047 +newsletter/locale/ru/LC_MESSAGES/djangojs.po,sha256=Xp_W3kF1LUFzo4D1YkoHpb18Q00g7Ds8FvOGXg-Xs30,1319 +newsletter/locale/zh_CN/LC_MESSAGES/djangojs.mo,sha256=2H3OwL3MQN9WrbIqXZOWjUaKiizYl7sDU6ooH9DFfo8,779 +newsletter/locale/zh_CN/LC_MESSAGES/djangojs.po,sha256=hYETzYnxA2xzuGmxGUeq9p-F0oOkmaFP7c94zYI2cUg,1058 +newsletter/management/__init__.py,sha256=EQCW7xWgrl-ghnBttJ48lOX4Cl0EtQraXUOHT6yFIt0,68 +newsletter/management/__pycache__/__init__.cpython-36.pyc,, +newsletter/management/commands/__init__.py,sha256=EQCW7xWgrl-ghnBttJ48lOX4Cl0EtQraXUOHT6yFIt0,68 +newsletter/management/commands/__pycache__/__init__.cpython-36.pyc,, +newsletter/management/commands/__pycache__/submit_newsletter.cpython-36.pyc,, +newsletter/management/commands/submit_newsletter.py,sha256=JlvKND3Av-Il2hbo5TzhX_vJVNZfgmlwClCYmKwLhkc,962 +newsletter/migrations/0001_initial.py,sha256=bRrCuQ71rfOE-MbCrZHhuP_usmIsZMLsVVSXj8R_oOE,7580 +newsletter/migrations/0002_auto_20150416_1555.py,sha256=Hf102rjsPxdtIvaWWPrQ2EB0YKhvkNRxWMm6fBUj2Ug,1458 +newsletter/migrations/0003_auto_20160226_1518.py,sha256=9x7Ank_qZDl1fm2dggwBFzutBt4ou_TA03EnToiKKOo,1233 +newsletter/migrations/0004_auto_20180407_1043.py,sha256=WB0a9KlRpEozj2f_JpHijaz4gw3WpLhaPKqaYfHwM30,660 
+newsletter/migrations/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +newsletter/migrations/__pycache__/0001_initial.cpython-36.pyc,, +newsletter/migrations/__pycache__/0002_auto_20150416_1555.cpython-36.pyc,, +newsletter/migrations/__pycache__/0003_auto_20160226_1518.cpython-36.pyc,, +newsletter/migrations/__pycache__/0004_auto_20180407_1043.cpython-36.pyc,, +newsletter/migrations/__pycache__/__init__.cpython-36.pyc,, +newsletter/models.py,sha256=Sq8Pl_MxstQVvWEBUXyCkO9g5orBwM_ZyzoDdMFZ-BA,23358 +newsletter/settings.py,sha256=EXs5CwJ0Qc_BWhTfTS0Ezf2X_iaVdP64ElS1NuGUQ08,3167 +newsletter/static/newsletter/admin/img/icon-no.gif,sha256=ddvoxUxExaSonlRzF3qGhIGAq8Bri-sTTVFVXIakqAI,176 +newsletter/static/newsletter/admin/img/icon-yes.gif,sha256=K6yiGeSx9Hbslx9UUpnkEd-uieFWjYc0nbEbv3mva-s,299 +newsletter/static/newsletter/admin/img/submitting.gif,sha256=l4ieGXLesDwk0_T4SmcZIz76l3MLb9fdxxi9bljmxwA,1100 +newsletter/static/newsletter/admin/img/waiting.gif,sha256=dRTCs80_8mVtAiBVWyeO5HPkQfnuLTZCfG-WuRjSx48,100 +newsletter/static/newsletter/admin/js/submit_interface.js,sha256=Fw5ncjsLFxDBp72qdSMHhYg20HLIBdqDOvaLIkrgYu0,889 +newsletter/static/newsletter/admin/js/subscriber_lookup.js,sha256=rNeW4AbUy6lb4BCQCKY0B8__DZ5dd3Fedn5VRZIfHyI,1336 +newsletter/templates/admin/newsletter/message/change_form.html,sha256=ajsDbxCJCkVlY4ThQWaeDvHCfW2qNfhxJs216L4UeO8,371 +newsletter/templates/admin/newsletter/message/preview.html,sha256=8B1yGUwgNBhsFItSVEu131U0EphDI0Bj6EiwdXp8ns8,1198 +newsletter/templates/admin/newsletter/submission/change_form.html,sha256=DfLsIy8RSh_IU5k331zV6K5CfSUCORQlKpDjSO_0VVo,890 +newsletter/templates/admin/newsletter/subscription/change_list.html,sha256=b3x0jgbXCfkJRmDm5lGwA6qp2043kKNRa9rj6P0t_lw,218 +newsletter/templates/admin/newsletter/subscription/confirmimportform.html,sha256=zuv83tYKYLnO50Ec-vTI2KI-GtsP4diqi5Bsy5IbgEk,1017 +newsletter/templates/admin/newsletter/subscription/importform.html,sha256=eBWxzrCMOskrSHHfyguVVSZBWMtOXq2Y0lDgCWd2zmo,938 
+newsletter/templates/newsletter/common.html,sha256=VBBgdLdgfOTcWQV-kNOsoc9JoaKKaDo_e-srcFyjWzE,320 +newsletter/templates/newsletter/message/message.html,sha256=MPgYDhnXcBXKKo1FmDru2J9Ygdyg7kZggWoZUWcjAx4,1110 +newsletter/templates/newsletter/message/message.txt,sha256=gBdDCZhz4VJYsaVZSSrSsZnbhBu8xlWy6r_gVnJjRc8,355 +newsletter/templates/newsletter/message/message_subject.txt,sha256=h6NNCyO0gSyJ72UPckSDwvOwxUbRRXDD0aGdPy6tbfc,44 +newsletter/templates/newsletter/message/subscribe.html,sha256=uRPnnKxxSJem7L-wE6eYon9idoUNxx_sbPTqsiOIz6A,601 +newsletter/templates/newsletter/message/subscribe.txt,sha256=Kc0MjFrR8M0DF76LT8weI-uj6fv8dZ5ULn5rJcwoiBU,410 +newsletter/templates/newsletter/message/subscribe_subject.txt,sha256=0klF5jaOymA7LSJUbUW3xNxsq09XSEKnd9qqqBB_sgA,75 +newsletter/templates/newsletter/message/unsubscribe.html,sha256=eDGwozR_QUWS4XiGpB9JlXR-drbr-3HBkVfOjKCGl4c,610 +newsletter/templates/newsletter/message/unsubscribe.txt,sha256=mlgJnaPzxdp_Y7MaaceCTrueWXGExX-boUc7utUjzyM,416 +newsletter/templates/newsletter/message/unsubscribe_subject.txt,sha256=I0LyRyS0tIU622UCcgKug4XCZgYqBoKZuaOGMcJJ9AY,77 +newsletter/templates/newsletter/message/update.html,sha256=9MYpInDogaSH9o2UJcmIq19F_C3C3V-PlADLJLTmwHM,633 +newsletter/templates/newsletter/message/update.txt,sha256=BcDoCqPYktLtwiDB33UnllqtRevYcakWvY484YWN62g,433 +newsletter/templates/newsletter/message/update_subject.txt,sha256=RqUWWFfv3lA4xyLd3Ad5lt629Ws_VRlRVgFbAYYcmBo,73 +newsletter/templates/newsletter/newsletter_detail.html,sha256=c3_F1bbTN1P4LIfHdQMSpR56CTjfiinhjdWHxhjD9pE,891 +newsletter/templates/newsletter/newsletter_list.html,sha256=N7tCpzTmzqU7n7R9XOZDJaimur01PXZVjOZLYGlS5-A,1108 +newsletter/templates/newsletter/submission_archive.html,sha256=2efSyl07Ti-qw7qljmKbQY76UnipIRMjr_2tYRDbGIo,508 +newsletter/templates/newsletter/subscription_activate.html,sha256=YXyHMcQjJ5W3xt1ipM469hOMHKYDmxlZWKV9k2qggFg,540 
+newsletter/templates/newsletter/subscription_subscribe.html,sha256=F9WIYChekMiOLwKJBbtvcfMFeFUNONy2rlALAlPmMl4,1002 +newsletter/templates/newsletter/subscription_subscribe_activated.html,sha256=lOJCAvqhwzxuAtyR3OwDJSogTwEwrcAAn-vT28mDPKY,390 +newsletter/templates/newsletter/subscription_subscribe_email_sent.html,sha256=WfWVSURVRC87oYwckRVE1-3CjLuB-YjQPPbagIPqqvc,462 +newsletter/templates/newsletter/subscription_subscribe_user.html,sha256=_XMpEL_ZvuFeI9SkaU63hUkspzsrYBZbXXlG9pudGM8,695 +newsletter/templates/newsletter/subscription_unsubscribe.html,sha256=tXnNIpZQ7EMWThwhwZLIf9QXq1ch253rQ-WGZd9hhxQ,1008 +newsletter/templates/newsletter/subscription_unsubscribe_activated.html,sha256=VTnq_9dF-2Y6d4d2L-66X4b9rvm7VlY49JX5KO5uaMs,380 +newsletter/templates/newsletter/subscription_unsubscribe_email_sent.html,sha256=prLMe-qJIS5UDSk60fzgTefe_s5O7gJ7MB6RZ8DM1cU,430 +newsletter/templates/newsletter/subscription_unsubscribe_user.html,sha256=twAUEiVVDIB8TzPc37I0x8lCsIwDoEqUrd58_PeKDbg,709 +newsletter/templates/newsletter/subscription_update.html,sha256=Sl84sJQ_62yt1oUOkDGUhtelPT-c6taMEKJC7Dlv30g,1006 +newsletter/templates/newsletter/subscription_update_activated.html,sha256=qu5rCMnxdYsuuS6mGEa1AvaC5AuoflQjwhcYXqogL5Y,388 +newsletter/templates/newsletter/subscription_update_email_sent.html,sha256=9bFJK95MOtck6d_d0cSeb_iVzkqWScUuY6AV62OCpiI,448 +newsletter/templates/widget/image.html,sha256=QKPjKYoSg2-JdFtK0HHV6cZjMymXHvS54U_e35Ee79A,372 +newsletter/urls.py,sha256=6jdIxlnEW2aiIk-qcSb6hy1dlVSJWxwWIpuppfGAWC8,2777 +newsletter/utils.py,sha256=x3en3ZCc8XPUbYdawpRIFiDqv8-nlrW9Q2HiRAJKZNY,1025 +newsletter/validators.py,sha256=RwqmfxbmyRf0qRWmQcd-p5vtM-gdC-Oz8HLB2badHoQ,641 +newsletter/views.py,sha256=AHl7uQHZRFWB09mAftaqCg6GKDmlbjdrIjXN0GedWCo,19164 diff --git a/thesisenv/lib/python3.6/site-packages/django_newsletter-0.7.dist-info/WHEEL b/thesisenv/lib/python3.6/site-packages/django_newsletter-0.7.dist-info/WHEEL new file mode 100644 index 0000000..1316c41 --- /dev/null +++ 
b/thesisenv/lib/python3.6/site-packages/django_newsletter-0.7.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.31.1) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/thesisenv/lib/python3.6/site-packages/django_newsletter-0.7.dist-info/pbr.json b/thesisenv/lib/python3.6/site-packages/django_newsletter-0.7.dist-info/pbr.json new file mode 100644 index 0000000..04d020c --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/django_newsletter-0.7.dist-info/pbr.json @@ -0,0 +1 @@ +{"is_release": false, "git_version": "0b4c953"} \ No newline at end of file diff --git a/thesisenv/lib/python3.6/site-packages/django_newsletter-0.7.dist-info/top_level.txt b/thesisenv/lib/python3.6/site-packages/django_newsletter-0.7.dist-info/top_level.txt new file mode 100644 index 0000000..c20ef30 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/django_newsletter-0.7.dist-info/top_level.txt @@ -0,0 +1 @@ +newsletter diff --git a/thesisenv/lib/python3.6/site-packages/ldif3-3.1.1-py3.6.egg-info/PKG-INFO b/thesisenv/lib/python3.6/site-packages/ldif3-3.1.1-py3.6.egg-info/PKG-INFO new file mode 100644 index 0000000..7b9226f --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ldif3-3.1.1-py3.6.egg-info/PKG-INFO @@ -0,0 +1,59 @@ +Metadata-Version: 1.1 +Name: ldif3 +Version: 3.1.1 +Summary: generate and parse LDIF data (see RFC 2849). +Home-page: https://github.com/xi/ldif3 +Author: Tobias Bengfort +Author-email: tobias.bengfort@posteo.de +License: BSD +Description: ldif3 - generate and parse LDIF data (see `RFC 2849`_). + + This is a fork of the ``ldif`` module from `python-ldap`_ with python3/unicode + support. See the first entry in CHANGES.rst for a more complete list of + differences. 
+ + Usage + ----- + + Parse LDIF from a file (or ``BytesIO``):: + + from ldif3 import LDIFParser + from pprint import pprint + + parser = LDIFParser(open('data.ldif', 'rb')) + for dn, entry in parser.parse(): + print('got entry record: %s' % dn) + pprint(record) + + + Write LDIF to a file (or ``BytesIO``):: + + from ldif3 import LDIFWriter + + writer = LDIFWriter(open('data.ldif', 'wb')) + writer.unparse('mail=alice@example.com', { + 'cn': ['Alice Alison'], + 'mail': ['alice@example.com'], + 'objectclass': ['top', 'person'], + }) + + Unicode support + --------------- + + The stream object that is passed to parser or writer must be a byte + stream. It must use UTF-8 encoding as described in the spec. + + The parsed objects (``dn`` and the keys and values of ``record``) on the + other hand are unicode strings. + + + .. _RFC 2849: https://tools.ietf.org/html/rfc2849 + .. _python-ldap: http://www.python-ldap.org/ + +Platform: UNKNOWN +Classifier: Development Status :: 4 - Beta +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: License :: OSI Approved :: BSD License +Classifier: Intended Audience :: Developers +Classifier: Topic :: System :: Systems Administration :: Authentication/Directory :: LDAP diff --git a/thesisenv/lib/python3.6/site-packages/ldif3-3.1.1-py3.6.egg-info/SOURCES.txt b/thesisenv/lib/python3.6/site-packages/ldif3-3.1.1-py3.6.egg-info/SOURCES.txt new file mode 100644 index 0000000..0e1cad1 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ldif3-3.1.1-py3.6.egg-info/SOURCES.txt @@ -0,0 +1,13 @@ +CHANGES.rst +MANIFEST.in +README.rst +ldif3.py +setup.cfg +setup.py +docs/Makefile +docs/conf.py +docs/index.rst +ldif3.egg-info/PKG-INFO +ldif3.egg-info/SOURCES.txt +ldif3.egg-info/dependency_links.txt +ldif3.egg-info/top_level.txt \ No newline at end of file diff --git a/thesisenv/lib/python3.6/site-packages/ldif3-3.1.1-py3.6.egg-info/dependency_links.txt 
b/thesisenv/lib/python3.6/site-packages/ldif3-3.1.1-py3.6.egg-info/dependency_links.txt new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ldif3-3.1.1-py3.6.egg-info/dependency_links.txt @@ -0,0 +1 @@ + diff --git a/thesisenv/lib/python3.6/site-packages/ldif3-3.1.1-py3.6.egg-info/installed-files.txt b/thesisenv/lib/python3.6/site-packages/ldif3-3.1.1-py3.6.egg-info/installed-files.txt new file mode 100644 index 0000000..7086a43 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ldif3-3.1.1-py3.6.egg-info/installed-files.txt @@ -0,0 +1,6 @@ +../__pycache__/ldif3.cpython-36.pyc +../ldif3.py +PKG-INFO +SOURCES.txt +dependency_links.txt +top_level.txt diff --git a/thesisenv/lib/python3.6/site-packages/ldif3-3.1.1-py3.6.egg-info/top_level.txt b/thesisenv/lib/python3.6/site-packages/ldif3-3.1.1-py3.6.egg-info/top_level.txt new file mode 100644 index 0000000..6cdc231 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ldif3-3.1.1-py3.6.egg-info/top_level.txt @@ -0,0 +1 @@ +ldif3 diff --git a/thesisenv/lib/python3.6/site-packages/ldif3.py b/thesisenv/lib/python3.6/site-packages/ldif3.py new file mode 100644 index 0000000..1789896 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/ldif3.py @@ -0,0 +1,341 @@ +"""ldif3 - generate and parse LDIF data (see RFC 2849).""" + +from __future__ import unicode_literals + +__version__ = '3.1.1' + +__all__ = [ + # constants + 'LDIF_PATTERN', + # classes + 'LDIFWriter', + 'LDIFParser', +] + +import base64 +import re +import logging +from collections import OrderedDict + +try: # pragma: nocover + from urlparse import urlparse + from urllib import urlopen +except ImportError: # pragma: nocover + from urllib.parse import urlparse + from urllib.request import urlopen + +log = logging.getLogger('ldif3') + +ATTRTYPE_PATTERN = r'[\w;.-]+(;[\w_-]+)*' +ATTRVALUE_PATTERN = r'(([^,]|\\,)+|".*?")' +ATTR_PATTERN = ATTRTYPE_PATTERN + r'[ ]*=[ ]*' + ATTRVALUE_PATTERN +RDN_PATTERN = 
ATTR_PATTERN + r'([ ]*\+[ ]*' + ATTR_PATTERN + r')*[ ]*' +DN_PATTERN = RDN_PATTERN + r'([ ]*,[ ]*' + RDN_PATTERN + r')*[ ]*' +DN_REGEX = re.compile('^%s$' % DN_PATTERN) + +LDIF_PATTERN = ('^((dn(:|::) %(DN_PATTERN)s)|(%(ATTRTYPE_PATTERN)' + 's(:|::) .*)$)+' % vars()) + +MOD_OPS = ['add', 'delete', 'replace'] +CHANGE_TYPES = ['add', 'delete', 'modify', 'modrdn'] + + +def is_dn(s): + """Return True if s is a LDAP DN.""" + if s == '': + return True + rm = DN_REGEX.match(s) + return rm is not None and rm.group(0) == s + + +UNSAFE_STRING_PATTERN = '(^[ :<]|[\000\n\r\200-\377])' +UNSAFE_STRING_RE = re.compile(UNSAFE_STRING_PATTERN) + + +def lower(l): + """Return a list with the lowercased items of l.""" + return [i.lower() for i in l or []] + + +class LDIFWriter(object): + """Write LDIF entry or change records to file object. + + :type output_file: file-like object in binary mode + :param output_file: File for output + + :type base64_attrs: List[string] + :param base64_attrs: List of attribute types to be base64-encoded in any + case + + :type cols: int + :param cols: Specifies how many columns a line may have before it is + folded into many lines + + :type line_sep: bytearray + :param line_sep: line separator + """ + + def __init__( + self, output_file, base64_attrs=[], cols=76, line_sep=b'\n'): + self._output_file = output_file + self._base64_attrs = lower(base64_attrs) + self._cols = cols + self._line_sep = line_sep + + self.records_written = 0 #: number of records that have been written + + def _fold_line(self, line): + """Write string line as one or more folded lines.""" + if len(line) <= self._cols: + self._output_file.write(line) + self._output_file.write(self._line_sep) + else: + pos = self._cols + self._output_file.write(line[0:self._cols]) + self._output_file.write(self._line_sep) + while pos < len(line): + self._output_file.write(b' ') + end = min(len(line), pos + self._cols - 1) + self._output_file.write(line[pos:end]) + 
self._output_file.write(self._line_sep) + pos = end + + def _needs_base64_encoding(self, attr_type, attr_value): + """Return True if attr_value has to be base-64 encoded. + + This is the case because of special chars or because attr_type is in + self._base64_attrs + """ + return attr_type.lower() in self._base64_attrs or \ + UNSAFE_STRING_RE.search(attr_value) is not None + + def _unparse_attr(self, attr_type, attr_value): + """Write a single attribute type/value pair.""" + if self._needs_base64_encoding(attr_type, attr_value): + encoded = base64.encodestring(attr_value.encode('utf8'))\ + .replace(b'\n', b'')\ + .decode('utf8') + line = ':: '.join([attr_type, encoded]) + else: + line = ': '.join([attr_type, attr_value]) + self._fold_line(line.encode('utf8')) + + def _unparse_entry_record(self, entry): + """ + :type entry: Dict[string, List[string]] + :param entry: Dictionary holding an entry + """ + for attr_type in sorted(entry.keys()): + for attr_value in entry[attr_type]: + self._unparse_attr(attr_type, attr_value) + + def _unparse_changetype(self, mod_len): + """Detect and write the changetype.""" + if mod_len == 2: + changetype = 'add' + elif mod_len == 3: + changetype = 'modify' + else: + raise ValueError("modlist item of wrong length") + + self._unparse_attr('changetype', changetype) + + def _unparse_change_record(self, modlist): + """ + :type modlist: List[Tuple] + :param modlist: List of additions (2-tuple) or modifications (3-tuple) + """ + mod_len = len(modlist[0]) + self._unparse_changetype(mod_len) + + for mod in modlist: + if len(mod) != mod_len: + raise ValueError("Subsequent modlist item of wrong length") + + if mod_len == 2: + mod_type, mod_vals = mod + elif mod_len == 3: + mod_op, mod_type, mod_vals = mod + self._unparse_attr(MOD_OPS[mod_op], mod_type) + + for mod_val in mod_vals: + self._unparse_attr(mod_type, mod_val) + + if mod_len == 3: + self._output_file.write(b'-' + self._line_sep) + + def unparse(self, dn, record): + """Write an entry or 
change record to the output file. + + :type dn: string + :param dn: distinguished name + + :type record: Union[Dict[string, List[string]], List[Tuple]] + :param record: Either a dictionary holding an entry or a list of + additions (2-tuple) or modifications (3-tuple). + """ + self._unparse_attr('dn', dn) + if isinstance(record, dict): + self._unparse_entry_record(record) + elif isinstance(record, list): + self._unparse_change_record(record) + else: + raise ValueError("Argument record must be dictionary or list") + self._output_file.write(self._line_sep) + self.records_written += 1 + + +class LDIFParser(object): + """Read LDIF entry or change records from file object. + + :type input_file: file-like object in binary mode + :param input_file: file to read the LDIF input from + + :type ignored_attr_types: List[string] + :param ignored_attr_types: List of attribute types that will be ignored + + :type process_url_schemes: List[bytearray] + :param process_url_schemes: List of URL schemes to process with urllib. + An empty list turns off all URL processing and the attribute is + ignored completely. + + :type line_sep: bytearray + :param line_sep: line separator + + :type strict: boolean + :param strict: If set to ``False``, recoverable parse errors will produce + log warnings rather than exceptions. 
+ """ + + def _strip_line_sep(self, s): + """Strip trailing line separators from s, but no other whitespaces.""" + if s[-2:] == b'\r\n': + return s[:-2] + elif s[-1:] == b'\n': + return s[:-1] + else: + return s + + def __init__( + self, + input_file, + ignored_attr_types=[], + process_url_schemes=[], + line_sep=b'\n', + strict=True): + self._input_file = input_file + self._process_url_schemes = lower(process_url_schemes) + self._ignored_attr_types = lower(ignored_attr_types) + self._line_sep = line_sep + self._strict = strict + + self.line_counter = 0 #: number of lines that have been read + self.byte_counter = 0 #: number of bytes that have been read + self.records_read = 0 #: number of records that have been read + + def _iter_unfolded_lines(self): + """Iter input unfoled lines. Skip comments.""" + line = self._input_file.readline() + while line: + self.line_counter += 1 + self.byte_counter += len(line) + + line = self._strip_line_sep(line) + + nextline = self._input_file.readline() + while nextline and nextline[:1] == b' ': + line += self._strip_line_sep(nextline)[1:] + nextline = self._input_file.readline() + + if not line.startswith(b'#'): + yield line + line = nextline + + def _iter_blocks(self): + """Iter input lines in blocks separated by blank lines.""" + lines = [] + for line in self._iter_unfolded_lines(): + if line: + lines.append(line) + else: + self.records_read += 1 + yield lines + lines = [] + if lines: + self.records_read += 1 + yield lines + + def _parse_attr(self, line): + """Parse a single attribute type/value pair.""" + colon_pos = line.index(b':') + attr_type = line[0:colon_pos] + if line[colon_pos:].startswith(b'::'): + attr_value = base64.decodestring(line[colon_pos + 2:]) + elif line[colon_pos:].startswith(b':<'): + url = line[colon_pos + 2:].strip() + attr_value = b'' + if self._process_url_schemes: + u = urlparse(url) + if u[0] in self._process_url_schemes: + attr_value = urlopen(url.decode('ascii')).read() + else: + attr_value = 
line[colon_pos + 1:].strip() + return attr_type.decode('utf8'), attr_value.decode('utf8') + + def _error(self, msg): + if self._strict: + raise ValueError(msg) + else: + log.warning(msg) + + def _check_dn(self, dn, attr_value): + """Check dn attribute for issues.""" + if dn is not None: + self._error('Two lines starting with dn: in one record.') + if not is_dn(attr_value): + self._error('No valid string-representation of ' + 'distinguished name %s.' % attr_value) + + def _check_changetype(self, dn, changetype, attr_value): + """Check changetype attribute for issues.""" + if dn is None: + self._error('Read changetype: before getting valid dn: line.') + if changetype is not None: + self._error('Two lines starting with changetype: in one record.') + if attr_value not in CHANGE_TYPES: + self._error('changetype value %s is invalid.' % attr_value) + + def _parse_entry_record(self, lines): + """Parse a single entry record from a list of lines.""" + dn = None + entry = OrderedDict() + + for line in lines: + attr_type, attr_value = self._parse_attr(line) + + if attr_type == 'dn': + self._check_dn(dn, attr_value) + dn = attr_value + elif attr_type == 'version' and dn is None: + pass # version = 1 + else: + if dn is None: + self._error('First line of record does not start ' + 'with "dn:": %s' % attr_type) + if attr_value is not None and \ + attr_type.lower() not in self._ignored_attr_types: + if attr_type in entry: + entry[attr_type].append(attr_value) + else: + entry[attr_type] = [attr_value] + + return dn, entry + + def parse(self): + """Iterate LDIF entry records. 
+ + :rtype: Iterator[Tuple[string, Dict]] + :return: (dn, entry) + """ + for block in self._iter_blocks(): + yield self._parse_entry_record(block) diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/__init__.py b/thesisenv/lib/python3.6/site-packages/newsletter/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/addressimport/__init__.py b/thesisenv/lib/python3.6/site-packages/newsletter/addressimport/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/addressimport/parsers.py b/thesisenv/lib/python3.6/site-packages/newsletter/addressimport/parsers.py new file mode 100644 index 0000000..5bf6adb --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/newsletter/addressimport/parsers.py @@ -0,0 +1,351 @@ +import logging +logger = logging.getLogger(__name__) + +import io + +from django import forms +from django.core.exceptions import ValidationError +from django.core.validators import validate_email +from django.utils.translation import ugettext as _ + +from newsletter.models import Subscription + + +class AddressList(object): + """ List with unique addresses. """ + + def __init__(self, newsletter, ignore_errors=False): + self.newsletter = newsletter + self.ignore_errors = ignore_errors + self.addresses = {} + + def add(self, email, name=None, location='unknown location'): + """ Add name to list. """ + + logger.debug("Going to add %s <%s>", name, email) + + name = check_name(name, self.ignore_errors) + email = check_email(email, self.ignore_errors) + + try: + validate_email(email) + except ValidationError: + logger.warning( + "Entry '%s' does not contain a valid e-mail address at %s." 
+ % (email, location) + ) + + if not self.ignore_errors: + raise forms.ValidationError(_( + "Entry '%s' does not contain a valid " + "e-mail address.") % name + ) + + # Skip this entry + return + + if email in self.addresses: + logger.warning( + "Entry '%s' contains a duplicate entry at %s." + % (email, location) + ) + + if not self.ignore_errors: + raise forms.ValidationError(_( + "The address file contains duplicate entries " + "for '%s'.") % email) + + # Skip this entry + return + + if subscription_exists(self.newsletter, email, name): + logger.warning( + "Entry '%s' is already subscribed to at %s." + % (email, location) + ) + + if not self.ignore_errors: + raise forms.ValidationError( + _("Some entries are already subscribed to.")) + + # Skip this entry + return + + self.addresses[email] = name + + +def subscription_exists(newsletter, email, name=None): + """ + Return wheter or not a subscription exists. + """ + qs = Subscription.objects.filter( + newsletter__id=newsletter.id, + subscribed=True, + email_field__exact=email) + + return qs.exists() + + +def check_email(email, ignore_errors=False): + """ + Check (length of) email address. + + TODO: Update this to perform full validation on email. + """ + + logger.debug("Checking e-mail address %s", email) + + email_length = Subscription._meta.get_field('email_field').max_length + + # Get rid of leading/trailing spaces + email = email.strip() + + if len(email) <= email_length or ignore_errors: + return email[:email_length] + else: + raise forms.ValidationError( + _( + "E-mail address %(email)s too long, maximum length is " + "%(email_length)s characters." + ) % { + "email": email, + "email_length": email_length + } + ) + + +def check_name(name, ignore_errors=False): + """ + Check (length of) name. + + TODO: Update this to perform full validation on name. 
+ """ + logger.debug("Checking name: %s", name) + + name_length = Subscription._meta.get_field('name_field').max_length + + # Get rid of leading/trailing spaces + name = name.strip() + + if len(name) <= name_length or ignore_errors: + return name[:name_length] + else: + raise forms.ValidationError( + _( + "Name %(name)s too long, maximum length is " + "%(name_length)s characters." + ) % { + "name": name, + "name_length": name_length + } + ) + + +def get_encoding(myfile): + """ Returns encoding of file, rewinding the file after detection. """ + + # Detect encoding + from chardet.universaldetector import UniversalDetector + + detector = UniversalDetector() + + for line in myfile.readlines(): + detector.feed(line) + if detector.done: + break + + detector.close() + encoding = detector.result['encoding'] + + # Reset the file index + myfile.seek(0) + + return encoding + + +def parse_csv(myfile, newsletter, ignore_errors=False): + """ + Parse addresses from CSV file-object into newsletter. + + Returns a dictionary mapping email addresses into Subscription objects. + """ + + import unicodecsv + + encoding = get_encoding(myfile) + + # Attempt to detect the dialect + # Ref: https://bugs.python.org/issue5332 + encodedfile = io.TextIOWrapper(myfile, encoding=encoding, newline='') + dialect = unicodecsv.Sniffer().sniff(encodedfile.read(1024)) + + # Reset the file index + myfile.seek(0) + + logger.info('Detected encoding %s and dialect %s for CSV file', + encoding, dialect) + + myreader = unicodecsv.reader(myfile, dialect=dialect, encoding=encoding) + + firstrow = next(myreader) + + # Find name column + colnum = 0 + namecol = None + for column in firstrow: + if "name" in column.lower() or _("name") in column.lower(): + namecol = colnum + + if "display" in column.lower() or \ + _("display") in column.lower(): + break + + colnum += 1 + + if namecol is None: + raise forms.ValidationError(_( + "Name column not found. 
The name of this column should be " + "either 'name' or '%s'.") % _("name") + ) + + logger.debug("Name column found: '%s'", firstrow[namecol]) + + # Find email column + colnum = 0 + mailcol = None + for column in firstrow: + if 'email' in column.lower() or \ + 'e-mail' in column.lower() or \ + _("e-mail") in column.lower(): + + mailcol = colnum + + break + + colnum += 1 + + if mailcol is None: + raise forms.ValidationError(_( + "E-mail column not found. The name of this column should be " + "either 'email', 'e-mail' or '%(email)s'.") % + {'email': _("e-mail")} + ) + + logger.debug("E-mail column found: '%s'", firstrow[mailcol]) + + if namecol == mailcol: + raise forms.ValidationError( + _( + "Could not properly determine the proper columns in the " + "CSV-file. There should be a field called 'name' or " + "'%(name)s' and one called 'e-mail' or '%(e-mail)s'." + ) % { + "name": _("name"), + "e-mail": _("e-mail") + } + ) + + logger.debug('Extracting data.') + + address_list = AddressList(newsletter, ignore_errors) + + for row in myreader: + if not max(namecol, mailcol) < len(row): + logger.warning( + "Column count does not match for row number %d", + myreader.line_num, extra=dict(data={'row': row}) + ) + + if ignore_errors: + # Skip this record + continue + else: + raise forms.ValidationError(_( + "Row with content '%(row)s' does not contain a name and " + "email field.") % {'row': row} + ) + + address_list.add( + row[mailcol], row[namecol], location="line %d" % myreader.line_num + ) + + return address_list.addresses + + +def parse_vcard(myfile, newsletter, ignore_errors=False): + """ + Parse addresses from vCard file-object into newsletter. + + Returns a dictionary mapping email addresses into Subscription objects. 
+ """ + import card_me + + encoding = get_encoding(myfile) + encodedfile = io.TextIOWrapper(myfile, encoding=encoding) + + try: + myvcards = card_me.readComponents(encodedfile) + except card_me.VObjectError as e: + raise forms.ValidationError( + _(u"Error reading vCard file: %s" % e) + ) + + address_list = AddressList(newsletter, ignore_errors) + + for myvcard in myvcards: + if hasattr(myvcard, 'fn'): + name = myvcard.fn.value + else: + name = None + + # Do we have an email address? + # If not: either continue to the next vcard or raise validation error. + if hasattr(myvcard, 'email'): + email = myvcard.email.value + elif not ignore_errors: + raise forms.ValidationError( + _("Entry '%s' contains no email address.") % name) + else: + continue + + address_list.add(email, name) + + return address_list.addresses + + +def parse_ldif(myfile, newsletter, ignore_errors=False): + """ + Parse addresses from LDIF file-object into newsletter. + + Returns a dictionary mapping email addresses into Subscription objects. 
+ """ + + from ldif3 import LDIFParser + + address_list = AddressList(newsletter, ignore_errors) + + try: + parser = LDIFParser(myfile) + + for dn, entry in parser.parse(): + if 'mail' in entry: + email = entry['mail'][0] + + if 'cn' in entry: + name = entry['cn'][0] + else: + name = None + + address_list.add(email, name) + + elif not ignore_errors: + raise forms.ValidationError( + _("Some entries have no e-mail address.")) + + except ValueError as e: + if not ignore_errors: + raise forms.ValidationError(e) + + return address_list.addresses diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/admin.py b/thesisenv/lib/python3.6/site-packages/newsletter/admin.py new file mode 100644 index 0000000..20157f3 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/newsletter/admin.py @@ -0,0 +1,533 @@ +from __future__ import unicode_literals + +import logging +logger = logging.getLogger(__name__) + +import six + +from django.db import models + +from django.conf import settings +from django.conf.urls import url + +from django.contrib import admin, messages +from django.contrib.sites.models import Site + +from django.core import serializers +from django.core.exceptions import PermissionDenied + +from django.http import HttpResponse, HttpResponseRedirect, Http404 + +from django.shortcuts import render + +from django.utils.html import format_html +from django.utils.translation import ugettext as _, ungettext +from django.utils.formats import date_format + +from django.views.decorators.clickjacking import xframe_options_sameorigin +try: + from django.views.i18n import JavaScriptCatalog + HAS_CBV_JSCAT = True +except ImportError: # Django < 1.10 + from django.views.i18n import javascript_catalog + HAS_CBV_JSCAT = False + +from sorl.thumbnail.admin import AdminImageMixin + +from .models import ( + Newsletter, Subscription, Article, Message, Submission +) + +from django.utils.timezone import now + +from .admin_forms import ( + SubmissionAdminForm, 
SubscriptionAdminForm, ImportForm, ConfirmForm, + ArticleFormSet +) +from .admin_utils import ExtendibleModelAdminMixin, make_subscription + +from .compat import get_context, reverse + +from .settings import newsletter_settings + +# Contsruct URL's for icons +ICON_URLS = { + 'yes': '%snewsletter/admin/img/icon-yes.gif' % settings.STATIC_URL, + 'wait': '%snewsletter/admin/img/waiting.gif' % settings.STATIC_URL, + 'submit': '%snewsletter/admin/img/submitting.gif' % settings.STATIC_URL, + 'no': '%snewsletter/admin/img/icon-no.gif' % settings.STATIC_URL +} + + +class NewsletterAdmin(admin.ModelAdmin): + list_display = ( + 'title', 'admin_subscriptions', 'admin_messages', 'admin_submissions' + ) + prepopulated_fields = {'slug': ('title',)} + + """ List extensions """ + def _admin_url(self, obj, model, text): + url = reverse('admin:%s_%s_changelist' % + (model._meta.app_label, model._meta.model_name), + current_app=self.admin_site.name) + + return format_html( + '{}', url, obj.id, text + ) + + def admin_messages(self, obj): + return self._admin_url(obj, Message, _("Messages")) + admin_messages.short_description = '' + + def admin_subscriptions(self, obj): + return self._admin_url(obj, Subscription, _("Subscriptions")) + admin_subscriptions.short_description = '' + + def admin_submissions(self, obj): + return self._admin_url(obj, Submission, _("Submissions")) + admin_submissions.short_description = '' + + +class NewsletterAdminLinkMixin(object): + def admin_newsletter(self, obj): + opts = Newsletter._meta + newsletter = obj.newsletter + url = reverse('admin:%s_%s_change' % (opts.app_label, opts.model_name), + args=(newsletter.id,), current_app=self.admin_site.name) + + return format_html('{}', url, newsletter) + admin_newsletter.short_description = _('newsletter') + + +class SubmissionAdmin(NewsletterAdminLinkMixin, ExtendibleModelAdminMixin, + admin.ModelAdmin): + form = SubmissionAdminForm + list_display = ( + 'admin_message', 'admin_newsletter', 'admin_publish_date', 
'publish', + 'admin_status_text', 'admin_status' + ) + date_hierarchy = 'publish_date' + list_filter = ('newsletter', 'publish', 'sent') + save_as = True + filter_horizontal = ('subscriptions',) + + """ List extensions """ + def admin_message(self, obj): + return format_html('{}', obj.id, obj.message.title) + admin_message.short_description = _('submission') + + def admin_publish_date(self, obj): + if obj.publish_date: + return date_format(obj.publish_date, 'DATETIME_FORMAT') + else: + return '' + admin_publish_date.short_description = _("publish date") + + def admin_status(self, obj): + if obj.prepared: + if obj.sent: + return format_html( + '{}', + ICON_URLS['yes'], self.admin_status_text(obj) + ) + else: + if obj.publish_date > now(): + return format_html( + '{}', + ICON_URLS['wait'], self.admin_status_text(obj) + ) + else: + return format_html( + '{}', + ICON_URLS['wait'], self.admin_status_text(obj) + ) + else: + return format_html( + '{}', + ICON_URLS['no'], self.admin_status_text(obj) + ) + admin_status.short_description = '' + + def admin_status_text(self, obj): + if obj.prepared: + if obj.sent: + return _("Sent.") + else: + if obj.publish_date > now(): + return _("Delayed submission.") + else: + return _("Submitting.") + else: + return _("Not sent.") + admin_status_text.short_description = _('Status') + + """ Views """ + def submit(self, request, object_id): + submission = self._getobj(request, object_id) + + if submission.sent or submission.prepared: + messages.info(request, _("Submission already sent.")) + change_url = reverse( + 'admin:newsletter_submission_change', args=[object_id] + ) + return HttpResponseRedirect(change_url) + + submission.prepared = True + submission.save() + + messages.info(request, _("Your submission is being sent.")) + + changelist_url = reverse('admin:newsletter_submission_changelist') + return HttpResponseRedirect(changelist_url) + + """ URLs """ + def get_urls(self): + urls = super(SubmissionAdmin, self).get_urls() + + my_urls 
= [ + url( + r'^(.+)/submit/$', + self._wrap(self.submit), + name=self._view_name('submit') + ) + ] + + return my_urls + urls + + +StackedInline = admin.StackedInline +if ( + newsletter_settings.RICHTEXT_WIDGET + and newsletter_settings.RICHTEXT_WIDGET.__name__ == "ImperaviWidget" +): + # Imperavi works a little differently + # It's not just a field, it's also a media class and a method. + # To avoid complications, we reuse ImperaviStackedInlineAdmin + try: + from imperavi.admin import ImperaviStackedInlineAdmin + StackedInline = ImperaviStackedInlineAdmin + except ImportError: + # Log a warning when import fails as to aid debugging. + logger.warning( + 'Error importing ImperaviStackedInlineAdmin. ' + 'Imperavi WYSIWYG text editor might not work.' + ) + + +class ArticleInline(AdminImageMixin, StackedInline): + model = Article + extra = 2 + formset = ArticleFormSet + fieldsets = ( + (None, { + 'fields': ('title', 'text') + }), + (_('Optional'), { + 'fields': ('sortorder', 'url', 'image'), + 'classes': ('collapse',) + }), + ) + + if newsletter_settings.RICHTEXT_WIDGET: + formfield_overrides = { + models.TextField: {'widget': newsletter_settings.RICHTEXT_WIDGET}, + } + + +class MessageAdmin(NewsletterAdminLinkMixin, ExtendibleModelAdminMixin, + admin.ModelAdmin): + save_as = True + list_display = ( + 'admin_title', 'admin_newsletter', 'admin_preview', 'date_create', + 'date_modify' + ) + list_filter = ('newsletter', ) + date_hierarchy = 'date_create' + prepopulated_fields = {'slug': ('title',)} + + inlines = [ArticleInline, ] + + """ List extensions """ + def admin_title(self, obj): + return format_html('{}', obj.id, obj.title) + admin_title.short_description = _('message') + + def admin_preview(self, obj): + url = reverse('admin:' + self._view_name('preview'), args=(obj.id,), + current_app=self.admin_site.name) + return format_html('{}', url, _("Preview")) + admin_preview.short_description = '' + + """ Views """ + def preview(self, request, object_id): + return 
render( + request, + "admin/newsletter/message/preview.html", + {'message': self._getobj(request, object_id)}, + ) + + @xframe_options_sameorigin + def preview_html(self, request, object_id): + message = self._getobj(request, object_id) + + if not message.html_template: + raise Http404(_( + 'No HTML template associated with the newsletter this ' + 'message belongs to.' + )) + + c = get_context({'message': message, + 'site': Site.objects.get_current(), + 'newsletter': message.newsletter, + 'date': now(), + 'STATIC_URL': settings.STATIC_URL, + 'MEDIA_URL': settings.MEDIA_URL}) + + return HttpResponse(message.html_template.render(c)) + + @xframe_options_sameorigin + def preview_text(self, request, object_id): + message = self._getobj(request, object_id) + + c = get_context({ + 'message': message, + 'site': Site.objects.get_current(), + 'newsletter': message.newsletter, + 'date': now(), + 'STATIC_URL': settings.STATIC_URL, + 'MEDIA_URL': settings.MEDIA_URL + }, autoescape=False) + + return HttpResponse( + message.text_template.render(c), + content_type='text/plain' + ) + + def submit(self, request, object_id): + submission = Submission.from_message(self._getobj(request, object_id)) + + change_url = reverse( + 'admin:newsletter_submission_change', args=[submission.id]) + + return HttpResponseRedirect(change_url) + + def subscribers_json(self, request, object_id): + message = self._getobj(request, object_id) + + json = serializers.serialize( + "json", message.newsletter.get_subscriptions(), fields=() + ) + return HttpResponse(json, content_type='application/json') + + """ URLs """ + def get_urls(self): + urls = super(MessageAdmin, self).get_urls() + + my_urls = [ + url(r'^(.+)/preview/$', + self._wrap(self.preview), + name=self._view_name('preview')), + url(r'^(.+)/preview/html/$', + self._wrap(self.preview_html), + name=self._view_name('preview_html')), + url(r'^(.+)/preview/text/$', + self._wrap(self.preview_text), + name=self._view_name('preview_text')), + 
url(r'^(.+)/submit/$', + self._wrap(self.submit), + name=self._view_name('submit')), + url(r'^(.+)/subscribers/json/$', + self._wrap(self.subscribers_json), + name=self._view_name('subscribers_json')), + ] + + return my_urls + urls + + +class SubscriptionAdmin(NewsletterAdminLinkMixin, ExtendibleModelAdminMixin, + admin.ModelAdmin): + form = SubscriptionAdminForm + list_display = ( + 'name', 'email', 'admin_newsletter', 'admin_subscribe_date', + 'admin_unsubscribe_date', 'admin_status_text', 'admin_status' + ) + list_display_links = ('name', 'email') + list_filter = ( + 'newsletter', 'subscribed', 'unsubscribed', 'subscribe_date' + ) + search_fields = ( + 'name_field', 'email_field', 'user__first_name', 'user__last_name', + 'user__email' + ) + readonly_fields = ( + 'ip', 'subscribe_date', 'unsubscribe_date', 'activation_code' + ) + date_hierarchy = 'subscribe_date' + actions = ['make_subscribed', 'make_unsubscribed'] + exclude = ['unsubscribed'] + + """ List extensions """ + def admin_status(self, obj): + img_tag = '{}' + alt_txt = self.admin_status_text(obj) + if obj.unsubscribed: + return format_html(img_tag, ICON_URLS['no'], alt_txt) + + if obj.subscribed: + return format_html(img_tag, ICON_URLS['yes'], alt_txt) + else: + return format_html(img_tag, ICON_URLS['wait'], alt_txt) + admin_status.short_description = '' + + def admin_status_text(self, obj): + if obj.subscribed: + return _("Subscribed") + elif obj.unsubscribed: + return _("Unsubscribed") + else: + return _("Unactivated") + admin_status_text.short_description = _('Status') + + def admin_subscribe_date(self, obj): + if obj.subscribe_date: + return date_format(obj.subscribe_date) + else: + return '' + admin_subscribe_date.short_description = _("subscribe date") + + def admin_unsubscribe_date(self, obj): + if obj.unsubscribe_date: + return date_format(obj.unsubscribe_date) + else: + return '' + admin_unsubscribe_date.short_description = _("unsubscribe date") + + """ Actions """ + def make_subscribed(self, 
request, queryset): + rows_updated = queryset.update(subscribed=True) + self.message_user( + request, + ungettext( + "%d user has been successfully subscribed.", + "%d users have been successfully subscribed.", + rows_updated + ) % rows_updated + ) + make_subscribed.short_description = _("Subscribe selected users") + + def make_unsubscribed(self, request, queryset): + rows_updated = queryset.update(subscribed=False) + self.message_user( + request, + ungettext( + "%d user has been successfully unsubscribed.", + "%d users have been successfully unsubscribed.", + rows_updated + ) % rows_updated + ) + make_unsubscribed.short_description = _("Unsubscribe selected users") + + """ Views """ + def subscribers_import(self, request): + if not request.user.has_perm('newsletter.add_subscription'): + raise PermissionDenied() + if request.POST: + form = ImportForm(request.POST, request.FILES) + if form.is_valid(): + request.session['addresses'] = form.get_addresses() + request.session['newsletter_pk'] = \ + form.cleaned_data['newsletter'].pk + + confirm_url = reverse( + 'admin:newsletter_subscription_import_confirm' + ) + return HttpResponseRedirect(confirm_url) + else: + form = ImportForm() + + return render( + request, + "admin/newsletter/subscription/importform.html", + {'form': form}, + ) + + def subscribers_import_confirm(self, request): + # If no addresses are in the session, start all over. 
+ + if 'addresses' not in request.session: + import_url = reverse('admin:newsletter_subscription_import') + return HttpResponseRedirect(import_url) + + addresses = request.session['addresses'] + newsletter = Newsletter.objects.get( + pk=request.session['newsletter_pk'] + ) + + logger.debug('Confirming addresses: %s', addresses) + + if request.POST: + form = ConfirmForm(request.POST) + if form.is_valid(): + try: + for email, name in six.iteritems(addresses): + address_inst = make_subscription( + newsletter, email, name + ) + address_inst.save() + finally: + del request.session['addresses'] + del request.session['newsletter_pk'] + + messages.success( + request, + ungettext( + "%d subscription has been successfully added.", + "%d subscriptions have been successfully added.", + len(addresses) + ) % len(addresses) + ) + + changelist_url = reverse( + 'admin:newsletter_subscription_changelist' + ) + return HttpResponseRedirect(changelist_url) + else: + form = ConfirmForm() + + return render( + request, + "admin/newsletter/subscription/confirmimportform.html", + {'form': form, 'subscribers': addresses}, + ) + + """ URLs """ + def get_urls(self): + urls = super(SubscriptionAdmin, self).get_urls() + + my_urls = [ + url(r'^import/$', + self._wrap(self.subscribers_import), + name=self._view_name('import')), + url(r'^import/confirm/$', + self._wrap(self.subscribers_import_confirm), + name=self._view_name('import_confirm')), + ] + # Translated JS strings - these should be app-wide but are + # only used in this part of the admin. For now, leave them here. 
+ if HAS_CBV_JSCAT: + my_urls.append(url(r'^jsi18n/$', + JavaScriptCatalog.as_view(packages=('newsletter',)), + name='newsletter_js18n')) + else: + my_urls.append(url(r'^jsi18n/$', + javascript_catalog, + {'packages': ('newsletter',)}, + name='newsletter_js18n')) + + return my_urls + urls + + +admin.site.register(Newsletter, NewsletterAdmin) +admin.site.register(Submission, SubmissionAdmin) +admin.site.register(Message, MessageAdmin) +admin.site.register(Subscription, SubscriptionAdmin) diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/admin_forms.py b/thesisenv/lib/python3.6/site-packages/newsletter/admin_forms.py new file mode 100644 index 0000000..59aa0c1 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/newsletter/admin_forms.py @@ -0,0 +1,180 @@ +import logging + +from django import forms + +from django.contrib.admin import widgets, options + +from django.utils.translation import ugettext as _ + +from .models import Subscription, Newsletter, Submission +from .addressimport.parsers import parse_csv, parse_vcard, parse_ldif + + +logger = logging.getLogger(__name__) + + +class ImportForm(forms.Form): + + def clean(self): + # If there are validation errors earlier on, don't bother. + if not ('address_file' in self.cleaned_data and + 'ignore_errors' in self.cleaned_data and + 'newsletter' in self.cleaned_data): + return self.cleaned_data + # TESTME: Should an error be raised here or not? 
+ # raise forms.ValidationError(_("No file has been specified.")) + + ignore_errors = self.cleaned_data['ignore_errors'] + newsletter = self.cleaned_data['newsletter'] + + myfield = self.base_fields['address_file'] + myvalue = myfield.widget.value_from_datadict( + self.data, self.files, self.add_prefix('address_file')) + + content_type = myvalue.content_type + allowed_types = ('text/plain', 'application/octet-stream', + 'text/vcard', 'text/directory', 'text/x-vcard', + 'application/vnd.ms-excel', + 'text/comma-separated-values', 'text/csv', + 'application/csv', 'application/excel', + 'application/vnd.msexcel', 'text/anytext') + if content_type not in allowed_types: + raise forms.ValidationError(_( + "File type '%s' was not recognized.") % content_type) + + ext = myvalue.name.rsplit('.', 1)[-1].lower() + if ext == 'vcf': + self.addresses = parse_vcard( + myvalue.file, newsletter, ignore_errors) + + elif ext == 'ldif': + self.addresses = parse_ldif( + myvalue.file, newsletter, ignore_errors) + + elif ext == 'csv': + self.addresses = parse_csv( + myvalue.file, newsletter, ignore_errors) + + else: + raise forms.ValidationError( + _("File extension '%s' was not recognized.") % ext) + + if len(self.addresses) == 0: + raise forms.ValidationError( + _("No entries could be found in this file.")) + + return self.cleaned_data + + def get_addresses(self): + return getattr(self, 'addresses', {}) + + newsletter = forms.ModelChoiceField( + label=_("Newsletter"), + queryset=Newsletter.objects.all(), + initial=Newsletter.get_default) + address_file = forms.FileField(label=_("Address file")) + ignore_errors = forms.BooleanField( + label=_("Ignore non-fatal errors"), + initial=False, required=False) + + +class ConfirmForm(forms.Form): + + def clean(self): + value = self.cleaned_data['confirm'] + + if not value: + raise forms.ValidationError( + _("You should confirm in order to continue.")) + + confirm = forms.BooleanField( + label=_("Confirm import"), + initial=True, 
widget=forms.HiddenInput) + + +class SubscriptionAdminForm(forms.ModelForm): + + class Meta: + model = Subscription + fields = '__all__' + widgets = { + 'subscribed': widgets.AdminRadioSelect( + choices=[ + (True, _('Subscribed')), + (False, _('Unsubscribed')) + ], + attrs={ + 'class': options.get_ul_class(options.HORIZONTAL) + } + ) + } + + def __init__(self, *args, **kwargs): + super(SubscriptionAdminForm, self).__init__(*args, **kwargs) + + self.fields['subscribed'].label = _('Status') + + def clean_email_field(self): + data = self.cleaned_data['email_field'] + if self.cleaned_data['user'] and data: + raise forms.ValidationError(_( + 'If a user has been selected this field ' + 'should remain empty.')) + return data + + def clean_name_field(self): + data = self.cleaned_data['name_field'] + if self.cleaned_data['user'] and data: + raise forms.ValidationError(_( + 'If a user has been selected ' + 'this field should remain empty.')) + return data + + def clean(self): + cleaned_data = super(SubscriptionAdminForm, self).clean() + if not (cleaned_data.get('user', None) or + cleaned_data.get('email_field', None)): + + raise forms.ValidationError(_( + 'Either a user must be selected or an email address must ' + 'be specified.') + ) + return cleaned_data + + +class SubmissionAdminForm(forms.ModelForm): + + class Meta: + model = Submission + fields = '__all__' + + def clean_publish(self): + """ + Make sure only one submission can be published for each message. + """ + publish = self.cleaned_data['publish'] + + if publish and not self.errors: + message = self.cleaned_data['message'] + qs = Submission.objects.filter(publish=True, message=message) + if self.instance: + qs = qs.exclude(pk=self.instance.pk) + if qs.exists(): + raise forms.ValidationError(_( + 'This message has already been published in some ' + 'other submission. 
Messages can only be published once.') + ) + + return publish + + +class ArticleFormSet(forms.BaseInlineFormSet): + """ Formset for articles yielding default sortoder. """ + + def __init__(self, *args, **kwargs): + super(ArticleFormSet, self).__init__(*args, **kwargs) + + assert self.instance + next_sortorder = self.instance.get_next_article_sortorder() + for index, form in enumerate(self.extra_forms): + form.initial['sortorder'] = next_sortorder + index * 10 diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/admin_utils.py b/thesisenv/lib/python3.6/site-packages/newsletter/admin_utils.py new file mode 100644 index 0000000..c5f5c31 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/newsletter/admin_utils.py @@ -0,0 +1,55 @@ +from functools import update_wrapper + +from django.contrib.admin.utils import unquote +from django.http import Http404 +from django.utils.encoding import force_text +from django.utils.translation import ugettext as _ +from .models import Subscription + + +class ExtendibleModelAdminMixin(object): + def _getobj(self, request, object_id): + opts = self.model._meta + + try: + obj = self.get_queryset(request).get(pk=unquote(object_id)) + except self.model.DoesNotExist: + # Don't raise Http404 just yet, because we haven't checked + # permissions yet. We don't want an unauthenticated user to + # be able to determine whether a given object exists. + obj = None + + if obj is None: + raise Http404( + _( + '%(name)s object with primary key ' + '%(key)r does not exist.' 
+ ) % { + 'name': force_text(opts.verbose_name), + 'key': force_text(object_id) + } + ) + + return obj + + def _wrap(self, view): + def wrapper(*args, **kwargs): + return self.admin_site.admin_view(view)(*args, **kwargs) + return update_wrapper(wrapper, view) + + def _view_name(self, name): + info = self.model._meta.app_label, self.model._meta.model_name, name + + return '%s_%s_%s' % info + + +def make_subscription(newsletter, email, name=None): + addr = Subscription(subscribed=True) + + addr.newsletter = newsletter + addr.email_field = email + + if name: + addr.name_field = name + + return addr diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/compat.py b/thesisenv/lib/python3.6/site-packages/newsletter/compat.py new file mode 100644 index 0000000..df482f9 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/newsletter/compat.py @@ -0,0 +1,16 @@ +from django import get_version + +try: + from django.urls import reverse +except ImportError: # Django < 1.10 + from django.core.urlresolvers import reverse + +if get_version() < '1.10': + from django.template import Context + +def get_context(dictionary, **kwargs): + """Takes a dict and returns the correct object for template rendering.""" + if get_version() < '1.10': + return Context(dictionary, **kwargs) + else: + return dictionary diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/forms.py b/thesisenv/lib/python3.6/site-packages/newsletter/forms.py new file mode 100644 index 0000000..49514cf --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/newsletter/forms.py @@ -0,0 +1,165 @@ +from django import forms +from django.forms.utils import ValidationError +from django.utils.translation import ugettext_lazy as _ + +from .models import Subscription +from .validators import validate_email_nouser + + +class NewsletterForm(forms.ModelForm): + """ This is the base class for all forms managing subscriptions. 
""" + + class Meta: + model = Subscription + fields = ('name_field', 'email_field') + + def __init__(self, *args, **kwargs): + + assert 'newsletter' in kwargs, 'No newsletter specified' + + newsletter = kwargs.pop('newsletter') + + if 'ip' in kwargs: + ip = kwargs['ip'] + del kwargs['ip'] + else: + ip = None + + super(NewsletterForm, self).__init__(*args, **kwargs) + + self.instance.newsletter = newsletter + + if ip: + self.instance.ip = ip + + +class SubscribeRequestForm(NewsletterForm): + """ + Request subscription to the newsletter. Will result in an activation email + being sent with a link where one can edit, confirm and activate one's + subscription. + """ + + email_field = forms.EmailField( + label=_("e-mail"), validators=[validate_email_nouser] + ) + + def clean_email_field(self): + data = self.cleaned_data['email_field'] + + # Check whether we have already been subscribed to + try: + subscription = Subscription.objects.get( + email_field__exact=data, + newsletter=self.instance.newsletter + ) + + if subscription.subscribed and not subscription.unsubscribed: + raise ValidationError( + _("Your e-mail address has already been subscribed to.") + ) + else: + self.instance = subscription + + self.instance = subscription + + except Subscription.DoesNotExist: + pass + + return data + + +class UpdateRequestForm(NewsletterForm): + """ + Request updating or activating subscription. Will result in an activation + email being sent. 
+ """ + + email_field = forms.EmailField( + label=_("e-mail"), validators=[validate_email_nouser] + ) + + class Meta(NewsletterForm.Meta): + fields = ('email_field',) + + def clean(self): + if not self.instance.subscribed: + raise ValidationError( + _("This subscription has not yet been activated.") + ) + + return super(UpdateRequestForm, self).clean() + + def clean_email_field(self): + data = self.cleaned_data['email_field'] + + # Set our instance on the basis of the email field, or raise + # a validationerror + try: + self.instance = Subscription.objects.get( + newsletter=self.instance.newsletter, + email_field__exact=data + ) + + except Subscription.DoesNotExist: + raise ValidationError( + _("This e-mail address has not been subscribed to.") + ) + + return data + + +class UnsubscribeRequestForm(UpdateRequestForm): + """ + Similar to previous form but checks if we have not + already been unsubscribed. + """ + + def clean(self): + if self.instance.unsubscribed: + raise ValidationError( + _("This subscription has already been unsubscribed from.") + ) + + return super(UnsubscribeRequestForm, self).clean() + + +class UpdateForm(NewsletterForm): + """ + This form allows one to actually update to or unsubscribe from the + newsletter. To do this, a correct activation code is required. + """ + + email_field = forms.EmailField( + label=_("e-mail"), validators=[validate_email_nouser], disabled=True + ) + + def clean_user_activation_code(self): + data = self.cleaned_data['user_activation_code'] + + if data != self.instance.activation_code: + raise ValidationError( + _('The validation code supplied by you does not match.') + ) + + return data + + user_activation_code = forms.CharField( + label=_("Activation code"), max_length=40 + ) + + +class UserUpdateForm(forms.ModelForm): + """ + Form for updating subscription information/unsubscribing as a logged-in + user. 
+ """ + + class Meta: + model = Subscription + fields = ('subscribed',) + # Newsletter here should become a read only field, + # once this is supported by Django. + + # For now, use a hidden field. + hidden_fields = ('newsletter',) diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/jobs/__init__.py b/thesisenv/lib/python3.6/site-packages/newsletter/jobs/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/jobs/daily/__init__.py b/thesisenv/lib/python3.6/site-packages/newsletter/jobs/daily/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/jobs/hourly/__init__.py b/thesisenv/lib/python3.6/site-packages/newsletter/jobs/hourly/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/jobs/hourly/submit.py b/thesisenv/lib/python3.6/site-packages/newsletter/jobs/hourly/submit.py new file mode 100644 index 0000000..f46310a --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/newsletter/jobs/hourly/submit.py @@ -0,0 +1,15 @@ +import warnings + +from django_extensions.management.jobs import HourlyJob +from django.core.management import call_command + + +class Job(HourlyJob): + help = "Submit pending messages." 
+ + def execute(self): + warnings.warn( + "The django-extensions cron job is deprecated in favor of the " + "submit_newsletter management command.", DeprecationWarning) + + call_command('submit_newsletter') diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/jobs/monthly/__init__.py b/thesisenv/lib/python3.6/site-packages/newsletter/jobs/monthly/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/jobs/weekly/__init__.py b/thesisenv/lib/python3.6/site-packages/newsletter/jobs/weekly/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/locale/ar/LC_MESSAGES/django.mo b/thesisenv/lib/python3.6/site-packages/newsletter/locale/ar/LC_MESSAGES/django.mo new file mode 100644 index 0000000..20e7b5a Binary files /dev/null and b/thesisenv/lib/python3.6/site-packages/newsletter/locale/ar/LC_MESSAGES/django.mo differ diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/locale/ar/LC_MESSAGES/django.po b/thesisenv/lib/python3.6/site-packages/newsletter/locale/ar/LC_MESSAGES/django.po new file mode 100644 index 0000000..09f8cf0 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/newsletter/locale/ar/LC_MESSAGES/django.po @@ -0,0 +1,863 @@ +# SOME DESCRIPTIVE TITLE. +# Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER +# This file is distributed under the same license as the PACKAGE package. 
+# +# Translators: +# amrnegm , 2013 +# Bashar Al-Abdulhadi, 2015,2018 +# Bashar Al-Abdulhadi, 2014 +# dokterbob , 2016 +msgid "" +msgstr "" +"Project-Id-Version: django-newsletter\n" +"Report-Msgid-Bugs-To: \n" +"POT-Creation-Date: 2017-11-16 11:31+0000\n" +"PO-Revision-Date: 2018-04-04 14:42+0000\n" +"Last-Translator: Bashar Al-Abdulhadi\n" +"Language-Team: Arabic (http://www.transifex.com/dokterbob/django-newsletter/language/ar/)\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Language: ar\n" +"Plural-Forms: nplurals=6; plural=n==0 ? 0 : n==1 ? 1 : n==2 ? 2 : n%100>=3 && n%100<=10 ? 3 : n%100>=11 && n%100<=99 ? 4 : 5;\n" + +#: addressimport/parsers.py:40 +#, python-format +msgid "Entry '%s' does not contain a valid e-mail address." +msgstr "السجل '%s' لا يتضمن عنوان بريد إلكتروني صحيح." + +#: addressimport/parsers.py:55 +#, python-format +msgid "The address file contains duplicate entries for '%s'." +msgstr "ملف العناوين يتضمن عناوين مكرره للسجل '%s'." + +#: addressimport/parsers.py:69 +msgid "Some entries are already subscribed to." +msgstr "بعض المدخلات مشتركة سلفا في." + +#: addressimport/parsers.py:108 +#, python-format +msgid "" +"E-mail address %(email)s too long, maximum length is %(email_length)s " +"characters." +msgstr "عنوان البريد الإلكتروني %(email)s طويل جدا، والحد الأقصى هو %(email_length)s حرف." + +#: addressimport/parsers.py:135 +#, python-format +msgid "Name %(name)s too long, maximum length is %(name_length)s characters." +msgstr "الإسم %(name)s طويل جدا, الحد الأقصى هو %(name_length)s حرف." + +#: addressimport/parsers.py:196 addressimport/parsers.py:208 +#: addressimport/parsers.py:243 models.py:165 +msgid "name" +msgstr "الاسم" + +#: addressimport/parsers.py:200 +msgid "display" +msgstr "عرض" + +#: addressimport/parsers.py:207 +#, python-format +msgid "" +"Name column not found. The name of this column should be either 'name' or " +"'%s'." +msgstr "لم يتم العثور على اسم العمود. 
يجب أن يكون اسم هذا العمود إما 'اسم' أو '%s'." + +#: addressimport/parsers.py:219 addressimport/parsers.py:231 +#: addressimport/parsers.py:244 forms.py:43 forms.py:76 models.py:39 +#: models.py:179 +msgid "e-mail" +msgstr "البريد الإلكتروني" + +#: addressimport/parsers.py:229 +#, python-format +msgid "" +"E-mail column not found. The name of this column should be either 'email', " +"'e-mail' or '%(email)s'." +msgstr "لم يتم العثور على اسم عمود البريد الإلكتروني. يجب أن يكون اسم هذا العمود إما 'email' أو '%(email)s'." + +#: addressimport/parsers.py:239 +#, python-format +msgid "" +"Could not properly determine the proper columns in the CSV-file. There " +"should be a field called 'name' or '%(name)s' and one called 'e-mail' or " +"'%(e-mail)s'." +msgstr "لا يمكن تحديد الأعمدة المناسبة بشكل صحيح في ملف CSV. يجب أن يكون هناك حقل يسمى 'اسم' أو '%(name)s' و واحد يسمى 'البريد الإلكتروني' أو '%(e-mail)s'." + +#: addressimport/parsers.py:264 +#, python-format +msgid "Row with content '%(row)s' does not contain a name and email field." +msgstr "الصف الذي يحتوي على '%(row)s' لا تحتوي على حقول الاسم والبريد الإلكتروني." + +#: addressimport/parsers.py:290 +#, python-format +msgid "Error reading vCard file: %s" +msgstr "خطأ بقراءة ملف vCard: %s" + +#: addressimport/parsers.py:309 +#, python-format +msgid "Entry '%s' contains no email address." +msgstr "السجل '%s' لا يتضمن بريد إلكتروني." + +#: addressimport/parsers.py:345 +msgid "Some entries have no e-mail address." +msgstr "بعض المدخلات لا تتضمن عنوان بريد إلكتروني." 
+ +#: admin.py:79 +msgid "Messages" +msgstr "الرسائل" + +#: admin.py:83 +#: templates/admin/newsletter/subscription/confirmimportform.html:16 +#: templates/admin/newsletter/subscription/importform.html:16 +msgid "Subscriptions" +msgstr "الاشتراكات" + +#: admin.py:87 +msgid "Submissions" +msgstr "الإرساليات" + +#: admin.py:99 models.py:102 models.py:300 models.py:480 models.py:699 +msgid "newsletter" +msgstr "قائمة التراسل" + +#: admin.py:117 models.py:555 +msgid "submission" +msgstr "الإرسالية" + +#: admin.py:124 +msgid "publish date" +msgstr "تاريخ النشر" + +#: admin.py:154 +msgid "Sent." +msgstr "أرسلت" + +#: admin.py:157 +msgid "Delayed submission." +msgstr "عملية إرسال متأخرة" + +#: admin.py:159 +msgid "Submitting." +msgstr "جاري الإرسال" + +#: admin.py:161 +msgid "Not sent." +msgstr "غير مرسل" + +#: admin.py:162 admin.py:389 admin_forms.py:115 +msgid "Status" +msgstr "الوضعية" + +#: admin.py:169 +msgid "Submission already sent." +msgstr "عملية الإرسال تم إرسالها سلفا." + +#: admin.py:178 +msgid "Your submission is being sent." +msgstr "جاري تنفيذ عملية الإرسال." + +#: admin.py:225 +msgid "Optional" +msgstr "اختياري" + +#: admin.py:253 models.py:450 models.py:491 models.py:703 +msgid "message" +msgstr "الرسالة" + +#: admin.py:258 templates/admin/newsletter/message/change_form.html:8 +#: templates/admin/newsletter/message/preview.html:13 +msgid "Preview" +msgstr "معاينة" + +#: admin.py:275 views.py:611 +msgid "" +"No HTML template associated with the newsletter this message belongs to." 
+msgstr "لا يوجد قالب HTML مرتبط مع قائمة التراسل تعود ملكية لهذه الرسالة" + +#: admin.py:384 admin_forms.py:103 +msgid "Subscribed" +msgstr "مشترك" + +#: admin.py:386 admin_forms.py:104 +msgid "Unsubscribed" +msgstr "إلغاء الإشتراك" + +#: admin.py:388 +msgid "Unactivated" +msgstr "غير فعّال" + +#: admin.py:396 models.py:314 +msgid "subscribe date" +msgstr "تاريخ الإشتراك" + +#: admin.py:403 models.py:322 +msgid "unsubscribe date" +msgstr "تاريخ إلغاء الإشتراك" + +#: admin.py:411 +#, python-format +msgid "%d user has been successfully subscribed." +msgid_plural "%d users have been successfully subscribed." +msgstr[0] "" +msgstr[1] "" +msgstr[2] "" +msgstr[3] "" +msgstr[4] "" +msgstr[5] "" + +#: admin.py:416 +msgid "Subscribe selected users" +msgstr "تسجيل المستخدمين المحددين" + +#: admin.py:423 +#, python-format +msgid "%d user has been successfully unsubscribed." +msgid_plural "%d users have been successfully unsubscribed." +msgstr[0] "" +msgstr[1] "" +msgstr[2] "" +msgstr[3] "" +msgstr[4] "" +msgstr[5] "" + +#: admin.py:428 +msgid "Unsubscribe selected users" +msgstr "إلفاء إشتراك المستخدمين المحددين" + +#: admin.py:484 +#, python-format +msgid "%d subscription has been successfully added." +msgid_plural "%d subscriptions have been successfully added." +msgstr[0] "" +msgstr[1] "" +msgstr[2] "" +msgstr[3] "" +msgstr[4] "" +msgstr[5] "" + +#: admin_forms.py:43 +#, python-format +msgid "File type '%s' was not recognized." +msgstr "لم يتم التعرف على نوع الملف '%s'." + +#: admin_forms.py:60 +#, python-format +msgid "File extension '%s' was not recognized." +msgstr "لم يتم التعرف على امتداد الملف '%s' ." + +#: admin_forms.py:64 +msgid "No entries could found in this file." +msgstr "لا سجلات موجودة في هذا الملف." 
+ +#: admin_forms.py:72 +#: templates/admin/newsletter/subscription/confirmimportform.html:12 +#: templates/admin/newsletter/subscription/importform.html:12 +#: templates/newsletter/common.html:6 +#: templates/newsletter/newsletter_detail.html:10 +#: templates/newsletter/newsletter_list.html:14 +#: templates/newsletter/newsletter_list.html:32 +#: templates/newsletter/subscription_activate.html:5 +#: templates/newsletter/subscription_activate.html:8 +#: templates/newsletter/subscription_subscribe_activated.html:5 +#: templates/newsletter/subscription_subscribe_activated.html:8 +#: templates/newsletter/subscription_unsubscribe_activated.html:5 +#: templates/newsletter/subscription_unsubscribe_activated.html:8 +#: templates/newsletter/subscription_update_activated.html:5 +#: templates/newsletter/subscription_update_activated.html:8 +msgid "Newsletter" +msgstr "قائمة التراسل" + +#: admin_forms.py:75 +msgid "Address file" +msgstr "ملف العنوان" + +#: admin_forms.py:77 +msgid "Ignore non-fatal errors" +msgstr "تجاهل الأخطاء الغير فادحة" + +#: admin_forms.py:88 +msgid "You should confirm in order to continue." +msgstr "يجب عليك التأكيد من أجل المتابعة." + +#: admin_forms.py:91 +#: templates/admin/newsletter/subscription/confirmimportform.html:23 +#: templates/admin/newsletter/subscription/confirmimportform.html:28 +msgid "Confirm import" +msgstr "تأكيد الإستيراد" + +#: admin_forms.py:121 admin_forms.py:129 +msgid "If a user has been selected this field should remain empty." +msgstr "إذا تم تحديد مستخدم هذا الحقل يجبأن يطل فارغا." + +#: admin_forms.py:139 +msgid "Either a user must be selected or an email address must be specified." +msgstr "إما يجب تحديد مستخدم أو يجب تحديد عنوان بريد إلكتروني." + +#: admin_forms.py:164 +msgid "" +"This message has already been published in some other submission. Messages " +"can only be published once." +msgstr "وقد تم بالفعل نشر هذه الرسالة في بعض تقديمها الإرسالات الأخرى. الرسائل لا يمكن أن يتم نشرها إلا مرة واحدة." 
+ +#: admin_utils.py:25 +#, python-format +msgid "%(name)s object with primary key %(key)r does not exist." +msgstr "الكائن %(name)s مع المفتاح الأساسي %(key)r غير موجود." + +#: forms.py:57 +msgid "Your e-mail address has already been subscribed to." +msgstr " عنوان البريد الإلكتروني الخاص بك مسجل سلفا في." + +#: forms.py:84 +msgid "This subscription has not yet been activated." +msgstr "هذا الإشتراك لم يتم تفعيله حتى الآن." + +#: forms.py:102 +msgid "This e-mail address has not been subscribed to." +msgstr "هذا البريد الإلكتروني غير مسجّل في." + +#: forms.py:117 +msgid "This subscription has already been unsubscribed from." +msgstr "هذا الإشتراك تم إلغاء اشتراكه سلفا من." + +#: forms.py:133 +msgid "The validation code supplied by you does not match." +msgstr "رمز التحقق المُقدم من قبلكم غير مطابقة." + +#: forms.py:139 +msgid "Activation code" +msgstr "رمز التفعيل" + +#: jobs/hourly/submit.py:15 +msgid "Submitting queued newsletter mailings" +msgstr "إرسال قوائم التراسل التي في قائمة الإنتظار" + +#: models.py:34 +msgid "newsletter title" +msgstr "عنوان قائمة التراسل" + +#: models.py:39 +msgid "Sender e-mail" +msgstr "بريد المُرسِل" + +#: models.py:42 +msgid "sender" +msgstr "المرسل" + +#: models.py:42 +msgid "Sender name" +msgstr "إسم المرسل" + +#: models.py:46 +msgid "visible" +msgstr "مرئي" + +#: models.py:50 +msgid "send html" +msgstr "أرسل HTML" + +#: models.py:51 +msgid "Whether or not to send HTML versions of e-mails." +msgstr "نعم ام لا لإرسال إصدارات HTML من رسائل البريد الإلكتروني." + +#: models.py:103 +msgid "newsletters" +msgstr "قوائم التراسل" + +#: models.py:159 +msgid "user" +msgstr "مستخدم" + +#: models.py:165 +msgid "optional" +msgstr "اختياري" + +#: models.py:209 +#, python-format +msgid "Updated subscription %(subscription)s to %(action)s." +msgstr "تحديث الإشتراكات %(subscription)s لـ %(action)s." + +#: models.py:251 +msgid "Neither an email nor a username is set. This asks for inconsistency!" +msgstr "لم يتم تعيين بريد إلكتروني او إسم مستخدم. 
هذا يسبب تناقض!" + +#: models.py:255 +msgid "If user is set, email must be null and vice versa." +msgstr "إذا تم تعيين المستخدم، يجب أن يكون البريد الإلكتروني لاغيا والعكس صحيح." + +#: models.py:297 +msgid "IP address" +msgstr "عنوان IP" + +#: models.py:306 +msgid "activation code" +msgstr "تاريخ التفعيل" + +#: models.py:311 +msgid "subscribed" +msgstr "مشترك" + +#: models.py:319 +msgid "unsubscribed" +msgstr "تم إلغاء الإشتراك" + +#: models.py:327 +#, python-format +msgid "%(name)s <%(email)s> to %(newsletter)s" +msgstr "%(name)s <%(email)s> لـ %(newsletter)s" + +#: models.py:334 +#, python-format +msgid "%(email)s to %(newsletter)s" +msgstr "%(email)s لـ %(newsletter)s" + +#: models.py:340 +msgid "subscription" +msgstr "الإشتراك" + +#: models.py:341 +msgid "subscriptions" +msgstr "الإشتراكات" + +#: models.py:429 +msgid "" +"Sort order determines the order in which articles are concatenated in a " +"post." +msgstr "أمر الترتيب يُحدد وضعية تسلسل المقالات في الإرسالية." + +#: models.py:431 +msgid "sort order" +msgstr "أمر الترتيب" + +#: models.py:434 models.py:476 +msgid "title" +msgstr "العنوان" + +#: models.py:435 +msgid "text" +msgstr "النص" + +#: models.py:438 +msgid "link" +msgstr "رابط" + +#: models.py:444 +msgid "image" +msgstr "صورة" + +#: models.py:456 +msgid "article" +msgstr "مقالة" + +#: models.py:457 +msgid "articles" +msgstr "مقالات" + +#: models.py:477 +msgid "slug" +msgstr "slug" + +#: models.py:484 +msgid "created" +msgstr "أنشئ" + +#: models.py:487 +msgid "modified" +msgstr "تم التعديل" + +#: models.py:492 +msgid "messages" +msgstr "الرسائل" + +#: models.py:497 +#, python-format +msgid "%(title)s in %(newsletter)s" +msgstr "%(title)s في %(newsletter)s" + +#: models.py:556 +msgid "submissions" +msgstr "الإرساليات" + +#: models.py:559 +#, python-format +msgid "%(newsletter)s on %(publish_date)s" +msgstr "%(newsletter)s في %(publish_date)s" + +#: models.py:578 +#, python-format +msgid "Submitting %(submission)s to %(count)d people" +msgstr "إرسال 
%(submission)s إلى %(count)d شخص" + +#: models.py:637 +#, python-format +msgid "Submitting message to: %s." +msgstr "إرسال الرسالى إلى: %s." + +#: models.py:646 +#, python-format +msgid "Message %(subscription)s failed with error: %(error)s" +msgstr "الرسالة %(subscription)s فشلت بسبب: %(error)s" + +#: models.py:664 +#, python-format +msgid "Submission of message %s" +msgstr "إرساليات من الرسائل %s" + +#: models.py:709 +msgid "" +"If you select none, the system will automatically find the subscribers for " +"you." +msgstr "إن لم تختار أي مشترك, النظام سوف يختار مشتركين لك بشكل تلقائي." + +#: models.py:711 +msgid "recipients" +msgstr "المستلمين" + +#: models.py:716 +msgid "publication date" +msgstr "تاريخ النشر" + +#: models.py:720 +msgid "publish" +msgstr "انشر" + +#: models.py:721 +msgid "Publish in archive." +msgstr "انشر في الأرشيف." + +#: models.py:725 +msgid "prepared" +msgstr "مجهّز" + +#: models.py:729 +msgid "sent" +msgstr "تم الإرسال" + +#: models.py:733 +msgid "sending" +msgstr "جاري الإرسال" + +#: templates/admin/newsletter/message/preview.html:5 +#: templates/admin/newsletter/message/preview.html:19 +msgid "Preview message" +msgstr "معاينة الرسالة" + +#: templates/admin/newsletter/message/preview.html:9 +#: templates/admin/newsletter/subscription/confirmimportform.html:8 +#: templates/admin/newsletter/subscription/importform.html:8 +msgid "Home" +msgstr "الرئيسية" + +#: templates/admin/newsletter/message/preview.html:11 +msgid "Message" +msgstr "الرسالة" + +#: templates/admin/newsletter/message/preview.html:22 +#: templates/admin/newsletter/subscription/importform.html:28 +msgid "Change" +msgstr "تغيير" + +#: templates/admin/newsletter/message/preview.html:23 +#: templates/admin/newsletter/subscription/importform.html:29 +msgid "Create submission" +msgstr "إنشاء إرسالية" + +#: templates/admin/newsletter/message/preview.html:26 +msgid "HTML" +msgstr "HTML" + +#: templates/admin/newsletter/message/preview.html:30 +msgid "Text" +msgstr "النص" + +#: 
templates/admin/newsletter/submission/change_form.html:16 +msgid "Submit" +msgstr "أرسل" + +#: templates/admin/newsletter/subscription/change_list.html:5 +msgid "Import" +msgstr "إستيراد" + +#: templates/admin/newsletter/subscription/confirmimportform.html:3 +#: templates/admin/newsletter/subscription/confirmimportform.html:20 +#: templates/admin/newsletter/subscription/importform.html:3 +#: templates/admin/newsletter/subscription/importform.html:19 +#: templates/admin/newsletter/subscription/importform.html:24 +msgid "Import addresses" +msgstr "إستيراد عناوين" + +#: templates/admin/newsletter/subscription/confirmimportform.html:40 +msgid "Confirm" +msgstr "تأكيد" + +#: templates/admin/newsletter/subscription/importform.html:37 +msgid "Upload" +msgstr "رفع" + +#: templates/newsletter/message/message.html:21 +msgid "Read more" +msgstr "قراءة المزيد" + +#: templates/newsletter/message/message.html:27 +msgid "Read message online" +msgstr "قراءة الرسالة من خلال المتصفح" + +#: templates/newsletter/message/message.html:29 +#: templates/newsletter/newsletter_detail.html:21 +#: templates/newsletter/subscription_unsubscribe.html:23 +#: templates/newsletter/subscription_unsubscribe_user.html:23 +msgid "Unsubscribe" +msgstr "إلغاء الإشتراك" + +#: templates/newsletter/message/message.txt:15 +msgid "Unsubscribe:" +msgstr "إلغاء الإشتراك:" + +#: templates/newsletter/message/subscribe.html:6 +#, python-format +msgid "Subscription to %(title)s" +msgstr "التسجيل في %(title)s" + +#: templates/newsletter/message/subscribe.html:10 +#: templates/newsletter/message/subscribe.txt:1 +#, python-format +msgid "" +"Dear %(name)s,\n" +"\n" +"you, or someone in your name requested a subscription to %(title)s.\n" +"\n" +"If you would like to confirm your subscription, please follow this activation link:\n" +"http://%(domain)s%(url)s\n" +"\n" +"Kind regards," +msgstr "" + +#: templates/newsletter/message/subscribe_subject.txt:1 +msgid "Confirm subscription" +msgstr "تأكيد الإشتراك" + +#: 
templates/newsletter/message/unsubscribe.html:6 +#, python-format +msgid "Unsubscription from %(title)s" +msgstr "إلغاء الإشتراك من %(title)s" + +#: templates/newsletter/message/unsubscribe.html:9 +#: templates/newsletter/message/unsubscribe.txt:1 +#, python-format +msgid "" +"Dear %(name)s,\n" +"\n" +"you, or someone in your name requested unsubscription from %(title)s.\n" +"\n" +"If you would like to confirm your unsubscription, please follow this activation link:\n" +"http://%(domain)s%(url)s\n" +"\n" +"Kind regards," +msgstr "" + +#: templates/newsletter/message/unsubscribe_subject.txt:1 +msgid "Confirm unsubscription" +msgstr "تأكيد إلغاء الإشتراك" + +#: templates/newsletter/message/update.html:6 +#, python-format +msgid "Update of subscription to %(title)s" +msgstr "تحديث الإشتراك في %(title)s" + +#: templates/newsletter/message/update.html:9 +#: templates/newsletter/message/update.txt:1 +#, python-format +msgid "" +"Dear %(name)s,\n" +"\n" +"you, or someone in your name requested updating your personal information for %(title)s.\n" +"\n" +"To make changes to your information in our database, please follow this activation link:\n" +"http://%(domain)s%(url)s\n" +"\n" +"Kind regards," +msgstr "" + +#: templates/newsletter/message/update_subject.txt:1 +msgid "Update information" +msgstr "تحديث البيانات" + +#: templates/newsletter/newsletter_detail.html:5 +msgid "Newsletter detail" +msgstr "تفاصيل قائمة التراسل" + +#: templates/newsletter/newsletter_detail.html:13 +#: templates/newsletter/newsletter_list.html:16 +#: templates/newsletter/subscription_subscribe.html:23 +#: templates/newsletter/subscription_subscribe_user.html:22 +msgid "Subscribe" +msgstr "الإشتراك" + +#: templates/newsletter/newsletter_detail.html:17 +msgid "Update" +msgstr "تحديث" + +#: templates/newsletter/newsletter_detail.html:24 +msgid "Archive" +msgstr "أرشفة" + +#: templates/newsletter/newsletter_detail.html:27 +#: templates/newsletter/submission_archive.html:18 +msgid "Back to list" 
+msgstr "العودة للقائمة" + +#: templates/newsletter/newsletter_list.html:5 +msgid "Newsletter list" +msgstr "قوائم التراسل" + +#: templates/newsletter/newsletter_list.html:27 +msgid "Update subscriptions" +msgstr "تحديث الإشتراكات" + +#: templates/newsletter/submission_archive.html:5 +#: templates/newsletter/submission_archive.html:10 +msgid "Newsletter archive" +msgstr "أرشيف قائمة التراسل" + +#: templates/newsletter/subscription_activate.html:5 +#: templates/newsletter/subscription_activate.html:8 +#: templates/newsletter/subscription_subscribe_activated.html:5 +#: templates/newsletter/subscription_subscribe_activated.html:8 +#: templates/newsletter/subscription_unsubscribe_activated.html:5 +#: templates/newsletter/subscription_unsubscribe_activated.html:8 +#: templates/newsletter/subscription_update_activated.html:5 +#: templates/newsletter/subscription_update_activated.html:8 +msgid "activate" +msgstr "تفعيل" + +#: templates/newsletter/subscription_activate.html:13 +msgid "Activate" +msgstr "تفعيل" + +#: templates/newsletter/subscription_subscribe.html:5 +#: templates/newsletter/subscription_subscribe.html:8 +#: templates/newsletter/subscription_subscribe_email_sent.html:5 +#: templates/newsletter/subscription_subscribe_email_sent.html:8 +#: templates/newsletter/subscription_subscribe_user.html:5 +#: templates/newsletter/subscription_subscribe_user.html:8 +msgid "Newsletter subscribe" +msgstr "إشتراك قائمة التراسل" + +#: templates/newsletter/subscription_subscribe.html:11 +#: templates/newsletter/subscription_unsubscribe.html:11 +#: templates/newsletter/subscription_update.html:11 +msgid "" +"Due to a technical error we were not able to submit your confirmation email." +" This could be because your email address is invalid." +msgstr "بسبب خطأ فني لم نكن قادرين على إرسال رسالة تأكيد التسجيل. هذا يمكن أن يكون بسبب عنوان بريدك الإلكتروني غير صحيح." 
+ +#: templates/newsletter/subscription_subscribe_activated.html:10 +msgid "Your subscription has successfully been activated." +msgstr "تم تفعيل اشتراكك بنجاح." + +#: templates/newsletter/subscription_subscribe_email_sent.html:10 +msgid "" +"Your subscription request was successfully received and an activation email " +"has been sent to you. In that email you will find a link which you need to " +"follow in order to activate your subscription." +msgstr "تم استقبال طلب إشتراكك بنجاح وتم إرسال رسالة إلكترونية تفعيل لك. في تلك الرسالة سوف تجد رابط تحتاج إلى متابعته لتفعيل اشتراكك." + +#: templates/newsletter/subscription_subscribe_user.html:19 +msgid "Do you want to subscribe to this newsletter?" +msgstr "هل ترغب في الاشتراك في قائمة التراسل هذه؟" + +#: templates/newsletter/subscription_unsubscribe.html:5 +#: templates/newsletter/subscription_unsubscribe.html:8 +#: templates/newsletter/subscription_unsubscribe_email_sent.html:5 +#: templates/newsletter/subscription_unsubscribe_email_sent.html:8 +#: templates/newsletter/subscription_unsubscribe_user.html:5 +#: templates/newsletter/subscription_unsubscribe_user.html:8 +msgid "Newsletter unsubscribe" +msgstr "إلغاء إشتراك قائمة التراسل" + +#: templates/newsletter/subscription_unsubscribe_activated.html:10 +msgid "You have successfully been unsubscribed." +msgstr "تم إلغاء إشتراكك بنجاح." + +#: templates/newsletter/subscription_unsubscribe_email_sent.html:10 +msgid "" +"Your unsubscription request has successfully been received. An email has " +"been sent to you with a link you need to follow in order to confirm your " +"unsubscription." +msgstr "تم إستقبال طلب إلغاء الاشتراك الخاص بك بنجاح. تم ارسال بريد الكتروني لك مع رابط تحتاج إلى متابعته من أجل تأكيد إلغاء الاشتراك الخاص بك." + +#: templates/newsletter/subscription_unsubscribe_user.html:20 +msgid "Do you want to unsubscribe from this newsletter?" 
+msgstr "هل ترغب في إلغاء الاشتراك في قائمة التراسل هذه؟" + +#: templates/newsletter/subscription_update.html:5 +#: templates/newsletter/subscription_update.html:8 +#: templates/newsletter/subscription_update_email_sent.html:5 +#: templates/newsletter/subscription_update_email_sent.html:8 +msgid "Newsletter update" +msgstr "تحديث قائمة التراسل" + +#: templates/newsletter/subscription_update.html:23 +msgid "Update subscription" +msgstr "تحديث الإشتراك" + +#: templates/newsletter/subscription_update_activated.html:10 +msgid "Your subscription has successfully been updated." +msgstr "تم تحديث اشتراكك بنجاح." + +#: templates/newsletter/subscription_update_email_sent.html:10 +msgid "" +"Your update request was successfully received and an activation email has " +"been sent to you. In that email you will find a link which you need to " +"follow in order to update your subscription." +msgstr "تم استقبال طلب التحديث بنجاح وتم إرسال رسالة إلكترونية تفعيل لك. في تلك الرسالة سوف تجد رابط تحتاج إلى متابعته لتحديث اشتراكك." + +#: templates/widget/image.html:2 +msgid "Currently:" +msgstr "حاليا:" + +#: templates/widget/image.html:4 +msgid "Change:" +msgstr "تغيير:" + +#: validators.py:15 +#, python-format +msgid "" +"The e-mail address '%(email)s' belongs to a user with an account on this " +"site. Please log in as that user and try again." +msgstr "عنوان البريد الإلكتروني '%(email)s' مملوك لمستخدم لديه حساب على هذا الموقع. الرجاء تسجيل الدخول بحساب مستخدم وحاول مرة أخرى." + +#: views.py:120 +msgid "Your changes have been saved." +msgstr "تم حفظ التغييرات." + +#: views.py:311 +#, python-format +msgid "You have been subscribed to %s." +msgstr "تم تسجيلك في %s." + +#: views.py:315 +#, python-format +msgid "User %(rs)s subscribed to %(my_newsletter)s." +msgstr "المستخدم %(rs)s تم تسجيله في %(my_newsletter)s." + +#: views.py:325 +#, python-format +msgid "You are already subscribed to %s." +msgstr "أنت مسجل سلفا في %s." 
+ +#: views.py:350 +#, python-format +msgid "You have been unsubscribed from %s." +msgstr "تم إلغاء اشتراكك سلفا من %s." + +#: views.py:354 +#, python-format +msgid "User %(rs)s unsubscribed from %(my_newsletter)s." +msgstr "المستخدم %(rs)s تم إلغاء إشتراكو من %(my_newsletter)s." + +#: views.py:367 +#, python-format +msgid "You are not subscribed to %s." +msgstr "أنت غير مسجّل في %s." diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/locale/ar/LC_MESSAGES/djangojs.mo b/thesisenv/lib/python3.6/site-packages/newsletter/locale/ar/LC_MESSAGES/djangojs.mo new file mode 100644 index 0000000..9d153cc Binary files /dev/null and b/thesisenv/lib/python3.6/site-packages/newsletter/locale/ar/LC_MESSAGES/djangojs.mo differ diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/locale/ar/LC_MESSAGES/djangojs.po b/thesisenv/lib/python3.6/site-packages/newsletter/locale/ar/LC_MESSAGES/djangojs.po new file mode 100644 index 0000000..559f108 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/newsletter/locale/ar/LC_MESSAGES/djangojs.po @@ -0,0 +1,26 @@ +# SOME DESCRIPTIVE TITLE. +# Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER +# This file is distributed under the same license as the PACKAGE package. +# +# Translators: +# Bashar Al-Abdulhadi, 2015 +# Bashar Al-Abdulhadi, 2014 +msgid "" +msgstr "" +"Project-Id-Version: django-newsletter\n" +"Report-Msgid-Bugs-To: \n" +"POT-Creation-Date: 2012-11-19 21:55+0100\n" +"PO-Revision-Date: 2015-06-28 13:34+0000\n" +"Last-Translator: Bashar Al-Abdulhadi\n" +"Language-Team: Arabic (http://www.transifex.com/dokterbob/django-newsletter/language/ar/)\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Language: ar\n" +"Plural-Forms: nplurals=6; plural=n==0 ? 0 : n==1 ? 1 : n==2 ? 2 : n%100>=3 && n%100<=10 ? 3 : n%100>=11 && n%100<=99 ? 4 : 5;\n" + +#: static/newsletter/admin/js/submit_interface.js:12 +msgid "" +"The submission has been changed. 
It has to be saved before you can submit. " +"Click OK to proceed with saving, click cancel to continue editing." +msgstr "عملية الإرسال تم التعديل عليها. يجب حفظها أولا لكي تتمكن من إرسالها. أنقر على موافق لمتابعة عملية الحفظ او إلغاء لمتابعة التعديل." diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/locale/cs_CZ/LC_MESSAGES/django.mo b/thesisenv/lib/python3.6/site-packages/newsletter/locale/cs_CZ/LC_MESSAGES/django.mo new file mode 100644 index 0000000..a5462d1 Binary files /dev/null and b/thesisenv/lib/python3.6/site-packages/newsletter/locale/cs_CZ/LC_MESSAGES/django.mo differ diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/locale/cs_CZ/LC_MESSAGES/django.po b/thesisenv/lib/python3.6/site-packages/newsletter/locale/cs_CZ/LC_MESSAGES/django.po new file mode 100644 index 0000000..f866808 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/newsletter/locale/cs_CZ/LC_MESSAGES/django.po @@ -0,0 +1,780 @@ +# SOME DESCRIPTIVE TITLE. +# Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER +# This file is distributed under the same license as the PACKAGE package. +# +# Translators: +# dokterbob , 2016 +# tomaasch , 2014 +msgid "" +msgstr "" +"Project-Id-Version: django-newsletter\n" +"Report-Msgid-Bugs-To: \n" +"POT-Creation-Date: 2016-01-05 17:58+0100\n" +"PO-Revision-Date: 2016-02-02 13:35+0000\n" +"Last-Translator: dokterbob \n" +"Language-Team: Czech (Czech Republic) (http://www.transifex.com/dokterbob/django-newsletter/language/cs_CZ/)\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Language: cs_CZ\n" +"Plural-Forms: nplurals=3; plural=(n==1) ? 0 : (n>=2 && n<=4) ? 1 : 2;\n" + +#: addressimport/parsers.py:40 +#, python-format +msgid "Entry '%s' does not contain a valid e-mail address." +msgstr "Položka '%s' neobsahuje validní e-mailovou adresu." + +#: addressimport/parsers.py:55 +#, python-format +msgid "The address file contains duplicate entries for '%s'." 
+msgstr "Soubor s adresami obsahuje duplicitní položku '%s'." + +#: addressimport/parsers.py:69 +msgid "Some entries are already subscribed to." +msgstr "Některé položky jsou již přihlášeny k odběru." + +#: addressimport/parsers.py:108 +#, python-format +msgid "" +"E-mail address %(email)s too long, maximum length is %(email_length)s " +"characters." +msgstr "E-mailová adresa %(email)s je příliš dlouhá, maximální povolená délka je %(email_length)s znaků." + +#: addressimport/parsers.py:135 +#, python-format +msgid "Name %(name)s too long, maximum length is %(name_length)s characters." +msgstr "Jméno %(name)s je příliš dlouhé, maximální povolená délka je %(name_length)s znaků." + +#: addressimport/parsers.py:196 addressimport/parsers.py:208 +#: addressimport/parsers.py:243 models.py:162 +msgid "name" +msgstr "jméno" + +#: addressimport/parsers.py:200 +msgid "display" +msgstr "zobrazení" + +#: addressimport/parsers.py:207 +#, python-format +msgid "" +"Name column not found. The name of this column should be either 'name' or " +"'%s'." +msgstr "Jméno sloupce nenalezeno. Jméno sloupce musí být buď 'name' nebo '%s'." + +#: addressimport/parsers.py:219 addressimport/parsers.py:231 +#: addressimport/parsers.py:244 models.py:37 models.py:176 +msgid "e-mail" +msgstr "e-mail" + +#: addressimport/parsers.py:229 +#, python-format +msgid "" +"E-mail column not found. The name of this column should be either 'email', " +"'e-mail' or '%(email)s'." +msgstr "Sloupec s e-mailem nebyl nalezen. Název tohoto sloupce by měly být buďto 'e-mail' nebo '%(email)s'." + +#: addressimport/parsers.py:239 +#, python-format +msgid "" +"Could not properly determine the proper columns in the CSV-file. There " +"should be a field called 'name' or '%(name)s' and one called 'e-mail' or " +"'%(e-mail)s'." +msgstr "Nelze správně určit správné jména sloupců v CSV souboru. Tam by mělo být pole s názvem 'name' nebo '%(name)s' a jeden s názvem 'e-mail' nebo '%(e-mail)s'." 
+ +#: addressimport/parsers.py:262 +#, python-format +msgid "Row with content '%(row)s' does not contain a name and email field." +msgstr "Řádek s obsahem '%(row)s' neobsahuje jméno a e-mailovou adresu." + +#: addressimport/parsers.py:288 +#, python-format +msgid "Error reading vCard file: %s" +msgstr "Chyba při čtení souboru formátu vCard: %s" + +#: addressimport/parsers.py:307 +#, python-format +msgid "Entry '%s' contains no email address." +msgstr "Položka '%s' neobsahuje žádnou e-mailovou adresu." + +#: addressimport/parsers.py:343 +msgid "Some entries have no e-mail address." +msgstr "Některé položky nemají e-mailovou adresu." + +#: admin.py:63 +msgid "Messages" +msgstr "Zprávy" + +#: admin.py:71 +#: templates/admin/newsletter/subscription/confirmimportform.html:16 +#: templates/admin/newsletter/subscription/importform.html:16 +msgid "Subscriptions" +msgstr "Přihlášení" + +#: admin.py:77 +msgid "Submissions" +msgstr "Rozeslání zpráv" + +#: admin.py:97 models.py:537 +msgid "submission" +msgstr "rozeslání zprávy" + +#: admin.py:104 admin.py:247 admin.py:365 models.py:100 models.py:296 +#: models.py:487 models.py:665 +msgid "newsletter" +msgstr "newsletter" + +#: admin.py:112 +msgid "publish date" +msgstr "datum zveřejnění" + +#: admin.py:138 +msgid "Sent." +msgstr "Odesláno." + +#: admin.py:141 +msgid "Delayed submission." +msgstr "Odložené rozeslání." + +#: admin.py:143 +msgid "Submitting." +msgstr "Přihlášen." + +#: admin.py:145 +msgid "Not sent." +msgstr "Nebylo zasláno." + +#: admin.py:146 admin.py:390 admin_forms.py:113 +msgid "Status" +msgstr "Status" + +#: admin.py:153 +msgid "Submission already sent." +msgstr "Rozeslání zprávy bylo provedeno již dříve." + +#: admin.py:162 +msgid "Your submission is being sent." +msgstr "Vaše zpráva se právě rozesílá." 
+ +#: admin.py:208 +msgid "Optional" +msgstr "Volitelné" + +#: admin.py:235 models.py:460 models.py:512 models.py:668 +msgid "message" +msgstr "zpráva" + +#: admin.py:239 templates/admin/newsletter/message/change_form.html.py:9 +#: templates/admin/newsletter/message/preview.html:13 +msgid "Preview" +msgstr "Náhled" + +#: admin.py:267 views.py:606 +msgid "" +"No HTML template associated with the newsletter this message belongs to." +msgstr "Tato zpráva nemá k dispozici žádnou HTML šablonu přiřazenou k newsletteru." + +#: admin.py:385 admin_forms.py:101 +msgid "Subscribed" +msgstr "Přihlášeno" + +#: admin.py:387 admin_forms.py:102 +msgid "Unsubscribed" +msgstr "Odhlášen" + +#: admin.py:389 +msgid "Unactivated" +msgstr "Deaktivován" + +#: admin.py:397 models.py:309 +msgid "subscribe date" +msgstr "datum přihlášení" + +#: admin.py:404 models.py:317 +msgid "unsubscribe date" +msgstr "datum odhlášení" + +#: admin.py:412 +#, python-format +msgid "%s user has been successfully subscribed." +msgid_plural "%s users have been successfully subscribed." +msgstr[0] "%s uživatel byl právě úspěšně přihlášen k odběru novinek." +msgstr[1] "%s uživatelé bylo právě úspěšně přihlášeni k odběru novinek." +msgstr[2] "%s uživatelů bylo právě úspěšně přihlášeno k odběru novinek." + +#: admin.py:417 +msgid "Subscribe selected users" +msgstr "Přihlásit vybrané uživatele." + +#: admin.py:424 +#, python-format +msgid "%s user has been successfully unsubscribed." +msgid_plural "%s users have been successfully unsubscribed." +msgstr[0] "%s uživatel byl právě úspěšně odhlášen z odběru novinek." +msgstr[1] "%s uživatelé byli právě úspěšně odhlášeni z odběru novinek." +msgstr[2] "%s uživatelů bylo právě úspěšně odhlášeno z odběru novinek." + +#: admin.py:429 +msgid "Unsubscribe selected users" +msgstr "Odhlásit vybrané uživatele." + +#: admin.py:484 +#, python-format +msgid "%s subscriptions have been successfully added." +msgstr "Úspěšně bylo přidáno %s odběratelů newsletteru." 
+ +#: admin_forms.py:41 +#, python-format +msgid "File type '%s' was not recognized." +msgstr "Soubor typu '%s' nebyl rozpoznán." + +#: admin_forms.py:58 +#, python-format +msgid "File extension '%s' was not recognized." +msgstr "Přípona souboru '%s' nebyla rozpoznána." + +#: admin_forms.py:62 +msgid "No entries could found in this file." +msgstr "V souboru nebyly nalezeny žádné položky." + +#: admin_forms.py:70 +#: templates/admin/newsletter/subscription/confirmimportform.html:12 +#: templates/admin/newsletter/subscription/importform.html:12 +#: templates/newsletter/common.html:7 +#: templates/newsletter/newsletter_detail.html:10 +#: templates/newsletter/newsletter_list.html:14 +#: templates/newsletter/newsletter_list.html:32 +#: templates/newsletter/subscription_activate.html:5 +#: templates/newsletter/subscription_activate.html:8 +#: templates/newsletter/subscription_subscribe_activated.html:5 +#: templates/newsletter/subscription_subscribe_activated.html:8 +#: templates/newsletter/subscription_unsubscribe_activated.html:5 +#: templates/newsletter/subscription_unsubscribe_activated.html:8 +#: templates/newsletter/subscription_update_activated.html:5 +#: templates/newsletter/subscription_update_activated.html:8 +msgid "Newsletter" +msgstr "Newsletter" + +#: admin_forms.py:73 +msgid "Address file" +msgstr "Soubor s adresami" + +#: admin_forms.py:75 +msgid "Ignore non-fatal errors" +msgstr "Ignorovat drobné chyby" + +#: admin_forms.py:86 +msgid "You should confirm in order to continue." +msgstr "Potvrďte prosím, pokud chcete pokračovat." + +#: admin_forms.py:89 +#: templates/admin/newsletter/subscription/confirmimportform.html:23 +#: templates/admin/newsletter/subscription/confirmimportform.html:28 +msgid "Confirm import" +msgstr "Potvrdit import" + +#: admin_forms.py:119 admin_forms.py:127 +msgid "If a user has been selected this field should remain empty." +msgstr "Toto pole musí zůstat prázdné, pokud byl vybrán uživatel." 
+ +#: admin_forms.py:137 +msgid "Either a user must be selected or an email address must be specified." +msgstr "Buď musí být vybrán uživatel nebo musí být uvedena e-mailová adresa." + +#: admin_forms.py:162 +msgid "" +"This message has already been published in some other submission. Messages " +"can only be published once." +msgstr "Tato zpráva byla již zveřejněna v jiném zaslání newsletteru. Zprávy mohou být zveřejněny pouze jednou." + +#: admin_utils.py:25 +#, python-format +msgid "%(name)s object with primary key %(key)r does not exist." +msgstr "Objekt %(name)s s primárním klíčem %(key)r neexistuje." + +#: forms.py:47 forms.py:106 +msgid "An e-mail address is required." +msgstr "E-mailová adresa je povinná." + +#: forms.py:55 +#, python-format +msgid "" +"The e-mail address '%(email)s' belongs to a user with an account on this " +"site. Please log in as that user and try again." +msgstr "E-mailová adresa '%(email)s' patří uživateli s účtem na tomto webu. Prosím, přihlaste se jako tento uživatel a zkuste to znovu." + +#: forms.py:72 +msgid "Your e-mail address has already been subscribed to." +msgstr "Vaše e-mailová adresa byla přihlášena k odběru novinek již dříve." + +#: forms.py:97 +msgid "This subscription has not yet been activated." +msgstr "Toto přihlášení k odběru ještě nebylo aktivováno." + +#: forms.py:114 +#, python-format +msgid "" +"This e-mail address belongs to the user '%(username)s'. Please log in as " +"that user and try again." +msgstr "Tato e-mailová adresa patří uživateli '%(username)s'. Prosím, přihlaste se jako tento uživatel a zkuste to znovu." + +#: forms.py:132 +msgid "This e-mail address has not been subscribed to." +msgstr "Tato e-mailová adresa nebyla přihlášena k odběru novinek." + +#: forms.py:147 +msgid "This subscription has already been unsubscribed from." +msgstr "Toto přihlášení k odběru bylo již zrušeno." + +#: forms.py:163 +msgid "The validation code supplied by you does not match." 
+msgstr "Kontrolní kód, který jste zadal(a), není správný." + +#: forms.py:169 +msgid "Activation code" +msgstr "Aktivační kód" + +#: jobs/hourly/submit.py:15 +msgid "Submitting queued newsletter mailings" +msgstr "Rozeslání newsletteru bylo zařazeno do fronty." + +#: models.py:32 +msgid "newsletter title" +msgstr "titulek newsletteru" + +#: models.py:37 +msgid "Sender e-mail" +msgstr "E-mail odesílatele" + +#: models.py:40 +msgid "sender" +msgstr "odesílatel" + +#: models.py:40 +msgid "Sender name" +msgstr "Jméno odesílatele" + +#: models.py:44 +msgid "visible" +msgstr "viditelný" + +#: models.py:48 +msgid "send html" +msgstr "odeslat HTML" + +#: models.py:49 +msgid "Whether or not to send HTML versions of e-mails." +msgstr "Má-li se odeslat HTML verze e-mailu či nikoliv." + +#: models.py:101 +msgid "newsletters" +msgstr "newslettery" + +#: models.py:157 +msgid "user" +msgstr "uživatel" + +#: models.py:162 +msgid "optional" +msgstr "volitelné" + +#: models.py:206 +#, python-format +msgid "Updated subscription %(subscription)s to %(action)s." +msgstr "Aktualizováno přihlášení %(subscription)s k %(action)s." + +#: models.py:248 +msgid "Neither an email nor a username is set. This asks for inconsistency!" +msgstr "Není uvedena ani e-mailová adresa ani není vybrán uživatel." + +#: models.py:252 +msgid "If user is set, email must be null and vice versa." +msgstr "Pokud je nastaven uživatel, položka e-mailu musí být nevyplněna, a naopak." 
+ +#: models.py:294 +msgid "IP address" +msgstr "IP adresa" + +#: models.py:301 +msgid "activation code" +msgstr "aktivační kód" + +#: models.py:306 +msgid "subscribed" +msgstr "přihlášen" + +#: models.py:314 +msgid "unsubscribed" +msgstr "odhlášen" + +#: models.py:322 +#, python-format +msgid "%(name)s <%(email)s> to %(newsletter)s" +msgstr "%(name)s <%(email)s> k %(newsletter)s" + +#: models.py:329 +#, python-format +msgid "%(email)s to %(newsletter)s" +msgstr "%(email)s k %(newsletter)s" + +#: models.py:335 +msgid "subscription" +msgstr "přihlášení" + +#: models.py:336 +msgid "subscriptions" +msgstr "přihlášení" + +#: models.py:439 +msgid "" +"Sort order determines the order in which articles are concatenated in a " +"post." +msgstr "Pořadím je míněno řazení článků ve zprávě newsletteru." + +#: models.py:441 +msgid "sort order" +msgstr "pořadí" + +#: models.py:444 models.py:483 +msgid "title" +msgstr "titulek" + +#: models.py:445 +msgid "text" +msgstr "text" + +#: models.py:448 +msgid "link" +msgstr "odkaz" + +#: models.py:454 +msgid "image" +msgstr "obrázek" + +#: models.py:465 +msgid "article" +msgstr "článek" + +#: models.py:466 +msgid "articles" +msgstr "články" + +#: models.py:484 +msgid "slug" +msgstr "slug (část adresy)" + +#: models.py:491 +msgid "created" +msgstr "vytvořeno" + +#: models.py:494 +msgid "modified" +msgstr "změněno" + +#: models.py:499 +#, python-format +msgid "%(title)s in %(newsletter)s" +msgstr "%(title)s v %(newsletter)s" + +#: models.py:513 +msgid "messages" +msgstr "zprávy" + +#: models.py:538 +msgid "submissions" +msgstr "rozeslání zpráv" + +#: models.py:541 +#, python-format +msgid "%(newsletter)s on %(publish_date)s" +msgstr "%(newsletter)s v %(publish_date)s" + +#: models.py:550 +#, python-format +msgid "Submitting %(submission)s to %(count)d people" +msgstr "Odesláno %(submission)s %(count)d lidem" + +#: models.py:597 +#, python-format +msgid "Submitting message to: %s." +msgstr "Odeslání zprávy k : %s." 
+ +#: models.py:606 +#, python-format +msgid "Message %(subscription)s failed with error: %(error)s" +msgstr "Zpráva %(subscription)s skončila s chybou: %(error)s" + +#: models.py:630 +#, python-format +msgid "Submission of message %s" +msgstr "Rozeslání zprávy %s" + +#: models.py:673 +msgid "" +"If you select none, the system will automatically find the subscribers for " +"you." +msgstr "Pokud nikoho nevyberete, systém najde odběratele automaticky." + +#: models.py:675 +msgid "recipients" +msgstr "příjemci" + +#: models.py:680 +msgid "publication date" +msgstr "datum zveřejnění" + +#: models.py:684 +msgid "publish" +msgstr "zveřejnit" + +#: models.py:685 +msgid "Publish in archive." +msgstr "Zveřejnit v archivu." + +#: models.py:689 +msgid "prepared" +msgstr "připraveno" + +#: models.py:693 +msgid "sent" +msgstr "odesláno" + +#: models.py:697 +msgid "sending" +msgstr "zasíláno" + +#: templates/admin/newsletter/message/change_form.html:7 +#: templates/admin/newsletter/newsletter/change_form.html:7 +#: templates/admin/newsletter/submission/change_form.html:14 +msgid "History" +msgstr "Historie" + +#: templates/admin/newsletter/message/change_form.html:8 +#: templates/admin/newsletter/newsletter/change_form.html:8 +#: templates/admin/newsletter/submission/change_form.html:15 +msgid "View on site" +msgstr "Ukázat na webu" + +#: templates/admin/newsletter/message/preview.html:5 +#: templates/admin/newsletter/message/preview.html:19 +msgid "Preview message" +msgstr "Náhled zprávy" + +#: templates/admin/newsletter/message/preview.html:9 +#: templates/admin/newsletter/subscription/confirmimportform.html:8 +#: templates/admin/newsletter/subscription/importform.html:8 +msgid "Home" +msgstr "Domů" + +#: templates/admin/newsletter/message/preview.html:11 +msgid "Message" +msgstr "Zpráva" + +#: templates/admin/newsletter/message/preview.html:22 +#: templates/admin/newsletter/subscription/importform.html:28 +msgid "Change" +msgstr "Změnit" + +#: 
templates/admin/newsletter/message/preview.html:23 +#: templates/admin/newsletter/subscription/importform.html:29 +msgid "Create submission" +msgstr "Připravit rozeslání zprávy" + +#: templates/admin/newsletter/message/preview.html:26 +msgid "HTML" +msgstr "HTML" + +#: templates/admin/newsletter/message/preview.html:30 +msgid "Text" +msgstr "Text" + +#: templates/admin/newsletter/submission/change_form.html:16 +msgid "Submit" +msgstr "Odeslat" + +#: templates/admin/newsletter/subscription/change_list.html:8 +msgid "import" +msgstr "importovat" + +#: templates/admin/newsletter/subscription/change_list.html:12 +#, python-format +msgid "Add %(name)s" +msgstr "Přidat %(name)s" + +#: templates/admin/newsletter/subscription/confirmimportform.html:3 +#: templates/admin/newsletter/subscription/confirmimportform.html:20 +#: templates/admin/newsletter/subscription/importform.html:3 +#: templates/admin/newsletter/subscription/importform.html:19 +#: templates/admin/newsletter/subscription/importform.html:24 +msgid "Import addresses" +msgstr "Importovat adresy" + +#: templates/admin/newsletter/subscription/confirmimportform.html:40 +msgid "Confirm" +msgstr "Potvrdit" + +#: templates/admin/newsletter/subscription/importform.html:37 +msgid "Upload" +msgstr "Nahrát" + +#: templates/newsletter/newsletter_detail.html:5 +msgid "Newsletter detail" +msgstr "Nastavení" + +#: templates/newsletter/newsletter_list.html:5 +msgid "Newsletter list" +msgstr "Všechny newslettery" + +#: templates/newsletter/newsletter_list.html:16 +#: templates/newsletter/subscription_subscribe.html:23 +#: templates/newsletter/subscription_subscribe_user.html:22 +msgid "Subscribe" +msgstr "Přihlásit k odběru" + +#: templates/newsletter/newsletter_list.html:27 +msgid "Update subscriptions" +msgstr "Uložit" + +#: templates/newsletter/submission_archive.html:5 +#: templates/newsletter/submission_archive.html:10 +msgid "Newsletter archive" +msgstr "Archiv" + +#: templates/newsletter/subscription_activate.html:5 +#: 
templates/newsletter/subscription_activate.html:8 +#: templates/newsletter/subscription_subscribe_activated.html:5 +#: templates/newsletter/subscription_subscribe_activated.html:8 +#: templates/newsletter/subscription_unsubscribe_activated.html:5 +#: templates/newsletter/subscription_unsubscribe_activated.html:8 +#: templates/newsletter/subscription_update_activated.html:5 +#: templates/newsletter/subscription_update_activated.html:8 +msgid "activate" +msgstr "aktivovat" + +#: templates/newsletter/subscription_activate.html:13 +msgid "Activate" +msgstr "Aktivovat" + +#: templates/newsletter/subscription_subscribe.html:5 +#: templates/newsletter/subscription_subscribe.html:8 +#: templates/newsletter/subscription_subscribe_email_sent.html:5 +#: templates/newsletter/subscription_subscribe_email_sent.html:8 +#: templates/newsletter/subscription_subscribe_user.html:5 +#: templates/newsletter/subscription_subscribe_user.html:8 +msgid "Newsletter subscribe" +msgstr "Přihlásit k odběru newsletteru" + +#: templates/newsletter/subscription_subscribe.html:11 +#: templates/newsletter/subscription_unsubscribe.html:11 +#: templates/newsletter/subscription_update.html:11 +msgid "" +"Due to a technical error we were not able to submit your confirmation email." +" This could be because your email address is invalid." +msgstr "Vzhedem k technickým problémům jsme vám nemohli odeslat potvrzovací e-mail. Zřejmě to bylo způsobeno vaší neplatnou e-malovou adresou." + +#: templates/newsletter/subscription_subscribe_activated.html:10 +msgid "Your subscription has successfully been activated." +msgstr "Vaše přihlášení k odběru novinek bylo právě úspěšně aktivováno." + +#: templates/newsletter/subscription_subscribe_email_sent.html:10 +msgid "" +"Your subscription request was successfully received and an activation email " +"has been sent to you. In that email you will find a link which you need to " +"follow in order to activate your subscription." 
+msgstr "Vaše žádost o přihlášení k odběru novinek byla úspěšně přijata a byl vám zaslán e-mail s odkazem, který musíte potvrdit odkliknutím, aby vaše přihlášení k odběru novinek bylo aktivováno." + +#: templates/newsletter/subscription_subscribe_user.html:19 +msgid "Do you want to subscribe to this newsletter?" +msgstr "Chcete se opravdu přihlásit k odběru tohoto newsletteru?" + +#: templates/newsletter/subscription_unsubscribe.html:5 +#: templates/newsletter/subscription_unsubscribe.html:8 +#: templates/newsletter/subscription_unsubscribe_email_sent.html:5 +#: templates/newsletter/subscription_unsubscribe_email_sent.html:8 +#: templates/newsletter/subscription_unsubscribe_user.html:5 +#: templates/newsletter/subscription_unsubscribe_user.html:8 +msgid "Newsletter unsubscribe" +msgstr "Odhlásit z odběru newsletteru" + +#: templates/newsletter/subscription_unsubscribe.html:23 +#: templates/newsletter/subscription_unsubscribe_user.html:23 +msgid "Unsubscribe" +msgstr "Odhlásit z odběru" + +#: templates/newsletter/subscription_unsubscribe_activated.html:10 +msgid "You have successfully been unsubscribed." +msgstr "Právě jste byli úspěšně odhlášeni z odběru novinek." + +#: templates/newsletter/subscription_unsubscribe_email_sent.html:10 +msgid "" +"Your unsubscription request has successfully been received. An email has " +"been sent to you with a link you need to follow in order to confirm your " +"unsubscription." +msgstr "Vaše žádost k odhlášení byla úspěšně přijata. Byl vám zaslán e-mail s odkazem, jehož odkliknutím potvrdíte vaše odhlášení." + +#: templates/newsletter/subscription_unsubscribe_user.html:20 +msgid "Do you want to unsubscribe from this newsletter?" +msgstr "Chcete se opravdu odhlásit z odběru tohoto newsletteru?" 
+ +#: templates/newsletter/subscription_update.html:5 +#: templates/newsletter/subscription_update.html:8 +#: templates/newsletter/subscription_update_email_sent.html:5 +#: templates/newsletter/subscription_update_email_sent.html:8 +msgid "Newsletter update" +msgstr "Aktualizovat newsletter" + +#: templates/newsletter/subscription_update.html:23 +msgid "Update subscription" +msgstr "Aktualizovat údaje přihlášení" + +#: templates/newsletter/subscription_update_activated.html:10 +msgid "Your subscription has successfully been updated." +msgstr "Vaše přihlášení k odběru novinek byl právě úspěšně aktualizováno." + +#: templates/newsletter/subscription_update_email_sent.html:10 +msgid "" +"Your update request was successfully received and an activation email has " +"been sent to you. In that email you will find a link which you need to " +"follow in order to update your subscription." +msgstr "Vaše žádost o aktualizaci údajů byla úspěšně přijata a byl vám zaslán aktivační e-mail. V tomto e-mailu najdete odkaz, který potvrdíte odkliknutím, a pak budou vaše údaje aktualizovány." + +#: templates/widget/image.html:2 +msgid "Currently:" +msgstr "Aktuálně:" + +#: templates/widget/image.html:4 +msgid "Change:" +msgstr "Změna:" + +#: views.py:114 +msgid "Your changes have been saved." +msgstr "Vaše změny byly uloženy." + +#: views.py:305 +#, python-format +msgid "You have been subscribed to %s." +msgstr "Právě jste byli přihlášeni k \"%s\"." + +#: views.py:309 +#, python-format +msgid "User %(rs)s subscribed to %(my_newsletter)s." +msgstr "Uživatel %(rs)s byl přihlášen k odběru novinek %(my_newsletter)s." + +#: views.py:319 +#, python-format +msgid "You are already subscribed to %s." +msgstr "Již jste byli přihlášeni k newsletteru \"%s\" dříve." + +#: views.py:344 +#, python-format +msgid "You have been unsubscribed from %s." +msgstr "Právě jste byli odhlášeni z \"%s\"." + +#: views.py:348 +#, python-format +msgid "User %(rs)s unsubscribed from %(my_newsletter)s." 
+msgstr "Uživatel %(rs)s byl odhlášen z odběru novinek %(my_newsletter)s." + +#: views.py:361 +#, python-format +msgid "You are not subscribed to %s." +msgstr "Nejste přihlášeni k %s." diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/locale/cs_CZ/LC_MESSAGES/djangojs.mo b/thesisenv/lib/python3.6/site-packages/newsletter/locale/cs_CZ/LC_MESSAGES/djangojs.mo new file mode 100644 index 0000000..dbdd1fe Binary files /dev/null and b/thesisenv/lib/python3.6/site-packages/newsletter/locale/cs_CZ/LC_MESSAGES/djangojs.mo differ diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/locale/cs_CZ/LC_MESSAGES/djangojs.po b/thesisenv/lib/python3.6/site-packages/newsletter/locale/cs_CZ/LC_MESSAGES/djangojs.po new file mode 100644 index 0000000..7cc3bf0 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/newsletter/locale/cs_CZ/LC_MESSAGES/djangojs.po @@ -0,0 +1,25 @@ +# SOME DESCRIPTIVE TITLE. +# Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER +# This file is distributed under the same license as the PACKAGE package. +# +# Translators: +# tomaasch , 2014 +msgid "" +msgstr "" +"Project-Id-Version: django-newsletter\n" +"Report-Msgid-Bugs-To: \n" +"POT-Creation-Date: 2012-11-19 21:55+0100\n" +"PO-Revision-Date: 2014-04-28 17:20+0000\n" +"Last-Translator: tomaasch \n" +"Language-Team: Czech (Czech Republic) (http://www.transifex.com/dokterbob/django-newsletter/language/cs_CZ/)\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Language: cs_CZ\n" +"Plural-Forms: nplurals=3; plural=(n==1) ? 0 : (n>=2 && n<=4) ? 1 : 2;\n" + +#: static/newsletter/admin/js/submit_interface.js:12 +msgid "" +"The submission has been changed. It has to be saved before you can submit. " +"Click OK to proceed with saving, click cancel to continue editing." +msgstr "Zpráva newsletteru byla změněna a před odesláním musí být uložena. 
Klikněte prosím na OK pokud ji chcete uložit, nebo klikněte na 'zrušit' pokud chcete pokračovat v úpravě zprávy." diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/locale/de/LC_MESSAGES/django.mo b/thesisenv/lib/python3.6/site-packages/newsletter/locale/de/LC_MESSAGES/django.mo new file mode 100644 index 0000000..0529e94 Binary files /dev/null and b/thesisenv/lib/python3.6/site-packages/newsletter/locale/de/LC_MESSAGES/django.mo differ diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/locale/de/LC_MESSAGES/django.po b/thesisenv/lib/python3.6/site-packages/newsletter/locale/de/LC_MESSAGES/django.po new file mode 100644 index 0000000..41183ee --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/newsletter/locale/de/LC_MESSAGES/django.po @@ -0,0 +1,852 @@ +# SOME DESCRIPTIVE TITLE. +# Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER +# This file is distributed under the same license as the PACKAGE package. +# +# Translators: +# Alexander Hartmann , 2018 +# Jannis Vajen, 2013 +# Jannis Vajen, 2012 +# Marc Richter , 2016 +# dokterbob , 2016 +msgid "" +msgstr "" +"Project-Id-Version: django-newsletter\n" +"Report-Msgid-Bugs-To: \n" +"POT-Creation-Date: 2017-11-16 11:31+0000\n" +"PO-Revision-Date: 2018-06-22 10:33+0000\n" +"Last-Translator: Alexander Hartmann \n" +"Language-Team: German (http://www.transifex.com/dokterbob/django-newsletter/language/de/)\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Language: de\n" +"Plural-Forms: nplurals=2; plural=(n != 1);\n" + +#: addressimport/parsers.py:40 +#, python-format +msgid "Entry '%s' does not contain a valid e-mail address." +msgstr "Eintrag '%s' enthält keine gültige E-Mail-Adresse." + +#: addressimport/parsers.py:55 +#, python-format +msgid "The address file contains duplicate entries for '%s'." +msgstr "Das Adressfeld enthält doppelte Einträge für '%s'." 
+ +#: addressimport/parsers.py:69 +msgid "Some entries are already subscribed to." +msgstr "Einige Einträge sind bereits in der Liste enthalten." + +#: addressimport/parsers.py:108 +#, python-format +msgid "" +"E-mail address %(email)s too long, maximum length is %(email_length)s " +"characters." +msgstr "Die E-Mail-Adresse %(email)s ist zu lang, die Maximallänge beträgt %(email_length)s Zeichen." + +#: addressimport/parsers.py:135 +#, python-format +msgid "Name %(name)s too long, maximum length is %(name_length)s characters." +msgstr "Der Name %(name)s ist zu lang, die Maximallänge beträgt %(name_length)s Zeichen." + +#: addressimport/parsers.py:196 addressimport/parsers.py:208 +#: addressimport/parsers.py:243 models.py:165 +msgid "name" +msgstr "Name" + +#: addressimport/parsers.py:200 +msgid "display" +msgstr "Anzeige" + +#: addressimport/parsers.py:207 +#, python-format +msgid "" +"Name column not found. The name of this column should be either 'name' or " +"'%s'." +msgstr "Namensspalte nicht gefunden. Die Spaltenbezeichnung sollte entwerder 'name' oder '%s' lauten." + +#: addressimport/parsers.py:219 addressimport/parsers.py:231 +#: addressimport/parsers.py:244 forms.py:43 forms.py:76 models.py:39 +#: models.py:179 +msgid "e-mail" +msgstr "E-Mail" + +#: addressimport/parsers.py:229 +#, python-format +msgid "" +"E-mail column not found. The name of this column should be either 'email', " +"'e-mail' or '%(email)s'." +msgstr "Feld für E-Mail-Adresse nicht gefunden. Der Name der Spalte sollte entweder 'email', 'e-mail' oder '%(email)s' lauten." + +#: addressimport/parsers.py:239 +#, python-format +msgid "" +"Could not properly determine the proper columns in the CSV-file. There " +"should be a field called 'name' or '%(name)s' and one called 'e-mail' or " +"'%(e-mail)s'." +msgstr "Die Felder der CSV-Datei konnten nicht korrekt bestimmt werden. Es sollte ein Feld 'name' oder '%(name)s' und ein Feld 'e-mail' oder '%(e-mail)s' geben." 
+ +#: addressimport/parsers.py:264 +#, python-format +msgid "Row with content '%(row)s' does not contain a name and email field." +msgstr "Zeile mit Inhalt '%(row)s' enthält weder Name noch E-Mail-Adresse." + +#: addressimport/parsers.py:290 +#, python-format +msgid "Error reading vCard file: %s" +msgstr "Ein Problem trat auf beim Einlesen der vCard-Datei: %s" + +#: addressimport/parsers.py:309 +#, python-format +msgid "Entry '%s' contains no email address." +msgstr "Eintrag '%s' enthält keine E-Mail-Adresse." + +#: addressimport/parsers.py:345 +msgid "Some entries have no e-mail address." +msgstr "Einige Einträge enthalten keine E-Mail-Adresse." + +#: admin.py:79 +msgid "Messages" +msgstr "Nachrichten" + +#: admin.py:83 +#: templates/admin/newsletter/subscription/confirmimportform.html:16 +#: templates/admin/newsletter/subscription/importform.html:16 +msgid "Subscriptions" +msgstr "Abonnenten" + +#: admin.py:87 +msgid "Submissions" +msgstr "Ausgaben" + +#: admin.py:99 models.py:102 models.py:300 models.py:480 models.py:699 +msgid "newsletter" +msgstr "Newsletter" + +#: admin.py:117 models.py:555 +msgid "submission" +msgstr "Ausgabe" + +#: admin.py:124 +msgid "publish date" +msgstr "Veröffentlichungsdatum" + +#: admin.py:154 +msgid "Sent." +msgstr "Versendet." + +#: admin.py:157 +msgid "Delayed submission." +msgstr "Verzögerte Ausgabe." + +#: admin.py:159 +msgid "Submitting." +msgstr "Wird versendet." + +#: admin.py:161 +msgid "Not sent." +msgstr "Nicht versendet." + +#: admin.py:162 admin.py:389 admin_forms.py:115 +msgid "Status" +msgstr "Status" + +#: admin.py:169 +msgid "Submission already sent." +msgstr "Ausgabe wurde bereits verschickt." + +#: admin.py:178 +msgid "Your submission is being sent." +msgstr "Die Ausgabe wird versandt." 
+ +#: admin.py:225 +msgid "Optional" +msgstr "Optional" + +#: admin.py:253 models.py:450 models.py:491 models.py:703 +msgid "message" +msgstr "Nachricht" + +#: admin.py:258 templates/admin/newsletter/message/change_form.html:8 +#: templates/admin/newsletter/message/preview.html:13 +msgid "Preview" +msgstr "Vorschau" + +#: admin.py:275 views.py:611 +msgid "" +"No HTML template associated with the newsletter this message belongs to." +msgstr "Es wurde keine HTML-Vorlage für den Newsletter gefunden, zu dem diese Nachricht gehört." + +#: admin.py:384 admin_forms.py:103 +msgid "Subscribed" +msgstr "Abonniert" + +#: admin.py:386 admin_forms.py:104 +msgid "Unsubscribed" +msgstr "Ausgetragen" + +#: admin.py:388 +msgid "Unactivated" +msgstr "noch nicht aktiviert" + +#: admin.py:396 models.py:314 +msgid "subscribe date" +msgstr "Abonnierungsdatum" + +#: admin.py:403 models.py:322 +msgid "unsubscribe date" +msgstr "Austragungsdatum" + +#: admin.py:411 +#, python-format +msgid "%d user has been successfully subscribed." +msgid_plural "%d users have been successfully subscribed." +msgstr[0] "%d Benutzer wurde erfolgreich hinzugefügt." +msgstr[1] "%d Benutzer wurden erfolgreich hinzugefügt." + +#: admin.py:416 +msgid "Subscribe selected users" +msgstr "Ausgewählte Abonnements aktivieren" + +#: admin.py:423 +#, python-format +msgid "%d user has been successfully unsubscribed." +msgid_plural "%d users have been successfully unsubscribed." +msgstr[0] "%d Benutzer wurde erfolgreich ausgetragen." +msgstr[1] "%d Benutzer wurden erfolgreich ausgetragen." + +#: admin.py:428 +msgid "Unsubscribe selected users" +msgstr "Ausgewählte Abonnements deaktivieren" + +#: admin.py:484 +#, python-format +msgid "%d subscription has been successfully added." +msgid_plural "%d subscriptions have been successfully added." +msgstr[0] "%d Abonnement wurde erfolgreich hinzugefügt." +msgstr[1] "%d Abonnements wurden erfolgreich hinzugefügt." 
+ +#: admin_forms.py:43 +#, python-format +msgid "File type '%s' was not recognized." +msgstr "Dateityp '%s' wurde nicht erkannt." + +#: admin_forms.py:60 +#, python-format +msgid "File extension '%s' was not recognized." +msgstr "Die Dateiendung '%s' wurde nicht erkannt." + +#: admin_forms.py:64 +msgid "No entries could found in this file." +msgstr "Es wurden keine Einträge in dieser Datei gefunden." + +#: admin_forms.py:72 +#: templates/admin/newsletter/subscription/confirmimportform.html:12 +#: templates/admin/newsletter/subscription/importform.html:12 +#: templates/newsletter/common.html:6 +#: templates/newsletter/newsletter_detail.html:10 +#: templates/newsletter/newsletter_list.html:14 +#: templates/newsletter/newsletter_list.html:32 +#: templates/newsletter/subscription_activate.html:5 +#: templates/newsletter/subscription_activate.html:8 +#: templates/newsletter/subscription_subscribe_activated.html:5 +#: templates/newsletter/subscription_subscribe_activated.html:8 +#: templates/newsletter/subscription_unsubscribe_activated.html:5 +#: templates/newsletter/subscription_unsubscribe_activated.html:8 +#: templates/newsletter/subscription_update_activated.html:5 +#: templates/newsletter/subscription_update_activated.html:8 +msgid "Newsletter" +msgstr "Newsletter" + +#: admin_forms.py:75 +msgid "Address file" +msgstr "Adressdatei" + +#: admin_forms.py:77 +msgid "Ignore non-fatal errors" +msgstr "Ignoriere nicht-schwerwiegende Fehler" + +#: admin_forms.py:88 +msgid "You should confirm in order to continue." +msgstr "Sie sollten den Vorgang bestätigen um fortzufahren." + +#: admin_forms.py:91 +#: templates/admin/newsletter/subscription/confirmimportform.html:23 +#: templates/admin/newsletter/subscription/confirmimportform.html:28 +msgid "Confirm import" +msgstr "Bestätigen Sie den Importierungsvorgang." + +#: admin_forms.py:121 admin_forms.py:129 +msgid "If a user has been selected this field should remain empty." 
+msgstr "Falls ein Benutzer ausgewählt wurde, sollte dieses Feld leer bleiben." + +#: admin_forms.py:139 +msgid "Either a user must be selected or an email address must be specified." +msgstr "Es muss entweder ein Benutzer ausgewählt, oder eine Adresse vergeben werden." + +#: admin_forms.py:164 +msgid "" +"This message has already been published in some other submission. Messages " +"can only be published once." +msgstr "Diese Nachricht wurde bereits durch eine andere Ausgabe im Archiv veröffentlicht. Nachrichten können aber nur einmal veröffentlicht werden." + +#: admin_utils.py:25 +#, python-format +msgid "%(name)s object with primary key %(key)r does not exist." +msgstr "Objekt %(name)s mit Primärschlüssel %(key)r existiert nicht. " + +#: forms.py:57 +msgid "Your e-mail address has already been subscribed to." +msgstr "Ihre E-Mail-Adresse ist bereits eingetragen." + +#: forms.py:84 +msgid "This subscription has not yet been activated." +msgstr "Dieses Abonnement wurde noch nicht aktiviert." + +#: forms.py:102 +msgid "This e-mail address has not been subscribed to." +msgstr "Diese E-Mail-Adresse ist nicht eingetragen." + +#: forms.py:117 +msgid "This subscription has already been unsubscribed from." +msgstr "Dieses Abonnement wurde bereits gekündigt." + +#: forms.py:133 +msgid "The validation code supplied by you does not match." +msgstr "Der Bestätigungsschlüssel stimmt nicht." 
+ +#: forms.py:139 +msgid "Activation code" +msgstr "Aktivierungsschlüssel" + +#: jobs/hourly/submit.py:15 +msgid "Submitting queued newsletter mailings" +msgstr "Verschicke Newsletter in Warteliste" + +#: models.py:34 +msgid "newsletter title" +msgstr "Newslettertitel" + +#: models.py:39 +msgid "Sender e-mail" +msgstr "Absenderadresse" + +#: models.py:42 +msgid "sender" +msgstr "Absender" + +#: models.py:42 +msgid "Sender name" +msgstr "Absendername" + +#: models.py:46 +msgid "visible" +msgstr "sichtbar" + +#: models.py:50 +msgid "send html" +msgstr "HTML verschicken" + +#: models.py:51 +msgid "Whether or not to send HTML versions of e-mails." +msgstr "Ob HTML-Versionen der E-Mails verschickt werden sollen." + +#: models.py:103 +msgid "newsletters" +msgstr "Newsletter" + +#: models.py:159 +msgid "user" +msgstr "Benutzer" + +#: models.py:165 +msgid "optional" +msgstr "optional" + +#: models.py:209 +#, python-format +msgid "Updated subscription %(subscription)s to %(action)s." +msgstr "Abonnement %(subscription)s wurde aktualisiert durch \"%(action)s\"." + +#: models.py:251 +msgid "Neither an email nor a username is set. This asks for inconsistency!" +msgstr "Es ist weder eine E-Mail-Adresse noch ein Benutzername angegeben. Das schreit nach Inkonsistenz!" + +#: models.py:255 +msgid "If user is set, email must be null and vice versa." +msgstr "Wenn ein Benutzer angegeben ist, muss das Adressfeld leer bleiben (und umgekehrt)." 
+ +#: models.py:297 +msgid "IP address" +msgstr "IP-Adresse" + +#: models.py:306 +msgid "activation code" +msgstr "Aktivierungsschlüssel" + +#: models.py:311 +msgid "subscribed" +msgstr "abonniert" + +#: models.py:319 +msgid "unsubscribed" +msgstr "ausgetragen" + +#: models.py:327 +#, python-format +msgid "%(name)s <%(email)s> to %(newsletter)s" +msgstr "%(name)s <%(email)s> bei %(newsletter)s" + +#: models.py:334 +#, python-format +msgid "%(email)s to %(newsletter)s" +msgstr "%(email)s bei %(newsletter)s" + +#: models.py:340 +msgid "subscription" +msgstr "Abonnement" + +#: models.py:341 +msgid "subscriptions" +msgstr "Abonnements" + +#: models.py:429 +msgid "" +"Sort order determines the order in which articles are concatenated in a " +"post." +msgstr "Sortierung legt fest, in welcher Reihenfolge die Artikel in der Nachricht zusammengefasst werden." + +#: models.py:431 +msgid "sort order" +msgstr "Sortierung" + +#: models.py:434 models.py:476 +msgid "title" +msgstr "Titel" + +#: models.py:435 +msgid "text" +msgstr "Text" + +#: models.py:438 +msgid "link" +msgstr "Link" + +#: models.py:444 +msgid "image" +msgstr "Bild" + +#: models.py:456 +msgid "article" +msgstr "Artikel" + +#: models.py:457 +msgid "articles" +msgstr "Artikel" + +#: models.py:477 +msgid "slug" +msgstr "Kurzform" + +#: models.py:484 +msgid "created" +msgstr "Erstellt" + +#: models.py:487 +msgid "modified" +msgstr "verändert" + +#: models.py:492 +msgid "messages" +msgstr "Nachrichten" + +#: models.py:497 +#, python-format +msgid "%(title)s in %(newsletter)s" +msgstr "%(title)s in %(newsletter)s" + +#: models.py:556 +msgid "submissions" +msgstr "Ausgaben" + +#: models.py:559 +#, python-format +msgid "%(newsletter)s on %(publish_date)s" +msgstr "%(newsletter)s vom %(publish_date)s" + +#: models.py:578 +#, python-format +msgid "Submitting %(submission)s to %(count)d people" +msgstr "Verschicke %(submission)s an %(count)d Personen" + +#: models.py:637 +#, python-format +msgid "Submitting message to: 
%s." +msgstr "Verschicke Nachricht an: %s." + +#: models.py:646 +#, python-format +msgid "Message %(subscription)s failed with error: %(error)s" +msgstr "Nachricht %(subscription)s schlug fehl: %(error)s" + +#: models.py:664 +#, python-format +msgid "Submission of message %s" +msgstr "Ausgabe der Nachricht %s" + +#: models.py:709 +msgid "" +"If you select none, the system will automatically find the subscribers for " +"you." +msgstr "Falls Sie keine auswählen, wird das System automatisch Abonnenten auswählen." + +#: models.py:711 +msgid "recipients" +msgstr "Empfänger" + +#: models.py:716 +msgid "publication date" +msgstr "Veröffentlichungsdatum" + +#: models.py:720 +msgid "publish" +msgstr "veröffentlichen" + +#: models.py:721 +msgid "Publish in archive." +msgstr "Im Archiv einsehbar." + +#: models.py:725 +msgid "prepared" +msgstr "vorbereitet" + +#: models.py:729 +msgid "sent" +msgstr "Versendet" + +#: models.py:733 +msgid "sending" +msgstr "Sende" + +#: templates/admin/newsletter/message/preview.html:5 +#: templates/admin/newsletter/message/preview.html:19 +msgid "Preview message" +msgstr "Nachrichtenvorschau" + +#: templates/admin/newsletter/message/preview.html:9 +#: templates/admin/newsletter/subscription/confirmimportform.html:8 +#: templates/admin/newsletter/subscription/importform.html:8 +msgid "Home" +msgstr "Startseite" + +#: templates/admin/newsletter/message/preview.html:11 +msgid "Message" +msgstr "Nachricht" + +#: templates/admin/newsletter/message/preview.html:22 +#: templates/admin/newsletter/subscription/importform.html:28 +msgid "Change" +msgstr "Änderung" + +#: templates/admin/newsletter/message/preview.html:23 +#: templates/admin/newsletter/subscription/importform.html:29 +msgid "Create submission" +msgstr "Ausgabe erstellen" + +#: templates/admin/newsletter/message/preview.html:26 +msgid "HTML" +msgstr "HTML" + +#: templates/admin/newsletter/message/preview.html:30 +msgid "Text" +msgstr "Mitteilung" + +#: 
templates/admin/newsletter/submission/change_form.html:16 +msgid "Submit" +msgstr "Abschicken" + +#: templates/admin/newsletter/subscription/change_list.html:5 +msgid "Import" +msgstr "Import" + +#: templates/admin/newsletter/subscription/confirmimportform.html:3 +#: templates/admin/newsletter/subscription/confirmimportform.html:20 +#: templates/admin/newsletter/subscription/importform.html:3 +#: templates/admin/newsletter/subscription/importform.html:19 +#: templates/admin/newsletter/subscription/importform.html:24 +msgid "Import addresses" +msgstr "Adressen importieren" + +#: templates/admin/newsletter/subscription/confirmimportform.html:40 +msgid "Confirm" +msgstr "Bestätigen" + +#: templates/admin/newsletter/subscription/importform.html:37 +msgid "Upload" +msgstr "Hochladen" + +#: templates/newsletter/message/message.html:21 +msgid "Read more" +msgstr "weiter lesen" + +#: templates/newsletter/message/message.html:27 +msgid "Read message online" +msgstr "Nachricht online lesen" + +#: templates/newsletter/message/message.html:29 +#: templates/newsletter/newsletter_detail.html:21 +#: templates/newsletter/subscription_unsubscribe.html:23 +#: templates/newsletter/subscription_unsubscribe_user.html:23 +msgid "Unsubscribe" +msgstr "Abmelden" + +#: templates/newsletter/message/message.txt:15 +msgid "Unsubscribe:" +msgstr "Abmelden:" + +#: templates/newsletter/message/subscribe.html:6 +#, python-format +msgid "Subscription to %(title)s" +msgstr "Angemeldet in %(title)s" + +#: templates/newsletter/message/subscribe.html:10 +#: templates/newsletter/message/subscribe.txt:1 +#, python-format +msgid "" +"Dear %(name)s,\n" +"\n" +"you, or someone in your name requested a subscription to %(title)s.\n" +"\n" +"If you would like to confirm your subscription, please follow this activation link:\n" +"http://%(domain)s%(url)s\n" +"\n" +"Kind regards," +msgstr "Guten Tag %(name)s,\n\nSie, oder eine andere Person in Ihrem Namen hat die Anmeldung in %(title)s vorgenommen.\n\nWenn Sie 
die Anmeldung bestätigen möchten klicken Sie bitte auf folgenden Link:\nhttps://%(domain)s%(url)s\n\nMit freundlichen Grüßen" + +#: templates/newsletter/message/subscribe_subject.txt:1 +msgid "Confirm subscription" +msgstr "Anmeldung bestätigen" + +#: templates/newsletter/message/unsubscribe.html:6 +#, python-format +msgid "Unsubscription from %(title)s" +msgstr "Abmelden von %(title)s" + +#: templates/newsletter/message/unsubscribe.html:9 +#: templates/newsletter/message/unsubscribe.txt:1 +#, python-format +msgid "" +"Dear %(name)s,\n" +"\n" +"you, or someone in your name requested unsubscription from %(title)s.\n" +"\n" +"If you would like to confirm your unsubscription, please follow this activation link:\n" +"http://%(domain)s%(url)s\n" +"\n" +"Kind regards," +msgstr "Guten Tag %(name)s,\n\nSie, oder eine andere Person in Ihrem Namen hat die Abmeldung von %(title)s vorgenommen .\n\nWenn Sie die Abmeldung bestätigen möchten klicken Sie bitte auf folgenden Link:\nhttps://%(domain)s%(url)s\n\nMit freundlichen Grüßen" + +#: templates/newsletter/message/unsubscribe_subject.txt:1 +msgid "Confirm unsubscription" +msgstr "Abmeldung bestätigen" + +#: templates/newsletter/message/update.html:6 +#, python-format +msgid "Update of subscription to %(title)s" +msgstr "Aktualisierung der Anmeldung in %(title)s" + +#: templates/newsletter/message/update.html:9 +#: templates/newsletter/message/update.txt:1 +#, python-format +msgid "" +"Dear %(name)s,\n" +"\n" +"you, or someone in your name requested updating your personal information for %(title)s.\n" +"\n" +"To make changes to your information in our database, please follow this activation link:\n" +"http://%(domain)s%(url)s\n" +"\n" +"Kind regards," +msgstr "Guten Tag %(name)s,\n\nSie, oder eine andere Person in Ihrem Namen hat die Aktualisierung von %(title)s vorgenommen .\n\nWenn Sie die Aktualisierung bestätigen möchten klicken Sie bitte auf folgenden Link::\nhttps://%(domain)s%(url)s\n\nMit freundlichen Grüßen" + +#: 
templates/newsletter/message/update_subject.txt:1 +msgid "Update information" +msgstr "Aktualisiere Information" + +#: templates/newsletter/newsletter_detail.html:5 +msgid "Newsletter detail" +msgstr "Newsletterdetails" + +#: templates/newsletter/newsletter_detail.html:13 +#: templates/newsletter/newsletter_list.html:16 +#: templates/newsletter/subscription_subscribe.html:23 +#: templates/newsletter/subscription_subscribe_user.html:22 +msgid "Subscribe" +msgstr "Anmelden" + +#: templates/newsletter/newsletter_detail.html:17 +msgid "Update" +msgstr "Aktualisierung" + +#: templates/newsletter/newsletter_detail.html:24 +msgid "Archive" +msgstr "Archiv" + +#: templates/newsletter/newsletter_detail.html:27 +#: templates/newsletter/submission_archive.html:18 +msgid "Back to list" +msgstr "zurück zur Liste" + +#: templates/newsletter/newsletter_list.html:5 +msgid "Newsletter list" +msgstr "Newsletterliste" + +#: templates/newsletter/newsletter_list.html:27 +msgid "Update subscriptions" +msgstr "Abonnements aktualisieren" + +#: templates/newsletter/submission_archive.html:5 +#: templates/newsletter/submission_archive.html:10 +msgid "Newsletter archive" +msgstr "Newsletterarchiv" + +#: templates/newsletter/subscription_activate.html:5 +#: templates/newsletter/subscription_activate.html:8 +#: templates/newsletter/subscription_subscribe_activated.html:5 +#: templates/newsletter/subscription_subscribe_activated.html:8 +#: templates/newsletter/subscription_unsubscribe_activated.html:5 +#: templates/newsletter/subscription_unsubscribe_activated.html:8 +#: templates/newsletter/subscription_update_activated.html:5 +#: templates/newsletter/subscription_update_activated.html:8 +msgid "activate" +msgstr "aktivieren" + +#: templates/newsletter/subscription_activate.html:13 +msgid "Activate" +msgstr "Aktivieren" + +#: templates/newsletter/subscription_subscribe.html:5 +#: templates/newsletter/subscription_subscribe.html:8 +#: 
templates/newsletter/subscription_subscribe_email_sent.html:5 +#: templates/newsletter/subscription_subscribe_email_sent.html:8 +#: templates/newsletter/subscription_subscribe_user.html:5 +#: templates/newsletter/subscription_subscribe_user.html:8 +msgid "Newsletter subscribe" +msgstr "Newsletter abonnieren" + +#: templates/newsletter/subscription_subscribe.html:11 +#: templates/newsletter/subscription_unsubscribe.html:11 +#: templates/newsletter/subscription_update.html:11 +msgid "" +"Due to a technical error we were not able to submit your confirmation email." +" This could be because your email address is invalid." +msgstr "Durch einen technischen Fehler konnte die Aktivierungsmail nicht verschickt werden. Dies kann daran liegen, dass die E-Mail-Adresse ungültig ist." + +#: templates/newsletter/subscription_subscribe_activated.html:10 +msgid "Your subscription has successfully been activated." +msgstr "Ihr Abonnement wurde erfolgreich aktiviert." + +#: templates/newsletter/subscription_subscribe_email_sent.html:10 +msgid "" +"Your subscription request was successfully received and an activation email " +"has been sent to you. In that email you will find a link which you need to " +"follow in order to activate your subscription." +msgstr "Dein Abonnement wurde erfolgreich entgegengenommen und eine E-Mail mit einem Bestätigungslink an dich verschickt. Dieser Bestätigungslink muss aufgerufen werden, um das Abonnement zu aktivieren." + +#: templates/newsletter/subscription_subscribe_user.html:19 +msgid "Do you want to subscribe to this newsletter?" +msgstr "Wollen Sie diesen Newsletter abonnieren?" 
+ +#: templates/newsletter/subscription_unsubscribe.html:5 +#: templates/newsletter/subscription_unsubscribe.html:8 +#: templates/newsletter/subscription_unsubscribe_email_sent.html:5 +#: templates/newsletter/subscription_unsubscribe_email_sent.html:8 +#: templates/newsletter/subscription_unsubscribe_user.html:5 +#: templates/newsletter/subscription_unsubscribe_user.html:8 +msgid "Newsletter unsubscribe" +msgstr "Newsletter abbestellen" + +#: templates/newsletter/subscription_unsubscribe_activated.html:10 +msgid "You have successfully been unsubscribed." +msgstr "Ihr Abonnement wurde erfolgreich gekündigt." + +#: templates/newsletter/subscription_unsubscribe_email_sent.html:10 +msgid "" +"Your unsubscription request has successfully been received. An email has " +"been sent to you with a link you need to follow in order to confirm your " +"unsubscription." +msgstr "Die Kündigung deines Abonnements wurde erfolgreich entgegengenommen. Eine E-Mail mit einem Link zur Bestätigung deiner Kündigung wurde an dich verschickt." + +#: templates/newsletter/subscription_unsubscribe_user.html:20 +msgid "Do you want to unsubscribe from this newsletter?" +msgstr "Wollen Sie das Abonnement dieses Newsletters kündigen?" + +#: templates/newsletter/subscription_update.html:5 +#: templates/newsletter/subscription_update.html:8 +#: templates/newsletter/subscription_update_email_sent.html:5 +#: templates/newsletter/subscription_update_email_sent.html:8 +msgid "Newsletter update" +msgstr "Newsletter-Aktualisierung" + +#: templates/newsletter/subscription_update.html:23 +msgid "Update subscription" +msgstr "Abonnement aktualisieren" + +#: templates/newsletter/subscription_update_activated.html:10 +msgid "Your subscription has successfully been updated." +msgstr "Ihr Abonnement wurde erfolgreich aktualisiert." + +#: templates/newsletter/subscription_update_email_sent.html:10 +msgid "" +"Your update request was successfully received and an activation email has " +"been sent to you. 
In that email you will find a link which you need to " +"follow in order to update your subscription." +msgstr "Die Aktualisierung deines Abonnements wurde erfolgreich entgegengenommen und eine E-Mail verschickt. In dieser E-Mail befindet sich ein Link, den es aufzurufen gilt, um die Änderungen zu bestätigen." + +#: templates/widget/image.html:2 +msgid "Currently:" +msgstr "Momentan:" + +#: templates/widget/image.html:4 +msgid "Change:" +msgstr "Änderung:" + +#: validators.py:15 +#, python-format +msgid "" +"The e-mail address '%(email)s' belongs to a user with an account on this " +"site. Please log in as that user and try again." +msgstr "Die E-Mail-Adresse '%(email)s' ist mit einem Konto auf dieser Seite verknüpft. Bitte melden Sie sich mit diesem Konto an und versuchen Sie es erneut." + +#: views.py:120 +msgid "Your changes have been saved." +msgstr "Ihre Änderungen wurden gespeichert." + +#: views.py:311 +#, python-format +msgid "You have been subscribed to %s." +msgstr "Sie haben %s abonniert." + +#: views.py:315 +#, python-format +msgid "User %(rs)s subscribed to %(my_newsletter)s." +msgstr "Benutzer %(rs)s abonnierte %(my_newsletter)s." + +#: views.py:325 +#, python-format +msgid "You are already subscribed to %s." +msgstr "Sie haben %s bereits abonniert." + +#: views.py:350 +#, python-format +msgid "You have been unsubscribed from %s." +msgstr "Sie haben das Abonnement von %s gekündigt." + +#: views.py:354 +#, python-format +msgid "User %(rs)s unsubscribed from %(my_newsletter)s." +msgstr "Benutzer %(rs)s kündigte Abonnement von %(my_newsletter)s." + +#: views.py:367 +#, python-format +msgid "You are not subscribed to %s." +msgstr "Sie haben %s nicht abonniert." 
diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/locale/de/LC_MESSAGES/djangojs.mo b/thesisenv/lib/python3.6/site-packages/newsletter/locale/de/LC_MESSAGES/djangojs.mo new file mode 100644 index 0000000..fbdad5c Binary files /dev/null and b/thesisenv/lib/python3.6/site-packages/newsletter/locale/de/LC_MESSAGES/djangojs.mo differ diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/locale/de/LC_MESSAGES/djangojs.po b/thesisenv/lib/python3.6/site-packages/newsletter/locale/de/LC_MESSAGES/djangojs.po new file mode 100644 index 0000000..80f748b --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/newsletter/locale/de/LC_MESSAGES/djangojs.po @@ -0,0 +1,25 @@ +# SOME DESCRIPTIVE TITLE. +# Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER +# This file is distributed under the same license as the PACKAGE package. +# +# Translators: +# Jannis Vajen, 2012 +msgid "" +msgstr "" +"Project-Id-Version: django-newsletter\n" +"Report-Msgid-Bugs-To: \n" +"POT-Creation-Date: 2012-11-19 21:55+0100\n" +"PO-Revision-Date: 2013-11-20 12:25+0000\n" +"Last-Translator: Jannis Vajen\n" +"Language-Team: German (http://www.transifex.com/dokterbob/django-newsletter/language/de/)\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Language: de\n" +"Plural-Forms: nplurals=2; plural=(n != 1);\n" + +#: static/newsletter/admin/js/submit_interface.js:12 +msgid "" +"The submission has been changed. It has to be saved before you can submit. " +"Click OK to proceed with saving, click cancel to continue editing." +msgstr "Die Ausgabe wurde verändert. Vor dem Verschicken muss sie zunächst gespeichert werden. Klicken Sie auf OK um zu speichern, auf Abbrechen um mit dem Editieren fortzufahren." 
diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/locale/el_GR/LC_MESSAGES/django.mo b/thesisenv/lib/python3.6/site-packages/newsletter/locale/el_GR/LC_MESSAGES/django.mo new file mode 100644 index 0000000..43cf268 Binary files /dev/null and b/thesisenv/lib/python3.6/site-packages/newsletter/locale/el_GR/LC_MESSAGES/django.mo differ diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/locale/el_GR/LC_MESSAGES/django.po b/thesisenv/lib/python3.6/site-packages/newsletter/locale/el_GR/LC_MESSAGES/django.po new file mode 100644 index 0000000..3bd6ce1 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/newsletter/locale/el_GR/LC_MESSAGES/django.po @@ -0,0 +1,849 @@ +# SOME DESCRIPTIVE TITLE. +# Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER +# This file is distributed under the same license as the PACKAGE package. +# +# Translators: +# dokterbob , 2018 +# Sophocles Tsivides , 2017 +msgid "" +msgstr "" +"Project-Id-Version: django-newsletter\n" +"Report-Msgid-Bugs-To: \n" +"POT-Creation-Date: 2017-11-16 11:31+0000\n" +"PO-Revision-Date: 2018-03-25 19:06+0000\n" +"Last-Translator: dokterbob \n" +"Language-Team: Greek (Greece) (http://www.transifex.com/dokterbob/django-newsletter/language/el_GR/)\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Language: el_GR\n" +"Plural-Forms: nplurals=2; plural=(n != 1);\n" + +#: addressimport/parsers.py:40 +#, python-format +msgid "Entry '%s' does not contain a valid e-mail address." +msgstr "Η καταχώρηση '%s' δεν περιέχει έγκυρη διεύθυνση e-mail." + +#: addressimport/parsers.py:55 +#, python-format +msgid "The address file contains duplicate entries for '%s'." +msgstr "Το αρχείο διευθύνσεων περιέχει διπλές καταχωρήσεις για '%s΄." + +#: addressimport/parsers.py:69 +msgid "Some entries are already subscribed to." +msgstr "Ορισμένες καταχωρήσεις έχουν ήδη εγγραφεί." 
+ +#: addressimport/parsers.py:108 +#, python-format +msgid "" +"E-mail address %(email)s too long, maximum length is %(email_length)s " +"characters." +msgstr "Η διεύθυνση e-mail %(email)s είναι πολύ μεγάλη, το μέγιστο μήκος είναι %(email_length)s χαρακτήρες." + +#: addressimport/parsers.py:135 +#, python-format +msgid "Name %(name)s too long, maximum length is %(name_length)s characters." +msgstr "Το όνομα %(name)s είναι πολύ μεγάλο, το μέγιστο μήκος είναι %(name_length)s χαρακτήρες." + +#: addressimport/parsers.py:196 addressimport/parsers.py:208 +#: addressimport/parsers.py:243 models.py:165 +msgid "name" +msgstr "όνομα" + +#: addressimport/parsers.py:200 +msgid "display" +msgstr "προβολή" + +#: addressimport/parsers.py:207 +#, python-format +msgid "" +"Name column not found. The name of this column should be either 'name' or " +"'%s'." +msgstr "Δεν βρέθηκε στήλη ονόματος. Το όνομα αυτής της στήλης πρέπει να είναι είτε 'όνομα' είτε '%s'." + +#: addressimport/parsers.py:219 addressimport/parsers.py:231 +#: addressimport/parsers.py:244 forms.py:43 forms.py:76 models.py:39 +#: models.py:179 +msgid "e-mail" +msgstr "e-mail" + +#: addressimport/parsers.py:229 +#, python-format +msgid "" +"E-mail column not found. The name of this column should be either 'email', " +"'e-mail' or '%(email)s'." +msgstr "Δεν βρέθηκε στήλη e-mail. Το όνομα αυτής της στήλης πρέπει να είναι είτε 'e-mail' είτε '%(email)s'." + +#: addressimport/parsers.py:239 +#, python-format +msgid "" +"Could not properly determine the proper columns in the CSV-file. There " +"should be a field called 'name' or '%(name)s' and one called 'e-mail' or " +"'%(e-mail)s'." +msgstr "Δεν ήταν δυνατό να προσδιοριστούν σωστά οι στήλες στο αρχείο CSV. Πρέπει να υπάρχει ένα πεδίο που ονομάζεται 'όνομα' ή %(name)s και ένα που ονομάζεται 'e-mail' ή '%(e-mail)s'." + +#: addressimport/parsers.py:264 +#, python-format +msgid "Row with content '%(row)s' does not contain a name and email field." 
+msgstr "Η σειρά με περιεχόμενο '%(row)s' δεν περιέχει πεδίο ονόματος και e-mail." + +#: addressimport/parsers.py:290 +#, python-format +msgid "Error reading vCard file: %s" +msgstr "Σφάλμα ανάγνωσης αρχείου vCard: %s" + +#: addressimport/parsers.py:309 +#, python-format +msgid "Entry '%s' contains no email address." +msgstr "Η καταχώρηση '%s' δεν περιέχει διεύθυνση e-mail." + +#: addressimport/parsers.py:345 +msgid "Some entries have no e-mail address." +msgstr "Ορισμένες καταχωρήσεις δεν έχουν διεύθυνση e-mail." + +#: admin.py:79 +msgid "Messages" +msgstr "Μηνύματα" + +#: admin.py:83 +#: templates/admin/newsletter/subscription/confirmimportform.html:16 +#: templates/admin/newsletter/subscription/importform.html:16 +msgid "Subscriptions" +msgstr "Συνδρομές" + +#: admin.py:87 +msgid "Submissions" +msgstr "Υποβολές" + +#: admin.py:99 models.py:102 models.py:300 models.py:480 models.py:699 +msgid "newsletter" +msgstr "ενημερωτικό δελτίο" + +#: admin.py:117 models.py:555 +msgid "submission" +msgstr "υποβολή" + +#: admin.py:124 +msgid "publish date" +msgstr "ημερομηνία δημοσίευσης" + +#: admin.py:154 +msgid "Sent." +msgstr "Απεστάλη." + +#: admin.py:157 +msgid "Delayed submission." +msgstr "Καθυστερημένη υποβολή." + +#: admin.py:159 +msgid "Submitting." +msgstr "Υποβολή." + +#: admin.py:161 +msgid "Not sent." +msgstr "Δεν απεστάλη." + +#: admin.py:162 admin.py:389 admin_forms.py:115 +msgid "Status" +msgstr "Κατάσταση" + +#: admin.py:169 +msgid "Submission already sent." +msgstr "Η υποβολή έχει ήδη σταλεί." + +#: admin.py:178 +msgid "Your submission is being sent." +msgstr "Η υποβολή σας αποστέλλεται." 
+ +#: admin.py:225 +msgid "Optional" +msgstr "Προαιρετικό" + +#: admin.py:253 models.py:450 models.py:491 models.py:703 +msgid "message" +msgstr "μήνυμα" + +#: admin.py:258 templates/admin/newsletter/message/change_form.html:8 +#: templates/admin/newsletter/message/preview.html:13 +msgid "Preview" +msgstr "Προεπισκόπηση" + +#: admin.py:275 views.py:611 +msgid "" +"No HTML template associated with the newsletter this message belongs to." +msgstr "Δεν υπάρχει πρότυπο HTML που να σχετίζεται με το newsletter στο οποίο ανήκει αυτό το μήνυμα." + +#: admin.py:384 admin_forms.py:103 +msgid "Subscribed" +msgstr "Εγγεγραμμένος" + +#: admin.py:386 admin_forms.py:104 +msgid "Unsubscribed" +msgstr "Έχει διαγραφεί" + +#: admin.py:388 +msgid "Unactivated" +msgstr "Μη ενεργοποιημένος" + +#: admin.py:396 models.py:314 +msgid "subscribe date" +msgstr "ημερομηνία εγγραφής" + +#: admin.py:403 models.py:322 +msgid "unsubscribe date" +msgstr "ημερομηνία διαγραφής" + +#: admin.py:411 +#, python-format +msgid "%d user has been successfully subscribed." +msgid_plural "%d users have been successfully subscribed." +msgstr[0] "%d χρήστης έχει εγγραφεί επιτυχώς." +msgstr[1] "%d χρήστες έχουν εγγραφεί επιτυχώς." + +#: admin.py:416 +msgid "Subscribe selected users" +msgstr "Εγγραφή επιλεγμένων χρηστών" + +#: admin.py:423 +#, python-format +msgid "%d user has been successfully unsubscribed." +msgid_plural "%d users have been successfully unsubscribed." +msgstr[0] "%d χρήστης έχει διαγραφεί επιτυχώς." +msgstr[1] "%d χρήστες έχουν διαγραφεί επιτυχώς." + +#: admin.py:428 +msgid "Unsubscribe selected users" +msgstr "Διαγραφή επιλεγμένων χρηστών" + +#: admin.py:484 +#, python-format +msgid "%d subscription has been successfully added." +msgid_plural "%d subscriptions have been successfully added." +msgstr[0] "%d συνδρομή προστέθηκε επιτυχώς." +msgstr[1] "%d συνδρομές προστέθηκαν επιτυχώς." + +#: admin_forms.py:43 +#, python-format +msgid "File type '%s' was not recognized." 
+msgstr "Ο τύπος αρχείου '%s' δεν αναγνωρίστηκε." + +#: admin_forms.py:60 +#, python-format +msgid "File extension '%s' was not recognized." +msgstr "Η επέκταση αρχείου '%s' δεν αναγνωρίστηκε." + +#: admin_forms.py:64 +msgid "No entries could found in this file." +msgstr "Σε αυτό το αρχείο δεν βρέθηκαν καταχωρήσεις." + +#: admin_forms.py:72 +#: templates/admin/newsletter/subscription/confirmimportform.html:12 +#: templates/admin/newsletter/subscription/importform.html:12 +#: templates/newsletter/common.html:6 +#: templates/newsletter/newsletter_detail.html:10 +#: templates/newsletter/newsletter_list.html:14 +#: templates/newsletter/newsletter_list.html:32 +#: templates/newsletter/subscription_activate.html:5 +#: templates/newsletter/subscription_activate.html:8 +#: templates/newsletter/subscription_subscribe_activated.html:5 +#: templates/newsletter/subscription_subscribe_activated.html:8 +#: templates/newsletter/subscription_unsubscribe_activated.html:5 +#: templates/newsletter/subscription_unsubscribe_activated.html:8 +#: templates/newsletter/subscription_update_activated.html:5 +#: templates/newsletter/subscription_update_activated.html:8 +msgid "Newsletter" +msgstr "Newsletter" + +#: admin_forms.py:75 +msgid "Address file" +msgstr "Διεύθυνση αρχείου" + +#: admin_forms.py:77 +msgid "Ignore non-fatal errors" +msgstr "Αγνοήστε τα μη μοιραία σφάλματα" + +#: admin_forms.py:88 +msgid "You should confirm in order to continue." +msgstr "Θα πρέπει να επιβεβαιώσετε για να συνεχίσετε." + +#: admin_forms.py:91 +#: templates/admin/newsletter/subscription/confirmimportform.html:23 +#: templates/admin/newsletter/subscription/confirmimportform.html:28 +msgid "Confirm import" +msgstr "Επιβεβαίωση εισαγωγής" + +#: admin_forms.py:121 admin_forms.py:129 +msgid "If a user has been selected this field should remain empty." +msgstr "Εάν έχει επιλεγεί χρήστης, αυτό το πεδίο πρέπει να παραμείνει κενό." 
+ +#: admin_forms.py:139 +msgid "Either a user must be selected or an email address must be specified." +msgstr "Πρέπει να επιλεγεί ένας χρήστης ή να οριστεί μια διεύθυνση e-mail." + +#: admin_forms.py:164 +msgid "" +"This message has already been published in some other submission. Messages " +"can only be published once." +msgstr "Αυτό το μήνυμα έχει ήδη δημοσιευθεί σε κάποια άλλη υποβολή. Τα μηνύματα μπορούν να δημοσιεύονται μόνο μία φορά." + +#: admin_utils.py:25 +#, python-format +msgid "%(name)s object with primary key %(key)r does not exist." +msgstr "%(name)s αντικείμενο με πρωτεύον κλειδί %(key)r δεν υπάρχει." + +#: forms.py:57 +msgid "Your e-mail address has already been subscribed to." +msgstr "Η διεύθυνση e-mail σας έχει ήδη εγγραφεί." + +#: forms.py:84 +msgid "This subscription has not yet been activated." +msgstr "Αυτή η συνδρομή δεν έχει ενεργοποιηθεί ακόμα." + +#: forms.py:102 +msgid "This e-mail address has not been subscribed to." +msgstr "Αυτή η διεύθυνση e-mail δεν έχει εγγραφεί." + +#: forms.py:117 +msgid "This subscription has already been unsubscribed from." +msgstr "Αυτή η συνδρομή έχει ήδη διαγραφεί." + +#: forms.py:133 +msgid "The validation code supplied by you does not match." +msgstr "Ο κωδικός επικύρωσης που εισήχθη από εσάς δεν ταιριάζει." + +#: forms.py:139 +msgid "Activation code" +msgstr "Κωδικός ενεργοποίησης" + +#: jobs/hourly/submit.py:15 +msgid "Submitting queued newsletter mailings" +msgstr "Υποβολή ουράς αναμονής αποστολής newsletter" + +#: models.py:34 +msgid "newsletter title" +msgstr "τίτλος newsletter" + +#: models.py:39 +msgid "Sender e-mail" +msgstr "E-mail αποστολέα" + +#: models.py:42 +msgid "sender" +msgstr "αποστολέας" + +#: models.py:42 +msgid "Sender name" +msgstr "Όνομα αποστολέα" + +#: models.py:46 +msgid "visible" +msgstr "ορατό" + +#: models.py:50 +msgid "send html" +msgstr "αποστολή html" + +#: models.py:51 +msgid "Whether or not to send HTML versions of e-mails." 
+msgstr "Να αποστέλλονται ή όχι e-mails σε έκδοση HTML." + +#: models.py:103 +msgid "newsletters" +msgstr "newsletters" + +#: models.py:159 +msgid "user" +msgstr "χρήστης" + +#: models.py:165 +msgid "optional" +msgstr "προαιρετικό" + +#: models.py:209 +#, python-format +msgid "Updated subscription %(subscription)s to %(action)s." +msgstr "Ενημερώθηκε η συνδρομή %(subscription)s σε %(action)s." + +#: models.py:251 +msgid "Neither an email nor a username is set. This asks for inconsistency!" +msgstr "Δεν έχει οριστεί ούτε e-mail ούτε ένα όνομα χρήστη. Αυτό είναι ασυνέπεια!" + +#: models.py:255 +msgid "If user is set, email must be null and vice versa." +msgstr "Εάν έχει οριστεί χρήστης, το e-mail πρέπει να είναι κενό και αντίστροφα." + +#: models.py:297 +msgid "IP address" +msgstr "Διεύθυνση IP" + +#: models.py:306 +msgid "activation code" +msgstr "κωδικός ενεργοποίησης" + +#: models.py:311 +msgid "subscribed" +msgstr "εγγεγραμμένος" + +#: models.py:319 +msgid "unsubscribed" +msgstr "διαγραμμένος" + +#: models.py:327 +#, python-format +msgid "%(name)s <%(email)s> to %(newsletter)s" +msgstr "%(name)s <%(email)s> σε %(newsletter)s" + +#: models.py:334 +#, python-format +msgid "%(email)s to %(newsletter)s" +msgstr "%(email)s σε %(newsletter)s" + +#: models.py:340 +msgid "subscription" +msgstr "συνδρομή" + +#: models.py:341 +msgid "subscriptions" +msgstr "συνδρομές" + +#: models.py:429 +msgid "" +"Sort order determines the order in which articles are concatenated in a " +"post." +msgstr "Η σειρά ταξινόμησης καθορίζει τη σειρά με την οποία τα άρθρα συνδέονται σε μια δημοσίευση." 
+ +#: models.py:431 +msgid "sort order" +msgstr "σειρά ταξινόμησης" + +#: models.py:434 models.py:476 +msgid "title" +msgstr "τίτλος" + +#: models.py:435 +msgid "text" +msgstr "κείμενο" + +#: models.py:438 +msgid "link" +msgstr "σύνδεσμος" + +#: models.py:444 +msgid "image" +msgstr "εικόνα" + +#: models.py:456 +msgid "article" +msgstr "άρθρο" + +#: models.py:457 +msgid "articles" +msgstr "άρθρα" + +#: models.py:477 +msgid "slug" +msgstr "τίτλος" + +#: models.py:484 +msgid "created" +msgstr "δημιουργήθηκε" + +#: models.py:487 +msgid "modified" +msgstr "τροποποιήθηκε" + +#: models.py:492 +msgid "messages" +msgstr "μηνύματα" + +#: models.py:497 +#, python-format +msgid "%(title)s in %(newsletter)s" +msgstr "%(title)s σε %(newsletter)s" + +#: models.py:556 +msgid "submissions" +msgstr "υποβολές" + +#: models.py:559 +#, python-format +msgid "%(newsletter)s on %(publish_date)s" +msgstr "%(newsletter)s σε %(publish_date)s" + +#: models.py:578 +#, python-format +msgid "Submitting %(submission)s to %(count)d people" +msgstr "Υποβάλλονται %(submission)s σε %(count)d άτομα" + +#: models.py:637 +#, python-format +msgid "Submitting message to: %s." +msgstr "Υποβολή μηνύματος σε: %s." + +#: models.py:646 +#, python-format +msgid "Message %(subscription)s failed with error: %(error)s" +msgstr "Το μήνυμα %(subscription)s απέτυχε με σφάλμα: %(error)s" + +#: models.py:664 +#, python-format +msgid "Submission of message %s" +msgstr "Υποβολή μηνύματος %s" + +#: models.py:709 +msgid "" +"If you select none, the system will automatically find the subscribers for " +"you." +msgstr "Εάν δεν επιλέξετε κανένα, το σύστημα θα εντοπίσει αυτόματα τους συνδρομητές για εσάς." + +#: models.py:711 +msgid "recipients" +msgstr "παραλήπτες" + +#: models.py:716 +msgid "publication date" +msgstr "ημερομηνία δημοσίευσης" + +#: models.py:720 +msgid "publish" +msgstr "δημοσίευση" + +#: models.py:721 +msgid "Publish in archive." +msgstr "Δημοσίευση στο αρχείο." 
+ +#: models.py:725 +msgid "prepared" +msgstr "ετοιμάστηκε" + +#: models.py:729 +msgid "sent" +msgstr "απεστάλη" + +#: models.py:733 +msgid "sending" +msgstr "αποστέλλεται" + +#: templates/admin/newsletter/message/preview.html:5 +#: templates/admin/newsletter/message/preview.html:19 +msgid "Preview message" +msgstr "Προεπισκόπηση μηνύματος" + +#: templates/admin/newsletter/message/preview.html:9 +#: templates/admin/newsletter/subscription/confirmimportform.html:8 +#: templates/admin/newsletter/subscription/importform.html:8 +msgid "Home" +msgstr "Κεντρική σελίδα" + +#: templates/admin/newsletter/message/preview.html:11 +msgid "Message" +msgstr "Μήνυμα" + +#: templates/admin/newsletter/message/preview.html:22 +#: templates/admin/newsletter/subscription/importform.html:28 +msgid "Change" +msgstr "Αλλαγή" + +#: templates/admin/newsletter/message/preview.html:23 +#: templates/admin/newsletter/subscription/importform.html:29 +msgid "Create submission" +msgstr "Δημιουργία υποβολής" + +#: templates/admin/newsletter/message/preview.html:26 +msgid "HTML" +msgstr "HTML" + +#: templates/admin/newsletter/message/preview.html:30 +msgid "Text" +msgstr "Κείμενο" + +#: templates/admin/newsletter/submission/change_form.html:16 +msgid "Submit" +msgstr "Υποβολή" + +#: templates/admin/newsletter/subscription/change_list.html:5 +msgid "Import" +msgstr "Εισαγωγή" + +#: templates/admin/newsletter/subscription/confirmimportform.html:3 +#: templates/admin/newsletter/subscription/confirmimportform.html:20 +#: templates/admin/newsletter/subscription/importform.html:3 +#: templates/admin/newsletter/subscription/importform.html:19 +#: templates/admin/newsletter/subscription/importform.html:24 +msgid "Import addresses" +msgstr "Εισαγωγή διευθύνσεων" + +#: templates/admin/newsletter/subscription/confirmimportform.html:40 +msgid "Confirm" +msgstr "Επιβεβαίωση" + +#: templates/admin/newsletter/subscription/importform.html:37 +msgid "Upload" +msgstr "Μεταφόρτωση" + +#: 
templates/newsletter/message/message.html:21 +msgid "Read more" +msgstr "" + +#: templates/newsletter/message/message.html:27 +msgid "Read message online" +msgstr "" + +#: templates/newsletter/message/message.html:29 +#: templates/newsletter/newsletter_detail.html:21 +#: templates/newsletter/subscription_unsubscribe.html:23 +#: templates/newsletter/subscription_unsubscribe_user.html:23 +msgid "Unsubscribe" +msgstr "Διαγραφή" + +#: templates/newsletter/message/message.txt:15 +msgid "Unsubscribe:" +msgstr "" + +#: templates/newsletter/message/subscribe.html:6 +#, python-format +msgid "Subscription to %(title)s" +msgstr "" + +#: templates/newsletter/message/subscribe.html:10 +#: templates/newsletter/message/subscribe.txt:1 +#, python-format +msgid "" +"Dear %(name)s,\n" +"\n" +"you, or someone in your name requested a subscription to %(title)s.\n" +"\n" +"If you would like to confirm your subscription, please follow this activation link:\n" +"http://%(domain)s%(url)s\n" +"\n" +"Kind regards," +msgstr "" + +#: templates/newsletter/message/subscribe_subject.txt:1 +msgid "Confirm subscription" +msgstr "" + +#: templates/newsletter/message/unsubscribe.html:6 +#, python-format +msgid "Unsubscription from %(title)s" +msgstr "" + +#: templates/newsletter/message/unsubscribe.html:9 +#: templates/newsletter/message/unsubscribe.txt:1 +#, python-format +msgid "" +"Dear %(name)s,\n" +"\n" +"you, or someone in your name requested unsubscription from %(title)s.\n" +"\n" +"If you would like to confirm your unsubscription, please follow this activation link:\n" +"http://%(domain)s%(url)s\n" +"\n" +"Kind regards," +msgstr "" + +#: templates/newsletter/message/unsubscribe_subject.txt:1 +msgid "Confirm unsubscription" +msgstr "" + +#: templates/newsletter/message/update.html:6 +#, python-format +msgid "Update of subscription to %(title)s" +msgstr "" + +#: templates/newsletter/message/update.html:9 +#: templates/newsletter/message/update.txt:1 +#, python-format +msgid "" +"Dear 
%(name)s,\n" +"\n" +"you, or someone in your name requested updating your personal information for %(title)s.\n" +"\n" +"To make changes to your information in our database, please follow this activation link:\n" +"http://%(domain)s%(url)s\n" +"\n" +"Kind regards," +msgstr "" + +#: templates/newsletter/message/update_subject.txt:1 +msgid "Update information" +msgstr "" + +#: templates/newsletter/newsletter_detail.html:5 +msgid "Newsletter detail" +msgstr "Λεπτομέρειες newsletter" + +#: templates/newsletter/newsletter_detail.html:13 +#: templates/newsletter/newsletter_list.html:16 +#: templates/newsletter/subscription_subscribe.html:23 +#: templates/newsletter/subscription_subscribe_user.html:22 +msgid "Subscribe" +msgstr "Εγγραφή" + +#: templates/newsletter/newsletter_detail.html:17 +msgid "Update" +msgstr "" + +#: templates/newsletter/newsletter_detail.html:24 +msgid "Archive" +msgstr "" + +#: templates/newsletter/newsletter_detail.html:27 +#: templates/newsletter/submission_archive.html:18 +msgid "Back to list" +msgstr "" + +#: templates/newsletter/newsletter_list.html:5 +msgid "Newsletter list" +msgstr "Λίστα newsletter" + +#: templates/newsletter/newsletter_list.html:27 +msgid "Update subscriptions" +msgstr "Ενημέρωση συνδρομών" + +#: templates/newsletter/submission_archive.html:5 +#: templates/newsletter/submission_archive.html:10 +msgid "Newsletter archive" +msgstr "Αρχείο newsletter" + +#: templates/newsletter/subscription_activate.html:5 +#: templates/newsletter/subscription_activate.html:8 +#: templates/newsletter/subscription_subscribe_activated.html:5 +#: templates/newsletter/subscription_subscribe_activated.html:8 +#: templates/newsletter/subscription_unsubscribe_activated.html:5 +#: templates/newsletter/subscription_unsubscribe_activated.html:8 +#: templates/newsletter/subscription_update_activated.html:5 +#: templates/newsletter/subscription_update_activated.html:8 +msgid "activate" +msgstr "ενεργοποίηση" + +#: 
templates/newsletter/subscription_activate.html:13 +msgid "Activate" +msgstr "Ενεργοποίηση" + +#: templates/newsletter/subscription_subscribe.html:5 +#: templates/newsletter/subscription_subscribe.html:8 +#: templates/newsletter/subscription_subscribe_email_sent.html:5 +#: templates/newsletter/subscription_subscribe_email_sent.html:8 +#: templates/newsletter/subscription_subscribe_user.html:5 +#: templates/newsletter/subscription_subscribe_user.html:8 +msgid "Newsletter subscribe" +msgstr "Εγγραφή στο newsletter" + +#: templates/newsletter/subscription_subscribe.html:11 +#: templates/newsletter/subscription_unsubscribe.html:11 +#: templates/newsletter/subscription_update.html:11 +msgid "" +"Due to a technical error we were not able to submit your confirmation email." +" This could be because your email address is invalid." +msgstr "Λόγω τεχνικού σφάλματος, δεν ήταν δυνατή η υποβολή του e-mail επιβεβαίωσης. Αυτό μπορεί να οφείλεται στο γεγονός ότι η διεύθυνση e-mail σας δεν είναι έγκυρη." + +#: templates/newsletter/subscription_subscribe_activated.html:10 +msgid "Your subscription has successfully been activated." +msgstr "Η συνδρομή σας έχει ενεργοποιηθεί με επιτυχία." + +#: templates/newsletter/subscription_subscribe_email_sent.html:10 +msgid "" +"Your subscription request was successfully received and an activation email " +"has been sent to you. In that email you will find a link which you need to " +"follow in order to activate your subscription." +msgstr "Το αίτημα συνδρομής σας λήφθηκε με επιτυχία και εστάλη e-mail ενεργοποίησης. Σε αυτό το e-mail θα βρείτε έναν σύνδεσμο που πρέπει να ακολουθήσετε για να ενεργοποιήσετε τη συνδρομή σας." + +#: templates/newsletter/subscription_subscribe_user.html:19 +msgid "Do you want to subscribe to this newsletter?" 
+msgstr "Θέλετε να εγγραφείτε σε αυτό το newsletter;" + +#: templates/newsletter/subscription_unsubscribe.html:5 +#: templates/newsletter/subscription_unsubscribe.html:8 +#: templates/newsletter/subscription_unsubscribe_email_sent.html:5 +#: templates/newsletter/subscription_unsubscribe_email_sent.html:8 +#: templates/newsletter/subscription_unsubscribe_user.html:5 +#: templates/newsletter/subscription_unsubscribe_user.html:8 +msgid "Newsletter unsubscribe" +msgstr "Διαγραφή από το newsletter" + +#: templates/newsletter/subscription_unsubscribe_activated.html:10 +msgid "You have successfully been unsubscribed." +msgstr "Έχετε καταργήσει την εγγραφή σας με επιτυχία." + +#: templates/newsletter/subscription_unsubscribe_email_sent.html:10 +msgid "" +"Your unsubscription request has successfully been received. An email has " +"been sent to you with a link you need to follow in order to confirm your " +"unsubscription." +msgstr "Το αίτημά σας για κατάργηση εγγραφής έχει ληφθεί με επιτυχία. Σας έχει σταλεί ένα e-mail με έναν σύνδεσμο που πρέπει να ακολουθήσετε για να επιβεβαιώσετε την κατάργηση της εγγραφής σας." + +#: templates/newsletter/subscription_unsubscribe_user.html:20 +msgid "Do you want to unsubscribe from this newsletter?" +msgstr "Θέλετε να διαγραφείτε από αυτό το newsletter;" + +#: templates/newsletter/subscription_update.html:5 +#: templates/newsletter/subscription_update.html:8 +#: templates/newsletter/subscription_update_email_sent.html:5 +#: templates/newsletter/subscription_update_email_sent.html:8 +msgid "Newsletter update" +msgstr "Ενημέρωση newsletter" + +#: templates/newsletter/subscription_update.html:23 +msgid "Update subscription" +msgstr "Ενημέρωση συνδρομής" + +#: templates/newsletter/subscription_update_activated.html:10 +msgid "Your subscription has successfully been updated." +msgstr "Η συνδρομή σας έχει ενημερωθεί με επιτυχία." 
+ +#: templates/newsletter/subscription_update_email_sent.html:10 +msgid "" +"Your update request was successfully received and an activation email has " +"been sent to you. In that email you will find a link which you need to " +"follow in order to update your subscription." +msgstr "Η αίτησή σας για ενημέρωση λήφθηκε με επιτυχία και εστάλη ένα e-mail ενεργοποίησης. Σε αυτό το e-mail θα βρείτε έναν σύνδεσμο που πρέπει να ακολουθήσετε για να ενημερώσετε τη συνδρομή σας." + +#: templates/widget/image.html:2 +msgid "Currently:" +msgstr "Επί του παρόντος:" + +#: templates/widget/image.html:4 +msgid "Change:" +msgstr "Αλλαγή:" + +#: validators.py:15 +#, python-format +msgid "" +"The e-mail address '%(email)s' belongs to a user with an account on this " +"site. Please log in as that user and try again." +msgstr "Η διεύθυνση e-mail '%(email)s' ανήκει σε χρήστη με λογαριασμό σε αυτόν τον ιστότοπο. Συνδεθείτε ως αυτός ο χρήστης και δοκιμάστε ξανά." + +#: views.py:120 +msgid "Your changes have been saved." +msgstr "Οι αλλαγές σας έχουν αποθηκευτεί." + +#: views.py:311 +#, python-format +msgid "You have been subscribed to %s." +msgstr "Έχετε εγγραφεί στο %s." + +#: views.py:315 +#, python-format +msgid "User %(rs)s subscribed to %(my_newsletter)s." +msgstr "Ο χρήστης %(rs)s έχει εγγραφεί στο %(my_newsletter)s." + +#: views.py:325 +#, python-format +msgid "You are already subscribed to %s." +msgstr "Έχετε ήδη εγγραφεί στο %s." + +#: views.py:350 +#, python-format +msgid "You have been unsubscribed from %s." +msgstr "Η εγγραφή σας από το %s έχει καταργηθεί." + +#: views.py:354 +#, python-format +msgid "User %(rs)s unsubscribed from %(my_newsletter)s." +msgstr "Ο χρήστης %(rs)s έχει διαγραφεί από το %(my_newsletter)s." + +#: views.py:367 +#, python-format +msgid "You are not subscribed to %s." +msgstr "Δεν έχετε εγγραφεί στο %s." 
diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/locale/el_GR/LC_MESSAGES/djangojs.mo b/thesisenv/lib/python3.6/site-packages/newsletter/locale/el_GR/LC_MESSAGES/djangojs.mo new file mode 100644 index 0000000..0bf04b1 Binary files /dev/null and b/thesisenv/lib/python3.6/site-packages/newsletter/locale/el_GR/LC_MESSAGES/djangojs.mo differ diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/locale/el_GR/LC_MESSAGES/djangojs.po b/thesisenv/lib/python3.6/site-packages/newsletter/locale/el_GR/LC_MESSAGES/djangojs.po new file mode 100644 index 0000000..ef1508d --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/newsletter/locale/el_GR/LC_MESSAGES/djangojs.po @@ -0,0 +1,25 @@ +# SOME DESCRIPTIVE TITLE. +# Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER +# This file is distributed under the same license as the PACKAGE package. +# +# Translators: +# Sophocles Tsivides , 2017 +msgid "" +msgstr "" +"Project-Id-Version: django-newsletter\n" +"Report-Msgid-Bugs-To: \n" +"POT-Creation-Date: 2012-11-19 21:55+0100\n" +"PO-Revision-Date: 2017-09-23 17:24+0000\n" +"Last-Translator: Sophocles Tsivides \n" +"Language-Team: Greek (Greece) (http://www.transifex.com/dokterbob/django-newsletter/language/el_GR/)\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Language: el_GR\n" +"Plural-Forms: nplurals=2; plural=(n != 1);\n" + +#: static/newsletter/admin/js/submit_interface.js:12 +msgid "" +"The submission has been changed. It has to be saved before you can submit. " +"Click OK to proceed with saving, click cancel to continue editing." +msgstr "Η υποβολή έχει αλλάξει. Πρέπει να αποθηκευτεί πριν μπορέσετε να υποβάλετε. Κάντε κλικ στο OK για να προχωρήσετε στην αποθήκευση, κάντε κλικ στο κουμπί ακύρωση για να συνεχίσετε την επεξεργασία." 
diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/locale/en/LC_MESSAGES/django.mo b/thesisenv/lib/python3.6/site-packages/newsletter/locale/en/LC_MESSAGES/django.mo new file mode 100644 index 0000000..316b386 Binary files /dev/null and b/thesisenv/lib/python3.6/site-packages/newsletter/locale/en/LC_MESSAGES/django.mo differ diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/locale/en/LC_MESSAGES/django.po b/thesisenv/lib/python3.6/site-packages/newsletter/locale/en/LC_MESSAGES/django.po new file mode 100644 index 0000000..a39df71 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/newsletter/locale/en/LC_MESSAGES/django.po @@ -0,0 +1,850 @@ +# SOME DESCRIPTIVE TITLE. +# Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER +# This file is distributed under the same license as the PACKAGE package. +# FIRST AUTHOR , YEAR. +# +#, fuzzy +msgid "" +msgstr "" +"Project-Id-Version: PACKAGE VERSION\n" +"Report-Msgid-Bugs-To: \n" +"POT-Creation-Date: 2017-11-16 11:31+0000\n" +"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" +"Last-Translator: FULL NAME \n" +"Language-Team: LANGUAGE \n" +"Language: \n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" + +#: addressimport/parsers.py:40 +#, python-format +msgid "Entry '%s' does not contain a valid e-mail address." +msgstr "" + +#: addressimport/parsers.py:55 +#, python-format +msgid "The address file contains duplicate entries for '%s'." +msgstr "" + +#: addressimport/parsers.py:69 +msgid "Some entries are already subscribed to." +msgstr "" + +#: addressimport/parsers.py:108 +#, python-format +msgid "" +"E-mail address %(email)s too long, maximum length is %(email_length)s " +"characters." +msgstr "" + +#: addressimport/parsers.py:135 +#, python-format +msgid "Name %(name)s too long, maximum length is %(name_length)s characters." 
+msgstr "" + +#: addressimport/parsers.py:196 addressimport/parsers.py:208 +#: addressimport/parsers.py:243 models.py:165 +msgid "name" +msgstr "" + +#: addressimport/parsers.py:200 +msgid "display" +msgstr "" + +#: addressimport/parsers.py:207 +#, python-format +msgid "" +"Name column not found. The name of this column should be either 'name' or " +"'%s'." +msgstr "" + +#: addressimport/parsers.py:219 addressimport/parsers.py:231 +#: addressimport/parsers.py:244 forms.py:43 forms.py:76 models.py:39 +#: models.py:179 +msgid "e-mail" +msgstr "" + +#: addressimport/parsers.py:229 +#, python-format +msgid "" +"E-mail column not found. The name of this column should be either 'email', " +"'e-mail' or '%(email)s'." +msgstr "" + +#: addressimport/parsers.py:239 +#, python-format +msgid "" +"Could not properly determine the proper columns in the CSV-file. There " +"should be a field called 'name' or '%(name)s' and one called 'e-mail' or " +"'%(e-mail)s'." +msgstr "" + +#: addressimport/parsers.py:264 +#, python-format +msgid "Row with content '%(row)s' does not contain a name and email field." +msgstr "" + +#: addressimport/parsers.py:290 +#, python-format +msgid "Error reading vCard file: %s" +msgstr "" + +#: addressimport/parsers.py:309 +#, python-format +msgid "Entry '%s' contains no email address." +msgstr "" + +#: addressimport/parsers.py:345 +msgid "Some entries have no e-mail address." +msgstr "" + +#: admin.py:79 +msgid "Messages" +msgstr "" + +#: admin.py:83 +#: templates/admin/newsletter/subscription/confirmimportform.html:16 +#: templates/admin/newsletter/subscription/importform.html:16 +msgid "Subscriptions" +msgstr "" + +#: admin.py:87 +msgid "Submissions" +msgstr "" + +#: admin.py:99 models.py:102 models.py:300 models.py:480 models.py:699 +msgid "newsletter" +msgstr "" + +#: admin.py:117 models.py:555 +msgid "submission" +msgstr "" + +#: admin.py:124 +msgid "publish date" +msgstr "" + +#: admin.py:154 +msgid "Sent." 
+msgstr "" + +#: admin.py:157 +msgid "Delayed submission." +msgstr "" + +#: admin.py:159 +msgid "Submitting." +msgstr "" + +#: admin.py:161 +msgid "Not sent." +msgstr "" + +#: admin.py:162 admin.py:389 admin_forms.py:115 +msgid "Status" +msgstr "" + +#: admin.py:169 +msgid "Submission already sent." +msgstr "" + +#: admin.py:178 +msgid "Your submission is being sent." +msgstr "" + +#: admin.py:225 +msgid "Optional" +msgstr "" + +#: admin.py:253 models.py:450 models.py:491 models.py:703 +msgid "message" +msgstr "" + +#: admin.py:258 templates/admin/newsletter/message/change_form.html:8 +#: templates/admin/newsletter/message/preview.html:13 +msgid "Preview" +msgstr "" + +#: admin.py:275 views.py:611 +msgid "" +"No HTML template associated with the newsletter this message belongs to." +msgstr "" + +#: admin.py:384 admin_forms.py:103 +msgid "Subscribed" +msgstr "" + +#: admin.py:386 admin_forms.py:104 +msgid "Unsubscribed" +msgstr "" + +#: admin.py:388 +msgid "Unactivated" +msgstr "" + +#: admin.py:396 models.py:314 +msgid "subscribe date" +msgstr "" + +#: admin.py:403 models.py:322 +msgid "unsubscribe date" +msgstr "" + +#: admin.py:411 +#, python-format +msgid "%d user has been successfully subscribed." +msgid_plural "%d users have been successfully subscribed." +msgstr[0] "" +msgstr[1] "" + +#: admin.py:416 +msgid "Subscribe selected users" +msgstr "" + +#: admin.py:423 +#, python-format +msgid "%d user has been successfully unsubscribed." +msgid_plural "%d users have been successfully unsubscribed." +msgstr[0] "" +msgstr[1] "" + +#: admin.py:428 +msgid "Unsubscribe selected users" +msgstr "" + +#: admin.py:484 +#, python-format +msgid "%d subscription has been successfully added." +msgid_plural "%d subscriptions have been successfully added." +msgstr[0] "" +msgstr[1] "" + +#: admin_forms.py:43 +#, python-format +msgid "File type '%s' was not recognized." +msgstr "" + +#: admin_forms.py:60 +#, python-format +msgid "File extension '%s' was not recognized." 
+msgstr "" + +#: admin_forms.py:64 +msgid "No entries could found in this file." +msgstr "" + +#: admin_forms.py:72 +#: templates/admin/newsletter/subscription/confirmimportform.html:12 +#: templates/admin/newsletter/subscription/importform.html:12 +#: templates/newsletter/common.html:6 +#: templates/newsletter/newsletter_detail.html:10 +#: templates/newsletter/newsletter_list.html:14 +#: templates/newsletter/newsletter_list.html:32 +#: templates/newsletter/subscription_activate.html:5 +#: templates/newsletter/subscription_activate.html:8 +#: templates/newsletter/subscription_subscribe_activated.html:5 +#: templates/newsletter/subscription_subscribe_activated.html:8 +#: templates/newsletter/subscription_unsubscribe_activated.html:5 +#: templates/newsletter/subscription_unsubscribe_activated.html:8 +#: templates/newsletter/subscription_update_activated.html:5 +#: templates/newsletter/subscription_update_activated.html:8 +msgid "Newsletter" +msgstr "" + +#: admin_forms.py:75 +msgid "Address file" +msgstr "" + +#: admin_forms.py:77 +msgid "Ignore non-fatal errors" +msgstr "" + +#: admin_forms.py:88 +msgid "You should confirm in order to continue." +msgstr "" + +#: admin_forms.py:91 +#: templates/admin/newsletter/subscription/confirmimportform.html:23 +#: templates/admin/newsletter/subscription/confirmimportform.html:28 +msgid "Confirm import" +msgstr "" + +#: admin_forms.py:121 admin_forms.py:129 +msgid "If a user has been selected this field should remain empty." +msgstr "" + +#: admin_forms.py:139 +msgid "Either a user must be selected or an email address must be specified." +msgstr "" + +#: admin_forms.py:164 +msgid "" +"This message has already been published in some other submission. Messages " +"can only be published once." +msgstr "" + +#: admin_utils.py:25 +#, python-format +msgid "%(name)s object with primary key %(key)r does not exist." +msgstr "" + +#: forms.py:57 +msgid "Your e-mail address has already been subscribed to." 
+msgstr "" + +#: forms.py:84 +msgid "This subscription has not yet been activated." +msgstr "" + +#: forms.py:102 +msgid "This e-mail address has not been subscribed to." +msgstr "" + +#: forms.py:117 +msgid "This subscription has already been unsubscribed from." +msgstr "" + +#: forms.py:133 +msgid "The validation code supplied by you does not match." +msgstr "" + +#: forms.py:139 +msgid "Activation code" +msgstr "" + +#: jobs/hourly/submit.py:15 +msgid "Submitting queued newsletter mailings" +msgstr "" + +#: models.py:34 +msgid "newsletter title" +msgstr "" + +#: models.py:39 +msgid "Sender e-mail" +msgstr "" + +#: models.py:42 +msgid "sender" +msgstr "" + +#: models.py:42 +msgid "Sender name" +msgstr "" + +#: models.py:46 +msgid "visible" +msgstr "" + +#: models.py:50 +msgid "send html" +msgstr "" + +#: models.py:51 +msgid "Whether or not to send HTML versions of e-mails." +msgstr "" + +#: models.py:103 +msgid "newsletters" +msgstr "" + +#: models.py:159 +msgid "user" +msgstr "" + +#: models.py:165 +msgid "optional" +msgstr "" + +#: models.py:209 +#, python-format +msgid "Updated subscription %(subscription)s to %(action)s." +msgstr "" + +#: models.py:251 +msgid "Neither an email nor a username is set. This asks for inconsistency!" +msgstr "" + +#: models.py:255 +msgid "If user is set, email must be null and vice versa." +msgstr "" + +#: models.py:297 +msgid "IP address" +msgstr "" + +#: models.py:306 +msgid "activation code" +msgstr "" + +#: models.py:311 +msgid "subscribed" +msgstr "" + +#: models.py:319 +msgid "unsubscribed" +msgstr "" + +#: models.py:327 +#, python-format +msgid "%(name)s <%(email)s> to %(newsletter)s" +msgstr "" + +#: models.py:334 +#, python-format +msgid "%(email)s to %(newsletter)s" +msgstr "" + +#: models.py:340 +msgid "subscription" +msgstr "" + +#: models.py:341 +msgid "subscriptions" +msgstr "" + +#: models.py:429 +msgid "" +"Sort order determines the order in which articles are concatenated in a post." 
+msgstr "" + +#: models.py:431 +msgid "sort order" +msgstr "" + +#: models.py:434 models.py:476 +msgid "title" +msgstr "" + +#: models.py:435 +msgid "text" +msgstr "" + +#: models.py:438 +msgid "link" +msgstr "" + +#: models.py:444 +msgid "image" +msgstr "" + +#: models.py:456 +msgid "article" +msgstr "" + +#: models.py:457 +msgid "articles" +msgstr "" + +#: models.py:477 +msgid "slug" +msgstr "" + +#: models.py:484 +msgid "created" +msgstr "" + +#: models.py:487 +msgid "modified" +msgstr "" + +#: models.py:492 +msgid "messages" +msgstr "" + +#: models.py:497 +#, python-format +msgid "%(title)s in %(newsletter)s" +msgstr "" + +#: models.py:556 +msgid "submissions" +msgstr "" + +#: models.py:559 +#, python-format +msgid "%(newsletter)s on %(publish_date)s" +msgstr "" + +#: models.py:578 +#, python-format +msgid "Submitting %(submission)s to %(count)d people" +msgstr "" + +#: models.py:637 +#, python-format +msgid "Submitting message to: %s." +msgstr "" + +#: models.py:646 +#, python-format +msgid "Message %(subscription)s failed with error: %(error)s" +msgstr "" + +#: models.py:664 +#, python-format +msgid "Submission of message %s" +msgstr "" + +#: models.py:709 +msgid "" +"If you select none, the system will automatically find the subscribers for " +"you." +msgstr "" + +#: models.py:711 +msgid "recipients" +msgstr "" + +#: models.py:716 +msgid "publication date" +msgstr "" + +#: models.py:720 +msgid "publish" +msgstr "" + +#: models.py:721 +msgid "Publish in archive." 
+msgstr "" + +#: models.py:725 +msgid "prepared" +msgstr "" + +#: models.py:729 +msgid "sent" +msgstr "" + +#: models.py:733 +msgid "sending" +msgstr "" + +#: templates/admin/newsletter/message/preview.html:5 +#: templates/admin/newsletter/message/preview.html:19 +msgid "Preview message" +msgstr "" + +#: templates/admin/newsletter/message/preview.html:9 +#: templates/admin/newsletter/subscription/confirmimportform.html:8 +#: templates/admin/newsletter/subscription/importform.html:8 +msgid "Home" +msgstr "" + +#: templates/admin/newsletter/message/preview.html:11 +msgid "Message" +msgstr "" + +#: templates/admin/newsletter/message/preview.html:22 +#: templates/admin/newsletter/subscription/importform.html:28 +msgid "Change" +msgstr "" + +#: templates/admin/newsletter/message/preview.html:23 +#: templates/admin/newsletter/subscription/importform.html:29 +msgid "Create submission" +msgstr "" + +#: templates/admin/newsletter/message/preview.html:26 +msgid "HTML" +msgstr "" + +#: templates/admin/newsletter/message/preview.html:30 +msgid "Text" +msgstr "" + +#: templates/admin/newsletter/submission/change_form.html:16 +msgid "Submit" +msgstr "" + +#: templates/admin/newsletter/subscription/change_list.html:5 +msgid "Import" +msgstr "" + +#: templates/admin/newsletter/subscription/confirmimportform.html:3 +#: templates/admin/newsletter/subscription/confirmimportform.html:20 +#: templates/admin/newsletter/subscription/importform.html:3 +#: templates/admin/newsletter/subscription/importform.html:19 +#: templates/admin/newsletter/subscription/importform.html:24 +msgid "Import addresses" +msgstr "" + +#: templates/admin/newsletter/subscription/confirmimportform.html:40 +msgid "Confirm" +msgstr "" + +#: templates/admin/newsletter/subscription/importform.html:37 +msgid "Upload" +msgstr "" + +#: templates/newsletter/message/message.html:21 +msgid "Read more" +msgstr "" + +#: templates/newsletter/message/message.html:27 +msgid "Read message online" +msgstr "" + +#: 
templates/newsletter/message/message.html:29 +#: templates/newsletter/newsletter_detail.html:21 +#: templates/newsletter/subscription_unsubscribe.html:23 +#: templates/newsletter/subscription_unsubscribe_user.html:23 +msgid "Unsubscribe" +msgstr "" + +#: templates/newsletter/message/message.txt:15 +msgid "Unsubscribe:" +msgstr "" + +#: templates/newsletter/message/subscribe.html:6 +#, python-format +msgid "Subscription to %(title)s" +msgstr "" + +#: templates/newsletter/message/subscribe.html:10 +#: templates/newsletter/message/subscribe.txt:1 +#, python-format +msgid "" +"Dear %(name)s,\n" +"\n" +"you, or someone in your name requested a subscription to %(title)s.\n" +"\n" +"If you would like to confirm your subscription, please follow this " +"activation link:\n" +"http://%(domain)s%(url)s\n" +"\n" +"Kind regards," +msgstr "" + +#: templates/newsletter/message/subscribe_subject.txt:1 +msgid "Confirm subscription" +msgstr "" + +#: templates/newsletter/message/unsubscribe.html:6 +#, python-format +msgid "Unsubscription from %(title)s" +msgstr "" + +#: templates/newsletter/message/unsubscribe.html:9 +#: templates/newsletter/message/unsubscribe.txt:1 +#, python-format +msgid "" +"Dear %(name)s,\n" +"\n" +"you, or someone in your name requested unsubscription from %(title)s.\n" +"\n" +"If you would like to confirm your unsubscription, please follow this " +"activation link:\n" +"http://%(domain)s%(url)s\n" +"\n" +"Kind regards," +msgstr "" + +#: templates/newsletter/message/unsubscribe_subject.txt:1 +msgid "Confirm unsubscription" +msgstr "" + +#: templates/newsletter/message/update.html:6 +#, python-format +msgid "Update of subscription to %(title)s" +msgstr "" + +#: templates/newsletter/message/update.html:9 +#: templates/newsletter/message/update.txt:1 +#, python-format +msgid "" +"Dear %(name)s,\n" +"\n" +"you, or someone in your name requested updating your personal information " +"for %(title)s.\n" +"\n" +"To make changes to your information in our database, 
please follow this " +"activation link:\n" +"http://%(domain)s%(url)s\n" +"\n" +"Kind regards," +msgstr "" + +#: templates/newsletter/message/update_subject.txt:1 +msgid "Update information" +msgstr "" + +#: templates/newsletter/newsletter_detail.html:5 +msgid "Newsletter detail" +msgstr "" + +#: templates/newsletter/newsletter_detail.html:13 +#: templates/newsletter/newsletter_list.html:16 +#: templates/newsletter/subscription_subscribe.html:23 +#: templates/newsletter/subscription_subscribe_user.html:22 +msgid "Subscribe" +msgstr "" + +#: templates/newsletter/newsletter_detail.html:17 +msgid "Update" +msgstr "" + +#: templates/newsletter/newsletter_detail.html:24 +msgid "Archive" +msgstr "" + +#: templates/newsletter/newsletter_detail.html:27 +#: templates/newsletter/submission_archive.html:18 +msgid "Back to list" +msgstr "" + +#: templates/newsletter/newsletter_list.html:5 +msgid "Newsletter list" +msgstr "" + +#: templates/newsletter/newsletter_list.html:27 +msgid "Update subscriptions" +msgstr "" + +#: templates/newsletter/submission_archive.html:5 +#: templates/newsletter/submission_archive.html:10 +msgid "Newsletter archive" +msgstr "" + +#: templates/newsletter/subscription_activate.html:5 +#: templates/newsletter/subscription_activate.html:8 +#: templates/newsletter/subscription_subscribe_activated.html:5 +#: templates/newsletter/subscription_subscribe_activated.html:8 +#: templates/newsletter/subscription_unsubscribe_activated.html:5 +#: templates/newsletter/subscription_unsubscribe_activated.html:8 +#: templates/newsletter/subscription_update_activated.html:5 +#: templates/newsletter/subscription_update_activated.html:8 +msgid "activate" +msgstr "" + +#: templates/newsletter/subscription_activate.html:13 +msgid "Activate" +msgstr "" + +#: templates/newsletter/subscription_subscribe.html:5 +#: templates/newsletter/subscription_subscribe.html:8 +#: templates/newsletter/subscription_subscribe_email_sent.html:5 +#: 
templates/newsletter/subscription_subscribe_email_sent.html:8 +#: templates/newsletter/subscription_subscribe_user.html:5 +#: templates/newsletter/subscription_subscribe_user.html:8 +msgid "Newsletter subscribe" +msgstr "" + +#: templates/newsletter/subscription_subscribe.html:11 +#: templates/newsletter/subscription_unsubscribe.html:11 +#: templates/newsletter/subscription_update.html:11 +msgid "" +"Due to a technical error we were not able to submit your confirmation email. " +"This could be because your email address is invalid." +msgstr "" + +#: templates/newsletter/subscription_subscribe_activated.html:10 +msgid "Your subscription has successfully been activated." +msgstr "" + +#: templates/newsletter/subscription_subscribe_email_sent.html:10 +msgid "" +"Your subscription request was successfully received and an activation email " +"has been sent to you. In that email you will find a link which you need to " +"follow in order to activate your subscription." +msgstr "" + +#: templates/newsletter/subscription_subscribe_user.html:19 +msgid "Do you want to subscribe to this newsletter?" +msgstr "" + +#: templates/newsletter/subscription_unsubscribe.html:5 +#: templates/newsletter/subscription_unsubscribe.html:8 +#: templates/newsletter/subscription_unsubscribe_email_sent.html:5 +#: templates/newsletter/subscription_unsubscribe_email_sent.html:8 +#: templates/newsletter/subscription_unsubscribe_user.html:5 +#: templates/newsletter/subscription_unsubscribe_user.html:8 +msgid "Newsletter unsubscribe" +msgstr "" + +#: templates/newsletter/subscription_unsubscribe_activated.html:10 +msgid "You have successfully been unsubscribed." +msgstr "" + +#: templates/newsletter/subscription_unsubscribe_email_sent.html:10 +msgid "" +"Your unsubscription request has successfully been received. An email has " +"been sent to you with a link you need to follow in order to confirm your " +"unsubscription." 
+msgstr "" + +#: templates/newsletter/subscription_unsubscribe_user.html:20 +msgid "Do you want to unsubscribe from this newsletter?" +msgstr "" + +#: templates/newsletter/subscription_update.html:5 +#: templates/newsletter/subscription_update.html:8 +#: templates/newsletter/subscription_update_email_sent.html:5 +#: templates/newsletter/subscription_update_email_sent.html:8 +msgid "Newsletter update" +msgstr "" + +#: templates/newsletter/subscription_update.html:23 +msgid "Update subscription" +msgstr "" + +#: templates/newsletter/subscription_update_activated.html:10 +msgid "Your subscription has successfully been updated." +msgstr "" + +#: templates/newsletter/subscription_update_email_sent.html:10 +msgid "" +"Your update request was successfully received and an activation email has " +"been sent to you. In that email you will find a link which you need to " +"follow in order to update your subscription." +msgstr "" + +#: templates/widget/image.html:2 +msgid "Currently:" +msgstr "" + +#: templates/widget/image.html:4 +msgid "Change:" +msgstr "" + +#: validators.py:15 +#, python-format +msgid "" +"The e-mail address '%(email)s' belongs to a user with an account on this " +"site. Please log in as that user and try again." +msgstr "" + +#: views.py:120 +msgid "Your changes have been saved." +msgstr "" + +#: views.py:311 +#, python-format +msgid "You have been subscribed to %s." +msgstr "" + +#: views.py:315 +#, python-format +msgid "User %(rs)s subscribed to %(my_newsletter)s." +msgstr "" + +#: views.py:325 +#, python-format +msgid "You are already subscribed to %s." +msgstr "" + +#: views.py:350 +#, python-format +msgid "You have been unsubscribed from %s." +msgstr "" + +#: views.py:354 +#, python-format +msgid "User %(rs)s unsubscribed from %(my_newsletter)s." +msgstr "" + +#: views.py:367 +#, python-format +msgid "You are not subscribed to %s." 
+msgstr "" diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/locale/en/LC_MESSAGES/djangojs.mo b/thesisenv/lib/python3.6/site-packages/newsletter/locale/en/LC_MESSAGES/djangojs.mo new file mode 100644 index 0000000..3571ea1 Binary files /dev/null and b/thesisenv/lib/python3.6/site-packages/newsletter/locale/en/LC_MESSAGES/djangojs.mo differ diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/locale/en/LC_MESSAGES/djangojs.po b/thesisenv/lib/python3.6/site-packages/newsletter/locale/en/LC_MESSAGES/djangojs.po new file mode 100644 index 0000000..80cd84f --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/newsletter/locale/en/LC_MESSAGES/djangojs.po @@ -0,0 +1,23 @@ +# SOME DESCRIPTIVE TITLE. +# Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER +# This file is distributed under the same license as the PACKAGE package. +# FIRST AUTHOR , YEAR. +# +#, fuzzy +msgid "" +msgstr "" +"Project-Id-Version: PACKAGE VERSION\n" +"Report-Msgid-Bugs-To: \n" +"POT-Creation-Date: 2012-11-19 21:55+0100\n" +"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" +"Last-Translator: FULL NAME \n" +"Language-Team: LANGUAGE \n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" + +#: static/newsletter/admin/js/submit_interface.js:12 +msgid "" +"The submission has been changed. It has to be saved before you can submit. " +"Click OK to proceed with saving, click cancel to continue editing." 
+msgstr "" diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/locale/es/LC_MESSAGES/django.mo b/thesisenv/lib/python3.6/site-packages/newsletter/locale/es/LC_MESSAGES/django.mo new file mode 100644 index 0000000..34c14be Binary files /dev/null and b/thesisenv/lib/python3.6/site-packages/newsletter/locale/es/LC_MESSAGES/django.mo differ diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/locale/es/LC_MESSAGES/django.po b/thesisenv/lib/python3.6/site-packages/newsletter/locale/es/LC_MESSAGES/django.po new file mode 100644 index 0000000..857f053 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/newsletter/locale/es/LC_MESSAGES/django.po @@ -0,0 +1,853 @@ +# SOME DESCRIPTIVE TITLE. +# Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER +# This file is distributed under the same license as the PACKAGE package. +# +# Translators: +# Alex Santos , 2013 +# Dehivis Perez , 2015 +# Diamo Rafaela , 2016 +# hernantz , 2013 +# Javi Palanca , 2017-2018 +# Jorge Cuaron , 2016 +msgid "" +msgstr "" +"Project-Id-Version: django-newsletter\n" +"Report-Msgid-Bugs-To: \n" +"POT-Creation-Date: 2017-11-16 11:31+0000\n" +"PO-Revision-Date: 2018-03-19 10:35+0000\n" +"Last-Translator: Javi Palanca \n" +"Language-Team: Spanish (http://www.transifex.com/dokterbob/django-newsletter/language/es/)\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Language: es\n" +"Plural-Forms: nplurals=2; plural=(n != 1);\n" + +#: addressimport/parsers.py:40 +#, python-format +msgid "Entry '%s' does not contain a valid e-mail address." +msgstr "La entrada '%s' no contiene una dirección de email válida." + +#: addressimport/parsers.py:55 +#, python-format +msgid "The address file contains duplicate entries for '%s'." +msgstr "El archivo de direcciones contiene entradas duplicadas para '%s'" + +#: addressimport/parsers.py:69 +msgid "Some entries are already subscribed to." +msgstr "Algunas entradas ya están suscritas." 
+ +#: addressimport/parsers.py:108 +#, python-format +msgid "" +"E-mail address %(email)s too long, maximum length is %(email_length)s " +"characters." +msgstr "Dirección de e-mail %(email)s demasiado larga, el máximo es de %(email_length)s caracteres." + +#: addressimport/parsers.py:135 +#, python-format +msgid "Name %(name)s too long, maximum length is %(name_length)s characters." +msgstr "Nombre %(name)s demasiado largo, el máximo es de %(name_length)s caracteres." + +#: addressimport/parsers.py:196 addressimport/parsers.py:208 +#: addressimport/parsers.py:243 models.py:165 +msgid "name" +msgstr "nombre" + +#: addressimport/parsers.py:200 +msgid "display" +msgstr "mostrar" + +#: addressimport/parsers.py:207 +#, python-format +msgid "" +"Name column not found. The name of this column should be either 'name' or " +"'%s'." +msgstr "No se encuentra el nombre de columna. El nombre de esta columna debe ser 'name' o '%s'." + +#: addressimport/parsers.py:219 addressimport/parsers.py:231 +#: addressimport/parsers.py:244 forms.py:43 forms.py:76 models.py:39 +#: models.py:179 +msgid "e-mail" +msgstr "e-mail" + +#: addressimport/parsers.py:229 +#, python-format +msgid "" +"E-mail column not found. The name of this column should be either 'email', " +"'e-mail' or '%(email)s'." +msgstr "La columna e-mail no se encuentra. El nombre de esta columna debe ser 'email', 'e-mail' o '%(email)s'." + +#: addressimport/parsers.py:239 +#, python-format +msgid "" +"Could not properly determine the proper columns in the CSV-file. There " +"should be a field called 'name' or '%(name)s' and one called 'e-mail' or " +"'%(e-mail)s'." +msgstr "No se pudo determinar adecuadamente las columnas apropiadas en el archivo CSV. Debe haber un campo llamado 'nombre' o '%(name)s' y uno llamado \"e-mail\" o \"%(e-mail)s '." + +#: addressimport/parsers.py:264 +#, python-format +msgid "Row with content '%(row)s' does not contain a name and email field." 
+msgstr "La fila con el contenido '%(row)s' no contiene un campo de nombre y email." + +#: addressimport/parsers.py:290 +#, python-format +msgid "Error reading vCard file: %s" +msgstr "Error leyendo archivo vCard: %s" + +#: addressimport/parsers.py:309 +#, python-format +msgid "Entry '%s' contains no email address." +msgstr "La entrada '%s' no contiene una dirección de email." + +#: addressimport/parsers.py:345 +msgid "Some entries have no e-mail address." +msgstr "Algunas entradas no tienen dirección de email." + +#: admin.py:79 +msgid "Messages" +msgstr "Mensajes" + +#: admin.py:83 +#: templates/admin/newsletter/subscription/confirmimportform.html:16 +#: templates/admin/newsletter/subscription/importform.html:16 +msgid "Subscriptions" +msgstr "Suscripciones" + +#: admin.py:87 +msgid "Submissions" +msgstr "Envíos" + +#: admin.py:99 models.py:102 models.py:300 models.py:480 models.py:699 +msgid "newsletter" +msgstr "boletín de noticias" + +#: admin.py:117 models.py:555 +msgid "submission" +msgstr "envío" + +#: admin.py:124 +msgid "publish date" +msgstr "fecha de publicación" + +#: admin.py:154 +msgid "Sent." +msgstr "Enviado." + +#: admin.py:157 +msgid "Delayed submission." +msgstr "Envío aplazado." + +#: admin.py:159 +msgid "Submitting." +msgstr "Enviando." + +#: admin.py:161 +msgid "Not sent." +msgstr "No enviado." + +#: admin.py:162 admin.py:389 admin_forms.py:115 +msgid "Status" +msgstr "Estado" + +#: admin.py:169 +msgid "Submission already sent." +msgstr "Envío ya enviado." + +#: admin.py:178 +msgid "Your submission is being sent." +msgstr "Tu envío está siendo enviado." 
+ +#: admin.py:225 +msgid "Optional" +msgstr "Opcional" + +#: admin.py:253 models.py:450 models.py:491 models.py:703 +msgid "message" +msgstr "mensaje" + +#: admin.py:258 templates/admin/newsletter/message/change_form.html:8 +#: templates/admin/newsletter/message/preview.html:13 +msgid "Preview" +msgstr "Vista previa" + +#: admin.py:275 views.py:611 +msgid "" +"No HTML template associated with the newsletter this message belongs to." +msgstr "No hay una plantilla HTML asociada al boletín de noticias al que pertenece este mensaje." + +#: admin.py:384 admin_forms.py:103 +msgid "Subscribed" +msgstr "Suscrito" + +#: admin.py:386 admin_forms.py:104 +msgid "Unsubscribed" +msgstr "Eliminado" + +#: admin.py:388 +msgid "Unactivated" +msgstr "Desactivado" + +#: admin.py:396 models.py:314 +msgid "subscribe date" +msgstr "fecha de suscripción" + +#: admin.py:403 models.py:322 +msgid "unsubscribe date" +msgstr "fecha de baja" + +#: admin.py:411 +#, python-format +msgid "%d user has been successfully subscribed." +msgid_plural "%d users have been successfully subscribed." +msgstr[0] "%d usuario se ha suscrito con éxito." +msgstr[1] "%d usuarios se han suscrito con éxito." + +#: admin.py:416 +msgid "Subscribe selected users" +msgstr "Suscribir usuarios seleccionados" + +#: admin.py:423 +#, python-format +msgid "%d user has been successfully unsubscribed." +msgid_plural "%d users have been successfully unsubscribed." +msgstr[0] "%d usuario se ha dado de baja con éxito. " +msgstr[1] "%d usuarios se han dado de baja con éxito." + +#: admin.py:428 +msgid "Unsubscribe selected users" +msgstr "Dar de baja usuarios seleccionados" + +#: admin.py:484 +#, python-format +msgid "%d subscription has been successfully added." +msgid_plural "%d subscriptions have been successfully added." +msgstr[0] "%d suscripción se ha añadido con éxito. " +msgstr[1] "%d suscripciones se han añadido con éxito." + +#: admin_forms.py:43 +#, python-format +msgid "File type '%s' was not recognized." 
+msgstr "Tipo de archivo '%s' no fue reconocido." + +#: admin_forms.py:60 +#, python-format +msgid "File extension '%s' was not recognized." +msgstr "La extensión de archivo '%s' no fue reconocida." + +#: admin_forms.py:64 +msgid "No entries could found in this file." +msgstr "No se encontraron entradas en este archivo." + +#: admin_forms.py:72 +#: templates/admin/newsletter/subscription/confirmimportform.html:12 +#: templates/admin/newsletter/subscription/importform.html:12 +#: templates/newsletter/common.html:6 +#: templates/newsletter/newsletter_detail.html:10 +#: templates/newsletter/newsletter_list.html:14 +#: templates/newsletter/newsletter_list.html:32 +#: templates/newsletter/subscription_activate.html:5 +#: templates/newsletter/subscription_activate.html:8 +#: templates/newsletter/subscription_subscribe_activated.html:5 +#: templates/newsletter/subscription_subscribe_activated.html:8 +#: templates/newsletter/subscription_unsubscribe_activated.html:5 +#: templates/newsletter/subscription_unsubscribe_activated.html:8 +#: templates/newsletter/subscription_update_activated.html:5 +#: templates/newsletter/subscription_update_activated.html:8 +msgid "Newsletter" +msgstr "Boletín de Noticias" + +#: admin_forms.py:75 +msgid "Address file" +msgstr "Archivo de direcciones" + +#: admin_forms.py:77 +msgid "Ignore non-fatal errors" +msgstr "Ignorar errores no fatales" + +#: admin_forms.py:88 +msgid "You should confirm in order to continue." +msgstr "Debes confirmar para poder continuar." + +#: admin_forms.py:91 +#: templates/admin/newsletter/subscription/confirmimportform.html:23 +#: templates/admin/newsletter/subscription/confirmimportform.html:28 +msgid "Confirm import" +msgstr "Confirmar importación" + +#: admin_forms.py:121 admin_forms.py:129 +msgid "If a user has been selected this field should remain empty." +msgstr "Si un usuario ha sido seleccionado este campo debería permanecer vacío." 
+ +#: admin_forms.py:139 +msgid "Either a user must be selected or an email address must be specified." +msgstr "O bien un usuario debe ser seleccionado o una dirección de correo debe ser especificada." + +#: admin_forms.py:164 +msgid "" +"This message has already been published in some other submission. Messages " +"can only be published once." +msgstr "Este mensaje ya ha sido publicado en otra publicación. Los mensajes solo pueden ser publicados una sola vez." + +#: admin_utils.py:25 +#, python-format +msgid "%(name)s object with primary key %(key)r does not exist." +msgstr "El objeto %(name)s con clave primaria %(key)r no existe." + +#: forms.py:57 +msgid "Your e-mail address has already been subscribed to." +msgstr "Su dirección de e-mail ya ha sido suscrita." + +#: forms.py:84 +msgid "This subscription has not yet been activated." +msgstr "Tu suscripción no ha sido activada todavía." + +#: forms.py:102 +msgid "This e-mail address has not been subscribed to." +msgstr "Esta dirección de correo electrónico no ha sido suscrita." + +#: forms.py:117 +msgid "This subscription has already been unsubscribed from." +msgstr "Esta suscripción ya se ha cancelado." + +#: forms.py:133 +msgid "The validation code supplied by you does not match." +msgstr "El código de validación introducido por usted no coincide." + +#: forms.py:139 +msgid "Activation code" +msgstr "Código de activación" + +#: jobs/hourly/submit.py:15 +msgid "Submitting queued newsletter mailings" +msgstr "Enviando emails en cola del boletín de noticias." 
+ +#: models.py:34 +msgid "newsletter title" +msgstr "título del boletín de noticias" + +#: models.py:39 +msgid "Sender e-mail" +msgstr "Correo del remitente" + +#: models.py:42 +msgid "sender" +msgstr "remitente" + +#: models.py:42 +msgid "Sender name" +msgstr "Nombre del remitente" + +#: models.py:46 +msgid "visible" +msgstr "visible" + +#: models.py:50 +msgid "send html" +msgstr "enviar html" + +#: models.py:51 +msgid "Whether or not to send HTML versions of e-mails." +msgstr "Enviar o no las versiones HTML de los correos." + +#: models.py:103 +msgid "newsletters" +msgstr "boletines de noticias" + +#: models.py:159 +msgid "user" +msgstr "usuario" + +#: models.py:165 +msgid "optional" +msgstr "opcional" + +#: models.py:209 +#, python-format +msgid "Updated subscription %(subscription)s to %(action)s." +msgstr "Suscripción %(subscription)s editada a %(action)s." + +#: models.py:251 +msgid "Neither an email nor a username is set. This asks for inconsistency!" +msgstr "Ni el email ni el nombre del usuario han sido introducidos. ¡Esto puede provocar inconsistencias!" + +#: models.py:255 +msgid "If user is set, email must be null and vice versa." +msgstr "Si el usuario está especificado, el campo email debe estar vacío y viceversa." 
+ +#: models.py:297 +msgid "IP address" +msgstr "Dirección IP" + +#: models.py:306 +msgid "activation code" +msgstr "código de activación" + +#: models.py:311 +msgid "subscribed" +msgstr "suscrito" + +#: models.py:319 +msgid "unsubscribed" +msgstr "eliminado" + +#: models.py:327 +#, python-format +msgid "%(name)s <%(email)s> to %(newsletter)s" +msgstr "%(name)s <%(email)s> a %(newsletter)s" + +#: models.py:334 +#, python-format +msgid "%(email)s to %(newsletter)s" +msgstr "%(email)s a %(newsletter)s" + +#: models.py:340 +msgid "subscription" +msgstr "suscripción" + +#: models.py:341 +msgid "subscriptions" +msgstr "suscripciones" + +#: models.py:429 +msgid "" +"Sort order determines the order in which articles are concatenated in a " +"post." +msgstr "El orden de clasificación determina el orden en el que los artículos se concatenan en una publicación." + +#: models.py:431 +msgid "sort order" +msgstr "Orden de clasificación" + +#: models.py:434 models.py:476 +msgid "title" +msgstr "título" + +#: models.py:435 +msgid "text" +msgstr "texto" + +#: models.py:438 +msgid "link" +msgstr "enlace" + +#: models.py:444 +msgid "image" +msgstr "imagen" + +#: models.py:456 +msgid "article" +msgstr "artículo" + +#: models.py:457 +msgid "articles" +msgstr "artículos" + +#: models.py:477 +msgid "slug" +msgstr "slug" + +#: models.py:484 +msgid "created" +msgstr "creado" + +#: models.py:487 +msgid "modified" +msgstr "modificado" + +#: models.py:492 +msgid "messages" +msgstr "mensajes" + +#: models.py:497 +#, python-format +msgid "%(title)s in %(newsletter)s" +msgstr "%(title)s en %(newsletter)s" + +#: models.py:556 +msgid "submissions" +msgstr "envios" + +#: models.py:559 +#, python-format +msgid "%(newsletter)s on %(publish_date)s" +msgstr "%(newsletter)s en %(publish_date)s" + +#: models.py:578 +#, python-format +msgid "Submitting %(submission)s to %(count)d people" +msgstr "Enviando %(submission)s a %(count)d personas" + +#: models.py:637 +#, python-format +msgid "Submitting 
message to: %s." +msgstr "Enviando mensaje a %s." + +#: models.py:646 +#, python-format +msgid "Message %(subscription)s failed with error: %(error)s" +msgstr "El mensaje %(subscription)s falló con el error: %(error)s" + +#: models.py:664 +#, python-format +msgid "Submission of message %s" +msgstr "Envío del mensaje %s" + +#: models.py:709 +msgid "" +"If you select none, the system will automatically find the subscribers for " +"you." +msgstr "Si no selecciona ninguno, el sistema automaticamente buscará los suscriptores por usted." + +#: models.py:711 +msgid "recipients" +msgstr "destinatarios" + +#: models.py:716 +msgid "publication date" +msgstr "fecha de publicación" + +#: models.py:720 +msgid "publish" +msgstr "publicar" + +#: models.py:721 +msgid "Publish in archive." +msgstr "Publicar en archivo." + +#: models.py:725 +msgid "prepared" +msgstr "preparado" + +#: models.py:729 +msgid "sent" +msgstr "enviado" + +#: models.py:733 +msgid "sending" +msgstr "enviando" + +#: templates/admin/newsletter/message/preview.html:5 +#: templates/admin/newsletter/message/preview.html:19 +msgid "Preview message" +msgstr "Previsualizar el mensaje" + +#: templates/admin/newsletter/message/preview.html:9 +#: templates/admin/newsletter/subscription/confirmimportform.html:8 +#: templates/admin/newsletter/subscription/importform.html:8 +msgid "Home" +msgstr "Inicio" + +#: templates/admin/newsletter/message/preview.html:11 +msgid "Message" +msgstr "Mensaje" + +#: templates/admin/newsletter/message/preview.html:22 +#: templates/admin/newsletter/subscription/importform.html:28 +msgid "Change" +msgstr "Cambiar" + +#: templates/admin/newsletter/message/preview.html:23 +#: templates/admin/newsletter/subscription/importform.html:29 +msgid "Create submission" +msgstr "Crear suscripción" + +#: templates/admin/newsletter/message/preview.html:26 +msgid "HTML" +msgstr "HTML" + +#: templates/admin/newsletter/message/preview.html:30 +msgid "Text" +msgstr "Texto" + +#: 
templates/admin/newsletter/submission/change_form.html:16 +msgid "Submit" +msgstr "Enviar" + +#: templates/admin/newsletter/subscription/change_list.html:5 +msgid "Import" +msgstr "Importar" + +#: templates/admin/newsletter/subscription/confirmimportform.html:3 +#: templates/admin/newsletter/subscription/confirmimportform.html:20 +#: templates/admin/newsletter/subscription/importform.html:3 +#: templates/admin/newsletter/subscription/importform.html:19 +#: templates/admin/newsletter/subscription/importform.html:24 +msgid "Import addresses" +msgstr "Importar direcciones" + +#: templates/admin/newsletter/subscription/confirmimportform.html:40 +msgid "Confirm" +msgstr "Confirmar" + +#: templates/admin/newsletter/subscription/importform.html:37 +msgid "Upload" +msgstr "Subir" + +#: templates/newsletter/message/message.html:21 +msgid "Read more" +msgstr "Leer más" + +#: templates/newsletter/message/message.html:27 +msgid "Read message online" +msgstr "Leer mensaje online" + +#: templates/newsletter/message/message.html:29 +#: templates/newsletter/newsletter_detail.html:21 +#: templates/newsletter/subscription_unsubscribe.html:23 +#: templates/newsletter/subscription_unsubscribe_user.html:23 +msgid "Unsubscribe" +msgstr "Dar de baja" + +#: templates/newsletter/message/message.txt:15 +msgid "Unsubscribe:" +msgstr "Cancelar suscripción:" + +#: templates/newsletter/message/subscribe.html:6 +#, python-format +msgid "Subscription to %(title)s" +msgstr "Suscripción a %(title)s" + +#: templates/newsletter/message/subscribe.html:10 +#: templates/newsletter/message/subscribe.txt:1 +#, python-format +msgid "" +"Dear %(name)s,\n" +"\n" +"you, or someone in your name requested a subscription to %(title)s.\n" +"\n" +"If you would like to confirm your subscription, please follow this activation link:\n" +"http://%(domain)s%(url)s\n" +"\n" +"Kind regards," +msgstr "Estimado %(name)s,\n\nusted o alguien en su nombre solicitó una suscripción a %(title)s.\n\nSi desea confirmar su 
suscripción, siga este enlace de activación: \nhttp://%(domain)s%(url)s\n\nSaludos cordiales," + +#: templates/newsletter/message/subscribe_subject.txt:1 +msgid "Confirm subscription" +msgstr "Confirmar suscripción" + +#: templates/newsletter/message/unsubscribe.html:6 +#, python-format +msgid "Unsubscription from %(title)s" +msgstr "Baja de la suscripción de %(title)s" + +#: templates/newsletter/message/unsubscribe.html:9 +#: templates/newsletter/message/unsubscribe.txt:1 +#, python-format +msgid "" +"Dear %(name)s,\n" +"\n" +"you, or someone in your name requested unsubscription from %(title)s.\n" +"\n" +"If you would like to confirm your unsubscription, please follow this activation link:\n" +"http://%(domain)s%(url)s\n" +"\n" +"Kind regards," +msgstr "Estimado %(name)s,\n\nusted, o alguien en su nombre, ha solicitado darse de baja de %(title)s.\n\nSi desea confirmar su baja, siga este enlace de activación: \nhttp://%(domain)s%(url)s\n\nSaludos cordiales," + +#: templates/newsletter/message/unsubscribe_subject.txt:1 +msgid "Confirm unsubscription" +msgstr "Confirmar la cancelación de la suscripción" + +#: templates/newsletter/message/update.html:6 +#, python-format +msgid "Update of subscription to %(title)s" +msgstr "Actualización de la suscripción a %(title)s" + +#: templates/newsletter/message/update.html:9 +#: templates/newsletter/message/update.txt:1 +#, python-format +msgid "" +"Dear %(name)s,\n" +"\n" +"you, or someone in your name requested updating your personal information for %(title)s.\n" +"\n" +"To make changes to your information in our database, please follow this activation link:\n" +"http://%(domain)s%(url)s\n" +"\n" +"Kind regards," +msgstr "Estimado %(name)s,\n\nusted, o alguien en su nombre solicitó actualizar su información personal para %(title)s.\n\nPara hacer cambios a su información en nuestra base de datos, por favor siga este enlace de activación: \nhttp://%(domain)s%(url)s\n\nSaludos cordiales," + +#: 
templates/newsletter/message/update_subject.txt:1 +msgid "Update information" +msgstr "Actualizar información" + +#: templates/newsletter/newsletter_detail.html:5 +msgid "Newsletter detail" +msgstr "Detalle de Boletín de Noticias" + +#: templates/newsletter/newsletter_detail.html:13 +#: templates/newsletter/newsletter_list.html:16 +#: templates/newsletter/subscription_subscribe.html:23 +#: templates/newsletter/subscription_subscribe_user.html:22 +msgid "Subscribe" +msgstr "Suscribirse" + +#: templates/newsletter/newsletter_detail.html:17 +msgid "Update" +msgstr "Actualizar" + +#: templates/newsletter/newsletter_detail.html:24 +msgid "Archive" +msgstr "Archivar" + +#: templates/newsletter/newsletter_detail.html:27 +#: templates/newsletter/submission_archive.html:18 +msgid "Back to list" +msgstr "Volver al listado" + +#: templates/newsletter/newsletter_list.html:5 +msgid "Newsletter list" +msgstr "Lista de Boletines de Noticias" + +#: templates/newsletter/newsletter_list.html:27 +msgid "Update subscriptions" +msgstr "Actualizar suscripciones" + +#: templates/newsletter/submission_archive.html:5 +#: templates/newsletter/submission_archive.html:10 +msgid "Newsletter archive" +msgstr "Archivo de Boletín de Noticias" + +#: templates/newsletter/subscription_activate.html:5 +#: templates/newsletter/subscription_activate.html:8 +#: templates/newsletter/subscription_subscribe_activated.html:5 +#: templates/newsletter/subscription_subscribe_activated.html:8 +#: templates/newsletter/subscription_unsubscribe_activated.html:5 +#: templates/newsletter/subscription_unsubscribe_activated.html:8 +#: templates/newsletter/subscription_update_activated.html:5 +#: templates/newsletter/subscription_update_activated.html:8 +msgid "activate" +msgstr "activar" + +#: templates/newsletter/subscription_activate.html:13 +msgid "Activate" +msgstr "Activar" + +#: templates/newsletter/subscription_subscribe.html:5 +#: templates/newsletter/subscription_subscribe.html:8 +#: 
templates/newsletter/subscription_subscribe_email_sent.html:5 +#: templates/newsletter/subscription_subscribe_email_sent.html:8 +#: templates/newsletter/subscription_subscribe_user.html:5 +#: templates/newsletter/subscription_subscribe_user.html:8 +msgid "Newsletter subscribe" +msgstr "Suscribirse al boletín de noticias" + +#: templates/newsletter/subscription_subscribe.html:11 +#: templates/newsletter/subscription_unsubscribe.html:11 +#: templates/newsletter/subscription_update.html:11 +msgid "" +"Due to a technical error we were not able to submit your confirmation email." +" This could be because your email address is invalid." +msgstr "Por un problema técnico no hemos podido enviar su correo de confirmación. Esto puede ser porque su correo electrónico es inválido." + +#: templates/newsletter/subscription_subscribe_activated.html:10 +msgid "Your subscription has successfully been activated." +msgstr "Su suscripción se ha activado con éxito." + +#: templates/newsletter/subscription_subscribe_email_sent.html:10 +msgid "" +"Your subscription request was successfully received and an activation email " +"has been sent to you. In that email you will find a link which you need to " +"follow in order to activate your subscription." +msgstr "Tu petición de suscripción ha sido recibida y se te ha enviado un correo. En dicho correo encontrarás un enlace que debes seguir para activar tu suscripción." + +#: templates/newsletter/subscription_subscribe_user.html:19 +msgid "Do you want to subscribe to this newsletter?" +msgstr "¿Desea suscribirse a este Boletín de Noticias?" 
+ +#: templates/newsletter/subscription_unsubscribe.html:5 +#: templates/newsletter/subscription_unsubscribe.html:8 +#: templates/newsletter/subscription_unsubscribe_email_sent.html:5 +#: templates/newsletter/subscription_unsubscribe_email_sent.html:8 +#: templates/newsletter/subscription_unsubscribe_user.html:5 +#: templates/newsletter/subscription_unsubscribe_user.html:8 +msgid "Newsletter unsubscribe" +msgstr "Baja de boletín de noticias" + +#: templates/newsletter/subscription_unsubscribe_activated.html:10 +msgid "You have successfully been unsubscribed." +msgstr "Te has dado de baja con éxito." + +#: templates/newsletter/subscription_unsubscribe_email_sent.html:10 +msgid "" +"Your unsubscription request has successfully been received. An email has " +"been sent to you with a link you need to follow in order to confirm your " +"unsubscription." +msgstr "Tu petición de baja ha sido recibida. Un correo ha sido enviado con un enlace que debes seguir para confirmar tu baja." + +#: templates/newsletter/subscription_unsubscribe_user.html:20 +msgid "Do you want to unsubscribe from this newsletter?" +msgstr "¿Desea darse de baja de este Boletín de Noticias?" + +#: templates/newsletter/subscription_update.html:5 +#: templates/newsletter/subscription_update.html:8 +#: templates/newsletter/subscription_update_email_sent.html:5 +#: templates/newsletter/subscription_update_email_sent.html:8 +msgid "Newsletter update" +msgstr "Actualización de Boletín de Noticias" + +#: templates/newsletter/subscription_update.html:23 +msgid "Update subscription" +msgstr "Actualizar suscripción" + +#: templates/newsletter/subscription_update_activated.html:10 +msgid "Your subscription has successfully been updated." +msgstr "Tu suscripción ha sido actualizada con éxito." + +#: templates/newsletter/subscription_update_email_sent.html:10 +msgid "" +"Your update request was successfully received and an activation email has " +"been sent to you. 
In that email you will find a link which you need to " +"follow in order to update your subscription." +msgstr "Tu petición de actualización ha sido recibida y se te ha enviado un correo. En dicho correo encontrarás un enlace que debes seguir para confirmar la actualización de tu suscripción." + +#: templates/widget/image.html:2 +msgid "Currently:" +msgstr "Actualmente:" + +#: templates/widget/image.html:4 +msgid "Change:" +msgstr "Cambiar:" + +#: validators.py:15 +#, python-format +msgid "" +"The e-mail address '%(email)s' belongs to a user with an account on this " +"site. Please log in as that user and try again." +msgstr "La dirección de correo electrónico '%(email)s' pertenece a un usuario con una cuenta en este sitio. Por favor, inicie sesión como ese usuario y vuelva a intentarlo." + +#: views.py:120 +msgid "Your changes have been saved." +msgstr "Sus cambios han sido guardados." + +#: views.py:311 +#, python-format +msgid "You have been subscribed to %s." +msgstr "Te has suscrito a %s." + +#: views.py:315 +#, python-format +msgid "User %(rs)s subscribed to %(my_newsletter)s." +msgstr "El usuario %(rs)s se suscribió a %(my_newsletter)s." + +#: views.py:325 +#, python-format +msgid "You are already subscribed to %s." +msgstr "Ya está suscrito a %s." + +#: views.py:350 +#, python-format +msgid "You have been unsubscribed from %s." +msgstr "Te has dado de baja de %s." + +#: views.py:354 +#, python-format +msgid "User %(rs)s unsubscribed from %(my_newsletter)s." +msgstr "Usuario %(rs)s dado de baja de %(my_newsletter)s." + +#: views.py:367 +#, python-format +msgid "You are not subscribed to %s." +msgstr "No estás suscrito a %s." 
diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/locale/es/LC_MESSAGES/djangojs.mo b/thesisenv/lib/python3.6/site-packages/newsletter/locale/es/LC_MESSAGES/djangojs.mo new file mode 100644 index 0000000..b665002 Binary files /dev/null and b/thesisenv/lib/python3.6/site-packages/newsletter/locale/es/LC_MESSAGES/djangojs.mo differ diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/locale/es/LC_MESSAGES/djangojs.po b/thesisenv/lib/python3.6/site-packages/newsletter/locale/es/LC_MESSAGES/djangojs.po new file mode 100644 index 0000000..3ceaeb2 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/newsletter/locale/es/LC_MESSAGES/djangojs.po @@ -0,0 +1,25 @@ +# SOME DESCRIPTIVE TITLE. +# Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER +# This file is distributed under the same license as the PACKAGE package. +# +# Translators: +# Diamo Rafaela , 2016 +msgid "" +msgstr "" +"Project-Id-Version: django-newsletter\n" +"Report-Msgid-Bugs-To: \n" +"POT-Creation-Date: 2012-11-19 21:55+0100\n" +"PO-Revision-Date: 2016-05-07 13:56+0000\n" +"Last-Translator: Diamo Rafaela \n" +"Language-Team: Spanish (http://www.transifex.com/dokterbob/django-newsletter/language/es/)\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Language: es\n" +"Plural-Forms: nplurals=2; plural=(n != 1);\n" + +#: static/newsletter/admin/js/submit_interface.js:12 +msgid "" +"The submission has been changed. It has to be saved before you can submit. " +"Click OK to proceed with saving, click cancel to continue editing." +msgstr "Este envío fue modificado. Se debe guardar antes de que puedas enviarlo. Haz click en el botón de OK para proceder a guardar, haz click en cancelar para continuar editando." 
diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/locale/fa/LC_MESSAGES/django.mo b/thesisenv/lib/python3.6/site-packages/newsletter/locale/fa/LC_MESSAGES/django.mo new file mode 100644 index 0000000..5bb70fd Binary files /dev/null and b/thesisenv/lib/python3.6/site-packages/newsletter/locale/fa/LC_MESSAGES/django.mo differ diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/locale/fa/LC_MESSAGES/django.po b/thesisenv/lib/python3.6/site-packages/newsletter/locale/fa/LC_MESSAGES/django.po new file mode 100644 index 0000000..c752f75 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/newsletter/locale/fa/LC_MESSAGES/django.po @@ -0,0 +1,780 @@ +# SOME DESCRIPTIVE TITLE. +# Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER +# This file is distributed under the same license as the PACKAGE package. +# +# Translators: +# Ali Javadi , 2013 +# arian hedayati far , 2016 +# hooman zabeti , 2013 +# dokterbob , 2016 +# Mohammad Hossein Mojtahedi , 2014 +# mohsen rbb , 2015 +msgid "" +msgstr "" +"Project-Id-Version: django-newsletter\n" +"Report-Msgid-Bugs-To: \n" +"POT-Creation-Date: 2016-01-05 17:58+0100\n" +"PO-Revision-Date: 2016-03-22 19:20+0000\n" +"Last-Translator: arian hedayati far \n" +"Language-Team: Persian (http://www.transifex.com/dokterbob/django-newsletter/language/fa/)\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Language: fa\n" +"Plural-Forms: nplurals=1; plural=0;\n" + +#: addressimport/parsers.py:40 +#, python-format +msgid "Entry '%s' does not contain a valid e-mail address." +msgstr "ورودی '%s' دارای آدرس ایمیل معتبر نمی باشد." + +#: addressimport/parsers.py:55 +#, python-format +msgid "The address file contains duplicate entries for '%s'." +msgstr "آدرس فایل شامل نوشته های تکراری برای '%s' است." + +#: addressimport/parsers.py:69 +msgid "Some entries are already subscribed to." +msgstr "بعضی از ورودی‌ها هم‌اکنون عضو هستند." 
+ +#: addressimport/parsers.py:108 +#, python-format +msgid "" +"E-mail address %(email)s too long, maximum length is %(email_length)s " +"characters." +msgstr "آدرس ایمیل %(email)s بسیار بلند است، حداکثر %(email_length)s کاراکتر پذیرفته می‌شود." + +#: addressimport/parsers.py:135 +#, python-format +msgid "Name %(name)s too long, maximum length is %(name_length)s characters." +msgstr "نام %(name)s بسیار بلند است، حداکثر %(name_length)s کاراکتر پذیرفته می‌شود." + +#: addressimport/parsers.py:196 addressimport/parsers.py:208 +#: addressimport/parsers.py:243 models.py:162 +msgid "name" +msgstr "نام" + +#: addressimport/parsers.py:200 +msgid "display" +msgstr "نمایش" + +#: addressimport/parsers.py:207 +#, python-format +msgid "" +"Name column not found. The name of this column should be either 'name' or " +"'%s'." +msgstr "نام این ستون پیدا نشده است.نام این ستون می بایست 'name' یا '%s' باشد" + +#: addressimport/parsers.py:219 addressimport/parsers.py:231 +#: addressimport/parsers.py:244 models.py:37 models.py:176 +msgid "e-mail" +msgstr "ایمیل" + +#: addressimport/parsers.py:229 +#, python-format +msgid "" +"E-mail column not found. The name of this column should be either 'email', " +"'e-mail' or '%(email)s'." +msgstr "ستون ایمیل پیدا نشد.نام این ستون می بایست 'email','e-mail' یا '%(email)s' باشد" + +#: addressimport/parsers.py:239 +#, python-format +msgid "" +"Could not properly determine the proper columns in the CSV-file. There " +"should be a field called 'name' or '%(name)s' and one called 'e-mail' or " +"'%(e-mail)s'." +msgstr "سیستم به‌درستی نمی‌تواند ستون‌های فایل سی‌اس‌وی را تشخیص دهد. در فایل باید یک ستون به نام 'name' یا '%(name)s' و ستونی به نام 'e-mail' یا '%(e-mail)s' وجود داشته باشند." + +#: addressimport/parsers.py:262 +#, python-format +msgid "Row with content '%(row)s' does not contain a name and email field." +msgstr "ردیف '%(row)s' دارای فیلد نام و ایمیل نمی باشد." 
+ +#: addressimport/parsers.py:288 +#, python-format +msgid "Error reading vCard file: %s" +msgstr "خطای خواندن فایل vCard: %s" + +#: addressimport/parsers.py:307 +#, python-format +msgid "Entry '%s' contains no email address." +msgstr "ورودی '%s' حاوی آدرس ایمیل نمی باشد." + +#: addressimport/parsers.py:343 +msgid "Some entries have no e-mail address." +msgstr "برخی از ورودی ها حاوی آدرس ایمیل نمی باشند." + +#: admin.py:63 +msgid "Messages" +msgstr "پیام‌ها" + +#: admin.py:71 +#: templates/admin/newsletter/subscription/confirmimportform.html:16 +#: templates/admin/newsletter/subscription/importform.html:16 +msgid "Subscriptions" +msgstr "عضویت" + +#: admin.py:77 +msgid "Submissions" +msgstr "ارسال" + +#: admin.py:97 models.py:537 +msgid "submission" +msgstr "ارسال" + +#: admin.py:104 admin.py:247 admin.py:365 models.py:100 models.py:296 +#: models.py:487 models.py:665 +msgid "newsletter" +msgstr "خبرنامه" + +#: admin.py:112 +msgid "publish date" +msgstr "تاریخ انتشار" + +#: admin.py:138 +msgid "Sent." +msgstr "فرستاده شده" + +#: admin.py:141 +msgid "Delayed submission." +msgstr "ارسال با تأخیر" + +#: admin.py:143 +msgid "Submitting." +msgstr "ارسال." + +#: admin.py:145 +msgid "Not sent." +msgstr "فرستاده نشده" + +#: admin.py:146 admin.py:390 admin_forms.py:113 +msgid "Status" +msgstr "وضعیت" + +#: admin.py:153 +msgid "Submission already sent." +msgstr "ارسال انجام شد." + +#: admin.py:162 +msgid "Your submission is being sent." +msgstr "ارسال انجام شد." + +#: admin.py:208 +msgid "Optional" +msgstr "انتخابی" + +#: admin.py:235 models.py:460 models.py:512 models.py:668 +msgid "message" +msgstr "پیام" + +#: admin.py:239 templates/admin/newsletter/message/change_form.html.py:9 +#: templates/admin/newsletter/message/preview.html:13 +msgid "Preview" +msgstr "پیش‌نمایش" + +#: admin.py:267 views.py:606 +msgid "" +"No HTML template associated with the newsletter this message belongs to." 
+msgstr "هیچ نمونه اچ‌تی‌ام‌الی برای خبرنامه‌ای که این پیام متعلق به آن است، موجود نیست." + +#: admin.py:385 admin_forms.py:101 +msgid "Subscribed" +msgstr "عضو شده" + +#: admin.py:387 admin_forms.py:102 +msgid "Unsubscribed" +msgstr "لغو عضویت شده" + +#: admin.py:389 +msgid "Unactivated" +msgstr "غیر فعال" + +#: admin.py:397 models.py:309 +msgid "subscribe date" +msgstr "تاریخ عضویت" + +#: admin.py:404 models.py:317 +msgid "unsubscribe date" +msgstr "تاریخ لغو عضویت" + +#: admin.py:412 +#, python-format +msgid "%s user has been successfully subscribed." +msgid_plural "%s users have been successfully subscribed." +msgstr[0] "%s کاربر با موفقیت عضو شده‌اند." + +#: admin.py:417 +msgid "Subscribe selected users" +msgstr "کاربران انتخاب شده را عضو نمایید." + +#: admin.py:424 +#, python-format +msgid "%s user has been successfully unsubscribed." +msgid_plural "%s users have been successfully unsubscribed." +msgstr[0] "%s کاربر با موفقیت عضویت خود را لغو کرده‌اند." + +#: admin.py:429 +msgid "Unsubscribe selected users" +msgstr "عضویت کاربران انتخاب شده را لغو کنید" + +#: admin.py:484 +#, python-format +msgid "%s subscriptions have been successfully added." +msgstr "%s عضویت به صورت موفقیت آمیز اضافه شد." + +#: admin_forms.py:41 +#, python-format +msgid "File type '%s' was not recognized." +msgstr "نوع فایل '%s' قابل شناسایی نمی باشد." + +#: admin_forms.py:58 +#, python-format +msgid "File extension '%s' was not recognized." +msgstr "پسوند فایل '%s' قابل شناسایی نمی باشد." + +#: admin_forms.py:62 +msgid "No entries could found in this file." +msgstr "دراین فایل هیچ ورودی پیدا نشد." 
+ +#: admin_forms.py:70 +#: templates/admin/newsletter/subscription/confirmimportform.html:12 +#: templates/admin/newsletter/subscription/importform.html:12 +#: templates/newsletter/common.html:7 +#: templates/newsletter/newsletter_detail.html:10 +#: templates/newsletter/newsletter_list.html:14 +#: templates/newsletter/newsletter_list.html:32 +#: templates/newsletter/subscription_activate.html:5 +#: templates/newsletter/subscription_activate.html:8 +#: templates/newsletter/subscription_subscribe_activated.html:5 +#: templates/newsletter/subscription_subscribe_activated.html:8 +#: templates/newsletter/subscription_unsubscribe_activated.html:5 +#: templates/newsletter/subscription_unsubscribe_activated.html:8 +#: templates/newsletter/subscription_update_activated.html:5 +#: templates/newsletter/subscription_update_activated.html:8 +msgid "Newsletter" +msgstr "خبرنامه" + +#: admin_forms.py:73 +msgid "Address file" +msgstr "فایل آدرس" + +#: admin_forms.py:75 +msgid "Ignore non-fatal errors" +msgstr "نادیده گرفتن خطاهای غیراساسی" + +#: admin_forms.py:86 +msgid "You should confirm in order to continue." +msgstr "شما قبل از اینکه ادامه دهید باید تأیید کنید" + +#: admin_forms.py:89 +#: templates/admin/newsletter/subscription/confirmimportform.html:23 +#: templates/admin/newsletter/subscription/confirmimportform.html:28 +msgid "Confirm import" +msgstr "تأیید ورود اطلاعات" + +#: admin_forms.py:119 admin_forms.py:127 +msgid "If a user has been selected this field should remain empty." +msgstr "اگر کاربری این فیلد را انتخاب کرده باشد باید خالی بماند." + +#: admin_forms.py:137 +msgid "Either a user must be selected or an email address must be specified." +msgstr "یک کاربر یا یک ایمیل باید انتخاب شود." + +#: admin_forms.py:162 +msgid "" +"This message has already been published in some other submission. Messages " +"can only be published once." +msgstr "این پیام در یکی از ارسال های گذشته استفاده شده.هر پیام تنها یک بار قابلیت نشر را دارد." 
+ +#: admin_utils.py:25 +#, python-format +msgid "%(name)s object with primary key %(key)r does not exist." +msgstr "%(name)s شی با کلید اساسی %(key)r وجود ندارد." + +#: forms.py:47 forms.py:106 +msgid "An e-mail address is required." +msgstr "آدرس ایمیل اجباری است" + +#: forms.py:55 +#, python-format +msgid "" +"The e-mail address '%(email)s' belongs to a user with an account on this " +"site. Please log in as that user and try again." +msgstr "این آدرس ایمیل '%(email)s' متعلق به یکی از اعضاء سایت می باشد.در صورتی که این اشتراک متعلق به شماست لطفا با آن وارد شوید" + +#: forms.py:72 +msgid "Your e-mail address has already been subscribed to." +msgstr "آدرس ایمیل شما عضو این خبرنامه است." + +#: forms.py:97 +msgid "This subscription has not yet been activated." +msgstr "این عضویت هنوز فعال نشده است." + +#: forms.py:114 +#, python-format +msgid "" +"This e-mail address belongs to the user '%(username)s'. Please log in as " +"that user and try again." +msgstr "این آدرس ایمیل متعلق به کاربر '%(username)s' می باشد. اگر این نام کاربری به شما تعلق دارد لطفا با آن وارد شوید." + +#: forms.py:132 +msgid "This e-mail address has not been subscribed to." +msgstr "این آدرس ایمیل عضو این خبرنامه نیست." + +#: forms.py:147 +msgid "This subscription has already been unsubscribed from." +msgstr "این عضویت لغو شده است" + +#: forms.py:163 +msgid "The validation code supplied by you does not match." 
+msgstr "کد اعتباری که توسط شما ارایه شده است" + +#: forms.py:169 +msgid "Activation code" +msgstr "کد فعال‌سازی" + +#: jobs/hourly/submit.py:15 +msgid "Submitting queued newsletter mailings" +msgstr "ارسال نامه صف خبرنامه" + +#: models.py:32 +msgid "newsletter title" +msgstr "عنوان خبرنامه" + +#: models.py:37 +msgid "Sender e-mail" +msgstr "ایمیل ارسال کننده" + +#: models.py:40 +msgid "sender" +msgstr "فرستنده" + +#: models.py:40 +msgid "Sender name" +msgstr "نام فرستنده" + +#: models.py:44 +msgid "visible" +msgstr "نمایان" + +#: models.py:48 +msgid "send html" +msgstr "ارسال html" + +#: models.py:49 +msgid "Whether or not to send HTML versions of e-mails." +msgstr "نخسه های html ایمیل نیز ارسال شود." + +#: models.py:101 +msgid "newsletters" +msgstr "خبرنامه‌ها" + +#: models.py:157 +msgid "user" +msgstr "کاربر" + +#: models.py:162 +msgid "optional" +msgstr "انتخابی" + +#: models.py:206 +#, python-format +msgid "Updated subscription %(subscription)s to %(action)s." +msgstr "" + +#: models.py:248 +msgid "Neither an email nor a username is set. This asks for inconsistency!" +msgstr "نه ایمیل و نه نام کاربری انتخاب نشده است. " + +#: models.py:252 +msgid "If user is set, email must be null and vice versa." +msgstr "اگر کاربر انتخاب شود، ایمیل باید خالی باشد و بر عکس." 
+ +#: models.py:294 +msgid "IP address" +msgstr "آدرس آی‌پی" + +#: models.py:301 +msgid "activation code" +msgstr "کد فعال‌سازی" + +#: models.py:306 +msgid "subscribed" +msgstr "عضو شده" + +#: models.py:314 +msgid "unsubscribed" +msgstr "لغو عضویت شده" + +#: models.py:322 +#, python-format +msgid "%(name)s <%(email)s> to %(newsletter)s" +msgstr "%(name)s <%(email)s> به %(newsletter)s" + +#: models.py:329 +#, python-format +msgid "%(email)s to %(newsletter)s" +msgstr "%(email)s به %(newsletter)s" + +#: models.py:335 +msgid "subscription" +msgstr "اشتراک" + +#: models.py:336 +msgid "subscriptions" +msgstr "اشتراک ها" + +#: models.py:439 +msgid "" +"Sort order determines the order in which articles are concatenated in a " +"post." +msgstr "روش مرتب کردن مشخص می کند روش پشت سر هم گذاشتن مقالات چگونه باشد." + +#: models.py:441 +msgid "sort order" +msgstr "مرتب کردن بر اساس" + +#: models.py:444 models.py:483 +msgid "title" +msgstr "عنوان" + +#: models.py:445 +msgid "text" +msgstr "متن" + +#: models.py:448 +msgid "link" +msgstr "لینک" + +#: models.py:454 +msgid "image" +msgstr "تصویر" + +#: models.py:465 +msgid "article" +msgstr "مقاله" + +#: models.py:466 +msgid "articles" +msgstr "مقالات" + +#: models.py:484 +msgid "slug" +msgstr "نام ماشینی" + +#: models.py:491 +msgid "created" +msgstr "تولید شده" + +#: models.py:494 +msgid "modified" +msgstr "ویرایش شده" + +#: models.py:499 +#, python-format +msgid "%(title)s in %(newsletter)s" +msgstr "%(title)s در%(newsletter)s" + +#: models.py:513 +msgid "messages" +msgstr "پیام " + +#: models.py:538 +msgid "submissions" +msgstr "ارسالی" + +#: models.py:541 +#, python-format +msgid "%(newsletter)s on %(publish_date)s" +msgstr "%(newsletter)s در %(publish_date)s" + +#: models.py:550 +#, python-format +msgid "Submitting %(submission)s to %(count)d people" +msgstr "ارسال %(submission)s به %(count)d نفر" + +#: models.py:597 +#, python-format +msgid "Submitting message to: %s." +msgstr "ارسال پیام به: %s." 
+ +#: models.py:606 +#, python-format +msgid "Message %(subscription)s failed with error: %(error)s" +msgstr "پیام %(subscription)s به علت مشکل %(error)s ارسال نشد" + +#: models.py:630 +#, python-format +msgid "Submission of message %s" +msgstr "پیام های ارسالی %s" + +#: models.py:673 +msgid "" +"If you select none, the system will automatically find the subscribers for " +"you." +msgstr "اگر شما گزینه ی هیچکدام را انتخاب کنید،سیستم به صورت خودکار اشتراکی را برای شما پیدا می کند" + +#: models.py:675 +msgid "recipients" +msgstr "دریافت کنندگان" + +#: models.py:680 +msgid "publication date" +msgstr "تاریخ انتشار" + +#: models.py:684 +msgid "publish" +msgstr "انتشار" + +#: models.py:685 +msgid "Publish in archive." +msgstr "انتشار در ارشیو" + +#: models.py:689 +msgid "prepared" +msgstr "مهیا" + +#: models.py:693 +msgid "sent" +msgstr "ارسال شده " + +#: models.py:697 +msgid "sending" +msgstr "در حال ارسال" + +#: templates/admin/newsletter/message/change_form.html:7 +#: templates/admin/newsletter/newsletter/change_form.html:7 +#: templates/admin/newsletter/submission/change_form.html:14 +msgid "History" +msgstr "تاریخچه" + +#: templates/admin/newsletter/message/change_form.html:8 +#: templates/admin/newsletter/newsletter/change_form.html:8 +#: templates/admin/newsletter/submission/change_form.html:15 +msgid "View on site" +msgstr "نمایش در سایت" + +#: templates/admin/newsletter/message/preview.html:5 +#: templates/admin/newsletter/message/preview.html:19 +msgid "Preview message" +msgstr "پیش نمایش پیام" + +#: templates/admin/newsletter/message/preview.html:9 +#: templates/admin/newsletter/subscription/confirmimportform.html:8 +#: templates/admin/newsletter/subscription/importform.html:8 +msgid "Home" +msgstr "خانه" + +#: templates/admin/newsletter/message/preview.html:11 +msgid "Message" +msgstr "پیام" + +#: templates/admin/newsletter/message/preview.html:22 +#: templates/admin/newsletter/subscription/importform.html:28 +msgid "Change" +msgstr "ویرایش" + +#: 
templates/admin/newsletter/message/preview.html:23 +#: templates/admin/newsletter/subscription/importform.html:29 +msgid "Create submission" +msgstr "ایجاد شماره‌ی جدید خبرنامه" + +#: templates/admin/newsletter/message/preview.html:26 +msgid "HTML" +msgstr "اچ‌تی‌ام‌ال" + +#: templates/admin/newsletter/message/preview.html:30 +msgid "Text" +msgstr "متن" + +#: templates/admin/newsletter/submission/change_form.html:16 +msgid "Submit" +msgstr "ثبت" + +#: templates/admin/newsletter/subscription/change_list.html:8 +msgid "import" +msgstr "ورود" + +#: templates/admin/newsletter/subscription/change_list.html:12 +#, python-format +msgid "Add %(name)s" +msgstr "اضافه‌کردن %(name)s" + +#: templates/admin/newsletter/subscription/confirmimportform.html:3 +#: templates/admin/newsletter/subscription/confirmimportform.html:20 +#: templates/admin/newsletter/subscription/importform.html:3 +#: templates/admin/newsletter/subscription/importform.html:19 +#: templates/admin/newsletter/subscription/importform.html:24 +msgid "Import addresses" +msgstr "ورود آدرس‌ها" + +#: templates/admin/newsletter/subscription/confirmimportform.html:40 +msgid "Confirm" +msgstr "تأیید" + +#: templates/admin/newsletter/subscription/importform.html:37 +msgid "Upload" +msgstr "بارگذاری" + +#: templates/newsletter/newsletter_detail.html:5 +msgid "Newsletter detail" +msgstr "جزئیات خبرنامه" + +#: templates/newsletter/newsletter_list.html:5 +msgid "Newsletter list" +msgstr "لیست خبرنامه‌ها" + +#: templates/newsletter/newsletter_list.html:16 +#: templates/newsletter/subscription_subscribe.html:23 +#: templates/newsletter/subscription_subscribe_user.html:22 +msgid "Subscribe" +msgstr "عضویت" + +#: templates/newsletter/newsletter_list.html:27 +msgid "Update subscriptions" +msgstr "بروز رسانی اشتراک ها" + +#: templates/newsletter/submission_archive.html:5 +#: templates/newsletter/submission_archive.html:10 +msgid "Newsletter archive" +msgstr "آرشیو خبرنامه" + +#: templates/newsletter/subscription_activate.html:5 
+#: templates/newsletter/subscription_activate.html:8 +#: templates/newsletter/subscription_subscribe_activated.html:5 +#: templates/newsletter/subscription_subscribe_activated.html:8 +#: templates/newsletter/subscription_unsubscribe_activated.html:5 +#: templates/newsletter/subscription_unsubscribe_activated.html:8 +#: templates/newsletter/subscription_update_activated.html:5 +#: templates/newsletter/subscription_update_activated.html:8 +msgid "activate" +msgstr "فعال" + +#: templates/newsletter/subscription_activate.html:13 +msgid "Activate" +msgstr "فعال" + +#: templates/newsletter/subscription_subscribe.html:5 +#: templates/newsletter/subscription_subscribe.html:8 +#: templates/newsletter/subscription_subscribe_email_sent.html:5 +#: templates/newsletter/subscription_subscribe_email_sent.html:8 +#: templates/newsletter/subscription_subscribe_user.html:5 +#: templates/newsletter/subscription_subscribe_user.html:8 +msgid "Newsletter subscribe" +msgstr "عضویت خبرنامه" + +#: templates/newsletter/subscription_subscribe.html:11 +#: templates/newsletter/subscription_unsubscribe.html:11 +#: templates/newsletter/subscription_update.html:11 +msgid "" +"Due to a technical error we were not able to submit your confirmation email." +" This could be because your email address is invalid." +msgstr "متاسفانه به دلیل مشکل فنی قادر به ارسال ایمیل تایید برای شما نمی باشیم.این مشکل ممکن است به دلیل نامعتبر بودن آدرس ایمیل شما باشد." + +#: templates/newsletter/subscription_subscribe_activated.html:10 +msgid "Your subscription has successfully been activated." +msgstr "اشتراک شما با موفقیت فعال شد." + +#: templates/newsletter/subscription_subscribe_email_sent.html:10 +msgid "" +"Your subscription request was successfully received and an activation email " +"has been sent to you. In that email you will find a link which you need to " +"follow in order to activate your subscription." 
+msgstr "در خواست اشتراک شما با موفقیت دریافت شد و ایمیل فعال سازی برای شما ارسال گردید.دستورات و لینک فعال سازی در ایمیل قرار دارد." + +#: templates/newsletter/subscription_subscribe_user.html:19 +msgid "Do you want to subscribe to this newsletter?" +msgstr "آیا می خواهید مشترک خبرنامه شوید؟" + +#: templates/newsletter/subscription_unsubscribe.html:5 +#: templates/newsletter/subscription_unsubscribe.html:8 +#: templates/newsletter/subscription_unsubscribe_email_sent.html:5 +#: templates/newsletter/subscription_unsubscribe_email_sent.html:8 +#: templates/newsletter/subscription_unsubscribe_user.html:5 +#: templates/newsletter/subscription_unsubscribe_user.html:8 +msgid "Newsletter unsubscribe" +msgstr "لغو عضویت خبرنامه" + +#: templates/newsletter/subscription_unsubscribe.html:23 +#: templates/newsletter/subscription_unsubscribe_user.html:23 +msgid "Unsubscribe" +msgstr "لغو عضویت" + +#: templates/newsletter/subscription_unsubscribe_activated.html:10 +msgid "You have successfully been unsubscribed." +msgstr "عضویت شما با موفقیت لغو شد." + +#: templates/newsletter/subscription_unsubscribe_email_sent.html:10 +msgid "" +"Your unsubscription request has successfully been received. An email has " +"been sent to you with a link you need to follow in order to confirm your " +"unsubscription." +msgstr "درخواست لغو اشتراک شما با موفقیت دریافت شد.ایمیلی حاوی لینک لغو اشتراک برای شما ارسال گردید." + +#: templates/newsletter/subscription_unsubscribe_user.html:20 +msgid "Do you want to unsubscribe from this newsletter?" 
+msgstr "آیا می خواهید اشتراک خبرنامه خود را لغو کنید؟" + +#: templates/newsletter/subscription_update.html:5 +#: templates/newsletter/subscription_update.html:8 +#: templates/newsletter/subscription_update_email_sent.html:5 +#: templates/newsletter/subscription_update_email_sent.html:8 +msgid "Newsletter update" +msgstr "بروز رسانی خبرنامه" + +#: templates/newsletter/subscription_update.html:23 +msgid "Update subscription" +msgstr "به‌روز‌رسانی اشتراک" + +#: templates/newsletter/subscription_update_activated.html:10 +msgid "Your subscription has successfully been updated." +msgstr "اشتراک شما با موفقیت به روز رسانی شد." + +#: templates/newsletter/subscription_update_email_sent.html:10 +msgid "" +"Your update request was successfully received and an activation email has " +"been sent to you. In that email you will find a link which you need to " +"follow in order to update your subscription." +msgstr "درخواست بروز رسانی شما با موفقیت دریافت شد و ایمیل دستورات به همراه لینک بروز رسانی برای شما ارسال گردید." + +#: templates/widget/image.html:2 +msgid "Currently:" +msgstr "در حال حاضر:" + +#: templates/widget/image.html:4 +msgid "Change:" +msgstr "تغییر:" + +#: views.py:114 +msgid "Your changes have been saved." +msgstr "تغییرات شما ثبت شد." + +#: views.py:305 +#, python-format +msgid "You have been subscribed to %s." +msgstr "شما در %s عضو شدید." + +#: views.py:309 +#, python-format +msgid "User %(rs)s subscribed to %(my_newsletter)s." +msgstr "User %(rs)s مشترک %(my_newsletter)s. شد" + +#: views.py:319 +#, python-format +msgid "You are already subscribed to %s." +msgstr "شما عضو خبر نامه '%s' می باشید" + +#: views.py:344 +#, python-format +msgid "You have been unsubscribed from %s." +msgstr "عضویت شما از %s لغو شد" + +#: views.py:348 +#, python-format +msgid "User %(rs)s unsubscribed from %(my_newsletter)s." +msgstr "User %(rs)s اشتراک خود را از %(my_newsletter)s. لغو کرد" + +#: views.py:361 +#, python-format +msgid "You are not subscribed to %s." 
+msgstr "شما عضو خبرنامه %s نیستید" diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/locale/fa/LC_MESSAGES/djangojs.mo b/thesisenv/lib/python3.6/site-packages/newsletter/locale/fa/LC_MESSAGES/djangojs.mo new file mode 100644 index 0000000..9e5ede9 Binary files /dev/null and b/thesisenv/lib/python3.6/site-packages/newsletter/locale/fa/LC_MESSAGES/djangojs.mo differ diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/locale/fa/LC_MESSAGES/djangojs.po b/thesisenv/lib/python3.6/site-packages/newsletter/locale/fa/LC_MESSAGES/djangojs.po new file mode 100644 index 0000000..5e742e5 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/newsletter/locale/fa/LC_MESSAGES/djangojs.po @@ -0,0 +1,25 @@ +# SOME DESCRIPTIVE TITLE. +# Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER +# This file is distributed under the same license as the PACKAGE package. +# +# Translators: +# Ali Javadi , 2013 +msgid "" +msgstr "" +"Project-Id-Version: django-newsletter\n" +"Report-Msgid-Bugs-To: \n" +"POT-Creation-Date: 2012-11-19 21:55+0100\n" +"PO-Revision-Date: 2013-11-20 12:25+0000\n" +"Last-Translator: Ali Javadi \n" +"Language-Team: Persian (http://www.transifex.com/dokterbob/django-newsletter/language/fa/)\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Language: fa\n" +"Plural-Forms: nplurals=1; plural=0;\n" + +#: static/newsletter/admin/js/submit_interface.js:12 +msgid "" +"The submission has been changed. It has to be saved before you can submit. " +"Click OK to proceed with saving, click cancel to continue editing." +msgstr "پیام تغییر یافته است و باید پیش از ارسال ذخیره شود. تأیید کنید تا ذخیره‌سازی انجام شود یا کنسل کنید و به ویرایش ادامه دهید." 
diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/locale/fr/LC_MESSAGES/django.mo b/thesisenv/lib/python3.6/site-packages/newsletter/locale/fr/LC_MESSAGES/django.mo new file mode 100644 index 0000000..5e2fe11 Binary files /dev/null and b/thesisenv/lib/python3.6/site-packages/newsletter/locale/fr/LC_MESSAGES/django.mo differ diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/locale/fr/LC_MESSAGES/django.po b/thesisenv/lib/python3.6/site-packages/newsletter/locale/fr/LC_MESSAGES/django.po new file mode 100644 index 0000000..3931aeb --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/newsletter/locale/fr/LC_MESSAGES/django.po @@ -0,0 +1,851 @@ +# SOME DESCRIPTIVE TITLE. +# Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER +# This file is distributed under the same license as the PACKAGE package. +# +# Translators: +# Claude Paroz , 2016 +# Marc Belmont , 2014 +# Santiago Malter-Terrada , 2013 +# Spout , 2018 +msgid "" +msgstr "" +"Project-Id-Version: django-newsletter\n" +"Report-Msgid-Bugs-To: \n" +"POT-Creation-Date: 2017-11-16 11:31+0000\n" +"PO-Revision-Date: 2018-03-19 10:53+0000\n" +"Last-Translator: Spout \n" +"Language-Team: French (http://www.transifex.com/dokterbob/django-newsletter/language/fr/)\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Language: fr\n" +"Plural-Forms: nplurals=2; plural=(n > 1);\n" + +#: addressimport/parsers.py:40 +#, python-format +msgid "Entry '%s' does not contain a valid e-mail address." +msgstr "L'entrée « %s » ne contient pas d'adresse de courriel valide." + +#: addressimport/parsers.py:55 +#, python-format +msgid "The address file contains duplicate entries for '%s'." +msgstr "Le fichier d'adresses comporte des doublons pour « %s »." + +#: addressimport/parsers.py:69 +msgid "Some entries are already subscribed to." +msgstr "Certaines entrées sont déjà abonnées." 
+ +#: addressimport/parsers.py:108 +#, python-format +msgid "" +"E-mail address %(email)s too long, maximum length is %(email_length)s " +"characters." +msgstr "L'adresse de courriel %(email)s est trop longue. La taille maximum est %(email_length)s caractères." + +#: addressimport/parsers.py:135 +#, python-format +msgid "Name %(name)s too long, maximum length is %(name_length)s characters." +msgstr "Le nom %(name)s est trop long. La taille maximum est de %(name_length)s caractères." + +#: addressimport/parsers.py:196 addressimport/parsers.py:208 +#: addressimport/parsers.py:243 models.py:165 +msgid "name" +msgstr "nom" + +#: addressimport/parsers.py:200 +msgid "display" +msgstr "affichage" + +#: addressimport/parsers.py:207 +#, python-format +msgid "" +"Name column not found. The name of this column should be either 'name' or " +"'%s'." +msgstr "La colonne du nom n'a pas été trouvée. Le nom de cette colonne doit être « name » ou « %s »." + +#: addressimport/parsers.py:219 addressimport/parsers.py:231 +#: addressimport/parsers.py:244 forms.py:43 forms.py:76 models.py:39 +#: models.py:179 +msgid "e-mail" +msgstr "courriel" + +#: addressimport/parsers.py:229 +#, python-format +msgid "" +"E-mail column not found. The name of this column should be either 'email', " +"'e-mail' or '%(email)s'." +msgstr "La colonne de l'adresse électronique n'a pas été trouvée. Le nom de cette colonne doit être « email », « e-mail » ou « %(email)s »." + +#: addressimport/parsers.py:239 +#, python-format +msgid "" +"Could not properly determine the proper columns in the CSV-file. There " +"should be a field called 'name' or '%(name)s' and one called 'e-mail' or " +"'%(e-mail)s'." +msgstr "Impossible de déterminer les colonnes dans ce fichier CSV. Il doit y avoir un champ nommé « name » ou « %(name)s » et un nommé « e-mail » ou « %(e-mail)s »." + +#: addressimport/parsers.py:264 +#, python-format +msgid "Row with content '%(row)s' does not contain a name and email field." 
+msgstr "La ligne « %(row)s » ne contient pas de champ de courriel et de nom." + +#: addressimport/parsers.py:290 +#, python-format +msgid "Error reading vCard file: %s" +msgstr "Erreur de lecture du fichier vCard : %s" + +#: addressimport/parsers.py:309 +#, python-format +msgid "Entry '%s' contains no email address." +msgstr "L'entrée « %s » ne contient pas d'adresse électronique." + +#: addressimport/parsers.py:345 +msgid "Some entries have no e-mail address." +msgstr "Plusieurs entrées n'ont pas d'adresse électronique. " + +#: admin.py:79 +msgid "Messages" +msgstr "Messages" + +#: admin.py:83 +#: templates/admin/newsletter/subscription/confirmimportform.html:16 +#: templates/admin/newsletter/subscription/importform.html:16 +msgid "Subscriptions" +msgstr "Inscriptions" + +#: admin.py:87 +msgid "Submissions" +msgstr "Publications" + +#: admin.py:99 models.py:102 models.py:300 models.py:480 models.py:699 +msgid "newsletter" +msgstr "newsletter" + +#: admin.py:117 models.py:555 +msgid "submission" +msgstr "publication" + +#: admin.py:124 +msgid "publish date" +msgstr "date de publication" + +#: admin.py:154 +msgid "Sent." +msgstr "Envoyé." + +#: admin.py:157 +msgid "Delayed submission." +msgstr "Publication reportée." + +#: admin.py:159 +msgid "Submitting." +msgstr "Envoi en cours." + +#: admin.py:161 +msgid "Not sent." +msgstr "Pas envoyé." + +#: admin.py:162 admin.py:389 admin_forms.py:115 +msgid "Status" +msgstr "Statut" + +#: admin.py:169 +msgid "Submission already sent." +msgstr "Publication déjà envoyée." + +#: admin.py:178 +msgid "Your submission is being sent." +msgstr "Votre publication est en cours d'envoi." 
+ +#: admin.py:225 +msgid "Optional" +msgstr "Facultatif" + +#: admin.py:253 models.py:450 models.py:491 models.py:703 +msgid "message" +msgstr "message" + +#: admin.py:258 templates/admin/newsletter/message/change_form.html:8 +#: templates/admin/newsletter/message/preview.html:13 +msgid "Preview" +msgstr "Prévisualisation" + +#: admin.py:275 views.py:611 +msgid "" +"No HTML template associated with the newsletter this message belongs to." +msgstr "Aucun gabarit HTML n'est associé à la newsletter relative à ce message." + +#: admin.py:384 admin_forms.py:103 +msgid "Subscribed" +msgstr "Inscrit" + +#: admin.py:386 admin_forms.py:104 +msgid "Unsubscribed" +msgstr "Désinscrit" + +#: admin.py:388 +msgid "Unactivated" +msgstr "Désactivé" + +#: admin.py:396 models.py:314 +msgid "subscribe date" +msgstr "date d'inscription" + +#: admin.py:403 models.py:322 +msgid "unsubscribe date" +msgstr "date de désinscription" + +#: admin.py:411 +#, python-format +msgid "%d user has been successfully subscribed." +msgid_plural "%d users have been successfully subscribed." +msgstr[0] "%d utilisateur a bien été inscrit." +msgstr[1] "%d utilisateurs ont bien été inscrits." + +#: admin.py:416 +msgid "Subscribe selected users" +msgstr "Inscrire les utilisateurs sélectionnés" + +#: admin.py:423 +#, python-format +msgid "%d user has been successfully unsubscribed." +msgid_plural "%d users have been successfully unsubscribed." +msgstr[0] "%d utilisateur a été désinscrit." +msgstr[1] "%d utilisateurs ont été désinscrits." + +#: admin.py:428 +msgid "Unsubscribe selected users" +msgstr "Désinscrire les utilisateurs sélectionnés" + +#: admin.py:484 +#, python-format +msgid "%d subscription has been successfully added." +msgid_plural "%d subscriptions have been successfully added." +msgstr[0] "%d inscription a été ajoutée." +msgstr[1] "%d inscriptions ont été ajoutées." + +#: admin_forms.py:43 +#, python-format +msgid "File type '%s' was not recognized." 
+msgstr "Le type de fichier « %s » n'est pas reconnu." + +#: admin_forms.py:60 +#, python-format +msgid "File extension '%s' was not recognized." +msgstr "L'extension de fichier « %s » n'est pas reconnue." + +#: admin_forms.py:64 +msgid "No entries could found in this file." +msgstr "Aucune entrée trouvée dans ce fichier." + +#: admin_forms.py:72 +#: templates/admin/newsletter/subscription/confirmimportform.html:12 +#: templates/admin/newsletter/subscription/importform.html:12 +#: templates/newsletter/common.html:6 +#: templates/newsletter/newsletter_detail.html:10 +#: templates/newsletter/newsletter_list.html:14 +#: templates/newsletter/newsletter_list.html:32 +#: templates/newsletter/subscription_activate.html:5 +#: templates/newsletter/subscription_activate.html:8 +#: templates/newsletter/subscription_subscribe_activated.html:5 +#: templates/newsletter/subscription_subscribe_activated.html:8 +#: templates/newsletter/subscription_unsubscribe_activated.html:5 +#: templates/newsletter/subscription_unsubscribe_activated.html:8 +#: templates/newsletter/subscription_update_activated.html:5 +#: templates/newsletter/subscription_update_activated.html:8 +msgid "Newsletter" +msgstr "Newsletter" + +#: admin_forms.py:75 +msgid "Address file" +msgstr "Fichier d'adresses" + +#: admin_forms.py:77 +msgid "Ignore non-fatal errors" +msgstr "Ignorer les erreurs non bloquantes" + +#: admin_forms.py:88 +msgid "You should confirm in order to continue." +msgstr "Vous devez confirmer avant de pouvoir continuer." + +#: admin_forms.py:91 +#: templates/admin/newsletter/subscription/confirmimportform.html:23 +#: templates/admin/newsletter/subscription/confirmimportform.html:28 +msgid "Confirm import" +msgstr "Confirmer l'importation" + +#: admin_forms.py:121 admin_forms.py:129 +msgid "If a user has been selected this field should remain empty." +msgstr "Si un utilisateur a été choisi, ce champ doit rester vide." 
+ +#: admin_forms.py:139 +msgid "Either a user must be selected or an email address must be specified." +msgstr "Il faut soit choisir un utilisateur ou indiquer une adresse électronique." + +#: admin_forms.py:164 +msgid "" +"This message has already been published in some other submission. Messages " +"can only be published once." +msgstr "Ce message a déjà été publié dans un autre envoi. Les messages ne peuvent être publiés qu'une seule fois." + +#: admin_utils.py:25 +#, python-format +msgid "%(name)s object with primary key %(key)r does not exist." +msgstr "L'objet %(name)s avec la clé %(key)r n'existe pas." + +#: forms.py:57 +msgid "Your e-mail address has already been subscribed to." +msgstr "Votre adresse électronique est déjà inscrite." + +#: forms.py:84 +msgid "This subscription has not yet been activated." +msgstr "Cette inscription n'a pas encore été activée." + +#: forms.py:102 +msgid "This e-mail address has not been subscribed to." +msgstr "Cette adresse électronique n'a pas été inscrite." + +#: forms.py:117 +msgid "This subscription has already been unsubscribed from." +msgstr "Cette inscription a déjà été annulée." + +#: forms.py:133 +msgid "The validation code supplied by you does not match." +msgstr "Le code de validation que vous avez saisi ne correspond pas." + +#: forms.py:139 +msgid "Activation code" +msgstr "Code d'activation" + +#: jobs/hourly/submit.py:15 +msgid "Submitting queued newsletter mailings" +msgstr "Envoi des newsletters dans la queue" + +#: models.py:34 +msgid "newsletter title" +msgstr "titre de la newsletter" + +#: models.py:39 +msgid "Sender e-mail" +msgstr "Courriel de l'expéditeur" + +#: models.py:42 +msgid "sender" +msgstr "expéditeur" + +#: models.py:42 +msgid "Sender name" +msgstr "Nom de l'expéditeur" + +#: models.py:46 +msgid "visible" +msgstr "visible" + +#: models.py:50 +msgid "send html" +msgstr "envoyer en HTML" + +#: models.py:51 +msgid "Whether or not to send HTML versions of e-mails." 
+msgstr "Indique si des versions HTML des courriels doivent être envoyés." + +#: models.py:103 +msgid "newsletters" +msgstr "newsletters" + +#: models.py:159 +msgid "user" +msgstr "utilisateur" + +#: models.py:165 +msgid "optional" +msgstr "facultatif" + +#: models.py:209 +#, python-format +msgid "Updated subscription %(subscription)s to %(action)s." +msgstr "L'inscription %(subscription)s a été mise à jour avec l'action « %(action)s »." + +#: models.py:251 +msgid "Neither an email nor a username is set. This asks for inconsistency!" +msgstr "Ni adresse électronique, ni nom d'utilisateur n'ont été fournis. Il manque quelque chose !" + +#: models.py:255 +msgid "If user is set, email must be null and vice versa." +msgstr "Si un utilisateur est défini, l'adresse électronique doit être vide et inversement." + +#: models.py:297 +msgid "IP address" +msgstr "Adresse IP" + +#: models.py:306 +msgid "activation code" +msgstr "code d'activation" + +#: models.py:311 +msgid "subscribed" +msgstr "inscrit" + +#: models.py:319 +msgid "unsubscribed" +msgstr "désinscrit" + +#: models.py:327 +#, python-format +msgid "%(name)s <%(email)s> to %(newsletter)s" +msgstr "%(name)s <%(email)s> à %(newsletter)s" + +#: models.py:334 +#, python-format +msgid "%(email)s to %(newsletter)s" +msgstr "%(email)s à %(newsletter)s" + +#: models.py:340 +msgid "subscription" +msgstr "inscription" + +#: models.py:341 +msgid "subscriptions" +msgstr "inscriptions" + +#: models.py:429 +msgid "" +"Sort order determines the order in which articles are concatenated in a " +"post." +msgstr "L'orde de tri détermine l'ordre dans lequel les articles sont concaténés dans un post." 
+ +#: models.py:431 +msgid "sort order" +msgstr "ordre de tri" + +#: models.py:434 models.py:476 +msgid "title" +msgstr "titre" + +#: models.py:435 +msgid "text" +msgstr "texte" + +#: models.py:438 +msgid "link" +msgstr "lien" + +#: models.py:444 +msgid "image" +msgstr "image" + +#: models.py:456 +msgid "article" +msgstr "article" + +#: models.py:457 +msgid "articles" +msgstr "articles" + +#: models.py:477 +msgid "slug" +msgstr "slug" + +#: models.py:484 +msgid "created" +msgstr "créé" + +#: models.py:487 +msgid "modified" +msgstr "modifié" + +#: models.py:492 +msgid "messages" +msgstr "messages" + +#: models.py:497 +#, python-format +msgid "%(title)s in %(newsletter)s" +msgstr "%(title)s dans %(newsletter)s" + +#: models.py:556 +msgid "submissions" +msgstr "publications" + +#: models.py:559 +#, python-format +msgid "%(newsletter)s on %(publish_date)s" +msgstr "%(newsletter)s le %(publish_date)s" + +#: models.py:578 +#, python-format +msgid "Submitting %(submission)s to %(count)d people" +msgstr "Publication de %(submission)s à %(count)d personnes." + +#: models.py:637 +#, python-format +msgid "Submitting message to: %s." +msgstr "Publication du message à : %s." + +#: models.py:646 +#, python-format +msgid "Message %(subscription)s failed with error: %(error)s" +msgstr "Le message %(subscription)s a échoué avec cette erreur : %(error)s" + +#: models.py:664 +#, python-format +msgid "Submission of message %s" +msgstr "Publication du message %s" + +#: models.py:709 +msgid "" +"If you select none, the system will automatically find the subscribers for " +"you." +msgstr "Si vous n'en choisissez pas, le système va automatiquement choisir les destinataires pour vous." + +#: models.py:711 +msgid "recipients" +msgstr "destinataires" + +#: models.py:716 +msgid "publication date" +msgstr "date de publication" + +#: models.py:720 +msgid "publish" +msgstr "publier" + +#: models.py:721 +msgid "Publish in archive." +msgstr "Publier dans l'archive." 
+ +#: models.py:725 +msgid "prepared" +msgstr "préparé" + +#: models.py:729 +msgid "sent" +msgstr "envoyé" + +#: models.py:733 +msgid "sending" +msgstr "envoi en cours" + +#: templates/admin/newsletter/message/preview.html:5 +#: templates/admin/newsletter/message/preview.html:19 +msgid "Preview message" +msgstr "Aperçu du message" + +#: templates/admin/newsletter/message/preview.html:9 +#: templates/admin/newsletter/subscription/confirmimportform.html:8 +#: templates/admin/newsletter/subscription/importform.html:8 +msgid "Home" +msgstr "Accueil" + +#: templates/admin/newsletter/message/preview.html:11 +msgid "Message" +msgstr "Message" + +#: templates/admin/newsletter/message/preview.html:22 +#: templates/admin/newsletter/subscription/importform.html:28 +msgid "Change" +msgstr "Changer" + +#: templates/admin/newsletter/message/preview.html:23 +#: templates/admin/newsletter/subscription/importform.html:29 +msgid "Create submission" +msgstr "Créer une publication" + +#: templates/admin/newsletter/message/preview.html:26 +msgid "HTML" +msgstr "HTML" + +#: templates/admin/newsletter/message/preview.html:30 +msgid "Text" +msgstr "Texte" + +#: templates/admin/newsletter/submission/change_form.html:16 +msgid "Submit" +msgstr "Soumettre" + +#: templates/admin/newsletter/subscription/change_list.html:5 +msgid "Import" +msgstr "Importer" + +#: templates/admin/newsletter/subscription/confirmimportform.html:3 +#: templates/admin/newsletter/subscription/confirmimportform.html:20 +#: templates/admin/newsletter/subscription/importform.html:3 +#: templates/admin/newsletter/subscription/importform.html:19 +#: templates/admin/newsletter/subscription/importform.html:24 +msgid "Import addresses" +msgstr "Importer des adresses" + +#: templates/admin/newsletter/subscription/confirmimportform.html:40 +msgid "Confirm" +msgstr "Confirmer" + +#: templates/admin/newsletter/subscription/importform.html:37 +msgid "Upload" +msgstr "Envoyer" + +#: templates/newsletter/message/message.html:21 
+msgid "Read more" +msgstr "En savoir plus" + +#: templates/newsletter/message/message.html:27 +msgid "Read message online" +msgstr "Lire le message en ligne" + +#: templates/newsletter/message/message.html:29 +#: templates/newsletter/newsletter_detail.html:21 +#: templates/newsletter/subscription_unsubscribe.html:23 +#: templates/newsletter/subscription_unsubscribe_user.html:23 +msgid "Unsubscribe" +msgstr "Désinscription" + +#: templates/newsletter/message/message.txt:15 +msgid "Unsubscribe:" +msgstr "Désinscription :" + +#: templates/newsletter/message/subscribe.html:6 +#, python-format +msgid "Subscription to %(title)s" +msgstr "Inscription à %(title)s" + +#: templates/newsletter/message/subscribe.html:10 +#: templates/newsletter/message/subscribe.txt:1 +#, python-format +msgid "" +"Dear %(name)s,\n" +"\n" +"you, or someone in your name requested a subscription to %(title)s.\n" +"\n" +"If you would like to confirm your subscription, please follow this activation link:\n" +"http://%(domain)s%(url)s\n" +"\n" +"Kind regards," +msgstr "Cher %(name)s,\n\nvous, ou quelqu'un en votre nom a demandé une inscription à %(title)s.\n\nSi vous voulez confirmer votre inscription, veuillez suivre ce lien d'activation :\nhttp://%(domain)s%(url)s\n\nCordialement," + +#: templates/newsletter/message/subscribe_subject.txt:1 +msgid "Confirm subscription" +msgstr "Confirmer l'inscription" + +#: templates/newsletter/message/unsubscribe.html:6 +#, python-format +msgid "Unsubscription from %(title)s" +msgstr "Désinscription de %(title)s" + +#: templates/newsletter/message/unsubscribe.html:9 +#: templates/newsletter/message/unsubscribe.txt:1 +#, python-format +msgid "" +"Dear %(name)s,\n" +"\n" +"you, or someone in your name requested unsubscription from %(title)s.\n" +"\n" +"If you would like to confirm your unsubscription, please follow this activation link:\n" +"http://%(domain)s%(url)s\n" +"\n" +"Kind regards," +msgstr "Cher %(name)s,\n\nvous, ou quelqu'un en votre nom a demandé une 
désinscription à %(title)s.\n\nSi vous voulez confirmer votre désinscription, veuillez suivre ce lien d'activation :\nhttp://%(domain)s%(url)s\n\nCordialement," + +#: templates/newsletter/message/unsubscribe_subject.txt:1 +msgid "Confirm unsubscription" +msgstr "Confirmer la désinscription" + +#: templates/newsletter/message/update.html:6 +#, python-format +msgid "Update of subscription to %(title)s" +msgstr "Modifier l'inscription à %(title)s" + +#: templates/newsletter/message/update.html:9 +#: templates/newsletter/message/update.txt:1 +#, python-format +msgid "" +"Dear %(name)s,\n" +"\n" +"you, or someone in your name requested updating your personal information for %(title)s.\n" +"\n" +"To make changes to your information in our database, please follow this activation link:\n" +"http://%(domain)s%(url)s\n" +"\n" +"Kind regards," +msgstr "Cher %(name)s,\n\nvous, ou quelqu'un en votre nom a demandé la modification de vos données personnelles pour %(title)s.\n\nPour valider les modifications de vos informations dans notre base de données, veuillez suivre ce lien d'activation :\nhttp://%(domain)s%(url)s\n\nCordialement," + +#: templates/newsletter/message/update_subject.txt:1 +msgid "Update information" +msgstr "Modifier les informations" + +#: templates/newsletter/newsletter_detail.html:5 +msgid "Newsletter detail" +msgstr "Détail de la newsletter" + +#: templates/newsletter/newsletter_detail.html:13 +#: templates/newsletter/newsletter_list.html:16 +#: templates/newsletter/subscription_subscribe.html:23 +#: templates/newsletter/subscription_subscribe_user.html:22 +msgid "Subscribe" +msgstr "Inscription" + +#: templates/newsletter/newsletter_detail.html:17 +msgid "Update" +msgstr "Mise à jour" + +#: templates/newsletter/newsletter_detail.html:24 +msgid "Archive" +msgstr "Archiver" + +#: templates/newsletter/newsletter_detail.html:27 +#: templates/newsletter/submission_archive.html:18 +msgid "Back to list" +msgstr "Retour à la liste" + +#: 
templates/newsletter/newsletter_list.html:5 +msgid "Newsletter list" +msgstr "Liste de newsletter" + +#: templates/newsletter/newsletter_list.html:27 +msgid "Update subscriptions" +msgstr "Mettre à jour les inscriptions" + +#: templates/newsletter/submission_archive.html:5 +#: templates/newsletter/submission_archive.html:10 +msgid "Newsletter archive" +msgstr "Archives de la newsletter" + +#: templates/newsletter/subscription_activate.html:5 +#: templates/newsletter/subscription_activate.html:8 +#: templates/newsletter/subscription_subscribe_activated.html:5 +#: templates/newsletter/subscription_subscribe_activated.html:8 +#: templates/newsletter/subscription_unsubscribe_activated.html:5 +#: templates/newsletter/subscription_unsubscribe_activated.html:8 +#: templates/newsletter/subscription_update_activated.html:5 +#: templates/newsletter/subscription_update_activated.html:8 +msgid "activate" +msgstr "activer" + +#: templates/newsletter/subscription_activate.html:13 +msgid "Activate" +msgstr "Activer" + +#: templates/newsletter/subscription_subscribe.html:5 +#: templates/newsletter/subscription_subscribe.html:8 +#: templates/newsletter/subscription_subscribe_email_sent.html:5 +#: templates/newsletter/subscription_subscribe_email_sent.html:8 +#: templates/newsletter/subscription_subscribe_user.html:5 +#: templates/newsletter/subscription_subscribe_user.html:8 +msgid "Newsletter subscribe" +msgstr "Inscription newsletter" + +#: templates/newsletter/subscription_subscribe.html:11 +#: templates/newsletter/subscription_unsubscribe.html:11 +#: templates/newsletter/subscription_update.html:11 +msgid "" +"Due to a technical error we were not able to submit your confirmation email." +" This could be because your email address is invalid." +msgstr "En raison d'un problème technique, nous n'avons pas pu vous envoyer de courriel de confirmation. Votre adresse électronique n'est peut-être pas valide." 
+ +#: templates/newsletter/subscription_subscribe_activated.html:10 +msgid "Your subscription has successfully been activated." +msgstr "Votre inscription a été activée avec succès." + +#: templates/newsletter/subscription_subscribe_email_sent.html:10 +msgid "" +"Your subscription request was successfully received and an activation email " +"has been sent to you. In that email you will find a link which you need to " +"follow in order to activate your subscription." +msgstr "Votre demande d'inscription a bien été prise en compte et un courriel d'activation vous a été envoyé. Veuillez cliquer sur le lien s'y trouvant afin d'activer votre inscription." + +#: templates/newsletter/subscription_subscribe_user.html:19 +msgid "Do you want to subscribe to this newsletter?" +msgstr "Voulez-vous vous inscrire à cette newsletter ?" + +#: templates/newsletter/subscription_unsubscribe.html:5 +#: templates/newsletter/subscription_unsubscribe.html:8 +#: templates/newsletter/subscription_unsubscribe_email_sent.html:5 +#: templates/newsletter/subscription_unsubscribe_email_sent.html:8 +#: templates/newsletter/subscription_unsubscribe_user.html:5 +#: templates/newsletter/subscription_unsubscribe_user.html:8 +msgid "Newsletter unsubscribe" +msgstr "Désinscription newsletter" + +#: templates/newsletter/subscription_unsubscribe_activated.html:10 +msgid "You have successfully been unsubscribed." +msgstr "Vous avez été désinscrit." + +#: templates/newsletter/subscription_unsubscribe_email_sent.html:10 +msgid "" +"Your unsubscription request has successfully been received. An email has " +"been sent to you with a link you need to follow in order to confirm your " +"unsubscription." +msgstr "Votre demande de désinscription a bien été prise en compte. Un courriel vous a été envoyé. Veuillez cliquer sur le lien s'y trouvant afin de confirmer votre désinscription." + +#: templates/newsletter/subscription_unsubscribe_user.html:20 +msgid "Do you want to unsubscribe from this newsletter?" 
+msgstr "Voulez-vous vous désinscrire de cette newsletter ?" + +#: templates/newsletter/subscription_update.html:5 +#: templates/newsletter/subscription_update.html:8 +#: templates/newsletter/subscription_update_email_sent.html:5 +#: templates/newsletter/subscription_update_email_sent.html:8 +msgid "Newsletter update" +msgstr "Mise à jour newsletter" + +#: templates/newsletter/subscription_update.html:23 +msgid "Update subscription" +msgstr "Mettre à jour l'inscription" + +#: templates/newsletter/subscription_update_activated.html:10 +msgid "Your subscription has successfully been updated." +msgstr "Votre inscription a été mise à jour." + +#: templates/newsletter/subscription_update_email_sent.html:10 +msgid "" +"Your update request was successfully received and an activation email has " +"been sent to you. In that email you will find a link which you need to " +"follow in order to update your subscription." +msgstr "Votre demande de mise à jour a bien été reçue et un courriel de confirmation vous a été envoyé. Veuillez cliquer sur le lien s'y trouvant afin de mettre à jour votre inscription." + +#: templates/widget/image.html:2 +msgid "Currently:" +msgstr "Actuellement :" + +#: templates/widget/image.html:4 +msgid "Change:" +msgstr "Changement :" + +#: validators.py:15 +#, python-format +msgid "" +"The e-mail address '%(email)s' belongs to a user with an account on this " +"site. Please log in as that user and try again." +msgstr "Le courriel « %(email)s » correspond à un utilisateur ayant déjà un compte sur ce site. Veuillez vous connecter en tant que cet utilisateur et essayer à nouveau." + +#: views.py:120 +msgid "Your changes have been saved." +msgstr "Vos modifications ont été enregistrées." + +#: views.py:311 +#, python-format +msgid "You have been subscribed to %s." +msgstr "Vous avez été inscrit à %s." + +#: views.py:315 +#, python-format +msgid "User %(rs)s subscribed to %(my_newsletter)s." +msgstr "L'utilisateur %(rs)s s'est inscrit à %(my_newsletter)s." 
+ +#: views.py:325 +#, python-format +msgid "You are already subscribed to %s." +msgstr "Vous êtes déjà inscrit à %s." + +#: views.py:350 +#, python-format +msgid "You have been unsubscribed from %s." +msgstr "Vous avez été désinscrit de %s." + +#: views.py:354 +#, python-format +msgid "User %(rs)s unsubscribed from %(my_newsletter)s." +msgstr "L'utilisateur %(rs)s s'est désinscrit de %(my_newsletter)s." + +#: views.py:367 +#, python-format +msgid "You are not subscribed to %s." +msgstr "Vous n'êtes pas inscrit à %s." diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/locale/fr/LC_MESSAGES/djangojs.mo b/thesisenv/lib/python3.6/site-packages/newsletter/locale/fr/LC_MESSAGES/djangojs.mo new file mode 100644 index 0000000..20f62ef Binary files /dev/null and b/thesisenv/lib/python3.6/site-packages/newsletter/locale/fr/LC_MESSAGES/djangojs.mo differ diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/locale/fr/LC_MESSAGES/djangojs.po b/thesisenv/lib/python3.6/site-packages/newsletter/locale/fr/LC_MESSAGES/djangojs.po new file mode 100644 index 0000000..ad7a7ad --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/newsletter/locale/fr/LC_MESSAGES/djangojs.po @@ -0,0 +1,25 @@ +# SOME DESCRIPTIVE TITLE. +# Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER +# This file is distributed under the same license as the PACKAGE package. 
+# +# Translators: +# Santiago Malter-Terrada , 2013 +msgid "" +msgstr "" +"Project-Id-Version: django-newsletter\n" +"Report-Msgid-Bugs-To: \n" +"POT-Creation-Date: 2012-11-19 21:55+0100\n" +"PO-Revision-Date: 2013-11-20 12:25+0000\n" +"Last-Translator: Santiago Malter-Terrada \n" +"Language-Team: French (http://www.transifex.com/dokterbob/django-newsletter/language/fr/)\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Language: fr\n" +"Plural-Forms: nplurals=2; plural=(n > 1);\n" + +#: static/newsletter/admin/js/submit_interface.js:12 +msgid "" +"The submission has been changed. It has to be saved before you can submit. " +"Click OK to proceed with saving, click cancel to continue editing." +msgstr "La publication a été modifiée. Elle doit être sauvée avant que vous puissiez l'envoyer. Cliquez sur OK pour continuer en sauvegardant, ou sur annuler pour continuer l'édition." diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/locale/is_IS/LC_MESSAGES/django.mo b/thesisenv/lib/python3.6/site-packages/newsletter/locale/is_IS/LC_MESSAGES/django.mo new file mode 100644 index 0000000..9616b4f Binary files /dev/null and b/thesisenv/lib/python3.6/site-packages/newsletter/locale/is_IS/LC_MESSAGES/django.mo differ diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/locale/is_IS/LC_MESSAGES/django.po b/thesisenv/lib/python3.6/site-packages/newsletter/locale/is_IS/LC_MESSAGES/django.po new file mode 100644 index 0000000..de4d015 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/newsletter/locale/is_IS/LC_MESSAGES/django.po @@ -0,0 +1,778 @@ +# SOME DESCRIPTIVE TITLE. +# Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER +# This file is distributed under the same license as the PACKAGE package. 
+# +# Translators: +# ebergmundur , 2014 +# dokterbob , 2016 +msgid "" +msgstr "" +"Project-Id-Version: django-newsletter\n" +"Report-Msgid-Bugs-To: \n" +"POT-Creation-Date: 2016-01-05 17:58+0100\n" +"PO-Revision-Date: 2016-02-02 13:35+0000\n" +"Last-Translator: dokterbob \n" +"Language-Team: Icelandic (Iceland) (http://www.transifex.com/dokterbob/django-newsletter/language/is_IS/)\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Language: is_IS\n" +"Plural-Forms: nplurals=2; plural=(n % 10 != 1 || n % 100 == 11);\n" + +#: addressimport/parsers.py:40 +#, python-format +msgid "Entry '%s' does not contain a valid e-mail address." +msgstr "Færsla '%s' inniheldur ekki nothæft netfang" + +#: addressimport/parsers.py:55 +#, python-format +msgid "The address file contains duplicate entries for '%s'." +msgstr "Netfangalisitinn inniheldur tvítekið '%s'." + +#: addressimport/parsers.py:69 +msgid "Some entries are already subscribed to." +msgstr "Þegar er áskrift að sumum færslum." + +#: addressimport/parsers.py:108 +#, python-format +msgid "" +"E-mail address %(email)s too long, maximum length is %(email_length)s " +"characters." +msgstr "Netfangið %(email)s er of langt, hámarf %(email_length)s bókstafir." + +#: addressimport/parsers.py:135 +#, python-format +msgid "Name %(name)s too long, maximum length is %(name_length)s characters." +msgstr "Nafn %(name)s of langt, hámark %(name_length)s bókstafir." + +#: addressimport/parsers.py:196 addressimport/parsers.py:208 +#: addressimport/parsers.py:243 models.py:162 +msgid "name" +msgstr "nafn" + +#: addressimport/parsers.py:200 +msgid "display" +msgstr "sýna" + +#: addressimport/parsers.py:207 +#, python-format +msgid "" +"Name column not found. The name of this column should be either 'name' or " +"'%s'." +msgstr "Nafnadálkur finnst ekki. Heiti dálksins ætti að vera 'name' eða '%s'." 
+ +#: addressimport/parsers.py:219 addressimport/parsers.py:231 +#: addressimport/parsers.py:244 models.py:37 models.py:176 +msgid "e-mail" +msgstr "netfang" + +#: addressimport/parsers.py:229 +#, python-format +msgid "" +"E-mail column not found. The name of this column should be either 'email', " +"'e-mail' or '%(email)s'." +msgstr "Netfangadálkur fannst ekki. Dálkurinn ætti að heita 'email', 'e-mail' eða '%(email)s'." + +#: addressimport/parsers.py:239 +#, python-format +msgid "" +"Could not properly determine the proper columns in the CSV-file. There " +"should be a field called 'name' or '%(name)s' and one called 'e-mail' or " +"'%(e-mail)s'." +msgstr "Gat ekki skilgreint dálka CVS skjalsins með vissu. Það ætti að vera dálkur sem heitir 'name' eða '%(name)s' og annar kallaður 'e-mail' eða '%(e-mail)s'." + +#: addressimport/parsers.py:262 +#, python-format +msgid "Row with content '%(row)s' does not contain a name and email field." +msgstr "Færsla með '%(row)s' inniheldur ekki nafn né netfang." + +#: addressimport/parsers.py:288 +#, python-format +msgid "Error reading vCard file: %s" +msgstr "Villa við lestur vCars skrár: %s" + +#: addressimport/parsers.py:307 +#, python-format +msgid "Entry '%s' contains no email address." +msgstr "Færsla '%s' inniheldur ekki netfang." + +#: addressimport/parsers.py:343 +msgid "Some entries have no e-mail address." +msgstr "Sumar færslur hafa engin netföng." 
+ +#: admin.py:63 +msgid "Messages" +msgstr "Skilaboð" + +#: admin.py:71 +#: templates/admin/newsletter/subscription/confirmimportform.html:16 +#: templates/admin/newsletter/subscription/importform.html:16 +msgid "Subscriptions" +msgstr "Áskriftir" + +#: admin.py:77 +msgid "Submissions" +msgstr "Sendingar" + +#: admin.py:97 models.py:537 +msgid "submission" +msgstr "sendingar" + +#: admin.py:104 admin.py:247 admin.py:365 models.py:100 models.py:296 +#: models.py:487 models.py:665 +msgid "newsletter" +msgstr "fréttabréf" + +#: admin.py:112 +msgid "publish date" +msgstr "dagsetning birtingar" + +#: admin.py:138 +msgid "Sent." +msgstr "Sent." + +#: admin.py:141 +msgid "Delayed submission." +msgstr "Sendingar í bið." + +#: admin.py:143 +msgid "Submitting." +msgstr "Sendi." + +#: admin.py:145 +msgid "Not sent." +msgstr "Ekki sent." + +#: admin.py:146 admin.py:390 admin_forms.py:113 +msgid "Status" +msgstr "Staða" + +#: admin.py:153 +msgid "Submission already sent." +msgstr "Sending þegar send." + +#: admin.py:162 +msgid "Your submission is being sent." +msgstr "Sending í gagni." + +#: admin.py:208 +msgid "Optional" +msgstr "Valkvæmt" + +#: admin.py:235 models.py:460 models.py:512 models.py:668 +msgid "message" +msgstr "skilaboð" + +#: admin.py:239 templates/admin/newsletter/message/change_form.html.py:9 +#: templates/admin/newsletter/message/preview.html:13 +msgid "Preview" +msgstr "Forskoða" + +#: admin.py:267 views.py:606 +msgid "" +"No HTML template associated with the newsletter this message belongs to." +msgstr "Ekkert HMTL sniðmát tengt því fréttabréfi sem þessi skilaboð tilheyra." 
+ +#: admin.py:385 admin_forms.py:101 +msgid "Subscribed" +msgstr "Í áskrift" + +#: admin.py:387 admin_forms.py:102 +msgid "Unsubscribed" +msgstr "Ekki í áskrift" + +#: admin.py:389 +msgid "Unactivated" +msgstr "Ekki virkjað" + +#: admin.py:397 models.py:309 +msgid "subscribe date" +msgstr "dagsetning áskriftar" + +#: admin.py:404 models.py:317 +msgid "unsubscribe date" +msgstr "áskrift lokið" + +#: admin.py:412 +#, python-format +msgid "%s user has been successfully subscribed." +msgid_plural "%s users have been successfully subscribed." +msgstr[0] "%s notendur hafa verið aftengdir áskrift." +msgstr[1] "%s notendur hafa verið aftengdir áskrift." + +#: admin.py:417 +msgid "Subscribe selected users" +msgstr "Gerðu þessa notendur að áskrifendum." + +#: admin.py:424 +#, python-format +msgid "%s user has been successfully unsubscribed." +msgid_plural "%s users have been successfully unsubscribed." +msgstr[0] "%s notendur hafa verið aftengdir áskrift." +msgstr[1] "%s notendur hafa verið aftengdir áskrift." + +#: admin.py:429 +msgid "Unsubscribe selected users" +msgstr "Aftengja valda notendur" + +#: admin.py:484 +#, python-format +msgid "%s subscriptions have been successfully added." +msgstr "%s áskriftum giftusamlega bætt við." + +#: admin_forms.py:41 +#, python-format +msgid "File type '%s' was not recognized." +msgstr "Skráargerð '%s' er óþekkt." + +#: admin_forms.py:58 +#, python-format +msgid "File extension '%s' was not recognized." +msgstr "Skráarending '%s' er óþekkt." + +#: admin_forms.py:62 +msgid "No entries could found in this file." +msgstr "Engar færslur fundust í þessu skjali." 
+ +#: admin_forms.py:70 +#: templates/admin/newsletter/subscription/confirmimportform.html:12 +#: templates/admin/newsletter/subscription/importform.html:12 +#: templates/newsletter/common.html:7 +#: templates/newsletter/newsletter_detail.html:10 +#: templates/newsletter/newsletter_list.html:14 +#: templates/newsletter/newsletter_list.html:32 +#: templates/newsletter/subscription_activate.html:5 +#: templates/newsletter/subscription_activate.html:8 +#: templates/newsletter/subscription_subscribe_activated.html:5 +#: templates/newsletter/subscription_subscribe_activated.html:8 +#: templates/newsletter/subscription_unsubscribe_activated.html:5 +#: templates/newsletter/subscription_unsubscribe_activated.html:8 +#: templates/newsletter/subscription_update_activated.html:5 +#: templates/newsletter/subscription_update_activated.html:8 +msgid "Newsletter" +msgstr "Fréttabréf" + +#: admin_forms.py:73 +msgid "Address file" +msgstr "Netfangaskrá" + +#: admin_forms.py:75 +msgid "Ignore non-fatal errors" +msgstr "Láta ómerkilegar villur sem vind um eyru þjóta" + +#: admin_forms.py:86 +msgid "You should confirm in order to continue." +msgstr "Þú þarft að staðfesta til að halda áfram." + +#: admin_forms.py:89 +#: templates/admin/newsletter/subscription/confirmimportform.html:23 +#: templates/admin/newsletter/subscription/confirmimportform.html:28 +msgid "Confirm import" +msgstr "Staðfesta innlestur" + +#: admin_forms.py:119 admin_forms.py:127 +msgid "If a user has been selected this field should remain empty." +msgstr "Ef notandi hefur verið valinn ætti þessi reitur að vera tómur." + +#: admin_forms.py:137 +msgid "Either a user must be selected or an email address must be specified." +msgstr "Annað hvort notandi eða netfang verður að vera valið." + +#: admin_forms.py:162 +msgid "" +"This message has already been published in some other submission. Messages " +"can only be published once." 
+msgstr "Þessi skilaboð hafa þegar verið send í annari sendingu.\nSkilaboð er einungid hægt að senda einu sinni." + +#: admin_utils.py:25 +#, python-format +msgid "%(name)s object with primary key %(key)r does not exist." +msgstr "%(name)s hlutiur með lykli %(key)r finnst ekki." + +#: forms.py:47 forms.py:106 +msgid "An e-mail address is required." +msgstr "Netfang er nauðsynlegt." + +#: forms.py:55 +#, python-format +msgid "" +"The e-mail address '%(email)s' belongs to a user with an account on this " +"site. Please log in as that user and try again." +msgstr "Netfangið '%(email)s' tilheyrir skráðum notenda. Skráðu þig inn og reyndu aftur." + +#: forms.py:72 +msgid "Your e-mail address has already been subscribed to." +msgstr "Netfang þitt er þegar á skrá." + +#: forms.py:97 +msgid "This subscription has not yet been activated." +msgstr "Þessi áskrift er ekki virkjuð." + +#: forms.py:114 +#, python-format +msgid "" +"This e-mail address belongs to the user '%(username)s'. Please log in as " +"that user and try again." +msgstr "Þetta netfang tilheyrir notanda '%(username)s'. Skráðu þig inn og reyndu aftur." + +#: forms.py:132 +msgid "This e-mail address has not been subscribed to." +msgstr "Þetta nefagn er ekki í áskrift." + +#: forms.py:147 +msgid "This subscription has already been unsubscribed from." +msgstr "Þessari áskrift hefur þegar verið sagt upp." + +#: forms.py:163 +msgid "The validation code supplied by you does not match." +msgstr "Staðfestingarkóði passar ekki." 
+ +#: forms.py:169 +msgid "Activation code" +msgstr "Virkjunarkóði" + +#: jobs/hourly/submit.py:15 +msgid "Submitting queued newsletter mailings" +msgstr "Sendi fréttabréf í biðröð" + +#: models.py:32 +msgid "newsletter title" +msgstr "yfirskirft fréttabréfs" + +#: models.py:37 +msgid "Sender e-mail" +msgstr "Netfang sendanda" + +#: models.py:40 +msgid "sender" +msgstr "sendandi" + +#: models.py:40 +msgid "Sender name" +msgstr "Nafn sendanda" + +#: models.py:44 +msgid "visible" +msgstr "sýnilegt" + +#: models.py:48 +msgid "send html" +msgstr "senda HTML" + +#: models.py:49 +msgid "Whether or not to send HTML versions of e-mails." +msgstr "Val um HTML eður ei í póstum." + +#: models.py:101 +msgid "newsletters" +msgstr "fréttabréf" + +#: models.py:157 +msgid "user" +msgstr "notandi" + +#: models.py:162 +msgid "optional" +msgstr "valkvæmt" + +#: models.py:206 +#, python-format +msgid "Updated subscription %(subscription)s to %(action)s." +msgstr "Uppfærði áskriftir %(subscription)s sem %(action)s." + +#: models.py:248 +msgid "Neither an email nor a username is set. This asks for inconsistency!" +msgstr "Hvorki netfang né notendanafn eru skráð. Það kallar á ósamræmi." + +#: models.py:252 +msgid "If user is set, email must be null and vice versa." +msgstr "Ef notandi er valinn, verður netfang að vera tómt eða öfugt." 
+ +#: models.py:294 +msgid "IP address" +msgstr "IP tala" + +#: models.py:301 +msgid "activation code" +msgstr "staðfestingarkóði" + +#: models.py:306 +msgid "subscribed" +msgstr "í áskrift" + +#: models.py:314 +msgid "unsubscribed" +msgstr "ekki í áskrift" + +#: models.py:322 +#, python-format +msgid "%(name)s <%(email)s> to %(newsletter)s" +msgstr "%(name)s <%(email)s> til %(newsletter)s" + +#: models.py:329 +#, python-format +msgid "%(email)s to %(newsletter)s" +msgstr "%(email)s til %(newsletter)s" + +#: models.py:335 +msgid "subscription" +msgstr "áskrift" + +#: models.py:336 +msgid "subscriptions" +msgstr "áskriftir" + +#: models.py:439 +msgid "" +"Sort order determines the order in which articles are concatenated in a " +"post." +msgstr "Röðun stýrir röð greina í sendingu" + +#: models.py:441 +msgid "sort order" +msgstr "röðun" + +#: models.py:444 models.py:483 +msgid "title" +msgstr "fyrirsögn" + +#: models.py:445 +msgid "text" +msgstr "text" + +#: models.py:448 +msgid "link" +msgstr "tengill" + +#: models.py:454 +msgid "image" +msgstr "mynd" + +#: models.py:465 +msgid "article" +msgstr "grein" + +#: models.py:466 +msgid "articles" +msgstr "greinar" + +#: models.py:484 +msgid "slug" +msgstr "slóðarkóði [slug}" + +#: models.py:491 +msgid "created" +msgstr "stofnað" + +#: models.py:494 +msgid "modified" +msgstr "breytt" + +#: models.py:499 +#, python-format +msgid "%(title)s in %(newsletter)s" +msgstr "%(title)s í %(newsletter)s" + +#: models.py:513 +msgid "messages" +msgstr "skilaboð" + +#: models.py:538 +msgid "submissions" +msgstr "sendingar" + +#: models.py:541 +#, python-format +msgid "%(newsletter)s on %(publish_date)s" +msgstr "%(newsletter)s á %(publish_date)s" + +#: models.py:550 +#, python-format +msgid "Submitting %(submission)s to %(count)d people" +msgstr "Sendi %(submission)s til %(count)d fólks" + +#: models.py:597 +#, python-format +msgid "Submitting message to: %s." +msgstr "Sendi boð til: %s." 
+ +#: models.py:606 +#, python-format +msgid "Message %(subscription)s failed with error: %(error)s" +msgstr "Skilaboð %(subscription)s brugðust með villu: %(error)s" + +#: models.py:630 +#, python-format +msgid "Submission of message %s" +msgstr "Sending skilaboða %s" + +#: models.py:673 +msgid "" +"If you select none, the system will automatically find the subscribers for " +"you." +msgstr "Ef þú velur ekkert mun kerfið velja áskrifendur." + +#: models.py:675 +msgid "recipients" +msgstr "viðtakendur" + +#: models.py:680 +msgid "publication date" +msgstr "dagsetning birtignar" + +#: models.py:684 +msgid "publish" +msgstr "birta" + +#: models.py:685 +msgid "Publish in archive." +msgstr "Birta í safni" + +#: models.py:689 +msgid "prepared" +msgstr "tilbúið" + +#: models.py:693 +msgid "sent" +msgstr "sent" + +#: models.py:697 +msgid "sending" +msgstr "sendi" + +#: templates/admin/newsletter/message/change_form.html:7 +#: templates/admin/newsletter/newsletter/change_form.html:7 +#: templates/admin/newsletter/submission/change_form.html:14 +msgid "History" +msgstr "Saga" + +#: templates/admin/newsletter/message/change_form.html:8 +#: templates/admin/newsletter/newsletter/change_form.html:8 +#: templates/admin/newsletter/submission/change_form.html:15 +msgid "View on site" +msgstr "Sjá á vef" + +#: templates/admin/newsletter/message/preview.html:5 +#: templates/admin/newsletter/message/preview.html:19 +msgid "Preview message" +msgstr "Forskoða skilaboð" + +#: templates/admin/newsletter/message/preview.html:9 +#: templates/admin/newsletter/subscription/confirmimportform.html:8 +#: templates/admin/newsletter/subscription/importform.html:8 +msgid "Home" +msgstr "Heim" + +#: templates/admin/newsletter/message/preview.html:11 +msgid "Message" +msgstr "Skilaboð" + +#: templates/admin/newsletter/message/preview.html:22 +#: templates/admin/newsletter/subscription/importform.html:28 +msgid "Change" +msgstr "Breyta" + +#: templates/admin/newsletter/message/preview.html:23 +#: 
templates/admin/newsletter/subscription/importform.html:29 +msgid "Create submission" +msgstr "Stofna sendingu" + +#: templates/admin/newsletter/message/preview.html:26 +msgid "HTML" +msgstr "HTML" + +#: templates/admin/newsletter/message/preview.html:30 +msgid "Text" +msgstr "Texti" + +#: templates/admin/newsletter/submission/change_form.html:16 +msgid "Submit" +msgstr "Senda" + +#: templates/admin/newsletter/subscription/change_list.html:8 +msgid "import" +msgstr "sækja" + +#: templates/admin/newsletter/subscription/change_list.html:12 +#, python-format +msgid "Add %(name)s" +msgstr "Bæta við %(name)s" + +#: templates/admin/newsletter/subscription/confirmimportform.html:3 +#: templates/admin/newsletter/subscription/confirmimportform.html:20 +#: templates/admin/newsletter/subscription/importform.html:3 +#: templates/admin/newsletter/subscription/importform.html:19 +#: templates/admin/newsletter/subscription/importform.html:24 +msgid "Import addresses" +msgstr "Sækja netföng" + +#: templates/admin/newsletter/subscription/confirmimportform.html:40 +msgid "Confirm" +msgstr "Staðfesta" + +#: templates/admin/newsletter/subscription/importform.html:37 +msgid "Upload" +msgstr "Sækja" + +#: templates/newsletter/newsletter_detail.html:5 +msgid "Newsletter detail" +msgstr "Upplýsingar um fréttabréf" + +#: templates/newsletter/newsletter_list.html:5 +msgid "Newsletter list" +msgstr "Fréttabréf, listi" + +#: templates/newsletter/newsletter_list.html:16 +#: templates/newsletter/subscription_subscribe.html:23 +#: templates/newsletter/subscription_subscribe_user.html:22 +msgid "Subscribe" +msgstr "Áskrift" + +#: templates/newsletter/newsletter_list.html:27 +msgid "Update subscriptions" +msgstr "Uppfæra áskriftir" + +#: templates/newsletter/submission_archive.html:5 +#: templates/newsletter/submission_archive.html:10 +msgid "Newsletter archive" +msgstr "Safn fréttabréfa" + +#: templates/newsletter/subscription_activate.html:5 +#: templates/newsletter/subscription_activate.html:8 
+#: templates/newsletter/subscription_subscribe_activated.html:5 +#: templates/newsletter/subscription_subscribe_activated.html:8 +#: templates/newsletter/subscription_unsubscribe_activated.html:5 +#: templates/newsletter/subscription_unsubscribe_activated.html:8 +#: templates/newsletter/subscription_update_activated.html:5 +#: templates/newsletter/subscription_update_activated.html:8 +msgid "activate" +msgstr "virkja" + +#: templates/newsletter/subscription_activate.html:13 +msgid "Activate" +msgstr "Virkja" + +#: templates/newsletter/subscription_subscribe.html:5 +#: templates/newsletter/subscription_subscribe.html:8 +#: templates/newsletter/subscription_subscribe_email_sent.html:5 +#: templates/newsletter/subscription_subscribe_email_sent.html:8 +#: templates/newsletter/subscription_subscribe_user.html:5 +#: templates/newsletter/subscription_subscribe_user.html:8 +msgid "Newsletter subscribe" +msgstr "Áskrift fréttabréfa" + +#: templates/newsletter/subscription_subscribe.html:11 +#: templates/newsletter/subscription_unsubscribe.html:11 +#: templates/newsletter/subscription_update.html:11 +msgid "" +"Due to a technical error we were not able to submit your confirmation email." +" This could be because your email address is invalid." +msgstr "Vegna villu hefur ekki tekist að senda þér staðfestingarpóst.\nGæti stafað af villu í netfangi." + +#: templates/newsletter/subscription_subscribe_activated.html:10 +msgid "Your subscription has successfully been activated." +msgstr "Áskrift þín hefur verið virkjuð með sóma." + +#: templates/newsletter/subscription_subscribe_email_sent.html:10 +msgid "" +"Your subscription request was successfully received and an activation email " +"has been sent to you. In that email you will find a link which you need to " +"follow in order to activate your subscription." +msgstr "Áskriftarbeiðni þín er móttekin og póstur sendur á netfangið með tengli sem smella þarf á til að virkja áskriftina." 
+ +#: templates/newsletter/subscription_subscribe_user.html:19 +msgid "Do you want to subscribe to this newsletter?" +msgstr "Vilt þú gerast áskrifandi að þessu fréttabréfi?" + +#: templates/newsletter/subscription_unsubscribe.html:5 +#: templates/newsletter/subscription_unsubscribe.html:8 +#: templates/newsletter/subscription_unsubscribe_email_sent.html:5 +#: templates/newsletter/subscription_unsubscribe_email_sent.html:8 +#: templates/newsletter/subscription_unsubscribe_user.html:5 +#: templates/newsletter/subscription_unsubscribe_user.html:8 +msgid "Newsletter unsubscribe" +msgstr "Afþökkun fréttabréfa" + +#: templates/newsletter/subscription_unsubscribe.html:23 +#: templates/newsletter/subscription_unsubscribe_user.html:23 +msgid "Unsubscribe" +msgstr "Hætta áskrift" + +#: templates/newsletter/subscription_unsubscribe_activated.html:10 +msgid "You have successfully been unsubscribed." +msgstr "Áskift þinni er sagt upp." + +#: templates/newsletter/subscription_unsubscribe_email_sent.html:10 +msgid "" +"Your unsubscription request has successfully been received. An email has " +"been sent to you with a link you need to follow in order to confirm your " +"unsubscription." +msgstr "Beiðni um uppsögn hefur verið móttekin. Tölvupóstur með tengli til stafestingar hefur verið sendur." + +#: templates/newsletter/subscription_unsubscribe_user.html:20 +msgid "Do you want to unsubscribe from this newsletter?" +msgstr "Viltu þú segja upp þessari áskrift?" + +#: templates/newsletter/subscription_update.html:5 +#: templates/newsletter/subscription_update.html:8 +#: templates/newsletter/subscription_update_email_sent.html:5 +#: templates/newsletter/subscription_update_email_sent.html:8 +msgid "Newsletter update" +msgstr "Uppfæra fréttabréf" + +#: templates/newsletter/subscription_update.html:23 +msgid "Update subscription" +msgstr "Uppfæra áskrift" + +#: templates/newsletter/subscription_update_activated.html:10 +msgid "Your subscription has successfully been updated." 
+msgstr "Áskrift þín er sómasamlega uppfærð." + +#: templates/newsletter/subscription_update_email_sent.html:10 +msgid "" +"Your update request was successfully received and an activation email has " +"been sent to you. In that email you will find a link which you need to " +"follow in order to update your subscription." +msgstr "Uppfærslubeiðni þín hefur verið móttekin. Póstur með tengli til virkjunar hefur verið sendur á netfang þitt. Þú verður að staðfesta með því að smella á þann tengil." + +#: templates/widget/image.html:2 +msgid "Currently:" +msgstr "Núna:" + +#: templates/widget/image.html:4 +msgid "Change:" +msgstr "Breyta:" + +#: views.py:114 +msgid "Your changes have been saved." +msgstr "Breytingar eru vistaðar." + +#: views.py:305 +#, python-format +msgid "You have been subscribed to %s." +msgstr "Þú hefur gerst áskrifandi að %s." + +#: views.py:309 +#, python-format +msgid "User %(rs)s subscribed to %(my_newsletter)s." +msgstr "Notandi %(rs)s gerðist áskrifandi að %(my_newsletter)s." + +#: views.py:319 +#, python-format +msgid "You are already subscribed to %s." +msgstr "Þú ert þegar áskrifandi að %s." + +#: views.py:344 +#, python-format +msgid "You have been unsubscribed from %s." +msgstr "Þú hefur sagt upp áskrift að %s." + +#: views.py:348 +#, python-format +msgid "User %(rs)s unsubscribed from %(my_newsletter)s." +msgstr "Notandi %(rs)s sagði upp áskrift að %(my_newsletter)s." + +#: views.py:361 +#, python-format +msgid "You are not subscribed to %s." +msgstr "Þú ert ekki áskrifandi að %s." 
diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/locale/is_IS/LC_MESSAGES/djangojs.mo b/thesisenv/lib/python3.6/site-packages/newsletter/locale/is_IS/LC_MESSAGES/djangojs.mo new file mode 100644 index 0000000..5daa10a Binary files /dev/null and b/thesisenv/lib/python3.6/site-packages/newsletter/locale/is_IS/LC_MESSAGES/djangojs.mo differ diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/locale/is_IS/LC_MESSAGES/djangojs.po b/thesisenv/lib/python3.6/site-packages/newsletter/locale/is_IS/LC_MESSAGES/djangojs.po new file mode 100644 index 0000000..0e027a7 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/newsletter/locale/is_IS/LC_MESSAGES/djangojs.po @@ -0,0 +1,25 @@ +# SOME DESCRIPTIVE TITLE. +# Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER +# This file is distributed under the same license as the PACKAGE package. +# +# Translators: +# ebergmundur , 2014 +msgid "" +msgstr "" +"Project-Id-Version: django-newsletter\n" +"Report-Msgid-Bugs-To: \n" +"POT-Creation-Date: 2012-11-19 21:55+0100\n" +"PO-Revision-Date: 2014-04-16 20:34+0000\n" +"Last-Translator: ebergmundur \n" +"Language-Team: Icelandic (Iceland) (http://www.transifex.com/dokterbob/django-newsletter/language/is_IS/)\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Language: is_IS\n" +"Plural-Forms: nplurals=2; plural=(n % 10 != 1 || n % 100 == 11);\n" + +#: static/newsletter/admin/js/submit_interface.js:12 +msgid "" +"The submission has been changed. It has to be saved before you can submit. " +"Click OK to proceed with saving, click cancel to continue editing." +msgstr "Sendingin hefur tekið breytingum, það verður að vista hana áður en hægt er að senda. Smelltu á OK til að vista og halda áfram eða Cancel til að hætta við og halda áfram að breyta." 
diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/locale/it/LC_MESSAGES/django.mo b/thesisenv/lib/python3.6/site-packages/newsletter/locale/it/LC_MESSAGES/django.mo new file mode 100644 index 0000000..e9e7116 Binary files /dev/null and b/thesisenv/lib/python3.6/site-packages/newsletter/locale/it/LC_MESSAGES/django.mo differ diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/locale/it/LC_MESSAGES/django.po b/thesisenv/lib/python3.6/site-packages/newsletter/locale/it/LC_MESSAGES/django.po new file mode 100644 index 0000000..e59f5d5 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/newsletter/locale/it/LC_MESSAGES/django.po @@ -0,0 +1,778 @@ +# SOME DESCRIPTIVE TITLE. +# Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER +# This file is distributed under the same license as the PACKAGE package. +# +# Translators: +# Andrea , 2013 +# dokterbob , 2016 +msgid "" +msgstr "" +"Project-Id-Version: django-newsletter\n" +"Report-Msgid-Bugs-To: \n" +"POT-Creation-Date: 2016-01-05 17:58+0100\n" +"PO-Revision-Date: 2016-02-02 13:33+0000\n" +"Last-Translator: dokterbob \n" +"Language-Team: Italian (http://www.transifex.com/dokterbob/django-newsletter/language/it/)\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Language: it\n" +"Plural-Forms: nplurals=2; plural=(n != 1);\n" + +#: addressimport/parsers.py:40 +#, python-format +msgid "Entry '%s' does not contain a valid e-mail address." +msgstr "La voce '%s' non contiene un indirizzo e-mail valido." + +#: addressimport/parsers.py:55 +#, python-format +msgid "The address file contains duplicate entries for '%s'." +msgstr "Il file di indirizzi contiene dei duplicati per '%s'." + +#: addressimport/parsers.py:69 +msgid "Some entries are already subscribed to." +msgstr "Alcune voci sono già registrate per l'iscrizione." 
+ +#: addressimport/parsers.py:108 +#, python-format +msgid "" +"E-mail address %(email)s too long, maximum length is %(email_length)s " +"characters." +msgstr "Indirizzo e-mail %(email)s troppo lungo, la lunghezza massima è di %(email_length)s caratteri." + +#: addressimport/parsers.py:135 +#, python-format +msgid "Name %(name)s too long, maximum length is %(name_length)s characters." +msgstr "Nome %(name)s troppo lungo, la lunghezza massima è di %(name_length)s caratteri." + +#: addressimport/parsers.py:196 addressimport/parsers.py:208 +#: addressimport/parsers.py:243 models.py:162 +msgid "name" +msgstr "nome" + +#: addressimport/parsers.py:200 +msgid "display" +msgstr "mostra" + +#: addressimport/parsers.py:207 +#, python-format +msgid "" +"Name column not found. The name of this column should be either 'name' or " +"'%s'." +msgstr "Colonna nome non trovata. Il nome di questa colonna dovrebbe essere 'name' o '%s'." + +#: addressimport/parsers.py:219 addressimport/parsers.py:231 +#: addressimport/parsers.py:244 models.py:37 models.py:176 +msgid "e-mail" +msgstr "e-mail" + +#: addressimport/parsers.py:229 +#, python-format +msgid "" +"E-mail column not found. The name of this column should be either 'email', " +"'e-mail' or '%(email)s'." +msgstr "Colonna e-mail non trovata. Il nome della colonna dovrebbe essere 'email', 'e-mail' o '%(email)s'." + +#: addressimport/parsers.py:239 +#, python-format +msgid "" +"Could not properly determine the proper columns in the CSV-file. There " +"should be a field called 'name' or '%(name)s' and one called 'e-mail' or " +"'%(e-mail)s'." +msgstr "Non è stato possibile determinare le colonne appropriate nel file CSV. Dovrebbe esserci un campo denominato 'name' o '%(name)s' e uno denominato 'e-mail' o '%(e-mail)s'." + +#: addressimport/parsers.py:262 +#, python-format +msgid "Row with content '%(row)s' does not contain a name and email field." +msgstr "La riga con contenuto '%(row)s' non contiene un campo nome ed indirizzo e-mail." 
+ +#: addressimport/parsers.py:288 +#, python-format +msgid "Error reading vCard file: %s" +msgstr "Errore nella lettura del file vCard: %s" + +#: addressimport/parsers.py:307 +#, python-format +msgid "Entry '%s' contains no email address." +msgstr "La voce '%s' non contiene un indirizzo e-mail." + +#: addressimport/parsers.py:343 +msgid "Some entries have no e-mail address." +msgstr "Alcune voci non hanno un indirizzo e-mail." + +#: admin.py:63 +msgid "Messages" +msgstr "Messaggi" + +#: admin.py:71 +#: templates/admin/newsletter/subscription/confirmimportform.html:16 +#: templates/admin/newsletter/subscription/importform.html:16 +msgid "Subscriptions" +msgstr "Iscrizioni" + +#: admin.py:77 +msgid "Submissions" +msgstr "Richieste" + +#: admin.py:97 models.py:537 +msgid "submission" +msgstr "invio" + +#: admin.py:104 admin.py:247 admin.py:365 models.py:100 models.py:296 +#: models.py:487 models.py:665 +msgid "newsletter" +msgstr "newsletter" + +#: admin.py:112 +msgid "publish date" +msgstr "data pubblicazione" + +#: admin.py:138 +msgid "Sent." +msgstr "Inviato." + +#: admin.py:141 +msgid "Delayed submission." +msgstr "Invio rimandato." + +#: admin.py:143 +msgid "Submitting." +msgstr "Invio in corso." + +#: admin.py:145 +msgid "Not sent." +msgstr "Non inviato." + +#: admin.py:146 admin.py:390 admin_forms.py:113 +msgid "Status" +msgstr "Stato" + +#: admin.py:153 +msgid "Submission already sent." +msgstr "Richiesta già inviata." + +#: admin.py:162 +msgid "Your submission is being sent." +msgstr "La tua richiesta è in corso." + +#: admin.py:208 +msgid "Optional" +msgstr "Opzionale" + +#: admin.py:235 models.py:460 models.py:512 models.py:668 +msgid "message" +msgstr "messaggio" + +#: admin.py:239 templates/admin/newsletter/message/change_form.html.py:9 +#: templates/admin/newsletter/message/preview.html:13 +msgid "Preview" +msgstr "Anteprima" + +#: admin.py:267 views.py:606 +msgid "" +"No HTML template associated with the newsletter this message belongs to." 
+msgstr "Nessun template HTML è associato con la newsletter a cui appartiene questo messaggio." + +#: admin.py:385 admin_forms.py:101 +msgid "Subscribed" +msgstr "Iscritto" + +#: admin.py:387 admin_forms.py:102 +msgid "Unsubscribed" +msgstr "Disiscritto" + +#: admin.py:389 +msgid "Unactivated" +msgstr "Non attivato" + +#: admin.py:397 models.py:309 +msgid "subscribe date" +msgstr "data iscrizione" + +#: admin.py:404 models.py:317 +msgid "unsubscribe date" +msgstr "data disiscrizione" + +#: admin.py:412 +#, python-format +msgid "%s user has been successfully subscribed." +msgid_plural "%s users have been successfully subscribed." +msgstr[0] "%s utente è stato iscritto con successo." +msgstr[1] "%s utenti sono stati iscritti con successo." + +#: admin.py:417 +msgid "Subscribe selected users" +msgstr "Iscrivi gli utenti selezionati" + +#: admin.py:424 +#, python-format +msgid "%s user has been successfully unsubscribed." +msgid_plural "%s users have been successfully unsubscribed." +msgstr[0] "%s utente è stato disiscritto con successo." +msgstr[1] "%s utenti sono stati disiscritti con successo." + +#: admin.py:429 +msgid "Unsubscribe selected users" +msgstr "Disiscrivi gli utenti selezionati" + +#: admin.py:484 +#, python-format +msgid "%s subscriptions have been successfully added." +msgstr "%s iscrizioni sono state aggiunte con successo." + +#: admin_forms.py:41 +#, python-format +msgid "File type '%s' was not recognized." +msgstr "Tipo di file '%s' non riconosciuto." + +#: admin_forms.py:58 +#, python-format +msgid "File extension '%s' was not recognized." +msgstr "Estensione del file '%s' non riconosciuta." + +#: admin_forms.py:62 +msgid "No entries could found in this file." +msgstr "Nessuna voce trovata in questo file." 
+ +#: admin_forms.py:70 +#: templates/admin/newsletter/subscription/confirmimportform.html:12 +#: templates/admin/newsletter/subscription/importform.html:12 +#: templates/newsletter/common.html:7 +#: templates/newsletter/newsletter_detail.html:10 +#: templates/newsletter/newsletter_list.html:14 +#: templates/newsletter/newsletter_list.html:32 +#: templates/newsletter/subscription_activate.html:5 +#: templates/newsletter/subscription_activate.html:8 +#: templates/newsletter/subscription_subscribe_activated.html:5 +#: templates/newsletter/subscription_subscribe_activated.html:8 +#: templates/newsletter/subscription_unsubscribe_activated.html:5 +#: templates/newsletter/subscription_unsubscribe_activated.html:8 +#: templates/newsletter/subscription_update_activated.html:5 +#: templates/newsletter/subscription_update_activated.html:8 +msgid "Newsletter" +msgstr "Newsletter" + +#: admin_forms.py:73 +msgid "Address file" +msgstr "File di indirizzi" + +#: admin_forms.py:75 +msgid "Ignore non-fatal errors" +msgstr "Ignora errori non fatali" + +#: admin_forms.py:86 +msgid "You should confirm in order to continue." +msgstr "Dovresti confermare per continuare." + +#: admin_forms.py:89 +#: templates/admin/newsletter/subscription/confirmimportform.html:23 +#: templates/admin/newsletter/subscription/confirmimportform.html:28 +msgid "Confirm import" +msgstr "Conferma importazione" + +#: admin_forms.py:119 admin_forms.py:127 +msgid "If a user has been selected this field should remain empty." +msgstr "Se è stato selezionato un utente questo campo dovrebbe rimanere vuoto." + +#: admin_forms.py:137 +msgid "Either a user must be selected or an email address must be specified." +msgstr "È necessario selezionare un utente o specificare un indirizzo e-mail." + +#: admin_forms.py:162 +msgid "" +"This message has already been published in some other submission. Messages " +"can only be published once." +msgstr "Questo messaggio è già stato pubblicato in qualche altro invio. 
I messaggi possono essere pubblicati solo una volta." + +#: admin_utils.py:25 +#, python-format +msgid "%(name)s object with primary key %(key)r does not exist." +msgstr "L'oggetto %(name)s con 'primary key' %(key)r non esiste." + +#: forms.py:47 forms.py:106 +msgid "An e-mail address is required." +msgstr "È richiesto un indirizzo e-mail." + +#: forms.py:55 +#, python-format +msgid "" +"The e-mail address '%(email)s' belongs to a user with an account on this " +"site. Please log in as that user and try again." +msgstr "L'indirizzo e-mail '%(email)s' appartiene ad un utente registrato su questo sito. Per favore accedi con le credenziali di questo utente e riprova." + +#: forms.py:72 +msgid "Your e-mail address has already been subscribed to." +msgstr "Il tuo indirizzo e-mail è già registrato come iscritto." + +#: forms.py:97 +msgid "This subscription has not yet been activated." +msgstr "L'iscrizione non è ancora stata attivata." + +#: forms.py:114 +#, python-format +msgid "" +"This e-mail address belongs to the user '%(username)s'. Please log in as " +"that user and try again." +msgstr "Questo indirizzo e-mail appartiene all'utente '%(username)s'. Per favore accedi con le sue credenziali e riprova." + +#: forms.py:132 +msgid "This e-mail address has not been subscribed to." +msgstr "Questo indirizzo e-mail non è registrato per l'iscrizione." + +#: forms.py:147 +msgid "This subscription has already been unsubscribed from." +msgstr "Ci si è già disiscritti da questo elenco." + +#: forms.py:163 +msgid "The validation code supplied by you does not match." +msgstr "Il codice di validazione che hai fornito non corrisponde." 
+ +#: forms.py:169 +msgid "Activation code" +msgstr "Codice di attivazione" + +#: jobs/hourly/submit.py:15 +msgid "Submitting queued newsletter mailings" +msgstr "Inviando mail newsletter in coda" + +#: models.py:32 +msgid "newsletter title" +msgstr "titolo newsletter" + +#: models.py:37 +msgid "Sender e-mail" +msgstr "E-mail mittente" + +#: models.py:40 +msgid "sender" +msgstr "mittente" + +#: models.py:40 +msgid "Sender name" +msgstr "Nome mittente" + +#: models.py:44 +msgid "visible" +msgstr "visibile" + +#: models.py:48 +msgid "send html" +msgstr "invia html" + +#: models.py:49 +msgid "Whether or not to send HTML versions of e-mails." +msgstr "Se inviare o meno una versione HTML delle e-mail." + +#: models.py:101 +msgid "newsletters" +msgstr "newsletter" + +#: models.py:157 +msgid "user" +msgstr "utente" + +#: models.py:162 +msgid "optional" +msgstr "opzionale" + +#: models.py:206 +#, python-format +msgid "Updated subscription %(subscription)s to %(action)s." +msgstr "Aggiornata iscrizione %(subscription)s per %(action)s." + +#: models.py:248 +msgid "Neither an email nor a username is set. This asks for inconsistency!" +msgstr "Non è stato impostato né un email né un nome utente. Questo porta all'inconsistenza!" + +#: models.py:252 +msgid "If user is set, email must be null and vice versa." +msgstr "Se è stato impostato un utente, il campo e-mail deve essere nullo e viceversa." 
+ +#: models.py:294 +msgid "IP address" +msgstr "Indirizzo IP" + +#: models.py:301 +msgid "activation code" +msgstr "codice di attivazione" + +#: models.py:306 +msgid "subscribed" +msgstr "iscritto" + +#: models.py:314 +msgid "unsubscribed" +msgstr "disiscritto" + +#: models.py:322 +#, python-format +msgid "%(name)s <%(email)s> to %(newsletter)s" +msgstr "%(name)s <%(email)s> a %(newsletter)s" + +#: models.py:329 +#, python-format +msgid "%(email)s to %(newsletter)s" +msgstr "%(email)s a %(newsletter)s" + +#: models.py:335 +msgid "subscription" +msgstr "iscrizione" + +#: models.py:336 +msgid "subscriptions" +msgstr "iscrizioni" + +#: models.py:439 +msgid "" +"Sort order determines the order in which articles are concatenated in a " +"post." +msgstr "L'ordinamento determina l'ordine in cui gli articoli sono concatenati in un post." + +#: models.py:441 +msgid "sort order" +msgstr "ordinamento" + +#: models.py:444 models.py:483 +msgid "title" +msgstr "titolo" + +#: models.py:445 +msgid "text" +msgstr "testo" + +#: models.py:448 +msgid "link" +msgstr "collegamento" + +#: models.py:454 +msgid "image" +msgstr "immagine" + +#: models.py:465 +msgid "article" +msgstr "articolo" + +#: models.py:466 +msgid "articles" +msgstr "articoli" + +#: models.py:484 +msgid "slug" +msgstr "slug" + +#: models.py:491 +msgid "created" +msgstr "creato" + +#: models.py:494 +msgid "modified" +msgstr "modificato" + +#: models.py:499 +#, python-format +msgid "%(title)s in %(newsletter)s" +msgstr "%(title)s in %(newsletter)s" + +#: models.py:513 +msgid "messages" +msgstr "messaggi" + +#: models.py:538 +msgid "submissions" +msgstr "invii" + +#: models.py:541 +#, python-format +msgid "%(newsletter)s on %(publish_date)s" +msgstr "%(newsletter)s del %(publish_date)s" + +#: models.py:550 +#, python-format +msgid "Submitting %(submission)s to %(count)d people" +msgstr "Inviando %(submission)s a %(count)d persone" + +#: models.py:597 +#, python-format +msgid "Submitting message to: %s." 
+msgstr "Inviando messaggio a: %s." + +#: models.py:606 +#, python-format +msgid "Message %(subscription)s failed with error: %(error)s" +msgstr "Il messaggio %(subscription)s è fallito con errore: %(error)s" + +#: models.py:630 +#, python-format +msgid "Submission of message %s" +msgstr "Invio del messaggio %s" + +#: models.py:673 +msgid "" +"If you select none, the system will automatically find the subscribers for " +"you." +msgstr "Se non selezioni nessuno, il sistema troverà automaticamente i sottoscrittori per te." + +#: models.py:675 +msgid "recipients" +msgstr "destinatari" + +#: models.py:680 +msgid "publication date" +msgstr "data di pubblicazione" + +#: models.py:684 +msgid "publish" +msgstr "pubblica" + +#: models.py:685 +msgid "Publish in archive." +msgstr "Pubblica nell'archivio." + +#: models.py:689 +msgid "prepared" +msgstr "preparato" + +#: models.py:693 +msgid "sent" +msgstr "inviato" + +#: models.py:697 +msgid "sending" +msgstr "inviando" + +#: templates/admin/newsletter/message/change_form.html:7 +#: templates/admin/newsletter/newsletter/change_form.html:7 +#: templates/admin/newsletter/submission/change_form.html:14 +msgid "History" +msgstr "Cronologia" + +#: templates/admin/newsletter/message/change_form.html:8 +#: templates/admin/newsletter/newsletter/change_form.html:8 +#: templates/admin/newsletter/submission/change_form.html:15 +msgid "View on site" +msgstr "Vedi sul sito" + +#: templates/admin/newsletter/message/preview.html:5 +#: templates/admin/newsletter/message/preview.html:19 +msgid "Preview message" +msgstr "Anteprima messaggio" + +#: templates/admin/newsletter/message/preview.html:9 +#: templates/admin/newsletter/subscription/confirmimportform.html:8 +#: templates/admin/newsletter/subscription/importform.html:8 +msgid "Home" +msgstr "Inizio" + +#: templates/admin/newsletter/message/preview.html:11 +msgid "Message" +msgstr "Messaggio" + +#: templates/admin/newsletter/message/preview.html:22 +#: 
templates/admin/newsletter/subscription/importform.html:28 +msgid "Change" +msgstr "Cambia" + +#: templates/admin/newsletter/message/preview.html:23 +#: templates/admin/newsletter/subscription/importform.html:29 +msgid "Create submission" +msgstr "Crea invio" + +#: templates/admin/newsletter/message/preview.html:26 +msgid "HTML" +msgstr "HTML" + +#: templates/admin/newsletter/message/preview.html:30 +msgid "Text" +msgstr "Testo" + +#: templates/admin/newsletter/submission/change_form.html:16 +msgid "Submit" +msgstr "Invia" + +#: templates/admin/newsletter/subscription/change_list.html:8 +msgid "import" +msgstr "importa" + +#: templates/admin/newsletter/subscription/change_list.html:12 +#, python-format +msgid "Add %(name)s" +msgstr "Aggiungi %(name)s" + +#: templates/admin/newsletter/subscription/confirmimportform.html:3 +#: templates/admin/newsletter/subscription/confirmimportform.html:20 +#: templates/admin/newsletter/subscription/importform.html:3 +#: templates/admin/newsletter/subscription/importform.html:19 +#: templates/admin/newsletter/subscription/importform.html:24 +msgid "Import addresses" +msgstr "Importa indirizzi" + +#: templates/admin/newsletter/subscription/confirmimportform.html:40 +msgid "Confirm" +msgstr "Conferma" + +#: templates/admin/newsletter/subscription/importform.html:37 +msgid "Upload" +msgstr "Carica" + +#: templates/newsletter/newsletter_detail.html:5 +msgid "Newsletter detail" +msgstr "Dettaglio newsletter" + +#: templates/newsletter/newsletter_list.html:5 +msgid "Newsletter list" +msgstr "Elenco newsletter" + +#: templates/newsletter/newsletter_list.html:16 +#: templates/newsletter/subscription_subscribe.html:23 +#: templates/newsletter/subscription_subscribe_user.html:22 +msgid "Subscribe" +msgstr "Iscriviti" + +#: templates/newsletter/newsletter_list.html:27 +msgid "Update subscriptions" +msgstr "Aggiorna sottoscrizioni" + +#: templates/newsletter/submission_archive.html:5 +#: templates/newsletter/submission_archive.html:10 +msgid 
"Newsletter archive" +msgstr "Archivio newsletter" + +#: templates/newsletter/subscription_activate.html:5 +#: templates/newsletter/subscription_activate.html:8 +#: templates/newsletter/subscription_subscribe_activated.html:5 +#: templates/newsletter/subscription_subscribe_activated.html:8 +#: templates/newsletter/subscription_unsubscribe_activated.html:5 +#: templates/newsletter/subscription_unsubscribe_activated.html:8 +#: templates/newsletter/subscription_update_activated.html:5 +#: templates/newsletter/subscription_update_activated.html:8 +msgid "activate" +msgstr "attiva" + +#: templates/newsletter/subscription_activate.html:13 +msgid "Activate" +msgstr "Attiva" + +#: templates/newsletter/subscription_subscribe.html:5 +#: templates/newsletter/subscription_subscribe.html:8 +#: templates/newsletter/subscription_subscribe_email_sent.html:5 +#: templates/newsletter/subscription_subscribe_email_sent.html:8 +#: templates/newsletter/subscription_subscribe_user.html:5 +#: templates/newsletter/subscription_subscribe_user.html:8 +msgid "Newsletter subscribe" +msgstr "Iscrizione newsletter" + +#: templates/newsletter/subscription_subscribe.html:11 +#: templates/newsletter/subscription_unsubscribe.html:11 +#: templates/newsletter/subscription_update.html:11 +msgid "" +"Due to a technical error we were not able to submit your confirmation email." +" This could be because your email address is invalid." +msgstr "A causa di un errore tecnico non è stato possibile inviare la tua mail di conferma. Questo potrebbe essere causato da un indirizzo e-mail non valido." + +#: templates/newsletter/subscription_subscribe_activated.html:10 +msgid "Your subscription has successfully been activated." +msgstr "La tua iscrizione è stata attivata con successo." + +#: templates/newsletter/subscription_subscribe_email_sent.html:10 +msgid "" +"Your subscription request was successfully received and an activation email " +"has been sent to you. 
In that email you will find a link which you need to " +"follow in order to activate your subscription." +msgstr "La tua richiesta di iscrizione è stata ricevuta con successo e ti è stata inviata una e-mail di attivazione. In quella e-mail troverai un link da cliccare per poter attivare la tua iscrizione." + +#: templates/newsletter/subscription_subscribe_user.html:19 +msgid "Do you want to subscribe to this newsletter?" +msgstr "Vuoi iscriverti a questa newsletter?" + +#: templates/newsletter/subscription_unsubscribe.html:5 +#: templates/newsletter/subscription_unsubscribe.html:8 +#: templates/newsletter/subscription_unsubscribe_email_sent.html:5 +#: templates/newsletter/subscription_unsubscribe_email_sent.html:8 +#: templates/newsletter/subscription_unsubscribe_user.html:5 +#: templates/newsletter/subscription_unsubscribe_user.html:8 +msgid "Newsletter unsubscribe" +msgstr "Disiscrizione newsletter" + +#: templates/newsletter/subscription_unsubscribe.html:23 +#: templates/newsletter/subscription_unsubscribe_user.html:23 +msgid "Unsubscribe" +msgstr "Disiscriviti" + +#: templates/newsletter/subscription_unsubscribe_activated.html:10 +msgid "You have successfully been unsubscribed." +msgstr "Sei stato disiscritto con successo." + +#: templates/newsletter/subscription_unsubscribe_email_sent.html:10 +msgid "" +"Your unsubscription request has successfully been received. An email has " +"been sent to you with a link you need to follow in order to confirm your " +"unsubscription." +msgstr "La tua richiesta di cancellazione è stata ricevuta con successo. Ti è stata inviata un'e-mail con il link da cliccare per poter confermare la tua disiscrizione." + +#: templates/newsletter/subscription_unsubscribe_user.html:20 +msgid "Do you want to unsubscribe from this newsletter?" +msgstr "Vuoi disiscriverti da questa newsletter?" 
+ +#: templates/newsletter/subscription_update.html:5 +#: templates/newsletter/subscription_update.html:8 +#: templates/newsletter/subscription_update_email_sent.html:5 +#: templates/newsletter/subscription_update_email_sent.html:8 +msgid "Newsletter update" +msgstr "Aggiornamento newsletter" + +#: templates/newsletter/subscription_update.html:23 +msgid "Update subscription" +msgstr "Aggiorna iscrizione" + +#: templates/newsletter/subscription_update_activated.html:10 +msgid "Your subscription has successfully been updated." +msgstr "La tua iscrizione è stata aggiornata con successo." + +#: templates/newsletter/subscription_update_email_sent.html:10 +msgid "" +"Your update request was successfully received and an activation email has " +"been sent to you. In that email you will find a link which you need to " +"follow in order to update your subscription." +msgstr "La tua richiesta di aggiornamento è stata ricevuta con successo e ti è stata inviata una e-mail di attivazione. In quella e-mail troverai un link da cliccare per poter completare l'aggiornamento della tua iscrizione." + +#: templates/widget/image.html:2 +msgid "Currently:" +msgstr "Attualmente:" + +#: templates/widget/image.html:4 +msgid "Change:" +msgstr "Cambia:" + +#: views.py:114 +msgid "Your changes have been saved." +msgstr "Le tue modifiche sono state salvate." + +#: views.py:305 +#, python-format +msgid "You have been subscribed to %s." +msgstr "Sei ora iscritto a %s." + +#: views.py:309 +#, python-format +msgid "User %(rs)s subscribed to %(my_newsletter)s." +msgstr "L'utente %(rs)s si è iscritto a %(my_newsletter)s." + +#: views.py:319 +#, python-format +msgid "You are already subscribed to %s." +msgstr "Sei già iscritto a %s." + +#: views.py:344 +#, python-format +msgid "You have been unsubscribed from %s." +msgstr "Sei stato disiscritto da %s." + +#: views.py:348 +#, python-format +msgid "User %(rs)s unsubscribed from %(my_newsletter)s." 
+msgstr "L'utente %(rs)s si è disiscritto da %(my_newsletter)s." + +#: views.py:361 +#, python-format +msgid "You are not subscribed to %s." +msgstr "Non sei iscritto a %s." diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/locale/it/LC_MESSAGES/djangojs.mo b/thesisenv/lib/python3.6/site-packages/newsletter/locale/it/LC_MESSAGES/djangojs.mo new file mode 100644 index 0000000..f83ffd4 Binary files /dev/null and b/thesisenv/lib/python3.6/site-packages/newsletter/locale/it/LC_MESSAGES/djangojs.mo differ diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/locale/it/LC_MESSAGES/djangojs.po b/thesisenv/lib/python3.6/site-packages/newsletter/locale/it/LC_MESSAGES/djangojs.po new file mode 100644 index 0000000..dc757fb --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/newsletter/locale/it/LC_MESSAGES/djangojs.po @@ -0,0 +1,25 @@ +# SOME DESCRIPTIVE TITLE. +# Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER +# This file is distributed under the same license as the PACKAGE package. +# +# Translators: +# Andrea , 2013 +msgid "" +msgstr "" +"Project-Id-Version: django-newsletter\n" +"Report-Msgid-Bugs-To: \n" +"POT-Creation-Date: 2012-11-19 21:55+0100\n" +"PO-Revision-Date: 2013-11-20 12:25+0000\n" +"Last-Translator: Andrea \n" +"Language-Team: Italian (http://www.transifex.com/dokterbob/django-newsletter/language/it/)\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Language: it\n" +"Plural-Forms: nplurals=2; plural=(n != 1);\n" + +#: static/newsletter/admin/js/submit_interface.js:12 +msgid "" +"The submission has been changed. It has to be saved before you can submit. " +"Click OK to proceed with saving, click cancel to continue editing." +msgstr "Il contenuto è stato cambiato. Deve essere salvato prima che tu possa inviarlo. Clicca OK per procedere con il salvataggio, clicca annulla per continuare con la modifica." 
diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/locale/nl/LC_MESSAGES/django.mo b/thesisenv/lib/python3.6/site-packages/newsletter/locale/nl/LC_MESSAGES/django.mo new file mode 100644 index 0000000..b32337e Binary files /dev/null and b/thesisenv/lib/python3.6/site-packages/newsletter/locale/nl/LC_MESSAGES/django.mo differ diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/locale/nl/LC_MESSAGES/django.po b/thesisenv/lib/python3.6/site-packages/newsletter/locale/nl/LC_MESSAGES/django.po new file mode 100644 index 0000000..fd96c6d --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/newsletter/locale/nl/LC_MESSAGES/django.po @@ -0,0 +1,754 @@ +# SOME DESCRIPTIVE TITLE. +# Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER +# This file is distributed under the same license as the PACKAGE package. +# +# Translators: +# dokterbob , 2011 +# G M , 2016 +# dokterbob , 2013,2016 +# dokterbob , 2012 +msgid "" +msgstr "" +"Project-Id-Version: django-newsletter\n" +"Report-Msgid-Bugs-To: \n" +"POT-Creation-Date: 2016-08-11 16:09+0200\n" +"PO-Revision-Date: 2016-08-11 14:14+0000\n" +"Last-Translator: dokterbob \n" +"Language-Team: Dutch (http://www.transifex.com/dokterbob/django-newsletter/language/nl/)\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Language: nl\n" +"Plural-Forms: nplurals=2; plural=(n != 1);\n" + +#: addressimport/parsers.py:40 +#, python-format +msgid "Entry '%s' does not contain a valid e-mail address." +msgstr "Rij met '%s' bevat geen geldig e-mail adres." + +#: addressimport/parsers.py:55 +#, python-format +msgid "The address file contains duplicate entries for '%s'." +msgstr "Het adressenbestand bevat een dubbele rij voor '%s'." + +#: addressimport/parsers.py:69 +msgid "Some entries are already subscribed to." +msgstr "Op enkele rijen is al geabonneerd." 
+ +#: addressimport/parsers.py:108 +#, python-format +msgid "" +"E-mail address %(email)s too long, maximum length is %(email_length)s " +"characters." +msgstr "E-mail adres %(email)s is te lang, maximale lengte is %(email_length)s karakters." + +#: addressimport/parsers.py:135 +#, python-format +msgid "Name %(name)s too long, maximum length is %(name_length)s characters." +msgstr "Naam %(name)s is te lang, maximale lengte is %(name_length)s karakters." + +#: addressimport/parsers.py:196 addressimport/parsers.py:208 +#: addressimport/parsers.py:243 models.py:164 +msgid "name" +msgstr "naam" + +#: addressimport/parsers.py:200 +msgid "display" +msgstr "display" + +#: addressimport/parsers.py:207 +#, python-format +msgid "" +"Name column not found. The name of this column should be either 'name' or " +"'%s'." +msgstr "Naam kolom niet gevonden. De naam van deze kolom moet 'name' of '%s' zijn." + +#: addressimport/parsers.py:219 addressimport/parsers.py:231 +#: addressimport/parsers.py:244 models.py:39 models.py:178 +msgid "e-mail" +msgstr "e-mail" + +#: addressimport/parsers.py:229 +#, python-format +msgid "" +"E-mail column not found. The name of this column should be either 'email', " +"'e-mail' or '%(email)s'." +msgstr "Email-kolom niet gevonden. De naam van deze kolom moet 'email', 'e-mail' of '%(email)s' zijn." + +#: addressimport/parsers.py:239 +#, python-format +msgid "" +"Could not properly determine the proper columns in the CSV-file. There " +"should be a field called 'name' or '%(name)s' and one called 'e-mail' or " +"'%(e-mail)s'." +msgstr "Het is niet gelukt om de juiste kolommon in het CSV-betand te bepalen. Er zou een veld 'name' of '%(name)s' en een veld 'e-mail' of '%(e-mail)s' in moeten staan.'" + +#: addressimport/parsers.py:264 +#, python-format +msgid "Row with content '%(row)s' does not contain a name and email field." +msgstr "Rij met inhoud '%(row)s' bevat geen naam- en email-veld." 
+ +#: addressimport/parsers.py:290 +#, python-format +msgid "Error reading vCard file: %s" +msgstr "Fout bij lezen vCard-bestand: %s" + +#: addressimport/parsers.py:309 +#, python-format +msgid "Entry '%s' contains no email address." +msgstr "Ingang '%s' bevat geen geldig e-mail adres." + +#: addressimport/parsers.py:345 +msgid "Some entries have no e-mail address." +msgstr "Enkele ingangen hebben geen geldig e-mail adres." + +#: admin.py:64 +msgid "Messages" +msgstr "Berichten" + +#: admin.py:72 +#: templates/admin/newsletter/subscription/confirmimportform.html:16 +#: templates/admin/newsletter/subscription/importform.html:16 +msgid "Subscriptions" +msgstr "Abonnementen" + +#: admin.py:78 +msgid "Submissions" +msgstr "Zendingen" + +#: admin.py:98 models.py:551 +msgid "submission" +msgstr "zending" + +#: admin.py:105 admin.py:249 admin.py:364 models.py:102 models.py:298 +#: models.py:476 models.py:687 +msgid "newsletter" +msgstr "nieuwsbrief" + +#: admin.py:113 +msgid "publish date" +msgstr "publicatiedatum" + +#: admin.py:139 +msgid "Sent." +msgstr "Verzonden." + +#: admin.py:142 +msgid "Delayed submission." +msgstr "Wordt later verzonden." + +#: admin.py:144 +msgid "Submitting." +msgstr "Wordt nu verzonden." + +#: admin.py:146 +msgid "Not sent." +msgstr "Niet verzonden." + +#: admin.py:147 admin.py:389 admin_forms.py:115 +msgid "Status" +msgstr "Status" + +#: admin.py:154 +msgid "Submission already sent." +msgstr "Zending is al verzonden." + +#: admin.py:163 +msgid "Your submission is being sent." +msgstr "Uw zending wordt verzonden." 
+ +#: admin.py:210 +msgid "Optional" +msgstr "Optioneel" + +#: admin.py:237 models.py:447 models.py:487 models.py:690 +msgid "message" +msgstr "bericht" + +#: admin.py:241 templates/admin/newsletter/message/change_form.html.py:8 +#: templates/admin/newsletter/message/preview.html:13 +msgid "Preview" +msgstr "Voorvertonen" + +#: admin.py:266 views.py:606 +msgid "" +"No HTML template associated with the newsletter this message belongs to." +msgstr "Er is geen HTML-template geässocieerd met de nieuwsbrief die bij dit bericht hoort." + +#: admin.py:384 admin_forms.py:103 +msgid "Subscribed" +msgstr "Aangemeld" + +#: admin.py:386 admin_forms.py:104 +msgid "Unsubscribed" +msgstr "Opzeggen" + +#: admin.py:388 +msgid "Unactivated" +msgstr "Ongeactiveerd" + +#: admin.py:396 models.py:311 +msgid "subscribe date" +msgstr "abonneerdatum" + +#: admin.py:403 models.py:319 +msgid "unsubscribe date" +msgstr "opzegdatum" + +#: admin.py:411 +#, python-format +msgid "%d user has been successfully subscribed." +msgid_plural "%d users have been successfully subscribed." +msgstr[0] "%d gebruiker is succesvol ingeschreven." +msgstr[1] "%d gebruikers zijn succesvol ingeschreven." + +#: admin.py:416 +msgid "Subscribe selected users" +msgstr "Schrijf geselecteerde gebruikers in" + +#: admin.py:423 +#, python-format +msgid "%d user has been successfully unsubscribed." +msgid_plural "%d users have been successfully unsubscribed." +msgstr[0] "%d gebruiker is succesvol uitgeschreven." +msgstr[1] "%d gebruikers zijn succesvol uitgeschreven." + +#: admin.py:428 +msgid "Unsubscribe selected users" +msgstr "Geselecteerde gebruikers uitschrijven" + +#: admin.py:484 +#, python-format +msgid "%d subscription has been successfully added." +msgid_plural "%d subscriptions have been successfully added." +msgstr[0] "%d aanmelding is succesvol toegevoegd." +msgstr[1] "%d aanmeldingen zijn succesvol toegevoegd." + +#: admin_forms.py:43 +#, python-format +msgid "File type '%s' was not recognized." 
+msgstr "Bestandstype '%s' wordt niet herkend." + +#: admin_forms.py:60 +#, python-format +msgid "File extension '%s' was not recognized." +msgstr "Bestandsextensie '%s' is niet herkend." + +#: admin_forms.py:64 +msgid "No entries could found in this file." +msgstr "Er zijn geen adressen gevonden in dit bestand." + +#: admin_forms.py:72 +#: templates/admin/newsletter/subscription/confirmimportform.html:12 +#: templates/admin/newsletter/subscription/importform.html:12 +#: templates/newsletter/common.html:7 +#: templates/newsletter/newsletter_detail.html:10 +#: templates/newsletter/newsletter_list.html:14 +#: templates/newsletter/newsletter_list.html:32 +#: templates/newsletter/subscription_activate.html:5 +#: templates/newsletter/subscription_activate.html:8 +#: templates/newsletter/subscription_subscribe_activated.html:5 +#: templates/newsletter/subscription_subscribe_activated.html:8 +#: templates/newsletter/subscription_unsubscribe_activated.html:5 +#: templates/newsletter/subscription_unsubscribe_activated.html:8 +#: templates/newsletter/subscription_update_activated.html:5 +#: templates/newsletter/subscription_update_activated.html:8 +msgid "Newsletter" +msgstr "Nieuwsbrief" + +#: admin_forms.py:75 +msgid "Address file" +msgstr "Adressenbestand" + +#: admin_forms.py:77 +msgid "Ignore non-fatal errors" +msgstr "Negeer niet-fatale fouten" + +#: admin_forms.py:88 +msgid "You should confirm in order to continue." +msgstr "U moet bevestigen om door te kunnen gaan." + +#: admin_forms.py:91 +#: templates/admin/newsletter/subscription/confirmimportform.html:23 +#: templates/admin/newsletter/subscription/confirmimportform.html:28 +msgid "Confirm import" +msgstr "Bevestig import" + +#: admin_forms.py:121 admin_forms.py:129 +msgid "If a user has been selected this field should remain empty." +msgstr "Als er een gebruiker is geselecteerd dient dit veld leeg te blijven." + +#: admin_forms.py:139 +msgid "Either a user must be selected or an email address must be specified." 
+msgstr "Er moet óf een gebruiker of een e-mail adres worden ingevoerd." + +#: admin_forms.py:164 +msgid "" +"This message has already been published in some other submission. Messages " +"can only be published once." +msgstr "Dit bericht is al gepubliceerd in een andere zending. Berichten kunnen maar eenmalig gepubliceerd worden." + +#: admin_utils.py:25 +#, python-format +msgid "%(name)s object with primary key %(key)r does not exist." +msgstr "%(name)s-object met primaire sleutel %(key)r bestaat niet." + +#: forms.py:57 +msgid "Your e-mail address has already been subscribed to." +msgstr "Uw e-mail adres is al geabonneerd." + +#: forms.py:84 +msgid "This subscription has not yet been activated." +msgstr "Deze inschrijving is nog niet geactiveerd." + +#: forms.py:102 +msgid "This e-mail address has not been subscribed to." +msgstr "Er is geen abonnement voor dit email adres." + +#: forms.py:117 +msgid "This subscription has already been unsubscribed from." +msgstr "Deze inschrijving is al opgezegd." + +#: forms.py:133 +msgid "The validation code supplied by you does not match." +msgstr "De verificatiecode die door u is opgegeven komt niet overeen." + +#: forms.py:139 +msgid "Activation code" +msgstr "Verificatiecode" + +#: jobs/hourly/submit.py:15 +msgid "Submitting queued newsletter mailings" +msgstr "Bezig met versturen van klaarstaande zendingen" + +#: models.py:34 +msgid "newsletter title" +msgstr "Nieuwsbrief titel" + +#: models.py:39 +msgid "Sender e-mail" +msgstr "E-mail verzender" + +#: models.py:42 +msgid "sender" +msgstr "verzender" + +#: models.py:42 +msgid "Sender name" +msgstr "Naam verzender" + +#: models.py:46 +msgid "visible" +msgstr "zichtbaar" + +#: models.py:50 +msgid "send html" +msgstr "html versturen" + +#: models.py:51 +msgid "Whether or not to send HTML versions of e-mails." +msgstr "Of er al dan niet HTML versies van e-mails verstuurd moeten worden." 
+ +#: models.py:103 +msgid "newsletters" +msgstr "nieuwsbrieven" + +#: models.py:159 +msgid "user" +msgstr "gebruiker" + +#: models.py:164 +msgid "optional" +msgstr "optioneel" + +#: models.py:208 +#, python-format +msgid "Updated subscription %(subscription)s to %(action)s." +msgstr "Abonement %(subscription)s aangepast naar %(action)s." + +#: models.py:250 +msgid "Neither an email nor a username is set. This asks for inconsistency!" +msgstr "Er is geen e-mail adres noch een gebruikersnaam ingesteld. Dit is vragen om inconsistentie!" + +#: models.py:254 +msgid "If user is set, email must be null and vice versa." +msgstr "Als er een gebruiker is ingesteld moet e-mail null zijn en vice versa." + +#: models.py:296 +msgid "IP address" +msgstr "IP-adres" + +#: models.py:303 +msgid "activation code" +msgstr "activatiecode" + +#: models.py:308 +msgid "subscribed" +msgstr "aangemeld" + +#: models.py:316 +msgid "unsubscribed" +msgstr "opgezegd" + +#: models.py:324 +#, python-format +msgid "%(name)s <%(email)s> to %(newsletter)s" +msgstr "%(name)s <%(email)s> op %(newsletter)s" + +#: models.py:331 +#, python-format +msgid "%(email)s to %(newsletter)s" +msgstr "%(email)s aan %(newsletter)s" + +#: models.py:337 +msgid "subscription" +msgstr "abonnement" + +#: models.py:338 +msgid "subscriptions" +msgstr "abonnementen" + +#: models.py:426 +msgid "" +"Sort order determines the order in which articles are concatenated in a " +"post." +msgstr "Sorteer volgorde bepaald de volgorde waarin artikelen in een bericht achter elkaar gezet worden." 
+ +#: models.py:428 +msgid "sort order" +msgstr "sorteer volgorde" + +#: models.py:431 models.py:472 +msgid "title" +msgstr "titel" + +#: models.py:432 +msgid "text" +msgstr "tekst" + +#: models.py:435 +msgid "link" +msgstr "koppeling" + +#: models.py:441 +msgid "image" +msgstr "afbeelding" + +#: models.py:452 +msgid "article" +msgstr "artikel" + +#: models.py:453 +msgid "articles" +msgstr "artikelen" + +#: models.py:473 +msgid "slug" +msgstr "slug" + +#: models.py:480 +msgid "created" +msgstr "aangemaakt" + +#: models.py:483 +msgid "modified" +msgstr "verandert" + +#: models.py:488 +msgid "messages" +msgstr "berichten" + +#: models.py:493 +#, python-format +msgid "%(title)s in %(newsletter)s" +msgstr "%(title)s in %(newsletter)s" + +#: models.py:552 +msgid "submissions" +msgstr "zendingen" + +#: models.py:555 +#, python-format +msgid "%(newsletter)s on %(publish_date)s" +msgstr "%(newsletter)s op %(publish_date)s" + +#: models.py:574 +#, python-format +msgid "Submitting %(submission)s to %(count)d people" +msgstr "Versturen van %(submission)s aan %(count)d mensen" + +#: models.py:628 +#, python-format +msgid "Submitting message to: %s." +msgstr "Versturen bericht aan: %s" + +#: models.py:637 +#, python-format +msgid "Message %(subscription)s failed with error: %(error)s" +msgstr "Bericht %(subscription)s is gefaald met fout: %(error)s" + +#: models.py:655 +#, python-format +msgid "Submission of message %s" +msgstr "Verzending van bericht %s" + +#: models.py:695 +msgid "" +"If you select none, the system will automatically find the subscribers for " +"you." +msgstr "Als u er geen selecteerd, zal het systeem automatisch de abonnees voor u opzoeken." + +#: models.py:697 +msgid "recipients" +msgstr "geaddresseerden" + +#: models.py:702 +msgid "publication date" +msgstr "publicatiedatum" + +#: models.py:706 +msgid "publish" +msgstr "publiceer" + +#: models.py:707 +msgid "Publish in archive." +msgstr "Publiceer in archief." 
+ +#: models.py:711 +msgid "prepared" +msgstr "voorbereid" + +#: models.py:715 +msgid "sent" +msgstr "verzonden" + +#: models.py:719 +msgid "sending" +msgstr "verzenden" + +#: templates/admin/newsletter/message/preview.html:5 +#: templates/admin/newsletter/message/preview.html:19 +msgid "Preview message" +msgstr "Voorvertoning bericht" + +#: templates/admin/newsletter/message/preview.html:9 +#: templates/admin/newsletter/subscription/confirmimportform.html:8 +#: templates/admin/newsletter/subscription/importform.html:8 +msgid "Home" +msgstr "Home" + +#: templates/admin/newsletter/message/preview.html:11 +msgid "Message" +msgstr "Bericht" + +#: templates/admin/newsletter/message/preview.html:22 +#: templates/admin/newsletter/subscription/importform.html:28 +msgid "Change" +msgstr "Verander" + +#: templates/admin/newsletter/message/preview.html:23 +#: templates/admin/newsletter/subscription/importform.html:29 +msgid "Create submission" +msgstr "Maak zending aan" + +#: templates/admin/newsletter/message/preview.html:26 +msgid "HTML" +msgstr "HTML" + +#: templates/admin/newsletter/message/preview.html:30 +msgid "Text" +msgstr "Tekst" + +#: templates/admin/newsletter/submission/change_form.html:16 +msgid "Submit" +msgstr "Verzend" + +#: templates/admin/newsletter/subscription/change_list.html:5 +msgid "Import" +msgstr "Importeer" + +#: templates/admin/newsletter/subscription/confirmimportform.html:3 +#: templates/admin/newsletter/subscription/confirmimportform.html:20 +#: templates/admin/newsletter/subscription/importform.html:3 +#: templates/admin/newsletter/subscription/importform.html:19 +#: templates/admin/newsletter/subscription/importform.html:24 +msgid "Import addresses" +msgstr "Importeer adres" + +#: templates/admin/newsletter/subscription/confirmimportform.html:40 +msgid "Confirm" +msgstr "Bevestig" + +#: templates/admin/newsletter/subscription/importform.html:37 +msgid "Upload" +msgstr "Upload" + +#: templates/newsletter/newsletter_detail.html:5 +msgid 
"Newsletter detail" +msgstr "Nieuwsbrief detail" + +#: templates/newsletter/newsletter_list.html:5 +msgid "Newsletter list" +msgstr "Nieuwsbrieven lijst" + +#: templates/newsletter/newsletter_list.html:16 +#: templates/newsletter/subscription_subscribe.html:23 +#: templates/newsletter/subscription_subscribe_user.html:22 +msgid "Subscribe" +msgstr "Abonneer" + +#: templates/newsletter/newsletter_list.html:27 +msgid "Update subscriptions" +msgstr "Pas inschrijvingen aan" + +#: templates/newsletter/submission_archive.html:5 +#: templates/newsletter/submission_archive.html:10 +msgid "Newsletter archive" +msgstr "Nieuwsbrief archief" + +#: templates/newsletter/subscription_activate.html:5 +#: templates/newsletter/subscription_activate.html:8 +#: templates/newsletter/subscription_subscribe_activated.html:5 +#: templates/newsletter/subscription_subscribe_activated.html:8 +#: templates/newsletter/subscription_unsubscribe_activated.html:5 +#: templates/newsletter/subscription_unsubscribe_activated.html:8 +#: templates/newsletter/subscription_update_activated.html:5 +#: templates/newsletter/subscription_update_activated.html:8 +msgid "activate" +msgstr "activeer" + +#: templates/newsletter/subscription_activate.html:13 +msgid "Activate" +msgstr "Activeer" + +#: templates/newsletter/subscription_subscribe.html:5 +#: templates/newsletter/subscription_subscribe.html:8 +#: templates/newsletter/subscription_subscribe_email_sent.html:5 +#: templates/newsletter/subscription_subscribe_email_sent.html:8 +#: templates/newsletter/subscription_subscribe_user.html:5 +#: templates/newsletter/subscription_subscribe_user.html:8 +msgid "Newsletter subscribe" +msgstr "Abonneer op nieuwsbrief" + +#: templates/newsletter/subscription_subscribe.html:11 +#: templates/newsletter/subscription_unsubscribe.html:11 +#: templates/newsletter/subscription_update.html:11 +msgid "" +"Due to a technical error we were not able to submit your confirmation email." 
+" This could be because your email address is invalid." +msgstr "Wegens technische omstandigheden is het niet gelukt om een bevestigingsmail te versturen. Misschien heeft u een ongeldig e-mail adres ingevuld." + +#: templates/newsletter/subscription_subscribe_activated.html:10 +msgid "Your subscription has successfully been activated." +msgstr "Uw inschrijving is succesvol geactiveerd." + +#: templates/newsletter/subscription_subscribe_email_sent.html:10 +msgid "" +"Your subscription request was successfully received and an activation email " +"has been sent to you. In that email you will find a link which you need to " +"follow in order to activate your subscription." +msgstr "Uw inschrijfverzoek is succesvol ontvangen en een activiatie e-mail is naar u verstuurd. In deze e-mail vind u een link die gevolgd moet worden om uw inschrijving te activeren." + +#: templates/newsletter/subscription_subscribe_user.html:19 +msgid "Do you want to subscribe to this newsletter?" +msgstr "Wilt u zich inschrijven op deze nieuwsbrief?" + +#: templates/newsletter/subscription_unsubscribe.html:5 +#: templates/newsletter/subscription_unsubscribe.html:8 +#: templates/newsletter/subscription_unsubscribe_email_sent.html:5 +#: templates/newsletter/subscription_unsubscribe_email_sent.html:8 +#: templates/newsletter/subscription_unsubscribe_user.html:5 +#: templates/newsletter/subscription_unsubscribe_user.html:8 +msgid "Newsletter unsubscribe" +msgstr "Zeg nieuwsbrief op" + +#: templates/newsletter/subscription_unsubscribe.html:23 +#: templates/newsletter/subscription_unsubscribe_user.html:23 +msgid "Unsubscribe" +msgstr "Opzeggen" + +#: templates/newsletter/subscription_unsubscribe_activated.html:10 +msgid "You have successfully been unsubscribed." +msgstr "U bent succesvol uitgeschreven." + +#: templates/newsletter/subscription_unsubscribe_email_sent.html:10 +msgid "" +"Your unsubscription request has successfully been received. 
An email has " +"been sent to you with a link you need to follow in order to confirm your " +"unsubscription." +msgstr "Uw uitschrijfverzoek is succesvol ontvangen. Er is een e-mail naar u verstuurd met een link die u dient te volgen om de uitschrijving te bevestigen." + +#: templates/newsletter/subscription_unsubscribe_user.html:20 +msgid "Do you want to unsubscribe from this newsletter?" +msgstr "Wilt u zich uitschrijven van deze nieuwsbrief?" + +#: templates/newsletter/subscription_update.html:5 +#: templates/newsletter/subscription_update.html:8 +#: templates/newsletter/subscription_update_email_sent.html:5 +#: templates/newsletter/subscription_update_email_sent.html:8 +msgid "Newsletter update" +msgstr "Nieuwsbrief aanpassen" + +#: templates/newsletter/subscription_update.html:23 +msgid "Update subscription" +msgstr "Pas inschrijving aan" + +#: templates/newsletter/subscription_update_activated.html:10 +msgid "Your subscription has successfully been updated." +msgstr "Uw inschrijving is succesvol aangepast." + +#: templates/newsletter/subscription_update_email_sent.html:10 +msgid "" +"Your update request was successfully received and an activation email has " +"been sent to you. In that email you will find a link which you need to " +"follow in order to update your subscription." +msgstr "Uw aanpassingsverzoek is succesvol ontvangen en een activiatiemail is naar u verstuurd. In deze e-mail vind u een link die u dient aan te klikken om de aanpassingen te bevestigen." + +#: templates/widget/image.html:2 +msgid "Currently:" +msgstr "Huidig:" + +#: templates/widget/image.html:4 +msgid "Change:" +msgstr "Verander:" + +#: validators.py:15 +#, python-format +msgid "" +"The e-mail address '%(email)s' belongs to a user with an account on this " +"site. Please log in as that user and try again." +msgstr "Het email-adres '%(email)s' hoort bij een gebruiker van deze site. Log in als deze gebruiker en probeer het nog eens." 
+ +#: views.py:115 +msgid "Your changes have been saved." +msgstr "Uw veranderingen zijn opgeslagen." + +#: views.py:306 +#, python-format +msgid "You have been subscribed to %s." +msgstr "U bent geaboneerd op %s." + +#: views.py:310 +#, python-format +msgid "User %(rs)s subscribed to %(my_newsletter)s." +msgstr "Gebruiker %(rs)s geaboneerd op %(my_newsletter)s." + +#: views.py:320 +#, python-format +msgid "You are already subscribed to %s." +msgstr "U bent al geabonneerd op %s." + +#: views.py:345 +#, python-format +msgid "You have been unsubscribed from %s." +msgstr "U bent uitgeschreven van %s." + +#: views.py:349 +#, python-format +msgid "User %(rs)s unsubscribed from %(my_newsletter)s." +msgstr "Gebruiker %(rs)s uitgeschreven van %(my_newsletter)s." + +#: views.py:362 +#, python-format +msgid "You are not subscribed to %s." +msgstr "U bent niet ingeschreven op %s." diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/locale/nl/LC_MESSAGES/djangojs.mo b/thesisenv/lib/python3.6/site-packages/newsletter/locale/nl/LC_MESSAGES/djangojs.mo new file mode 100644 index 0000000..3536c88 Binary files /dev/null and b/thesisenv/lib/python3.6/site-packages/newsletter/locale/nl/LC_MESSAGES/djangojs.mo differ diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/locale/nl/LC_MESSAGES/djangojs.po b/thesisenv/lib/python3.6/site-packages/newsletter/locale/nl/LC_MESSAGES/djangojs.po new file mode 100644 index 0000000..73174ba --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/newsletter/locale/nl/LC_MESSAGES/djangojs.po @@ -0,0 +1,24 @@ +# SOME DESCRIPTIVE TITLE. +# Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER +# This file is distributed under the same license as the PACKAGE package. 
+# +# Translators: +msgid "" +msgstr "" +"Project-Id-Version: django-newsletter\n" +"Report-Msgid-Bugs-To: \n" +"POT-Creation-Date: 2012-11-19 21:55+0100\n" +"PO-Revision-Date: 2010-11-30 06:53+0000\n" +"Last-Translator: FULL NAME \n" +"Language-Team: Dutch (http://www.transifex.com/dokterbob/django-newsletter/language/nl/)\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Language: nl\n" +"Plural-Forms: nplurals=2; plural=(n != 1);\n" + +#: static/newsletter/admin/js/submit_interface.js:12 +msgid "" +"The submission has been changed. It has to be saved before you can submit. " +"Click OK to proceed with saving, click cancel to continue editing." +msgstr "De zending is veranderd. Hij moet opgeslagen worden voordat u kunt verzenden. Klik op OK om de zending op te slaan, klik op annuleren om verder te gaan met bewerken." diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/locale/pl/LC_MESSAGES/django.mo b/thesisenv/lib/python3.6/site-packages/newsletter/locale/pl/LC_MESSAGES/django.mo new file mode 100644 index 0000000..83befba Binary files /dev/null and b/thesisenv/lib/python3.6/site-packages/newsletter/locale/pl/LC_MESSAGES/django.mo differ diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/locale/pl/LC_MESSAGES/django.po b/thesisenv/lib/python3.6/site-packages/newsletter/locale/pl/LC_MESSAGES/django.po new file mode 100644 index 0000000..02d309a --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/newsletter/locale/pl/LC_MESSAGES/django.po @@ -0,0 +1,782 @@ +# SOME DESCRIPTIVE TITLE. +# Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER +# This file is distributed under the same license as the PACKAGE package. 
+# +# Translators: +# maciej.maciaszek , 2014 +# marcinph , 2014 +# dokterbob , 2016 +# mmiskiew , 2014 +msgid "" +msgstr "" +"Project-Id-Version: django-newsletter\n" +"Report-Msgid-Bugs-To: \n" +"POT-Creation-Date: 2016-01-05 17:58+0100\n" +"PO-Revision-Date: 2016-02-02 13:33+0000\n" +"Last-Translator: dokterbob \n" +"Language-Team: Polish (http://www.transifex.com/dokterbob/django-newsletter/language/pl/)\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Language: pl\n" +"Plural-Forms: nplurals=3; plural=(n==1 ? 0 : n%10>=2 && n%10<=4 && (n%100<10 || n%100>=20) ? 1 : 2);\n" + +#: addressimport/parsers.py:40 +#, python-format +msgid "Entry '%s' does not contain a valid e-mail address." +msgstr "Wpis '%s' nie zawiera poprawnego adresu e-mail." + +#: addressimport/parsers.py:55 +#, python-format +msgid "The address file contains duplicate entries for '%s'." +msgstr "Plik z adresem zawiera zduplikowany wpis dla '%s'." + +#: addressimport/parsers.py:69 +msgid "Some entries are already subscribed to." +msgstr "Niektóre wpisy są już dodane do subskrypcji." + +#: addressimport/parsers.py:108 +#, python-format +msgid "" +"E-mail address %(email)s too long, maximum length is %(email_length)s " +"characters." +msgstr "Adres email %(email)s jest zbyt długi, maksymalna liczba znaków to %(email_length)s" + +#: addressimport/parsers.py:135 +#, python-format +msgid "Name %(name)s too long, maximum length is %(name_length)s characters." +msgstr "Nazwa %(name)s jest zbyt długa, maksymalna liczba znaków to %(name_length)s" + +#: addressimport/parsers.py:196 addressimport/parsers.py:208 +#: addressimport/parsers.py:243 models.py:162 +msgid "name" +msgstr "nazwa" + +#: addressimport/parsers.py:200 +msgid "display" +msgstr "wyświetlanie" + +#: addressimport/parsers.py:207 +#, python-format +msgid "" +"Name column not found. The name of this column should be either 'name' or " +"'%s'." 
+msgstr "Nazwa kolumny nie została znaleziona. Prawidłowa nazwa kolumny to albo 'name' albo '%s'" + +#: addressimport/parsers.py:219 addressimport/parsers.py:231 +#: addressimport/parsers.py:244 models.py:37 models.py:176 +msgid "e-mail" +msgstr "e-mail" + +#: addressimport/parsers.py:229 +#, python-format +msgid "" +"E-mail column not found. The name of this column should be either 'email', " +"'e-mail' or '%(email)s'." +msgstr "Kolumna e-mail nie została znaleziona. Prawidłowa nazwa kolumny to albo 'email' albo '%(email)s'" + +#: addressimport/parsers.py:239 +#, python-format +msgid "" +"Could not properly determine the proper columns in the CSV-file. There " +"should be a field called 'name' or '%(name)s' and one called 'e-mail' or " +"'%(e-mail)s'." +msgstr "Nie znaleziono odpowiednich kolumn w pliku CSV. Powinno w nim wystąpić pole o nazwie 'name' lub '%(name)s' oraz pole o nazwie 'e-mail' lub '%(e-mail)s'." + +#: addressimport/parsers.py:262 +#, python-format +msgid "Row with content '%(row)s' does not contain a name and email field." +msgstr "Wiersz o zawartości '%(row)s' nie zawiera pól 'name' oraz 'email'" + +#: addressimport/parsers.py:288 +#, python-format +msgid "Error reading vCard file: %s" +msgstr "Błąd przy odczycie pliku vCard: %s" + +#: addressimport/parsers.py:307 +#, python-format +msgid "Entry '%s' contains no email address." +msgstr "Wpis '%s' nie zawiera adresu e-mail." + +#: addressimport/parsers.py:343 +msgid "Some entries have no e-mail address." +msgstr "Niektóre wpisy nie posiadają adresów e-mail." 
+ +#: admin.py:63 +msgid "Messages" +msgstr "Wiadomości" + +#: admin.py:71 +#: templates/admin/newsletter/subscription/confirmimportform.html:16 +#: templates/admin/newsletter/subscription/importform.html:16 +msgid "Subscriptions" +msgstr "Subskrybcje" + +#: admin.py:77 +msgid "Submissions" +msgstr "Wysyłki" + +#: admin.py:97 models.py:537 +msgid "submission" +msgstr "wysyłka" + +#: admin.py:104 admin.py:247 admin.py:365 models.py:100 models.py:296 +#: models.py:487 models.py:665 +msgid "newsletter" +msgstr "newsletter" + +#: admin.py:112 +msgid "publish date" +msgstr "data publikacji" + +#: admin.py:138 +msgid "Sent." +msgstr "Wysłane." + +#: admin.py:141 +msgid "Delayed submission." +msgstr "Odroczone wysyłki." + +#: admin.py:143 +msgid "Submitting." +msgstr "Wysyłanie." + +#: admin.py:145 +msgid "Not sent." +msgstr "Nie wysłane." + +#: admin.py:146 admin.py:390 admin_forms.py:113 +msgid "Status" +msgstr "Status" + +#: admin.py:153 +msgid "Submission already sent." +msgstr "Wysyłka została zakończona." + +#: admin.py:162 +msgid "Your submission is being sent." +msgstr "Twoja wysyłka jest przetwarzana." + +#: admin.py:208 +msgid "Optional" +msgstr "Opcjonalny" + +#: admin.py:235 models.py:460 models.py:512 models.py:668 +msgid "message" +msgstr "wiadomość" + +#: admin.py:239 templates/admin/newsletter/message/change_form.html.py:9 +#: templates/admin/newsletter/message/preview.html:13 +msgid "Preview" +msgstr "Podgląd" + +#: admin.py:267 views.py:606 +msgid "" +"No HTML template associated with the newsletter this message belongs to." 
+msgstr "Nie odnaleziono szablonu HTML dla newslettera do którego należy ta wiadomość" + +#: admin.py:385 admin_forms.py:101 +msgid "Subscribed" +msgstr "Zapisany" + +#: admin.py:387 admin_forms.py:102 +msgid "Unsubscribed" +msgstr "Niezapisany" + +#: admin.py:389 +msgid "Unactivated" +msgstr "Dezaktywowany" + +#: admin.py:397 models.py:309 +msgid "subscribe date" +msgstr "data zapisania" + +#: admin.py:404 models.py:317 +msgid "unsubscribe date" +msgstr "data wypisania" + +#: admin.py:412 +#, python-format +msgid "%s user has been successfully subscribed." +msgid_plural "%s users have been successfully subscribed." +msgstr[0] "%s użytkownik został pomyślnie zapisany" +msgstr[1] "%s użytkowników zostało pomyślnie zapisanych" +msgstr[2] "%s użytkowników zostało pomyślnie zapisanych." + +#: admin.py:417 +msgid "Subscribe selected users" +msgstr "Zapisz wybranych użytkowników" + +#: admin.py:424 +#, python-format +msgid "%s user has been successfully unsubscribed." +msgid_plural "%s users have been successfully unsubscribed." +msgstr[0] "%s użytkownik został pomyślnie wypisany." +msgstr[1] "%s użytkowników zostało pomyślnie wypisanych" +msgstr[2] "%s użytkowników zostało pomyślnie wypisanych." + +#: admin.py:429 +msgid "Unsubscribe selected users" +msgstr "Wypisz zaznaczonych użytkowników" + +#: admin.py:484 +#, python-format +msgid "%s subscriptions have been successfully added." +msgstr "%s subskrypcji zostało dodanych pomyślnie." + +#: admin_forms.py:41 +#, python-format +msgid "File type '%s' was not recognized." +msgstr "Rodzaj pliku '%s' nie został rozpoznany." + +#: admin_forms.py:58 +#, python-format +msgid "File extension '%s' was not recognized." +msgstr "Rozszerzenie '%s' nie zostało rozpoznane." + +#: admin_forms.py:62 +msgid "No entries could found in this file." +msgstr "Żadne wpisy nie zostały znalezione w tym pliku." 
+ +#: admin_forms.py:70 +#: templates/admin/newsletter/subscription/confirmimportform.html:12 +#: templates/admin/newsletter/subscription/importform.html:12 +#: templates/newsletter/common.html:7 +#: templates/newsletter/newsletter_detail.html:10 +#: templates/newsletter/newsletter_list.html:14 +#: templates/newsletter/newsletter_list.html:32 +#: templates/newsletter/subscription_activate.html:5 +#: templates/newsletter/subscription_activate.html:8 +#: templates/newsletter/subscription_subscribe_activated.html:5 +#: templates/newsletter/subscription_subscribe_activated.html:8 +#: templates/newsletter/subscription_unsubscribe_activated.html:5 +#: templates/newsletter/subscription_unsubscribe_activated.html:8 +#: templates/newsletter/subscription_update_activated.html:5 +#: templates/newsletter/subscription_update_activated.html:8 +msgid "Newsletter" +msgstr "Newsletter" + +#: admin_forms.py:73 +msgid "Address file" +msgstr "Plik z adresami" + +#: admin_forms.py:75 +msgid "Ignore non-fatal errors" +msgstr "Zignoruj błędy niekrytyczne." + +#: admin_forms.py:86 +msgid "You should confirm in order to continue." +msgstr "Powinieneś zatwierdzić, żeby móc kontynuować." + +#: admin_forms.py:89 +#: templates/admin/newsletter/subscription/confirmimportform.html:23 +#: templates/admin/newsletter/subscription/confirmimportform.html:28 +msgid "Confirm import" +msgstr "Potwierdź import" + +#: admin_forms.py:119 admin_forms.py:127 +msgid "If a user has been selected this field should remain empty." +msgstr "Jeśli użytkownik został wybrany, to pole powinno zostać puste." + +#: admin_forms.py:137 +msgid "Either a user must be selected or an email address must be specified." +msgstr "Użytkownik lub adres e-mail musi zostać podany." + +#: admin_forms.py:162 +msgid "" +"This message has already been published in some other submission. Messages " +"can only be published once." +msgstr "Ta wiadomość została już opublikowana w innej wysyłce. Wiadomości mogą być publikowane tylko raz." 
+ +#: admin_utils.py:25 +#, python-format +msgid "%(name)s object with primary key %(key)r does not exist." +msgstr "Obiekt %(name)s z kluczem głównym %(key)r nie istnieje." + +#: forms.py:47 forms.py:106 +msgid "An e-mail address is required." +msgstr "Adres e-mail jest wymagany." + +#: forms.py:55 +#, python-format +msgid "" +"The e-mail address '%(email)s' belongs to a user with an account on this " +"site. Please log in as that user and try again." +msgstr "Adres e-mail '%(email)s' należy do użytkownika posiadającego konto w tym systemie. Zaloguj się jako ten użytkownik i spróbuj ponownie." + +#: forms.py:72 +msgid "Your e-mail address has already been subscribed to." +msgstr "Twój adres e-mail został już zapisany." + +#: forms.py:97 +msgid "This subscription has not yet been activated." +msgstr "Ta subskrypcja nie została jeszcze aktywowana." + +#: forms.py:114 +#, python-format +msgid "" +"This e-mail address belongs to the user '%(username)s'. Please log in as " +"that user and try again." +msgstr "Ten adres e-mail należy do użytkownika '%(username)s'. Zaloguj się jako ten użytkownik i spróbuj ponownie." + +#: forms.py:132 +msgid "This e-mail address has not been subscribed to." +msgstr "Ten adres e-mail został zapisany." + +#: forms.py:147 +msgid "This subscription has already been unsubscribed from." +msgstr "Ta subskrypcja została już anulowana." + +#: forms.py:163 +msgid "The validation code supplied by you does not match." +msgstr "Podany kod weryfikacyjny jest nieprawidłowy." 
+ +#: forms.py:169 +msgid "Activation code" +msgstr "Kod aktywacyjny" + +#: jobs/hourly/submit.py:15 +msgid "Submitting queued newsletter mailings" +msgstr "Wysyłanie zaplanowanych wiadomości newslettera" + +#: models.py:32 +msgid "newsletter title" +msgstr "tytuł newslettera" + +#: models.py:37 +msgid "Sender e-mail" +msgstr "E-mail nadawcy" + +#: models.py:40 +msgid "sender" +msgstr "nadawca" + +#: models.py:40 +msgid "Sender name" +msgstr "Imię nadawcy" + +#: models.py:44 +msgid "visible" +msgstr "widoczny" + +#: models.py:48 +msgid "send html" +msgstr "wyślij html" + +#: models.py:49 +msgid "Whether or not to send HTML versions of e-mails." +msgstr "Czy wiadomości e-mail mają być wysyłane w wersji HTML." + +#: models.py:101 +msgid "newsletters" +msgstr "newslettery" + +#: models.py:157 +msgid "user" +msgstr "użytkownik" + +#: models.py:162 +msgid "optional" +msgstr "opcjonalny" + +#: models.py:206 +#, python-format +msgid "Updated subscription %(subscription)s to %(action)s." +msgstr "Zaktualizowano subskrypcję %(subscription)s o %(action)s" + +#: models.py:248 +msgid "Neither an email nor a username is set. This asks for inconsistency!" +msgstr "Email ani nazwa nie została ustawiona. To prosi się o niespójność!" + +#: models.py:252 +msgid "If user is set, email must be null and vice versa." +msgstr "Jeżeli wpiszesz użytkownika, adres email musi pozostać pusty i na odwrót." 
+ +#: models.py:294 +msgid "IP address" +msgstr "adres IP" + +#: models.py:301 +msgid "activation code" +msgstr "kod aktywacyjny" + +#: models.py:306 +msgid "subscribed" +msgstr "zapisany" + +#: models.py:314 +msgid "unsubscribed" +msgstr "wypisany" + +#: models.py:322 +#, python-format +msgid "%(name)s <%(email)s> to %(newsletter)s" +msgstr "%(name)s <%(email)s> do %(newsletter)s" + +#: models.py:329 +#, python-format +msgid "%(email)s to %(newsletter)s" +msgstr "%(email)s do %(newsletter)s" + +#: models.py:335 +msgid "subscription" +msgstr "subskrypcja" + +#: models.py:336 +msgid "subscriptions" +msgstr "subskrypcje" + +#: models.py:439 +msgid "" +"Sort order determines the order in which articles are concatenated in a " +"post." +msgstr "Porządek sortowania determinuje kolejność w jakiej artykuły są wyświetlane w wiadomości" + +#: models.py:441 +msgid "sort order" +msgstr "porządek sortowania" + +#: models.py:444 models.py:483 +msgid "title" +msgstr "tytuł" + +#: models.py:445 +msgid "text" +msgstr "tekst" + +#: models.py:448 +msgid "link" +msgstr "odnośnik" + +#: models.py:454 +msgid "image" +msgstr "obraz" + +#: models.py:465 +msgid "article" +msgstr "artykuł" + +#: models.py:466 +msgid "articles" +msgstr "artykuły" + +#: models.py:484 +msgid "slug" +msgstr "slug" + +#: models.py:491 +msgid "created" +msgstr "stworzony" + +#: models.py:494 +msgid "modified" +msgstr "zmodyfikowany" + +#: models.py:499 +#, python-format +msgid "%(title)s in %(newsletter)s" +msgstr "%(title)s w %(newsletter)s" + +#: models.py:513 +msgid "messages" +msgstr "wiadomości" + +#: models.py:538 +msgid "submissions" +msgstr "wysyłki" + +#: models.py:541 +#, python-format +msgid "%(newsletter)s on %(publish_date)s" +msgstr "%(newsletter)s dnia %(publish_date)s" + +#: models.py:550 +#, python-format +msgid "Submitting %(submission)s to %(count)d people" +msgstr "Wysłano %(submission)s wiadomości do %(count)d odbiorców" + +#: models.py:597 +#, python-format +msgid "Submitting message to: 
%s." +msgstr "Wysyłanie wiadomości do: %s." + +#: models.py:606 +#, python-format +msgid "Message %(subscription)s failed with error: %(error)s" +msgstr "Wiadomość %(subscription)s zakończyła się niepowodzeniem: %(error)s" + +#: models.py:630 +#, python-format +msgid "Submission of message %s" +msgstr "Wysyłka wiadomości %s" + +#: models.py:673 +msgid "" +"If you select none, the system will automatically find the subscribers for " +"you." +msgstr "Jeśli nic nie zaznaczysz, system automatycznie wybierze odbiorców dla Ciebie." + +#: models.py:675 +msgid "recipients" +msgstr "odbiorcy" + +#: models.py:680 +msgid "publication date" +msgstr "data publikacji" + +#: models.py:684 +msgid "publish" +msgstr "publikuj" + +#: models.py:685 +msgid "Publish in archive." +msgstr "Publikuj w archiwum" + +#: models.py:689 +msgid "prepared" +msgstr "przygotowany" + +#: models.py:693 +msgid "sent" +msgstr "wysłany" + +#: models.py:697 +msgid "sending" +msgstr "wysyłanie" + +#: templates/admin/newsletter/message/change_form.html:7 +#: templates/admin/newsletter/newsletter/change_form.html:7 +#: templates/admin/newsletter/submission/change_form.html:14 +msgid "History" +msgstr "Historia" + +#: templates/admin/newsletter/message/change_form.html:8 +#: templates/admin/newsletter/newsletter/change_form.html:8 +#: templates/admin/newsletter/submission/change_form.html:15 +msgid "View on site" +msgstr "Zobacz na stronie" + +#: templates/admin/newsletter/message/preview.html:5 +#: templates/admin/newsletter/message/preview.html:19 +msgid "Preview message" +msgstr "Podgląd wiadomości" + +#: templates/admin/newsletter/message/preview.html:9 +#: templates/admin/newsletter/subscription/confirmimportform.html:8 +#: templates/admin/newsletter/subscription/importform.html:8 +msgid "Home" +msgstr "Strona główna" + +#: templates/admin/newsletter/message/preview.html:11 +msgid "Message" +msgstr "Wiadomość" + +#: templates/admin/newsletter/message/preview.html:22 +#: 
templates/admin/newsletter/subscription/importform.html:28 +msgid "Change" +msgstr "Zmiana" + +#: templates/admin/newsletter/message/preview.html:23 +#: templates/admin/newsletter/subscription/importform.html:29 +msgid "Create submission" +msgstr "Utwórz wysyłkę" + +#: templates/admin/newsletter/message/preview.html:26 +msgid "HTML" +msgstr "HTML" + +#: templates/admin/newsletter/message/preview.html:30 +msgid "Text" +msgstr "Tekst" + +#: templates/admin/newsletter/submission/change_form.html:16 +msgid "Submit" +msgstr "Wyślij" + +#: templates/admin/newsletter/subscription/change_list.html:8 +msgid "import" +msgstr "import" + +#: templates/admin/newsletter/subscription/change_list.html:12 +#, python-format +msgid "Add %(name)s" +msgstr "Dodaj %(name)s" + +#: templates/admin/newsletter/subscription/confirmimportform.html:3 +#: templates/admin/newsletter/subscription/confirmimportform.html:20 +#: templates/admin/newsletter/subscription/importform.html:3 +#: templates/admin/newsletter/subscription/importform.html:19 +#: templates/admin/newsletter/subscription/importform.html:24 +msgid "Import addresses" +msgstr "Importuj adresy" + +#: templates/admin/newsletter/subscription/confirmimportform.html:40 +msgid "Confirm" +msgstr "Potwierdź" + +#: templates/admin/newsletter/subscription/importform.html:37 +msgid "Upload" +msgstr "Wyślij" + +#: templates/newsletter/newsletter_detail.html:5 +msgid "Newsletter detail" +msgstr "Szczegóły newslettera" + +#: templates/newsletter/newsletter_list.html:5 +msgid "Newsletter list" +msgstr "Lista newslettera" + +#: templates/newsletter/newsletter_list.html:16 +#: templates/newsletter/subscription_subscribe.html:23 +#: templates/newsletter/subscription_subscribe_user.html:22 +msgid "Subscribe" +msgstr "Subskrybuj" + +#: templates/newsletter/newsletter_list.html:27 +msgid "Update subscriptions" +msgstr "Zaktualizuj subskrypcje" + +#: templates/newsletter/submission_archive.html:5 +#: templates/newsletter/submission_archive.html:10 +msgid 
"Newsletter archive" +msgstr "Archiwum newslettera" + +#: templates/newsletter/subscription_activate.html:5 +#: templates/newsletter/subscription_activate.html:8 +#: templates/newsletter/subscription_subscribe_activated.html:5 +#: templates/newsletter/subscription_subscribe_activated.html:8 +#: templates/newsletter/subscription_unsubscribe_activated.html:5 +#: templates/newsletter/subscription_unsubscribe_activated.html:8 +#: templates/newsletter/subscription_update_activated.html:5 +#: templates/newsletter/subscription_update_activated.html:8 +msgid "activate" +msgstr "aktywuj" + +#: templates/newsletter/subscription_activate.html:13 +msgid "Activate" +msgstr "Aktywuj" + +#: templates/newsletter/subscription_subscribe.html:5 +#: templates/newsletter/subscription_subscribe.html:8 +#: templates/newsletter/subscription_subscribe_email_sent.html:5 +#: templates/newsletter/subscription_subscribe_email_sent.html:8 +#: templates/newsletter/subscription_subscribe_user.html:5 +#: templates/newsletter/subscription_subscribe_user.html:8 +msgid "Newsletter subscribe" +msgstr "Zapisz do newslettera" + +#: templates/newsletter/subscription_subscribe.html:11 +#: templates/newsletter/subscription_unsubscribe.html:11 +#: templates/newsletter/subscription_update.html:11 +msgid "" +"Due to a technical error we were not able to submit your confirmation email." +" This could be because your email address is invalid." +msgstr "W związku z problemami technicznymi nie jesteśmy w stanie przesłać emaila z potwierdzeniem. Powodem może być błędnie zdefiniowany adres email." + +#: templates/newsletter/subscription_subscribe_activated.html:10 +msgid "Your subscription has successfully been activated." +msgstr "Twoja subskrypcja została pomyślnie aktywowana" + +#: templates/newsletter/subscription_subscribe_email_sent.html:10 +msgid "" +"Your subscription request was successfully received and an activation email " +"has been sent to you. 
In that email you will find a link which you need to " +"follow in order to activate your subscription." +msgstr "Twoja subskrypcja została pomyślnie przyjęta i został do Ciebie wysłany e-mail aktywacyjny. W tej wiadomości znajdziesz odnośnik, który musisz kliknąć w celu aktywacji Twojej subskrypcji." + +#: templates/newsletter/subscription_subscribe_user.html:19 +msgid "Do you want to subscribe to this newsletter?" +msgstr "Czy chcesz się zapisać do tego newslettera ?" + +#: templates/newsletter/subscription_unsubscribe.html:5 +#: templates/newsletter/subscription_unsubscribe.html:8 +#: templates/newsletter/subscription_unsubscribe_email_sent.html:5 +#: templates/newsletter/subscription_unsubscribe_email_sent.html:8 +#: templates/newsletter/subscription_unsubscribe_user.html:5 +#: templates/newsletter/subscription_unsubscribe_user.html:8 +msgid "Newsletter unsubscribe" +msgstr "Wypisz z newslettera" + +#: templates/newsletter/subscription_unsubscribe.html:23 +#: templates/newsletter/subscription_unsubscribe_user.html:23 +msgid "Unsubscribe" +msgstr "Przestań subskrybować" + +#: templates/newsletter/subscription_unsubscribe_activated.html:10 +msgid "You have successfully been unsubscribed." +msgstr "Zostałeś pomyślnie wypisany." + +#: templates/newsletter/subscription_unsubscribe_email_sent.html:10 +msgid "" +"Your unsubscription request has successfully been received. An email has " +"been sent to you with a link you need to follow in order to confirm your " +"unsubscription." +msgstr "Twoja rezygnacja z subskrypcji została pomyślnie przyjęta. Została do Ciebie wysłana wiadomość e-mail z linkiem, który musisz kliknąć w celu potwierdzenia rezygnacji." + +#: templates/newsletter/subscription_unsubscribe_user.html:20 +msgid "Do you want to unsubscribe from this newsletter?" +msgstr "Czy na pewno chcesz się wypisać z tego newslettera?" 
+ +#: templates/newsletter/subscription_update.html:5 +#: templates/newsletter/subscription_update.html:8 +#: templates/newsletter/subscription_update_email_sent.html:5 +#: templates/newsletter/subscription_update_email_sent.html:8 +msgid "Newsletter update" +msgstr "Aktualizacja newslettera" + +#: templates/newsletter/subscription_update.html:23 +msgid "Update subscription" +msgstr "Zaktualizuj subskrypcję" + +#: templates/newsletter/subscription_update_activated.html:10 +msgid "Your subscription has successfully been updated." +msgstr "Twoja subskrypcja została pomyślnie zaktualizowana" + +#: templates/newsletter/subscription_update_email_sent.html:10 +msgid "" +"Your update request was successfully received and an activation email has " +"been sent to you. In that email you will find a link which you need to " +"follow in order to update your subscription." +msgstr "Twoja aktualizacja z subskrypcji została pomyślnie przyjęta. Została do Ciebie wysłana wiadomość e-mail z linkiem, który musisz kliknąć w celu potwierdzenia subskrypcji." + +#: templates/widget/image.html:2 +msgid "Currently:" +msgstr "Obecnie:" + +#: templates/widget/image.html:4 +msgid "Change:" +msgstr "Zmiana:" + +#: views.py:114 +msgid "Your changes have been saved." +msgstr "Twoje zmiany zostały zapisane." + +#: views.py:305 +#, python-format +msgid "You have been subscribed to %s." +msgstr "Zostałeś zapisany do %s." + +#: views.py:309 +#, python-format +msgid "User %(rs)s subscribed to %(my_newsletter)s." +msgstr "Użytkownik %(rs)s zapisany %(my_newsletter)s." + +#: views.py:319 +#, python-format +msgid "You are already subscribed to %s." +msgstr "Jesteś już zapisany do %s." + +#: views.py:344 +#, python-format +msgid "You have been unsubscribed from %s." +msgstr "Zostałeś wypisany z %s." + +#: views.py:348 +#, python-format +msgid "User %(rs)s unsubscribed from %(my_newsletter)s." +msgstr "Użytkownik %(rs)s został wypisany z %(my_newsletter)s." 
+ +#: views.py:361 +#, python-format +msgid "You are not subscribed to %s." +msgstr "Nie jesteś zapisany do %s." diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/locale/pl/LC_MESSAGES/djangojs.mo b/thesisenv/lib/python3.6/site-packages/newsletter/locale/pl/LC_MESSAGES/djangojs.mo new file mode 100644 index 0000000..2776aad Binary files /dev/null and b/thesisenv/lib/python3.6/site-packages/newsletter/locale/pl/LC_MESSAGES/djangojs.mo differ diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/locale/pl/LC_MESSAGES/djangojs.po b/thesisenv/lib/python3.6/site-packages/newsletter/locale/pl/LC_MESSAGES/djangojs.po new file mode 100644 index 0000000..ac4c77c --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/newsletter/locale/pl/LC_MESSAGES/djangojs.po @@ -0,0 +1,25 @@ +# SOME DESCRIPTIVE TITLE. +# Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER +# This file is distributed under the same license as the PACKAGE package. +# +# Translators: +# mmiskiew , 2014 +msgid "" +msgstr "" +"Project-Id-Version: django-newsletter\n" +"Report-Msgid-Bugs-To: \n" +"POT-Creation-Date: 2012-11-19 21:55+0100\n" +"PO-Revision-Date: 2014-07-31 18:30+0000\n" +"Last-Translator: mmiskiew \n" +"Language-Team: Polish (http://www.transifex.com/dokterbob/django-newsletter/language/pl/)\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Language: pl\n" +"Plural-Forms: nplurals=3; plural=(n==1 ? 0 : n%10>=2 && n%10<=4 && (n%100<10 || n%100>=20) ? 1 : 2);\n" + +#: static/newsletter/admin/js/submit_interface.js:12 +msgid "" +"The submission has been changed. It has to be saved before you can submit. " +"Click OK to proceed with saving, click cancel to continue editing." +msgstr "Wysyłka została zmieniona. Musi zostać zapisana zanim będzie możliwe jej wysłanie. Kliknij OK aby zapisać lub anuluj aby kontynuować edycję." 
diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/locale/pt_BR/LC_MESSAGES/django.mo b/thesisenv/lib/python3.6/site-packages/newsletter/locale/pt_BR/LC_MESSAGES/django.mo new file mode 100644 index 0000000..d1b943f Binary files /dev/null and b/thesisenv/lib/python3.6/site-packages/newsletter/locale/pt_BR/LC_MESSAGES/django.mo differ diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/locale/pt_BR/LC_MESSAGES/django.po b/thesisenv/lib/python3.6/site-packages/newsletter/locale/pt_BR/LC_MESSAGES/django.po new file mode 100644 index 0000000..a1eafd4 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/newsletter/locale/pt_BR/LC_MESSAGES/django.po @@ -0,0 +1,784 @@ +# SOME DESCRIPTIVE TITLE. +# Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER +# This file is distributed under the same license as the PACKAGE package. +# +# Translators: +# brunojm , 2013 +# brunojm , 2013-2015 +# Fábio , 2014 +# Luan Fonseca de Farias , 2013 +# Marcelo Zenaide , 2013 +# dokterbob , 2016 +# Partec , 2014 +# Ronaldo Bahia , 2015 +msgid "" +msgstr "" +"Project-Id-Version: django-newsletter\n" +"Report-Msgid-Bugs-To: \n" +"POT-Creation-Date: 2016-01-05 17:58+0100\n" +"PO-Revision-Date: 2016-02-02 13:34+0000\n" +"Last-Translator: dokterbob \n" +"Language-Team: Portuguese (Brazil) (http://www.transifex.com/dokterbob/django-newsletter/language/pt_BR/)\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Language: pt_BR\n" +"Plural-Forms: nplurals=2; plural=(n > 1);\n" + +#: addressimport/parsers.py:40 +#, python-format +msgid "Entry '%s' does not contain a valid e-mail address." +msgstr "A entrada '%s' não contém um endereço de e-mail válido." + +#: addressimport/parsers.py:55 +#, python-format +msgid "The address file contains duplicate entries for '%s'." +msgstr "O arquivo de endereços contém referências duplicadas para '%s'." + +#: addressimport/parsers.py:69 +msgid "Some entries are already subscribed to." 
+msgstr "Algumas entradas já foram inscritas." + +#: addressimport/parsers.py:108 +#, python-format +msgid "" +"E-mail address %(email)s too long, maximum length is %(email_length)s " +"characters." +msgstr "Endereço de email %(email)s está muito grande, o tamanho máximo é de %(email_length)s caracteres." + +#: addressimport/parsers.py:135 +#, python-format +msgid "Name %(name)s too long, maximum length is %(name_length)s characters." +msgstr "O nome %(name)s está muito grande, o tamanho máximo é de %(name_length)s caracteres" + +#: addressimport/parsers.py:196 addressimport/parsers.py:208 +#: addressimport/parsers.py:243 models.py:162 +msgid "name" +msgstr "nome" + +#: addressimport/parsers.py:200 +msgid "display" +msgstr "exibir" + +#: addressimport/parsers.py:207 +#, python-format +msgid "" +"Name column not found. The name of this column should be either 'name' or " +"'%s'." +msgstr "Coluna Nome não encontrada. O nome desta coluna deveria ser 'nome' ou '%s'." + +#: addressimport/parsers.py:219 addressimport/parsers.py:231 +#: addressimport/parsers.py:244 models.py:37 models.py:176 +msgid "e-mail" +msgstr "e-mail" + +#: addressimport/parsers.py:229 +#, python-format +msgid "" +"E-mail column not found. The name of this column should be either 'email', " +"'e-mail' or '%(email)s'." +msgstr "Coluna Email não encontrada. O nome desta coluna deveria ser 'email', 'e-mail' ou '%(email)s'." + +#: addressimport/parsers.py:239 +#, python-format +msgid "" +"Could not properly determine the proper columns in the CSV-file. There " +"should be a field called 'name' or '%(name)s' and one called 'e-mail' or " +"'%(e-mail)s'." +msgstr "Não foi possível determinar as colunas corretas no arquivo CSV. Deveria existir um campo chamado 'nome' ou '%(name)s' e um campo chamado 'e-mail' ou '%(e-mail)s'." + +#: addressimport/parsers.py:262 +#, python-format +msgid "Row with content '%(row)s' does not contain a name and email field." 
+msgstr "A linha '%(row)s' não possui o campo nome ou email." + +#: addressimport/parsers.py:288 +#, python-format +msgid "Error reading vCard file: %s" +msgstr "Erro lendo arquivo vCard: %s" + +#: addressimport/parsers.py:307 +#, python-format +msgid "Entry '%s' contains no email address." +msgstr "O texto informado '%s' não contém um endereço de e-mail." + +#: addressimport/parsers.py:343 +msgid "Some entries have no e-mail address." +msgstr "Algumas entradas não contêm endereços de e-mail." + +#: admin.py:63 +msgid "Messages" +msgstr "Mensagens" + +#: admin.py:71 +#: templates/admin/newsletter/subscription/confirmimportform.html:16 +#: templates/admin/newsletter/subscription/importform.html:16 +msgid "Subscriptions" +msgstr "Assinaturas" + +#: admin.py:77 +msgid "Submissions" +msgstr "Envios" + +#: admin.py:97 models.py:537 +msgid "submission" +msgstr "envio" + +#: admin.py:104 admin.py:247 admin.py:365 models.py:100 models.py:296 +#: models.py:487 models.py:665 +msgid "newsletter" +msgstr "boletim" + +#: admin.py:112 +msgid "publish date" +msgstr "data de publicação" + +#: admin.py:138 +msgid "Sent." +msgstr "Enviado." + +#: admin.py:141 +msgid "Delayed submission." +msgstr "Envio atrasado." + +#: admin.py:143 +msgid "Submitting." +msgstr "Enviando." + +#: admin.py:145 +msgid "Not sent." +msgstr "Não enviado." + +#: admin.py:146 admin.py:390 admin_forms.py:113 +msgid "Status" +msgstr "Estado" + +#: admin.py:153 +msgid "Submission already sent." +msgstr "Sua requisição está sendo enviada." + +#: admin.py:162 +msgid "Your submission is being sent." +msgstr "Sua requisição está sendo enviada." 
+ +#: admin.py:208 +msgid "Optional" +msgstr "Opcional" + +#: admin.py:235 models.py:460 models.py:512 models.py:668 +msgid "message" +msgstr "mensagem" + +#: admin.py:239 templates/admin/newsletter/message/change_form.html.py:9 +#: templates/admin/newsletter/message/preview.html:13 +msgid "Preview" +msgstr "Visualização" + +#: admin.py:267 views.py:606 +msgid "" +"No HTML template associated with the newsletter this message belongs to." +msgstr "Nenhum template HTML associado com o boletim que essa mensagem pertence" + +#: admin.py:385 admin_forms.py:101 +msgid "Subscribed" +msgstr "Assinado" + +#: admin.py:387 admin_forms.py:102 +msgid "Unsubscribed" +msgstr "Assinatura cancelada" + +#: admin.py:389 +msgid "Unactivated" +msgstr "Desativado" + +#: admin.py:397 models.py:309 +msgid "subscribe date" +msgstr "data da assinatura" + +#: admin.py:404 models.py:317 +msgid "unsubscribe date" +msgstr "data de cancelamento da assinatura" + +#: admin.py:412 +#, python-format +msgid "%s user has been successfully subscribed." +msgid_plural "%s users have been successfully subscribed." +msgstr[0] "%s usuário assinou com sucesso." +msgstr[1] "%s usuários assinaram com sucesso." + +#: admin.py:417 +msgid "Subscribe selected users" +msgstr "Assinar usuários selecionados" + +#: admin.py:424 +#, python-format +msgid "%s user has been successfully unsubscribed." +msgid_plural "%s users have been successfully unsubscribed." +msgstr[0] "%s usuários tiveram sua inscrição cancelada." +msgstr[1] "%s usuários tiveram sua inscrição cancelada." + +#: admin.py:429 +msgid "Unsubscribe selected users" +msgstr "Cancelar assinatura dos usuários selecionados" + +#: admin.py:484 +#, python-format +msgid "%s subscriptions have been successfully added." +msgstr "%s assinaturas foram adicionadas com sucesso." + +#: admin_forms.py:41 +#, python-format +msgid "File type '%s' was not recognized." +msgstr "Tipo de arquivo '%s' desconhecido." 
+ +#: admin_forms.py:58 +#, python-format +msgid "File extension '%s' was not recognized." +msgstr "Extensão '%s' desconhecida." + +#: admin_forms.py:62 +msgid "No entries could found in this file." +msgstr "Nenhuma entrada encontrada neste arquivo." + +#: admin_forms.py:70 +#: templates/admin/newsletter/subscription/confirmimportform.html:12 +#: templates/admin/newsletter/subscription/importform.html:12 +#: templates/newsletter/common.html:7 +#: templates/newsletter/newsletter_detail.html:10 +#: templates/newsletter/newsletter_list.html:14 +#: templates/newsletter/newsletter_list.html:32 +#: templates/newsletter/subscription_activate.html:5 +#: templates/newsletter/subscription_activate.html:8 +#: templates/newsletter/subscription_subscribe_activated.html:5 +#: templates/newsletter/subscription_subscribe_activated.html:8 +#: templates/newsletter/subscription_unsubscribe_activated.html:5 +#: templates/newsletter/subscription_unsubscribe_activated.html:8 +#: templates/newsletter/subscription_update_activated.html:5 +#: templates/newsletter/subscription_update_activated.html:8 +msgid "Newsletter" +msgstr "Boletim" + +#: admin_forms.py:73 +msgid "Address file" +msgstr "Arquivo de endereços" + +#: admin_forms.py:75 +msgid "Ignore non-fatal errors" +msgstr "Ignorar erros não fatais." + +#: admin_forms.py:86 +msgid "You should confirm in order to continue." +msgstr "Você deve confirmar antes de continuar a ação." + +#: admin_forms.py:89 +#: templates/admin/newsletter/subscription/confirmimportform.html:23 +#: templates/admin/newsletter/subscription/confirmimportform.html:28 +msgid "Confirm import" +msgstr "Confirmar a importação" + +#: admin_forms.py:119 admin_forms.py:127 +msgid "If a user has been selected this field should remain empty." +msgstr "Se um usuário foi selecionado esse campo deve permanecer vazio." + +#: admin_forms.py:137 +msgid "Either a user must be selected or an email address must be specified." +msgstr "Selecione um usuário ou um endereço de email." 
+ +#: admin_forms.py:162 +msgid "" +"This message has already been published in some other submission. Messages " +"can only be published once." +msgstr "Essa mensagem já foi publicada em alguma outra submissão. Mensagens só podem ser publicadas uma vez." + +#: admin_utils.py:25 +#, python-format +msgid "%(name)s object with primary key %(key)r does not exist." +msgstr "O objeto %(name)s com chave primária %(key)r não existe." + +#: forms.py:47 forms.py:106 +msgid "An e-mail address is required." +msgstr "Um endereço de e-mail é obrigatório." + +#: forms.py:55 +#, python-format +msgid "" +"The e-mail address '%(email)s' belongs to a user with an account on this " +"site. Please log in as that user and try again." +msgstr "O endereço de e-mail '%(email)s' pertence a um usuário com uma conta neste site. Por favor acesse como esse usuário e tente novamente." + +#: forms.py:72 +msgid "Your e-mail address has already been subscribed to." +msgstr "Seu endereço de e-mail já está inscrito." + +#: forms.py:97 +msgid "This subscription has not yet been activated." +msgstr "Esta inscrição ainda não foi ativada." + +#: forms.py:114 +#, python-format +msgid "" +"This e-mail address belongs to the user '%(username)s'. Please log in as " +"that user and try again." +msgstr "Esse endereço de e-mail pertence ao usuário '%(username)s'. Por favor acesse como esse usuário e tente novamente." + +#: forms.py:132 +msgid "This e-mail address has not been subscribed to." +msgstr "Este endereço de e-mail não está inscrito." + +#: forms.py:147 +msgid "This subscription has already been unsubscribed from." +msgstr "Esta inscrição já foi cancelada." + +#: forms.py:163 +msgid "The validation code supplied by you does not match." +msgstr "O código de validação informado não é válido." 
+ +#: forms.py:169 +msgid "Activation code" +msgstr "Código de ativação" + +#: jobs/hourly/submit.py:15 +msgid "Submitting queued newsletter mailings" +msgstr "Enviando boletim para lista de e-mails" + +#: models.py:32 +msgid "newsletter title" +msgstr "título da lista" + +#: models.py:37 +msgid "Sender e-mail" +msgstr "E-mail do remetente" + +#: models.py:40 +msgid "sender" +msgstr "remetente" + +#: models.py:40 +msgid "Sender name" +msgstr "Nome do remetente" + +#: models.py:44 +msgid "visible" +msgstr "visível" + +#: models.py:48 +msgid "send html" +msgstr "enviar html" + +#: models.py:49 +msgid "Whether or not to send HTML versions of e-mails." +msgstr "Se deseja ou não enviar versões HTML de e-mails." + +#: models.py:101 +msgid "newsletters" +msgstr "Boletins" + +#: models.py:157 +msgid "user" +msgstr "usuário" + +#: models.py:162 +msgid "optional" +msgstr "opcional" + +#: models.py:206 +#, python-format +msgid "Updated subscription %(subscription)s to %(action)s." +msgstr "Inscrição %(subscription)s atualizada para %(action)s." + +#: models.py:248 +msgid "Neither an email nor a username is set. This asks for inconsistency!" +msgstr "Informe um e-mail ou nome de usuário!" + +#: models.py:252 +msgid "If user is set, email must be null and vice versa." +msgstr "Se o usuário for definido, e-mail deve ser nulo e vice versa." 
+ +#: models.py:294 +msgid "IP address" +msgstr "Endereço IP" + +#: models.py:301 +msgid "activation code" +msgstr "código de ativação" + +#: models.py:306 +msgid "subscribed" +msgstr "assinado" + +#: models.py:314 +msgid "unsubscribed" +msgstr "assinatura cancelada" + +#: models.py:322 +#, python-format +msgid "%(name)s <%(email)s> to %(newsletter)s" +msgstr "%(name)s <%(email)s> para %(newsletter)s" + +#: models.py:329 +#, python-format +msgid "%(email)s to %(newsletter)s" +msgstr "%(email)s para %(newsletter)s" + +#: models.py:335 +msgid "subscription" +msgstr "assinatura" + +#: models.py:336 +msgid "subscriptions" +msgstr "assinaturas" + +#: models.py:439 +msgid "" +"Sort order determines the order in which articles are concatenated in a " +"post." +msgstr "Ordenação determina a ordem que os artigos são concatenados numa postagem." + +#: models.py:441 +msgid "sort order" +msgstr "ordenação" + +#: models.py:444 models.py:483 +msgid "title" +msgstr "título" + +#: models.py:445 +msgid "text" +msgstr "texto" + +#: models.py:448 +msgid "link" +msgstr "link" + +#: models.py:454 +msgid "image" +msgstr "imagem" + +#: models.py:465 +msgid "article" +msgstr "artigo" + +#: models.py:466 +msgid "articles" +msgstr "artigos" + +#: models.py:484 +msgid "slug" +msgstr "slug" + +#: models.py:491 +msgid "created" +msgstr "criado" + +#: models.py:494 +msgid "modified" +msgstr "modificado" + +#: models.py:499 +#, python-format +msgid "%(title)s in %(newsletter)s" +msgstr "%(title)s em %(newsletter)s" + +#: models.py:513 +msgid "messages" +msgstr "mensagens" + +#: models.py:538 +msgid "submissions" +msgstr "envios" + +#: models.py:541 +#, python-format +msgid "%(newsletter)s on %(publish_date)s" +msgstr "%(newsletter)s em %(publish_date)s" + +#: models.py:550 +#, python-format +msgid "Submitting %(submission)s to %(count)d people" +msgstr "Submetendo %(submission)s para %(count)d pessoas" + +#: models.py:597 +#, python-format +msgid "Submitting message to: %s." 
+msgstr "Submetendo mensagem para %s." + +#: models.py:606 +#, python-format +msgid "Message %(subscription)s failed with error: %(error)s" +msgstr "Mensagem %(subscription)s falhou com o erro: %(error)s" + +#: models.py:630 +#, python-format +msgid "Submission of message %s" +msgstr "Envio de mensagem %s" + +#: models.py:673 +msgid "" +"If you select none, the system will automatically find the subscribers for " +"you." +msgstr "Se você selecionar nenhum, o sistema automaticamente encontrará os assinantes para você." + +#: models.py:675 +msgid "recipients" +msgstr "destinatários" + +#: models.py:680 +msgid "publication date" +msgstr "data de publicação" + +#: models.py:684 +msgid "publish" +msgstr "publicar" + +#: models.py:685 +msgid "Publish in archive." +msgstr "Publicar em arquivo." + +#: models.py:689 +msgid "prepared" +msgstr "Pronto" + +#: models.py:693 +msgid "sent" +msgstr "enviado" + +#: models.py:697 +msgid "sending" +msgstr "enviando" + +#: templates/admin/newsletter/message/change_form.html:7 +#: templates/admin/newsletter/newsletter/change_form.html:7 +#: templates/admin/newsletter/submission/change_form.html:14 +msgid "History" +msgstr "Histórico" + +#: templates/admin/newsletter/message/change_form.html:8 +#: templates/admin/newsletter/newsletter/change_form.html:8 +#: templates/admin/newsletter/submission/change_form.html:15 +msgid "View on site" +msgstr "Ver no site" + +#: templates/admin/newsletter/message/preview.html:5 +#: templates/admin/newsletter/message/preview.html:19 +msgid "Preview message" +msgstr "Pré-visualizar mensagem" + +#: templates/admin/newsletter/message/preview.html:9 +#: templates/admin/newsletter/subscription/confirmimportform.html:8 +#: templates/admin/newsletter/subscription/importform.html:8 +msgid "Home" +msgstr "Início" + +#: templates/admin/newsletter/message/preview.html:11 +msgid "Message" +msgstr "Mensagem" + +#: templates/admin/newsletter/message/preview.html:22 +#: 
templates/admin/newsletter/subscription/importform.html:28 +msgid "Change" +msgstr "Alteração" + +#: templates/admin/newsletter/message/preview.html:23 +#: templates/admin/newsletter/subscription/importform.html:29 +msgid "Create submission" +msgstr "Criar envio" + +#: templates/admin/newsletter/message/preview.html:26 +msgid "HTML" +msgstr "HTML" + +#: templates/admin/newsletter/message/preview.html:30 +msgid "Text" +msgstr "Texto" + +#: templates/admin/newsletter/submission/change_form.html:16 +msgid "Submit" +msgstr "Enviar" + +#: templates/admin/newsletter/subscription/change_list.html:8 +msgid "import" +msgstr "importar" + +#: templates/admin/newsletter/subscription/change_list.html:12 +#, python-format +msgid "Add %(name)s" +msgstr "Adicionar %(name)s" + +#: templates/admin/newsletter/subscription/confirmimportform.html:3 +#: templates/admin/newsletter/subscription/confirmimportform.html:20 +#: templates/admin/newsletter/subscription/importform.html:3 +#: templates/admin/newsletter/subscription/importform.html:19 +#: templates/admin/newsletter/subscription/importform.html:24 +msgid "Import addresses" +msgstr "Importar endereço" + +#: templates/admin/newsletter/subscription/confirmimportform.html:40 +msgid "Confirm" +msgstr "Confirmar" + +#: templates/admin/newsletter/subscription/importform.html:37 +msgid "Upload" +msgstr "Upload" + +#: templates/newsletter/newsletter_detail.html:5 +msgid "Newsletter detail" +msgstr "Detalhes do Boletim" + +#: templates/newsletter/newsletter_list.html:5 +msgid "Newsletter list" +msgstr "Lista de Boletins" + +#: templates/newsletter/newsletter_list.html:16 +#: templates/newsletter/subscription_subscribe.html:23 +#: templates/newsletter/subscription_subscribe_user.html:22 +msgid "Subscribe" +msgstr "Assinar" + +#: templates/newsletter/newsletter_list.html:27 +msgid "Update subscriptions" +msgstr "Atualizar inscrições" + +#: templates/newsletter/submission_archive.html:5 +#: templates/newsletter/submission_archive.html:10 +msgid 
"Newsletter archive" +msgstr "Arquivos do Boletim" + +#: templates/newsletter/subscription_activate.html:5 +#: templates/newsletter/subscription_activate.html:8 +#: templates/newsletter/subscription_subscribe_activated.html:5 +#: templates/newsletter/subscription_subscribe_activated.html:8 +#: templates/newsletter/subscription_unsubscribe_activated.html:5 +#: templates/newsletter/subscription_unsubscribe_activated.html:8 +#: templates/newsletter/subscription_update_activated.html:5 +#: templates/newsletter/subscription_update_activated.html:8 +msgid "activate" +msgstr "ativar" + +#: templates/newsletter/subscription_activate.html:13 +msgid "Activate" +msgstr "Ativar" + +#: templates/newsletter/subscription_subscribe.html:5 +#: templates/newsletter/subscription_subscribe.html:8 +#: templates/newsletter/subscription_subscribe_email_sent.html:5 +#: templates/newsletter/subscription_subscribe_email_sent.html:8 +#: templates/newsletter/subscription_subscribe_user.html:5 +#: templates/newsletter/subscription_subscribe_user.html:8 +msgid "Newsletter subscribe" +msgstr "Assinar Boletim" + +#: templates/newsletter/subscription_subscribe.html:11 +#: templates/newsletter/subscription_unsubscribe.html:11 +#: templates/newsletter/subscription_update.html:11 +msgid "" +"Due to a technical error we were not able to submit your confirmation email." +" This could be because your email address is invalid." +msgstr "Devido a um erro técnico, não foi possível enviar seu e-mail de confirmação. Isto pode ter ocorrido porque o seu endereço de e-mail é inválido." + +#: templates/newsletter/subscription_subscribe_activated.html:10 +msgid "Your subscription has successfully been activated." +msgstr "Sua assinatura foi ativada com sucesso." + +#: templates/newsletter/subscription_subscribe_email_sent.html:10 +msgid "" +"Your subscription request was successfully received and an activation email " +"has been sent to you. 
In that email you will find a link which you need to " +"follow in order to activate your subscription." +msgstr "Sua solicitação de inscrição foi recebido com sucesso e um e-mail de ativação foi enviado para você. Nesse e-mail você vai encontrar um link que deve ser acessado a fim de ativar sua assinatura." + +#: templates/newsletter/subscription_subscribe_user.html:19 +msgid "Do you want to subscribe to this newsletter?" +msgstr "Você deseja assinar este boletim?" + +#: templates/newsletter/subscription_unsubscribe.html:5 +#: templates/newsletter/subscription_unsubscribe.html:8 +#: templates/newsletter/subscription_unsubscribe_email_sent.html:5 +#: templates/newsletter/subscription_unsubscribe_email_sent.html:8 +#: templates/newsletter/subscription_unsubscribe_user.html:5 +#: templates/newsletter/subscription_unsubscribe_user.html:8 +msgid "Newsletter unsubscribe" +msgstr "Cancelar assinatura do Boletim" + +#: templates/newsletter/subscription_unsubscribe.html:23 +#: templates/newsletter/subscription_unsubscribe_user.html:23 +msgid "Unsubscribe" +msgstr "Cancelar assinatura" + +#: templates/newsletter/subscription_unsubscribe_activated.html:10 +msgid "You have successfully been unsubscribed." +msgstr "Sua assinatura foi cancelada com sucesso." + +#: templates/newsletter/subscription_unsubscribe_email_sent.html:10 +msgid "" +"Your unsubscription request has successfully been received. An email has " +"been sent to you with a link you need to follow in order to confirm your " +"unsubscription." +msgstr "Sua solicitação de cancelamento foi recebida com sucesso. Um e-mail foi enviado para você com um link que deve ser acessado a fim de confirmar o seu cancelamento." + +#: templates/newsletter/subscription_unsubscribe_user.html:20 +msgid "Do you want to unsubscribe from this newsletter?" +msgstr "Você realmente deseja cancelar a assinatura desse boletim?" 
+ +#: templates/newsletter/subscription_update.html:5 +#: templates/newsletter/subscription_update.html:8 +#: templates/newsletter/subscription_update_email_sent.html:5 +#: templates/newsletter/subscription_update_email_sent.html:8 +msgid "Newsletter update" +msgstr "Atualizar Boletim" + +#: templates/newsletter/subscription_update.html:23 +msgid "Update subscription" +msgstr "Atualizar assinatura" + +#: templates/newsletter/subscription_update_activated.html:10 +msgid "Your subscription has successfully been updated." +msgstr "Sua assinatura foi atualizada com sucesso." + +#: templates/newsletter/subscription_update_email_sent.html:10 +msgid "" +"Your update request was successfully received and an activation email has " +"been sent to you. In that email you will find a link which you need to " +"follow in order to update your subscription." +msgstr "Sua solicitação de atualização foi recebido com sucesso e um e-mail de ativação foi enviado para você. Nesse e-mail você vai encontrar um link que deve ser acessado a fim de atualizar a sua assinatura." + +#: templates/widget/image.html:2 +msgid "Currently:" +msgstr "Atualmente:" + +#: templates/widget/image.html:4 +msgid "Change:" +msgstr "Alteração:" + +#: views.py:114 +msgid "Your changes have been saved." +msgstr "Suas alterações foram salvas." + +#: views.py:305 +#, python-format +msgid "You have been subscribed to %s." +msgstr "Você foi inscrito em %s." + +#: views.py:309 +#, python-format +msgid "User %(rs)s subscribed to %(my_newsletter)s." +msgstr "Usuário %(rs)s assinou a %(my_newsletter)s." + +#: views.py:319 +#, python-format +msgid "You are already subscribed to %s." +msgstr "Você já está inscrito em %s." + +#: views.py:344 +#, python-format +msgid "You have been unsubscribed from %s." +msgstr "Você foi desinscrito de %s." + +#: views.py:348 +#, python-format +msgid "User %(rs)s unsubscribed from %(my_newsletter)s." +msgstr "Usuário %(rs)s desinscrito de %(my_newsletter)s." 
+ +#: views.py:361 +#, python-format +msgid "You are not subscribed to %s." +msgstr "Você não está inscrito em %s." diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/locale/pt_BR/LC_MESSAGES/djangojs.mo b/thesisenv/lib/python3.6/site-packages/newsletter/locale/pt_BR/LC_MESSAGES/djangojs.mo new file mode 100644 index 0000000..bfb959a Binary files /dev/null and b/thesisenv/lib/python3.6/site-packages/newsletter/locale/pt_BR/LC_MESSAGES/djangojs.mo differ diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/locale/pt_BR/LC_MESSAGES/djangojs.po b/thesisenv/lib/python3.6/site-packages/newsletter/locale/pt_BR/LC_MESSAGES/djangojs.po new file mode 100644 index 0000000..fa521be --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/newsletter/locale/pt_BR/LC_MESSAGES/djangojs.po @@ -0,0 +1,25 @@ +# SOME DESCRIPTIVE TITLE. +# Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER +# This file is distributed under the same license as the PACKAGE package. +# +# Translators: +# brunojm , 2013 +msgid "" +msgstr "" +"Project-Id-Version: django-newsletter\n" +"Report-Msgid-Bugs-To: \n" +"POT-Creation-Date: 2012-11-19 21:55+0100\n" +"PO-Revision-Date: 2013-11-20 12:25+0000\n" +"Last-Translator: brunojm \n" +"Language-Team: Portuguese (Brazil) (http://www.transifex.com/dokterbob/django-newsletter/language/pt_BR/)\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Language: pt_BR\n" +"Plural-Forms: nplurals=2; plural=(n > 1);\n" + +#: static/newsletter/admin/js/submit_interface.js:12 +msgid "" +"The submission has been changed. It has to be saved before you can submit. " +"Click OK to proceed with saving, click cancel to continue editing." +msgstr "O envio foi modificado. Deve ser salvo antes de continuar. Clique 'OK' para continuar com o salvamento, clique 'Cancelar' para continuar editando." 
diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/locale/pt_PT/LC_MESSAGES/djangojs.mo b/thesisenv/lib/python3.6/site-packages/newsletter/locale/pt_PT/LC_MESSAGES/djangojs.mo new file mode 100644 index 0000000..f7f8fcb Binary files /dev/null and b/thesisenv/lib/python3.6/site-packages/newsletter/locale/pt_PT/LC_MESSAGES/djangojs.mo differ diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/locale/pt_PT/LC_MESSAGES/djangojs.po b/thesisenv/lib/python3.6/site-packages/newsletter/locale/pt_PT/LC_MESSAGES/djangojs.po new file mode 100644 index 0000000..5bf35f2 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/newsletter/locale/pt_PT/LC_MESSAGES/djangojs.po @@ -0,0 +1,25 @@ +# SOME DESCRIPTIVE TITLE. +# Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER +# This file is distributed under the same license as the PACKAGE package. +# +# Translators: +# jumpifzero , 2011 +msgid "" +msgstr "" +"Project-Id-Version: django-newsletter\n" +"Report-Msgid-Bugs-To: \n" +"POT-Creation-Date: 2012-11-19 21:55+0100\n" +"PO-Revision-Date: 2013-11-20 12:25+0000\n" +"Last-Translator: jumpifzero \n" +"Language-Team: Portuguese (Portugal) (http://www.transifex.com/dokterbob/django-newsletter/language/pt_PT/)\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Language: pt_PT\n" +"Plural-Forms: nplurals=2; plural=(n != 1);\n" + +#: static/newsletter/admin/js/submit_interface.js:12 +msgid "" +"The submission has been changed. It has to be saved before you can submit. " +"Click OK to proceed with saving, click cancel to continue editing." +msgstr "A submissão foi alterada e tem de ser gravada antes de poder submeter. Pressione OK para gravar ou cancelar para continuar a editar." 
diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/locale/ru/LC_MESSAGES/django.mo b/thesisenv/lib/python3.6/site-packages/newsletter/locale/ru/LC_MESSAGES/django.mo new file mode 100644 index 0000000..450729e Binary files /dev/null and b/thesisenv/lib/python3.6/site-packages/newsletter/locale/ru/LC_MESSAGES/django.mo differ diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/locale/ru/LC_MESSAGES/django.po b/thesisenv/lib/python3.6/site-packages/newsletter/locale/ru/LC_MESSAGES/django.po new file mode 100644 index 0000000..614e7be --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/newsletter/locale/ru/LC_MESSAGES/django.po @@ -0,0 +1,783 @@ +# SOME DESCRIPTIVE TITLE. +# Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER +# This file is distributed under the same license as the PACKAGE package. +# +# Translators: +# Eugene Akentyev , 2013 +# dokterbob , 2016 +# Oleksandr , 2014 +msgid "" +msgstr "" +"Project-Id-Version: django-newsletter\n" +"Report-Msgid-Bugs-To: \n" +"POT-Creation-Date: 2016-01-05 17:58+0100\n" +"PO-Revision-Date: 2016-02-02 13:36+0000\n" +"Last-Translator: dokterbob \n" +"Language-Team: Russian (http://www.transifex.com/dokterbob/django-newsletter/language/ru/)\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Language: ru\n" +"Plural-Forms: nplurals=4; plural=(n%10==1 && n%100!=11 ? 0 : n%10>=2 && n%10<=4 && (n%100<12 || n%100>14) ? 1 : n%10==0 || (n%10>=5 && n%10<=9) || (n%100>=11 && n%100<=14)? 2 : 3);\n" + +#: addressimport/parsers.py:40 +#, python-format +msgid "Entry '%s' does not contain a valid e-mail address." +msgstr "Запись '%s' не содержит правильный адрес электронной почты." + +#: addressimport/parsers.py:55 +#, python-format +msgid "The address file contains duplicate entries for '%s'." +msgstr "Файл с адресами содержит копии '%s'." + +#: addressimport/parsers.py:69 +msgid "Some entries are already subscribed to." 
+msgstr "Некоторые записи уже подписаны." + +#: addressimport/parsers.py:108 +#, python-format +msgid "" +"E-mail address %(email)s too long, maximum length is %(email_length)s " +"characters." +msgstr "Адрес электронной почты %(email)s слишком длинный, максимальная длина: %(email_length)s символов." + +#: addressimport/parsers.py:135 +#, python-format +msgid "Name %(name)s too long, maximum length is %(name_length)s characters." +msgstr "Имя %(name)s слишком длинное, максимальная длина: %(name_length)s символов." + +#: addressimport/parsers.py:196 addressimport/parsers.py:208 +#: addressimport/parsers.py:243 models.py:162 +msgid "name" +msgstr "имя" + +#: addressimport/parsers.py:200 +msgid "display" +msgstr "отображение" + +#: addressimport/parsers.py:207 +#, python-format +msgid "" +"Name column not found. The name of this column should be either 'name' or " +"'%s'." +msgstr "Столбец с именем не найден. Имя этого столбца должно быть 'name' или '%s'." + +#: addressimport/parsers.py:219 addressimport/parsers.py:231 +#: addressimport/parsers.py:244 models.py:37 models.py:176 +msgid "e-mail" +msgstr "электронная почта" + +#: addressimport/parsers.py:229 +#, python-format +msgid "" +"E-mail column not found. The name of this column should be either 'email', " +"'e-mail' or '%(email)s'." +msgstr "Колонка с электронной почтой не найдена. Имя этой колонки должно быть 'email', 'e-mail' или '%(email)s'." + +#: addressimport/parsers.py:239 +#, python-format +msgid "" +"Could not properly determine the proper columns in the CSV-file. There " +"should be a field called 'name' or '%(name)s' and one called 'e-mail' or " +"'%(e-mail)s'." +msgstr "Невозможно верно определить колонки в CSV-файле. Необходимы поля с именами 'name' или '%(name)s' и 'e-mail' или '%(e-mail)s'." + +#: addressimport/parsers.py:262 +#, python-format +msgid "Row with content '%(row)s' does not contain a name and email field." 
+msgstr "Строка, содержащая '%(row)s', не содержит поля имени и электронной почты." + +#: addressimport/parsers.py:288 +#, python-format +msgid "Error reading vCard file: %s" +msgstr "Ошибка при чтении vCard файла: %s" + +#: addressimport/parsers.py:307 +#, python-format +msgid "Entry '%s' contains no email address." +msgstr "В записи '%s' нет адреса электронной почты." + +#: addressimport/parsers.py:343 +msgid "Some entries have no e-mail address." +msgstr "Некоторые записи не содержат адрес электронной почты." + +#: admin.py:63 +msgid "Messages" +msgstr "Сообщения" + +#: admin.py:71 +#: templates/admin/newsletter/subscription/confirmimportform.html:16 +#: templates/admin/newsletter/subscription/importform.html:16 +msgid "Subscriptions" +msgstr "Подписчики" + +#: admin.py:77 +msgid "Submissions" +msgstr "Отправки" + +#: admin.py:97 models.py:537 +msgid "submission" +msgstr "отправка" + +#: admin.py:104 admin.py:247 admin.py:365 models.py:100 models.py:296 +#: models.py:487 models.py:665 +msgid "newsletter" +msgstr "рассылка" + +#: admin.py:112 +msgid "publish date" +msgstr "дата публикации" + +#: admin.py:138 +msgid "Sent." +msgstr "Отправлено." + +#: admin.py:141 +msgid "Delayed submission." +msgstr "Отправка с задержкой." + +#: admin.py:143 +msgid "Submitting." +msgstr "Отправляется." + +#: admin.py:145 +msgid "Not sent." +msgstr "Не отправлено." + +#: admin.py:146 admin.py:390 admin_forms.py:113 +msgid "Status" +msgstr "Статус" + +#: admin.py:153 +msgid "Submission already sent." +msgstr "Посылка уже отправлена." + +#: admin.py:162 +msgid "Your submission is being sent." +msgstr "Ваша посылка отправляется." 
+ +#: admin.py:208 +msgid "Optional" +msgstr "Необязательно" + +#: admin.py:235 models.py:460 models.py:512 models.py:668 +msgid "message" +msgstr "сообщение" + +#: admin.py:239 templates/admin/newsletter/message/change_form.html.py:9 +#: templates/admin/newsletter/message/preview.html:13 +msgid "Preview" +msgstr "Предпросмотр" + +#: admin.py:267 views.py:606 +msgid "" +"No HTML template associated with the newsletter this message belongs to." +msgstr "Для рассылки, к которой принадлежит это сообщение, не было найдено HTML шаблона." + +#: admin.py:385 admin_forms.py:101 +msgid "Subscribed" +msgstr "Подписался" + +#: admin.py:387 admin_forms.py:102 +msgid "Unsubscribed" +msgstr "Отписанный" + +#: admin.py:389 +msgid "Unactivated" +msgstr "Неактивированный" + +#: admin.py:397 models.py:309 +msgid "subscribe date" +msgstr "дата подписки" + +#: admin.py:404 models.py:317 +msgid "unsubscribe date" +msgstr "дата отписки" + +#: admin.py:412 +#, python-format +msgid "%s user has been successfully subscribed." +msgid_plural "%s users have been successfully subscribed." +msgstr[0] "%s пользователь был успешно подписан." +msgstr[1] "%s пользователи были успешно подписаны." +msgstr[2] "%s пользователей были успешно подписаны." +msgstr[3] "%s пользователей были успешно подписаны." + +#: admin.py:417 +msgid "Subscribe selected users" +msgstr "Подписать выбранных пользователей" + +#: admin.py:424 +#, python-format +msgid "%s user has been successfully unsubscribed." +msgid_plural "%s users have been successfully unsubscribed." +msgstr[0] "%s пользователь был успешно отписан." +msgstr[1] "%s пользователи были успешно отписаны." +msgstr[2] "%s пользователей были успешно отписаны." +msgstr[3] "%s пользователей были успешно отписаны." + +#: admin.py:429 +msgid "Unsubscribe selected users" +msgstr "Отписать выбранных пользователей" + +#: admin.py:484 +#, python-format +msgid "%s subscriptions have been successfully added." +msgstr "%s подписчики успешно добавлены." 
+ +#: admin_forms.py:41 +#, python-format +msgid "File type '%s' was not recognized." +msgstr "Тип файла '%s' не был опознан." + +#: admin_forms.py:58 +#, python-format +msgid "File extension '%s' was not recognized." +msgstr "Расширение файла '%s' не было опознано." + +#: admin_forms.py:62 +msgid "No entries could found in this file." +msgstr "Данный файл не содержит записей." + +#: admin_forms.py:70 +#: templates/admin/newsletter/subscription/confirmimportform.html:12 +#: templates/admin/newsletter/subscription/importform.html:12 +#: templates/newsletter/common.html:7 +#: templates/newsletter/newsletter_detail.html:10 +#: templates/newsletter/newsletter_list.html:14 +#: templates/newsletter/newsletter_list.html:32 +#: templates/newsletter/subscription_activate.html:5 +#: templates/newsletter/subscription_activate.html:8 +#: templates/newsletter/subscription_subscribe_activated.html:5 +#: templates/newsletter/subscription_subscribe_activated.html:8 +#: templates/newsletter/subscription_unsubscribe_activated.html:5 +#: templates/newsletter/subscription_unsubscribe_activated.html:8 +#: templates/newsletter/subscription_update_activated.html:5 +#: templates/newsletter/subscription_update_activated.html:8 +msgid "Newsletter" +msgstr "Рассылка" + +#: admin_forms.py:73 +msgid "Address file" +msgstr "Файл с адресами" + +#: admin_forms.py:75 +msgid "Ignore non-fatal errors" +msgstr "Игнорировать неустранимые ошибки" + +#: admin_forms.py:86 +msgid "You should confirm in order to continue." +msgstr "Вы должны подтвердить, чтобы продолжить." + +#: admin_forms.py:89 +#: templates/admin/newsletter/subscription/confirmimportform.html:23 +#: templates/admin/newsletter/subscription/confirmimportform.html:28 +msgid "Confirm import" +msgstr "Подтвердить импортирование" + +#: admin_forms.py:119 admin_forms.py:127 +msgid "If a user has been selected this field should remain empty." +msgstr "Поле должно быть пустым, если пользователь был выбран." 
+ +#: admin_forms.py:137 +msgid "Either a user must be selected or an email address must be specified." +msgstr "Какой-нибудь пользователь должен быть выбран или указан адрес электронной почты." + +#: admin_forms.py:162 +msgid "" +"This message has already been published in some other submission. Messages " +"can only be published once." +msgstr "Это сообщение уже было опубликовано в другой посылке. Сообщения могут быть опубликованы только раз." + +#: admin_utils.py:25 +#, python-format +msgid "%(name)s object with primary key %(key)r does not exist." +msgstr "%(name)s объект с первичным ключем %(key)r не существует" + +#: forms.py:47 forms.py:106 +msgid "An e-mail address is required." +msgstr "Необходим адрес электронной почты." + +#: forms.py:55 +#, python-format +msgid "" +"The e-mail address '%(email)s' belongs to a user with an account on this " +"site. Please log in as that user and try again." +msgstr "Адрес электронной почты '%(email)s' принадлежит пользователю этого сайта. Пожалуйста зайдите и попробуйте снова." + +#: forms.py:72 +msgid "Your e-mail address has already been subscribed to." +msgstr "Ваш адрес электронной почты уже подписан." + +#: forms.py:97 +msgid "This subscription has not yet been activated." +msgstr "Эта подписка еще не активирована." + +#: forms.py:114 +#, python-format +msgid "" +"This e-mail address belongs to the user '%(username)s'. Please log in as " +"that user and try again." +msgstr "Этот адрес электронной почты принадлежит пользователю '%(username)s'. Пожалуйста войдите и попробуйте еще раз." + +#: forms.py:132 +msgid "This e-mail address has not been subscribed to." +msgstr "Этот адрес электронной почты не подписан." + +#: forms.py:147 +msgid "This subscription has already been unsubscribed from." +msgstr "Это подписка уже отменена." + +#: forms.py:163 +msgid "The validation code supplied by you does not match." +msgstr "Предоставляемый вами код подтверждения не совпадает." 
+ +#: forms.py:169 +msgid "Activation code" +msgstr "Код активации" + +#: jobs/hourly/submit.py:15 +msgid "Submitting queued newsletter mailings" +msgstr "Посылка добавлена в очередь рассылок" + +#: models.py:32 +msgid "newsletter title" +msgstr "заголовок рассылки" + +#: models.py:37 +msgid "Sender e-mail" +msgstr "Электронная почта отправителя" + +#: models.py:40 +msgid "sender" +msgstr "отправитель" + +#: models.py:40 +msgid "Sender name" +msgstr "Имя отправителя" + +#: models.py:44 +msgid "visible" +msgstr "отображается" + +#: models.py:48 +msgid "send html" +msgstr "отправить html" + +#: models.py:49 +msgid "Whether or not to send HTML versions of e-mails." +msgstr "Будете или нет отправлять HTML версию электронных писем." + +#: models.py:101 +msgid "newsletters" +msgstr "рассылки" + +#: models.py:157 +msgid "user" +msgstr "пользователь" + +#: models.py:162 +msgid "optional" +msgstr "необязательно" + +#: models.py:206 +#, python-format +msgid "Updated subscription %(subscription)s to %(action)s." +msgstr "Обновлена подписка %(subscription)s на %(action)s." + +#: models.py:248 +msgid "Neither an email nor a username is set. This asks for inconsistency!" +msgstr "Ни один электронный ящик или пользователь не установлены. Здесь несоответствие!" + +#: models.py:252 +msgid "If user is set, email must be null and vice versa." +msgstr "Если пользователь указан, то электронная почта не нужна и наоборот." 
+ +#: models.py:294 +msgid "IP address" +msgstr "IP адрес" + +#: models.py:301 +msgid "activation code" +msgstr "код активации" + +#: models.py:306 +msgid "subscribed" +msgstr "подписался" + +#: models.py:314 +msgid "unsubscribed" +msgstr "отписался" + +#: models.py:322 +#, python-format +msgid "%(name)s <%(email)s> to %(newsletter)s" +msgstr "%(name)s <%(email)s> в %(newsletter)s" + +#: models.py:329 +#, python-format +msgid "%(email)s to %(newsletter)s" +msgstr "%(email)s в %(newsletter)s" + +#: models.py:335 +msgid "subscription" +msgstr "подписка" + +#: models.py:336 +msgid "subscriptions" +msgstr "подписки" + +#: models.py:439 +msgid "" +"Sort order determines the order in which articles are concatenated in a " +"post." +msgstr "Порядок сортировки определяет порядок, в котором статьи будут собраны в пост." + +#: models.py:441 +msgid "sort order" +msgstr "порядок сортировки" + +#: models.py:444 models.py:483 +msgid "title" +msgstr "заголовок" + +#: models.py:445 +msgid "text" +msgstr "текст" + +#: models.py:448 +msgid "link" +msgstr "ссылка" + +#: models.py:454 +msgid "image" +msgstr "изображение" + +#: models.py:465 +msgid "article" +msgstr "статья" + +#: models.py:466 +msgid "articles" +msgstr "статьи" + +#: models.py:484 +msgid "slug" +msgstr "ЧПУ" + +#: models.py:491 +msgid "created" +msgstr "создан" + +#: models.py:494 +msgid "modified" +msgstr "изменен" + +#: models.py:499 +#, python-format +msgid "%(title)s in %(newsletter)s" +msgstr "%(title)s в %(newsletter)s" + +#: models.py:513 +msgid "messages" +msgstr "сообщения" + +#: models.py:538 +msgid "submissions" +msgstr "отправки" + +#: models.py:541 +#, python-format +msgid "%(newsletter)s on %(publish_date)s" +msgstr "%(newsletter)s на %(publish_date)s" + +#: models.py:550 +#, python-format +msgid "Submitting %(submission)s to %(count)d people" +msgstr "Отправка %(submission)s для %(count)d людей" + +#: models.py:597 +#, python-format +msgid "Submitting message to: %s." 
+msgstr "Отправка сообщения для: %s" + +#: models.py:606 +#, python-format +msgid "Message %(subscription)s failed with error: %(error)s" +msgstr "Отправить сообщение в %(subscription)s не удалось, ошибка: %(error)s" + +#: models.py:630 +#, python-format +msgid "Submission of message %s" +msgstr "Отправка сообщения %s" + +#: models.py:673 +msgid "" +"If you select none, the system will automatically find the subscribers for " +"you." +msgstr "Если вы ничего не выберете, тогда система автоматически найдет подписчиков для вас." + +#: models.py:675 +msgid "recipients" +msgstr "получатели" + +#: models.py:680 +msgid "publication date" +msgstr "дата публикации" + +#: models.py:684 +msgid "publish" +msgstr "опубликованная" + +#: models.py:685 +msgid "Publish in archive." +msgstr "Опубликованные в архиве." + +#: models.py:689 +msgid "prepared" +msgstr "подготовленные" + +#: models.py:693 +msgid "sent" +msgstr "отправлено" + +#: models.py:697 +msgid "sending" +msgstr "отправляется" + +#: templates/admin/newsletter/message/change_form.html:7 +#: templates/admin/newsletter/newsletter/change_form.html:7 +#: templates/admin/newsletter/submission/change_form.html:14 +msgid "History" +msgstr "История" + +#: templates/admin/newsletter/message/change_form.html:8 +#: templates/admin/newsletter/newsletter/change_form.html:8 +#: templates/admin/newsletter/submission/change_form.html:15 +msgid "View on site" +msgstr "Смотреть на сайте" + +#: templates/admin/newsletter/message/preview.html:5 +#: templates/admin/newsletter/message/preview.html:19 +msgid "Preview message" +msgstr "Предпросмотр сообщения" + +#: templates/admin/newsletter/message/preview.html:9 +#: templates/admin/newsletter/subscription/confirmimportform.html:8 +#: templates/admin/newsletter/subscription/importform.html:8 +msgid "Home" +msgstr "Главная" + +#: templates/admin/newsletter/message/preview.html:11 +msgid "Message" +msgstr "Сообщение" + +#: templates/admin/newsletter/message/preview.html:22 +#: 
templates/admin/newsletter/subscription/importform.html:28 +msgid "Change" +msgstr "Изменить" + +#: templates/admin/newsletter/message/preview.html:23 +#: templates/admin/newsletter/subscription/importform.html:29 +msgid "Create submission" +msgstr "Создать отправку" + +#: templates/admin/newsletter/message/preview.html:26 +msgid "HTML" +msgstr "HTML" + +#: templates/admin/newsletter/message/preview.html:30 +msgid "Text" +msgstr "Текст" + +#: templates/admin/newsletter/submission/change_form.html:16 +msgid "Submit" +msgstr "Отправить" + +#: templates/admin/newsletter/subscription/change_list.html:8 +msgid "import" +msgstr "Импортировать" + +#: templates/admin/newsletter/subscription/change_list.html:12 +#, python-format +msgid "Add %(name)s" +msgstr "Добавить %(name)s" + +#: templates/admin/newsletter/subscription/confirmimportform.html:3 +#: templates/admin/newsletter/subscription/confirmimportform.html:20 +#: templates/admin/newsletter/subscription/importform.html:3 +#: templates/admin/newsletter/subscription/importform.html:19 +#: templates/admin/newsletter/subscription/importform.html:24 +msgid "Import addresses" +msgstr "Импортировать адреса" + +#: templates/admin/newsletter/subscription/confirmimportform.html:40 +msgid "Confirm" +msgstr "Подтвердить" + +#: templates/admin/newsletter/subscription/importform.html:37 +msgid "Upload" +msgstr "Загрузить" + +#: templates/newsletter/newsletter_detail.html:5 +msgid "Newsletter detail" +msgstr "Подробности рассылок" + +#: templates/newsletter/newsletter_list.html:5 +msgid "Newsletter list" +msgstr "Список рассылок" + +#: templates/newsletter/newsletter_list.html:16 +#: templates/newsletter/subscription_subscribe.html:23 +#: templates/newsletter/subscription_subscribe_user.html:22 +msgid "Subscribe" +msgstr "Подписаться" + +#: templates/newsletter/newsletter_list.html:27 +msgid "Update subscriptions" +msgstr "Обновить подписки" + +#: templates/newsletter/submission_archive.html:5 +#: 
templates/newsletter/submission_archive.html:10 +msgid "Newsletter archive" +msgstr "Архив рассылок" + +#: templates/newsletter/subscription_activate.html:5 +#: templates/newsletter/subscription_activate.html:8 +#: templates/newsletter/subscription_subscribe_activated.html:5 +#: templates/newsletter/subscription_subscribe_activated.html:8 +#: templates/newsletter/subscription_unsubscribe_activated.html:5 +#: templates/newsletter/subscription_unsubscribe_activated.html:8 +#: templates/newsletter/subscription_update_activated.html:5 +#: templates/newsletter/subscription_update_activated.html:8 +msgid "activate" +msgstr "активировать" + +#: templates/newsletter/subscription_activate.html:13 +msgid "Activate" +msgstr "Активировать" + +#: templates/newsletter/subscription_subscribe.html:5 +#: templates/newsletter/subscription_subscribe.html:8 +#: templates/newsletter/subscription_subscribe_email_sent.html:5 +#: templates/newsletter/subscription_subscribe_email_sent.html:8 +#: templates/newsletter/subscription_subscribe_user.html:5 +#: templates/newsletter/subscription_subscribe_user.html:8 +msgid "Newsletter subscribe" +msgstr "Подписаться на рассылку" + +#: templates/newsletter/subscription_subscribe.html:11 +#: templates/newsletter/subscription_unsubscribe.html:11 +#: templates/newsletter/subscription_update.html:11 +msgid "" +"Due to a technical error we were not able to submit your confirmation email." +" This could be because your email address is invalid." +msgstr "Из-за технической ошибки письмо с подтверждением не было отправлено. Возможно ваш адрес электронной почты неправильный." + +#: templates/newsletter/subscription_subscribe_activated.html:10 +msgid "Your subscription has successfully been activated." +msgstr "Ваша подписка была успешно активирована." + +#: templates/newsletter/subscription_subscribe_email_sent.html:10 +msgid "" +"Your subscription request was successfully received and an activation email " +"has been sent to you. 
In that email you will find a link which you need to " +"follow in order to activate your subscription." +msgstr "Ваш запрос на подписку был успешно получен, и email для активации был отправлен Вам. В нем вы должны найти ссылку и перейти по ней, чтобы подтвердить свое решение активировать ​​подписку." + +#: templates/newsletter/subscription_subscribe_user.html:19 +msgid "Do you want to subscribe to this newsletter?" +msgstr "Вы хотите подписаться на эту рассылку?" + +#: templates/newsletter/subscription_unsubscribe.html:5 +#: templates/newsletter/subscription_unsubscribe.html:8 +#: templates/newsletter/subscription_unsubscribe_email_sent.html:5 +#: templates/newsletter/subscription_unsubscribe_email_sent.html:8 +#: templates/newsletter/subscription_unsubscribe_user.html:5 +#: templates/newsletter/subscription_unsubscribe_user.html:8 +msgid "Newsletter unsubscribe" +msgstr "Отписаться от рассылки" + +#: templates/newsletter/subscription_unsubscribe.html:23 +#: templates/newsletter/subscription_unsubscribe_user.html:23 +msgid "Unsubscribe" +msgstr "Отписаться" + +#: templates/newsletter/subscription_unsubscribe_activated.html:10 +msgid "You have successfully been unsubscribed." +msgstr "Вы успешно отписались." + +#: templates/newsletter/subscription_unsubscribe_email_sent.html:10 +msgid "" +"Your unsubscription request has successfully been received. An email has " +"been sent to you with a link you need to follow in order to confirm your " +"unsubscription." +msgstr "Ваш запрос на отписку был успешно получен.Сообщение со ссылкой было отправлено Вам, вы должны перейти по ней, чтобы подтвердить свою ​​отписку." + +#: templates/newsletter/subscription_unsubscribe_user.html:20 +msgid "Do you want to unsubscribe from this newsletter?" +msgstr "Вы хотите отменить подписку на эту рассылку?" 
+ +#: templates/newsletter/subscription_update.html:5 +#: templates/newsletter/subscription_update.html:8 +#: templates/newsletter/subscription_update_email_sent.html:5 +#: templates/newsletter/subscription_update_email_sent.html:8 +msgid "Newsletter update" +msgstr "Обновление рассылки" + +#: templates/newsletter/subscription_update.html:23 +msgid "Update subscription" +msgstr "Обновить подписку" + +#: templates/newsletter/subscription_update_activated.html:10 +msgid "Your subscription has successfully been updated." +msgstr "Ваша подписка успешно обновлена." + +#: templates/newsletter/subscription_update_email_sent.html:10 +msgid "" +"Your update request was successfully received and an activation email has " +"been sent to you. In that email you will find a link which you need to " +"follow in order to update your subscription." +msgstr "Ваш запрос на обновление был успешно получен, сообщение для активации было отправлено Вам. В нем вы должны найти ссылку и перейти по ней, чтобы подтвердить свое решение обновить ​​подписку." + +#: templates/widget/image.html:2 +msgid "Currently:" +msgstr "В текущий момент:" + +#: templates/widget/image.html:4 +msgid "Change:" +msgstr "Изменить:" + +#: views.py:114 +msgid "Your changes have been saved." +msgstr "Ваши изменения сохранены." + +#: views.py:305 +#, python-format +msgid "You have been subscribed to %s." +msgstr "Вы подписались на %s." + +#: views.py:309 +#, python-format +msgid "User %(rs)s subscribed to %(my_newsletter)s." +msgstr "Пользователь %(rs)s подписался на %(my_newsletter)s." + +#: views.py:319 +#, python-format +msgid "You are already subscribed to %s." +msgstr "Вы уже подписаны на %s." + +#: views.py:344 +#, python-format +msgid "You have been unsubscribed from %s." +msgstr "Вы отписались от %s." + +#: views.py:348 +#, python-format +msgid "User %(rs)s unsubscribed from %(my_newsletter)s." +msgstr "Пользователь %(rs)s отписался от %(my_newsletter)s." 
+ +#: views.py:361 +#, python-format +msgid "You are not subscribed to %s." +msgstr "Вы не подписаны на %s." diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/locale/ru/LC_MESSAGES/djangojs.mo b/thesisenv/lib/python3.6/site-packages/newsletter/locale/ru/LC_MESSAGES/djangojs.mo new file mode 100644 index 0000000..1418f1f Binary files /dev/null and b/thesisenv/lib/python3.6/site-packages/newsletter/locale/ru/LC_MESSAGES/djangojs.mo differ diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/locale/ru/LC_MESSAGES/djangojs.po b/thesisenv/lib/python3.6/site-packages/newsletter/locale/ru/LC_MESSAGES/djangojs.po new file mode 100644 index 0000000..a5ebf25 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/newsletter/locale/ru/LC_MESSAGES/djangojs.po @@ -0,0 +1,25 @@ +# SOME DESCRIPTIVE TITLE. +# Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER +# This file is distributed under the same license as the PACKAGE package. +# +# Translators: +# Eugene Akentyev , 2013 +msgid "" +msgstr "" +"Project-Id-Version: django-newsletter\n" +"Report-Msgid-Bugs-To: \n" +"POT-Creation-Date: 2012-11-19 21:55+0100\n" +"PO-Revision-Date: 2013-11-20 12:25+0000\n" +"Last-Translator: Eugene Akentyev \n" +"Language-Team: Russian (http://www.transifex.com/dokterbob/django-newsletter/language/ru/)\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Language: ru\n" +"Plural-Forms: nplurals=4; plural=(n%10==1 && n%100!=11 ? 0 : n%10>=2 && n%10<=4 && (n%100<12 || n%100>14) ? 1 : n%10==0 || (n%10>=5 && n%10<=9) || (n%100>=11 && n%100<=14)? 2 : 3);\n" + +#: static/newsletter/admin/js/submit_interface.js:12 +msgid "" +"The submission has been changed. It has to be saved before you can submit. " +"Click OK to proceed with saving, click cancel to continue editing." +msgstr "Посылка изменилась. Необходимо сохранить перед отправкой. Нажмите OK, чтобы сохранить, нажмите отменить, чтобы продолжить редактирование." 
diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/locale/zh_CN/LC_MESSAGES/djangojs.mo b/thesisenv/lib/python3.6/site-packages/newsletter/locale/zh_CN/LC_MESSAGES/djangojs.mo new file mode 100644 index 0000000..2bc0caf Binary files /dev/null and b/thesisenv/lib/python3.6/site-packages/newsletter/locale/zh_CN/LC_MESSAGES/djangojs.mo differ diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/locale/zh_CN/LC_MESSAGES/djangojs.po b/thesisenv/lib/python3.6/site-packages/newsletter/locale/zh_CN/LC_MESSAGES/djangojs.po new file mode 100644 index 0000000..6976e90 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/newsletter/locale/zh_CN/LC_MESSAGES/djangojs.po @@ -0,0 +1,25 @@ +# SOME DESCRIPTIVE TITLE. +# Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER +# This file is distributed under the same license as the PACKAGE package. +# +# Translators: +# mozillazg , 2014 +msgid "" +msgstr "" +"Project-Id-Version: django-newsletter\n" +"Report-Msgid-Bugs-To: \n" +"POT-Creation-Date: 2012-11-19 21:55+0100\n" +"PO-Revision-Date: 2014-01-21 08:15+0000\n" +"Last-Translator: mozillazg \n" +"Language-Team: Chinese (China) (http://www.transifex.com/dokterbob/django-newsletter/language/zh_CN/)\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Language: zh_CN\n" +"Plural-Forms: nplurals=1; plural=0;\n" + +#: static/newsletter/admin/js/submit_interface.js:12 +msgid "" +"The submission has been changed. It has to be saved before you can submit. " +"Click OK to proceed with saving, click cancel to continue editing." 
+msgstr "提交已更改。在您提交前它必须被保存。点击确定继续保存,点击取消继续编辑。" diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/management/__init__.py b/thesisenv/lib/python3.6/site-packages/newsletter/management/__init__.py new file mode 100644 index 0000000..0a8f859 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/newsletter/management/__init__.py @@ -0,0 +1,2 @@ +# -*- coding: utf-8 -*- +# management commands for the newsletter diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/management/commands/__init__.py b/thesisenv/lib/python3.6/site-packages/newsletter/management/commands/__init__.py new file mode 100644 index 0000000..0a8f859 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/newsletter/management/commands/__init__.py @@ -0,0 +1,2 @@ +# -*- coding: utf-8 -*- +# management commands for the newsletter diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/management/commands/submit_newsletter.py b/thesisenv/lib/python3.6/site-packages/newsletter/management/commands/submit_newsletter.py new file mode 100644 index 0000000..bc869f4 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/newsletter/management/commands/submit_newsletter.py @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +""" +actual sending of the submissions +""" +import logging + +from django.core.management.base import BaseCommand +from django.utils.translation import ugettext as _ + +from newsletter.models import Submission + + +class Command(BaseCommand): + help = _("Submit pending messages.") + + def handle(self, *args, **options): + # Setup logging based on verbosity: 1 -> INFO, >1 -> DEBUG + verbosity = int(options['verbosity']) + logger = logging.getLogger('newsletter') + if verbosity == 0: + logger.setLevel(logging.WARN) + elif verbosity == 1: # default + logger.setLevel(logging.INFO) + elif verbosity > 1: + logger.setLevel(logging.DEBUG) + if verbosity > 2: + logger = logging.getLogger() + logger.setLevel(logging.DEBUG) + + logger.info(_('Submitting queued newsletter 
mailings')) + + # Call submission + Submission.submit_queue() diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/migrations/0001_initial.py b/thesisenv/lib/python3.6/site-packages/newsletter/migrations/0001_initial.py new file mode 100644 index 0000000..2f27395 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/newsletter/migrations/0001_initial.py @@ -0,0 +1,135 @@ +# -*- coding: utf-8 -*- +from __future__ import unicode_literals + +from django.db import models, migrations +import sorl.thumbnail.fields +import newsletter.utils +import django.utils.timezone +from django.conf import settings + + +class Migration(migrations.Migration): + + dependencies = [ + ('sites', '0001_initial'), + migrations.swappable_dependency(settings.AUTH_USER_MODEL), + ] + + operations = [ + migrations.CreateModel( + name='Article', + fields=[ + ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), + ('sortorder', models.PositiveIntegerField(help_text='Sort order determines the order in which articles are concatenated in a post.', verbose_name='sort order', db_index=True)), + ('title', models.CharField(max_length=200, verbose_name='title')), + ('text', models.TextField(verbose_name='text')), + ('url', models.URLField(null=True, verbose_name='link', blank=True)), + ('image', sorl.thumbnail.fields.ImageField(upload_to='newsletter/images/%Y/%m/%d', null=True, verbose_name='image', blank=True)), + ], + options={ + 'ordering': ('sortorder',), + 'verbose_name': 'article', + 'verbose_name_plural': 'articles', + }, + bases=(models.Model,), + ), + migrations.CreateModel( + name='Message', + fields=[ + ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), + ('title', models.CharField(max_length=200, verbose_name='title')), + ('slug', models.SlugField(verbose_name='slug')), + ('date_create', models.DateTimeField(auto_now_add=True, verbose_name='created')), + ('date_modify', 
models.DateTimeField(auto_now=True, verbose_name='modified')), + ], + options={ + 'verbose_name': 'message', + 'verbose_name_plural': 'messages', + }, + bases=(models.Model,), + ), + migrations.CreateModel( + name='Newsletter', + fields=[ + ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), + ('title', models.CharField(max_length=200, verbose_name='newsletter title')), + ('slug', models.SlugField(unique=True)), + ('email', models.EmailField(help_text='Sender e-mail', max_length=75, verbose_name='e-mail')), + ('sender', models.CharField(help_text='Sender name', max_length=200, verbose_name='sender')), + ('visible', models.BooleanField(default=True, db_index=True, verbose_name='visible')), + ('send_html', models.BooleanField(default=True, help_text='Whether or not to send HTML versions of e-mails.', verbose_name='send html')), + ('site', models.ManyToManyField(default=newsletter.utils.get_default_sites, to='sites.Site')), + ], + options={ + 'verbose_name': 'newsletter', + 'verbose_name_plural': 'newsletters', + }, + bases=(models.Model,), + ), + migrations.CreateModel( + name='Submission', + fields=[ + ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), + ('publish_date', models.DateTimeField(default=django.utils.timezone.now, null=True, verbose_name='publication date', db_index=True, blank=True)), + ('publish', models.BooleanField(default=True, help_text='Publish in archive.', db_index=True, verbose_name='publish')), + ('prepared', models.BooleanField(default=False, verbose_name='prepared', db_index=True, editable=False)), + ('sent', models.BooleanField(default=False, verbose_name='sent', db_index=True, editable=False)), + ('sending', models.BooleanField(default=False, verbose_name='sending', db_index=True, editable=False)), + ('message', models.ForeignKey(verbose_name='message', to='newsletter.Message', on_delete=models.CASCADE)), + ('newsletter', 
models.ForeignKey(editable=False, to='newsletter.Newsletter', verbose_name='newsletter', on_delete=models.CASCADE)), + ], + options={ + 'verbose_name': 'submission', + 'verbose_name_plural': 'submissions', + }, + bases=(models.Model,), + ), + migrations.CreateModel( + name='Subscription', + fields=[ + ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), + ('name_field', models.CharField(db_column='name', max_length=30, blank=True, help_text='optional', null=True, verbose_name='name')), + ('email_field', models.EmailField(db_column='email', max_length=75, blank=True, null=True, verbose_name='e-mail', db_index=True)), + ('ip', models.IPAddressField(null=True, verbose_name='IP address', blank=True)), + ('create_date', models.DateTimeField(default=django.utils.timezone.now, editable=False)), + ('activation_code', models.CharField(default=newsletter.utils.make_activation_code, max_length=40, verbose_name='activation code')), + ('subscribed', models.BooleanField(default=False, db_index=True, verbose_name='subscribed')), + ('subscribe_date', models.DateTimeField(null=True, verbose_name='subscribe date', blank=True)), + ('unsubscribed', models.BooleanField(default=False, db_index=True, verbose_name='unsubscribed')), + ('unsubscribe_date', models.DateTimeField(null=True, verbose_name='unsubscribe date', blank=True)), + ('newsletter', models.ForeignKey(verbose_name='newsletter', to='newsletter.Newsletter', on_delete=models.CASCADE)), + ('user', models.ForeignKey(verbose_name='user', blank=True, to=settings.AUTH_USER_MODEL, null=True, on_delete=models.CASCADE)), + ], + options={ + 'verbose_name': 'subscription', + 'verbose_name_plural': 'subscriptions', + }, + bases=(models.Model,), + ), + migrations.AlterUniqueTogether( + name='subscription', + unique_together=set([('user', 'email_field', 'newsletter')]), + ), + migrations.AddField( + model_name='submission', + name='subscriptions', + field=models.ManyToManyField(help_text='If 
you select none, the system will automatically find the subscribers for you.', to='newsletter.Subscription', db_index=True, verbose_name='recipients', blank=True), + preserve_default=True, + ), + migrations.AddField( + model_name='message', + name='newsletter', + field=models.ForeignKey(verbose_name='newsletter', to='newsletter.Newsletter', on_delete=models.CASCADE), + preserve_default=True, + ), + migrations.AlterUniqueTogether( + name='message', + unique_together=set([('slug', 'newsletter')]), + ), + migrations.AddField( + model_name='article', + name='post', + field=models.ForeignKey(related_name='articles', verbose_name='message', to='newsletter.Message', on_delete=models.CASCADE), + preserve_default=True, + ), + ] diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/migrations/0002_auto_20150416_1555.py b/thesisenv/lib/python3.6/site-packages/newsletter/migrations/0002_auto_20150416_1555.py new file mode 100644 index 0000000..45a28b6 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/newsletter/migrations/0002_auto_20150416_1555.py @@ -0,0 +1,43 @@ +# -*- coding: utf-8 -*- +from __future__ import unicode_literals + +from django.db import models, migrations +import django.db.models.manager +import django.contrib.sites.managers + + +class Migration(migrations.Migration): + + dependencies = [ + ('newsletter', '0001_initial'), + ] + + operations = [ + migrations.AlterField( + model_name='newsletter', + name='email', + field=models.EmailField(help_text='Sender e-mail', max_length=254, verbose_name='e-mail'), + ), + migrations.AlterField( + model_name='subscription', + name='email_field', + field=models.EmailField(db_column='email', max_length=254, blank=True, null=True, verbose_name='e-mail', db_index=True), + ), + ] + + # if using Django version 1.8 and later also apply AlterModelManagers and AlterField to GenericIPAddressField + if django.VERSION >= (1,8): + operations += [ + migrations.AlterModelManagers( + name='newsletter', + managers=[ + 
('objects', django.db.models.manager.Manager()), + ('on_site', django.contrib.sites.managers.CurrentSiteManager()), + ], + ), + migrations.AlterField( + model_name='subscription', + name='ip', + field=models.GenericIPAddressField(null=True, verbose_name='IP address', blank=True), + ) + ] \ No newline at end of file diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/migrations/0003_auto_20160226_1518.py b/thesisenv/lib/python3.6/site-packages/newsletter/migrations/0003_auto_20160226_1518.py new file mode 100644 index 0000000..5d0bfad --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/newsletter/migrations/0003_auto_20160226_1518.py @@ -0,0 +1,39 @@ +# -*- coding: utf-8 -*- +from __future__ import unicode_literals +import logging +logger = logging.getLogger(__name__) + +from django.db import migrations, models + + +def renumerate_article_sortorder(apps, schema_editor): + """ Renumerate articles for consistent and guaranteed unique sortorder. """ + + Message = apps.get_model('newsletter', 'Message') + + for message in Message.objects.all(): + for index, article in enumerate(message.articles.all()): + # We're using the fact that articles are ordered by default + + article.sortorder = (index + 1) * 10 + article.save() + + +class Migration(migrations.Migration): + + dependencies = [ + ('newsletter', '0002_auto_20150416_1555'), + ] + + operations = [ + migrations.AlterField( + model_name='article', + name='sortorder', + field=models.PositiveIntegerField(help_text='Sort order determines the order in which articles are concatenated in a post.', verbose_name='sort order', blank=True), + ), + migrations.RunPython(renumerate_article_sortorder), + migrations.AlterUniqueTogether( + name='article', + unique_together=set([('post', 'sortorder')]), + ), + ] diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/migrations/0004_auto_20180407_1043.py b/thesisenv/lib/python3.6/site-packages/newsletter/migrations/0004_auto_20180407_1043.py new file mode 100644 
index 0000000..d031964 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/newsletter/migrations/0004_auto_20180407_1043.py @@ -0,0 +1,23 @@ +# -*- coding: utf-8 -*- +from __future__ import unicode_literals + +from django.db import migrations, models +import newsletter.models + + +class Migration(migrations.Migration): + + dependencies = [ + ('newsletter', '0003_auto_20160226_1518'), + ] + + operations = [ + migrations.AlterField( + model_name='message', + name='newsletter', + field=models.ForeignKey(default=newsletter.models.get_default_newsletter, + verbose_name='newsletter', + to='newsletter.Newsletter', + on_delete=models.CASCADE), + ), + ] diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/migrations/0005_auto_20181026_1408.py b/thesisenv/lib/python3.6/site-packages/newsletter/migrations/0005_auto_20181026_1408.py new file mode 100644 index 0000000..6700c6a --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/newsletter/migrations/0005_auto_20181026_1408.py @@ -0,0 +1,18 @@ +# Generated by Django 2.1 on 2018-10-26 12:08 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('newsletter', '0004_auto_20180407_1043'), + ] + + operations = [ + migrations.AlterField( + model_name='submission', + name='subscriptions', + field=models.ManyToManyField(blank=True, db_index=True, help_text='If you select none, the system will automatically find the subscribers for you.', limit_choices_to={'subscribed': True}, to='newsletter.Subscription', verbose_name='recipients'), + ), + ] diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/migrations/__init__.py b/thesisenv/lib/python3.6/site-packages/newsletter/migrations/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/models.py b/thesisenv/lib/python3.6/site-packages/newsletter/models.py new file mode 100644 index 0000000..9f0922c --- /dev/null +++ 
b/thesisenv/lib/python3.6/site-packages/newsletter/models.py @@ -0,0 +1,732 @@ +import logging +import time +import django + +from django.conf import settings +from django.contrib.sites.models import Site +from django.contrib.sites.managers import CurrentSiteManager +from django.core.mail import EmailMultiAlternatives +from django.db import models +from django.template.loader import select_template +from django.utils.encoding import python_2_unicode_compatible +from django.utils.functional import cached_property +from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import ugettext +from django.utils.timezone import now + +from sorl.thumbnail import ImageField +from distutils.version import LooseVersion + + +from .compat import get_context, reverse +from .utils import ( + make_activation_code, get_default_sites, ACTIONS +) + +logger = logging.getLogger(__name__) + +AUTH_USER_MODEL = getattr(settings, 'AUTH_USER_MODEL', 'auth.User') + + +@python_2_unicode_compatible +class Newsletter(models.Model): + site = models.ManyToManyField(Site, default=get_default_sites) + + title = models.CharField( + max_length=200, verbose_name=_('newsletter title') + ) + slug = models.SlugField(db_index=True, unique=True) + + email = models.EmailField( + verbose_name=_('e-mail'), help_text=_('Sender e-mail') + ) + sender = models.CharField( + max_length=200, verbose_name=_('sender'), help_text=_('Sender name') + ) + + visible = models.BooleanField( + default=True, verbose_name=_('visible'), db_index=True + ) + + send_html = models.BooleanField( + default=True, verbose_name=_('send html'), + help_text=_('Whether or not to send HTML versions of e-mails.') + ) + + objects = models.Manager() + + # Automatically filter the current site + on_site = CurrentSiteManager() + + def get_templates(self, action): + """ + Return a subject, text, HTML tuple with e-mail templates for + a particular action. Returns a tuple with subject, text and e-mail + template. 
+ """ + + assert action in ACTIONS + ('message', ), 'Unknown action: %s' % action + + # Common substitutions for filenames + tpl_subst = { + 'action': action, + 'newsletter': self.slug + } + + # Common root path for all the templates + tpl_root = 'newsletter/message/' + + subject_template = select_template([ + tpl_root + '%(newsletter)s/%(action)s_subject.txt' % tpl_subst, + tpl_root + '%(action)s_subject.txt' % tpl_subst, + ]) + + text_template = select_template([ + tpl_root + '%(newsletter)s/%(action)s.txt' % tpl_subst, + tpl_root + '%(action)s.txt' % tpl_subst, + ]) + + if self.send_html: + html_template = select_template([ + tpl_root + '%(newsletter)s/%(action)s.html' % tpl_subst, + tpl_root + '%(action)s.html' % tpl_subst, + ]) + else: + # HTML templates are not required + html_template = None + + return (subject_template, text_template, html_template) + + def __str__(self): + return self.title + + class Meta: + verbose_name = _('newsletter') + verbose_name_plural = _('newsletters') + + def get_absolute_url(self): + return ( + 'newsletter_detail', (), + {'newsletter_slug': self.slug} + ) + + def subscribe_url(self): + return ( + 'newsletter_subscribe_request', (), + {'newsletter_slug': self.slug} + ) + + def unsubscribe_url(self): + return ( + 'newsletter_unsubscribe_request', (), + {'newsletter_slug': self.slug} + ) + + def update_url(self): + return ( + 'newsletter_update_request', (), + {'newsletter_slug': self.slug} + ) + + def archive_url(self): + return ( + 'newsletter_archive', (), + {'newsletter_slug': self.slug} + ) + + def get_sender(self): + return get_address(self.sender, self.email) + + def get_subscriptions(self): + logger.debug(u'Looking up subscribers for %s', self) + + return Subscription.objects.filter(newsletter=self, subscribed=True) + + @classmethod + def get_default(cls): + try: + return cls.objects.all()[0].pk + except IndexError: + return None + + +@python_2_unicode_compatible +class Subscription(models.Model): + user = 
models.ForeignKey( + AUTH_USER_MODEL, blank=True, null=True, verbose_name=_('user'), + on_delete=models.CASCADE + ) + + name_field = models.CharField( + db_column='name', max_length=30, blank=True, null=True, + verbose_name=_('name'), help_text=_('optional') + ) + + def get_name(self): + if self.user: + return self.user.get_full_name() + return self.name_field + + def set_name(self, name): + if not self.user: + self.name_field = name + name = property(get_name, set_name) + + email_field = models.EmailField( + db_column='email', verbose_name=_('e-mail'), db_index=True, + blank=True, null=True + ) + + def get_email(self): + if self.user: + return self.user.email + return self.email_field + + def set_email(self, email): + if not self.user: + self.email_field = email + email = property(get_email, set_email) + + def update(self, action): + """ + Update subscription according to requested action: + subscribe/unsubscribe/update/, then save the changes. + """ + + assert action in ('subscribe', 'update', 'unsubscribe') + + # If a new subscription or update, make sure it is subscribed + # Else, unsubscribe + if action == 'subscribe' or action == 'update': + self.subscribed = True + else: + self.unsubscribed = True + + logger.debug( + _(u'Updated subscription %(subscription)s to %(action)s.'), + { + 'subscription': self, + 'action': action + } + ) + + # This triggers the subscribe() and/or unsubscribe() methods, taking + # care of stuff like maintaining the (un)subscribe date. + self.save() + + def _subscribe(self): + """ + Internal helper method for managing subscription state + during subscription. + """ + logger.debug(u'Subscribing subscription %s.', self) + + self.subscribe_date = now() + self.subscribed = True + self.unsubscribed = False + + def _unsubscribe(self): + """ + Internal helper method for managing subscription state + during unsubscription. 
+ """ + logger.debug(u'Unsubscribing subscription %s.', self) + + self.subscribed = False + self.unsubscribed = True + self.unsubscribe_date = now() + + def save(self, *args, **kwargs): + """ + Perform some basic validation and state maintenance of Subscription. + TODO: Move this code to a more suitable place (i.e. `clean()`) and + cleanup the code. Refer to comment below and + https://docs.djangoproject.com/en/dev/ref/models/instances/#django.db.models.Model.clean + """ + assert self.user or self.email_field, \ + _('Neither an email nor a username is set. This asks for ' + 'inconsistency!') + assert ((self.user and not self.email_field) or + (self.email_field and not self.user)), \ + _('If user is set, email must be null and vice versa.') + + # This is a lame way to find out if we have changed but using Django + # API internals is bad practice. This is necessary to discriminate + # from a state where we have never been subscribed but is mostly for + # backward compatibility. It might be very useful to make this just + # one attribute 'subscribe' later. In this case unsubscribed can be + # replaced by a method property. + + if self.pk: + assert(Subscription.objects.filter(pk=self.pk).count() == 1) + + subscription = Subscription.objects.get(pk=self.pk) + old_subscribed = subscription.subscribed + old_unsubscribed = subscription.unsubscribed + + # If we are subscribed now and we used not to be so, subscribe. + # If we user to be unsubscribed but are not so anymore, subscribe. + if ((self.subscribed and not old_subscribed) or + (old_unsubscribed and not self.unsubscribed)): + self._subscribe() + + assert not self.unsubscribed + assert self.subscribed + + # If we are unsubcribed now and we used not to be so, unsubscribe. + # If we used to be subscribed but are not subscribed anymore, + # unsubscribe. 
+ elif ((self.unsubscribed and not old_unsubscribed) or + (old_subscribed and not self.subscribed)): + self._unsubscribe() + + assert not self.subscribed + assert self.unsubscribed + else: + if self.subscribed: + self._subscribe() + elif self.unsubscribed: + self._unsubscribe() + + super(Subscription, self).save(*args, **kwargs) + + ip = models.GenericIPAddressField(_("IP address"), blank=True, null=True) + + newsletter = models.ForeignKey( + Newsletter, verbose_name=_('newsletter'), on_delete=models.CASCADE + ) + + create_date = models.DateTimeField(editable=False, default=now) + + activation_code = models.CharField( + verbose_name=_('activation code'), max_length=40, + default=make_activation_code + ) + + subscribed = models.BooleanField( + default=False, verbose_name=_('subscribed'), db_index=True + ) + subscribe_date = models.DateTimeField( + verbose_name=_("subscribe date"), null=True, blank=True + ) + + # This should be a pseudo-field, I reckon. + unsubscribed = models.BooleanField( + default=False, verbose_name=_('unsubscribed'), db_index=True + ) + unsubscribe_date = models.DateTimeField( + verbose_name=_("unsubscribe date"), null=True, blank=True + ) + + def __str__(self): + if self.name: + return _(u"%(name)s <%(email)s> to %(newsletter)s") % { + 'name': self.name, + 'email': self.email, + 'newsletter': self.newsletter + } + + else: + return _(u"%(email)s to %(newsletter)s") % { + 'email': self.email, + 'newsletter': self.newsletter + } + + class Meta: + verbose_name = _('subscription') + verbose_name_plural = _('subscriptions') + unique_together = ('user', 'email_field', 'newsletter') + + def get_recipient(self): + return get_address(self.name, self.email) + + def send_activation_email(self, action): + assert action in ACTIONS, 'Unknown action: %s' % action + + (subject_template, text_template, html_template) = \ + self.newsletter.get_templates(action) + + variable_dict = { + 'subscription': self, + 'site': Site.objects.get_current(), + 'newsletter': 
self.newsletter, + 'date': self.subscribe_date, + 'STATIC_URL': settings.STATIC_URL, + 'MEDIA_URL': settings.MEDIA_URL + } + + unescaped_context = get_context(variable_dict, autoescape=False) + + subject = subject_template.render(unescaped_context).strip() + text = text_template.render(unescaped_context) + + message = EmailMultiAlternatives( + subject, text, + from_email=self.newsletter.get_sender(), + to=[self.email] + ) + + if html_template: + escaped_context = get_context(variable_dict) + + message.attach_alternative( + html_template.render(escaped_context), "text/html" + ) + + message.send() + + logger.debug( + u'Activation email sent for action "%(action)s" to %(subscriber)s ' + u'with activation code "%(action_code)s".', { + 'action_code': self.activation_code, + 'action': action, + 'subscriber': self + } + ) + + def subscribe_activate_url(self): + return ('newsletter_update_activate', (), { + 'newsletter_slug': self.newsletter.slug, + 'email': self.email, + 'action': 'subscribe', + 'activation_code': self.activation_code + }) + + def unsubscribe_activate_url(self): + return ('newsletter_update_activate', (), { + 'newsletter_slug': self.newsletter.slug, + 'email': self.email, + 'action': 'unsubscribe', + 'activation_code': self.activation_code + }) + + def update_activate_url(self): + return ('newsletter_update_activate', (), { + 'newsletter_slug': self.newsletter.slug, + 'email': self.email, + 'action': 'update', + 'activation_code': self.activation_code + }) + + +@python_2_unicode_compatible +class Article(models.Model): + """ + An Article within a Message which will be send through a Submission. 
+ """ + + sortorder = models.PositiveIntegerField( + help_text=_('Sort order determines the order in which articles are ' + 'concatenated in a post.'), + verbose_name=_('sort order'), blank=True + ) + + title = models.CharField(max_length=200, verbose_name=_('title')) + text = models.TextField(verbose_name=_('text')) + + url = models.URLField( + verbose_name=_('link'), blank=True, null=True + ) + + # Make this a foreign key for added elegance + image = ImageField( + upload_to='newsletter/images/%Y/%m/%d', blank=True, null=True, + verbose_name=_('image') + ) + + # Message this article is associated with + # TODO: Refactor post to message (post is legacy notation). + post = models.ForeignKey( + 'Message', verbose_name=_('message'), related_name='articles', + on_delete=models.CASCADE + ) + + class Meta: + ordering = ('sortorder',) + verbose_name = _('article') + verbose_name_plural = _('articles') + unique_together = ('post', 'sortorder') + + def __str__(self): + return self.title + + def save(self, **kwargs): + if self.sortorder is None: + # If saving a new object get the next available Article ordering + # as to assure uniqueness. + self.sortorder = self.post.get_next_article_sortorder() + + super(Article, self).save() + + +def get_default_newsletter(): + return Newsletter.get_default() + +@python_2_unicode_compatible +class Message(models.Model): + """ Message as sent through a Submission. 
""" + + title = models.CharField(max_length=200, verbose_name=_('title')) + slug = models.SlugField(verbose_name=_('slug')) + + newsletter = models.ForeignKey( + Newsletter, verbose_name=_('newsletter'), on_delete=models.CASCADE, default=get_default_newsletter + ) + + date_create = models.DateTimeField( + verbose_name=_('created'), auto_now_add=True, editable=False + ) + date_modify = models.DateTimeField( + verbose_name=_('modified'), auto_now=True, editable=False + ) + + class Meta: + verbose_name = _('message') + verbose_name_plural = _('messages') + unique_together = ('slug', 'newsletter') + + def __str__(self): + try: + return _(u"%(title)s in %(newsletter)s") % { + 'title': self.title, + 'newsletter': self.newsletter + } + except Newsletter.DoesNotExist: + logger.warning('No newsletter has been set for this message yet.') + return self.title + + def get_next_article_sortorder(self): + """ Get next available sortorder for Article. """ + + next_order = self.articles.aggregate( + models.Max('sortorder') + )['sortorder__max'] + + if next_order: + return next_order + 10 + else: + return 10 + + @cached_property + def _templates(self): + """Return a (subject_template, text_template, html_template) tuple.""" + return self.newsletter.get_templates('message') + + @property + def subject_template(self): + return self._templates[0] + + @property + def text_template(self): + return self._templates[1] + + @property + def html_template(self): + return self._templates[2] + + @classmethod + def get_default(cls): + try: + return cls.objects.order_by('-date_create').all()[0] + except IndexError: + return None + + +@python_2_unicode_compatible +class Submission(models.Model): + """ + Submission represents a particular Message as it is being submitted + to a list of Subscribers. This is where actual queueing and submission + happen. 
+ """ + class Meta: + verbose_name = _('submission') + verbose_name_plural = _('submissions') + + def __str__(self): + return _(u"%(newsletter)s on %(publish_date)s") % { + 'newsletter': self.message, + 'publish_date': self.publish_date + } + + @cached_property + def extra_headers(self): + return { + 'List-Unsubscribe': 'http://%s%s' % ( + Site.objects.get_current().domain, + reverse('newsletter_unsubscribe_request', + args=[self.message.newsletter.slug]) + ), + } + + def submit(self): + subscriptions = self.subscriptions.filter(subscribed=True) + + logger.info( + ugettext(u"Submitting %(submission)s to %(count)d people"), + {'submission': self, 'count': subscriptions.count()} + ) + + assert self.publish_date < now(), \ + 'Something smells fishy; submission time in future.' + + self.sending = True + self.save() + + try: + for idx, subscription in enumerate(subscriptions, start=1): + if hasattr(settings, 'NEWSLETTER_EMAIL_DELAY'): + time.sleep(settings.NEWSLETTER_EMAIL_DELAY) + if hasattr(settings, 'NEWSLETTER_BATCH_SIZE') and settings.NEWSLETTER_BATCH_SIZE > 0: + if idx % settings.NEWSLETTER_BATCH_SIZE == 0: + time.sleep(settings.NEWSLETTER_BATCH_DELAY) + self.send_message(subscription) + self.sent = True + + finally: + self.sending = False + self.save() + + def send_message(self, subscription): + variable_dict = { + 'subscription': subscription, + 'site': Site.objects.get_current(), + 'submission': self, + 'message': self.message, + 'newsletter': self.newsletter, + 'date': self.publish_date, + 'STATIC_URL': settings.STATIC_URL, + 'MEDIA_URL': settings.MEDIA_URL + } + + unescaped_context = get_context(variable_dict, autoescape=False) + + subject = self.message.subject_template.render( + unescaped_context).strip() + text = self.message.text_template.render(unescaped_context) + + message = EmailMultiAlternatives( + subject, text, + from_email=self.newsletter.get_sender(), + to=[subscription.get_recipient()], + headers=self.extra_headers, + ) + + if 
self.message.html_template: + escaped_context = get_context(variable_dict) + + message.attach_alternative( + self.message.html_template.render(escaped_context), + "text/html" + ) + + try: + logger.debug( + ugettext(u'Submitting message to: %s.'), + subscription + ) + + message.send() + + except Exception as e: + # TODO: Test coverage for this branch. + logger.error( + ugettext(u'Message %(subscription)s failed ' + u'with error: %(error)s'), + {'subscription': subscription, + 'error': e} + ) + + @classmethod + def submit_queue(cls): + todo = cls.objects.filter( + prepared=True, sent=False, sending=False, + publish_date__lt=now() + ) + + for submission in todo: + submission.submit() + + @classmethod + def from_message(cls, message): + logger.debug(ugettext('Submission of message %s'), message) + submission = cls() + submission.message = message + submission.newsletter = message.newsletter + submission.save() + try: + submission.subscriptions.set(message.newsletter.get_subscriptions()) + except AttributeError: # Django < 1.10 + submission.subscriptions = message.newsletter.get_subscriptions() + return submission + + def save(self, **kwargs): + """ Set the newsletter from associated message upon saving. 
""" + assert self.message.newsletter + + self.newsletter = self.message.newsletter + + return super(Submission, self).save() + + def get_absolute_url(self): + assert self.newsletter.slug + assert self.message.slug + + return ( + 'newsletter_archive_detail', (), { + 'newsletter_slug': self.newsletter.slug, + 'year': self.publish_date.year, + 'month': self.publish_date.month, + 'day': self.publish_date.day, + 'slug': self.message.slug + } + ) + + newsletter = models.ForeignKey( + Newsletter, verbose_name=_('newsletter'), editable=False, + on_delete=models.CASCADE + ) + message = models.ForeignKey( + Message, verbose_name=_('message'), editable=True, null=False, + on_delete=models.CASCADE + ) + + subscriptions = models.ManyToManyField( + 'Subscription', + help_text=_('If you select none, the system will automatically find ' + 'the subscribers for you.'), + blank=True, db_index=True, verbose_name=_('recipients'), + limit_choices_to={'subscribed': True} + ) + + publish_date = models.DateTimeField( + verbose_name=_('publication date'), blank=True, null=True, + default=now, db_index=True + ) + publish = models.BooleanField( + default=True, verbose_name=_('publish'), + help_text=_('Publish in archive.'), db_index=True + ) + + prepared = models.BooleanField( + default=False, verbose_name=_('prepared'), + db_index=True, editable=False + ) + sent = models.BooleanField( + default=False, verbose_name=_('sent'), + db_index=True, editable=False + ) + sending = models.BooleanField( + default=False, verbose_name=_('sending'), + db_index=True, editable=False + ) + +def get_address(name, email): + # Converting name to ascii for compatibility with django < 1.9. + # Remove this when django 1.8 is no longer supported. 
+ if LooseVersion(django.get_version()) < LooseVersion('1.9'): + name = name.encode('ascii', 'ignore').decode('ascii').strip() + if name: + return u'%s <%s>' % (name, email) + else: + return u'%s' % email diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/settings.py b/thesisenv/lib/python3.6/site-packages/newsletter/settings.py new file mode 100644 index 0000000..9ab91d3 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/newsletter/settings.py @@ -0,0 +1,105 @@ +from importlib import import_module + +from django.conf import settings as django_settings +from django.core.exceptions import ImproperlyConfigured + +from .utils import Singleton + + +class Settings(object): + """ + A settings object that proxies settings and handles defaults, inspired + by `django-appconf` and the way it works in `django-rest-framework`. + + By default, a single instance of this class is created as `_settings`, + from which `_SETTING_NAME` can be accessed as `SETTING_NAME`, i.e.:: + + from myapp.settings import myapp_settings + + if myapp_settings.SETTING_NAME: + # DO FUNKY DANCE + + If a setting has not been explicitly defined in Django's settings, defaults + can be specified as `DEFAULT_SETTING_NAME` class variable or property. + """ + + __metaclass__ = Singleton + + def __init__(self): + """ + Assert app-specific prefix. + """ + assert hasattr(self, 'settings_prefix'), 'No prefix specified.' + + def __getattr__(self, attr): + """ + Return Django setting `PREFIX_SETTING` if explicitly specified, + otherwise return `PREFIX_SETTING_DEFAULT` if specified. 
+ """ + + if attr.isupper(): + # Require settings to have uppercase characters + + try: + setting = getattr( + django_settings, + '%s_%s' % (self.settings_prefix, attr), + ) + except AttributeError: + if not attr.startswith('DEFAULT_'): + setting = getattr(self, 'DEFAULT_%s' % attr) + else: + raise + + return setting + + else: + # Default behaviour + raise AttributeError( + 'No setting or default available for \'%s\'' % attr + ) + + +class NewsletterSettings(Settings): + """ Django-newsletter specific settings. """ + settings_prefix = 'NEWSLETTER' + + DEFAULT_CONFIRM_EMAIL = True + + @property + def DEFAULT_CONFIRM_EMAIL_SUBSCRIBE(self): + return self.CONFIRM_EMAIL + + @property + def DEFAULT_CONFIRM_EMAIL_UNSUBSCRIBE(self): + return self.CONFIRM_EMAIL + + @property + def DEFAULT_CONFIRM_EMAIL_UPDATE(self): + return self.CONFIRM_EMAIL + + @property + def RICHTEXT_WIDGET(self): + # Import and set the richtext field + NEWSLETTER_RICHTEXT_WIDGET = getattr( + django_settings, "NEWSLETTER_RICHTEXT_WIDGET", "" + ) + + if NEWSLETTER_RICHTEXT_WIDGET: + try: + module, attr = NEWSLETTER_RICHTEXT_WIDGET.rsplit(".", 1) + mod = import_module(module) + return getattr(mod, attr) + except Exception as e: + # Catch ImportError and other exceptions too + # (e.g. 
user sets setting to an integer) + raise ImproperlyConfigured( + "Error while importing setting " + "NEWSLETTER_RICHTEXT_WIDGET %r: %s" % ( + NEWSLETTER_RICHTEXT_WIDGET, e + ) + ) + + return None + +newsletter_settings = NewsletterSettings() diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/static/newsletter/admin/img/icon-no.gif b/thesisenv/lib/python3.6/site-packages/newsletter/static/newsletter/admin/img/icon-no.gif new file mode 100644 index 0000000..1b4ee58 Binary files /dev/null and b/thesisenv/lib/python3.6/site-packages/newsletter/static/newsletter/admin/img/icon-no.gif differ diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/static/newsletter/admin/img/icon-yes.gif b/thesisenv/lib/python3.6/site-packages/newsletter/static/newsletter/admin/img/icon-yes.gif new file mode 100644 index 0000000..7399282 Binary files /dev/null and b/thesisenv/lib/python3.6/site-packages/newsletter/static/newsletter/admin/img/icon-yes.gif differ diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/static/newsletter/admin/img/submitting.gif b/thesisenv/lib/python3.6/site-packages/newsletter/static/newsletter/admin/img/submitting.gif new file mode 100644 index 0000000..471c1a4 Binary files /dev/null and b/thesisenv/lib/python3.6/site-packages/newsletter/static/newsletter/admin/img/submitting.gif differ diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/static/newsletter/admin/img/waiting.gif b/thesisenv/lib/python3.6/site-packages/newsletter/static/newsletter/admin/img/waiting.gif new file mode 100644 index 0000000..bfb9b5f Binary files /dev/null and b/thesisenv/lib/python3.6/site-packages/newsletter/static/newsletter/admin/img/waiting.gif differ diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/static/newsletter/admin/js/submit_interface.js b/thesisenv/lib/python3.6/site-packages/newsletter/static/newsletter/admin/js/submit_interface.js new file mode 100644 index 0000000..c7bbe6d --- /dev/null +++ 
b/thesisenv/lib/python3.6/site-packages/newsletter/static/newsletter/admin/js/submit_interface.js @@ -0,0 +1,26 @@ +var SubmitInterface = { + changed: false, + + init: function(submitname) { + var submitlink = django.jQuery(submitname); + var initial_href = submitlink.attr('href'); + submitlink.click(function() { + SubmitInterface.process(initial_href); + }); + submitlink.attr('href', '#'); + django.jQuery('form:first :input').change(function() { + SubmitInterface.changed = true; + }); + }, + + process: function(href) { + if (SubmitInterface.changed) { + var result = confirm(gettext('The submission has been changed. It has to be saved before you can submit. Click OK to proceed with saving, click cancel to continue editing.')); + if (result) { + django.jQuery('form:first [name="_continue"]').click(); + } + } else { + window.location = href; + } + } +}; diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/static/newsletter/admin/js/subscriber_lookup.js b/thesisenv/lib/python3.6/site-packages/newsletter/static/newsletter/admin/js/subscriber_lookup.js new file mode 100644 index 0000000..f3f79c9 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/newsletter/static/newsletter/admin/js/subscriber_lookup.js @@ -0,0 +1,34 @@ +var JsonSubscribers = { + init: function(inputname, add) { + inp = document.getElementById(inputname); + addEvent(inp, "change", function(e) { JsonSubscribers.setSubscribers(inp.value); }); + if (add==true && inp.value != "") { + JsonSubscribers.setSubscribers(inp.value); + } + }, + + setSubscribers: function(id) { + SelectBox.move_all('id_subscriptions_to', 'id_subscriptions_from'); + + if (id) { + xmlhttp.open( "GET", "/admin/newsletter/message/"+id+"/subscribers/json/", true ); + xmlhttp.onreadystatechange=function() { + if (xmlhttp.readyState==4 && xmlhttp.status == 200) { + + objects = eval( "(" + xmlhttp.responseText + ")" ); + + var from_box = document.getElementById('id_subscriptions_from'); + for (var i = 0; (option = 
from_box.options[i]); i++) { + for (j=0;(object = objects[j]);j++) { + if (object.pk.toString() == option.value) { + option.selected = true; + } + } + } + SelectBox.move('id_subscriptions_from', 'id_subscriptions_to'); + } + } + xmlhttp.send(null) + } + } +}; diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/templates/admin/newsletter/message/change_form.html b/thesisenv/lib/python3.6/site-packages/newsletter/templates/admin/newsletter/message/change_form.html new file mode 100644 index 0000000..906d204 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/newsletter/templates/admin/newsletter/message/change_form.html @@ -0,0 +1,10 @@ +{% extends "admin/change_form.html" %} + +{% load i18n admin_urls %} + +{% block object-tools-items %} + {{ block.super }} + +
  • {% trans "Preview" %}
  • + {% comment
  • {% trans "Prepare submission" %}
  • {% endcomment %} +{% endblock %} diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/templates/admin/newsletter/message/preview.html b/thesisenv/lib/python3.6/site-packages/newsletter/templates/admin/newsletter/message/preview.html new file mode 100644 index 0000000..082d85c --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/newsletter/templates/admin/newsletter/message/preview.html @@ -0,0 +1,36 @@ +{% extends "admin/base_site.html" %} + +{% load i18n %} + +{% block title %}{% trans "Preview message" %}{{ block.super }}{% endblock %} + +{% block breadcrumbs %} + +{% endblock %} + + +{% block content %} +

    {% trans "Preview message" %}

    +
    + + {% if message.newsletter.send_html %} +

    {% trans "HTML" %}

    + + {% endif %} + +

    {% trans "Text" %}

    + +
    +
    +
    + +{% endblock %} diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/templates/admin/newsletter/submission/change_form.html b/thesisenv/lib/python3.6/site-packages/newsletter/templates/admin/newsletter/submission/change_form.html new file mode 100644 index 0000000..619b1c8 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/newsletter/templates/admin/newsletter/submission/change_form.html @@ -0,0 +1,24 @@ +{% extends "admin/change_form.html" %} + +{% load i18n admin_urls %} +{% load static from staticfiles %} + +{% block extrahead %} +{{ block.super }} + + + +{% endblock %} + +{% block object-tools-items %} + {{ block.super }} + +
  • {% trans "Submit" %}
  • +{% endblock %} + +{% block after_related_objects %}{{ block.super }}{% endblock %} diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/templates/admin/newsletter/subscription/change_list.html b/thesisenv/lib/python3.6/site-packages/newsletter/templates/admin/newsletter/subscription/change_list.html new file mode 100644 index 0000000..9d10608 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/newsletter/templates/admin/newsletter/subscription/change_list.html @@ -0,0 +1,8 @@ +{% extends "admin/change_list.html" %} +{% load i18n admin_urls %} + +{% block object-tools-items %} +
  • {% trans "Import" %}
  • + + {{ block.super }} +{% endblock %} diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/templates/admin/newsletter/subscription/confirmimportform.html b/thesisenv/lib/python3.6/site-packages/newsletter/templates/admin/newsletter/subscription/confirmimportform.html new file mode 100644 index 0000000..fb37242 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/newsletter/templates/admin/newsletter/subscription/confirmimportform.html @@ -0,0 +1,46 @@ +{% extends "admin/base_site.html" %} +{% load i18n %} +{% block title %}{% trans "Import addresses" %}{{ block.super }}{% endblock %} + +{% block breadcrumbs %} + +{% endblock %} + +{% block content %} +

    {% trans "Confirm import" %}

    +
    +
      + {% for email, name in subscribers.items %} +
    • {% if name %}{{ name }} <{{ email }}>{% else %}{{ email }}{% endif %}
    • + {% endfor %} +
    +
    + + {{ form.as_table }} +
    + {% csrf_token %} + +
    +
    +
    +
    + +{% endblock %} diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/templates/admin/newsletter/subscription/importform.html b/thesisenv/lib/python3.6/site-packages/newsletter/templates/admin/newsletter/subscription/importform.html new file mode 100644 index 0000000..de54104 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/newsletter/templates/admin/newsletter/subscription/importform.html @@ -0,0 +1,43 @@ +{% extends "admin/base_site.html" %} +{% load i18n %} +{% block title %}{% trans "Import addresses" %}{{ block.super }}{% endblock %} + +{% block breadcrumbs %} + +{% endblock %} + +{% block content %} +

    {% trans "Import addresses" %}

    +
    + +
    + + {{ form.as_table }} +
    + {% csrf_token %} + +
    +
    +
    +
    + +{% endblock %} diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/templates/newsletter/common.html b/thesisenv/lib/python3.6/site-packages/newsletter/templates/newsletter/common.html new file mode 100644 index 0000000..20ac822 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/newsletter/templates/newsletter/common.html @@ -0,0 +1,15 @@ +{% load i18n %} + + + + + {% block title %}{% trans "Newsletter" %}{% endblock title %} + {% block header %} + {% endblock header %} + + + + {% block body %} + {% endblock body %} + + diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/templates/newsletter/message/message.html b/thesisenv/lib/python3.6/site-packages/newsletter/templates/newsletter/message/message.html new file mode 100644 index 0000000..3861f65 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/newsletter/templates/newsletter/message/message.html @@ -0,0 +1,32 @@ +{% load thumbnail i18n %} + + + + + {{ newsletter.title }}: {{ message.title }} + + +

    {{ newsletter.title }}

    +

    {{ message.title }}

    + {% for article in message.articles.all %} +

    {{ article.title }}

    + + {% thumbnail article.image "200x200" as image %} + + {% endthumbnail %} + +
    {{ article.text|safe }}
    + + {% if article.url %} + + {% endif %} + {% endfor %} + + + + diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/templates/newsletter/message/message.txt b/thesisenv/lib/python3.6/site-packages/newsletter/templates/newsletter/message/message.txt new file mode 100644 index 0000000..1471e47 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/newsletter/templates/newsletter/message/message.txt @@ -0,0 +1,15 @@ +{% load i18n %}++++++++++++++++++++ + +{{ newsletter.title }}: {{ message.title }} + +++++++++++++++++++++ + +{% for article in message.articles.all %} +{{ article.title }} +{{ article.text|striptags|safe }} + +{% endfor %} + +++++++++++++++++++++ + +{% trans "Unsubscribe:" %} http://{{ site }}{% url "newsletter_unsubscribe_request" newsletter.slug %} diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/templates/newsletter/message/message_subject.txt b/thesisenv/lib/python3.6/site-packages/newsletter/templates/newsletter/message/message_subject.txt new file mode 100644 index 0000000..00aa450 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/newsletter/templates/newsletter/message/message_subject.txt @@ -0,0 +1 @@ +{{ newsletter.title }} - {{ message.title }} \ No newline at end of file diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/templates/newsletter/message/subscribe.html b/thesisenv/lib/python3.6/site-packages/newsletter/templates/newsletter/message/subscribe.html new file mode 100644 index 0000000..088a3d0 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/newsletter/templates/newsletter/message/subscribe.html @@ -0,0 +1,20 @@ +{% load i18n %} + + + + + {% blocktrans with title=newsletter.title %}Subscription to {{ title }}{% endblocktrans %} + + + +{% blocktrans with name=subscription.name title=newsletter.title domain=site.domain url=subscription.subscribe_activate_url %}Dear {{ name }}, + +you, or someone in your name requested a subscription to {{ title }}. 
+ +If you would like to confirm your subscription, please follow this activation link: +http://{{ domain }}{{ url }} + +Kind regards,{% endblocktrans %} +{{ newsletter.sender }} + + diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/templates/newsletter/message/subscribe.txt b/thesisenv/lib/python3.6/site-packages/newsletter/templates/newsletter/message/subscribe.txt new file mode 100644 index 0000000..e7559b0 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/newsletter/templates/newsletter/message/subscribe.txt @@ -0,0 +1,9 @@ +{% load i18n %}{% blocktrans with name=subscription.name title=newsletter.title domain=site.domain url=subscription.subscribe_activate_url %}Dear {{ name }}, + +you, or someone in your name requested a subscription to {{ title }}. + +If you would like to confirm your subscription, please follow this activation link: +http://{{ domain }}{{ url }} + +Kind regards,{% endblocktrans %} +{{ newsletter.sender }} diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/templates/newsletter/message/subscribe_subject.txt b/thesisenv/lib/python3.6/site-packages/newsletter/templates/newsletter/message/subscribe_subject.txt new file mode 100644 index 0000000..f61b228 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/newsletter/templates/newsletter/message/subscribe_subject.txt @@ -0,0 +1 @@ +{% load i18n %}{{ newsletter.title }} - {% trans "Confirm subscription" %} diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/templates/newsletter/message/unsubscribe.html b/thesisenv/lib/python3.6/site-packages/newsletter/templates/newsletter/message/unsubscribe.html new file mode 100644 index 0000000..4b1a86b --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/newsletter/templates/newsletter/message/unsubscribe.html @@ -0,0 +1,19 @@ +{% load i18n %} + + + + + {% blocktrans with title=newsletter.title %}Unsubscription from {{ title }}{% endblocktrans %} + + +{% blocktrans with name=subscription.name 
title=newsletter.title domain=site.domain url=subscription.unsubscribe_activate_url %}Dear {{ name }}, + +you, or someone in your name requested unsubscription from {{ title }}. + +If you would like to confirm your unsubscription, please follow this activation link: +http://{{ domain }}{{ url }} + +Kind regards,{% endblocktrans %} +{{ newsletter.sender }} + + diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/templates/newsletter/message/unsubscribe.txt b/thesisenv/lib/python3.6/site-packages/newsletter/templates/newsletter/message/unsubscribe.txt new file mode 100644 index 0000000..1d2ccd8 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/newsletter/templates/newsletter/message/unsubscribe.txt @@ -0,0 +1,9 @@ +{% load i18n %}{% blocktrans with name=subscription.name title=newsletter.title domain=site.domain url=subscription.unsubscribe_activate_url %}Dear {{ name }}, + +you, or someone in your name requested unsubscription from {{ title }}. + +If you would like to confirm your unsubscription, please follow this activation link: +http://{{ domain }}{{ url }} + +Kind regards,{% endblocktrans %} +{{ newsletter.sender }} diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/templates/newsletter/message/unsubscribe_subject.txt b/thesisenv/lib/python3.6/site-packages/newsletter/templates/newsletter/message/unsubscribe_subject.txt new file mode 100644 index 0000000..49c68ef --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/newsletter/templates/newsletter/message/unsubscribe_subject.txt @@ -0,0 +1 @@ +{% load i18n %}{{ newsletter.title }} - {% trans "Confirm unsubscription" %} diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/templates/newsletter/message/update.html b/thesisenv/lib/python3.6/site-packages/newsletter/templates/newsletter/message/update.html new file mode 100644 index 0000000..e46235d --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/newsletter/templates/newsletter/message/update.html @@ -0,0 +1,19 @@ 
+{% load i18n %} + + + + + {% blocktrans with title=newsletter.title %}Update of subscription to {{ title }}{% endblocktrans %} + + +{% blocktrans with name=subscription.name title=newsletter.title domain=site.domain url=subscription.update_activate_url %}Dear {{ name }}, + +you, or someone in your name requested updating your personal information for {{ title }}. + +To make changes to your information in our database, please follow this activation link: +http://{{ domain }}{{ url }} + +Kind regards,{% endblocktrans %} +{{ newsletter.sender }} + + diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/templates/newsletter/message/update.txt b/thesisenv/lib/python3.6/site-packages/newsletter/templates/newsletter/message/update.txt new file mode 100644 index 0000000..a37eb08 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/newsletter/templates/newsletter/message/update.txt @@ -0,0 +1,9 @@ +{% load i18n %}{% blocktrans with name=subscription.name title=newsletter.title domain=site.domain url=subscription.update_activate_url %}Dear {{ name }}, + +you, or someone in your name requested updating your personal information for {{ title }}. 
+ +To make changes to your information in our database, please follow this activation link: +http://{{ domain }}{{ url }} + +Kind regards,{% endblocktrans %} +{{ newsletter.sender }} diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/templates/newsletter/message/update_subject.txt b/thesisenv/lib/python3.6/site-packages/newsletter/templates/newsletter/message/update_subject.txt new file mode 100644 index 0000000..217d5f6 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/newsletter/templates/newsletter/message/update_subject.txt @@ -0,0 +1 @@ +{% load i18n %}{{ newsletter.title }} - {% trans "Update information" %} diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/templates/newsletter/newsletter_detail.html b/thesisenv/lib/python3.6/site-packages/newsletter/templates/newsletter/newsletter_detail.html new file mode 100644 index 0000000..fe23a1f --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/newsletter/templates/newsletter/newsletter_detail.html @@ -0,0 +1,30 @@ +{% extends "newsletter/common.html" %} + +{% load i18n %} + +{% block title %}{% trans "Newsletter detail" %}{% endblock title %} + +{% block body %} + + + + + + + + {% if not user.is_authenticated %} + + + + {% endif %} + + + + + + + + + +
    {% trans "Newsletter" %} {{ object.title }}
    {% trans "Subscribe" %}
    {% trans "Update" %}
    {% trans "Unsubscribe" %}
    {% trans "Archive" %}
    {% trans "Back to list" %}
    +{% endblock body %} diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/templates/newsletter/newsletter_list.html b/thesisenv/lib/python3.6/site-packages/newsletter/templates/newsletter/newsletter_list.html new file mode 100644 index 0000000..2b48fa2 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/newsletter/templates/newsletter/newsletter_list.html @@ -0,0 +1,41 @@ +{% extends "newsletter/common.html" %} + +{% load i18n %} + +{% block title %}{% trans "Newsletter list" %}{% endblock title %} + +{% block body %} +{% if user.is_authenticated %} +
    + {% csrf_token %} + {{ formset.management_form }} + + + + {% if user %} + + {% endif %} + + {% for form in formset.forms %} + + + + + {% endfor %} +
    {% trans "Newsletter" %}{% trans "Subscribe" %}
    {{ form.id }}{{ form.newsletter }} +{{ form.instance.newsletter.title }}{{ form.subscribed }}
    +

    +
    +{% else %} + + + + + {% for newsletter in object_list %} + + + + {% endfor %} +
    {% trans "Newsletter" %}
    {{ newsletter.title }}
    +{% endif %} +{% endblock body %} diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/templates/newsletter/submission_archive.html b/thesisenv/lib/python3.6/site-packages/newsletter/templates/newsletter/submission_archive.html new file mode 100644 index 0000000..771b995 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/newsletter/templates/newsletter/submission_archive.html @@ -0,0 +1,21 @@ +{% extends "newsletter/common.html" %} + +{% load i18n %} + +{% block title %}{% trans "Newsletter archive" %}{% endblock title %} + +{% block body %} + + + + + {% for submission in latest %} + + + + {% endfor %} + + + +
    {% trans "Newsletter archive" %} {{ newsletter.title }}
    {{ submission }}
    {% trans "Back to list" %}
    +{% endblock body %} diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/templates/newsletter/subscription_activate.html b/thesisenv/lib/python3.6/site-packages/newsletter/templates/newsletter/subscription_activate.html new file mode 100644 index 0000000..558013a --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/newsletter/templates/newsletter/subscription_activate.html @@ -0,0 +1,15 @@ +{% extends "newsletter/common.html" %} + +{% load i18n %} + +{% block title %}{% trans "Newsletter" %} {{ newsletter.title }} {{ action }} {% trans "activate" %}{% endblock title %} + +{% block body %} +

    {% trans "Newsletter" %} {{ newsletter.title }} {{ action }} {% trans "activate" %}

    + +
    + {% csrf_token %} + {{ form.as_p }} +

    +
    +{% endblock body %} diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/templates/newsletter/subscription_subscribe.html b/thesisenv/lib/python3.6/site-packages/newsletter/templates/newsletter/subscription_subscribe.html new file mode 100644 index 0000000..07225c6 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/newsletter/templates/newsletter/subscription_subscribe.html @@ -0,0 +1,25 @@ +{% extends "newsletter/common.html" %} + +{% load i18n %} + +{% block title %}{% trans "Newsletter subscribe" %}{% endblock title %} + +{% block body %} +

    {% trans "Newsletter subscribe" %} {{ newsletter.title }}

    + + {% if error %} +

    {% trans "Due to a technical error we were not able to submit your confirmation email. This could be because your email address is invalid." %}

    + + {% comment %} Replace the the following dummy with a valid email address and remove this comment. + +

    {% trans "If the error persists, please don't hesitate to contact us at the following email address:" %} info@foobar.com

    + + {% endcomment %} + {% else %} +
    + {% csrf_token %} + {{ form.as_p }} +

    +
    + {% endif %} +{% endblock body %} diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/templates/newsletter/subscription_subscribe_activated.html b/thesisenv/lib/python3.6/site-packages/newsletter/templates/newsletter/subscription_subscribe_activated.html new file mode 100644 index 0000000..c8519d7 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/newsletter/templates/newsletter/subscription_subscribe_activated.html @@ -0,0 +1,11 @@ +{% extends "newsletter/common.html" %} + +{% load i18n %} + +{% block title %}{% trans "Newsletter" %} {{ newsletter.title }} {{ action }} {% trans "activate" %}{% endblock title %} + +{% block body %} +

    {% trans "Newsletter" %} {{ newsletter.title }} {{ action }} {% trans "activate" %}

    + +

    {% trans "Your subscription has successfully been activated." %}

    +{% endblock body %} diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/templates/newsletter/subscription_subscribe_email_sent.html b/thesisenv/lib/python3.6/site-packages/newsletter/templates/newsletter/subscription_subscribe_email_sent.html new file mode 100644 index 0000000..79eea11 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/newsletter/templates/newsletter/subscription_subscribe_email_sent.html @@ -0,0 +1,11 @@ +{% extends "newsletter/common.html" %} + +{% load i18n %} + +{% block title %}{% trans "Newsletter subscribe" %}{% endblock title %} + +{% block body %} +

    {% trans "Newsletter subscribe" %} {{ newsletter.title }}

    + +

    {% trans "Your subscription request was successfully received and an activation email has been sent to you. In that email you will find a link which you need to follow in order to activate your subscription." %}

    +{% endblock body %} diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/templates/newsletter/subscription_subscribe_user.html b/thesisenv/lib/python3.6/site-packages/newsletter/templates/newsletter/subscription_subscribe_user.html new file mode 100644 index 0000000..f0bbf3d --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/newsletter/templates/newsletter/subscription_subscribe_user.html @@ -0,0 +1,25 @@ +{% extends "newsletter/common.html" %} + +{% load i18n %} + +{% block title %}{% trans "Newsletter subscribe" %}{% endblock title %} + +{% block body %} +

    {% trans "Newsletter subscribe" %} {{ newsletter.title }}

    + +

    Welcome, {{ user }}!

    + +{% if messages %} +
      + {% for message in messages %} +
    • {{ message }}
    • + {% endfor %} +
    +{% else %} +{% trans "Do you want to subscribe to this newsletter?" %} +
    + {% csrf_token %} +

    +
    +{% endif %} +{% endblock body %} diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/templates/newsletter/subscription_unsubscribe.html b/thesisenv/lib/python3.6/site-packages/newsletter/templates/newsletter/subscription_unsubscribe.html new file mode 100644 index 0000000..022ba4b --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/newsletter/templates/newsletter/subscription_unsubscribe.html @@ -0,0 +1,25 @@ +{% extends "newsletter/common.html" %} + +{% load i18n %} + +{% block title %}{% trans "Newsletter unsubscribe" %}{% endblock title %} + +{% block body %} +

    {% trans "Newsletter unsubscribe" %} {{ newsletter.title }}

    + + {% if error %} +

    {% trans "Due to a technical error we were not able to submit your confirmation email. This could be because your email address is invalid." %}

    + + {% comment %} Replace the the following dummy with a valid email address and remove this comment. + +

    {% trans "If the error persists, please don't hesitate to contact us at the following email address:" %} info@foobar.com

    + + {% endcomment %} + {% else %} +
    + {% csrf_token %} + {{ form.as_p }} +

    +
    + {% endif %} +{% endblock body %} diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/templates/newsletter/subscription_unsubscribe_activated.html b/thesisenv/lib/python3.6/site-packages/newsletter/templates/newsletter/subscription_unsubscribe_activated.html new file mode 100644 index 0000000..65628c5 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/newsletter/templates/newsletter/subscription_unsubscribe_activated.html @@ -0,0 +1,11 @@ +{% extends "newsletter/common.html" %} + +{% load i18n %} + +{% block title %}{% trans "Newsletter" %} {{ newsletter.title }} {{ action }} {% trans "activate" %}{% endblock title %} + +{% block body %} +

    {% trans "Newsletter" %} {{ newsletter.title }} {{ action }} {% trans "activate" %}

    + +

    {% trans "You have successfully been unsubscribed." %}

    +{% endblock body %} diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/templates/newsletter/subscription_unsubscribe_email_sent.html b/thesisenv/lib/python3.6/site-packages/newsletter/templates/newsletter/subscription_unsubscribe_email_sent.html new file mode 100644 index 0000000..19d00b8 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/newsletter/templates/newsletter/subscription_unsubscribe_email_sent.html @@ -0,0 +1,11 @@ +{% extends "newsletter/common.html" %} + +{% load i18n %} + +{% block title %}{% trans "Newsletter unsubscribe" %}{% endblock title %} + +{% block body %} +

    {% trans "Newsletter unsubscribe" %} {{ newsletter.title }}

    + +

    {% trans "Your unsubscription request has successfully been received. An email has been sent to you with a link you need to follow in order to confirm your unsubscription." %}

    +{% endblock body %} diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/templates/newsletter/subscription_unsubscribe_user.html b/thesisenv/lib/python3.6/site-packages/newsletter/templates/newsletter/subscription_unsubscribe_user.html new file mode 100644 index 0000000..5bc81dc --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/newsletter/templates/newsletter/subscription_unsubscribe_user.html @@ -0,0 +1,27 @@ +{% extends "newsletter/common.html" %} + +{% load i18n %} + +{% block title %}{% trans "Newsletter unsubscribe" %}{% endblock title %} + +{% block body %} +

    {% trans "Newsletter unsubscribe" %} {{ newsletter.title }}

    + +

    Welcome, {{ user }}!

    + +{% if messages %} +
      + {% for message in messages %} +
    • {{ message }}
    • + {% endfor %} +
    +{% else %} + +{% trans "Do you want to unsubscribe from this newsletter?" %} +
    + {% csrf_token %} +

    +
    +{% endif %} + +{% endblock body %} diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/templates/newsletter/subscription_update.html b/thesisenv/lib/python3.6/site-packages/newsletter/templates/newsletter/subscription_update.html new file mode 100644 index 0000000..fe83797 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/newsletter/templates/newsletter/subscription_update.html @@ -0,0 +1,25 @@ +{% extends "newsletter/common.html" %} + +{% load i18n %} + +{% block title %}{% trans "Newsletter update" %}{% endblock title %} + +{% block body %} +

    {% trans "Newsletter update" %} {{ newsletter.title }}

    + + {% if error %} +

    {% trans "Due to a technical error we were not able to submit your confirmation email. This could be because your email address is invalid." %}

    + + {% comment %} Replace the the following dummy with a valid email address and remove this comment. + +

    {% trans "If the error persists, please don't hesitate to contact us at the following email address:" %} info@foobar.com

    + + {% endcomment %} + {% else %} +
    + {% csrf_token %} + {{ form.as_p }} +

    +
    + {% endif %} +{% endblock body %} diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/templates/newsletter/subscription_update_activated.html b/thesisenv/lib/python3.6/site-packages/newsletter/templates/newsletter/subscription_update_activated.html new file mode 100644 index 0000000..65b0298 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/newsletter/templates/newsletter/subscription_update_activated.html @@ -0,0 +1,11 @@ +{% extends "newsletter/common.html" %} + +{% load i18n %} + +{% block title %}{% trans "Newsletter" %} {{ newsletter.title }} {{ action }} {% trans "activate" %}{% endblock title %} + +{% block body %} +

    {% trans "Newsletter" %} {{ newsletter.title }} {{ action }} {% trans "activate" %}

    + +

    {% trans "Your subscription has successfully been updated." %}

    +{% endblock body %} diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/templates/newsletter/subscription_update_email_sent.html b/thesisenv/lib/python3.6/site-packages/newsletter/templates/newsletter/subscription_update_email_sent.html new file mode 100644 index 0000000..b740d18 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/newsletter/templates/newsletter/subscription_update_email_sent.html @@ -0,0 +1,11 @@ +{% extends "newsletter/common.html" %} + +{% load i18n %} + +{% block title %}{% trans "Newsletter update" %}{% endblock title %} + +{% block body %} +

    {% trans "Newsletter update" %} {{ newsletter.title }}

    + +

    {% trans "Your update request was successfully received and an activation email has been sent to you. In that email you will find a link which you need to follow in order to update your subscription." %}

    +{% endblock body %} diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/templates/widget/image.html b/thesisenv/lib/python3.6/site-packages/newsletter/templates/widget/image.html new file mode 100755 index 0000000..5de45f1 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/newsletter/templates/widget/image.html @@ -0,0 +1,5 @@ +{% load admin_modify i18n %}{% if bound_field.original_value %} +{% trans "Currently:" %} {{ bound_field.original_value|escape }}
    +
    +{% trans "Change:" %}{% output_all bound_field.form_fields %} +{% else %} {% output_all bound_field.form_fields %} {% endif %} diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/urls.py b/thesisenv/lib/python3.6/site-packages/newsletter/urls.py new file mode 100644 index 0000000..8f95b15 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/newsletter/urls.py @@ -0,0 +1,84 @@ +from surlex.dj import surl + +from .views import ( + NewsletterListView, NewsletterDetailView, + SubmissionArchiveIndexView, SubmissionArchiveDetailView, + SubscribeRequestView, UnsubscribeRequestView, UpdateRequestView, + ActionTemplateView, UpdateSubscriptionView, +) + +urlpatterns = [ + # Newsletter list and detail view + surl('^$', NewsletterListView.as_view(), name='newsletter_list'), + surl( + '^/$', + NewsletterDetailView.as_view(), name='newsletter_detail' + ), + + # Action request views + surl( + '^/subscribe/$', + SubscribeRequestView.as_view(), + name='newsletter_subscribe_request' + ), + surl( + '^/subscribe/confirm/$', + SubscribeRequestView.as_view(confirm=True), + name='newsletter_subscribe_confirm' + ), + surl( + '^/update/$', + UpdateRequestView.as_view(), + name='newsletter_update_request' + ), + surl( + '^/unsubscribe/$', + UnsubscribeRequestView.as_view(), + name='newsletter_unsubscribe_request' + ), + surl( + '^/unsubscribe/confirm/$', + UnsubscribeRequestView.as_view(confirm=True), + name='newsletter_unsubscribe_confirm' + ), + + # Activation email sent view + surl( + '^//' + 'email-sent/$', + ActionTemplateView.as_view( + template_name='newsletter/subscription_%(action)s_email_sent.html' + ), + name='newsletter_activation_email_sent'), + + # Action confirmation views + surl( + '^/subscription//' + '/activate//$', + UpdateSubscriptionView.as_view(), name='newsletter_update_activate' + ), + surl( + '^/subscription//' + '/activate/$', + UpdateSubscriptionView.as_view(), name='newsletter_update' + ), + + # Action activation completed view + surl( + '^//' 
+ 'activation-completed/$', + ActionTemplateView.as_view( + template_name='newsletter/subscription_%(action)s_activated.html' + ), + name='newsletter_action_activated'), + + # Archive views + surl( + '^/archive/////$', + SubmissionArchiveDetailView.as_view(), name='newsletter_archive_detail' + ), + surl( + '^/archive/$', + SubmissionArchiveIndexView.as_view(), name='newsletter_archive' + ), +] diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/utils.py b/thesisenv/lib/python3.6/site-packages/newsletter/utils.py new file mode 100644 index 0000000..ed43d7c --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/newsletter/utils.py @@ -0,0 +1,42 @@ +""" Generic helper functions """ + +import logging + + +from django.contrib.sites.models import Site +from django.utils.crypto import get_random_string + +logger = logging.getLogger(__name__) + + +# Possible actions that user can perform +ACTIONS = ('subscribe', 'unsubscribe', 'update') + + +def make_activation_code(): + """ Generate a unique activation code. """ + + # Use Django's crypto get_random_string() instead of rolling our own. + return get_random_string(length=40) + + +def get_default_sites(): + """ Get a list of id's for all sites; the default for newsletters. """ + return [site.id for site in Site.objects.all()] + + +class Singleton(type): + """ + Singleton metaclass. 
+ Source: + http://stackoverflow.com/questions/6760685/creating-a-singleton-in-python + """ + _instances = {} + + def __call__(cls, *args, **kwargs): + if cls not in cls._instances: + cls._instances[cls] = super(Singleton, cls).__call__( + *args, **kwargs + ) + + return cls._instances[cls] diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/validators.py b/thesisenv/lib/python3.6/site-packages/newsletter/validators.py new file mode 100644 index 0000000..754df3b --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/newsletter/validators.py @@ -0,0 +1,18 @@ +from django.contrib.auth import get_user_model +from django.forms.utils import ValidationError +from django.utils.translation import ugettext_lazy as _ + + +def validate_email_nouser(email): + """ + Check if the email address does not belong to an existing user. + """ + # Check whether we should be subscribed to as a user + User = get_user_model() + + if User.objects.filter(email__exact=email).exists(): + raise ValidationError(_( + "The e-mail address '%(email)s' belongs to a user with an " + "account on this site. Please log in as that user " + "and try again." 
+ ) % {'email': email}) diff --git a/thesisenv/lib/python3.6/site-packages/newsletter/views.py b/thesisenv/lib/python3.6/site-packages/newsletter/views.py new file mode 100644 index 0000000..304fbd0 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/newsletter/views.py @@ -0,0 +1,627 @@ +import logging + +import datetime +import socket + +from smtplib import SMTPException + +from django.core.exceptions import ValidationError, ImproperlyConfigured +from django.conf import settings + +from django.template.response import SimpleTemplateResponse + +from django.shortcuts import get_object_or_404, redirect +from django.http import Http404 + +from django.views.generic import ( + ListView, DetailView, + ArchiveIndexView, DateDetailView, + TemplateView, FormView +) + +from django.contrib import messages +from django.contrib.sites.models import Site +from django.contrib.auth.decorators import login_required + +from django.utils.decorators import method_decorator +from django.utils.translation import ugettext, ugettext_lazy as _ +from django.utils import timezone + +from django.forms.models import modelformset_factory + +from .compat import reverse +from .models import Newsletter, Subscription, Submission +from .forms import ( + SubscribeRequestForm, UserUpdateForm, UpdateRequestForm, + UnsubscribeRequestForm, UpdateForm +) +from .settings import newsletter_settings +from .utils import ACTIONS + + +logger = logging.getLogger(__name__) + + +def is_authenticated(user): + # Compat method for Django < 1.10 + return user.is_authenticated if isinstance(user.is_authenticated, bool) else user.is_authenticated() + + +class NewsletterViewBase(object): + """ Base class for newsletter views. 
""" + queryset = Newsletter.on_site.filter(visible=True) + allow_empty = False + slug_url_kwarg = 'newsletter_slug' + + +class NewsletterDetailView(NewsletterViewBase, DetailView): + pass + + +class NewsletterListView(NewsletterViewBase, ListView): + """ + List available newsletters and generate a formset for (un)subscription + for authenticated users. + """ + + def post(self, request, **kwargs): + """ Allow post requests. """ + + # All logic (for now) occurs in the form logic + return super(NewsletterListView, self).get(request, **kwargs) + + def get_context_data(self, **kwargs): + context = super(NewsletterListView, self).get_context_data(**kwargs) + + if is_authenticated(self.request.user): + # Add a formset for logged in users. + context['formset'] = self.get_formset() + + return context + + def get_formset(self): + """ + Return a formset with newsletters for logged in users, or None. + """ + + # Short-hand variable names + newsletters = self.get_queryset() + request = self.request + user = request.user + + SubscriptionFormSet = modelformset_factory( + Subscription, form=UserUpdateForm, extra=0 + ) + + # Before rendering the formset, subscription objects should + # already exist. + for n in newsletters: + Subscription.objects.get_or_create( + newsletter=n, user=user + ) + + # Get all subscriptions for use in the formset + qs = Subscription.objects.filter( + newsletter__in=newsletters, user=user + ) + + if request.method == 'POST': + try: + formset = SubscriptionFormSet(request.POST, queryset=qs) + + if not formset.is_valid(): + raise ValidationError('Update form invalid.') + + # Everything's allright, let's save + formset.save() + + messages.info( + request, + ugettext("Your changes have been saved.") + ) + + except ValidationError: + # Invalid form posted. As there is no way for a user to + # enter data - invalid forms should be ignored from the UI. + + # However, we log them for debugging purposes. 
+ logger.warning( + 'Invalid form post received', + exc_info=True, extra={'request': request} + ) + + # Present a pristine form + formset = SubscriptionFormSet(queryset=qs) + + else: + formset = SubscriptionFormSet(queryset=qs) + + return formset + + +class ProcessUrlDataMixin(object): + """ + Mixin providing the ability to process args and kwargs from url + before dispatching request. + """ + + def process_url_data(self, *args, **kwargs): + """ Subclasses should put url data processing in this method. """ + pass + + def dispatch(self, *args, **kwargs): + self.process_url_data(*args, **kwargs) + + return super(ProcessUrlDataMixin, self).dispatch(*args, **kwargs) + + +class NewsletterMixin(ProcessUrlDataMixin): + """ + Mixin retrieving newsletter based on newsletter_slug from url + and adding it to context and form kwargs. + """ + + def process_url_data(self, *args, **kwargs): + """ + Get newsletter based on `newsletter_slug` from url + and add it to instance attributes. + """ + + assert 'newsletter_slug' in kwargs + + super(NewsletterMixin, self).process_url_data(*args, **kwargs) + + newsletter_queryset = kwargs.get( + 'newsletter_queryset', + Newsletter.on_site.all() + ) + newsletter_slug = kwargs['newsletter_slug'] + + self.newsletter = get_object_or_404( + newsletter_queryset, slug=newsletter_slug, + ) + + def get_form_kwargs(self): + """ Add newsletter to form kwargs. """ + kwargs = super(NewsletterMixin, self).get_form_kwargs() + + kwargs['newsletter'] = self.newsletter + + return kwargs + + def get_context_data(self, **kwargs): + """ Add newsletter to context. """ + context = super(NewsletterMixin, self).get_context_data(**kwargs) + + context['newsletter'] = self.newsletter + + return context + + +class ActionMixin(ProcessUrlDataMixin): + """ Mixin retrieving action from url and adding it to context. """ + + action = None + + def process_url_data(self, *args, **kwargs): + """ Add action from url to instance attributes if not already set. 
""" + super(ActionMixin, self).process_url_data(*args, **kwargs) + + if self.action is None: + assert 'action' in kwargs + self.action = kwargs['action'] + + assert self.action in ACTIONS, 'Unknown action: %s' % self.action + + def get_context_data(self, **kwargs): + """ Add action to context. """ + context = super(ActionMixin, self).get_context_data(**kwargs) + + context['action'] = self.action + + return context + + def get_template_names(self): + """ Return list of template names for proper action. """ + + if self.template_name is None: + raise ImproperlyConfigured( + '%(class_name)s should define template_name, ' + 'or implement get_template_names()' % { + 'class_name': self.__class__.__name__ + } + ) + + else: + try: + return [self.template_name % {'action': self.action}] + except KeyError as e: + raise ImproperlyConfigured( + '%(class_name)s inherits from ActionMixin and can contain ' + '%%(action)s in template_name to be replaced ' + 'by action name %(wrong_key)s given instead.' % { + 'class_name': self.__class__.__name__, + 'wrong_key': e, + } + ) + + +class ActionTemplateView(NewsletterMixin, ActionMixin, TemplateView): + """ + View that renders a template for proper action, + with newsletter and action in context. + """ + pass + + +class ActionFormView(NewsletterMixin, ActionMixin, FormView): + """ FormView with newsletter and action support. """ + + def get_url_from_viewname(self, viewname): + """ + Return url for given `viename` + and associated with this view newsletter and action. + """ + + return reverse( + viewname, + kwargs={ + 'newsletter_slug': self.newsletter.slug, + 'action': self.action + } + ) + + +class ActionUserView(ActionTemplateView): + """ Base class for subscribe and unsubscribe user views. """ + template_name = "newsletter/subscription_%(action)s_user.html" + + def process_url_data(self, *args, **kwargs): + """ Add confirm to instance attributes. 
""" + super(ActionUserView, self).process_url_data(*args, **kwargs) + + # confirm is optional kwarg defaulting to False + self.confirm = kwargs.get('confirm', False) + + def post(self, request, *args, **kwargs): + return self.get(request, *args, **kwargs) + + @method_decorator(login_required) + def dispatch(self, *args, **kwargs): + return super(ActionUserView, self).dispatch(*args, **kwargs) + + +class SubscribeUserView(ActionUserView): + action = 'subscribe' + + def get(self, request, *args, **kwargs): + already_subscribed = False + instance = Subscription.objects.get_or_create( + newsletter=self.newsletter, user=request.user + )[0] + + if instance.subscribed: + already_subscribed = True + elif self.confirm: + instance.subscribed = True + instance.save() + + messages.success( + request, + _('You have been subscribed to %s.') % self.newsletter + ) + + logger.debug( + _('User %(rs)s subscribed to %(my_newsletter)s.'), + { + "rs": request.user, + "my_newsletter": self.newsletter + } + ) + + if already_subscribed: + messages.info( + request, + _('You are already subscribed to %s.') % self.newsletter + ) + + return super(SubscribeUserView, self).get(request, *args, **kwargs) + + +class UnsubscribeUserView(ActionUserView): + action = 'unsubscribe' + + def get(self, request, *args, **kwargs): + not_subscribed = False + + try: + instance = Subscription.objects.get( + newsletter=self.newsletter, user=request.user + ) + + if not instance.subscribed: + not_subscribed = True + elif self.confirm: + instance.subscribed = False + instance.save() + + messages.success( + request, + _('You have been unsubscribed from %s.') % self.newsletter + ) + + logger.debug( + _('User %(rs)s unsubscribed from %(my_newsletter)s.'), + { + "rs": request.user, + "my_newsletter": self.newsletter + } + ) + + except Subscription.DoesNotExist: + not_subscribed = True + + if not_subscribed: + messages.info( + request, + _('You are not subscribed to %s.') % self.newsletter + ) + + return 
super(UnsubscribeUserView, self).get(request, *args, **kwargs) + + +class ActionRequestView(ActionFormView): + """ Base class for subscribe, unsubscribe and update request views. """ + template_name = "newsletter/subscription_%(action)s.html" + + def process_url_data(self, *args, **kwargs): + """ Add error to instance attributes. """ + super(ActionRequestView, self).process_url_data(*args, **kwargs) + + self.error = None + + def get_context_data(self, **kwargs): + """ Add error to context. """ + context = super(ActionRequestView, self).get_context_data(**kwargs) + + context.update({ + 'error': self.error, + }) + + return context + + def get_subscription(self, form): + """ Return subscription for the current request. """ + return form.instance + + def no_email_confirm(self, form): + """ + Subscribe/unsubscribe user and redirect to action activated page. + """ + self.subscription.update(self.action) + + return redirect( + self.get_url_from_viewname('newsletter_action_activated') + ) + + def get_success_url(self): + return self.get_url_from_viewname('newsletter_activation_email_sent') + + def form_valid(self, form): + self.subscription = self.get_subscription(form) + + if not getattr( + newsletter_settings, + 'CONFIRM_EMAIL_%s' % self.action.upper() + ): + # Confirmation email for this action was switched off in settings. + return self.no_email_confirm(form) + + try: + self.subscription.send_activation_email(action=self.action) + + except (SMTPException, socket.error) as e: + logger.exception( + 'Error %s while submitting email to %s.', + e, self.subscription.email + ) + self.error = True + + # Although form was valid there was error while sending email, + # so stay at the same url. 
+ return super(ActionRequestView, self).form_invalid(form) + + return super(ActionRequestView, self).form_valid(form) + + +class SubscribeRequestView(ActionRequestView): + action = 'subscribe' + form_class = SubscribeRequestForm + confirm = False + + def get_form_kwargs(self): + """ Add ip to form kwargs for submitted forms. """ + kwargs = super(SubscribeRequestView, self).get_form_kwargs() + + if self.request.method in ('POST', 'PUT'): + kwargs['ip'] = self.request.META.get('REMOTE_ADDR') + + return kwargs + + def get_subscription(self, form): + return form.save() + + def dispatch(self, request, *args, **kwargs): + if is_authenticated(request.user): + kwargs['confirm'] = self.confirm + return SubscribeUserView.as_view()(request, *args, **kwargs) + + return super(SubscribeRequestView, self).dispatch( + request, *args, **kwargs + ) + + +class UnsubscribeRequestView(ActionRequestView): + action = 'unsubscribe' + form_class = UnsubscribeRequestForm + confirm = False + + def dispatch(self, request, *args, **kwargs): + if is_authenticated(request.user): + kwargs['confirm'] = self.confirm + return UnsubscribeUserView.as_view()(request, *args, **kwargs) + + return super(UnsubscribeRequestView, self).dispatch( + request, *args, **kwargs + ) + + +class UpdateRequestView(ActionRequestView): + action = 'update' + form_class = UpdateRequestForm + + def no_email_confirm(self, form): + """ Redirect to update subscription view. """ + return redirect(self.subscription.update_activate_url()) + + +class UpdateSubscriptionView(ActionFormView): + form_class = UpdateForm + template_name = "newsletter/subscription_activate.html" + + def process_url_data(self, *args, **kwargs): + """ + Add email, subscription and activation_code + to instance attributes. 
+ """ + assert 'email' in kwargs + + super(UpdateSubscriptionView, self).process_url_data(*args, **kwargs) + + self.subscription = get_object_or_404( + Subscription, newsletter=self.newsletter, + email_field__exact=kwargs['email'] + ) + # activation_code is optional kwarg which defaults to None + self.activation_code = kwargs.get('activation_code') + + def get_initial(self): + """ Returns the initial data to use for forms on this view. """ + if self.activation_code: + return {'user_activation_code': self.activation_code} + else: + # TODO: Test coverage of this branch + return None + + def get_form_kwargs(self): + """ Add instance to form kwargs. """ + kwargs = super(UpdateSubscriptionView, self).get_form_kwargs() + + kwargs['instance'] = self.subscription + + return kwargs + + def get_success_url(self): + return self.get_url_from_viewname('newsletter_action_activated') + + def form_valid(self, form): + """ Get our instance, but do not save yet. """ + subscription = form.save(commit=False) + + subscription.update(self.action) + + return super(UpdateSubscriptionView, self).form_valid(form) + + +class SubmissionViewBase(NewsletterMixin): + """ Base class for submission archive views. """ + date_field = 'publish_date' + allow_empty = True + queryset = Submission.objects.filter(publish=True) + slug_field = 'message__slug' + + # Specify date element notation + year_format = '%Y' + month_format = '%m' + day_format = '%d' + + def process_url_data(self, *args, **kwargs): + """ Use only visible newsletters. """ + + kwargs['newsletter_queryset'] = NewsletterListView().get_queryset() + return super( + SubmissionViewBase, self).process_url_data(*args, **kwargs) + + def get_queryset(self): + """ Filter out submissions for current newsletter. """ + qs = super(SubmissionViewBase, self).get_queryset() + + qs = qs.filter(newsletter=self.newsletter) + + return qs + + def _make_date_lookup_arg(self, value): + """ + Convert a date into a datetime when the date field is a DateTimeField. 
+ + When time zone support is enabled, `date` is assumed to be in the + default time zone, so that displayed items are consistent with the URL. + + Related discussion: + https://github.com/dokterbob/django-newsletter/issues/74 + """ + value = datetime.datetime.combine(value, datetime.time.min) + if settings.USE_TZ: + value = timezone.make_aware(value, timezone.get_default_timezone()) + return value + + +class SubmissionArchiveIndexView(SubmissionViewBase, ArchiveIndexView): + pass + + +class SubmissionArchiveDetailView(SubmissionViewBase, DateDetailView): + def get_context_data(self, **kwargs): + """ + Make sure the actual message is available. + """ + context = \ + super(SubmissionArchiveDetailView, self).get_context_data(**kwargs) + + message = self.object.message + + context.update({ + 'message': message, + 'site': Site.objects.get_current(), + 'date': self.object.publish_date, + 'STATIC_URL': settings.STATIC_URL, + 'MEDIA_URL': settings.MEDIA_URL + }) + + return context + + def get_template(self): + """ Get the message template for the current newsletter. """ + + html_template = self.object.message.html_template + + # No HTML -> no party! + if not html_template: + raise Http404(ugettext( + 'No HTML template associated with the newsletter this ' + 'message belongs to.' + )) + + return html_template + + def render_to_response(self, context, **response_kwargs): + """ + Return a simplified response; the template should be rendered without + any context. Use a SimpleTemplateResponse as a RequestContext should + not be used. 
+ """ + return SimpleTemplateResponse( + template=self.get_template(), + context=context, + **response_kwargs + ) diff --git a/thesisenv/lib/python3.6/site-packages/persistent-4.4.3.dist-info/INSTALLER b/thesisenv/lib/python3.6/site-packages/persistent-4.4.3.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/persistent-4.4.3.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/thesisenv/lib/python3.6/site-packages/persistent-4.4.3.dist-info/LICENSE.txt b/thesisenv/lib/python3.6/site-packages/persistent-4.4.3.dist-info/LICENSE.txt new file mode 100644 index 0000000..e1f9ad7 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/persistent-4.4.3.dist-info/LICENSE.txt @@ -0,0 +1,44 @@ +Zope Public License (ZPL) Version 2.1 + +A copyright notice accompanies this license document that identifies the +copyright holders. + +This license has been certified as open source. It has also been designated as +GPL compatible by the Free Software Foundation (FSF). + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +1. Redistributions in source code must retain the accompanying copyright +notice, this list of conditions, and the following disclaimer. + +2. Redistributions in binary form must reproduce the accompanying copyright +notice, this list of conditions, and the following disclaimer in the +documentation and/or other materials provided with the distribution. + +3. Names of the copyright holders must not be used to endorse or promote +products derived from this software without prior written permission from the +copyright holders. + +4. The right to distribute this software or to use it for any purpose does not +give you the right to use Servicemarks (sm) or Trademarks (tm) of the +copyright +holders. Use of them is covered by separate agreement with the copyright +holders. + +5. 
If any files are modified, you must cause the modified files to carry +prominent notices stating that you changed the files and the date of any +change. + +Disclaimer + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS ``AS IS'' AND ANY EXPRESSED +OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES +OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO +EVENT SHALL THE COPYRIGHT HOLDERS BE LIABLE FOR ANY DIRECT, INDIRECT, +INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, +EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/thesisenv/lib/python3.6/site-packages/persistent-4.4.3.dist-info/METADATA b/thesisenv/lib/python3.6/site-packages/persistent-4.4.3.dist-info/METADATA new file mode 100644 index 0000000..ab3fe5d --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/persistent-4.4.3.dist-info/METADATA @@ -0,0 +1,412 @@ +Metadata-Version: 2.1 +Name: persistent +Version: 4.4.3 +Summary: Translucent persistent objects +Home-page: https://github.com/zopefoundation/persistent/ +Author: Zope Corporation +Author-email: zodb-dev@zope.org +License: ZPL 2.1 +Platform: any +Classifier: Development Status :: 6 - Mature +Classifier: License :: OSI Approved :: Zope Public License +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: 
Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Framework :: ZODB +Classifier: Topic :: Database +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Classifier: Operating System :: Microsoft :: Windows +Classifier: Operating System :: Unix +Requires-Dist: zope.interface +Provides-Extra: docs +Requires-Dist: Sphinx; extra == 'docs' +Requires-Dist: repoze.sphinx.autointerface; extra == 'docs' +Provides-Extra: test +Requires-Dist: zope.testrunner; extra == 'test' +Requires-Dist: manuel; extra == 'test' +Requires-Dist: cffi; (platform_python_implementation == "CPython") and extra == 'test' +Provides-Extra: testing + +``persistent``: automatic persistence for Python objects +========================================================= + +.. image:: https://travis-ci.org/zopefoundation/persistent.svg?branch=master + :target: https://travis-ci.org/zopefoundation/persistent + +.. image:: https://coveralls.io/repos/github/zopefoundation/persistent/badge.svg?branch=master + :target: https://coveralls.io/github/zopefoundation/persistent?branch=master + +.. image:: https://readthedocs.org/projects/persistent/badge/?version=latest + :target: http://persistent.readthedocs.org/en/latest/ + :alt: Documentation Status + +.. image:: https://img.shields.io/pypi/v/persistent.svg + :target: https://pypi.org/project/persistent + :alt: Latest release + +.. image:: https://img.shields.io/pypi/pyversions/persistent.svg + :target: https://pypi.org/project/persistent + :alt: Python versions + +This package contains a generic persistence implementation for Python. It +forms the core protocol for making objects interact "transparently" with +a database such as the ZODB. 
+ +Please see the Sphinx documentation (``docs/index.rst``) for further +information, or view the documentation at Read The Docs, for either +the latest (``http://persistent.readthedocs.io/en/latest/``) or stable +release (``http://persistent.readthedocs.io/en/stable/``). + +.. note:: + + Use of this standalone ``persistent`` release is not recommended or + supported with ZODB < 3.11. ZODB 3.10 and earlier bundle their own + version of the ``persistent`` package. + + +``persistent`` Changelog +======================== + +4.4.3 (2018-10-22) +------------------ + +- Fix the repr of the persistent objects to include the module name + when using the C extension. This matches the pure-Python behaviour + and the behaviour prior to 4.4.0. See `issue 92 + `_. + +- Change the repr of persistent objects to format the OID as in + integer in hexadecimal notation if it is an 8-byte byte string, as + ZODB does. This eliminates some issues in doctests. See `issue 95 + `_. + +4.4.2 (2018-08-28) +------------------ + +- Explicitly use unsigned constants for packing and unpacking C + timestamps, fixing an arithmetic issue for GCC when optimizations + are enabled and ``-fwrapv`` is *not* enabled. See `issue 86 + `_. + + +4.4.1 (2018-08-23) +------------------ + +- Fix installation of source packages on PyPy. See `issue 88 + `_. + + +4.4.0 (2018-08-22) +------------------ + +- Use unsigned constants when doing arithmetic on C timestamps, + possibly avoiding some overflow issues with some compilers or + compiler settings. See `issue 86 + `_. + +- Change the default representation of ``Persistent`` objects to + include the representation of their OID and jar, if set. Also add + the ability for subclasses to implement ``_p_repr()`` instead of + overriding ``__repr__`` for better exception handling. See `issue 11 + `_. + +- Reach and maintain 100% test coverage. + +- Simplify ``__init__.py``, including removal of an attempted legacy + import of ``persistent.TimeStamp``. See `PR 80 + `_. 
+ +- Add support for Python 3.7 and drop support for Python 3.3. + +- Build the CFFI modules (used on PyPy or when PURE_PYTHON is set) `at + installation or wheel building time + `_ + when CFFI is available. This replaces `the deprecated way + `_ + of building them at import time. If binary wheels are distributed, + it eliminates the need to have a functioning C compiler to use PyPy. + See `issue 75 + `_. + +- Fix deleting the ``_p_oid`` of a pure-Python persistent object when + it is in a cache. + +- Fix deleting special (``_p``) attributes of a pure-Python persistent + object that overrides ``__delattr__`` and correctly calls ``_p_delattr``. + +- Remove some internal compatibility shims that are no longer + necessary. See `PR 82 `_. + +- Make the return value of ``TimeStamp.second()`` consistent across C + and Python implementations when the ``TimeStamp`` was created from 6 + arguments with floating point seconds. Also make it match across + trips through ``TimeStamp.raw()``. Previously, the C version could + initially have erroneous rounding and too much false precision, + while the Python version could have too much precision. The raw/repr + values have not changed. See `issue 41 + `_. + +4.3.0 (2018-07-30) +------------------ + +- Fix the possibility of a rare crash in the C extension when + deallocating items. See https://github.com/zopefoundation/persistent/issues/66 + +- Change cPickleCache's comparison of object sizes to determine + whether an object can go in the cache to use ``PyObject_TypeCheck()``. + This matches what the pure Python implementation does and is a + stronger test that the object really is compatible with the cache. + Previously, an object could potentially include ``cPersistent_HEAD`` + and *not* set ``tp_base`` to ``cPersistenceCAPI->pertype`` and still + be eligible for the pickle cache; that is no longer the case. See + `issue 69 `_. 
+ +4.2.4.2 (2017-04-23) +-------------------- + +- Packaging-only release: fix Python 2.7 ``manylinux`` wheels. + + +4.2.4.1 (2017-04-21) +-------------------- + +- Packaging-only release: get ``manylinux`` wheel built automatically. + + +4.2.4 (2017-03-20) +------------------ + +- Avoid raising a ``SystemError: error return without exception set`` + when loading an object with slots whose jar generates an exception + (such as a ZODB ``POSKeyError``) in ``setstate``. + + +4.2.3 (2017-03-08) +------------------ + +- Fix the hashcode of Python ``TimeStamp`` objects on 64-bit Python on + Windows. See https://github.com/zopefoundation/persistent/pull/55 + +- Stop calling ``gc.collect`` every time ``PickleCache.incrgc`` is called (every + transaction boundary) in pure-Python mode (PyPy). This means that + the reported size of the cache may be wrong (until the next GC), but + it is much faster. This should not have any observable effects for + user code. + +- Stop clearing the dict and slots of objects added to + ``PickleCache.new_ghost`` (typically these values are passed to + ``__new__`` from the pickle data) in pure-Python mode (PyPy). This + matches the behaviour of the C code. + +- Add support for Python 3.6. + +- Fix ``__setstate__`` interning when ``state`` parameter is not a built-in dict + + +4.2.2 (2016-11-29) +------------------ + +- Drop use of ``ctypes`` for determining maximum integer size, to increase + pure-Python compatibility. See https://github.com/zopefoundation/persistent/pull/31 + +- Ensure that ``__slots__`` attributes are cleared when a persistent + object is ghostified. (This excluses classes that override + ``__new__``. See + https://github.com/zopefoundation/persistent/wiki/Notes_on_state_new_and_slots + if you're curious.) + +4.2.1 (2016-05-26) +------------------ + +- Fix the hashcode of C ``TimeStamp`` objects on 64-bit Python 3 on + Windows. 
+ +4.2.0 (2016-05-05) +------------------ + +- Fixed the Python(/PYPY) implementation ``TimeStamp.timeTime`` method + to have subsecond precision. + +- When testing ``PURE_PYTHON`` environments under ``tox``, avoid poisoning + the user's global wheel cache. + +- Add support for Python 3.5. + +- Drop support for Python 2.6 and 3.2. + +4.1.1 (2015-06-02) +------------------ + +- Fix manifest and re-upload to fix stray files included in 4.1.0. + +4.1.0 (2015-05-19) +------------------ + +- Make the Python implementation of ``Persistent`` and ``PickleCache`` + behave more similarly to the C implementation. In particular, the + Python version can now run the complete ZODB and ZEO test suites. + +- Fix the hashcode of the Python ``TimeStamp`` on 32-bit platforms. + +4.0.9 (2015-04-08) +------------------ + +- Make the C and Python ``TimeStamp`` objects behave more alike. The + Python version now produces the same ``repr`` and ``.raw()`` output as + the C version, and has the same hashcode. In addition, the Python + version is now supports ordering and equality like the C version. + +- Intern keys of object state in ``__setstate__`` to reduce memory usage + when unpickling multiple objects with the same attributes. + +- Add support for PyPy3. + +- 100% branch coverage. + +4.0.8 (2014-03-20) +------------------ + +- Add support for Python 3.4. + +- In pure-Python ``Persistent``, avoid loading state in ``_p_activate`` + for non-ghost objects (which could corrupt their state). (PR #9) + +- In pure-Python, and don't throw ``POSKeyError`` if ``_p_activate`` is + called on an object that has never been committed. (PR #9) + +- In pure-Python ``Persistent``, avoid calling a subclass's ``__setattr__`` + at instance creation time. (PR #8) + +- Make it possible to delete ``_p_jar`` / ``_p_oid`` of a pure-Python + ``Persistent`` object which has been removed from the jar's cache + (fixes aborting a ZODB Connection that has added objects). 
(PR #7) + +4.0.7 (2014-02-20) +------------------ + +- Avoid a KeyError from ``_p_accessed()`` on newly-created objects under + pure-Python: these objects may be assigned to a jar, but not yet added + to its cache. (PR #6) + +- Avoid a failure in ``Persistent.__setstate__`` when the state dict + contains exactly two keys. (PR #5) + +- Fix a hang in ``picklecache`` invalidation if OIDs are manually passed + out-of-order. (PR #4) + +- Add ``PURE_PYTHON`` environment variable support: if set, the C + extensions will not be built, imported, or tested. + + +4.0.6 (2013-01-03) +------------------ + +- Updated Trove classifiers. + + +4.0.5 (2012-12-14) +------------------ + +- Fixed the C-extensions under Py3k (previously they compiled but were + not importable). + + +4.0.4 (2012-12-11) +------------------ + +- Added support for Python 3.3. + +- C extenstions now build under Python 3.2, passing the same tests as + the pure-Python reference implementation. + +4.0.3 (2012-11-19) +------------------ + +- Fixed: In the C implimentation, an integer was compared with a + pointer, with undefined results and a compiler warning. + +- Fixed: the Python implementation of the ``_p_estimated_size`` propety + didn't support deletion. + +- Simplified implementation of the ``_p_estimated_size`` property to + only accept integers. A TypeError is raised if an incorrect type is + provided. + + +4.0.2 (2012-08-27) +------------------ + +- Correct initialization functions in renamed ``_timestamp`` extension. + + +4.0.1 (2012-08-26) +------------------ + +- Worked around test failure due to overflow to long on 32-bit systems. + +- Renamed ``TimeStamp`` extension module to avoid clash with pure-Python + ``timestamp`` module on case-insensitive filesystems. 
+ + N.B: the canonical way to import the ``TimeStamp`` class is now:: + + from persistent.timestamp import TimeStamp + + which will yield the class from the extension module (if available), + falling back to the pure-Python reference implementation. + + +4.0.0 (2012-08-11) +------------------ + +Platform Changes +################ + +- Added explicit support for Python 3.2 and PyPy. + + - Note that the C implementations of Persistent, PickleCache, and Timestamp + are not built (yet) on these platforms. + +- Dropped support for Python < 2.6. + +Testing Changes +############### + +- 100% unit test coverage. + +- Removed all ``ZODB``-dependent tests: + + - Rewrote some to avoid the dependency + + - Cloned the remainder into new ``ZODB.tests`` modules. + +- Refactored some doctests refactored as unittests. + +- Completed pure-Python reference implementations of 'Persistent', + 'PickleCache', and 'TimeStamp'. + +- All covered platforms tested under ``tox``. + +- Added support for continuous integration using ``tox`` and ``jenkins``. + +- Added ``setup.py dev`` alias (installs ``nose`` and ``coverage``). + +- Dropped dependency on ``zope.testing`` / ``zope.testrunner``: tests now + run with ``setup.py test``. + +Documentation Changes +##################### + +- Refactored many Doctests as Sphinx documentation (snippets are exercised + via 'tox'). + +- Added ``setup.py docs`` alias (installs ``Sphinx`` and + ``repoze.sphinx.autointerface``). 
+ + diff --git a/thesisenv/lib/python3.6/site-packages/persistent-4.4.3.dist-info/RECORD b/thesisenv/lib/python3.6/site-packages/persistent-4.4.3.dist-info/RECORD new file mode 100644 index 0000000..27f3a68 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/persistent-4.4.3.dist-info/RECORD @@ -0,0 +1,67 @@ +../../../include/site/python3.6/persistent/cPersistence.h,sha256=sSXwdcCpn9t1WpAJ2Po2RjboAtgVieedjk2rcZwB04Y,5114 +../../../include/site/python3.6/persistent/ring.h,sha256=RbpXV2O0vgqFwSERFZAkJgU8J17q3C3s-Y_dWQTrAE0,2639 +persistent-4.4.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +persistent-4.4.3.dist-info/LICENSE.txt,sha256=PmcdsR32h1FswdtbPWXkqjg-rKPCDOo_r1Og9zNdCjw,2070 +persistent-4.4.3.dist-info/METADATA,sha256=wMhHSH93H1lOmyxfkFZ8A2Tv38vjGEV6XUDmjKSRuuM,14062 +persistent-4.4.3.dist-info/RECORD,, +persistent-4.4.3.dist-info/WHEEL,sha256=EqYtf7kBe5N6WHAm79ETzUPLkyUuvZrFyRt3YQhGXds,109 +persistent-4.4.3.dist-info/top_level.txt,sha256=I3n7tB5JKG4dsBBbfoBzNiYFa4qVxfkpY1FOO5szyns,11 +persistent/__init__.py,sha256=OVIqHdnT-92GHgXWkHrpYw4cYqAl1yMPzq-K1axhjYQ,2070 +persistent/__pycache__/__init__.cpython-36.pyc,, +persistent/__pycache__/_compat.cpython-36.pyc,, +persistent/__pycache__/_ring_build.cpython-36.pyc,, +persistent/__pycache__/dict.cpython-36.pyc,, +persistent/__pycache__/interfaces.cpython-36.pyc,, +persistent/__pycache__/list.cpython-36.pyc,, +persistent/__pycache__/mapping.cpython-36.pyc,, +persistent/__pycache__/persistence.cpython-36.pyc,, +persistent/__pycache__/picklecache.cpython-36.pyc,, +persistent/__pycache__/ring.cpython-36.pyc,, +persistent/__pycache__/timestamp.cpython-36.pyc,, +persistent/__pycache__/wref.cpython-36.pyc,, +persistent/_compat.h,sha256=f7-F-5-xYnSFv8L4AXxldlpjTNMayHFp2kui3pEXdO0,1531 +persistent/_compat.py,sha256=K0P14BwahHDT0t7rpRtlFTHVkCatmG5YBm82vEs3zRw,1107 +persistent/_ring.abi3.so,sha256=t0NyGiXtnYv4fqoJ6oWeE-l7p94WQ2gksv79xqLdSQQ,27552 
+persistent/_ring_build.py,sha256=2ui8bMEyYMqpdLWkJ_0oUCfQVQtYWY2xCjvtQyTVbqA,1056 +persistent/_timestamp.c,sha256=qi8z1k0qdwimtqhutYyIsWlEhPPbOjXLVcGOEGlR4_A,17071 +persistent/_timestamp.cpython-36m-darwin.so,sha256=GSn7G35_9aVY4pSc5xcYNzVqd1WU34UCjS75tzlkE0U,37412 +persistent/cPersistence.c,sha256=uhtzUVHHGK9d9n-1bZxgbyd3WMTCzdsMlI3g57Zhtvc,48599 +persistent/cPersistence.cpython-36m-darwin.so,sha256=cgaoCsfGKD1QPnJqGLlNNGfFo6xkaOM55BC5bL2qbjA,73856 +persistent/cPersistence.h,sha256=sSXwdcCpn9t1WpAJ2Po2RjboAtgVieedjk2rcZwB04Y,5114 +persistent/cPickleCache.c,sha256=-fqWdeJdI1q-oVMRoe3YazjUpcNLNXslLDfTBD7wRRE,41506 +persistent/cPickleCache.cpython-36m-darwin.so,sha256=qkX6IEZiLSIDUZbzbpZJtP4ZL5HM801g9mk4euwIRJA,52252 +persistent/dict.py,sha256=MuOlMTApJ39KBRDfklZrkplp_dPqybRM731esQwoFCs,751 +persistent/interfaces.py,sha256=bw-UG1G9tCFyidVkEax3IbWLGfngawWC3H3rUtC4QFE,18119 +persistent/list.py,sha256=qZ4UWorMS5OmoZ3VqzUMxUQw7H6Y7RXNaWIkl0QIQjA,2882 +persistent/mapping.py,sha256=CXn5EiGWqC95K5-nPbxbxc9WenoYfFIBzHW5fZpZ5iI,3448 +persistent/persistence.py,sha256=jyRmyWMmtjnVDWG9mN66WR6IxtxBRkaLQUFQuNuj5NY,21596 +persistent/picklecache.py,sha256=KpIX-Fduh3o1ZnFWrhDq-Oi69dmbdOgd6xIm6lP-KuM,14548 +persistent/ring.c,sha256=0U8ubpb9xRuj0E0AkatqvO47BnjVGrpQVEITVmFbAbo,1803 +persistent/ring.h,sha256=RbpXV2O0vgqFwSERFZAkJgU8J17q3C3s-Y_dWQTrAE0,2639 +persistent/ring.py,sha256=3wHmIpYHe8lmUg3n_vIrEyK8TrNVEkunPVQ7cflQi_o,7177 +persistent/tests/__init__.py,sha256=UnxmjVrk-eNORsitiM48W0pB6yfsaErOak8RYh_ELt8,10 +persistent/tests/__pycache__/__init__.cpython-36.pyc,, +persistent/tests/__pycache__/attrhooks.cpython-36.pyc,, +persistent/tests/__pycache__/cucumbers.cpython-36.pyc,, +persistent/tests/__pycache__/test_docs.cpython-36.pyc,, +persistent/tests/__pycache__/test_list.cpython-36.pyc,, +persistent/tests/__pycache__/test_mapping.cpython-36.pyc,, +persistent/tests/__pycache__/test_persistence.cpython-36.pyc,, +persistent/tests/__pycache__/test_picklecache.cpython-36.pyc,, 
+persistent/tests/__pycache__/test_ring.cpython-36.pyc,, +persistent/tests/__pycache__/test_timestamp.cpython-36.pyc,, +persistent/tests/__pycache__/test_wref.cpython-36.pyc,, +persistent/tests/__pycache__/utils.cpython-36.pyc,, +persistent/tests/attrhooks.py,sha256=yhEx6IU8LBY7b3XMf3WzXfTB2Cs6-xpnUCsbQdO3tSk,4214 +persistent/tests/cucumbers.py,sha256=miha0gTITw4DGJiXO3H8XbUJ0uOBz_RiQPz1CBD8FeY,2790 +persistent/tests/test_docs.py,sha256=AAWAnklWFyu_uE_eOtP4hd2CGBzyh39-S4O8sXLPHyc,2000 +persistent/tests/test_list.py,sha256=hdC_tCARO4yEYxNkO34tDAJ3OYO4oblk7kG9wwxxn_4,8666 +persistent/tests/test_mapping.py,sha256=2tJdLTcWePJFm3GMtpPtNyXIOaQYrYiWBWyAki8u5D4,7116 +persistent/tests/test_persistence.py,sha256=JSKKGgrqHt1moEwJUm-plNqObO0AiN-7vY0eDz93xCM,73608 +persistent/tests/test_picklecache.py,sha256=KHmm5kY0VKtwB8quzrrWkfNGGpzeJEkb2idzXrE3Lxk,33089 +persistent/tests/test_ring.py,sha256=crzufZuvFI8v3-Kd9OT6DtxVynEXyeuJey0Lh-tcRlM,4024 +persistent/tests/test_timestamp.py,sha256=e-fKUndAjs9__Sh1I1ow3SMX6sBNt9g91dBDeynvWZw,15625 +persistent/tests/test_wref.py,sha256=RU1zL2Npf56DpDQw3-0ckTyEfJXLS5gb0IIGPDwq6B0,11307 +persistent/tests/utils.py,sha256=gx8tbQVsqgvRBRTPLUuDVRPfiEG9alwLINBMEj05C9s,1999 +persistent/timestamp.py,sha256=ku8zhYFHjFRSx48G-Ufj7DZcekF0BU4c9yckFpE-8nk,6665 +persistent/wref.py,sha256=JTWdsb4YNEbyqQl-za-SkiYZcmLPhPByruqA9SWn0rM,4099 diff --git a/thesisenv/lib/python3.6/site-packages/persistent-4.4.3.dist-info/WHEEL b/thesisenv/lib/python3.6/site-packages/persistent-4.4.3.dist-info/WHEEL new file mode 100644 index 0000000..d0537bf --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/persistent-4.4.3.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.32.2) +Root-Is-Purelib: false +Tag: cp36-cp36m-macosx_10_6_intel + diff --git a/thesisenv/lib/python3.6/site-packages/persistent-4.4.3.dist-info/top_level.txt b/thesisenv/lib/python3.6/site-packages/persistent-4.4.3.dist-info/top_level.txt new file mode 100644 index 
0000000..f050bcc --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/persistent-4.4.3.dist-info/top_level.txt @@ -0,0 +1 @@ +persistent diff --git a/thesisenv/lib/python3.6/site-packages/persistent/__init__.py b/thesisenv/lib/python3.6/site-packages/persistent/__init__.py new file mode 100644 index 0000000..e803f4c --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/persistent/__init__.py @@ -0,0 +1,63 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## +"""Prefer C implementations of Persistent / PickleCache / TimeStamp. + +Fall back to pure Python implementations. +""" + +import sys + +__all__ = [ + 'IPersistent', + 'Persistent', + 'GHOST', + 'UPTODATE', + 'CHANGED', + 'STICKY', + 'PickleCache', + 'TimeStamp', +] +from persistent._compat import PURE_PYTHON +from persistent.interfaces import IPersistent + +import persistent.timestamp as TimeStamp + +from persistent import persistence as pyPersistence +from persistent import picklecache as pyPickleCache + +try: + # Be careful not to shadow the modules + from persistent import cPersistence as _cPersistence + from persistent import cPickleCache as _cPickleCache +except ImportError: # pragma: no cover + _cPersistence = None + _cPickleCache = None +else: + # Make an interface declaration for Persistent + # Note that the Python version already does this. 
+ from zope.interface import classImplements + classImplements(_cPersistence.Persistent, IPersistent) + + +_persistence = pyPersistence if PURE_PYTHON or _cPersistence is None else _cPersistence +_picklecache = pyPickleCache if PURE_PYTHON or _cPickleCache is None else _cPickleCache + +Persistent = _persistence.Persistent +GHOST = _persistence.GHOST +UPTODATE = _persistence.UPTODATE +CHANGED = _persistence.CHANGED +STICKY = _persistence.STICKY +PickleCache = _picklecache.PickleCache + +sys.modules['persistent.TimeStamp'] = sys.modules['persistent.timestamp'] diff --git a/thesisenv/lib/python3.6/site-packages/persistent/_compat.h b/thesisenv/lib/python3.6/site-packages/persistent/_compat.h new file mode 100644 index 0000000..2d9b6f6 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/persistent/_compat.h @@ -0,0 +1,48 @@ +/***************************************************************************** + + Copyright (c) 2012 Zope Foundation and Contributors. + All Rights Reserved. + + This software is subject to the provisions of the Zope Public License, + Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+ THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED + WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED + WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS + FOR A PARTICULAR PURPOSE + + ****************************************************************************/ + +#ifndef PERSISTENT__COMPAT_H +#define PERSISTENT__COMPAT_H + +#include "Python.h" + +#if PY_MAJOR_VERSION >= 3 +#define PY3K +#endif + +#ifdef PY3K +#define INTERN PyUnicode_InternFromString +#define INTERN_INPLACE PyUnicode_InternInPlace +#define NATIVE_CHECK_EXACT PyUnicode_CheckExact +#define NATIVE_FROM_STRING_AND_SIZE PyUnicode_FromStringAndSize + +#define Py_TPFLAGS_HAVE_RICHCOMPARE 0 + +#define INT_FROM_LONG(x) PyLong_FromLong(x) +#define INT_CHECK(x) PyLong_Check(x) +#define INT_AS_LONG(x) PyLong_AS_LONG(x) +#define CAPI_CAPSULE_NAME "persistent.cPersistence.CAPI" + +#else +#define INTERN PyString_InternFromString +#define INTERN_INPLACE PyString_InternInPlace +#define NATIVE_CHECK_EXACT PyString_CheckExact +#define NATIVE_FROM_STRING_AND_SIZE PyString_FromStringAndSize + +#define INT_FROM_LONG(x) PyInt_FromLong(x) +#define INT_CHECK(x) PyInt_Check(x) +#define INT_AS_LONG(x) PyInt_AS_LONG(x) +#endif + +#endif diff --git a/thesisenv/lib/python3.6/site-packages/persistent/_compat.py b/thesisenv/lib/python3.6/site-packages/persistent/_compat.py new file mode 100644 index 0000000..7c79573 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/persistent/_compat.py @@ -0,0 +1,37 @@ +############################################################################## +# +# Copyright (c) 2012 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## + +import sys +import os + +PURE_PYTHON = os.environ.get('PURE_PYTHON') + +if sys.version_info[0] > 2: + import copyreg as copy_reg + from collections import UserDict as IterableUserDict + from collections import UserList + from sys import intern + + PYTHON3 = True + PYTHON2 = False + +else: # pragma: no cover + import copy_reg + from UserDict import IterableUserDict + from UserList import UserList + + PYTHON3 = False + PYTHON2 = True + + intern = intern diff --git a/thesisenv/lib/python3.6/site-packages/persistent/_ring.abi3.so b/thesisenv/lib/python3.6/site-packages/persistent/_ring.abi3.so new file mode 100755 index 0000000..877bb9d Binary files /dev/null and b/thesisenv/lib/python3.6/site-packages/persistent/_ring.abi3.so differ diff --git a/thesisenv/lib/python3.6/site-packages/persistent/_ring_build.py b/thesisenv/lib/python3.6/site-packages/persistent/_ring_build.py new file mode 100644 index 0000000..b5326cb --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/persistent/_ring_build.py @@ -0,0 +1,31 @@ +# -*- coding: utf-8 -*- +############################################################################## +# +# Copyright (c) 2018 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. 
+# +############################################################################## +from __future__ import absolute_import, print_function, division + +import os +from cffi import FFI + +this_dir = os.path.dirname(os.path.abspath(__file__)) + +ffi = FFI() +with open(os.path.join(this_dir, 'ring.h')) as f: + ffi.cdef(f.read()) + +ffi.set_source('persistent._ring', + '#include "ring.c"', + include_dirs=[this_dir]) + +if __name__ == '__main__': + ffi.compile() diff --git a/thesisenv/lib/python3.6/site-packages/persistent/_timestamp.c b/thesisenv/lib/python3.6/site-packages/persistent/_timestamp.c new file mode 100644 index 0000000..ee7b66a --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/persistent/_timestamp.c @@ -0,0 +1,619 @@ +/***************************************************************************** + + Copyright (c) 2001, 2004 Zope Foundation and Contributors. + All Rights Reserved. + + This software is subject to the provisions of the Zope Public License, + Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. + THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED + WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED + WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS + FOR A PARTICULAR PURPOSE + + ****************************************************************************/ + +#include "Python.h" +#include "bytesobject.h" +#include +#include "_compat.h" + + +PyObject *TimeStamp_FromDate(int, int, int, int, int, double); +PyObject *TimeStamp_FromString(const char *); + +static char TimeStampModule_doc[] = +"A 64-bit TimeStamp used as a ZODB serial number.\n" +"\n" +"$Id$\n"; + + +/* A magic constant having the value 0.000000013969839. 
When an + number of seconds between 0 and 59 is *divided* by this number, we get + a number between 0 (for 0), 71582786 (for 1) and 4223384393 (for 59), + all of which can be represented in a 32-bit unsigned integer, suitable + for packing into 4 bytes using `TS_PACK_UINT32_INTO_BYTES`. + To get (close to) the original seconds back, use + `TS_UNPACK_UINT32_FROM_BYTES` and *multiply* by this number. + */ +#define TS_SECOND_BYTES_BIAS ((double)((double)60) / ((double)(0x10000)) / ((double)(0x10000))) +#define TS_BASE_YEAR 1900 +#define TS_MINUTES_PER_DAY 1440 +/* We pretend there are always 31 days in a month; this has us using + 372 days in a year in some calculations */ +#define TS_DAYS_PER_MONTH 31 +#define TS_MONTHS_PER_YEAR 12 +#define TS_MINUTES_PER_MONTH (TS_DAYS_PER_MONTH * TS_MINUTES_PER_DAY) +#define TS_MINUTES_PER_YEAR (TS_MINUTES_PER_MONTH * TS_MONTHS_PER_YEAR) + +/* The U suffixes matter on these constants to be sure + the compiler generates the appropriate instructions when + optimizations are enabled. On x86_64 GCC, if -fno-wrapv is given + and -O is used, the compiler might choose to treat these as 32 bit + signed quantities otherwise, producing incorrect results on + some corner cases. See + https://github.com/zopefoundation/persistent/issues/86 +*/ + +/** + * Given an unsigned int *v*, pack it into the four + * unsigned char bytes beginning at *bytes*. If *v* is larger + * than 2^31 (i.e., it doesn't fit in 32 bits), the results will + * be invalid (the first byte will be 0.) + * + * The inverse is `TS_UNPACK_UINT32_FROM_BYTES`. This is a + * lossy operation and may lose some lower-order precision. 
+ * + */ +#define TS_PACK_UINT32_INTO_BYTES(v, bytes) do { \ + *(bytes) = v / 0x1000000U; \ + *(bytes + 1) = (v % 0x1000000U) / 0x10000U; \ + *(bytes + 2) = (v % 0x10000U) / 0x100U; \ + *(bytes + 3) = v % 0x100U; \ +} while (0) + +/** + * Given a sequence of four unsigned chars beginning at *bytes* + * as produced by `TS_PACK_UINT32_INTO_BYTES`, return the + * original unsigned int. + * + * Remember this is a lossy operation, and the value you get back + * may not exactly match the original value. If the original value + * was greater than 2^31 it will definitely not match. + */ +#define TS_UNPACK_UINT32_FROM_BYTES(bytes) (*(bytes) * 0x1000000U + *(bytes + 1) * 0x10000U + *(bytes + 2) * 0x100U + *(bytes + 3)) + +typedef struct +{ + PyObject_HEAD + /* + The first four bytes of data store the year, month, day, hour, and + minute as the number of minutes since Jan 1 00:00. + + The final four bytes store the seconds since 00:00 as + the number of microseconds. + + Both are normalized into those four bytes the same way with + TS_[UN]PACK_UINT32_INTO|FROM_BYTES. 
+ */ + + unsigned char data[8]; +} TimeStamp; + +/* The first dimension of the arrays below is non-leapyear / leapyear */ + +static char month_len[2][12] = +{ + {31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31}, + {31, 29, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31} +}; + +static short joff[2][12] = +{ + {0, 31, 59, 90, 120, 151, 181, 212, 243, 273, 304, 334}, + {0, 31, 60, 91, 121, 152, 182, 213, 244, 274, 305, 335} +}; + +static double gmoff=0; + + +static int +leap(int year) +{ + return year % 4 == 0 && (year % 100 != 0 || year % 400 == 0); +} + +static int +days_in_month(int year, int month) +{ + return month_len[leap(year)][month]; +} + +static double +TimeStamp_yad(int y) +{ + double d, s; + + y -= TS_BASE_YEAR; + + d = (y - 1) * 365; + if (y > 0) { + s = 1.0; + y -= 1; + } else { + s = -1.0; + y = -y; + } + return d + s * (y / 4 - y / 100 + (y + 300) / 400); +} + +static double +TimeStamp_abst(int y, int mo, int d, int m, int s) +{ + return (TimeStamp_yad(y) + joff[leap(y)][mo] + d) * 86400 + m * 60 + s; +} + +static int +TimeStamp_init_gmoff(void) +{ + struct tm *t; + time_t z=0; + + t = gmtime(&z); + if (t == NULL) + { + PyErr_SetString(PyExc_SystemError, "gmtime failed"); + return -1; + } + + gmoff = TimeStamp_abst(t->tm_year + TS_BASE_YEAR, t->tm_mon, t->tm_mday - 1, + t->tm_hour * 60 + t->tm_min, t->tm_sec); + + return 0; +} + +static void +TimeStamp_dealloc(TimeStamp *ts) +{ + PyObject_Del(ts); +} + +static PyObject* +TimeStamp_richcompare(TimeStamp *self, TimeStamp *other, int op) +{ + PyObject *result = NULL; + int cmp; + + if (Py_TYPE(other) != Py_TYPE(self)) + { + result = Py_NotImplemented; + } + else + { + cmp = memcmp(self->data, other->data, 8); + switch (op) { + case Py_LT: + result = (cmp < 0) ? Py_True : Py_False; + break; + case Py_LE: + result = (cmp <= 0) ? Py_True : Py_False; + break; + case Py_EQ: + result = (cmp == 0) ? Py_True : Py_False; + break; + case Py_NE: + result = (cmp != 0) ? 
Py_True : Py_False; + break; + case Py_GT: + result = (cmp > 0) ? Py_True : Py_False; + break; + case Py_GE: + result = (cmp >= 0) ? Py_True : Py_False; + break; + } + } + + Py_XINCREF(result); + return result; +} + + +#ifdef PY3K +static Py_hash_t +#else +static long +#endif +TimeStamp_hash(TimeStamp *self) +{ + register unsigned char *p = (unsigned char *)self->data; + register int len = 8; + register long x = *p << 7; + while (--len >= 0) + x = (1000003*x) ^ *p++; + x ^= 8; + if (x == -1) + x = -2; + return x; +} + +typedef struct +{ + /* TODO: reverse-engineer what's in these things and comment them */ + int y; + int m; + int d; + int mi; +} TimeStampParts; + + +static void +TimeStamp_unpack(TimeStamp *self, TimeStampParts *p) +{ + unsigned int minutes_since_base; + + minutes_since_base = TS_UNPACK_UINT32_FROM_BYTES(self->data); + p->y = minutes_since_base / TS_MINUTES_PER_YEAR + TS_BASE_YEAR; + p->m = (minutes_since_base % TS_MINUTES_PER_YEAR) / TS_MINUTES_PER_MONTH + 1; + p->d = (minutes_since_base % TS_MINUTES_PER_MONTH) / TS_MINUTES_PER_DAY + 1; + p->mi = minutes_since_base % TS_MINUTES_PER_DAY; +} + +static double +TimeStamp_sec(TimeStamp *self) +{ + unsigned int v; + + v = TS_UNPACK_UINT32_FROM_BYTES(self->data +4); + return TS_SECOND_BYTES_BIAS * v; +} + +static PyObject * +TimeStamp_year(TimeStamp *self) +{ + TimeStampParts p; + TimeStamp_unpack(self, &p); + return INT_FROM_LONG(p.y); +} + +static PyObject * +TimeStamp_month(TimeStamp *self) +{ + TimeStampParts p; + TimeStamp_unpack(self, &p); + return INT_FROM_LONG(p.m); +} + +static PyObject * +TimeStamp_day(TimeStamp *self) +{ + TimeStampParts p; + TimeStamp_unpack(self, &p); + return INT_FROM_LONG(p.d); +} + +static PyObject * +TimeStamp_hour(TimeStamp *self) +{ + TimeStampParts p; + TimeStamp_unpack(self, &p); + return INT_FROM_LONG(p.mi / 60); +} + +static PyObject * +TimeStamp_minute(TimeStamp *self) +{ + TimeStampParts p; + TimeStamp_unpack(self, &p); + return INT_FROM_LONG(p.mi % 60); +} + 
+static PyObject * +TimeStamp_second(TimeStamp *self) +{ + return PyFloat_FromDouble(TimeStamp_sec(self)); +} + +static PyObject * +TimeStamp_timeTime(TimeStamp *self) +{ + TimeStampParts p; + TimeStamp_unpack(self, &p); + return PyFloat_FromDouble(TimeStamp_abst(p.y, p.m - 1, p.d - 1, p.mi, 0) + + TimeStamp_sec(self) - gmoff); +} + +static PyObject * +TimeStamp_raw(TimeStamp *self) +{ + return PyBytes_FromStringAndSize((const char*)self->data, 8); +} + +static PyObject * +TimeStamp_repr(TimeStamp *self) +{ + PyObject *raw, *result; + raw = TimeStamp_raw(self); + result = PyObject_Repr(raw); + Py_DECREF(raw); + return result; +} + +static PyObject * +TimeStamp_str(TimeStamp *self) +{ + char buf[128]; + TimeStampParts p; + int len; + + TimeStamp_unpack(self, &p); + len =sprintf(buf, "%4.4d-%2.2d-%2.2d %2.2d:%2.2d:%09.6f", + p.y, p.m, p.d, p.mi / 60, p.mi % 60, + TimeStamp_sec(self)); + + return NATIVE_FROM_STRING_AND_SIZE(buf, len); +} + + +static PyObject * +TimeStamp_laterThan(TimeStamp *self, PyObject *obj) +{ + TimeStamp *o = NULL; + TimeStampParts p; + unsigned char new[8]; + int i; + + if (Py_TYPE(obj) != Py_TYPE(self)) + { + PyErr_SetString(PyExc_TypeError, "expected TimeStamp object"); + return NULL; + } + o = (TimeStamp *)obj; + if (memcmp(self->data, o->data, 8) > 0) + { + Py_INCREF(self); + return (PyObject *)self; + } + + memcpy(new, o->data, 8); + for (i = 7; i > 3; i--) + { + if (new[i] == 255) + new[i] = 0; + else + { + new[i]++; + return TimeStamp_FromString((const char*)new); + } + } + + /* All but the first two bytes are the same. Need to increment + the year, month, and day explicitly. 
*/ + TimeStamp_unpack(o, &p); + if (p.mi >= 1439) + { + p.mi = 0; + if (p.d == month_len[leap(p.y)][p.m - 1]) + { + p.d = 1; + if (p.m == 12) + { + p.m = 1; + p.y++; + } + else + p.m++; + } + else + p.d++; + } + else + p.mi++; + + return TimeStamp_FromDate(p.y, p.m, p.d, p.mi / 60, p.mi % 60, 0); +} + +static struct PyMethodDef TimeStamp_methods[] = +{ + {"year", (PyCFunction)TimeStamp_year, METH_NOARGS}, + {"minute", (PyCFunction)TimeStamp_minute, METH_NOARGS}, + {"month", (PyCFunction)TimeStamp_month, METH_NOARGS}, + {"day", (PyCFunction)TimeStamp_day, METH_NOARGS}, + {"hour", (PyCFunction)TimeStamp_hour, METH_NOARGS}, + {"second", (PyCFunction)TimeStamp_second, METH_NOARGS}, + {"timeTime", (PyCFunction)TimeStamp_timeTime, METH_NOARGS}, + {"laterThan", (PyCFunction)TimeStamp_laterThan, METH_O}, + {"raw", (PyCFunction)TimeStamp_raw, METH_NOARGS}, + {NULL, NULL}, +}; + +#define DEFERRED_ADDRESS(ADDR) 0 + +static PyTypeObject TimeStamp_type = +{ + PyVarObject_HEAD_INIT(DEFERRED_ADDRESS(NULL), 0) + "persistent.TimeStamp", + sizeof(TimeStamp), /* tp_basicsize */ + 0, /* tp_itemsize */ + (destructor)TimeStamp_dealloc, /* tp_dealloc */ + 0, /* tp_print */ + 0, /* tp_getattr */ + 0, /* tp_setattr */ + 0, /* tp_compare */ + (reprfunc)TimeStamp_repr, /* tp_repr */ + 0, /* tp_as_number */ + 0, /* tp_as_sequence */ + 0, /* tp_as_mapping */ + (hashfunc)TimeStamp_hash, /* tp_hash */ + 0, /* tp_call */ + (reprfunc)TimeStamp_str, /* tp_str */ + 0, /* tp_getattro */ + 0, /* tp_setattro */ + 0, /* tp_as_buffer */ + Py_TPFLAGS_DEFAULT | + Py_TPFLAGS_BASETYPE | + Py_TPFLAGS_HAVE_RICHCOMPARE, /* tp_flags */ + 0, /* tp_doc */ + 0, /* tp_traverse */ + 0, /* tp_clear */ + (richcmpfunc)&TimeStamp_richcompare, /* tp_richcompare */ + 0, /* tp_weaklistoffset */ + 0, /* tp_iter */ + 0, /* tp_iternext */ + TimeStamp_methods, /* tp_methods */ + 0, /* tp_members */ + 0, /* tp_getset */ + 0, /* tp_base */ + 0, /* tp_dict */ + 0, /* tp_descr_get */ + 0, /* tp_descr_set */ +}; + +PyObject * 
+TimeStamp_FromString(const char *buf) +{ + /* buf must be exactly 8 characters */ + TimeStamp *ts = (TimeStamp *)PyObject_New(TimeStamp, &TimeStamp_type); + memcpy(ts->data, buf, 8); + return (PyObject *)ts; +} + +#define CHECK_RANGE(VAR, LO, HI) if ((VAR) < (LO) || (VAR) > (HI)) { \ + return PyErr_Format(PyExc_ValueError, \ + # VAR " must be between %d and %d: %d", \ + (LO), (HI), (VAR)); \ + } + +PyObject * +TimeStamp_FromDate(int year, int month, int day, int hour, int min, + double sec) +{ + + TimeStamp *ts = NULL; + int d; + unsigned int years_since_base; + unsigned int months_since_base; + unsigned int days_since_base; + unsigned int hours_since_base; + unsigned int minutes_since_base; + unsigned int v; + + if (year < TS_BASE_YEAR) + return PyErr_Format(PyExc_ValueError, + "year must be greater than %d: %d", TS_BASE_YEAR, year); + CHECK_RANGE(month, 1, 12); + d = days_in_month(year, month - 1); + if (day < 1 || day > d) + return PyErr_Format(PyExc_ValueError, + "day must be between 1 and %d: %d", d, day); + CHECK_RANGE(hour, 0, 23); + CHECK_RANGE(min, 0, 59); + /* Seconds are allowed to be anything, so chill + If we did want to be pickly, 60 would be a better choice. 
+ if (sec < 0 || sec > 59) + return PyErr_Format(PyExc_ValueError, + "second must be between 0 and 59: %f", sec); + */ + ts = (TimeStamp *)PyObject_New(TimeStamp, &TimeStamp_type); + /* months come in 1-based, hours and minutes come in 0-based */ + /* The base time is Jan 1, 00:00 of TS_BASE_YEAR */ + years_since_base = year - TS_BASE_YEAR; + months_since_base = years_since_base * TS_MONTHS_PER_YEAR + (month - 1); + days_since_base = months_since_base * TS_DAYS_PER_MONTH + (day - 1); + hours_since_base = days_since_base * 24 + hour; + minutes_since_base = hours_since_base * 60 + min; + + TS_PACK_UINT32_INTO_BYTES(minutes_since_base, ts->data); + + sec /= TS_SECOND_BYTES_BIAS; + v = (unsigned int)sec; + TS_PACK_UINT32_INTO_BYTES(v, ts->data + 4); + return (PyObject *)ts; +} + +PyObject * +TimeStamp_TimeStamp(PyObject *obj, PyObject *args) +{ + char *buf = NULL; + int len = 0, y, mo, d, h = 0, m = 0; + double sec = 0; + +#ifdef PY3K + if (PyArg_ParseTuple(args, "y#", &buf, &len)) +#else + if (PyArg_ParseTuple(args, "s#", &buf, &len)) +#endif + { + if (len != 8) + { + PyErr_SetString(PyExc_ValueError, + "8-byte array expected"); + return NULL; + } + return TimeStamp_FromString(buf); + } + PyErr_Clear(); + + if (!PyArg_ParseTuple(args, "iii|iid", &y, &mo, &d, &h, &m, &sec)) + return NULL; + return TimeStamp_FromDate(y, mo, d, h, m, sec); +} + +static PyMethodDef TimeStampModule_functions[] = +{ + {"TimeStamp", TimeStamp_TimeStamp, METH_VARARGS}, + {NULL, NULL}, +}; + +#ifdef PY3K +static struct PyModuleDef moduledef = +{ + PyModuleDef_HEAD_INIT, + "_timestamp", /* m_name */ + TimeStampModule_doc, /* m_doc */ + -1, /* m_size */ + TimeStampModule_functions, /* m_methods */ + NULL, /* m_reload */ + NULL, /* m_traverse */ + NULL, /* m_clear */ + NULL, /* m_free */ +}; +#endif + + +static PyObject* +module_init(void) +{ + PyObject *module; + + if (TimeStamp_init_gmoff() < 0) + return NULL; + +#ifdef PY3K + module = PyModule_Create(&moduledef); +#else + module = 
Py_InitModule4("_timestamp", TimeStampModule_functions, + TimeStampModule_doc, NULL, PYTHON_API_VERSION); +#endif + if (module == NULL) + return NULL; + +#ifdef PY3K + ((PyObject*)&TimeStamp_type)->ob_type = &PyType_Type; +#else + TimeStamp_type.ob_type = &PyType_Type; +#endif + TimeStamp_type.tp_getattro = PyObject_GenericGetAttr; + + return module; +} + +#ifdef PY3K +PyMODINIT_FUNC PyInit__timestamp(void) +{ + return module_init(); +} +#else +PyMODINIT_FUNC init_timestamp(void) +{ + module_init(); +} +#endif diff --git a/thesisenv/lib/python3.6/site-packages/persistent/_timestamp.cpython-36m-darwin.so b/thesisenv/lib/python3.6/site-packages/persistent/_timestamp.cpython-36m-darwin.so new file mode 100755 index 0000000..c0da286 Binary files /dev/null and b/thesisenv/lib/python3.6/site-packages/persistent/_timestamp.cpython-36m-darwin.so differ diff --git a/thesisenv/lib/python3.6/site-packages/persistent/cPersistence.c b/thesisenv/lib/python3.6/site-packages/persistent/cPersistence.c new file mode 100644 index 0000000..285361e --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/persistent/cPersistence.c @@ -0,0 +1,1799 @@ +/***************************************************************************** + + Copyright (c) 2001, 2002 Zope Foundation and Contributors. + All Rights Reserved. + + This software is subject to the provisions of the Zope Public License, + Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+ THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED + WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED + WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS + FOR A PARTICULAR PURPOSE + +****************************************************************************/ +static char cPersistence_doc_string[] = + "Defines Persistent mixin class for persistent objects.\n" + "\n" + "$Id$\n"; + +#include "cPersistence.h" +#include "structmember.h" + +struct ccobject_head_struct +{ + CACHE_HEAD +}; + +/* + The compiler on Windows used for Python 2.7 doesn't include + stdint.h. +*/ +#if !defined(PY3K) && defined(_WIN32) +typedef unsigned long long uint64_t; +#else +#include +#endif + +/* These two objects are initialized when the module is loaded */ +static PyObject *TimeStamp, *py_simple_new; + +/* Strings initialized by init_strings() below. */ +static PyObject *py_keys, *py_setstate, *py___dict__, *py_timeTime; +static PyObject *py__p_changed, *py__p_deactivate; +static PyObject *py___getattr__, *py___setattr__, *py___delattr__; +static PyObject *py___slotnames__, *copy_reg_slotnames, *__newobj__; +static PyObject *py___getnewargs__, *py___getstate__; +static PyObject *py_unsaved, *py_ghost, *py_saved, *py_changed, *py_sticky; + + +static int +init_strings(void) +{ +#define INIT_STRING(S) \ + if (!(py_ ## S = INTERN(#S))) \ + return -1; + INIT_STRING(keys); + INIT_STRING(setstate); + INIT_STRING(timeTime); + INIT_STRING(__dict__); + INIT_STRING(_p_changed); + INIT_STRING(_p_deactivate); + INIT_STRING(__getattr__); + INIT_STRING(__setattr__); + INIT_STRING(__delattr__); + INIT_STRING(__slotnames__); + INIT_STRING(__getnewargs__); + INIT_STRING(__getstate__); + INIT_STRING(unsaved); + INIT_STRING(ghost); + INIT_STRING(saved); + INIT_STRING(changed); + INIT_STRING(sticky); +#undef INIT_STRING + return 0; +} + +#ifdef Py_DEBUG +static void +fatal_1350(cPersistentObject *self, const char *caller, const char *detail) +{ 
+ char buf[1000]; + + PyOS_snprintf(buf, sizeof(buf), + "cPersistence.c %s(): object at %p with type %.200s\n" + "%s.\n" + "The only known cause is multiple threads trying to ghost and\n" + "unghost the object simultaneously.\n" + "That's not legal, but ZODB can't stop it.\n" + "See Collector #1350.\n", + caller, self, Py_TYPE(self)->tp_name, detail); + Py_FatalError(buf); +} +#endif + +static void ghostify(cPersistentObject*); +static PyObject * pickle_slotnames(PyTypeObject *cls); + +static PyObject * convert_name(PyObject *name); + +/* Load the state of the object, unghostifying it. Upon success, return 1. + * If an error occurred, re-ghostify the object and return -1. + */ +static int +unghostify(cPersistentObject *self) +{ + if (self->state < 0 && self->jar) + { + PyObject *r; + + /* Is it ever possible to not have a cache? */ + if (self->cache) + { + /* Create a node in the ring for this unghostified object. */ + self->cache->non_ghost_count++; + self->cache->total_estimated_size += + _estimated_size_in_bytes(self->estimated_size); + ring_add(&self->cache->ring_home, &self->ring); + Py_INCREF(self); + } + /* set state to CHANGED while setstate() call is in progress + to prevent a recursive call to _PyPersist_Load(). 
+ */ + self->state = cPersistent_CHANGED_STATE; + /* Call the object's __setstate__() */ + r = PyObject_CallMethod(self->jar, "setstate", "O", (PyObject *)self); + if (r == NULL) + { + ghostify(self); + return -1; + } + self->state = cPersistent_UPTODATE_STATE; + Py_DECREF(r); + if (self->cache && self->ring.r_next == NULL) + { +#ifdef Py_DEBUG + fatal_1350(self, "unghostify", + "is not in the cache despite that we just " + "unghostified it"); +#else + PyErr_Format(PyExc_SystemError, "object at %p with type " + "%.200s not in the cache despite that we just " + "unghostified it", self, Py_TYPE(self)->tp_name); + return -1; +#endif + } + } + return 1; +} + +/****************************************************************************/ + +static PyTypeObject Pertype; + +static void +accessed(cPersistentObject *self) +{ + /* Do nothing unless the object is in a cache and not a ghost. */ + if (self->cache && self->state >= 0 && self->ring.r_next) + ring_move_to_head(&self->cache->ring_home, &self->ring); +} + +static void +ghostify(cPersistentObject *self) +{ + PyObject **dictptr, *slotnames; + PyObject *errtype, *errvalue, *errtb; + + /* are we already a ghost? */ + if (self->state == cPersistent_GHOST_STATE) + return; + + /* Is it ever possible to not have a cache? */ + if (self->cache == NULL) + { + self->state = cPersistent_GHOST_STATE; + return; + } + + if (self->ring.r_next == NULL) + { + /* There's no way to raise an error in this routine. */ +#ifdef Py_DEBUG + fatal_1350(self, "ghostify", "claims to be in a cache but isn't"); +#else + return; +#endif + } + + /* If we're ghostifying an object, we better have some non-ghosts. 
*/ + assert(self->cache->non_ghost_count > 0); + self->cache->non_ghost_count--; + self->cache->total_estimated_size -= + _estimated_size_in_bytes(self->estimated_size); + ring_del(&self->ring); + self->state = cPersistent_GHOST_STATE; + + /* clear __dict__ */ + dictptr = _PyObject_GetDictPtr((PyObject *)self); + if (dictptr && *dictptr) + { + Py_DECREF(*dictptr); + *dictptr = NULL; + } + + /* clear all slots besides _p_* + * ( for backward-compatibility reason we do this only if class does not + * override __new__ ) */ + if (Py_TYPE(self)->tp_new == Pertype.tp_new) + { + /* later we might clear an AttributeError but + * if we have a pending exception that still needs to be + * raised so that we don't generate a SystemError. + */ + PyErr_Fetch(&errtype, &errvalue, &errtb); + + slotnames = pickle_slotnames(Py_TYPE(self)); + if (slotnames && slotnames != Py_None) + { + int i; + + for (i = 0; i < PyList_GET_SIZE(slotnames); i++) + { + PyObject *name; + char *cname; + int is_special; + + name = PyList_GET_ITEM(slotnames, i); +#ifdef PY3K + if (PyUnicode_Check(name)) + { + PyObject *converted = convert_name(name); + cname = PyBytes_AS_STRING(converted); +#else + if (PyBytes_Check(name)) + { + cname = PyBytes_AS_STRING(name); +#endif + is_special = !strncmp(cname, "_p_", 3); +#ifdef PY3K + Py_DECREF(converted); +#endif + if (is_special) /* skip persistent */ + { + continue; + } + } + + /* NOTE: this skips our delattr hook */ + if (PyObject_GenericSetAttr((PyObject *)self, name, NULL) < 0) + /* delattr of non-set slot will raise AttributeError - we + * simply ignore. */ + PyErr_Clear(); + } + } + Py_XDECREF(slotnames); + PyErr_Restore(errtype, errvalue, errtb); + } + + /* We remove the reference to the just ghosted object that the ring + * holds. Note that the dictionary of oids->objects has an uncounted + * reference, so if the ring's reference was the only one, this frees + * the ghost object. 
Note further that the object's dealloc knows to + * inform the dictionary that it is going away. + */ + Py_DECREF(self); +} + +static int +changed(cPersistentObject *self) +{ + if ((self->state == cPersistent_UPTODATE_STATE || + self->state == cPersistent_STICKY_STATE) + && self->jar) + { + PyObject *meth, *arg, *result; + static PyObject *s_register; + + if (s_register == NULL) + s_register = INTERN("register"); + meth = PyObject_GetAttr((PyObject *)self->jar, s_register); + if (meth == NULL) + return -1; + arg = PyTuple_New(1); + if (arg == NULL) + { + Py_DECREF(meth); + return -1; + } + Py_INCREF(self); + PyTuple_SET_ITEM(arg, 0, (PyObject *)self); + result = PyEval_CallObject(meth, arg); + Py_DECREF(arg); + Py_DECREF(meth); + if (result == NULL) + return -1; + Py_DECREF(result); + + self->state = cPersistent_CHANGED_STATE; + } + + return 0; +} + +static int +readCurrent(cPersistentObject *self) +{ + if ((self->state == cPersistent_UPTODATE_STATE || + self->state == cPersistent_STICKY_STATE) + && self->jar && self->oid) + { + static PyObject *s_readCurrent=NULL; + PyObject *r; + + if (s_readCurrent == NULL) + s_readCurrent = INTERN("readCurrent"); + + r = PyObject_CallMethodObjArgs(self->jar, s_readCurrent, self, NULL); + if (r == NULL) + return -1; + + Py_DECREF(r); + } + + return 0; +} + +static PyObject * +Per__p_deactivate(cPersistentObject *self) +{ + if (self->state == cPersistent_UPTODATE_STATE && self->jar) + { + PyObject **dictptr = _PyObject_GetDictPtr((PyObject *)self); + if (dictptr && *dictptr) + { + Py_DECREF(*dictptr); + *dictptr = NULL; + } + /* Note that we need to set to ghost state unless we are + called directly. Methods that override this need to + do the same! 
*/ + ghostify(self); + if (PyErr_Occurred()) + return NULL; + } + + Py_INCREF(Py_None); + return Py_None; +} + +static PyObject * +Per__p_activate(cPersistentObject *self) +{ + if (unghostify(self) < 0) + return NULL; + + Py_INCREF(Py_None); + return Py_None; +} + +static int Per_set_changed(cPersistentObject *self, PyObject *v); + +static PyObject * +Per__p_invalidate(cPersistentObject *self) +{ + signed char old_state = self->state; + + if (old_state != cPersistent_GHOST_STATE) + { + if (Per_set_changed(self, NULL) < 0) + return NULL; + ghostify(self); + if (PyErr_Occurred()) + return NULL; + } + Py_INCREF(Py_None); + return Py_None; +} + + +static PyObject * +pickle_slotnames(PyTypeObject *cls) +{ + PyObject *slotnames; + + slotnames = PyDict_GetItem(cls->tp_dict, py___slotnames__); + if (slotnames) + { + int n = PyObject_Not(slotnames); + if (n < 0) + return NULL; + if (n) + slotnames = Py_None; + + Py_INCREF(slotnames); + return slotnames; + } + + slotnames = PyObject_CallFunctionObjArgs(copy_reg_slotnames, + (PyObject*)cls, NULL); + if (slotnames && !(slotnames == Py_None || PyList_Check(slotnames))) + { + PyErr_SetString(PyExc_TypeError, + "copy_reg._slotnames didn't return a list or None"); + Py_DECREF(slotnames); + return NULL; + } + + return slotnames; +} + +static PyObject * +pickle_copy_dict(PyObject *state) +{ + PyObject *copy, *key, *value; + char *ckey; + Py_ssize_t pos = 0; + + copy = PyDict_New(); + if (!copy) + return NULL; + + if (!state) + return copy; + + while (PyDict_Next(state, &pos, &key, &value)) + { + int is_special; +#ifdef PY3K + if (key && PyUnicode_Check(key)) + { + PyObject *converted = convert_name(key); + ckey = PyBytes_AS_STRING(converted); +#else + if (key && PyBytes_Check(key)) + { + ckey = PyBytes_AS_STRING(key); +#endif + is_special = (*ckey == '_' && + (ckey[1] == 'v' || ckey[1] == 'p') && + ckey[2] == '_'); +#ifdef PY3K + Py_DECREF(converted); +#endif + if (is_special) /* skip volatile and persistent */ + continue; + } + + 
if (PyObject_SetItem(copy, key, value) < 0) + goto err; + } + + return copy; +err: + Py_DECREF(copy); + return NULL; +} + + +static char pickle___getstate__doc[] = + "Get the object serialization state\n" + "\n" + "If the object has no assigned slots and has no instance dictionary, then \n" + "None is returned.\n" + "\n" + "If the object has no assigned slots and has an instance dictionary, then \n" + "the a copy of the instance dictionary is returned. The copy has any items \n" + "with names starting with '_v_' or '_p_' ommitted.\n" + "\n" + "If the object has assigned slots, then a two-element tuple is returned. \n" + "The first element is either None or a copy of the instance dictionary, \n" + "as described above. The second element is a dictionary with items \n" + "for each of the assigned slots.\n" + ; + +static PyObject * +pickle___getstate__(PyObject *self) +{ + PyObject *slotnames=NULL, *slots=NULL, *state=NULL; + PyObject **dictp; + int n=0; + + slotnames = pickle_slotnames(Py_TYPE(self)); + if (!slotnames) + return NULL; + + dictp = _PyObject_GetDictPtr(self); + if (dictp) + state = pickle_copy_dict(*dictp); + else + { + state = Py_None; + Py_INCREF(state); + } + + if (slotnames != Py_None) + { + int i; + + slots = PyDict_New(); + if (!slots) + goto end; + + for (i = 0; i < PyList_GET_SIZE(slotnames); i++) + { + PyObject *name, *value; + char *cname; + int is_special; + + name = PyList_GET_ITEM(slotnames, i); +#ifdef PY3K + if (PyUnicode_Check(name)) + { + PyObject *converted = convert_name(name); + cname = PyBytes_AS_STRING(converted); +#else + if (PyBytes_Check(name)) + { + cname = PyBytes_AS_STRING(name); +#endif + is_special = (*cname == '_' && + (cname[1] == 'v' || cname[1] == 'p') && + cname[2] == '_'); +#ifdef PY3K + Py_DECREF(converted); +#endif + if (is_special) /* skip volatile and persistent */ + { + continue; + } + } + + /* Unclear: Will this go through our getattr hook? 
*/ + value = PyObject_GetAttr(self, name); + if (value == NULL) + PyErr_Clear(); + else + { + int err = PyDict_SetItem(slots, name, value); + Py_DECREF(value); + if (err < 0) + goto end; + n++; + } + } + } + + if (n) + state = Py_BuildValue("(NO)", state, slots); + +end: + Py_XDECREF(slotnames); + Py_XDECREF(slots); + + return state; +} + +static int +pickle_setattrs_from_dict(PyObject *self, PyObject *dict) +{ + PyObject *key, *value; + Py_ssize_t pos = 0; + + if (!PyDict_Check(dict)) + { + PyErr_SetString(PyExc_TypeError, "Expected dictionary"); + return -1; + } + + while (PyDict_Next(dict, &pos, &key, &value)) + { + if (PyObject_SetAttr(self, key, value) < 0) + return -1; + } + return 0; +} + +static char pickle___setstate__doc[] = + "Set the object serialization state\n\n" + "The state should be in one of 3 forms:\n\n" + "- None\n\n" + " Ignored\n\n" + "- A dictionary\n\n" + " In this case, the object's instance dictionary will be cleared and \n" + " updated with the new state.\n\n" + "- A two-tuple with a string as the first element. 
\n\n" + " In this case, the method named by the string in the first element will\n" + " be called with the second element.\n\n" + " This form supports migration of data formats.\n\n" + "- A two-tuple with None or a Dictionary as the first element and\n" + " with a dictionary as the second element.\n\n" + " If the first element is not None, then the object's instance dictionary \n" + " will be cleared and updated with the value.\n\n" + " The items in the second element will be assigned as attributes.\n" + ; + +static PyObject * +pickle___setstate__(PyObject *self, PyObject *state) +{ + PyObject *slots=NULL; + + if (PyTuple_Check(state)) + { + if (!PyArg_ParseTuple(state, "OO:__setstate__", &state, &slots)) + return NULL; + } + + if (state != Py_None) + { + PyObject **dict; + PyObject *items; + PyObject *d_key, *d_value; + Py_ssize_t i; + int len; + + dict = _PyObject_GetDictPtr(self); + + if (!dict) + { + PyErr_SetString(PyExc_TypeError, + "this object has no instance dictionary"); + return NULL; + } + + if (!*dict) + { + *dict = PyDict_New(); + if (!*dict) + return NULL; + } + + PyDict_Clear(*dict); + + if (PyDict_CheckExact(state)) + { + i = 0; + while (PyDict_Next(state, &i, &d_key, &d_value)) { + /* normally the keys for instance attributes are + interned. we should try to do that here. */ + if (NATIVE_CHECK_EXACT(d_key)) { + Py_INCREF(d_key); + INTERN_INPLACE(&d_key); + Py_DECREF(d_key); + } + if (PyObject_SetItem(*dict, d_key, d_value) < 0) + return NULL; + } + } + else + { + /* can happen that not a built-in dict is passed as state + fall back to iterating over items, instead of silently + failing with PyDict_Next */ + items = PyMapping_Items(state); + if (items == NULL) + return NULL; + len = PySequence_Size(items); + if (len < 0) + { + Py_DECREF(items); + return NULL; + } + for ( i=0; istate >= 0) + { + /* If the cache has been cleared, then a non-ghost object + isn't in the ring any longer. 
+ */ + if (self->ring.r_next != NULL) + { + /* if we're ghostifying an object, we better have some non-ghosts */ + assert(self->cache->non_ghost_count > 0); + self->cache->non_ghost_count--; + self->cache->total_estimated_size -= + _estimated_size_in_bytes(self->estimated_size); + ring_del(&self->ring); + } + } + + if (self->cache) + cPersistenceCAPI->percachedel(self->cache, self->oid); + Py_XDECREF(self->cache); + Py_XDECREF(self->jar); + Py_XDECREF(self->oid); + Py_TYPE(self)->tp_free(self); +} + +static int +Per_traverse(cPersistentObject *self, visitproc visit, void *arg) +{ + int err; + +#define VISIT(SLOT) \ + if (SLOT) { \ + err = visit((PyObject *)(SLOT), arg); \ + if (err) \ + return err; \ + } + + VISIT(self->jar); + VISIT(self->oid); + VISIT(self->cache); + +#undef VISIT + return 0; +} + +/* convert_name() returns a new reference to a string name + or sets an exception and returns NULL. +*/ + +static PyObject * +convert_name(PyObject *name) +{ +#ifdef Py_USING_UNICODE + /* The Unicode to string conversion is done here because the + existing tp_setattro slots expect a string object as name + and we wouldn't want to break those. */ + if (PyUnicode_Check(name)) + { + name = PyUnicode_AsEncodedString(name, NULL, NULL); + } + else +#endif + if (!PyBytes_Check(name)) + { + PyErr_SetString(PyExc_TypeError, "attribute name must be a string"); + return NULL; + } + else + Py_INCREF(name); + return name; +} + +/* Returns true if the object requires unghostification. 
+ + There are several special attributes that we allow access to without + requiring that the object be unghostified: + __class__ + __del__ + __dict__ + __of__ + __setstate__ +*/ + +static int +unghost_getattr(const char *s) +{ + if (*s++ != '_') + return 1; + if (*s == 'p') + { + s++; + if (*s == '_') + return 0; /* _p_ */ + else + return 1; + } + else if (*s == '_') + { + s++; + switch (*s) + { + case 'c': + return strcmp(s, "class__"); + case 'd': + s++; + if (!strcmp(s, "el__")) + return 0; /* __del__ */ + if (!strcmp(s, "ict__")) + return 0; /* __dict__ */ + return 1; + case 'o': + return strcmp(s, "of__"); + case 's': + return strcmp(s, "setstate__"); + default: + return 1; + } + } + return 1; +} + +static PyObject* +Per_getattro(cPersistentObject *self, PyObject *name) +{ + PyObject *result = NULL; /* guilty until proved innocent */ + PyObject *converted; + char *s; + + converted = convert_name(name); + if (!converted) + goto Done; + s = PyBytes_AS_STRING(converted); + + if (unghost_getattr(s)) + { + if (unghostify(self) < 0) + goto Done; + accessed(self); + } + result = PyObject_GenericGetAttr((PyObject *)self, name); + +Done: + Py_XDECREF(converted); + return result; +} + +/* Exposed as _p_getattr method. Test whether base getattr should be used */ +static PyObject * +Per__p_getattr(cPersistentObject *self, PyObject *name) +{ + PyObject *result = NULL; /* guilty until proved innocent */ + PyObject *converted; + char *s; + + converted = convert_name(name); + if (!converted) + goto Done; + s = PyBytes_AS_STRING(converted); + + if (*s != '_' || unghost_getattr(s)) + { + if (unghostify(self) < 0) + goto Done; + accessed(self); + result = Py_False; + } + else + result = Py_True; + + Py_INCREF(result); + +Done: + Py_XDECREF(converted); + return result; +} + +/* + TODO: we should probably not allow assignment of __class__ and __dict__. 
+*/ + +static int +Per_setattro(cPersistentObject *self, PyObject *name, PyObject *v) +{ + int result = -1; /* guilty until proved innocent */ + PyObject *converted; + char *s; + + converted = convert_name(name); + if (!converted) + goto Done; + s = PyBytes_AS_STRING(converted); + + if (strncmp(s, "_p_", 3) != 0) + { + if (unghostify(self) < 0) + goto Done; + accessed(self); + if (strncmp(s, "_v_", 3) != 0 + && self->state != cPersistent_CHANGED_STATE) + { + if (changed(self) < 0) + goto Done; + } + } + result = PyObject_GenericSetAttr((PyObject *)self, name, v); + +Done: + Py_XDECREF(converted); + return result; +} + + +static int +Per_p_set_or_delattro(cPersistentObject *self, PyObject *name, PyObject *v) +{ + int result = -1; /* guilty until proved innocent */ + PyObject *converted; + char *s; + + converted = convert_name(name); + if (!converted) + goto Done; + s = PyBytes_AS_STRING(converted); + + if (strncmp(s, "_p_", 3)) + { + if (unghostify(self) < 0) + goto Done; + accessed(self); + + result = 0; + } + else + { + if (PyObject_GenericSetAttr((PyObject *)self, name, v) < 0) + goto Done; + result = 1; + } + +Done: + Py_XDECREF(converted); + return result; +} + +static PyObject * +Per__p_setattr(cPersistentObject *self, PyObject *args) +{ + PyObject *name, *v, *result; + int r; + + if (!PyArg_ParseTuple(args, "OO:_p_setattr", &name, &v)) + return NULL; + + r = Per_p_set_or_delattro(self, name, v); + if (r < 0) + return NULL; + + result = r ? Py_True : Py_False; + Py_INCREF(result); + return result; +} + +static PyObject * +Per__p_delattr(cPersistentObject *self, PyObject *name) +{ + int r; + PyObject *result; + + r = Per_p_set_or_delattro(self, name, NULL); + if (r < 0) + return NULL; + + result = r ? 
Py_True : Py_False; + Py_INCREF(result); + return result; +} + + +static PyObject * +Per_get_changed(cPersistentObject *self) +{ + if (self->state < 0) + { + Py_INCREF(Py_None); + return Py_None; + } + return PyBool_FromLong(self->state == cPersistent_CHANGED_STATE); +} + +static int +Per_set_changed(cPersistentObject *self, PyObject *v) +{ + int deactivate = 0; + int true; + + if (!v) + { + /* delattr is used to invalidate an object even if it has changed. */ + if (self->state != cPersistent_GHOST_STATE) + self->state = cPersistent_UPTODATE_STATE; + deactivate = 1; + } + else if (v == Py_None) + deactivate = 1; + + if (deactivate) + { + PyObject *res, *meth; + meth = PyObject_GetAttr((PyObject *)self, py__p_deactivate); + if (meth == NULL) + return -1; + res = PyObject_CallObject(meth, NULL); + if (res) + Py_DECREF(res); + else + { + /* an error occured in _p_deactivate(). + + It's not clear what we should do here. The code is + obviously ignoring the exception, but it shouldn't return + 0 for a getattr and set an exception. The simplest change + is to clear the exception, but that simply masks the + error. + + This prints an error to stderr just like exceptions in + __del__(). It would probably be better to log it but that + would be painful from C. + */ + PyErr_WriteUnraisable(meth); + } + Py_DECREF(meth); + return 0; + } + /* !deactivate. If passed a true argument, mark self as changed (starting + * with ZODB 3.6, that includes activating the object if it's a ghost). + * If passed a false argument, and the object isn't a ghost, set the + * state as up-to-date. + */ + true = PyObject_IsTrue(v); + if (true == -1) + return -1; + if (true) + { + if (self->state < 0) + { + if (unghostify(self) < 0) + return -1; + } + return changed(self); + } + + /* We were passed a false, non-None argument. If we're not a ghost, + * mark self as up-to-date. 
+ */ + if (self->state >= 0) + self->state = cPersistent_UPTODATE_STATE; + return 0; +} + +static PyObject * +Per_get_oid(cPersistentObject *self) +{ + PyObject *oid = self->oid ? self->oid : Py_None; + Py_INCREF(oid); + return oid; +} + +static int +Per_set_oid(cPersistentObject *self, PyObject *v) +{ + if (self->cache) + { + int result; + + if (v == NULL) + { + PyErr_SetString(PyExc_ValueError, + "can't delete _p_oid of cached object"); + return -1; + } + result = PyObject_RichCompareBool(self->oid, v, Py_NE); + if (result < 0) + return -1; + if (result) + { + PyErr_SetString(PyExc_ValueError, + "can not change _p_oid of cached object"); + return -1; + } + } + Py_XDECREF(self->oid); + Py_XINCREF(v); + self->oid = v; + return 0; +} + +static PyObject * +Per_get_jar(cPersistentObject *self) +{ + PyObject *jar = self->jar ? self->jar : Py_None; + Py_INCREF(jar); + return jar; +} + +static int +Per_set_jar(cPersistentObject *self, PyObject *v) +{ + if (self->cache) + { + int result; + + if (v == NULL) + { + PyErr_SetString(PyExc_ValueError, + "can't delete _p_jar of cached object"); + return -1; + } + result = PyObject_RichCompareBool(self->jar, v, Py_NE); + if (result < 0) + return -1; + if (result) + { + PyErr_SetString(PyExc_ValueError, + "can not change _p_jar of cached object"); + return -1; + } + } + Py_XDECREF(self->jar); + Py_XINCREF(v); + self->jar = v; + return 0; +} + +static PyObject * +Per_get_serial(cPersistentObject *self) +{ + return PyBytes_FromStringAndSize(self->serial, 8); +} + +static int +Per_set_serial(cPersistentObject *self, PyObject *v) +{ + if (v) + { + if (PyBytes_Check(v) && PyBytes_GET_SIZE(v) == 8) + memcpy(self->serial, PyBytes_AS_STRING(v), 8); + else + { + PyErr_SetString(PyExc_ValueError, + "_p_serial must be an 8-character bytes array"); + return -1; + } + } + else + memset(self->serial, 0, 8); + return 0; +} + +static PyObject * +Per_get_mtime(cPersistentObject *self) +{ + PyObject *t, *v; + + if (unghostify(self) < 0) + return 
NULL; + + accessed(self); + + if (memcmp(self->serial, "\0\0\0\0\0\0\0\0", 8) == 0) + { + Py_INCREF(Py_None); + return Py_None; + } + +#ifdef PY3K + t = PyObject_CallFunction(TimeStamp, "y#", self->serial, 8); +#else + t = PyObject_CallFunction(TimeStamp, "s#", self->serial, 8); +#endif + if (!t) + { + return NULL; + } + v = PyObject_CallMethod(t, "timeTime", ""); + Py_DECREF(t); + return v; +} + +static PyObject * +Per_get_state(cPersistentObject *self) +{ + return INT_FROM_LONG(self->state); +} + +static PyObject * +Per_get_estimated_size(cPersistentObject *self) +{ + return INT_FROM_LONG(_estimated_size_in_bytes(self->estimated_size)); +} + +static int +Per_set_estimated_size(cPersistentObject *self, PyObject *v) +{ + if (v) + { + if (INT_CHECK(v)) + { + long lv = INT_AS_LONG(v); + if (lv < 0) + { + PyErr_SetString(PyExc_ValueError, + "_p_estimated_size must not be negative"); + return -1; + } + self->estimated_size = _estimated_size_in_24_bits(lv); + } + else + { + PyErr_SetString(PyExc_TypeError, + "_p_estimated_size must be an integer"); + return -1; + } + } + else + self->estimated_size = 0; + return 0; +} + +static PyObject * +Per_get_status(cPersistentObject *self) +{ + PyObject *result = NULL; + + if (!self->jar) + { + result = py_unsaved; + } else + { + switch (self->state) + { + case cPersistent_GHOST_STATE: + result = py_ghost; + break; + case cPersistent_STICKY_STATE: + result = py_sticky; + break; + case cPersistent_UPTODATE_STATE: + result = py_saved; + break; + case cPersistent_CHANGED_STATE: + result = py_changed; + break; + } + } + + if (result) + { + Py_INCREF(result); + } + return result; +} + +static PyObject* +Per_get_sticky(cPersistentObject *self) +{ + return PyBool_FromLong(self->state == cPersistent_STICKY_STATE); +} + +static int +Per_set_sticky(cPersistentObject *self, PyObject* value) +{ + if (self->state < 0) + { + PyErr_SetString(PyExc_ValueError, + "can't set sticky flag on a ghost"); + return -1; + } + if (self->jar) + { + if 
(PyObject_IsTrue(value)) + { + self->state = cPersistent_STICKY_STATE; + } else { + self->state = cPersistent_UPTODATE_STATE; + } + } + return 0; +} + +static PyObject* +repr_format_exception(char* format) +{ + /* If an exception we should catch occurred, return a new + string of its repr. Otherwise, return NULL. */ + PyObject *exc_t; + PyObject *exc_v; + PyObject *exc_tb; + PyObject *result = NULL; + + if (PyErr_Occurred() && PyErr_ExceptionMatches(PyExc_Exception)) + { + PyErr_Fetch(&exc_t, &exc_v, &exc_tb); + PyErr_NormalizeException(&exc_t, &exc_v, &exc_tb); + PyErr_Clear(); + + result = PyUnicode_FromFormat(format, exc_v); + Py_DECREF(exc_t); + Py_DECREF(exc_v); + Py_DECREF(exc_tb); + } + return result; +} + +static PyObject* +repr_helper(PyObject *o, char* format) +{ + /* Returns a new reference, or NULL on error */ + PyObject *result; + + if (o) + { + result = PyUnicode_FromFormat(format, o); + if (!result) + result = repr_format_exception(format); + } + else + { + result = PyUnicode_FromString(""); + } + + return result; + +} + +static PyObject* +Per_repr(cPersistentObject *self) +{ + PyObject *prepr = NULL; + PyObject *prepr_exc_str = NULL; + + PyObject *module = NULL; + PyObject *name = NULL; + PyObject *oid_str = NULL; + PyObject *jar_str = NULL; + PyObject *result = NULL; + + unsigned char* oid_bytes; + char buf[20]; + uint64_t oid_value; + + prepr = PyObject_GetAttrString((PyObject*)Py_TYPE(self), "_p_repr"); + if (prepr) + { + result = PyObject_CallFunctionObjArgs(prepr, self, NULL); + if (result) + goto cleanup; + else + { + prepr_exc_str = repr_format_exception(" _p_repr %R"); + if (!prepr_exc_str) + goto cleanup; + } + } + else + { + PyErr_Clear(); + prepr_exc_str = PyUnicode_FromString(""); + } + + if (self->oid && PyBytes_Check(self->oid) && PyBytes_GET_SIZE(self->oid) == 8) { + oid_bytes = (unsigned char*)PyBytes_AS_STRING(self->oid); + oid_value = ((uint64_t)oid_bytes[0] << 56) + | ((uint64_t)oid_bytes[1] << 48) + | ((uint64_t)oid_bytes[2] << 
40) + | ((uint64_t)oid_bytes[3] << 32) + | ((uint64_t)oid_bytes[4] << 24) + | ((uint64_t)oid_bytes[5] << 16) + | ((uint64_t)oid_bytes[6] << 8) + | ((uint64_t)oid_bytes[7]); + /* + Python's PyUnicode_FromFormat doesn't understand the ll + length modifier for %x, so to format a 64-bit value we need to + use stdio. + */ + snprintf(buf, sizeof(buf) - 1, "%llx", oid_value); + oid_str = PyUnicode_FromFormat(" oid 0x%s", buf); + } + + if (!oid_str) { + oid_str = repr_helper(self->oid, " oid %R"); + if (!oid_str) + goto cleanup; + } + + jar_str = repr_helper(self->jar, " in %R"); + if (!jar_str) + goto cleanup; + + module = PyObject_GetAttrString((PyObject*)Py_TYPE(self), "__module__"); + name = PyObject_GetAttrString((PyObject*)Py_TYPE(self), "__name__"); + + if (!module || !name) { + /* + Some error retrieving __module__ or __name__. Ignore it, use the + C data. + */ + PyErr_Clear(); + result = PyUnicode_FromFormat("<%s object at %p%S%S%S>", + Py_TYPE(self)->tp_name, self, + oid_str, jar_str, prepr_exc_str); + } + else { + result = PyUnicode_FromFormat("<%S.%S object at %p%S%S%S>", + module, name, self, + oid_str, jar_str, prepr_exc_str); + } + +cleanup: + Py_XDECREF(prepr); + Py_XDECREF(prepr_exc_str); + Py_XDECREF(oid_str); + Py_XDECREF(jar_str); + Py_XDECREF(name); + Py_XDECREF(module); + + return result; +} + +static PyGetSetDef Per_getsets[] = { + {"_p_changed", (getter)Per_get_changed, (setter)Per_set_changed}, + {"_p_jar", (getter)Per_get_jar, (setter)Per_set_jar}, + {"_p_mtime", (getter)Per_get_mtime}, + {"_p_oid", (getter)Per_get_oid, (setter)Per_set_oid}, + {"_p_serial", (getter)Per_get_serial, (setter)Per_set_serial}, + {"_p_state", (getter)Per_get_state}, + {"_p_estimated_size", + (getter)Per_get_estimated_size, (setter)Per_set_estimated_size + }, + {"_p_status", (getter)Per_get_status}, + {"_p_sticky", (getter)Per_get_sticky, (setter)Per_set_sticky}, + {NULL} +}; + +static struct PyMethodDef Per_methods[] = { + {"_p_deactivate", 
(PyCFunction)Per__p_deactivate, METH_NOARGS, + "_p_deactivate() -- Deactivate the object"}, + {"_p_activate", (PyCFunction)Per__p_activate, METH_NOARGS, + "_p_activate() -- Activate the object"}, + {"_p_invalidate", (PyCFunction)Per__p_invalidate, METH_NOARGS, + "_p_invalidate() -- Invalidate the object"}, + {"_p_getattr", (PyCFunction)Per__p_getattr, METH_O, + "_p_getattr(name) -- Test whether the base class must handle the name\n" + "\n" + "The method unghostifies the object, if necessary.\n" + "The method records the object access, if necessary.\n" + "\n" + "This method should be called by subclass __getattribute__\n" + "implementations before doing anything else. If the method\n" + "returns True, then __getattribute__ implementations must delegate\n" + "to the base class, Persistent.\n" + }, + {"_p_setattr", (PyCFunction)Per__p_setattr, METH_VARARGS, + "_p_setattr(name, value) -- Save persistent meta data\n" + "\n" + "This method should be called by subclass __setattr__ implementations\n" + "before doing anything else. If it returns true, then the attribute\n" + "was handled by the base class.\n" + "\n" + "The method unghostifies the object, if necessary.\n" + "The method records the object access, if necessary.\n" + }, + {"_p_delattr", (PyCFunction)Per__p_delattr, METH_O, + "_p_delattr(name) -- Delete persistent meta data\n" + "\n" + "This method should be called by subclass __delattr__ implementations\n" + "before doing anything else. 
If it returns true, then the attribute\n" + "was handled by the base class.\n" + "\n" + "The method unghostifies the object, if necessary.\n" + "The method records the object access, if necessary.\n" + }, + {"__getstate__", (PyCFunction)Per__getstate__, METH_NOARGS, + pickle___getstate__doc }, + {"__setstate__", (PyCFunction)pickle___setstate__, METH_O, + pickle___setstate__doc}, + {"__reduce__", (PyCFunction)pickle___reduce__, METH_NOARGS, + pickle___reduce__doc}, + + {NULL, NULL} /* sentinel */ +}; + +/* This module is compiled as a shared library. Some compilers don't + allow addresses of Python objects defined in other libraries to be + used in static initializers here. The DEFERRED_ADDRESS macro is + used to tag the slots where such addresses appear; the module init + function must fill in the tagged slots at runtime. The argument is + for documentation -- the macro ignores it. +*/ +#define DEFERRED_ADDRESS(ADDR) 0 + +static PyTypeObject Pertype = { + PyVarObject_HEAD_INIT(DEFERRED_ADDRESS(&PyType_Type), 0) + "persistent.Persistent", /* tp_name */ + sizeof(cPersistentObject), /* tp_basicsize */ + 0, /* tp_itemsize */ + (destructor)Per_dealloc, /* tp_dealloc */ + 0, /* tp_print */ + 0, /* tp_getattr */ + 0, /* tp_setattr */ + 0, /* tp_compare */ + (reprfunc)Per_repr, /* tp_repr */ + 0, /* tp_as_number */ + 0, /* tp_as_sequence */ + 0, /* tp_as_mapping */ + 0, /* tp_hash */ + 0, /* tp_call */ + 0, /* tp_str */ + (getattrofunc)Per_getattro, /* tp_getattro */ + (setattrofunc)Per_setattro, /* tp_setattro */ + 0, /* tp_as_buffer */ + Py_TPFLAGS_DEFAULT | + Py_TPFLAGS_BASETYPE | + Py_TPFLAGS_HAVE_GC, /* tp_flags */ + 0, /* tp_doc */ + (traverseproc)Per_traverse, /* tp_traverse */ + 0, /* tp_clear */ + 0, /* tp_richcompare */ + 0, /* tp_weaklistoffset */ + 0, /* tp_iter */ + 0, /* tp_iternext */ + Per_methods, /* tp_methods */ + 0, /* tp_members */ + Per_getsets, /* tp_getset */ +}; + +/* End of code for Persistent objects */ +/* 
-------------------------------------------------------- */ + +typedef int (*intfunctionwithpythonarg)(PyObject*); + +/* Load the object's state if necessary and become sticky */ +static int +Per_setstate(cPersistentObject *self) +{ + if (unghostify(self) < 0) + return -1; + self->state = cPersistent_STICKY_STATE; + return 0; +} + +static PyObject * +simple_new(PyObject *self, PyObject *type_object) +{ + if (!PyType_Check(type_object)) + { + PyErr_SetString(PyExc_TypeError, + "simple_new argument must be a type object."); + return NULL; + } + return PyType_GenericNew((PyTypeObject *)type_object, NULL, NULL); +} + +static PyMethodDef cPersistence_methods[] = +{ + {"simple_new", simple_new, METH_O, + "Create an object by simply calling a class's __new__ method without " + "arguments."}, + {NULL, NULL} +}; + + +static cPersistenceCAPIstruct +truecPersistenceCAPI = { + &Pertype, + (getattrofunc)Per_getattro, /*tp_getattr with object key*/ + (setattrofunc)Per_setattro, /*tp_setattr with object key*/ + changed, + accessed, + ghostify, + (intfunctionwithpythonarg)Per_setstate, + NULL, /* The percachedel slot is initialized in cPickleCache.c when + the module is loaded. It uses a function in a different + shared library. 
*/ + readCurrent +}; + +#ifdef PY3K +static struct PyModuleDef moduledef = +{ + PyModuleDef_HEAD_INIT, + "cPersistence", /* m_name */ + cPersistence_doc_string, /* m_doc */ + -1, /* m_size */ + cPersistence_methods, /* m_methods */ + NULL, /* m_reload */ + NULL, /* m_traverse */ + NULL, /* m_clear */ + NULL, /* m_free */ +}; + +#endif + +static PyObject* +module_init(void) +{ + PyObject *module, *ts_module, *capi; + PyObject *copy_reg; + + if (init_strings() < 0) + return NULL; + +#ifdef PY3K + module = PyModule_Create(&moduledef); +#else + module = Py_InitModule3("cPersistence", cPersistence_methods, + cPersistence_doc_string); +#endif + +#ifdef PY3K + ((PyObject*)&Pertype)->ob_type = &PyType_Type; +#else + Pertype.ob_type = &PyType_Type; +#endif + Pertype.tp_new = PyType_GenericNew; + if (PyType_Ready(&Pertype) < 0) + return NULL; + if (PyModule_AddObject(module, "Persistent", (PyObject *)&Pertype) < 0) + return NULL; + + cPersistenceCAPI = &truecPersistenceCAPI; +#ifdef PY3K + capi = PyCapsule_New(cPersistenceCAPI, CAPI_CAPSULE_NAME, NULL); +#else + capi = PyCObject_FromVoidPtr(cPersistenceCAPI, NULL); +#endif + if (!capi) + return NULL; + if (PyModule_AddObject(module, "CAPI", capi) < 0) + return NULL; + + if (PyModule_AddIntConstant(module, "GHOST", cPersistent_GHOST_STATE) < 0) + return NULL; + + if (PyModule_AddIntConstant(module, "UPTODATE", + cPersistent_UPTODATE_STATE) < 0) + return NULL; + + if (PyModule_AddIntConstant(module, "CHANGED", + cPersistent_CHANGED_STATE) < 0) + return NULL; + + if (PyModule_AddIntConstant(module, "STICKY", + cPersistent_STICKY_STATE) < 0) + return NULL; + + py_simple_new = PyObject_GetAttrString(module, "simple_new"); + if (!py_simple_new) + return NULL; + +#ifdef PY3K + copy_reg = PyImport_ImportModule("copyreg"); +#else + copy_reg = PyImport_ImportModule("copy_reg"); +#endif + if (!copy_reg) + return NULL; + + copy_reg_slotnames = PyObject_GetAttrString(copy_reg, "_slotnames"); + if (!copy_reg_slotnames) + { + 
Py_DECREF(copy_reg); + return NULL; + } + + __newobj__ = PyObject_GetAttrString(copy_reg, "__newobj__"); + if (!__newobj__) + { + Py_DECREF(copy_reg); + return NULL; + } + + if (!TimeStamp) + { + ts_module = PyImport_ImportModule("persistent.timestamp"); + if (!ts_module) + return NULL; + TimeStamp = PyObject_GetAttrString(ts_module, "TimeStamp"); + Py_DECREF(ts_module); + /* fall through to immediate return on error */ + } + return module; +} + +#ifdef PY3K +PyMODINIT_FUNC PyInit_cPersistence(void) +{ + return module_init(); +} +#else +PyMODINIT_FUNC initcPersistence(void) +{ + module_init(); +} +#endif diff --git a/thesisenv/lib/python3.6/site-packages/persistent/cPersistence.cpython-36m-darwin.so b/thesisenv/lib/python3.6/site-packages/persistent/cPersistence.cpython-36m-darwin.so new file mode 100755 index 0000000..d125e1e Binary files /dev/null and b/thesisenv/lib/python3.6/site-packages/persistent/cPersistence.cpython-36m-darwin.so differ diff --git a/thesisenv/lib/python3.6/site-packages/persistent/cPersistence.h b/thesisenv/lib/python3.6/site-packages/persistent/cPersistence.h new file mode 100644 index 0000000..ac9a885 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/persistent/cPersistence.h @@ -0,0 +1,156 @@ +/***************************************************************************** + + Copyright (c) 2001, 2002 Zope Foundation and Contributors. + All Rights Reserved. + + This software is subject to the provisions of the Zope Public License, + Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+ THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED + WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED + WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS + FOR A PARTICULAR PURPOSE + + ****************************************************************************/ + +#ifndef CPERSISTENCE_H +#define CPERSISTENCE_H + +#include "_compat.h" +#include "bytesobject.h" + +#include "ring.h" + +#define CACHE_HEAD \ + PyObject_HEAD \ + CPersistentRing ring_home; \ + int non_ghost_count; \ + Py_ssize_t total_estimated_size; + +struct ccobject_head_struct; + +typedef struct ccobject_head_struct PerCache; + +/* How big is a persistent object? + + 12 PyGC_Head is two pointers and an int + 8 PyObject_HEAD is an int and a pointer + + 12 jar, oid, cache pointers + 8 ring struct + 8 serialno + 4 state + extra + 4 size info + + (56) so far + + 4 dict ptr + 4 weaklist ptr + ------------------------- + 68 only need 62, but obmalloc rounds up to multiple of eight + + Even a ghost requires 64 bytes. It's possible to make a persistent + instance with slots and no dict, which changes the storage needed. + +*/ + +#define cPersistent_HEAD \ + PyObject_HEAD \ + PyObject *jar; \ + PyObject *oid; \ + PerCache *cache; \ + CPersistentRing ring; \ + char serial[8]; \ + signed state:8; \ + unsigned estimated_size:24; + +/* We recently added estimated_size. We originally added it as a new + unsigned long field after a signed char state field and a + 3-character reserved field. This didn't work because there + are packages in the wild that have their own copies of cPersistence.h + that didn't see the update. + + To get around this, we used the reserved space by making + estimated_size a 24-bit bit field in the space occupied by the old + 3-character reserved field. To fit in 24 bits, we made the units + of estimated_size 64-character blocks. This allows is to handle up + to a GB. 
We should never see that, but to be paranoid, we also + truncate sizes greater than 1GB. We also set the minimum size to + 64 bytes. + + We use the _estimated_size_in_24_bits and _estimated_size_in_bytes + macros both to avoid repetition and to make intent a little clearer. +*/ +#define _estimated_size_in_24_bits(I) ((I) > 1073741696 ? 16777215 : (I)/64+1) +#define _estimated_size_in_bytes(I) ((I)*64) + +#define cPersistent_GHOST_STATE -1 +#define cPersistent_UPTODATE_STATE 0 +#define cPersistent_CHANGED_STATE 1 +#define cPersistent_STICKY_STATE 2 + +typedef struct { + cPersistent_HEAD +} cPersistentObject; + +typedef void (*percachedelfunc)(PerCache *, PyObject *); + +typedef struct { + PyTypeObject *pertype; + getattrofunc getattro; + setattrofunc setattro; + int (*changed)(cPersistentObject*); + void (*accessed)(cPersistentObject*); + void (*ghostify)(cPersistentObject*); + int (*setstate)(PyObject*); + percachedelfunc percachedel; + int (*readCurrent)(cPersistentObject*); +} cPersistenceCAPIstruct; + +#define cPersistenceType cPersistenceCAPI->pertype + +#ifndef DONT_USE_CPERSISTENCECAPI +static cPersistenceCAPIstruct *cPersistenceCAPI; +#endif + +#define cPersistanceModuleName "cPersistence" + +#define PER_TypeCheck(O) PyObject_TypeCheck((O), cPersistenceCAPI->pertype) + +#define PER_USE_OR_RETURN(O,R) {if((O)->state==cPersistent_GHOST_STATE && cPersistenceCAPI->setstate((PyObject*)(O)) < 0) return (R); else if ((O)->state==cPersistent_UPTODATE_STATE) (O)->state=cPersistent_STICKY_STATE;} + +#define PER_CHANGED(O) (cPersistenceCAPI->changed((cPersistentObject*)(O))) + +#define PER_READCURRENT(O, E) \ + if (cPersistenceCAPI->readCurrent((cPersistentObject*)(O)) < 0) { E; } + +#define PER_GHOSTIFY(O) (cPersistenceCAPI->ghostify((cPersistentObject*)(O))) + +/* If the object is sticky, make it non-sticky, so that it can be ghostified. 
+ The value is not meaningful + */ +#define PER_ALLOW_DEACTIVATION(O) ((O)->state==cPersistent_STICKY_STATE && ((O)->state=cPersistent_UPTODATE_STATE)) + +#define PER_PREVENT_DEACTIVATION(O) ((O)->state==cPersistent_UPTODATE_STATE && ((O)->state=cPersistent_STICKY_STATE)) + +/* + Make a persistent object usable from C by: + + - Making sure it is not a ghost + + - Making it sticky. + + IMPORTANT: If you call this and don't call PER_ALLOW_DEACTIVATION, + your object will not be ghostified. + + PER_USE returns a 1 on success and 0 failure, where failure means + error. + */ +#define PER_USE(O) \ +(((O)->state != cPersistent_GHOST_STATE \ + || (cPersistenceCAPI->setstate((PyObject*)(O)) >= 0)) \ + ? (((O)->state==cPersistent_UPTODATE_STATE) \ + ? ((O)->state=cPersistent_STICKY_STATE) : 1) : 0) + +#define PER_ACCESSED(O) (cPersistenceCAPI->accessed((cPersistentObject*)(O))) + +#endif diff --git a/thesisenv/lib/python3.6/site-packages/persistent/cPickleCache.c b/thesisenv/lib/python3.6/site-packages/persistent/cPickleCache.c new file mode 100644 index 0000000..496a8c8 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/persistent/cPickleCache.c @@ -0,0 +1,1384 @@ +/***************************************************************************** + + Copyright (c) 2001, 2002 Zope Foundation and Contributors. + All Rights Reserved. + + This software is subject to the provisions of the Zope Public License, + Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. + THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED + WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED + WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS + FOR A PARTICULAR PURPOSE + +****************************************************************************/ + +/* + + Objects are stored under three different regimes: + + Regime 1: Persistent Classes + + Persistent Classes are part of ZClasses. 
They are stored in the + self->data dictionary, and are never garbage collected. + + The klass_items() method returns a sequence of (oid,object) tuples for + every Persistent Class, which should make it possible to implement + garbage collection in Python if necessary. + + Regime 2: Ghost Objects + + There is no benefit to keeping a ghost object which has no external + references, therefore a weak reference scheme is used to ensure that + ghost objects are removed from memory as soon as possible, when the + last external reference is lost. + + Ghost objects are stored in the self->data dictionary. Normally a + dictionary keeps a strong reference on its values, however this + reference count is 'stolen'. + + This weak reference scheme leaves a dangling reference, in the + dictionary, when the last external reference is lost. To clean up this + dangling reference the persistent object dealloc function calls + self->cache->_oid_unreferenced(self->oid). The cache looks up the oid + in the dictionary, ensures it points to an object whose reference + count is zero, then removes it from the dictionary. Before removing + the object from the dictionary it must temporarily resurrect the + object in much the same way that class instances are resurrected + before their __del__ is called. + + Since ghost objects are stored under a different regime to non-ghost + objects, an extra ghostify function in cPersistenceAPI replaces + self->state=GHOST_STATE assignments that were common in other + persistent classes (such as BTrees). + + Regime 3: Non-Ghost Objects + + Non-ghost objects are stored in two data structures: the dictionary + mapping oids to objects and a doubly-linked list that encodes the + order in which the objects were accessed. The dictionary reference is + borrowed, as it is for ghosts. The list reference is a new reference; + the list stores recently used objects, even if they are otherwise + unreferenced, to avoid loading the object from the database again. 
+ + The doubly-link-list nodes contain next and previous pointers linking + together the cache and all non-ghost persistent objects. + + The node embedded in the cache is the home position. On every + attribute access a non-ghost object will relink itself just behind the + home position in the ring. Objects accessed least recently will + eventually find themselves positioned after the home position. + + Occasionally other nodes are temporarily inserted in the ring as + position markers. The cache contains a ring_lock flag which must be + set and unset before and after doing so. Only if the flag is unset can + the cache assume that all nodes are either his own home node, or nodes + from persistent objects. This assumption is useful during the garbage + collection process. + + The number of non-ghost objects is counted in self->non_ghost_count. + The garbage collection process consists of traversing the ring, and + deactivating (that is, turning into a ghost) every object until + self->non_ghost_count is down to the target size, or until it + reaches the home position again. + + Note that objects in the sticky or changed states are still kept in + the ring, however they can not be deactivated. The garbage collection + process must skip such objects, rather than deactivating them. + +*/ + +static char cPickleCache_doc_string[] = + "Defines the PickleCache used by ZODB Connection objects.\n" + "\n" + "$Id$\n"; + +#define DONT_USE_CPERSISTENCECAPI +#include "cPersistence.h" +#include "structmember.h" +#include +#include +#undef Py_FindMethod + + +/* Python string objects to speed lookups; set by module init. */ +static PyObject *py__p_changed; +static PyObject *py__p_deactivate; +static PyObject *py__p_jar; +static PyObject *py__p_oid; + +static cPersistenceCAPIstruct *cPersistenceCAPI; + +/* This object is the pickle cache. 
The CACHE_HEAD macro guarantees + that layout of this struct is the same as the start of + ccobject_head in cPersistence.c */ +typedef struct +{ + CACHE_HEAD + int klass_count; /* count of persistent classes */ + PyObject *data; /* oid -> object dict */ + PyObject *jar; /* Connection object */ + int cache_size; /* target number of items in cache */ + Py_ssize_t cache_size_bytes; /* target total estimated size of + items in cache */ + + /* Most of the time the ring contains only: + * many nodes corresponding to persistent objects + * one 'home' node from the cache. + In some cases it is handy to temporarily add other types + of node into the ring as placeholders. 'ring_lock' is a boolean + indicating that someone has already done this. Currently this + is only used by the garbage collection code. */ + + int ring_lock; + + /* 'cache_drain_resistance' controls how quickly the cache size will drop + when it is smaller than the configured size. A value of zero means it + will not drop below the configured size (suitable for most caches). + Otherwise, it will remove cache_non_ghost_count/cache_drain_resistance + items from the cache every time (suitable for rarely used caches, such + as those associated with Zope versions. */ + + int cache_drain_resistance; + +} ccobject; + +static int cc_ass_sub(ccobject *self, PyObject *key, PyObject *v); + +/* ---------------------------------------------------------------- */ + +#define OBJECT_FROM_RING(SELF, HERE) \ + ((cPersistentObject *)(((char *)here) - offsetof(cPersistentObject, ring))) + +/* Insert self into the ring, following after. */ +static void +insert_after(CPersistentRing *self, CPersistentRing *after) +{ + assert(self != NULL); + assert(after != NULL); + self->r_prev = after; + self->r_next = after->r_next; + after->r_next->r_prev = self; + after->r_next = self; +} + +/* Remove self from the ring. 
*/ +static void +unlink_from_ring(CPersistentRing *self) +{ + assert(self != NULL); + self->r_prev->r_next = self->r_next; + self->r_next->r_prev = self->r_prev; +} + +static int +scan_gc_items(ccobject *self, int target, Py_ssize_t target_bytes) +{ + /* This function must only be called with the ring lock held, + because it places non-object placeholders in the ring. + */ + cPersistentObject *object; + CPersistentRing *here; + CPersistentRing before_original_home; + int result = -1; /* guilty until proved innocent */ + + /* Scan the ring, from least to most recently used, deactivating + * up-to-date objects, until we either find the ring_home again or + * or we've ghosted enough objects to reach the target size. + * Tricky: __getattr__ and __del__ methods can do anything, and in + * particular if we ghostify an object with a __del__ method, that method + * can load the object again, putting it back into the MRU part of the + * ring. Waiting to find ring_home again can thus cause an infinite + * loop (Collector #1208). So before_original_home records the MRU + * position we start with, and we stop the scan when we reach that. + */ + insert_after(&before_original_home, self->ring_home.r_prev); + here = self->ring_home.r_next; /* least recently used object */ + while (here != &before_original_home && + (self->non_ghost_count > target + || (target_bytes && self->total_estimated_size > target_bytes) + ) + ) + { + assert(self->ring_lock); + assert(here != &self->ring_home); + + /* At this point we know that the ring only contains nodes + from persistent objects, plus our own home node. We know + this because the ring lock is held. We can safely assume + the current ring node is a persistent object now we know it + is not the home */ + object = OBJECT_FROM_RING(self, here); + + if (object->state == cPersistent_UPTODATE_STATE) + { + CPersistentRing placeholder; + PyObject *method; + PyObject *temp; + int error_occurred = 0; + /* deactivate it. 
This is the main memory saver. */ + + /* Add a placeholder, a dummy node in the ring. We need + to do this to mark our position in the ring. It is + possible that the PyObject_GetAttr() call below will + invoke a __getattr__() hook in Python. Also possible + that deactivation will lead to a __del__ method call. + So another thread might run, and mutate the ring as a side + effect of object accesses. There's no predicting then where + in the ring here->next will point after that. The + placeholder won't move as a side effect of calling Python + code. + */ + insert_after(&placeholder, here); + method = PyObject_GetAttr((PyObject *)object, py__p_deactivate); + if (method == NULL) + error_occurred = 1; + else + { + temp = PyObject_CallObject(method, NULL); + Py_DECREF(method); + if (temp == NULL) + error_occurred = 1; + else + Py_DECREF(temp); + } + + here = placeholder.r_next; + unlink_from_ring(&placeholder); + if (error_occurred) + goto Done; + } + else + here = here->r_next; + } + result = 0; +Done: + unlink_from_ring(&before_original_home); + return result; +} + +static PyObject * +lockgc(ccobject *self, int target_size, Py_ssize_t target_size_bytes) +{ + /* This is thread-safe because of the GIL, and there's nothing + * in between checking the ring_lock and acquiring it that calls back + * into Python. + */ + if (self->ring_lock) + { + Py_INCREF(Py_None); + return Py_None; + } + + self->ring_lock = 1; + if (scan_gc_items(self, target_size, target_size_bytes) < 0) + { + self->ring_lock = 0; + return NULL; + } + self->ring_lock = 0; + + Py_INCREF(Py_None); + return Py_None; +} + +static PyObject * +cc_incrgc(ccobject *self, PyObject *args) +{ + int obsolete_arg = -999; + int starting_size = self->non_ghost_count; + int target_size = self->cache_size; + Py_ssize_t target_size_bytes = self->cache_size_bytes; + + if (self->cache_drain_resistance >= 1) + { + /* This cache will gradually drain down to a small size. 
Check + a (small) number of objects proportional to the current size */ + + int target_size_2 = (starting_size - 1 + - starting_size / self->cache_drain_resistance); + if (target_size_2 < target_size) + target_size = target_size_2; + } + + + if (!PyArg_ParseTuple(args, "|i:incrgc", &obsolete_arg)) + return NULL; + + if (obsolete_arg != -999 + && + (PyErr_Warn(PyExc_DeprecationWarning, + "No argument expected") + < 0)) + return NULL; + + return lockgc(self, target_size, target_size_bytes); +} + +static PyObject * +cc_full_sweep(ccobject *self, PyObject *args) +{ + int dt = -999; + + /* TODO: This should be deprecated; */ + + if (!PyArg_ParseTuple(args, "|i:full_sweep", &dt)) + return NULL; + if (dt == -999) + return lockgc(self, 0, 0); + else + return cc_incrgc(self, args); +} + +static PyObject * +cc_minimize(ccobject *self, PyObject *args) +{ + int ignored = -999; + + if (!PyArg_ParseTuple(args, "|i:minimize", &ignored)) + return NULL; + + if (ignored != -999 + && + (PyErr_Warn(PyExc_DeprecationWarning, + "No argument expected") + < 0)) + return NULL; + + return lockgc(self, 0, 0); +} + +static int +_invalidate(ccobject *self, PyObject *key) +{ + static PyObject *_p_invalidate = NULL; + PyObject *meth, *v; + + v = PyDict_GetItem(self->data, key); + if (v == NULL) + return 0; + + if (_p_invalidate == NULL) + { + _p_invalidate = INTERN("_p_invalidate"); + if (_p_invalidate == NULL) + { + /* It doesn't make any sense to ignore this error, but + the caller ignores all errors. + + TODO: and why does it do that? This should be fixed + */ + return -1; + } + } + + if (v->ob_refcnt <= 1 && PyType_Check(v)) + { + /* This looks wrong, but it isn't. We use strong references to types + because they don't have the ring members. + + The result is that we *never* remove classes unless + they are modified. We can fix this by using wekrefs uniformly. 
+ */ + self->klass_count--; + return PyDict_DelItem(self->data, key); + } + + meth = PyObject_GetAttr(v, _p_invalidate); + if (meth == NULL) + return -1; + + v = PyObject_CallObject(meth, NULL); + Py_DECREF(meth); + if (v == NULL) + return -1; + Py_DECREF(v); + return 0; +} + +static PyObject * +cc_invalidate(ccobject *self, PyObject *inv) +{ + PyObject *key, *v; + Py_ssize_t i = 0; + + if (PyDict_Check(inv)) + { + while (PyDict_Next(inv, &i, &key, &v)) + { + if (_invalidate(self, key) < 0) + return NULL; + } + PyDict_Clear(inv); + } + else + { + if (PyBytes_Check(inv)) + { + if (_invalidate(self, inv) < 0) + return NULL; + } + else + { + int l, r; + + l = PyObject_Length(inv); + if (l < 0) + return NULL; + for (i=l; --i >= 0; ) + { + key = PySequence_GetItem(inv, i); + if (!key) + return NULL; + r = _invalidate(self, key); + Py_DECREF(key); + if (r < 0) + return NULL; + } + /* Dubious: modifying the input may be an unexpected side effect. */ + PySequence_DelSlice(inv, 0, l); + } + } + + Py_INCREF(Py_None); + return Py_None; +} + +static PyObject * +cc_get(ccobject *self, PyObject *args) +{ + PyObject *r, *key, *d = NULL; + + if (!PyArg_ParseTuple(args, "O|O:get", &key, &d)) + return NULL; + + r = PyDict_GetItem(self->data, key); + if (!r) + { + if (d) + r = d; + else + r = Py_None; + } + Py_INCREF(r); + return r; +} + +static PyObject * +cc_items(ccobject *self) +{ + return PyObject_CallMethod(self->data, "items", ""); +} + +static PyObject * +cc_klass_items(ccobject *self) +{ + PyObject *l,*k,*v; + Py_ssize_t p = 0; + + l = PyList_New(0); + if (l == NULL) + return NULL; + + while (PyDict_Next(self->data, &p, &k, &v)) + { + if(PyType_Check(v)) + { + v = Py_BuildValue("OO", k, v); + if (v == NULL) + { + Py_DECREF(l); + return NULL; + } + if (PyList_Append(l, v) < 0) + { + Py_DECREF(v); + Py_DECREF(l); + return NULL; + } + Py_DECREF(v); + } + } + + return l; +} + +static PyObject * +cc_debug_info(ccobject *self) +{ + PyObject *l,*k,*v; + Py_ssize_t p = 0; + + l = 
PyList_New(0); + if (l == NULL) + return NULL; + + while (PyDict_Next(self->data, &p, &k, &v)) + { + if (v->ob_refcnt <= 0) + v = Py_BuildValue("Oi", k, v->ob_refcnt); + + else if (! PyType_Check(v) && + PER_TypeCheck(v) + ) + v = Py_BuildValue("Oisi", + k, v->ob_refcnt, v->ob_type->tp_name, + ((cPersistentObject*)v)->state); + else + v = Py_BuildValue("Ois", k, v->ob_refcnt, v->ob_type->tp_name); + + if (v == NULL) + goto err; + + if (PyList_Append(l, v) < 0) + goto err; + } + + return l; + +err: + Py_DECREF(l); + return NULL; +} + +static PyObject * +cc_lru_items(ccobject *self) +{ + PyObject *l; + CPersistentRing *here; + + if (self->ring_lock) + { + /* When the ring lock is held, we have no way of know which + ring nodes belong to persistent objects, and which a + placeholders. */ + PyErr_SetString(PyExc_ValueError, + ".lru_items() is unavailable during garbage collection"); + return NULL; + } + + l = PyList_New(0); + if (l == NULL) + return NULL; + + here = self->ring_home.r_next; + while (here != &self->ring_home) + { + PyObject *v; + cPersistentObject *object = OBJECT_FROM_RING(self, here); + + if (object == NULL) + { + Py_DECREF(l); + return NULL; + } + v = Py_BuildValue("OO", object->oid, object); + if (v == NULL) + { + Py_DECREF(l); + return NULL; + } + if (PyList_Append(l, v) < 0) + { + Py_DECREF(v); + Py_DECREF(l); + return NULL; + } + Py_DECREF(v); + here = here->r_next; + } + + return l; +} + +static void +cc_oid_unreferenced(ccobject *self, PyObject *oid) +{ + /* This is called by the persistent object deallocation function + when the reference count on a persistent object reaches + zero. We need to fix up our dictionary; its reference is now + dangling because we stole its reference count. Be careful to + not release the global interpreter lock until this is + complete. */ + + PyObject *v; + + /* If the cache has been cleared by GC, data will be NULL. 
*/ + if (!self->data) + return; + + v = PyDict_GetItem(self->data, oid); + assert(v); + assert(v->ob_refcnt == 0); + /* Need to be very hairy here because a dictionary is about + to decref an already deleted object. + */ + +#ifdef Py_TRACE_REFS + /* This is called from the deallocation function after the + interpreter has untracked the reference. Track it again. + */ + _Py_NewReference(v); + /* Don't increment total refcount as a result of the + shenanigans played in this function. The _Py_NewReference() + call above creates artificial references to v. + */ + _Py_RefTotal--; + assert(v->ob_type); +#else + Py_INCREF(v); +#endif + assert(v->ob_refcnt == 1); + /* Incremement the refcount again, because delitem is going to + DECREF it. If it's refcount reached zero again, we'd call back to + the dealloc function that called us. + */ + Py_INCREF(v); + + /* TODO: Should we call _Py_ForgetReference() on error exit? */ + if (PyDict_DelItem(self->data, oid) < 0) + return; + Py_DECREF((ccobject *)((cPersistentObject *)v)->cache); + ((cPersistentObject *)v)->cache = NULL; + + assert(v->ob_refcnt == 1); + + /* Undo the temporary resurrection. + Don't DECREF the object, because this function is called from + the object's dealloc function. If the refcnt reaches zero, it + will all be invoked recursively. 
+ */ + _Py_ForgetReference(v); +} + +static PyObject * +cc_ringlen(ccobject *self) +{ + CPersistentRing *here; + int c = 0; + + for (here = self->ring_home.r_next; here != &self->ring_home; + here = here->r_next) + c++; + return INT_FROM_LONG(c); +} + +static PyObject * +cc_update_object_size_estimation(ccobject *self, PyObject *args) +{ + PyObject *oid; + cPersistentObject *v; + unsigned int new_size; + if (!PyArg_ParseTuple(args, "OI:updateObjectSizeEstimation", + &oid, &new_size)) + return NULL; + /* Note: reference borrowed */ + v = (cPersistentObject *)PyDict_GetItem(self->data, oid); + if (v) + { + /* we know this object -- update our "total_size_estimation" + we must only update when the object is in the ring + */ + if (v->ring.r_next) + { + self->total_estimated_size += _estimated_size_in_bytes( + (int)(_estimated_size_in_24_bits(new_size)) + - (int)(v->estimated_size) + ); + /* we do this in "Connection" as we need it even when the + object is not in the cache (or not the ring) + */ + /* v->estimated_size = new_size; */ + } + } + Py_RETURN_NONE; +} + +static PyObject* +cc_new_ghost(ccobject *self, PyObject *args) +{ + PyObject *tmp, *key, *v; + + if (!PyArg_ParseTuple(args, "OO:new_ghost", &key, &v)) + return NULL; + + /* Sanity check the value given to make sure it is allowed in the cache */ + if (PyType_Check(v)) + { + /* Its a persistent class, such as a ZClass. Thats ok. */ + } + else if (! PER_TypeCheck(v)) + { + /* If it's not an instance of a persistent class, (ie Python + classes that derive from persistent.Persistent, BTrees, + etc), report an error. + + */ + PyErr_SetString(PyExc_TypeError, + "Cache values must be persistent objects."); + return NULL; + } + + /* Can't access v->oid directly because the object might be a + * persistent class. 
+ */ + tmp = PyObject_GetAttr(v, py__p_oid); + if (tmp == NULL) + return NULL; + Py_DECREF(tmp); + if (tmp != Py_None) + { + PyErr_SetString(PyExc_AssertionError, + "New ghost object must not have an oid"); + return NULL; + } + + /* useful sanity check, but not strictly an invariant of this class */ + tmp = PyObject_GetAttr(v, py__p_jar); + if (tmp == NULL) + return NULL; + Py_DECREF(tmp); + if (tmp != Py_None) + { + PyErr_SetString(PyExc_AssertionError, + "New ghost object must not have a jar"); + return NULL; + } + + tmp = PyDict_GetItem(self->data, key); + if (tmp) + { + Py_DECREF(tmp); + PyErr_SetString(PyExc_AssertionError, + "The given oid is already in the cache"); + return NULL; + } + + if (PyType_Check(v)) + { + if (PyObject_SetAttr(v, py__p_jar, self->jar) < 0) + return NULL; + if (PyObject_SetAttr(v, py__p_oid, key) < 0) + return NULL; + if (PyDict_SetItem(self->data, key, v) < 0) + return NULL; + PyObject_GC_UnTrack((void *)self->data); + self->klass_count++; + } + else + { + cPersistentObject *p = (cPersistentObject *)v; + + if(p->cache != NULL) + { + PyErr_SetString(PyExc_AssertionError, "Already in a cache"); + return NULL; + } + + if (PyDict_SetItem(self->data, key, v) < 0) + return NULL; + /* the dict should have a borrowed reference */ + PyObject_GC_UnTrack((void *)self->data); + Py_DECREF(v); + + Py_INCREF(self); + p->cache = (PerCache *)self; + Py_INCREF(self->jar); + p->jar = self->jar; + Py_INCREF(key); + p->oid = key; + p->state = cPersistent_GHOST_STATE; + } + + Py_RETURN_NONE; +} + +static struct PyMethodDef cc_methods[] = { + {"items", (PyCFunction)cc_items, METH_NOARGS, + "Return list of oid, object pairs for all items in cache."}, + + {"lru_items", (PyCFunction)cc_lru_items, METH_NOARGS, + "List (oid, object) pairs from the lru list, as 2-tuples."}, + + {"klass_items", (PyCFunction)cc_klass_items, METH_NOARGS, + "List (oid, object) pairs of cached persistent classes."}, + + {"full_sweep", (PyCFunction)cc_full_sweep, METH_VARARGS, + 
"full_sweep() -- Perform a full sweep of the cache."}, + + {"minimize", (PyCFunction)cc_minimize, METH_VARARGS, + "minimize([ignored]) -- Remove as many objects as possible\n\n" + "Ghostify all objects that are not modified. Takes an optional\n" + "argument, but ignores it."}, + + {"incrgc", (PyCFunction)cc_incrgc, METH_VARARGS, + "incrgc() -- Perform incremental garbage collection\n\n" + "This method had been depricated!" + "Some other implementations support an optional parameter 'n' which\n" + "indicates a repetition count; this value is ignored."}, + + {"invalidate", (PyCFunction)cc_invalidate, METH_O, + "invalidate(oids) -- invalidate one, many, or all ids"}, + + {"get", (PyCFunction)cc_get, METH_VARARGS, + "get(key [, default]) -- get an item, or a default"}, + + {"ringlen", (PyCFunction)cc_ringlen, METH_NOARGS, + "ringlen() -- Returns number of non-ghost items in cache."}, + + {"debug_info", (PyCFunction)cc_debug_info, METH_NOARGS, + "debug_info() -- Returns debugging data about objects in the cache."}, + + {"update_object_size_estimation", + (PyCFunction)cc_update_object_size_estimation, METH_VARARGS, + "update_object_size_estimation(oid, new_size) -- " + "update the caches size estimation for *oid* " + "(if this is known to the cache)."}, + + {"new_ghost", (PyCFunction)cc_new_ghost, METH_VARARGS, + "new_ghost() -- Initialize a ghost and add it to the cache."}, + + {NULL, NULL} /* sentinel */ +}; + +static int +cc_init(ccobject *self, PyObject *args, PyObject *kwds) +{ + int cache_size = 100; + Py_ssize_t cache_size_bytes = 0; + PyObject *jar; + + if (!PyArg_ParseTuple(args, "O|in", &jar, &cache_size, &cache_size_bytes)) + return -1; + + self->jar = NULL; + self->data = PyDict_New(); + if (self->data == NULL) + { + Py_DECREF(self); + return -1; + } + /* Untrack the dict mapping oids to objects. + + The dict contains uncounted references to ghost objects, so it + isn't safe for GC to visit it. 
If GC finds an object with more + referents that refcounts, it will die with an assertion failure. + + When the cache participates in GC, it will need to traverse the + objects in the doubly-linked list, which will account for all the + non-ghost objects. + */ + PyObject_GC_UnTrack((void *)self->data); + self->jar = jar; + Py_INCREF(jar); + self->cache_size = cache_size; + self->cache_size_bytes = cache_size_bytes; + self->non_ghost_count = 0; + self->total_estimated_size = 0; + self->klass_count = 0; + self->cache_drain_resistance = 0; + self->ring_lock = 0; + self->ring_home.r_next = &self->ring_home; + self->ring_home.r_prev = &self->ring_home; + return 0; +} + +static void +cc_dealloc(ccobject *self) +{ + PyObject_GC_UnTrack((PyObject *)self); + Py_XDECREF(self->data); + Py_XDECREF(self->jar); + PyObject_GC_Del(self); +} + +static int +cc_clear(ccobject *self) +{ + Py_ssize_t pos = 0; + PyObject *k, *v; + /* Clearing the cache is delicate. + + A non-ghost object will show up in the ring and in the dict. If + we deallocating the dict before clearing the ring, the GC will + decref each object in the dict. Since the dict references are + uncounted, this will lead to objects having negative refcounts. + + Freeing the non-ghost objects should eliminate many objects from + the cache, but there may still be ghost objects left. It's + not safe to decref the dict until it's empty, so we need to manually + clear those out of the dict, too. We accomplish that by replacing + all the ghost objects with None. + */ + + /* We don't need to lock the ring, because the cache is unreachable. + It should be impossible for anyone to be modifying the cache. + */ + assert(! 
self->ring_lock); + + while (self->ring_home.r_next != &self->ring_home) + { + CPersistentRing *here = self->ring_home.r_next; + cPersistentObject *o = OBJECT_FROM_RING(self, here); + + if (o->cache) + { + Py_INCREF(o); /* account for uncounted reference */ + if (PyDict_DelItem(self->data, o->oid) < 0) + return -1; + } + o->cache = NULL; + Py_DECREF(self); + self->ring_home.r_next = here->r_next; + o->ring.r_prev = NULL; + o->ring.r_next = NULL; + Py_DECREF(o); + here = here->r_next; + } + + Py_XDECREF(self->jar); + + while (PyDict_Next(self->data, &pos, &k, &v)) + { + Py_INCREF(v); + if (PyDict_SetItem(self->data, k, Py_None) < 0) + return -1; + } + Py_XDECREF(self->data); + self->data = NULL; + self->jar = NULL; + return 0; +} + +static int +cc_traverse(ccobject *self, visitproc visit, void *arg) +{ + int err; + CPersistentRing *here; + + /* If we're in the midst of cleaning up old objects, the ring contains + * assorted junk we must not pass on to the visit() callback. This + * should be rare (our cleanup code would need to have called back + * into Python, which in turn triggered Python's gc). When it happens, + * simply don't chase any pointers. The cache will appear to be a + * source of external references then, and at worst we miss cleaning + * up a dead cycle until the next time Python's gc runs. + */ + if (self->ring_lock) + return 0; + +#define VISIT(SLOT) \ + if (SLOT) { \ + err = visit((PyObject *)(SLOT), arg); \ + if (err) \ + return err; \ + } + + VISIT(self->jar); + + here = self->ring_home.r_next; + + /* It is possible that an object is traversed after it is cleared. + In that case, there is no ring. 
+ */ + if (!here) + return 0; + + while (here != &self->ring_home) + { + cPersistentObject *o = OBJECT_FROM_RING(self, here); + VISIT(o); + here = here->r_next; + } +#undef VISIT + + return 0; +} + +static Py_ssize_t +cc_length(ccobject *self) +{ + return PyObject_Length(self->data); +} + +static PyObject * +cc_subscript(ccobject *self, PyObject *key) +{ + PyObject *r; + + r = PyDict_GetItem(self->data, key); + if (r == NULL) + { + PyErr_SetObject(PyExc_KeyError, key); + return NULL; + } + Py_INCREF(r); + + return r; +} + +static int +cc_add_item(ccobject *self, PyObject *key, PyObject *v) +{ + int result; + PyObject *oid, *object_again, *jar; + cPersistentObject *p; + + /* Sanity check the value given to make sure it is allowed in the cache */ + if (PyType_Check(v)) + { + /* Its a persistent class, such as a ZClass. Thats ok. */ + } + else if (! PER_TypeCheck(v)) + { + /* If it's not an instance of a persistent class, (ie Python + classes that derive from persistent.Persistent, BTrees, + etc), report an error. + */ + PyErr_SetString(PyExc_TypeError, + "Cache values must be persistent objects."); + return -1; + } + + /* Can't access v->oid directly because the object might be a + * persistent class. + */ + oid = PyObject_GetAttr(v, py__p_oid); + if (oid == NULL) + return -1; + if (! PyBytes_Check(oid)) + { + Py_DECREF(oid); + PyErr_Format(PyExc_TypeError, + "Cached object oid must be bytes, not a %s", + oid->ob_type->tp_name); + + return -1; + } + + /* we know they are both strings. + * now check if they are the same string. 
+ */ + result = PyObject_RichCompareBool(key, oid, Py_NE); + Py_DECREF(oid); + if (result < 0) + { + return -1; + } + if (result) + { + PyErr_SetString(PyExc_ValueError, "Cache key does not match oid"); + return -1; + } + + /* useful sanity check, but not strictly an invariant of this class */ + jar = PyObject_GetAttr(v, py__p_jar); + if (jar == NULL) + return -1; + if (jar==Py_None) + { + Py_DECREF(jar); + PyErr_SetString(PyExc_ValueError, + "Cached object jar missing"); + return -1; + } + Py_DECREF(jar); + + object_again = PyDict_GetItem(self->data, key); + if (object_again) + { + if (object_again != v) + { + PyErr_SetString(PyExc_ValueError, + "A different object already has the same oid"); + return -1; + } + else + { + /* re-register under the same oid - no work needed */ + return 0; + } + } + + if (PyType_Check(v)) + { + if (PyDict_SetItem(self->data, key, v) < 0) + return -1; + PyObject_GC_UnTrack((void *)self->data); + self->klass_count++; + return 0; + } + else + { + PerCache *cache = ((cPersistentObject *)v)->cache; + if (cache) + { + if (cache != (PerCache *)self) + /* This object is already in a different cache. */ + PyErr_SetString(PyExc_ValueError, + "Cache values may only be in one cache."); + return -1; + } + /* else: + + This object is already one of ours, which is ok. It + would be very strange if someone was trying to register + the same object under a different key. + */ + } + + if (PyDict_SetItem(self->data, key, v) < 0) + return -1; + /* the dict should have a borrowed reference */ + PyObject_GC_UnTrack((void *)self->data); + Py_DECREF(v); + + p = (cPersistentObject *)v; + Py_INCREF(self); + p->cache = (PerCache *)self; + if (p->state >= 0) + { + /* insert this non-ghost object into the ring just + behind the home position. 
*/ + self->non_ghost_count++; + ring_add(&self->ring_home, &p->ring); + /* this list should have a new reference to the object */ + Py_INCREF(v); + } + return 0; +} + +static int +cc_del_item(ccobject *self, PyObject *key) +{ + PyObject *v; + cPersistentObject *p; + + /* unlink this item from the ring */ + v = PyDict_GetItem(self->data, key); + if (v == NULL) + { + PyErr_SetObject(PyExc_KeyError, key); + return -1; + } + + if (PyType_Check(v)) + { + self->klass_count--; + } + else + { + p = (cPersistentObject *)v; + if (p->state >= 0) + { + self->non_ghost_count--; + ring_del(&p->ring); + /* The DelItem below will account for the reference + held by the list. */ + } + else + { + /* This is a ghost object, so we haven't kept a reference + count on it. For it have stayed alive this long + someone else must be keeping a reference to + it. Therefore we need to temporarily give it back a + reference count before calling DelItem below */ + Py_INCREF(v); + } + + Py_DECREF((PyObject *)p->cache); + p->cache = NULL; + } + + if (PyDict_DelItem(self->data, key) < 0) + { + PyErr_SetString(PyExc_RuntimeError, + "unexpectedly couldn't remove key in cc_ass_sub"); + return -1; + } + + return 0; +} + +static int +cc_ass_sub(ccobject *self, PyObject *key, PyObject *v) +{ + if (!PyBytes_Check(key)) + { + PyErr_Format(PyExc_TypeError, + "cPickleCache key must be bytes, not a %s", + key->ob_type->tp_name); + return -1; + } + if (v) + return cc_add_item(self, key, v); + else + return cc_del_item(self, key); +} + +static PyMappingMethods cc_as_mapping = +{ + (lenfunc)cc_length, /* mp_length */ + (binaryfunc)cc_subscript, /* mp_subscript */ + (objobjargproc)cc_ass_sub, /* mp_ass_subscript */ +}; + +static PyObject * +cc_cache_data(ccobject *self, void *context) +{ + return PyDict_Copy(self->data); +} + +static PyGetSetDef cc_getsets[] = +{ + {"cache_data", (getter)cc_cache_data}, + {NULL} +}; + + +static PyMemberDef cc_members[] = +{ + {"cache_size", T_INT, offsetof(ccobject, cache_size)}, 
+ {"cache_size_bytes", T_PYSSIZET, offsetof(ccobject, cache_size_bytes)}, + {"total_estimated_size", T_PYSSIZET, + offsetof(ccobject, total_estimated_size), READONLY}, + {"cache_drain_resistance", T_INT, + offsetof(ccobject, cache_drain_resistance)}, + {"cache_non_ghost_count", T_INT, offsetof(ccobject, non_ghost_count), + READONLY}, + {"cache_klass_count", T_INT, offsetof(ccobject, klass_count), READONLY}, + {NULL} +}; + +/* This module is compiled as a shared library. Some compilers don't + allow addresses of Python objects defined in other libraries to be + used in static initializers here. The DEFERRED_ADDRESS macro is + used to tag the slots where such addresses appear; the module init + function must fill in the tagged slots at runtime. The argument is + for documentation -- the macro ignores it. +*/ +#define DEFERRED_ADDRESS(ADDR) 0 + +static PyTypeObject Cctype = +{ + PyVarObject_HEAD_INIT(DEFERRED_ADDRESS(&PyType_Type), 0) + "persistent.PickleCache", /* tp_name */ + sizeof(ccobject), /* tp_basicsize */ + 0, /* tp_itemsize */ + (destructor)cc_dealloc, /* tp_dealloc */ + 0, /* tp_print */ + 0, /* tp_getattr */ + 0, /* tp_setattr */ + 0, /* tp_compare */ + 0, /* tp_repr */ + 0, /* tp_as_number */ + 0, /* tp_as_sequence */ + &cc_as_mapping, /* tp_as_mapping */ + 0, /* tp_hash */ + 0, /* tp_call */ + 0, /* tp_str */ + 0, /* tp_getattro */ + 0, /* tp_setattro */ + 0, /* tp_as_buffer */ + Py_TPFLAGS_DEFAULT | + Py_TPFLAGS_BASETYPE | + Py_TPFLAGS_HAVE_GC, /* tp_flags */ + 0, /* tp_doc */ + (traverseproc)cc_traverse, /* tp_traverse */ + (inquiry)cc_clear, /* tp_clear */ + 0, /* tp_richcompare */ + 0, /* tp_weaklistoffset */ + 0, /* tp_iter */ + 0, /* tp_iternext */ + cc_methods, /* tp_methods */ + cc_members, /* tp_members */ + cc_getsets, /* tp_getset */ + 0, /* tp_base */ + 0, /* tp_dict */ + 0, /* tp_descr_get */ + 0, /* tp_descr_set */ + 0, /* tp_dictoffset */ + (initproc)cc_init, /* tp_init */ +}; + +#ifdef PY3K +static struct PyModuleDef moduledef = +{ + 
PyModuleDef_HEAD_INIT, + "cPickleCache", /* m_name */ + cPickleCache_doc_string, /* m_doc */ + -1, /* m_size */ + NULL, /* m_methods */ + NULL, /* m_reload */ + NULL, /* m_traverse */ + NULL, /* m_clear */ + NULL, /* m_free */ +}; + +#endif + +static PyObject* +module_init(void) +{ + PyObject *module; + +#ifdef PY3K + ((PyObject*)&Cctype)->ob_type = &PyType_Type; +#else + Cctype.ob_type = &PyType_Type; +#endif + Cctype.tp_new = &PyType_GenericNew; + if (PyType_Ready(&Cctype) < 0) + { + return NULL; + } + +#ifdef PY3K + module = PyModule_Create(&moduledef); +#else + module = Py_InitModule3("cPickleCache", NULL, cPickleCache_doc_string); +#endif + +#ifdef PY3K + cPersistenceCAPI = (cPersistenceCAPIstruct *)PyCapsule_Import(CAPI_CAPSULE_NAME, 0); +#else + cPersistenceCAPI = (cPersistenceCAPIstruct *)PyCObject_Import( + "persistent.cPersistence", "CAPI"); +#endif + if (!cPersistenceCAPI) + return NULL; + cPersistenceCAPI->percachedel = (percachedelfunc)cc_oid_unreferenced; + + py__p_changed = INTERN("_p_changed"); + if (!py__p_changed) + return NULL; + py__p_deactivate = INTERN("_p_deactivate"); + if (!py__p_deactivate) + return NULL; + py__p_jar = INTERN("_p_jar"); + if (!py__p_jar) + return NULL; + py__p_oid = INTERN("_p_oid"); + if (!py__p_oid) + return NULL; + + if (PyModule_AddStringConstant(module, "cache_variant", "stiff/c") < 0) + return NULL; + + /* This leaks a reference to Cctype, but it doesn't matter. 
*/ + if (PyModule_AddObject(module, "PickleCache", (PyObject *)&Cctype) < 0) + return NULL; + + return module; +} + +#ifdef PY3K +PyMODINIT_FUNC PyInit_cPickleCache(void) +{ + return module_init(); +} +#else +PyMODINIT_FUNC initcPickleCache(void) +{ + module_init(); +} +#endif diff --git a/thesisenv/lib/python3.6/site-packages/persistent/cPickleCache.cpython-36m-darwin.so b/thesisenv/lib/python3.6/site-packages/persistent/cPickleCache.cpython-36m-darwin.so new file mode 100755 index 0000000..eefa030 Binary files /dev/null and b/thesisenv/lib/python3.6/site-packages/persistent/cPickleCache.cpython-36m-darwin.so differ diff --git a/thesisenv/lib/python3.6/site-packages/persistent/dict.py b/thesisenv/lib/python3.6/site-packages/persistent/dict.py new file mode 100644 index 0000000..77ed37b --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/persistent/dict.py @@ -0,0 +1,16 @@ +############################################################################## +# +# Copyright Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## + +# persistent.dict is deprecated. 
Use persistent.mapping +from persistent.mapping import PersistentMapping as PersistentDict diff --git a/thesisenv/lib/python3.6/site-packages/persistent/interfaces.py b/thesisenv/lib/python3.6/site-packages/persistent/interfaces.py new file mode 100644 index 0000000..61ed594 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/persistent/interfaces.py @@ -0,0 +1,568 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Persistence Interfaces +""" + +from zope.interface import Interface +from zope.interface import Attribute + +# Allowed values for _p_state +try: + from persistent.cPersistence import GHOST + from persistent.cPersistence import UPTODATE + from persistent.cPersistence import CHANGED + from persistent.cPersistence import STICKY +except ImportError: # pragma: no cover + GHOST = -1 + UPTODATE = 0 + CHANGED = 1 + STICKY = 2 + + +OID_TYPE = SERIAL_TYPE = bytes + +class IPersistent(Interface): + """Python persistent interface + + A persistent object can be in one of several states: + + - Unsaved + + The object has been created but not saved in a data manager. + + In this state, the _p_changed attribute is non-None and false + and the _p_jar attribute is None. + + - Saved + + The object has been saved and has not been changed since it was saved. 
+ + In this state, the _p_changed attribute is non-None and false + and the _p_jar attribute is set to a data manager. + + - Sticky + + This state is identical to the saved state except that the + object cannot transition to the ghost state. This is a special + state used by C methods of persistent objects to make sure that + state is not unloaded in the middle of computation. + + In this state, the _p_changed attribute is non-None and false + and the _p_jar attribute is set to a data manager. + + There is no Python API for detecting whether an object is in the + sticky state. + + - Changed + + The object has been changed. + + In this state, the _p_changed attribute is true + and the _p_jar attribute is set to a data manager. + + - Ghost + + the object is in memory but its state has not been loaded from + the database (or its state has been unloaded). In this state, + the object doesn't contain any application data. + + In this state, the _p_changed attribute is None, and the _p_jar + attribute is set to the data manager from which the object was + obtained. + + In all the above, _p_oid (the persistent object id) is set when + _p_jar first gets set. + + The following state transitions are possible: + + - Unsaved -> Saved + + This transition occurs when an object is saved in the + database. This usually happens when an unsaved object is added + to (e.g. as an attribute or item of) a saved (or changed) object + and the transaction is committed. + + - Saved -> Changed + Sticky -> Changed + Ghost -> Changed + + This transition occurs when someone sets an attribute or sets + _p_changed to a true value on a saved, sticky or ghost object. When + the transition occurs, the persistent object is required to call the + register() method on its data manager, passing itself as the + only argument. 
+ + Prior to ZODB 3.6, setting _p_changed to a true value on a ghost object + was ignored (the object remained a ghost, and getting its _p_changed + attribute continued to return None). + + - Saved -> Sticky + + This transition occurs when C code marks the object as sticky to + prevent its deactivation. + + - Saved -> Ghost + + This transition occurs when a saved object is deactivated or + invalidated. See discussion below. + + - Sticky -> Saved + + This transition occurs when C code unmarks the object as sticky to + allow its deactivation. + + - Changed -> Saved + + This transition occurs when a transaction is committed. After + saving the state of a changed object during transaction commit, + the data manager sets the object's _p_changed to a non-None false + value. + + - Changed -> Ghost + + This transition occurs when a transaction is aborted. All changed + objects are invalidated by the data manager by an abort. + + - Ghost -> Saved + + This transition occurs when an attribute or operation of a ghost + is accessed and the object's state is loaded from the database. + + Note that there is a separate C API that is not included here. + The C API requires a specific data layout and defines the sticky + state. + + + About Invalidation, Deactivation and the Sticky & Ghost States + + The sticky state is intended to be a short-lived state, to prevent + an object's state from being discarded while we're in C routines. It + is an error to invalidate an object in the sticky state. + + Deactivation is a request that an object discard its state (become + a ghost). Deactivation is an optimization, and a request to + deactivate may be ignored. There are two equivalent ways to + request deactivation: + + - call _p_deactivate() + - set _p_changed to None + + There are two ways to invalidate an object: call the + _p_invalidate() method (preferred) or delete its _p_changed + attribute. This cannot be ignored, and is used when semantics + require invalidation. 
Normally, an invalidated object transitions + to the ghost state. However, some objects cannot be ghosts. When + these objects are invalidated, they immediately reload their state + from their data manager, and are then in the saved state. + + reprs + + By default, persistent objects include the reprs of their + _p_oid and _p_jar, if any, in their repr. If a subclass implements + the optional method ``_p_repr``, it will be called and its results returned + instead of the default repr; if this method raises an exception, that + exception will be caught and its repr included in the default repr. + + """ + + _p_jar = Attribute( + """The data manager for the object. + + The data manager should implement IPersistentDataManager (note that + this constraint is not enforced). + + If there is no data manager, then this is None. + + Once assigned to a data manager, an object cannot be re-assigned + to another. + """) + + _p_oid = Attribute( + """The object id. + + It is up to the data manager to assign this. + + The special value None is reserved to indicate that an object + id has not been assigned. Non-None object ids must be non-empty + strings. The 8-byte string consisting of 8 NUL bytes + ('\x00\x00\x00\x00\x00\x00\x00\x00') is reserved to identify the + database root object. + + Once assigned an OID, an object cannot be re-assigned another. + """) + + _p_changed = Attribute( + """The persistent state of the object. + + This is one of: + + None -- The object is a ghost. + + false but not None -- The object is saved (or has never been saved). + + true -- The object has been modified since it was last saved. + + The object state may be changed by assigning or deleting this + attribute; however, assigning None is ignored if the object is + not in the saved state, and may be ignored even if the object is + in the saved state. 
+ + At and after ZODB 3.6, setting _p_changed to a true value for a ghost + object activates the object; prior to 3.6, setting _p_changed to a + true value on a ghost object was ignored. + + Note that an object can transition to the changed state only if + it has a data manager. When such a state change occurs, the + 'register' method of the data manager must be called, passing the + persistent object. + + Deleting this attribute forces invalidation independent of + existing state, although it is an error if the sticky state is + current. + """) + + _p_serial = Attribute( + """The object serial number. + + This member is used by the data manager to distiguish distinct + revisions of a given persistent object. + + This is an 8-byte string (not Unicode). + """) + + _p_mtime = Attribute( + """The object's modification time (read-only). + + This is a float, representing seconds since the epoch (as returned + by time.time). + """) + + _p_state = Attribute( + """The object's persistence state token. + + Must be one of GHOST, UPTODATE, CHANGED, or STICKY. + """) + + _p_estimated_size = Attribute( + """An estimate of the object's size in bytes. + + May be set by the data manager. + """) + + # Attribute access protocol + def __getattribute__(name): + """ Handle activating ghosts before returning an attribute value. + + "Special" attributes and '_p_*' attributes don't require activation. + """ + + def __setattr__(name, value): + """ Handle activating ghosts before setting an attribute value. + + "Special" attributes and '_p_*' attributes don't require activation. + """ + + def __delattr__(name): + """ Handle activating ghosts before deleting an attribute value. + + "Special" attributes and '_p_*' attributes don't require activation. + """ + + # Pickling protocol. + def __getstate__(): + """Get the object data. + + The state should not include persistent attributes ("_p_name"). + The result must be picklable. + """ + + def __setstate__(state): + """Set the object data. 
+ """ + + def __reduce__(): + """Reduce an object to contituent parts for serialization. + """ + + # Custom methods + def _p_activate(): + """Activate the object. + + Change the object to the saved state if it is a ghost. + """ + + def _p_deactivate(): + """Deactivate the object. + + Possibly change an object in the saved state to the + ghost state. It may not be possible to make some persistent + objects ghosts, and, for optimization reasons, the implementation + may choose to keep an object in the saved state. + """ + + def _p_invalidate(): + """Invalidate the object. + + Invalidate the object. This causes any data to be thrown + away, even if the object is in the changed state. The object + is moved to the ghost state; further accesses will cause + object data to be reloaded. + """ + + def _p_getattr(name): + """Test whether the base class must handle the name + + The method unghostifies the object, if necessary. + The method records the object access, if necessary. + + This method should be called by subclass __getattribute__ + implementations before doing anything else. If the method + returns True, then __getattribute__ implementations must delegate + to the base class, Persistent. + """ + + def _p_setattr(name, value): + """Save persistent meta data + + This method should be called by subclass __setattr__ implementations + before doing anything else. If it returns true, then the attribute + was handled by the base class. + + The method unghostifies the object, if necessary. + The method records the object access, if necessary. + """ + + def _p_delattr(name): + """Delete persistent meta data + + This method should be called by subclass __delattr__ implementations + before doing anything else. If it returns true, then the attribute + was handled by the base class. + + The method unghostifies the object, if necessary. + The method records the object access, if necessary. + """ + +# TODO: document conflict resolution. 
+ +class IPersistentDataManager(Interface): + """Provide services for managing persistent state. + + This interface is used by a persistent object to interact with its + data manager in the context of a transaction. + """ + _cache = Attribute("The pickle cache associated with this connection.") + + def setstate(object): + """Load the state for the given object. + + The object should be in the ghost state. The object's state will be + set and the object will end up in the saved state. + + The object must provide the IPersistent interface. + """ + + def oldstate(obj, tid): + """Return copy of 'obj' that was written by transaction 'tid'. + + The returned object does not have the typical metadata (_p_jar, _p_oid, + _p_serial) set. I'm not sure how references to other peristent objects + are handled. + + Parameters + obj: a persistent object from this Connection. + tid: id of a transaction that wrote an earlier revision. + + Raises KeyError if tid does not exist or if tid deleted a revision of + obj. + """ + + def register(object): + """Register an IPersistent with the current transaction. + + This method must be called when the object transitions to + the changed state. + + A subclass could override this method to customize the default + policy of one transaction manager for each thread. + """ + +# Maybe later: +## def mtime(object): +## """Return the modification time of the object. + +## The modification time may not be known, in which case None +## is returned. If non-None, the return value is the kind of +## timestamp supplied by Python's time.time(). +## """ + + +class IPickleCache(Interface): + """ API of the cache for a ZODB connection. + """ + def __getitem__(oid): + """ -> the persistent object for OID. + + o Raise KeyError if not found. + """ + + def __setitem__(oid, value): + """ Save the persistent object under OID. + + o 'oid' must be a string, else raise ValueError. 
+ + o Raise KeyError on duplicate + """ + + def __delitem__(oid): + """ Remove the persistent object for OID. + + o 'oid' must be a string, else raise ValueError. + + o Raise KeyError if not found. + """ + + def get(oid, default=None): + """ -> the persistent object for OID. + + o Return 'default' if not found. + """ + + def mru(oid): + """ Move the element corresonding to 'oid' to the head. + + o Raise KeyError if no element is found. + """ + + def __len__(): + """ -> the number of OIDs in the cache. + """ + + def items(): + """-> a sequence of tuples (oid, value) for cached objects. + + o Only includes items in 'data' (no p-classes). + """ + + def ringlen(): + """ -> the number of persistent objects in the ring. + + o Only includes items in the ring (no ghosts or p-classes). + """ + + def lru_items(): + """ -> a sequence of tuples (oid, value) for cached objects. + + o Tuples will be in LRU order. + + o Only includes items in the ring (no ghosts or p-classes). + """ + + def klass_items(): + """-> a sequence of tuples (oid, value) for cached p-classes. + + o Only includes persistent classes. + """ + + def incrgc(): + """ Perform an incremental garbage collection sweep. + + o Reduce number of non-ghosts to 'cache_size', if possible. + + o Ghostify in LRU order. + + o Skip dirty or sticky objects. + + o Quit once we get down to 'cache_size'. + """ + + def full_sweep(): + """ Perform a full garbage collection sweep. + + o Reduce number of non-ghosts to 0, if possible. + + o Ghostify all non-sticky / non-changed objecs. + """ + + def minimize(): + """ Alias for 'full_sweep'. + + o XXX? + """ + + def new_ghost(oid, obj): + """ Add the given (ghost) object to the cache. + + Also, set its _p_jar and _p_oid, and ensure it is in the + GHOST state. + + If the object doesn't define '_p_oid' / '_p_jar', raise. + + If the object's '_p_oid' is not None, raise. + + If the object's '_p_jar' is not None, raise. + + If 'oid' is already in the cache, raise. 
+ """ + + def reify(to_reify): + """ Reify the indicated objects. + + o If 'to_reify' is a string, treat it as an OID. + + o Otherwise, iterate over it as a sequence of OIDs. + + o For each OID, if present in 'data' and in GHOST state: + + o Call '_p_activate' on the object. + + o Add it to the ring. + + o If any OID is present but not in GHOST state, skip it. + + o Raise KeyErrory if any OID is not present. + """ + + def invalidate(to_invalidate): + """ Invalidate the indicated objects. + + o If 'to_invalidate' is a string, treat it as an OID. + + o Otherwise, iterate over it as a sequence of OIDs. + + o Any OID corresponding to a p-class will cause the corresponding + p-class to be removed from the cache. + + o For all other OIDs, ghostify the corrsponding object and + remove it from the ring. + """ + + def debug_info(): + """Return debugging data about objects in the cache. + + o Return a sequence of tuples, (oid, refcount, typename, state). + """ + + def update_object_size_estimation(oid, new_size): + """Update the cache's size estimation for 'oid', if known to the cache. + """ + + cache_size = Attribute('Target size of the cache') + cache_drain_resistance = Attribute('Factor for draining cache below ' + 'target size') + cache_non_ghost_count = Attribute('Number of non-ghosts in the cache ' + '(XXX how is it different from ' + 'ringlen?') + cache_data = Attribute("Property: copy of our 'data' dict") + cache_klass_count = Attribute("Property: len of 'persistent_classes'") diff --git a/thesisenv/lib/python3.6/site-packages/persistent/list.py b/thesisenv/lib/python3.6/site-packages/persistent/list.py new file mode 100644 index 0000000..8dd4a2b --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/persistent/list.py @@ -0,0 +1,98 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. 
+# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## + +"""Python implementation of persistent list. + +$Id$""" + +import persistent +from persistent._compat import UserList +from persistent._compat import PYTHON2 + +class PersistentList(UserList, persistent.Persistent): + """A persistent wrapper for list objects. + + Mutating instances of this class will cause them to be marked + as changed and automatically persisted. + """ + __super_setitem = UserList.__setitem__ + __super_delitem = UserList.__delitem__ + __super_iadd = UserList.__iadd__ + __super_imul = UserList.__imul__ + __super_append = UserList.append + __super_insert = UserList.insert + __super_pop = UserList.pop + __super_remove = UserList.remove + __super_reverse = UserList.reverse + __super_sort = UserList.sort + __super_extend = UserList.extend + + def __setitem__(self, i, item): + self.__super_setitem(i, item) + self._p_changed = 1 + + def __delitem__(self, i): + self.__super_delitem(i) + self._p_changed = 1 + + if PYTHON2: # pragma: no cover + __super_setslice = UserList.__setslice__ + __super_delslice = UserList.__delslice__ + + def __setslice__(self, i, j, other): + self.__super_setslice(i, j, other) + self._p_changed = 1 + + def __delslice__(self, i, j): + self.__super_delslice(i, j) + self._p_changed = 1 + + def __iadd__(self, other): + L = self.__super_iadd(other) + self._p_changed = 1 + return L + + def __imul__(self, n): + L = self.__super_imul(n) + self._p_changed = 1 + return L + + def append(self, item): + self.__super_append(item) + self._p_changed = 1 + + def 
insert(self, i, item): + self.__super_insert(i, item) + self._p_changed = 1 + + def pop(self, i=-1): + rtn = self.__super_pop(i) + self._p_changed = 1 + return rtn + + def remove(self, item): + self.__super_remove(item) + self._p_changed = 1 + + def reverse(self): + self.__super_reverse() + self._p_changed = 1 + + def sort(self, *args, **kwargs): + self.__super_sort(*args, **kwargs) + self._p_changed = 1 + + def extend(self, other): + self.__super_extend(other) + self._p_changed = 1 diff --git a/thesisenv/lib/python3.6/site-packages/persistent/mapping.py b/thesisenv/lib/python3.6/site-packages/persistent/mapping.py new file mode 100644 index 0000000..c023e98 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/persistent/mapping.py @@ -0,0 +1,102 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## + +"""Python implementation of persistent base types + +$Id$""" + +import persistent +from persistent._compat import IterableUserDict + +class default(object): + + def __init__(self, func): + self.func = func + + def __get__(self, inst, class_): + if inst is None: + return self + return self.func(inst) + + +class PersistentMapping(IterableUserDict, persistent.Persistent): + """A persistent wrapper for mapping objects. + + This class allows wrapping of mapping objects so that object + changes are registered. As a side effect, mapping objects may be + subclassed. 
+ + A subclass of PersistentMapping or any code that adds new + attributes should not create an attribute named _container. This + is reserved for backwards compatibility reasons. + """ + + # UserDict provides all of the mapping behavior. The + # PersistentMapping class is responsible marking the persistent + # state as changed when a method actually changes the state. At + # the mapping API evolves, we may need to add more methods here. + + __super_delitem = IterableUserDict.__delitem__ + __super_setitem = IterableUserDict.__setitem__ + __super_clear = IterableUserDict.clear + __super_update = IterableUserDict.update + __super_setdefault = IterableUserDict.setdefault + __super_pop = IterableUserDict.pop + __super_popitem = IterableUserDict.popitem + + def __delitem__(self, key): + self.__super_delitem(key) + self._p_changed = 1 + + def __setitem__(self, key, v): + self.__super_setitem(key, v) + self._p_changed = 1 + + def clear(self): + self.__super_clear() + self._p_changed = 1 + + def update(self, b): + self.__super_update(b) + self._p_changed = 1 + + def setdefault(self, key, failobj=None): + # We could inline all of UserDict's implementation into the + # method here, but I'd rather not depend at all on the + # implementation in UserDict (simple as it is). + if not key in self.data: + self._p_changed = 1 + return self.__super_setdefault(key, failobj) + + def pop(self, key, *args): + self._p_changed = 1 + return self.__super_pop(key, *args) + + def popitem(self): + self._p_changed = 1 + return self.__super_popitem() + + # Old implementations used _container rather than data. + # Use a descriptor to provide data when we have _container instead + + @default + def data(self): + # We don't want to cause a write on read, so wer're careful not to + # do anything that would cause us to become marked as changed, however, + # if we're modified, then the saved record will have data, not + # _container. 
+ data = self.__dict__.pop('_container') + self.__dict__['data'] = data + + return data diff --git a/thesisenv/lib/python3.6/site-packages/persistent/persistence.py b/thesisenv/lib/python3.6/site-packages/persistent/persistence.py new file mode 100644 index 0000000..dc44fc6 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/persistent/persistence.py @@ -0,0 +1,608 @@ +############################################################################## +# +# Copyright (c) 2011 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +import struct + +from zope.interface import implementer + +from persistent.interfaces import IPersistent +from persistent.interfaces import GHOST +from persistent.interfaces import UPTODATE +from persistent.interfaces import CHANGED +from persistent.interfaces import STICKY + +from persistent.interfaces import SERIAL_TYPE +from persistent.timestamp import TimeStamp +from persistent.timestamp import _ZERO +from persistent._compat import copy_reg +from persistent._compat import intern + + +_INITIAL_SERIAL = _ZERO + + +# Bitwise flags +_CHANGED = 0x0001 +_STICKY = 0x0002 + +_OGA = object.__getattribute__ +_OSA = object.__setattr__ +_ODA = object.__delattr__ + +# These names can be used from a ghost without causing it to be +# activated. 
These are standardized with the C implementation +SPECIAL_NAMES = ('__class__', + '__del__', + '__dict__', + '__of__', + '__setstate__',) + +# And this is an implementation detail of this class; it holds +# the standard names plus the slot names, allowing for just one +# check in __getattribute__ +_SPECIAL_NAMES = set(SPECIAL_NAMES) + +# Represent 8-byte OIDs as hex integer, just like +# ZODB does. +_OID_STRUCT = struct.Struct('>Q') +_OID_UNPACK = _OID_STRUCT.unpack + +@implementer(IPersistent) +class Persistent(object): + """ Pure Python implmentation of Persistent base class + """ + __slots__ = ('__jar', '__oid', '__serial', '__flags', '__size', '__ring',) + + def __new__(cls, *args, **kw): + inst = super(Persistent, cls).__new__(cls) + # We bypass the __setattr__ implementation of this object + # at __new__ time, just like the C implementation does. This + # makes us compatible with subclasses that want to access + # properties like _p_changed in their setattr implementation + _OSA(inst, '_Persistent__jar', None) + _OSA(inst, '_Persistent__oid', None) + _OSA(inst, '_Persistent__serial', None) + _OSA(inst, '_Persistent__flags', None) + _OSA(inst, '_Persistent__size', 0) + _OSA(inst, '_Persistent__ring', None) + return inst + + # _p_jar: see IPersistent. + def _get_jar(self): + return _OGA(self, '_Persistent__jar') + + def _set_jar(self, value): + jar = _OGA(self, '_Persistent__jar') + if self._p_is_in_cache(jar) and value is not None and jar != value: + # The C implementation only forbids changing the jar + # if we're already in a cache. 
Match its error message + raise ValueError('can not change _p_jar of cached object') + + if _OGA(self, '_Persistent__jar') != value: + _OSA(self, '_Persistent__jar', value) + _OSA(self, '_Persistent__flags', 0) + + def _del_jar(self): + jar = _OGA(self, '_Persistent__jar') + if jar is not None: + if self._p_is_in_cache(jar): + raise ValueError("can't delete _p_jar of cached object") + _OSA(self, '_Persistent__jar', None) + _OSA(self, '_Persistent__flags', None) + + _p_jar = property(_get_jar, _set_jar, _del_jar) + + # _p_oid: see IPersistent. + def _get_oid(self): + return _OGA(self, '_Persistent__oid') + + def _set_oid(self, value): + if value == _OGA(self, '_Persistent__oid'): + return + # The C implementation allows *any* value to be + # used as the _p_oid. + #if value is not None: + # if not isinstance(value, OID_TYPE): + # raise ValueError('Invalid OID type: %s' % value) + # The C implementation only forbids changing the OID + # if we're in a cache, regardless of what the current + # value or jar is + if self._p_is_in_cache(): + # match the C error message + raise ValueError('can not change _p_oid of cached object') + _OSA(self, '_Persistent__oid', value) + + def _del_oid(self): + jar = _OGA(self, '_Persistent__jar') + oid = _OGA(self, '_Persistent__oid') + if jar is not None: + if oid and jar._cache.get(oid): + raise ValueError('Cannot delete _p_oid of cached object') + _OSA(self, '_Persistent__oid', None) + + _p_oid = property(_get_oid, _set_oid, _del_oid) + + # _p_serial: see IPersistent. 
+ def _get_serial(self): + serial = _OGA(self, '_Persistent__serial') + if serial is not None: + return serial + return _INITIAL_SERIAL + + def _set_serial(self, value): + if not isinstance(value, SERIAL_TYPE): + raise ValueError('Invalid SERIAL type: %s' % value) + if len(value) != 8: + raise ValueError('SERIAL must be 8 octets') + _OSA(self, '_Persistent__serial', value) + + def _del_serial(self): + _OSA(self, '_Persistent__serial', None) + + _p_serial = property(_get_serial, _set_serial, _del_serial) + + # _p_changed: see IPersistent. + def _get_changed(self): + if _OGA(self, '_Persistent__jar') is None: + return False + flags = _OGA(self, '_Persistent__flags') + if flags is None: # ghost + return None + return bool(flags & _CHANGED) + + def _set_changed(self, value): + if _OGA(self, '_Persistent__flags') is None: + if value: + self._p_activate() + self._p_set_changed_flag(value) + else: + if value is None: # -> ghost + self._p_deactivate() + else: + self._p_set_changed_flag(value) + + def _del_changed(self): + self._p_invalidate() + + _p_changed = property(_get_changed, _set_changed, _del_changed) + + # _p_mtime + def _get_mtime(self): + # The C implementation automatically unghostifies the object + # when _p_mtime is accessed. 
+ self._p_activate() + self._p_accessed() + serial = _OGA(self, '_Persistent__serial') + if serial is not None: + ts = TimeStamp(serial) + return ts.timeTime() + + _p_mtime = property(_get_mtime) + + # _p_state + def _get_state(self): + # Note the use of OGA and caching to avoid recursive calls to __getattribute__: + # __getattribute__ calls _p_accessed calls cache.mru() calls _p_state + if _OGA(self, '_Persistent__jar') is None: + return UPTODATE + flags = _OGA(self, '_Persistent__flags') + if flags is None: + return GHOST + if flags & _CHANGED: + result = CHANGED + else: + result = UPTODATE + if flags & _STICKY: + return STICKY + return result + + _p_state = property(_get_state) + + # _p_estimated_size: XXX don't want to reserve the space? + def _get_estimated_size(self): + return _OGA(self, '_Persistent__size') * 64 + + def _set_estimated_size(self, value): + if isinstance(value, int): + if value < 0: + raise ValueError('_p_estimated_size must not be negative') + _OSA(self, '_Persistent__size', _estimated_size_in_24_bits(value)) + else: + raise TypeError("_p_estimated_size must be an integer") + + def _del_estimated_size(self): + _OSA(self, '_Persistent__size', 0) + + _p_estimated_size = property( + _get_estimated_size, _set_estimated_size, _del_estimated_size) + + # The '_p_sticky' property is not (yet) part of the API: for now, + # it exists to simplify debugging and testing assertions. + def _get_sticky(self): + flags = _OGA(self, '_Persistent__flags') + if flags is None: + return False + return bool(flags & _STICKY) + def _set_sticky(self, value): + flags = _OGA(self, '_Persistent__flags') + if flags is None: + raise ValueError('Ghost') + if value: + flags |= _STICKY + else: + flags &= ~_STICKY + _OSA(self, '_Persistent__flags', flags) + _p_sticky = property(_get_sticky, _set_sticky) + + # The '_p_status' property is not (yet) part of the API: for now, + # it exists to simplify debugging and testing assertions. 
+ def _get_status(self): + if _OGA(self, '_Persistent__jar') is None: + return 'unsaved' + flags = _OGA(self, '_Persistent__flags') + if flags is None: + return 'ghost' + if flags & _STICKY: + return 'sticky' + if flags & _CHANGED: + return 'changed' + return 'saved' + + _p_status = property(_get_status) + + # Methods from IPersistent. + def __getattribute__(self, name): + """ See IPersistent. + """ + oga = _OGA + if (not name.startswith('_p_') and + name not in _SPECIAL_NAMES): + if oga(self, '_Persistent__flags') is None: + oga(self, '_p_activate')() + oga(self, '_p_accessed')() + return oga(self, name) + + def __setattr__(self, name, value): + special_name = (name in _SPECIAL_NAMES or + name.startswith('_p_')) + volatile = name.startswith('_v_') + if not special_name: + if _OGA(self, '_Persistent__flags') is None: + _OGA(self, '_p_activate')() + if not volatile: + _OGA(self, '_p_accessed')() + _OSA(self, name, value) + if (_OGA(self, '_Persistent__jar') is not None and + _OGA(self, '_Persistent__oid') is not None and + not special_name and + not volatile): + before = _OGA(self, '_Persistent__flags') + after = before | _CHANGED + if before != after: + _OSA(self, '_Persistent__flags', after) + _OGA(self, '_p_register')() + + def __delattr__(self, name): + special_name = (name in _SPECIAL_NAMES or + name.startswith('_p_')) + if not special_name: + if _OGA(self, '_Persistent__flags') is None: + _OGA(self, '_p_activate')() + _OGA(self, '_p_accessed')() + before = _OGA(self, '_Persistent__flags') + after = before | _CHANGED + if before != after: + _OSA(self, '_Persistent__flags', after) + if (_OGA(self, '_Persistent__jar') is not None and + _OGA(self, '_Persistent__oid') is not None): + _OGA(self, '_p_register')() + _ODA(self, name) + + def _slotnames(self, _v_exclude=True): + slotnames = copy_reg._slotnames(type(self)) + return [x for x in slotnames + if not x.startswith('_p_') and + not (x.startswith('_v_') and _v_exclude) and + not x.startswith('_Persistent__') and 
+ x not in Persistent.__slots__] + + def __getstate__(self): + """ See IPersistent. + """ + idict = getattr(self, '__dict__', None) + slotnames = self._slotnames() + if idict is not None: + d = dict([x for x in idict.items() + if not x[0].startswith('_p_') and + not x[0].startswith('_v_')]) + else: + d = None + if slotnames: + s = {} + for slotname in slotnames: + value = getattr(self, slotname, self) + if value is not self: + s[slotname] = value + return d, s + return d + + def __setstate__(self, state): + """ See IPersistent. + """ + if isinstance(state,tuple): + inst_dict, slots = state + else: + inst_dict, slots = state, () + idict = getattr(self, '__dict__', None) + if inst_dict is not None: + if idict is None: + raise TypeError('No instance dict') + idict.clear() + for k, v in inst_dict.items(): + # Normally the keys for instance attributes are interned. + # Do that here, but only if it is possible to do so. + idict[intern(k) if type(k) is str else k] = v + slotnames = self._slotnames() + if slotnames: + for k, v in slots.items(): + setattr(self, k, v) + + def __reduce__(self): + """ See IPersistent. + """ + gna = getattr(self, '__getnewargs__', lambda: ()) + return (copy_reg.__newobj__, + (type(self),) + gna(), self.__getstate__()) + + def _p_activate(self): + """ See IPersistent. + """ + oga = _OGA + before = oga(self, '_Persistent__flags') + if before is None: # Only do this if we're a ghost + # Begin by marking up-to-date in case we bail early + _OSA(self, '_Persistent__flags', 0) + jar = oga(self, '_Persistent__jar') + if jar is None: + return + oid = oga(self, '_Persistent__oid') + if oid is None: + return + + # If we're actually going to execute a set-state, + # mark as changed to prevent any recursive call + # (actually, our earlier check that we're a ghost should + # prevent this, but the C implementation sets it to changed + # while calling jar.setstate, and this is observable to clients). 
+ # The main point of this is to prevent changes made during + # setstate from registering the object with the jar. + _OSA(self, '_Persistent__flags', CHANGED) + try: + jar.setstate(self) + except: + _OSA(self, '_Persistent__flags', before) + raise + else: + # If we succeed, no matter what the implementation + # of setstate did, mark ourself as up-to-date. The + # C implementation unconditionally does this. + _OSA(self, '_Persistent__flags', 0) # up-to-date + + # In the C implementation, _p_invalidate winds up calling + # _p_deactivate. There are ZODB tests that depend on this; + # it's not documented but there may be code in the wild + # that does as well + + def _p_deactivate(self): + """ See IPersistent. + """ + flags = _OGA(self, '_Persistent__flags') + if flags is not None and not flags: + self._p_invalidate_deactivate_helper() + + def _p_invalidate(self): + """ See IPersistent. + """ + # If we think we have changes, we must pretend + # like we don't so that deactivate does its job + _OSA(self, '_Persistent__flags', 0) + self._p_deactivate() + + def _p_invalidate_deactivate_helper(self, clear=True): + jar = _OGA(self, '_Persistent__jar') + if jar is None: + return + + if _OGA(self, '_Persistent__flags') is not None: + _OSA(self, '_Persistent__flags', None) + + if clear: + try: + idict = _OGA(self, '__dict__') + except AttributeError: + pass + else: + idict.clear() + type_ = type(self) + # for backward-compatibility reason we release __slots__ only if + # class does not override __new__ + if type_.__new__ is Persistent.__new__: + for slotname in Persistent._slotnames(self, _v_exclude=False): + try: + getattr(type_, slotname).__delete__(self) + except AttributeError: + # AttributeError means slot variable was not initialized at all - + # - we can simply skip its deletion. 
+ pass + + # Implementation detail: deactivating/invalidating + # updates the size of the cache (if we have one) + # by telling it this object no longer takes any bytes + # (-1 is a magic number to compensate for the implementation, + # which always adds one to the size given) + try: + cache = jar._cache + except AttributeError: + pass + else: + cache.update_object_size_estimation(_OGA(self, '_Persistent__oid'), -1) + # See notes in PickleCache.sweep for why we have to do this + cache._persistent_deactivate_ran = True + + def _p_getattr(self, name): + """ See IPersistent. + """ + if name.startswith('_p_') or name in _SPECIAL_NAMES: + return True + self._p_activate() + self._p_accessed() + return False + + def _p_setattr(self, name, value): + """ See IPersistent. + """ + if name.startswith('_p_'): + _OSA(self, name, value) + return True + self._p_activate() + self._p_accessed() + return False + + def _p_delattr(self, name): + """ See IPersistent. + """ + if name.startswith('_p_'): + if name == '_p_oid' and self._p_is_in_cache(_OGA(self, '_Persistent__jar')): + # The C implementation forbids deleting the oid + # if we're already in a cache. Match its error message + raise ValueError('can not change _p_jar of cached object') + + _ODA(self, name) + return True + self._p_activate() + self._p_accessed() + return False + + # Helper methods: not APIs: we name them with '_p_' to bypass + # the __getattribute__ bit which bumps the cache. 
+ def _p_register(self): + jar = _OGA(self, '_Persistent__jar') + if jar is not None and _OGA(self, '_Persistent__oid') is not None: + jar.register(self) + + def _p_set_changed_flag(self, value): + if value: + before = _OGA(self, '_Persistent__flags') + after = before | _CHANGED + if before != after: + self._p_register() + _OSA(self, '_Persistent__flags', after) + else: + flags = _OGA(self, '_Persistent__flags') + flags &= ~_CHANGED + _OSA(self, '_Persistent__flags', flags) + + + def _p_accessed(self): + # Notify the jar's pickle cache that we have been accessed. + # This relies on what has been (until now) an implementation + # detail, the '_cache' attribute of the jar. We made it a + # private API to avoid the cycle of keeping a reference to + # the cache on the persistent object. + + # The below is the equivalent of this, but avoids + # several recursive through __getattribute__, especially for _p_state, + # and benchmarks much faster + # + # if(self.__jar is None or + # self.__oid is None or + # self._p_state < 0 ): return + + oga = _OGA + jar = oga(self, '_Persistent__jar') + if jar is None: + return + oid = oga(self, '_Persistent__oid') + if oid is None: + return + flags = oga(self, '_Persistent__flags') + if flags is None: # ghost + return + + + # The KeyError arises in ZODB: ZODB.serialize.ObjectWriter + # can assign a jar and an oid to newly seen persistent objects, + # but because they are newly created, they aren't in the + # pickle cache yet. There doesn't seem to be a way to distinguish + # that at this level, all we can do is catch it. 
+ # The AttributeError arises in ZODB test cases + try: + jar._cache.mru(oid) + except (AttributeError,KeyError): + pass + + + def _p_is_in_cache(self, jar=None): + oid = _OGA(self, '_Persistent__oid') + if not oid: + return False + + jar = jar or _OGA(self, '_Persistent__jar') + cache = getattr(jar, '_cache', None) + if cache is not None: + return cache.get(oid) is self + + def __repr__(self): + p_repr_str = '' + p_repr = getattr(type(self), '_p_repr', None) + if p_repr is not None: + try: + return p_repr(self) + except Exception as e: + p_repr_str = ' _p_repr %r' % (e,) + + oid = _OGA(self, '_Persistent__oid') + jar = _OGA(self, '_Persistent__jar') + + oid_str = '' + jar_str = '' + + if oid is not None: + try: + if isinstance(oid, bytes) and len(oid) == 8: + oid_str = ' oid 0x%x' % (_OID_UNPACK(oid)[0],) + else: + oid_str = ' oid %r' % (oid,) + except Exception as e: + oid_str = ' oid %r' % (e,) + + if jar is not None: + try: + jar_str = ' in %r' % (jar,) + except Exception as e: + jar_str = ' in %r' % (e,) + + return '<%s.%s object at 0x%x%s%s%s>' % ( + # Match the C name for this exact class + type(self).__module__ if type(self) is not Persistent else 'persistent', + type(self).__name__, id(self), + oid_str, jar_str, p_repr_str + ) + + +def _estimated_size_in_24_bits(value): + if value > 1073741696: + return 16777215 + return (value//64) + 1 + +_SPECIAL_NAMES.update([intern('_Persistent' + x) for x in Persistent.__slots__]) diff --git a/thesisenv/lib/python3.6/site-packages/persistent/picklecache.py b/thesisenv/lib/python3.6/site-packages/persistent/picklecache.py new file mode 100644 index 0000000..9bcb573 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/persistent/picklecache.py @@ -0,0 +1,384 @@ +############################################################################## +# +# Copyright (c) 2009 Zope Foundation and Contributors. +# All Rights Reserved. 
+# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +import gc +import weakref + + +from zope.interface import implementer + +from persistent.interfaces import GHOST +from persistent.interfaces import IPickleCache +from persistent.interfaces import OID_TYPE +from persistent.interfaces import UPTODATE +from persistent.persistence import Persistent +from persistent.persistence import _estimated_size_in_24_bits + +# Tests may modify this to add additional types +_CACHEABLE_TYPES = (type, Persistent) +_SWEEPABLE_TYPES = (Persistent,) + +# On Jython, we need to explicitly ask it to monitor +# objects if we want a more deterministic GC +if hasattr(gc, 'monitorObject'): # pragma: no cover + _gc_monitor = gc.monitorObject +else: + def _gc_monitor(o): + pass + +_OGA = object.__getattribute__ + +def _sweeping_ring(f): + # A decorator for functions in the PickleCache + # that are sweeping the entire ring (mutating it); + # serves as a pseudo-lock to not mutate the ring further + # in other functions + def locked(self, *args, **kwargs): + self._is_sweeping_ring = True + try: + return f(self, *args, **kwargs) + finally: + self._is_sweeping_ring = False + return locked + +from .ring import Ring + +@implementer(IPickleCache) +class PickleCache(object): + + total_estimated_size = 0 + cache_size_bytes = 0 + + # Set by functions that sweep the entire ring (via _sweeping_ring) + # Serves as a pseudo-lock + _is_sweeping_ring = False + + def __init__(self, jar, target_size=0, cache_size_bytes=0): + # TODO: forward-port Dieter's bytes stuff + self.jar = jar + # 
We expect the jars to be able to have a pointer to + # us; this is a reference cycle, but certain + # aspects of invalidation and accessing depend on it. + # The actual Connection objects we're used with do set this + # automatically, but many test objects don't. + # TODO: track this on the persistent objects themself? + try: + jar._cache = self + except AttributeError: + # Some ZODB tests pass in an object that cannot have an _cache + pass + self.cache_size = target_size + self.drain_resistance = 0 + self.non_ghost_count = 0 + self.persistent_classes = {} + self.data = weakref.WeakValueDictionary() + self.ring = Ring() + self.cache_size_bytes = cache_size_bytes + + # IPickleCache API + def __len__(self): + """ See IPickleCache. + """ + return (len(self.persistent_classes) + + len(self.data)) + + def __getitem__(self, oid): + """ See IPickleCache. + """ + value = self.data.get(oid) + if value is not None: + return value + return self.persistent_classes[oid] + + def __setitem__(self, oid, value): + """ See IPickleCache. + """ + # The order of checks matters for C compatibility; + # the ZODB tests depend on this + + # The C impl requires either a type or a Persistent subclass + if not isinstance(value, _CACHEABLE_TYPES): + raise TypeError("Cache values must be persistent objects.") + + value_oid = value._p_oid + if not isinstance(oid, OID_TYPE) or not isinstance(value_oid, OID_TYPE): + raise TypeError('OID must be %s: key=%s _p_oid=%s' % (OID_TYPE, oid, value_oid)) + + if value_oid != oid: + raise ValueError("Cache key does not match oid") + + # XXX + if oid in self.persistent_classes or oid in self.data: + # Have to be careful here, a GC might have just run + # and cleaned up the object + existing_data = self.get(oid) + if existing_data is not None and existing_data is not value: + # Raise the same type of exception as the C impl with the same + # message. 
+ raise ValueError('A different object already has the same oid') + # Match the C impl: it requires a jar + jar = getattr(value, '_p_jar', None) + if jar is None and not isinstance(value, type): + raise ValueError("Cached object jar missing") + # It also requires that it cannot be cached more than one place + existing_cache = getattr(jar, '_cache', None) + if (existing_cache is not None + and existing_cache is not self + and existing_cache.data.get(oid) is not None): + raise ValueError("Object already in another cache") + + if isinstance(value, type): # ZODB.persistentclass.PersistentMetaClass + self.persistent_classes[oid] = value + else: + self.data[oid] = value + _gc_monitor(value) + if _OGA(value, '_p_state') != GHOST and value not in self.ring: + self.ring.add(value) + self.non_ghost_count += 1 + + def __delitem__(self, oid): + """ See IPickleCache. + """ + if not isinstance(oid, OID_TYPE): + raise TypeError('OID must be %s: %s' % (OID_TYPE, oid)) + if oid in self.persistent_classes: + del self.persistent_classes[oid] + else: + value = self.data.pop(oid) + self.ring.delete(value) + + def get(self, oid, default=None): + """ See IPickleCache. + """ + + value = self.data.get(oid, self) + if value is not self: + return value + return self.persistent_classes.get(oid, default) + + def mru(self, oid): + """ See IPickleCache. + """ + if self._is_sweeping_ring: + # accessess during sweeping, such as with an + # overridden _p_deactivate, don't mutate the ring + # because that could leave it inconsistent + return False # marker return for tests + + value = self.data[oid] + + was_in_ring = value in self.ring + if not was_in_ring: + if _OGA(value, '_p_state') != GHOST: + self.ring.add(value) + self.non_ghost_count += 1 + else: + self.ring.move_to_head(value) + + def ringlen(self): + """ See IPickleCache. + """ + return len(self.ring) + + def items(self): + """ See IPickleCache. + """ + return self.data.items() + + def lru_items(self): + """ See IPickleCache. 
+ """ + result = [] + for obj in self.ring: + result.append((obj._p_oid, obj)) + return result + + def klass_items(self): + """ See IPickleCache. + """ + return self.persistent_classes.items() + + def incrgc(self, ignored=None): + """ See IPickleCache. + """ + target = self.cache_size + if self.drain_resistance >= 1: + size = self.non_ghost_count + target2 = size - 1 - (size // self.drain_resistance) + if target2 < target: + target = target2 + # return value for testing + return self._sweep(target, self.cache_size_bytes) + + def full_sweep(self, target=None): + """ See IPickleCache. + """ + # return value for testing + return self._sweep(0) + + minimize = full_sweep + + def new_ghost(self, oid, obj): + """ See IPickleCache. + """ + if obj._p_oid is not None: + raise ValueError('Object already has oid') + if obj._p_jar is not None: + raise ValueError('Object already has jar') + if oid in self.persistent_classes or oid in self.data: + raise KeyError('Duplicate OID: %s' % oid) + obj._p_oid = oid + obj._p_jar = self.jar + if not isinstance(obj, type): + if obj._p_state != GHOST: + # The C implementation sets this stuff directly, + # but we delegate to the class. However, we must be + # careful to avoid broken _p_invalidate and _p_deactivate + # that don't call the super class. See ZODB's + # testConnection.doctest_proper_ghost_initialization_with_empty__p_deactivate + obj._p_invalidate_deactivate_helper(False) + self[oid] = obj + + def reify(self, to_reify): + """ See IPickleCache. + """ + if isinstance(to_reify, OID_TYPE): #bytes + to_reify = [to_reify] + for oid in to_reify: + value = self[oid] + if value._p_state == GHOST: + value._p_activate() + self.non_ghost_count += 1 + self.mru(oid) + + def invalidate(self, to_invalidate): + """ See IPickleCache. 
+ """ + if isinstance(to_invalidate, OID_TYPE): + self._invalidate(to_invalidate) + else: + for oid in to_invalidate: + self._invalidate(oid) + + def debug_info(self): + result = [] + for oid, klass in self.persistent_classes.items(): + result.append((oid, + len(gc.get_referents(klass)), + type(klass).__name__, + klass._p_state, + )) + for oid, value in self.data.items(): + result.append((oid, + len(gc.get_referents(value)), + type(value).__name__, + value._p_state, + )) + return result + + def update_object_size_estimation(self, oid, new_size): + """ See IPickleCache. + """ + value = self.data.get(oid) + if value is not None: + # Recall that while the argument is given in bytes, + # we have to work with 64-block chunks (plus one) + # to match the C implementation. Hence the convoluted + # arithmetic + new_size_in_24 = _estimated_size_in_24_bits(new_size) + p_est_size_in_24 = value._Persistent__size + new_est_size_in_bytes = (new_size_in_24 - p_est_size_in_24) * 64 + + self.total_estimated_size += new_est_size_in_bytes + + cache_drain_resistance = property(lambda self: self.drain_resistance) + cache_non_ghost_count = property(lambda self: self.non_ghost_count) + cache_data = property(lambda self: dict(self.data.items())) + cache_klass_count = property(lambda self: len(self.persistent_classes)) + + # Helpers + + # Set to true when a deactivation happens in our code. For + # compatibility with the C implementation, we can only remove the + # node and decrement our non-ghost count if our implementation + # actually runs (broken subclasses can forget to call super; ZODB + # has tests for this). This gets set to false everytime we examine + # a node and checked afterwards. 
The C implementation has a very + # incestuous relationship between cPickleCache and cPersistence: + # the pickle cache calls _p_deactivate, which is responsible for + # both decrementing the non-ghost count and removing its node from + # the cache ring (and, if it gets deallocated, from the pickle + # cache's dictionary). We're trying to keep that to a minimum, but + # there's no way around it if we want full compatibility. + _persistent_deactivate_ran = False + + @_sweeping_ring + def _sweep(self, target, target_size_bytes=0): + # To avoid mutating datastructures in place or making a copy, + # and to work efficiently with both the CFFI ring and the + # deque-based ring, we collect the objects and their indexes + # up front and then hand them off for ejection. + # We don't use enumerate because that's slow under PyPy + i = -1 + to_eject = [] + for value in self.ring: + if self.non_ghost_count <= target and (self.total_estimated_size <= target_size_bytes or not target_size_bytes): + break + i += 1 + if value._p_state == UPTODATE: + # The C implementation will only evict things that are specifically + # in the up-to-date state + self._persistent_deactivate_ran = False + + # sweeping an object out of the cache should also + # ghost it---that's what C does. This winds up + # calling `update_object_size_estimation`. + # Also in C, if this was the last reference to the object, + # it removes itself from the `data` dictionary. + # If we're under PyPy or Jython, we need to run a GC collection + # to make this happen...this is only noticeable though, when + # we eject objects. Also, note that we can only take any of these + # actions if our _p_deactivate ran, in case of buggy subclasses. 
+ # see _persistent_deactivate_ran + + value._p_deactivate() + if (self._persistent_deactivate_ran + # Test-cases sneak in non-Persistent objects, sigh, so naturally + # they don't cooperate (without this check a bunch of test_picklecache + # breaks) + or not isinstance(value, _SWEEPABLE_TYPES)): + to_eject.append((i, value)) + self.non_ghost_count -= 1 + + ejected = len(to_eject) + if ejected: + self.ring.delete_all(to_eject) + + return ejected + + @_sweeping_ring + def _invalidate(self, oid): + value = self.data.get(oid) + if value is not None and value._p_state != GHOST: + value._p_invalidate() + was_in_ring = self.ring.delete(value) + self.non_ghost_count -= 1 + elif oid in self.persistent_classes: + persistent_class = self.persistent_classes.pop(oid) + try: + # ZODB.persistentclass.PersistentMetaClass objects + # have this method and it must be called for transaction abort + # and other forms of invalidation to work + persistent_class._p_invalidate() + except AttributeError: + pass diff --git a/thesisenv/lib/python3.6/site-packages/persistent/ring.c b/thesisenv/lib/python3.6/site-packages/persistent/ring.c new file mode 100644 index 0000000..a5151ed --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/persistent/ring.c @@ -0,0 +1,61 @@ +/***************************************************************************** + + Copyright (c) 2003 Zope Foundation and Contributors. + All Rights Reserved. + + This software is subject to the provisions of the Zope Public License, + Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. + THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED + WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED + WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS + FOR A PARTICULAR PURPOSE + + ****************************************************************************/ + +#define RING_C "$Id$\n" + +/* Support routines for the doubly-linked list of cached objects. 
+ +The cache stores a doubly-linked list of persistent objects, with +space for the pointers allocated in the objects themselves. The cache +stores the distinguished head of the list, which is not a valid +persistent object. + +The next pointers traverse the ring in order starting with the least +recently used object. The prev pointers traverse the ring in order +starting with the most recently used object. + +*/ + +#include "Python.h" +#include "ring.h" + +void +ring_add(CPersistentRing *ring, CPersistentRing *elt) +{ + assert(!elt->r_next); + elt->r_next = ring; + elt->r_prev = ring->r_prev; + ring->r_prev->r_next = elt; + ring->r_prev = elt; +} + +void +ring_del(CPersistentRing *elt) +{ + elt->r_next->r_prev = elt->r_prev; + elt->r_prev->r_next = elt->r_next; + elt->r_next = NULL; + elt->r_prev = NULL; +} + +void +ring_move_to_head(CPersistentRing *ring, CPersistentRing *elt) +{ + elt->r_prev->r_next = elt->r_next; + elt->r_next->r_prev = elt->r_prev; + elt->r_next = ring; + elt->r_prev = ring->r_prev; + ring->r_prev->r_next = elt; + ring->r_prev = elt; +} diff --git a/thesisenv/lib/python3.6/site-packages/persistent/ring.h b/thesisenv/lib/python3.6/site-packages/persistent/ring.h new file mode 100644 index 0000000..df8e822 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/persistent/ring.h @@ -0,0 +1,66 @@ +/***************************************************************************** + + Copyright (c) 2003 Zope Foundation and Contributors. + All Rights Reserved. + + This software is subject to the provisions of the Zope Public License, + Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+ THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED + WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED + WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS + FOR A PARTICULAR PURPOSE + + ****************************************************************************/ + +/* Support routines for the doubly-linked list of cached objects. + +The cache stores a headed, doubly-linked, circular list of persistent +objects, with space for the pointers allocated in the objects themselves. +The cache stores the distinguished head of the list, which is not a valid +persistent object. The other list members are non-ghost persistent +objects, linked in LRU (least-recently used) order. + +The r_next pointers traverse the ring starting with the least recently used +object. The r_prev pointers traverse the ring starting with the most +recently used object. + +Obscure: While each object is pointed at twice by list pointers (once by +its predecessor's r_next, again by its successor's r_prev), the refcount +on the object is bumped only by 1. This leads to some possibly surprising +sequences of incref and decref code. Note that since the refcount is +bumped at least once, the list does hold a strong reference to each +object in it. +*/ + +typedef struct CPersistentRing_struct +{ + struct CPersistentRing_struct *r_prev; + struct CPersistentRing_struct *r_next; +} CPersistentRing; + +/* The list operations here take constant time independent of the + * number of objects in the list: + */ + +/* Add elt as the most recently used object. elt must not already be + * in the list, although this isn't checked. + */ +void ring_add(CPersistentRing *ring, CPersistentRing *elt); + +/* Remove elt from the list. elt must already be in the list, although + * this isn't checked. + */ +void ring_del(CPersistentRing *elt); + +/* elt must already be in the list, although this isn't checked. 
It's + * unlinked from its current position, and relinked into the list as the + * most recently used object (which is arguably the tail of the list + * instead of the head -- but the name of this function could be argued + * either way). This is equivalent to + * + * ring_del(elt); + * ring_add(ring, elt); + * + * but may be a little quicker. + */ +void ring_move_to_head(CPersistentRing *ring, CPersistentRing *elt); diff --git a/thesisenv/lib/python3.6/site-packages/persistent/ring.py b/thesisenv/lib/python3.6/site-packages/persistent/ring.py new file mode 100644 index 0000000..1bad105 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/persistent/ring.py @@ -0,0 +1,217 @@ +# -*- coding: utf-8 -*- +############################################################################## +# +# Copyright (c) 2015 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## + +#pylint: disable=W0212,E0211,W0622,E0213,W0221,E0239 + +from zope.interface import Interface +from zope.interface import implementer + +class IRing(Interface): + """Conceptually, a doubly-linked list for efficiently keeping track of least- + and most-recently used :class:`persistent.interfaces.IPersistent` objects. + + This is meant to be used by the :class:`persistent.picklecache.PickleCache` + and should not be considered a public API. This interface documentation exists + to assist development of the picklecache and alternate implementations by + explaining assumptions and performance requirements. 
+ """ + + def __len__(): + """Return the number of persistent objects stored in the ring. + + Should be constant time. + """ + + def __contains__(object): + """Answer whether the given persistent object is found in the ring. + + Must not rely on object equality or object hashing, but only + identity or the `_p_oid`. Should be constant time. + """ + + def add(object): + """Add the persistent object to the ring as most-recently used. + + When an object is in the ring, the ring holds a strong + reference to it so it can be deactivated later by the pickle + cache. Should be constant time. + + The object should not already be in the ring, but this is not necessarily + enforced. + """ + + def delete(object): + """Remove the object from the ring if it is present. + + Returns a true value if it was present and a false value + otherwise. An ideal implementation should be constant time, + but linear time is allowed. + """ + + def move_to_head(object): + """Place the object as the most recently used object in the ring. + + The object should already be in the ring, but this is not + necessarily enforced, and attempting to move an object that is + not in the ring has undefined consequences. An ideal + implementation should be constant time, but linear time is + allowed. + """ + + def delete_all(indexes_and_values): + """Given a sequence of pairs (index, object), remove all of them from + the ring. + + This should be equivalent to calling :meth:`delete` for each + value, but allows for a more efficient bulk deletion process. + + If the index and object pairs do not match with the actual state of the + ring, this operation is undefined. + + Should be at least linear time (not quadratic). + """ + + def __iter__(): + """Iterate over each persistent object in the ring, in the order of least + recently used to most recently used. + + Mutating the ring while an iteration is in progress has + undefined consequences. 
+ """ + +from collections import deque + +@implementer(IRing) +class _DequeRing(object): + """A ring backed by the :class:`collections.deque` class. + + Operations are a mix of constant and linear time. + + It is available on all platforms. + """ + + __slots__ = ('ring', 'ring_oids') + + def __init__(self): + + self.ring = deque() + self.ring_oids = set() + + def __len__(self): + return len(self.ring) + + def __contains__(self, pobj): + return pobj._p_oid in self.ring_oids + + def add(self, pobj): + self.ring.append(pobj) + self.ring_oids.add(pobj._p_oid) + + def delete(self, pobj): + # Note that we do not use self.ring.remove() because that + # uses equality semantics and we don't want to call the persistent + # object's __eq__ method (which might wake it up just after we + # tried to ghost it) + for i, o in enumerate(self.ring): + if o is pobj: + del self.ring[i] + self.ring_oids.discard(pobj._p_oid) + return 1 + + def move_to_head(self, pobj): + self.delete(pobj) + self.add(pobj) + + def delete_all(self, indexes_and_values): + for ix, value in reversed(indexes_and_values): + del self.ring[ix] + self.ring_oids.discard(value._p_oid) + + def __iter__(self): + return iter(self.ring) + + +try: + from persistent import _ring +except ImportError: # pragma: no cover + _CFFIRing = None +else: + ffi = _ring.ffi + _FFI_RING = _ring.lib + + _OGA = object.__getattribute__ + _OSA = object.__setattr__ + + #pylint: disable=E1101 + @implementer(IRing) + class _CFFIRing(object): + """A ring backed by a C implementation. All operations are constant time. + + It is only available on platforms with ``cffi`` installed. + """ + + __slots__ = ('ring_home', 'ring_to_obj') + + def __init__(self): + node = self.ring_home = ffi.new("CPersistentRing*") + node.r_next = node + node.r_prev = node + + # In order for the CFFI objects to stay alive, we must keep + # a strong reference to them, otherwise they get freed. 
We must + # also keep strong references to the objects so they can be deactivated + self.ring_to_obj = dict() + + def __len__(self): + return len(self.ring_to_obj) + + def __contains__(self, pobj): + return getattr(pobj, '_Persistent__ring', self) in self.ring_to_obj + + def add(self, pobj): + node = ffi.new("CPersistentRing*") + _FFI_RING.ring_add(self.ring_home, node) + self.ring_to_obj[node] = pobj + _OSA(pobj, '_Persistent__ring', node) + + def delete(self, pobj): + its_node = getattr(pobj, '_Persistent__ring', None) + our_obj = self.ring_to_obj.pop(its_node, None) + if its_node is not None and our_obj is not None and its_node.r_next: + _FFI_RING.ring_del(its_node) + return 1 + + def move_to_head(self, pobj): + node = _OGA(pobj, '_Persistent__ring') + _FFI_RING.ring_move_to_head(self.ring_home, node) + + def delete_all(self, indexes_and_values): + for _, value in indexes_and_values: + self.delete(value) + + def iteritems(self): + head = self.ring_home + here = head.r_next + while here != head: + yield here + here = here.r_next + + def __iter__(self): + ring_to_obj = self.ring_to_obj + for node in self.iteritems(): + yield ring_to_obj[node] + +# Export the best available implementation +Ring = _CFFIRing if _CFFIRing else _DequeRing diff --git a/thesisenv/lib/python3.6/site-packages/persistent/tests/__init__.py b/thesisenv/lib/python3.6/site-packages/persistent/tests/__init__.py new file mode 100644 index 0000000..5bb534f --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/persistent/tests/__init__.py @@ -0,0 +1 @@ +# package diff --git a/thesisenv/lib/python3.6/site-packages/persistent/tests/attrhooks.py b/thesisenv/lib/python3.6/site-packages/persistent/tests/attrhooks.py new file mode 100644 index 0000000..528efe9 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/persistent/tests/attrhooks.py @@ -0,0 +1,123 @@ +############################################################################## +# +# Copyright (c) 2004 Zope Foundation and Contributors. 
+# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Overriding attr methods + +Examples for overriding attribute access methods. +""" + +from persistent import Persistent + +def _resettingJar(): + from persistent.tests.utils import ResettingJar + return ResettingJar() + +def _rememberingJar(): + from persistent.tests.utils import RememberingJar + return RememberingJar() + + +class OverridesGetattr(Persistent): + """Example of overriding __getattr__ + """ + def __getattr__(self, name): + """Get attributes that can't be gotten the usual way + """ + # Don't pretend we have any special attributes. + if name.startswith("__") and name.endswrith("__"): + raise AttributeError(name) # pragma: no cover + return name.upper(), self._p_changed + + +class VeryPrivate(Persistent): + """Example of overriding __getattribute__, __setattr__, and __delattr__ + """ + def __init__(self, **kw): + self.__dict__['__secret__'] = kw.copy() + + def __getattribute__(self, name): + """Get an attribute value + + See the very important note in the comment below! + """ + ################################################################# + # IMPORTANT! READ THIS! 8-> + # + # We *always* give Persistent a chance first. + # Persistent handles certain special attributes, like _p_ + # attributes. In particular, the base class handles __dict__ + # and __class__. + # + # We call _p_getattr. If it returns True, then we have to + # use Persistent.__getattribute__ to get the value. 
+ # + ################################################################# + if Persistent._p_getattr(self, name): + return Persistent.__getattribute__(self, name) + + # Data should be in our secret dictionary: + secret = self.__dict__['__secret__'] + if name in secret: + return secret[name] + + # Maybe it's a method: + meth = getattr(self.__class__, name, None) + if meth is None: + raise AttributeError(name) + + return meth.__get__(self, self.__class__) + + + def __setattr__(self, name, value): + """Set an attribute value + """ + ################################################################# + # IMPORTANT! READ THIS! 8-> + # + # We *always* give Persistent a chance first. + # Persistent handles certain special attributes, like _p_ + # attributes. + # + # We call _p_setattr. If it returns True, then we are done. + # It has already set the attribute. + # + ################################################################# + if Persistent._p_setattr(self, name, value): + return + + self.__dict__['__secret__'][name] = value + + if not name.startswith('tmp_'): + self._p_changed = 1 + + def __delattr__(self, name): + """Delete an attribute value + """ + ################################################################# + # IMPORTANT! READ THIS! 8-> + # + # We *always* give Persistent a chance first. + # Persistent handles certain special attributes, like _p_ + # attributes. + # + # We call _p_delattr. If it returns True, then we are done. + # It has already deleted the attribute. 
+ # + ################################################################# + if Persistent._p_delattr(self, name): + return + + del self.__dict__['__secret__'][name] + + if not name.startswith('tmp_'): + self._p_changed = 1 diff --git a/thesisenv/lib/python3.6/site-packages/persistent/tests/cucumbers.py b/thesisenv/lib/python3.6/site-packages/persistent/tests/cucumbers.py new file mode 100644 index 0000000..b45712c --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/persistent/tests/cucumbers.py @@ -0,0 +1,109 @@ +############################################################################## +# +# Copyright (c) 2003 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +# Example objects for pickling. 
+ +from persistent import Persistent + +def print_dict(d): + d = sorted(d.items()) + print('{%s}' % (', '.join( + [('%r: %r' % (k, v)) for (k, v) in d] + ))) + +def cmpattrs(self, other, *attrs): + result = 0 + for attr in attrs: + if attr[:3] in ('_v_', '_p_'): + raise AssertionError("_v_ and _p_ attrs not allowed") + lhs = getattr(self, attr, None) + rhs = getattr(other, attr, None) + result += lhs != rhs + return result + +class Simple(Persistent): + def __init__(self, name, **kw): + self.__name__ = name + self.__dict__.update(kw) + self._v_favorite_color = 'blue' + self._p_foo = 'bar' + + @property + def _attrs(self): + return list(self.__dict__.keys()) + + def __eq__(self, other): + return cmpattrs(self, other, '__class__', *self._attrs) == 0 + + +class Custom(Simple): + + def __new__(cls, x, y): + r = Persistent.__new__(cls) + r.x, r.y = x, y + return r + + def __init__(self, x, y): + self.a = 42 + + def __getnewargs__(self): + return self.x, self.y + + def __getstate__(self): + return self.a + + def __setstate__(self, a): + self.a = a + + +class Slotted(Persistent): + + __slots__ = 's1', 's2', '_p_splat', '_v_eek' + + def __init__(self, s1, s2): + self.s1, self.s2 = s1, s2 + self._v_eek = 1 + self._p_splat = 2 + + @property + def _attrs(self): + raise NotImplementedError() + + def __eq__(self, other): + return cmpattrs(self, other, '__class__', *self._attrs) == 0 + + +class SubSlotted(Slotted): + + __slots__ = 's3', 's4' + + def __init__(self, s1, s2, s3): + Slotted.__init__(self, s1, s2) + self.s3 = s3 + + @property + def _attrs(self): + return ('s1', 's2', 's3', 's4') + + +class SubSubSlotted(SubSlotted): + + def __init__(self, s1, s2, s3, **kw): + SubSlotted.__init__(self, s1, s2, s3) + self.__dict__.update(kw) + self._v_favorite_color = 'blue' + self._p_foo = 'bar' + + @property + def _attrs(self): + return ['s1', 's2', 's3', 's4'] + list(self.__dict__.keys()) diff --git a/thesisenv/lib/python3.6/site-packages/persistent/tests/test_docs.py 
b/thesisenv/lib/python3.6/site-packages/persistent/tests/test_docs.py new file mode 100644 index 0000000..3449811 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/persistent/tests/test_docs.py @@ -0,0 +1,71 @@ +# -*- coding: utf-8 -*- +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +""" +Tests for the documentation. +""" +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + + +# disable: accessing protected members, too many methods +# pylint: disable=W0212,R0904 + +import os.path +import unittest +import doctest + +import manuel.capture +import manuel.codeblock +import manuel.doctest +import manuel.ignore +import manuel.testing + + +def test_suite(): + here = os.path.dirname(os.path.abspath(__file__)) + while not os.path.exists(os.path.join(here, 'setup.py')): + prev, here = here, os.path.dirname(here) + if here == prev: + # Let's avoid infinite loops at root + raise AssertionError('could not find my setup.py') + + docs = os.path.join(here, 'docs', 'api') + + files_to_test = ( + 'cache.rst', + 'attributes.rst', + 'pickling.rst', + ) + paths = [os.path.join(docs, f) for f in files_to_test] + + m = manuel.ignore.Manuel() + m += manuel.doctest.Manuel(optionflags=( + doctest.NORMALIZE_WHITESPACE + | doctest.ELLIPSIS + | doctest.IGNORE_EXCEPTION_DETAIL + )) + m += manuel.codeblock.Manuel() + m += 
manuel.capture.Manuel() + + suite = unittest.TestSuite() + suite.addTest( + manuel.testing.TestSuite( + m, + *paths + ) + ) + + return suite diff --git a/thesisenv/lib/python3.6/site-packages/persistent/tests/test_list.py b/thesisenv/lib/python3.6/site-packages/persistent/tests/test_list.py new file mode 100644 index 0000000..e05ccfd --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/persistent/tests/test_list.py @@ -0,0 +1,332 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. 
+# +############################################################################## +"""Tests for PersistentList +""" + +import unittest + +l0 = [] +l1 = [0] +l2 = [0, 1] + +# pylint:disable=protected-access + +class OtherList: + def __init__(self, initlist): + self.__data = initlist + def __len__(self): + return len(self.__data) + def __getitem__(self, i): + return self.__data[i] + +class TestPList(unittest.TestCase): + + def _getTargetClass(self): + from persistent.list import PersistentList + return PersistentList + + def _makeJar(self): + class Jar(object): + def register(self, obj): + "no-op" + return Jar() + + def _makeOne(self, *args): + inst = self._getTargetClass()(*args) + inst._p_jar = self._makeJar() + return inst + + def test_volatile_attributes_not_persisted(self): + # http://www.zope.org/Collectors/Zope/2052 + m = self._getTargetClass()() + m.foo = 'bar' + m._v_baz = 'qux' + state = m.__getstate__() + self.assertTrue('foo' in state) + self.assertFalse('_v_baz' in state) + + def testTheWorld(self): + from persistent._compat import PYTHON2 + # Test constructors + pl = self._getTargetClass() + u = pl() + u0 = pl(l0) + u1 = pl(l1) + u2 = pl(l2) + + uu = pl(u) + uu0 = pl(u0) + uu1 = pl(u1) + uu2 = pl(u2) + + pl(tuple(u)) + pl(OtherList(u0)) + pl("this is also a sequence") + + # Test __repr__ + eq = self.assertEqual + + eq(str(u0), str(l0), "str(u0) == str(l0)") + eq(repr(u1), repr(l1), "repr(u1) == repr(l1)") + + # Test __cmp__ and __len__ + try: + cmp + except NameError: + def cmp(a, b): + if a == b: + return 0 + if a < b: + return -1 + return 1 + + def mycmp(a, b): + r = cmp(a, b) + if r < 0: + return -1 + if r > 0: + return 1 + return r + + to_test = [l0, l1, l2, u, u0, u1, u2, uu, uu0, uu1, uu2] + for a in to_test: + for b in to_test: + eq(mycmp(a, b), mycmp(len(a), len(b)), + "mycmp(a, b) == mycmp(len(a), len(b))") + + # Test __getitem__ + + for i, val in enumerate(u2): + eq(val, i, "u2[i] == i") + + # Test __setitem__ + + uu2[0] = 0 + uu2[1] = 100 + 
with self.assertRaises(IndexError): + uu2[2] = 200 + + # Test __delitem__ + + del uu2[1] + del uu2[0] + with self.assertRaises(IndexError): + del uu2[0] + + # Test __getslice__ + + for i in range(-3, 4): + eq(u2[:i], l2[:i], "u2[:i] == l2[:i]") + eq(u2[i:], l2[i:], "u2[i:] == l2[i:]") + for j in range(-3, 4): + eq(u2[i:j], l2[i:j], "u2[i:j] == l2[i:j]") + + # Test __setslice__ + + for i in range(-3, 4): + u2[:i] = l2[:i] + eq(u2, l2, "u2 == l2") + u2[i:] = l2[i:] + eq(u2, l2, "u2 == l2") + for j in range(-3, 4): + u2[i:j] = l2[i:j] + eq(u2, l2, "u2 == l2") + + uu2 = u2[:] + uu2[:0] = [-2, -1] + eq(uu2, [-2, -1, 0, 1], "uu2 == [-2, -1, 0, 1]") + uu2[0:] = [] + eq(uu2, [], "uu2 == []") + + # Test __contains__ + for i in u2: + self.assertTrue(i in u2, "i in u2") + for i in min(u2)-1, max(u2)+1: + self.assertTrue(i not in u2, "i not in u2") + + # Test __delslice__ + + uu2 = u2[:] + del uu2[1:2] + del uu2[0:1] + eq(uu2, [], "uu2 == []") + + uu2 = u2[:] + del uu2[1:] + del uu2[:1] + eq(uu2, [], "uu2 == []") + + # Test __add__, __radd__, __mul__ and __rmul__ + + #self.assertTrue(u1 + [] == [] + u1 == u1, "u1 + [] == [] + u1 == u1") + self.assertTrue(u1 + [1] == u2, "u1 + [1] == u2") + #self.assertTrue([-1] + u1 == [-1, 0], "[-1] + u1 == [-1, 0]") + self.assertTrue(u2 == u2*1 == 1*u2, "u2 == u2*1 == 1*u2") + self.assertTrue(u2+u2 == u2*2 == 2*u2, "u2+u2 == u2*2 == 2*u2") + self.assertTrue(u2+u2+u2 == u2*3 == 3*u2, "u2+u2+u2 == u2*3 == 3*u2") + + # Test append + + u = u1[:] + u.append(1) + eq(u, u2, "u == u2") + + # Test insert + + u = u2[:] + u.insert(0, -1) + eq(u, [-1, 0, 1], "u == [-1, 0, 1]") + + # Test pop + + u = pl([0, -1, 1]) + u.pop() + eq(u, [0, -1], "u == [0, -1]") + u.pop(0) + eq(u, [-1], "u == [-1]") + + # Test remove + + u = u2[:] + u.remove(1) + eq(u, u1, "u == u1") + + # Test count + u = u2*3 + eq(u.count(0), 3, "u.count(0) == 3") + eq(u.count(1), 3, "u.count(1) == 3") + eq(u.count(2), 0, "u.count(2) == 0") + + + # Test index + + eq(u2.index(0), 0, 
"u2.index(0) == 0") + eq(u2.index(1), 1, "u2.index(1) == 1") + with self.assertRaises(ValueError): + u2.index(2) + + # Test reverse + + u = u2[:] + u.reverse() + eq(u, [1, 0], "u == [1, 0]") + u.reverse() + eq(u, u2, "u == u2") + + # Test sort + + u = pl([1, 0]) + u.sort() + eq(u, u2, "u == u2") + + # Test keyword arguments to sort + if PYTHON2: # pragma: no cover + u.sort(cmp=lambda x, y: cmp(y, x)) + eq(u, [1, 0], "u == [1, 0]") + + u.sort(key=lambda x: -x) + eq(u, [1, 0], "u == [1, 0]") + + u.sort(reverse=True) + eq(u, [1, 0], "u == [1, 0]") + + # Passing any other keyword arguments results in a TypeError + with self.assertRaises(TypeError): + u.sort(blah=True) + + # Test extend + + u = u1[:] + u.extend(u2) + eq(u, u1 + u2, "u == u1 + u2") + + # Test iadd + u = u1[:] + u += u2 + eq(u, u1 + u2, "u == u1 + u2") + + # Test imul + u = u1[:] + u *= 3 + eq(u, u1 + u1 + u1, "u == u1 + u1 + u1") + + def test_setslice(self): + inst = self._makeOne() + self.assertFalse(inst._p_changed) + inst[:] = [1, 2, 3] + self.assertEqual(inst, [1, 2, 3]) + self.assertTrue(inst._p_changed) + + def test_delslice(self): + inst = self._makeOne([1, 2, 3]) + self.assertFalse(inst._p_changed) + self.assertEqual(inst, [1, 2, 3]) + del inst[:] + self.assertTrue(inst._p_changed) + + def test_iadd(self): + inst = self._makeOne() + self.assertFalse(inst._p_changed) + inst += [1, 2, 3] + self.assertEqual(inst, [1, 2, 3]) + self.assertTrue(inst._p_changed) + + def test_extend(self): + inst = self._makeOne() + self.assertFalse(inst._p_changed) + inst.extend([1, 2, 3]) + self.assertEqual(inst, [1, 2, 3]) + self.assertTrue(inst._p_changed) + + def test_imul(self): + inst = self._makeOne([1]) + self.assertFalse(inst._p_changed) + inst *= 2 + self.assertEqual(inst, [1, 1]) + self.assertTrue(inst._p_changed) + + def test_append(self): + inst = self._makeOne() + self.assertFalse(inst._p_changed) + inst.append(1) + self.assertEqual(inst, [1]) + self.assertTrue(inst._p_changed) + + def test_insert(self): + 
inst = self._makeOne() + self.assertFalse(inst._p_changed) + inst.insert(0, 1) + self.assertEqual(inst, [1]) + self.assertTrue(inst._p_changed) + + def test_remove(self): + inst = self._makeOne([1]) + self.assertFalse(inst._p_changed) + inst.remove(1) + self.assertEqual(inst, []) + self.assertTrue(inst._p_changed) + + def test_reverse(self): + inst = self._makeOne([2, 1]) + self.assertFalse(inst._p_changed) + inst.reverse() + self.assertEqual(inst, [1, 2]) + self.assertTrue(inst._p_changed) + + +def test_suite(): + return unittest.defaultTestLoader.loadTestsFromName(__name__) + +if __name__ == '__main__': + unittest.main() diff --git a/thesisenv/lib/python3.6/site-packages/persistent/tests/test_mapping.py b/thesisenv/lib/python3.6/site-packages/persistent/tests/test_mapping.py new file mode 100644 index 0000000..c252378 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/persistent/tests/test_mapping.py @@ -0,0 +1,237 @@ +############################################################################## +# +# Copyright (c) Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. 
+# +############################################################################## +import unittest + +# pylint:disable=blacklisted-name, protected-access + +class Test_default(unittest.TestCase): + + def _getTargetClass(self): + from persistent.mapping import default + return default + + def _makeOne(self, func): + return self._getTargetClass()(func) + + def test___get___from_class(self): + def _test(inst): + raise AssertionError("Must not be caled") + + descr = self._makeOne(_test) + class Foo(object): + testing = descr + self.assertIs(Foo.testing, descr) + + + def test___get___from_instance(self): + _called_with = [] + def _test(inst): + _called_with.append(inst) + return 'TESTING' + descr = self._makeOne(_test) + class Foo(object): + testing = descr + foo = Foo() + self.assertEqual(foo.testing, 'TESTING') + self.assertEqual(_called_with, [foo]) + + +class PersistentMappingTests(unittest.TestCase): + + def _getTargetClass(self): + from persistent.mapping import PersistentMapping + return PersistentMapping + + def _makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + def test_volatile_attributes_not_persisted(self): + # http://www.zope.org/Collectors/Zope/2052 + m = self._makeOne() + m.foo = 'bar' + m._v_baz = 'qux' + state = m.__getstate__() + self.assertTrue('foo' in state) + self.assertFalse('_v_baz' in state) + + def testTheWorld(self): + from persistent._compat import PYTHON2 + # Test constructors + l0 = {} + l1 = {0:0} + l2 = {0:0, 1:1} + u = self._makeOne() + u0 = self._makeOne(l0) + u1 = self._makeOne(l1) + u2 = self._makeOne(l2) + + uu = self._makeOne(u) + uu0 = self._makeOne(u0) + uu1 = self._makeOne(u1) + uu2 = self._makeOne(u2) + + class OtherMapping(dict): + def __init__(self, initmapping): + self.__data = initmapping + def items(self): + raise AssertionError("Not called") + self._makeOne(OtherMapping(u0)) + self._makeOne([(0, 0), (1, 1)]) + + # Test __repr__ + eq = self.assertEqual + + eq(str(u0), str(l0), "str(u0) == 
str(l0)") + eq(repr(u1), repr(l1), "repr(u1) == repr(l1)") + + # Test __cmp__ and __len__ + try: + cmp + except NameError: + def cmp(a, b): + if a == b: + return 0 + if hasattr(a, 'items'): + a = sorted(a.items()) + b = sorted(b.items()) + if a < b: + return -1 + return 1 + + def mycmp(a, b): + r = cmp(a, b) + if r < 0: + return -1 + if r > 0: + return 1 + return r + + to_test = [l0, l1, l2, u, u0, u1, u2, uu, uu0, uu1, uu2] + for a in to_test: + for b in to_test: + eq(mycmp(a, b), mycmp(len(a), len(b)), + "mycmp(a, b) == mycmp(len(a), len(b))") + + # Test __getitem__ + + for i, val in enumerate(u2): + eq(val, i, "u2[i] == i") + + # Test get + + for i in range(len(u2)): + eq(u2.get(i), i, "u2.get(i) == i") + eq(u2.get(i, 5), i, "u2.get(i, 5) == i") + + for i in min(u2)-1, max(u2)+1: + eq(u2.get(i), None, "u2.get(i) == None") + eq(u2.get(i, 5), 5, "u2.get(i, 5) == 5") + + # Test __setitem__ + + uu2[0] = 0 + uu2[1] = 100 + uu2[2] = 200 + + # Test __delitem__ + + del uu2[1] + del uu2[0] + with self.assertRaises(KeyError): + del uu2[0] + + # Test __contains__ + for i in u2: + self.assertTrue(i in u2, "i in u2") + for i in min(u2)-1, max(u2)+1: + self.assertTrue(i not in u2, "i not in u2") + + # Test update + + l = {"a":"b"} + u = self._makeOne(l) + u.update(u2) + for i in u: + self.assertTrue(i in l or i in u2, "i in l or i in u2") + for i in l: + self.assertTrue(i in u, "i in u") + for i in u2: + self.assertTrue(i in u, "i in u") + + # Test setdefault + + x = u2.setdefault(0, 5) + eq(x, 0, "u2.setdefault(0, 5) == 0") + + x = u2.setdefault(5, 5) + eq(x, 5, "u2.setdefault(5, 5) == 5") + self.assertTrue(5 in u2, "5 in u2") + + # Test pop + + x = u2.pop(1) + eq(x, 1, "u2.pop(1) == 1") + self.assertTrue(1 not in u2, "1 not in u2") + + with self.assertRaises(KeyError): + u2.pop(1) + + x = u2.pop(1, 7) + eq(x, 7, "u2.pop(1, 7) == 7") + + # Test popitem + + items = list(u2.items()) + key, value = u2.popitem() + self.assertTrue((key, value) in items, "key, value in items") + 
self.assertTrue(key not in u2, "key not in u2") + + # Test clear + + u2.clear() + eq(u2, {}, "u2 == {}") + + def test___repr___converts_legacy_container_attr(self): + # In the past, PM used a _container attribute. For some time, the + # implementation continued to use a _container attribute in pickles + # (__get/setstate__) to be compatible with older releases. This isn't + # really necessary any more. In fact, releases for which this might + # matter can no longer share databases with current releases. Because + # releases as recent as 3.9.0b5 still use _container in saved state, we + # need to accept such state, but we stop producing it. + pm = self._makeOne() + self.assertEqual(pm.__dict__, {'data': {}}) + # Make it look like an older instance + pm.__dict__.clear() + pm.__dict__['_container'] = {'a': 1} + self.assertEqual(pm.__dict__, {'_container': {'a': 1}}) + pm._p_changed = 0 + self.assertEqual(repr(pm), "{'a': 1}") + self.assertEqual(pm.__dict__, {'data': {'a': 1}}) + self.assertEqual(pm.__getstate__(), {'data': {'a': 1}}) + + +class Test_legacy_PersistentDict(unittest.TestCase): + + def _getTargetClass(self): + from persistent.dict import PersistentDict + return PersistentDict + + def test_PD_is_alias_to_PM(self): + from persistent.mapping import PersistentMapping + self.assertTrue(self._getTargetClass() is PersistentMapping) + + +def test_suite(): + return unittest.defaultTestLoader.loadTestsFromName(__name__) diff --git a/thesisenv/lib/python3.6/site-packages/persistent/tests/test_persistence.py b/thesisenv/lib/python3.6/site-packages/persistent/tests/test_persistence.py new file mode 100644 index 0000000..392f246 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/persistent/tests/test_persistence.py @@ -0,0 +1,2058 @@ +############################################################################## +# +# Copyright (c) 2011 Zope Foundation and Contributors. +# All Rights Reserved. 
+# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## + +import platform +import re +import sys +import unittest + +import persistent +from persistent._compat import copy_reg + + +_is_pypy3 = platform.python_implementation() == 'PyPy' and sys.version_info[0] > 2 +_is_jython = platform.python_implementation() == 'Jython' + +# pylint:disable=R0904,W0212,E1101 +# pylint:disable=attribute-defined-outside-init,too-many-lines +# pylint:disable=blacklisted-name,useless-object-inheritance +# Hundreds of unused jar and OID vars make this useless +# pylint:disable=unused-variable + +def skipIfNoCExtension(o): + return unittest.skipIf( + persistent._cPersistence is None, + "The C extension is not available")(o) + + +class _Persistent_Base(object): + + # py2/3 compat + assertRaisesRegex = getattr(unittest.TestCase, + 'assertRaisesRegex', + unittest.TestCase.assertRaisesRegexp) + + def _getTargetClass(self): + # concrete testcase classes must override + raise NotImplementedError() + + def _makeCache(self, jar): + # concrete testcase classes must override + raise NotImplementedError() + + def _makeRealCache(self, jar): + return self._makeCache(jar) + + def _makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + def _makeJar(self): + from zope.interface import implementer + from persistent.interfaces import IPersistentDataManager + + @implementer(IPersistentDataManager) + class _Jar(object): + _cache = None + # Set this to a value to have our `setstate` + # pass it through to the object's __setstate__ + setstate_calls_object = None + + 
# Set this to a value to have our `setstate` + # set the _p_serial of the object + setstate_sets_serial = None + def __init__(self): + self._loaded = [] + self._registered = [] + def setstate(self, obj): + self._loaded.append(obj._p_oid) + if self.setstate_calls_object is not None: + obj.__setstate__(self.setstate_calls_object) + if self.setstate_sets_serial is not None: + obj._p_serial = self.setstate_sets_serial + def register(self, obj): + self._registered.append(obj._p_oid) + + jar = _Jar() + jar._cache = self._makeCache(jar) + return jar + + def _makeBrokenJar(self): + from zope.interface import implementer + from persistent.interfaces import IPersistentDataManager + + @implementer(IPersistentDataManager) + class _BrokenJar(object): + def __init__(self): + self.called = 0 + def register(self, ob): + self.called += 1 + raise NotImplementedError() + def setstate(self, ob): + raise NotImplementedError() + + jar = _BrokenJar() + jar._cache = self._makeCache(jar) + return jar + + def _makeOneWithJar(self, klass=None, broken_jar=False): + OID = b'\x01' * 8 + if klass is not None: + inst = klass() + else: + inst = self._makeOne() + jar = self._makeJar() if not broken_jar else self._makeBrokenJar() + jar._cache.new_ghost(OID, inst) # assigns _p_jar, _p_oid + return inst, jar, OID + + def test_class_conforms_to_IPersistent(self): + from zope.interface.verify import verifyClass + from persistent.interfaces import IPersistent + verifyClass(IPersistent, self._getTargetClass()) + + def test_instance_conforms_to_IPersistent(self): + from zope.interface.verify import verifyObject + from persistent.interfaces import IPersistent + verifyObject(IPersistent, self._makeOne()) + + def test_ctor(self): + from persistent.persistence import _INITIAL_SERIAL + inst = self._makeOne() + self.assertEqual(inst._p_jar, None) + self.assertEqual(inst._p_oid, None) + self.assertEqual(inst._p_serial, _INITIAL_SERIAL) + self.assertEqual(inst._p_changed, False) + self.assertEqual(inst._p_sticky, 
False) + self.assertEqual(inst._p_status, 'unsaved') + + def test_del_jar_no_jar(self): + inst = self._makeOne() + del inst._p_jar # does not raise + self.assertEqual(inst._p_jar, None) + + def test_del_jar_while_in_cache(self): + inst, _, OID = self._makeOneWithJar() + def _test(): + del inst._p_jar + self.assertRaises(ValueError, _test) + + def test_del_jar_like_ZODB_abort(self): + # When a ZODB connection aborts, it removes registered objects from + # the cache, deletes their jar, deletes their OID, and finally sets + # p_changed to false + inst, jar, OID = self._makeOneWithJar() + del jar._cache[OID] + del inst._p_jar + self.assertEqual(inst._p_jar, None) + + def test_del_jar_of_inactive_object_that_has_no_state(self): + # If an object is ghosted, and we try to delete its + # jar, we shouldn't activate the object. + + # Simulate a POSKeyError on _p_activate; this can happen aborting + # a transaction using ZEO + broken_jar = self._makeBrokenJar() + inst = self._makeOne() + inst._p_oid = 42 + inst._p_jar = broken_jar + + # make it inactive + inst._p_deactivate() + self.assertEqual(inst._p_status, "ghost") + + # delete the jar; if we activated the object, the broken + # jar would raise NotImplementedError + del inst._p_jar + + def test_assign_p_jar_w_new_jar(self): + inst, jar, OID = self._makeOneWithJar() + new_jar = self._makeJar() + + with self.assertRaisesRegex(ValueError, + "can not change _p_jar of cached object"): + inst._p_jar = new_jar + + def test_assign_p_jar_w_valid_jar(self): + jar = self._makeJar() + inst = self._makeOne() + inst._p_jar = jar + self.assertEqual(inst._p_status, 'saved') + self.assertTrue(inst._p_jar is jar) + inst._p_jar = jar # reassign only to same DM + + def test_assign_p_jar_not_in_cache_allowed(self): + jar = self._makeJar() + inst = self._makeOne() + inst._p_jar = jar + # Both of these are allowed + inst._p_jar = self._makeJar() + inst._p_jar = None + self.assertEqual(inst._p_jar, None) + + def 
test_assign_p_oid_w_invalid_oid(self): + inst, jar, OID = self._makeOneWithJar() + + with self.assertRaisesRegex(ValueError, + 'can not change _p_oid of cached object'): + inst._p_oid = object() + + def test_assign_p_oid_w_valid_oid(self): + OID = b'\x01' * 8 + inst = self._makeOne() + inst._p_oid = OID + self.assertEqual(inst._p_oid, OID) + inst._p_oid = OID # reassign only same OID + + def test_assign_p_oid_w_new_oid_wo_jar(self): + OID1 = b'\x01' * 8 + OID2 = b'\x02' * 8 + inst = self._makeOne() + inst._p_oid = OID1 + inst._p_oid = OID2 + self.assertEqual(inst._p_oid, OID2) + + def test_assign_p_oid_w_None_wo_jar(self): + OID1 = b'\x01' * 8 + inst = self._makeOne() + inst._p_oid = OID1 + inst._p_oid = None + self.assertEqual(inst._p_oid, None) + + def test_assign_p_oid_w_new_oid_w_jar(self): + inst, jar, OID = self._makeOneWithJar() + new_OID = b'\x02' * 8 + def _test(): + inst._p_oid = new_OID + self.assertRaises(ValueError, _test) + + def test_assign_p_oid_not_in_cache_allowed(self): + jar = self._makeJar() + inst = self._makeOne() + inst._p_jar = jar + inst._p_oid = 1 # anything goes + inst._p_oid = 42 + self.assertEqual(inst._p_oid, 42) + + def test_delete_p_oid_wo_jar(self): + OID = b'\x01' * 8 + inst = self._makeOne() + inst._p_oid = OID + del inst._p_oid + self.assertEqual(inst._p_oid, None) + + def test_delete_p_oid_w_jar(self): + inst, jar, OID = self._makeOneWithJar() + with self.assertRaises(ValueError): + del inst._p_oid + + def test_delete_p_oid_of_subclass_calling_p_delattr(self): + class P(self._getTargetClass()): + def __delattr__(self, name): + super(P, self)._p_delattr(name) + raise AssertionError("Should not get here") + + inst, _jar, _oid = self._makeOneWithJar(klass=P) + with self.assertRaises(ValueError): + del inst._p_oid + + def test_del_oid_like_ZODB_abort(self): + # When a ZODB connection aborts, it removes registered objects from + # the cache, deletes their jar, deletes their OID, and finally sets + # p_changed to false + inst, jar, 
OID = self._makeOneWithJar() + del jar._cache[OID] + del inst._p_oid + self.assertEqual(inst._p_oid, None) + + def test_assign_p_serial_w_invalid_type(self): + inst = self._makeOne() + def _test(): + inst._p_serial = object() + self.assertRaises(ValueError, _test) + + def test_assign_p_serial_w_None(self): + inst = self._makeOne() + def _test(): + inst._p_serial = None + self.assertRaises(ValueError, _test) + + def test_assign_p_serial_too_short(self): + inst = self._makeOne() + def _test(): + inst._p_serial = b'\x01\x02\x03' + self.assertRaises(ValueError, _test) + + def test_assign_p_serial_too_long(self): + inst = self._makeOne() + def _test(): + inst._p_serial = b'\x01\x02\x03' * 3 + self.assertRaises(ValueError, _test) + + def test_assign_p_serial_w_valid_serial(self): + SERIAL = b'\x01' * 8 + inst = self._makeOne() + inst._p_serial = SERIAL + self.assertEqual(inst._p_serial, SERIAL) + + def test_delete_p_serial(self): + from persistent.persistence import _INITIAL_SERIAL + SERIAL = b'\x01' * 8 + inst = self._makeOne() + inst._p_serial = SERIAL + self.assertEqual(inst._p_serial, SERIAL) + del inst._p_serial + self.assertEqual(inst._p_serial, _INITIAL_SERIAL) + + def test_query_p_changed_unsaved(self): + inst = self._makeOne() + self.assertEqual(inst._p_changed, False) + + def test_query_p_changed_ghost(self): + inst, jar, OID = self._makeOneWithJar() + inst._p_deactivate() + self.assertEqual(inst._p_changed, None) + + def test_query_p_changed_saved(self): + inst, jar, OID = self._makeOneWithJar() + inst._p_activate() + self.assertEqual(inst._p_changed, False) + + def test_query_p_changed_changed(self): + inst, jar, OID = self._makeOneWithJar() + inst._p_activate() + inst._p_changed = True + self.assertEqual(inst._p_changed, True) + + def test_assign_p_changed_none_from_unsaved(self): + inst = self._makeOne() + inst._p_changed = None + self.assertEqual(inst._p_status, 'unsaved') + + def test_assign_p_changed_true_from_unsaved(self): + inst = self._makeOne() + 
inst._p_changed = True + self.assertEqual(inst._p_status, 'unsaved') + + def test_assign_p_changed_false_from_unsaved(self): + inst = self._makeOne() + inst._p_changed = False + self.assertEqual(inst._p_status, 'unsaved') + + def test_assign_p_changed_none_from_ghost(self): + inst, jar, OID = self._makeOneWithJar() + inst._p_deactivate() + inst._p_changed = None + self.assertEqual(inst._p_status, 'ghost') + self.assertEqual(list(jar._loaded), []) + self.assertEqual(list(jar._registered), []) + + def test_assign_p_changed_true_from_ghost(self): + inst, jar, OID = self._makeOneWithJar() + inst._p_deactivate() + inst._p_changed = True + self.assertEqual(inst._p_status, 'changed') + self.assertEqual(list(jar._loaded), [OID]) + self.assertEqual(list(jar._registered), [OID]) + + def test_assign_p_changed_false_from_ghost(self): + inst, jar, OID = self._makeOneWithJar() + inst._p_deactivate() + inst._p_changed = False + self.assertEqual(inst._p_status, 'ghost') # ??? this is what C does + self.assertEqual(list(jar._loaded), []) + self.assertEqual(list(jar._registered), []) + + def test_assign_p_changed_none_from_saved(self): + inst, jar, OID = self._makeOneWithJar() + inst._p_activate() + jar._loaded = [] + inst._p_changed = None + self.assertEqual(inst._p_status, 'ghost') + self.assertEqual(list(jar._loaded), []) + self.assertEqual(list(jar._registered), []) + + def test_assign_p_changed_true_from_saved(self): + inst, jar, OID = self._makeOneWithJar() + inst._p_activate() # XXX + jar._loaded[:] = [] + inst._p_changed = True + self.assertEqual(inst._p_status, 'changed') + self.assertEqual(list(jar._loaded), []) + self.assertEqual(list(jar._registered), [OID]) + + def test_assign_p_changed_false_from_saved(self): + inst, jar, OID = self._makeOneWithJar() + inst._p_activate() + jar._loaded = [] + inst._p_changed = False + self.assertEqual(inst._p_status, 'saved') + self.assertEqual(list(jar._loaded), []) + self.assertEqual(list(jar._registered), []) + + def 
test_assign_p_changed_none_from_changed(self): + inst, jar, OID = self._makeOneWithJar() + inst._p_activate() + inst._p_changed = True + jar._loaded = [] + jar._registered = [] + inst._p_changed = None + # assigning None is ignored when dirty + self.assertEqual(inst._p_status, 'changed') + self.assertEqual(list(jar._loaded), []) + self.assertEqual(list(jar._registered), []) + + def test_assign_p_changed_true_from_changed(self): + inst, jar, OID = self._makeOneWithJar() + inst._p_activate() + inst._p_changed = True + jar._loaded = [] + jar._registered = [] + inst._p_changed = True + self.assertEqual(inst._p_status, 'changed') + self.assertEqual(list(jar._loaded), []) + self.assertEqual(list(jar._registered), []) + + def test_assign_p_changed_false_from_changed(self): + inst, jar, OID = self._makeOneWithJar() + inst._p_activate() + inst._p_changed = True + jar._loaded = [] + jar._registered = [] + inst._p_changed = False + self.assertEqual(inst._p_status, 'saved') + self.assertEqual(list(jar._loaded), []) + self.assertEqual(list(jar._registered), []) + + def test_assign_p_changed_none_when_sticky(self): + inst, jar, OID = self._makeOneWithJar() + inst._p_activate() # XXX + inst._p_changed = False + inst._p_sticky = True + inst._p_changed = None + self.assertEqual(inst._p_status, 'sticky') + self.assertEqual(inst._p_changed, False) + self.assertEqual(inst._p_sticky, True) + + def test_delete_p_changed_from_unsaved(self): + inst = self._makeOne() + del inst._p_changed + self.assertEqual(inst._p_status, 'unsaved') + + def test_delete_p_changed_from_unsaved_w_dict(self): + class Derived(self._getTargetClass()): + pass + inst = Derived() + inst.foo = 'bar' + del inst._p_changed + self.assertEqual(inst._p_status, 'unsaved') + self.assertEqual(inst.foo, 'bar') + + def test_delete_p_changed_from_ghost(self): + inst, jar, OID = self._makeOneWithJar() + inst._p_deactivate() + del inst._p_changed + self.assertEqual(inst._p_status, 'ghost') + self.assertEqual(list(jar._loaded), 
[]) + self.assertEqual(list(jar._registered), []) + + def test_delete_p_changed_from_saved(self): + inst, jar, OID = self._makeOneWithJar() + inst._p_activate() + jar._loaded = [] + jar._registered = [] + del inst._p_changed + self.assertEqual(inst._p_status, 'ghost') + self.assertEqual(list(jar._loaded), []) + self.assertEqual(list(jar._registered), []) + + def test_delete_p_changed_from_changed(self): + inst, jar, OID = self._makeOneWithJar() + inst._p_activate() + inst._p_changed = True + jar._loaded = [] + jar._registered = [] + del inst._p_changed + self.assertEqual(inst._p_status, 'ghost') + self.assertEqual(list(jar._loaded), []) + self.assertEqual(list(jar._registered), []) + + def test_delete_p_changed_when_sticky(self): + inst, jar, OID = self._makeOneWithJar() + inst._p_activate() # XXX + inst._p_changed = False + inst._p_sticky = True + del inst._p_changed + self.assertEqual(inst._p_status, 'ghost') + self.assertEqual(inst._p_changed, None) + self.assertEqual(inst._p_sticky, False) + + def test_assign_p_sticky_true_when_ghost(self): + inst, jar, OID = self._makeOneWithJar() + inst._p_deactivate() # XXX + def _test(): + inst._p_sticky = True + self.assertRaises(ValueError, _test) + + def test_assign_p_sticky_false_when_ghost(self): + inst, jar, OID = self._makeOneWithJar() + inst._p_deactivate() # XXX + def _test(): + inst._p_sticky = False + self.assertRaises(ValueError, _test) + + def test_assign_p_sticky_true_non_ghost(self): + inst, jar, OID = self._makeOneWithJar() + inst._p_activate() # XXX + inst._p_changed = False + inst._p_sticky = True + self.assertTrue(inst._p_sticky) + + def test_assign_p_sticky_false_non_ghost(self): + inst, jar, OID = self._makeOneWithJar() + inst._p_activate() # XXX + inst._p_changed = False + inst._p_sticky = False + self.assertFalse(inst._p_sticky) + + def test__p_status_unsaved(self): + inst = self._makeOne() + self.assertEqual(inst._p_status, 'unsaved') + + def test__p_status_ghost(self): + inst, jar, OID = 
self._makeOneWithJar() + inst._p_deactivate() + self.assertEqual(inst._p_status, 'ghost') + + def test__p_status_changed(self): + inst, jar, OID = self._makeOneWithJar() + inst._p_changed = True + self.assertEqual(inst._p_status, 'changed') + + def test__p_status_changed_sticky(self): + # 'sticky' is not a state, but a separate flag. + inst, jar, OID = self._makeOneWithJar() + inst._p_activate() + inst._p_changed = True + inst._p_sticky = True + self.assertEqual(inst._p_status, 'sticky') + + def test__p_status_saved(self): + inst, jar, OID = self._makeOneWithJar() + inst._p_activate() # XXX + inst._p_changed = False + self.assertEqual(inst._p_status, 'saved') + + def test__p_status_saved_sticky(self): + # 'sticky' is not a state, but a separate flag. + inst, jar, OID = self._makeOneWithJar() + inst._p_activate() + inst._p_changed = False + inst._p_sticky = True + self.assertEqual(inst._p_status, 'sticky') + + def test__p_mtime_no_serial(self): + inst = self._makeOne() + self.assertEqual(inst._p_mtime, None) + + def test__p_mtime_w_serial(self): + from persistent.timestamp import TimeStamp + WHEN_TUPLE = (2011, 2, 15, 13, 33, 27.5) + ts = TimeStamp(*WHEN_TUPLE) + inst, jar, OID = self._makeOneWithJar() + inst._p_serial = ts.raw() + self.assertEqual(inst._p_mtime, ts.timeTime()) + + def test__p_mtime_activates_object(self): + # Accessing _p_mtime implicitly unghostifies the object + from persistent.timestamp import TimeStamp + WHEN_TUPLE = (2011, 2, 15, 13, 33, 27.5) + ts = TimeStamp(*WHEN_TUPLE) + inst, jar, OID = self._makeOneWithJar() + jar.setstate_sets_serial = ts.raw() + inst._p_invalidate() + self.assertEqual(inst._p_status, 'ghost') + self.assertEqual(inst._p_mtime, ts.timeTime()) + self.assertEqual(inst._p_status, 'saved') + + def test__p_state_unsaved(self): + inst = self._makeOne() + inst._p_changed = True + self.assertEqual(inst._p_state, 0) + + def test__p_state_ghost(self): + inst, jar, OID = self._makeOneWithJar() + inst._p_deactivate() + 
self.assertEqual(inst._p_state, -1) + + def test__p_state_changed(self): + inst, jar, OID = self._makeOneWithJar() + inst._p_changed = True + self.assertEqual(inst._p_state, 1) + + def test__p_state_changed_sticky(self): + # 'sticky' is not a state, but a separate flag. + inst, jar, OID = self._makeOneWithJar() + inst._p_activate() + inst._p_changed = True + inst._p_sticky = True + self.assertEqual(inst._p_state, 2) + + def test__p_state_saved(self): + inst, jar, OID = self._makeOneWithJar() + inst._p_activate() # XXX + inst._p_changed = False + self.assertEqual(inst._p_state, 0) + + def test__p_state_saved_sticky(self): + # 'sticky' is not a state, but a separate flag. + inst, jar, OID = self._makeOneWithJar() + inst._p_activate() + inst._p_changed = False + inst._p_sticky = True + self.assertEqual(inst._p_state, 2) + + def test_query_p_estimated_size_new(self): + inst = self._makeOne() + self.assertEqual(inst._p_estimated_size, 0) + + def test_query_p_estimated_size_del(self): + inst = self._makeOne() + inst._p_estimated_size = 123 + self.assertEqual(inst._p_estimated_size, 128) + del inst._p_estimated_size + self.assertEqual(inst._p_estimated_size, 0) + + def test_assign_p_estimated_size_wrong_type(self): + inst = self._makeOne() + + with self.assertRaises(TypeError): + inst._p_estimated_size = None + + try: + constructor = long + except NameError: + constructor = str + + with self.assertRaises(TypeError): + inst._p_estimated_size = constructor(1) + + def test_assign_p_estimated_size_negative(self): + inst = self._makeOne() + def _test(): + inst._p_estimated_size = -1 + self.assertRaises(ValueError, _test) + + def test_assign_p_estimated_size_small(self): + inst = self._makeOne() + inst._p_estimated_size = 123 + self.assertEqual(inst._p_estimated_size, 128) + + def test_assign_p_estimated_size_just_over_threshold(self): + inst = self._makeOne() + inst._p_estimated_size = 1073741697 + self.assertEqual(inst._p_estimated_size, 16777215 * 64) + + def 
test_assign_p_estimated_size_bigger(self): + inst = self._makeOne() + inst._p_estimated_size = 1073741697 * 2 + self.assertEqual(inst._p_estimated_size, 16777215 * 64) + + def test___getattribute___p__names(self): + NAMES = ['_p_jar', + '_p_oid', + '_p_changed', + '_p_serial', + '_p_state', + '_p_estimated_size', + '_p_sticky', + '_p_status', + ] + inst, jar, OID = self._makeOneWithJar() + self._clearMRU(jar) + for name in NAMES: + getattr(inst, name) + self._checkMRU(jar, []) + # _p_mtime is special, it activates the object + getattr(inst, '_p_mtime') + self._checkMRU(jar, [OID]) + + def test___getattribute__special_name(self): + from persistent.persistence import SPECIAL_NAMES + inst, jar, OID = self._makeOneWithJar() + self._clearMRU(jar) + for name in SPECIAL_NAMES: + getattr(inst, name, None) + self._checkMRU(jar, []) + + def test___getattribute__normal_name_from_unsaved(self): + class Derived(self._getTargetClass()): + normal = 'value' + inst = Derived() + self.assertEqual(getattr(inst, 'normal', None), 'value') + + def test___getattribute__normal_name_from_ghost(self): + class Derived(self._getTargetClass()): + normal = 'value' + inst, jar, OID = self._makeOneWithJar(Derived) + inst._p_deactivate() + self._clearMRU(jar) + self.assertEqual(getattr(inst, 'normal', None), 'value') + self._checkMRU(jar, [OID]) + + def test___getattribute__normal_name_from_saved(self): + class Derived(self._getTargetClass()): + normal = 'value' + inst, jar, OID = self._makeOneWithJar(Derived) + inst._p_changed = False + self._clearMRU(jar) + self.assertEqual(getattr(inst, 'normal', None), 'value') + self._checkMRU(jar, [OID]) + + def test___getattribute__normal_name_from_changed(self): + class Derived(self._getTargetClass()): + normal = 'value' + inst, jar, OID = self._makeOneWithJar(Derived) + inst._p_changed = True + self._clearMRU(jar) + self.assertEqual(getattr(inst, 'normal', None), 'value') + self._checkMRU(jar, [OID]) + + def test___getattribute___non_cooperative(self): + 
# Getting attributes is NOT cooperative with the superclass. + # This comes from the C implementation and is maintained + # for backwards compatibility. (For example, Persistent and + # ExtensionClass.Base/Acquisition take special care to mix together.) + class Base(object): + def __getattribute__(self, name): + if name == 'magic': + return 42 + return super(Base, self).__getattribute__(name) # pragma: no cover + + self.assertEqual(getattr(Base(), 'magic'), 42) + + class Derived(self._getTargetClass(), Base): + pass + + self.assertRaises(AttributeError, getattr, Derived(), 'magic') + + def test___setattr___p__names(self): + SERIAL = b'\x01' * 8 + inst, jar, OID = self._makeOneWithJar() + inst._p_activate() + NAMES = [('_p_jar', jar), + ('_p_oid', OID), + ('_p_changed', False), + ('_p_serial', SERIAL), + ('_p_estimated_size', 0), + ('_p_sticky', False), + ] + self._clearMRU(jar) + for name, value in NAMES: + setattr(inst, name, value) + self._checkMRU(jar, []) + + def test___setattr___v__name(self): + class Derived(self._getTargetClass()): + pass + inst, jar, OID = self._makeOneWithJar(Derived) + self._clearMRU(jar) + inst._v_foo = 'bar' + self.assertEqual(inst._p_status, 'saved') + self._checkMRU(jar, []) + + def test___setattr__normal_name_from_unsaved(self): + class Derived(self._getTargetClass()): + normal = 'before' + inst = Derived() + setattr(inst, 'normal', 'after') + self.assertEqual(getattr(inst, 'normal', None), 'after') + self.assertEqual(inst._p_status, 'unsaved') + + def test___setattr__normal_name_from_ghost(self): + class Derived(self._getTargetClass()): + normal = 'before' + inst, jar, OID = self._makeOneWithJar(Derived) + inst._p_deactivate() + self._clearMRU(jar) + setattr(inst, 'normal', 'after') + self._checkMRU(jar, [OID]) + self.assertEqual(jar._registered, [OID]) + self.assertEqual(getattr(inst, 'normal', None), 'after') + self.assertEqual(inst._p_status, 'changed') + + def test___setattr__normal_name_from_saved(self): + class 
Derived(self._getTargetClass()): + normal = 'before' + inst, jar, OID = self._makeOneWithJar(Derived) + inst._p_changed = False + self._clearMRU(jar) + setattr(inst, 'normal', 'after') + self._checkMRU(jar, [OID]) + self.assertEqual(jar._registered, [OID]) + self.assertEqual(getattr(inst, 'normal', None), 'after') + self.assertEqual(inst._p_status, 'changed') + + def test___setattr__normal_name_from_changed(self): + class Derived(self._getTargetClass()): + normal = 'before' + inst, jar, OID = self._makeOneWithJar(Derived) + inst._p_changed = True + self._clearMRU(jar) + jar._registered = [] + setattr(inst, 'normal', 'after') + self._checkMRU(jar, [OID]) + self.assertEqual(jar._registered, []) + self.assertEqual(getattr(inst, 'normal', None), 'after') + self.assertEqual(inst._p_status, 'changed') + + def test___delattr___p__names(self): + NAMES = ['_p_changed', + '_p_serial', + ] + inst, jar, OID = self._makeOneWithJar() + self._clearMRU(jar) + jar._registered = [] + for name in NAMES: + delattr(inst, name) + self._checkMRU(jar, []) + self.assertEqual(jar._registered, []) + + def test___delattr__normal_name_from_unsaved(self): + class Derived(self._getTargetClass()): + normal = 'before' + def __init__(self): + self.__dict__['normal'] = 'after' + inst = Derived() + delattr(inst, 'normal') + self.assertEqual(getattr(inst, 'normal', None), 'before') + + def test___delattr__normal_name_from_ghost(self): + class Derived(self._getTargetClass()): + normal = 'before' + inst, jar, OID = self._makeOneWithJar(Derived) + inst._p_deactivate() + self._clearMRU(jar) + jar._registered = [] + def _test(): + delattr(inst, 'normal') + self.assertRaises(AttributeError, _test) + self.assertEqual(inst._p_status, 'changed') # ??? 
this is what C does + self._checkMRU(jar, [OID]) + self.assertEqual(jar._registered, [OID]) + self.assertEqual(getattr(inst, 'normal', None), 'before') + + def test___delattr__normal_name_from_saved(self): + class Derived(self._getTargetClass()): + normal = 'before' + def __init__(self): + self.__dict__['normal'] = 'after' + inst, jar, OID = self._makeOneWithJar(Derived) + inst._p_changed = False + self._clearMRU(jar) + jar._registered = [] + delattr(inst, 'normal') + self._checkMRU(jar, [OID]) + self.assertEqual(jar._registered, [OID]) + self.assertEqual(getattr(inst, 'normal', None), 'before') + + def test___delattr__normal_name_from_changed(self): + class Derived(self._getTargetClass()): + normal = 'before' + def __init__(self): + self.__dict__['normal'] = 'after' + inst, jar, OID = self._makeOneWithJar(Derived) + inst._p_changed = True + self._clearMRU(jar) + jar._registered = [] + delattr(inst, 'normal') + self._checkMRU(jar, [OID]) + self.assertEqual(jar._registered, []) + self.assertEqual(getattr(inst, 'normal', None), 'before') + + def test___getstate__(self): + inst = self._makeOne() + self.assertEqual(inst.__getstate__(), None) + + def test___getstate___derived_w_dict(self): + class Derived(self._getTargetClass()): + pass + inst = Derived() + inst.foo = 'bar' + inst._p_baz = 'bam' + inst._v_qux = 'spam' + self.assertEqual(inst.__getstate__(), {'foo': 'bar'}) + + def test___getstate___derived_w_slots(self): + class Derived(self._getTargetClass()): + __slots__ = ('foo', 'baz', '_p_baz', '_v_qux') + inst = Derived() + inst.foo = 'bar' + inst._p_baz = 'bam' + inst._v_qux = 'spam' + self.assertEqual(inst.__getstate__(), (None, {'foo': 'bar'})) + + def test___getstate___derived_w_slots_in_base_and_derived(self): + class Base(self._getTargetClass()): + __slots__ = ('foo',) + class Derived(Base): + __slots__ = ('baz', 'qux',) + inst = Derived() + inst.foo = 'bar' + inst.baz = 'bam' + inst.qux = 'spam' + self.assertEqual(inst.__getstate__(), + (None, {'foo': 
'bar', 'baz': 'bam', 'qux': 'spam'})) + + def test___getstate___derived_w_slots_in_base_but_not_derived(self): + class Base(self._getTargetClass()): + __slots__ = ('foo',) + class Derived(Base): + pass + inst = Derived() + inst.foo = 'bar' + inst.baz = 'bam' + inst.qux = 'spam' + self.assertEqual(inst.__getstate__(), + ({'baz': 'bam', 'qux': 'spam'}, {'foo': 'bar'})) + + def test___setstate___empty(self): + inst = self._makeOne() + inst.__setstate__(None) # doesn't raise, but doesn't change anything + + def test___setstate___nonempty(self): + from persistent.persistence import _INITIAL_SERIAL + inst = self._makeOne() + self.assertRaises((ValueError, TypeError), + inst.__setstate__, {'bogus': 1}) + self.assertEqual(inst._p_jar, None) + self.assertEqual(inst._p_oid, None) + self.assertEqual(inst._p_serial, _INITIAL_SERIAL) + self.assertEqual(inst._p_changed, False) + self.assertEqual(inst._p_sticky, False) + + def test___setstate___nonempty_derived_w_dict(self): + class Derived(self._getTargetClass()): + pass + inst = Derived() + inst.foo = 'bar' + inst.__setstate__({'baz': 'bam'}) + self.assertEqual(inst.__dict__, {'baz': 'bam'}) + + def test___setstate___nonempty_derived_w_dict_w_two_keys(self): + class Derived(self._getTargetClass()): + pass + inst = Derived() + inst.foo = 'bar' + inst.__setstate__({'baz': 'bam', 'biz': 'boz'}) + self.assertEqual(inst.__dict__, {'baz': 'bam', 'biz': 'boz'}) + + def test___setstate___derived_w_slots(self): + class Derived(self._getTargetClass()): + __slots__ = ('foo', '_p_baz', '_v_qux') + inst = Derived() + inst.__setstate__((None, {'foo': 'bar'})) + self.assertEqual(inst.foo, 'bar') + + def test___setstate___derived_w_slots_in_base_classes(self): + class Base(self._getTargetClass()): + __slots__ = ('foo',) + class Derived(Base): + __slots__ = ('baz', 'qux',) + inst = Derived() + inst.__setstate__((None, {'foo': 'bar', 'baz': 'bam', 'qux': 'spam'})) + self.assertEqual(inst.foo, 'bar') + self.assertEqual(inst.baz, 'bam') + 
self.assertEqual(inst.qux, 'spam') + + def test___setstate___derived_w_slots_in_base_but_not_derived(self): + class Base(self._getTargetClass()): + __slots__ = ('foo',) + class Derived(Base): + pass + inst = Derived() + inst.__setstate__(({'baz': 'bam', 'qux': 'spam'}, {'foo': 'bar'})) + self.assertEqual(inst.foo, 'bar') + self.assertEqual(inst.baz, 'bam') + self.assertEqual(inst.qux, 'spam') + + if not _is_pypy3 and not _is_jython: + def test___setstate___interns_dict_keys(self): + class Derived(self._getTargetClass()): + pass + inst1 = Derived() + inst2 = Derived() + key1 = 'key' + key2 = 'ke'; key2 += 'y' # construct in a way that won't intern the literal + self.assertFalse(key1 is key2) + inst1.__setstate__({key1: 1}) + inst2.__setstate__({key2: 2}) + key1 = list(inst1.__dict__.keys())[0] + key2 = list(inst2.__dict__.keys())[0] + self.assertTrue(key1 is key2) + + from persistent._compat import IterableUserDict + inst1 = Derived() + inst2 = Derived() + key1 = 'key' + key2 = 'ke'; key2 += 'y' # construct in a way that won't intern the literal + self.assertFalse(key1 is key2) + state1 = IterableUserDict({key1: 1}) + state2 = IterableUserDict({key2: 2}) + k1 = list(state1.keys())[0] + k2 = list(state2.keys())[0] + self.assertFalse(k1 is k2) # verify + inst1.__setstate__(state1) + inst2.__setstate__(state2) + key1 = list(inst1.__dict__.keys())[0] + key2 = list(inst2.__dict__.keys())[0] + self.assertTrue(key1 is key2) + + def test___setstate___doesnt_fail_on_non_string_keys(self): + class Derived(self._getTargetClass()): + pass + inst1 = Derived() + inst1.__setstate__({1: 2}) + self.assertTrue(1 in inst1.__dict__) + + class MyStr(str): + pass + mystr = MyStr('mystr') + inst1.__setstate__({mystr: 2}) + self.assertTrue(mystr in inst1.__dict__) + + def test___setstate___doesnt_fail_on_non_dict(self): + class Derived(self._getTargetClass()): + pass + inst1 = Derived() + + from persistent._compat import IterableUserDict + state = IterableUserDict({'foobar': [1, 2]}) + + 
inst1.__setstate__(state) + self.assertTrue(hasattr(inst1, 'foobar')) + + def test___reduce__(self): + inst = self._makeOne() + first, second, third = inst.__reduce__() + self.assertTrue(first is copy_reg.__newobj__) + self.assertEqual(second, (self._getTargetClass(),)) + self.assertEqual(third, None) + + def test___reduce__w_subclass_having_getnewargs(self): + class Derived(self._getTargetClass()): + def __getnewargs__(self): + return ('a', 'b') + inst = Derived() + first, second, third = inst.__reduce__() + self.assertTrue(first is copy_reg.__newobj__) + self.assertEqual(second, (Derived, 'a', 'b')) + self.assertEqual(third, {}) + + def test___reduce__w_subclass_having_getstate(self): + class Derived(self._getTargetClass()): + def __getstate__(self): + return {} + inst = Derived() + first, second, third = inst.__reduce__() + self.assertTrue(first is copy_reg.__newobj__) + self.assertEqual(second, (Derived,)) + self.assertEqual(third, {}) + + def test___reduce__w_subclass_having_getnewargs_and_getstate(self): + class Derived(self._getTargetClass()): + def __getnewargs__(self): + return ('a', 'b') + def __getstate__(self): + return {'foo': 'bar'} + inst = Derived() + first, second, third = inst.__reduce__() + self.assertTrue(first is copy_reg.__newobj__) + self.assertEqual(second, (Derived, 'a', 'b')) + self.assertEqual(third, {'foo': 'bar'}) + + def test_pickle_roundtrip_simple(self): + import pickle + # XXX s.b. 'examples' + from persistent.tests.cucumbers import Simple + inst = Simple('testing') + copy = pickle.loads(pickle.dumps(inst)) + self.assertEqual(copy, inst) + for protocol in 0, 1, 2: + copy = pickle.loads(pickle.dumps(inst, protocol)) + self.assertEqual(copy, inst) + + def test_pickle_roundtrip_w_getnewargs_and_getstate(self): + import pickle + # XXX s.b. 
'examples' + from persistent.tests.cucumbers import Custom + inst = Custom('x', 'y') + copy = pickle.loads(pickle.dumps(inst)) + self.assertEqual(copy, inst) + for protocol in 0, 1, 2: + copy = pickle.loads(pickle.dumps(inst, protocol)) + self.assertEqual(copy, inst) + + def test_pickle_roundtrip_w_slots_missing_slot(self): + import pickle + # XXX s.b. 'examples' + from persistent.tests.cucumbers import SubSlotted + inst = SubSlotted('x', 'y', 'z') + copy = pickle.loads(pickle.dumps(inst)) + self.assertEqual(copy, inst) + for protocol in 0, 1, 2: + copy = pickle.loads(pickle.dumps(inst, protocol)) + self.assertEqual(copy, inst) + + def test_pickle_roundtrip_w_slots_filled_slot(self): + import pickle + # XXX s.b. 'examples' + from persistent.tests.cucumbers import SubSlotted + inst = SubSlotted('x', 'y', 'z') + inst.s4 = 'a' + copy = pickle.loads(pickle.dumps(inst)) + self.assertEqual(copy, inst) + for protocol in 0, 1, 2: + copy = pickle.loads(pickle.dumps(inst, protocol)) + self.assertEqual(copy, inst) + + def test_pickle_roundtrip_w_slots_and_empty_dict(self): + import pickle + # XXX s.b. 'examples' + from persistent.tests.cucumbers import SubSubSlotted + inst = SubSubSlotted('x', 'y', 'z') + copy = pickle.loads(pickle.dumps(inst)) + self.assertEqual(copy, inst) + for protocol in 0, 1, 2: + copy = pickle.loads(pickle.dumps(inst, protocol)) + self.assertEqual(copy, inst) + + def test_pickle_roundtrip_w_slots_and_filled_dict(self): + import pickle + # XXX s.b. 
'examples' + from persistent.tests.cucumbers import SubSubSlotted + inst = SubSubSlotted('x', 'y', 'z', foo='bar', baz='bam') + inst.s4 = 'a' + copy = pickle.loads(pickle.dumps(inst)) + self.assertEqual(copy, inst) + for protocol in 0, 1, 2: + copy = pickle.loads(pickle.dumps(inst, protocol)) + self.assertEqual(copy, inst) + + def test__p_activate_from_unsaved(self): + inst = self._makeOne() + inst._p_activate() # noop w/o jar + self.assertEqual(inst._p_status, 'unsaved') + + def test__p_activate_from_ghost(self): + inst, jar, OID = self._makeOneWithJar() + inst._p_deactivate() + inst._p_activate() + self.assertEqual(inst._p_status, 'saved') + + def test__p_activate_from_saved(self): + inst, jar, OID = self._makeOneWithJar() + inst._p_changed = False + inst._p_activate() # noop from 'saved' state + self.assertEqual(inst._p_status, 'saved') + + def test__p_activate_only_sets_state_once(self): + inst, jar, OID = self._makeOneWithJar() + # No matter how many times we call _p_activate, it + # only sets state once, the first time + inst._p_invalidate() # make it a ghost + self.assertEqual(list(jar._loaded), []) + + inst._p_activate() + self.assertEqual(list(jar._loaded), [OID]) + + inst._p_activate() + self.assertEqual(list(jar._loaded), [OID]) + + def test__p_activate_leaves_object_in_saved_even_if_object_mutated_self(self): + # If the object's __setstate__ set's attributes + # when called by p_activate, the state is still + # 'saved' when done. 
Furthemore, the object is not + # registered with the jar + + class WithSetstate(self._getTargetClass()): + state = None + def __setstate__(self, state): + self.state = state + + inst, jar, OID = self._makeOneWithJar(klass=WithSetstate) + inst._p_invalidate() # make it a ghost + self.assertEqual(inst._p_status, 'ghost') + + jar.setstate_calls_object = 42 + inst._p_activate() + # It get loaded + self.assertEqual(list(jar._loaded), [OID]) + # and __setstate__ got called to mutate the object + self.assertEqual(inst.state, 42) + # but it's still in the saved state + self.assertEqual(inst._p_status, 'saved') + # and it is not registered as changed by the jar + self.assertEqual(list(jar._registered), []) + + def test__p_deactivate_from_unsaved(self): + inst = self._makeOne() + inst._p_deactivate() + self.assertEqual(inst._p_status, 'unsaved') + + def test__p_deactivate_from_unsaved_w_dict(self): + class Derived(self._getTargetClass()): + normal = 'before' + def __init__(self): + self.__dict__['normal'] = 'after' + inst = Derived() + inst._p_changed = True + inst._p_deactivate() + self.assertEqual(inst._p_status, 'unsaved') + self.assertEqual(inst.__dict__, {'normal': 'after'}) + + def test__p_deactivate_from_ghost(self): + inst, jar, OID = self._makeOneWithJar() + inst._p_deactivate() + self.assertEqual(inst._p_status, 'ghost') + self.assertEqual(list(jar._loaded), []) + self.assertEqual(list(jar._registered), []) + + def test__p_deactivate_from_saved(self): + inst, jar, OID = self._makeOneWithJar() + inst._p_activate() + jar._loaded = [] + inst._p_deactivate() + self.assertEqual(inst._p_status, 'ghost') + self.assertEqual(list(jar._loaded), []) + self.assertEqual(list(jar._registered), []) + + def test__p_deactivate_from_saved_w_dict(self): + class Derived(self._getTargetClass()): + normal = 'before' + def __init__(self): + self.__dict__['normal'] = 'after' + inst, jar, OID = self._makeOneWithJar(Derived) + inst._p_activate() + jar._loaded = [] + inst._p_deactivate() + 
self.assertEqual(inst._p_status, 'ghost') + self.assertEqual(inst.__dict__, {}) + self.assertEqual(list(jar._loaded), []) + self.assertEqual(list(jar._registered), []) + + def test__p_deactivate_from_changed(self): + class Derived(self._getTargetClass()): + normal = 'before' + inst, jar, OID = self._makeOneWithJar(Derived) + inst.normal = 'after' + jar._loaded = [] + jar._registered = [] + inst._p_deactivate() + # assigning None is ignored when dirty + self.assertEqual(inst._p_status, 'changed') + self.assertEqual(inst.__dict__, {'normal': 'after'}) + self.assertEqual(list(jar._loaded), []) + self.assertEqual(list(jar._registered), []) + + def test__p_deactivate_from_changed_w_dict(self): + inst, jar, OID = self._makeOneWithJar() + inst._p_activate() + inst._p_changed = True + jar._loaded = [] + jar._registered = [] + inst._p_deactivate() + # assigning None is ignored when dirty + self.assertEqual(inst._p_status, 'changed') + self.assertEqual(list(jar._loaded), []) + self.assertEqual(list(jar._registered), []) + + def test__p_deactivate_when_sticky(self): + inst, jar, OID = self._makeOneWithJar() + inst._p_activate() # XXX + inst._p_changed = False + inst._p_sticky = True + inst._p_deactivate() + self.assertEqual(inst._p_status, 'sticky') + self.assertEqual(inst._p_changed, False) + self.assertEqual(inst._p_sticky, True) + + def test__p_invalidate_from_unsaved(self): + inst = self._makeOne() + inst._p_invalidate() + self.assertEqual(inst._p_status, 'unsaved') + + def test__p_invalidate_from_unsaved_w_dict(self): + class Derived(self._getTargetClass()): + normal = 'before' + def __init__(self): + self.__dict__['normal'] = 'after' + inst = Derived() + inst._p_invalidate() + self.assertEqual(inst._p_status, 'unsaved') + self.assertEqual(inst.__dict__, {'normal': 'after'}) + + def test__p_invalidate_from_ghost(self): + inst, jar, OID = self._makeOneWithJar() + inst._p_deactivate() + inst._p_invalidate() + self.assertEqual(inst._p_status, 'ghost') + 
self.assertEqual(list(jar._loaded), []) + self.assertEqual(list(jar._registered), []) + + def test__p_invalidate_from_saved(self): + inst, jar, OID = self._makeOneWithJar() + inst._p_activate() + jar._loaded = [] + jar._registered = [] + inst._p_invalidate() + self.assertEqual(inst._p_status, 'ghost') + self.assertEqual(list(jar._loaded), []) + self.assertEqual(list(jar._registered), []) + + def test__p_invalidate_from_saved_w_dict(self): + class Derived(self._getTargetClass()): + normal = 'before' + def __init__(self): + self.__dict__['normal'] = 'after' + inst, jar, OID = self._makeOneWithJar(Derived) + inst._p_activate() + jar._loaded = [] + jar._registered = [] + inst._p_invalidate() + self.assertEqual(inst._p_status, 'ghost') + self.assertEqual(inst.__dict__, {}) + self.assertEqual(list(jar._loaded), []) + self.assertEqual(list(jar._registered), []) + + def test__p_invalidate_from_changed(self): + inst, jar, OID = self._makeOneWithJar() + inst._p_activate() + inst._p_changed = True + jar._loaded = [] + jar._registered = [] + inst._p_invalidate() + self.assertEqual(inst._p_status, 'ghost') + self.assertEqual(list(jar._loaded), []) + self.assertEqual(list(jar._registered), []) + + def test__p_invalidate_from_changed_w_dict(self): + class Derived(self._getTargetClass()): + normal = 'before' + def __init__(self): + self.__dict__['normal'] = 'after' + inst, jar, OID = self._makeOneWithJar(Derived) + inst._p_activate() + inst._p_changed = True + jar._loaded = [] + jar._registered = [] + inst._p_invalidate() + self.assertEqual(inst._p_status, 'ghost') + self.assertEqual(inst.__dict__, {}) + self.assertEqual(list(jar._loaded), []) + self.assertEqual(list(jar._registered), []) + + def test__p_invalidate_from_changed_w_slots(self): + class Derived(self._getTargetClass()): + __slots__ = ('myattr1', 'myattr2', 'unset') + def __init__(self): + self.myattr1 = 'value1' + self.myattr2 = 'value2' + inst, jar, OID = self._makeOneWithJar(Derived) + inst._p_activate() + 
inst._p_changed = True + jar._loaded = [] + jar._registered = [] + self.assertEqual(Derived.myattr1.__get__(inst), 'value1') + self.assertEqual(Derived.myattr2.__get__(inst), 'value2') + inst._p_invalidate() + self.assertEqual(inst._p_status, 'ghost') + self.assertEqual(list(jar._loaded), []) + self.assertRaises(AttributeError, lambda: Derived.myattr1.__get__(inst)) + self.assertRaises(AttributeError, lambda: Derived.myattr2.__get__(inst)) + self.assertEqual(list(jar._loaded), []) + self.assertEqual(list(jar._registered), []) + + def test__p_invalidate_from_changed_w_slots_compat(self): + # check that (for backward-compatibility reason) slots are not released + # for classes where __new__ is overwritten. Attributes in __dict__ + # should be always released. + class Derived(self._getTargetClass()): + __slots__ = ('myattr1', 'myattr2', '__dict__') + def __new__(cls): + obj = cls.__base__.__new__(cls) + obj.myattr1 = 'value1' + obj.myattr2 = 'value2' + obj.foo = 'foo1' # .foo & .bar are in __dict__ + obj.bar = 'bar2' + return obj + inst, jar, OID = self._makeOneWithJar(Derived) + inst._p_activate() + inst._p_changed = True + jar._loaded = [] + jar._registered = [] + self.assertEqual(Derived.myattr1.__get__(inst), 'value1') + self.assertEqual(Derived.myattr2.__get__(inst), 'value2') + self.assertEqual(inst.__dict__, {'foo': 'foo1', 'bar': 'bar2'}) + inst._p_invalidate() + self.assertEqual(inst._p_status, 'ghost') + self.assertEqual(list(jar._loaded), []) + self.assertEqual(Derived.myattr1.__get__(inst), 'value1') + self.assertEqual(Derived.myattr2.__get__(inst), 'value2') + self.assertEqual(inst.__dict__, {}) + self.assertEqual(list(jar._loaded), []) + self.assertEqual(list(jar._registered), []) + + def test_p_invalidate_with_slots_broken_jar(self): + # If jar.setstate() raises a POSKeyError (or any error) + # clearing an object with unset slots doesn't result in a + # SystemError, the original error is propagated + + class Derived(self._getTargetClass()): + __slots__ 
= ('slot1',) + + # Pre-cache in __slotnames__; cpersistent goes directly for this + # and avoids a call to copy_reg. (If it calls the python code in + # copy_reg, the pending exception will be immediately propagated by + # copy_reg, not by us.) + copy_reg._slotnames(Derived) + + inst, jar, OID = self._makeOneWithJar(Derived, broken_jar=True) + inst._p_invalidate() + self.assertEqual(inst._p_status, 'ghost') + self.assertRaises(NotImplementedError, inst._p_activate) + + + def test__p_invalidate_from_sticky(self): + inst, jar, OID = self._makeOneWithJar() + inst._p_activate() # XXX + inst._p_changed = False + inst._p_sticky = True + self.assertEqual(inst._p_status, 'sticky') + inst._p_invalidate() + self.assertEqual(inst._p_status, 'ghost') + self.assertEqual(inst._p_changed, None) + self.assertEqual(inst._p_sticky, False) + + def test__p_invalidate_from_sticky_w_dict(self): + class Derived(self._getTargetClass()): + def __init__(self): + self.normal = 'value' + inst, jar, OID = self._makeOneWithJar(Derived) + inst._p_activate() # XXX + inst._p_changed = False + inst._p_sticky = True + inst._p_invalidate() + self.assertEqual(inst._p_status, 'ghost') + self.assertEqual(inst._p_changed, None) + self.assertEqual(inst._p_sticky, False) + self.assertEqual(inst.__dict__, {}) + + def test__p_getattr_w__p__names(self): + NAMES = ['_p_jar', + '_p_oid', + '_p_changed', + '_p_serial', + '_p_mtime', + '_p_state', + '_p_estimated_size', + '_p_sticky', + '_p_status', + ] + inst, jar, OID = self._makeOneWithJar() + inst._p_deactivate() + for name in NAMES: + self.assertTrue(inst._p_getattr(name)) + self.assertEqual(inst._p_status, 'ghost') + self.assertEqual(list(jar._loaded), []) + self._checkMRU(jar, []) + + def test__p_getattr_w_special_names(self): + from persistent.persistence import SPECIAL_NAMES + inst, jar, OID = self._makeOneWithJar() + inst._p_deactivate() + for name in SPECIAL_NAMES: + self.assertTrue(inst._p_getattr(name)) + self.assertEqual(inst._p_status, 'ghost') + 
self.assertEqual(list(jar._loaded), []) + self._checkMRU(jar, []) + + def test__p_getattr_w_normal_name(self): + inst, jar, OID = self._makeOneWithJar() + inst._p_deactivate() + self.assertFalse(inst._p_getattr('normal')) + self.assertEqual(inst._p_status, 'saved') + self.assertEqual(list(jar._loaded), [OID]) + self._checkMRU(jar, [OID]) + + def test__p_setattr_w__p__name(self): + SERIAL = b'\x01' * 8 + inst, jar, OID = self._makeOneWithJar() + inst._p_deactivate() + self.assertTrue(inst._p_setattr('_p_serial', SERIAL)) + self.assertEqual(inst._p_status, 'ghost') + self.assertEqual(inst._p_serial, SERIAL) + self.assertEqual(list(jar._loaded), []) + self._checkMRU(jar, []) + + def test__p_setattr_w_normal_name(self): + inst, jar, OID = self._makeOneWithJar() + inst._p_deactivate() + self.assertFalse(inst._p_setattr('normal', 'value')) + # _p_setattr doesn't do the actual write for normal names + self.assertEqual(inst._p_status, 'saved') + self.assertEqual(list(jar._loaded), [OID]) + self._checkMRU(jar, [OID]) + + def test__p_delattr_w__p__names(self): + NAMES = ['_p_changed', + '_p_serial', + ] + inst, jar, OID = self._makeOneWithJar() + inst._p_changed = True + jar._loaded = [] + for name in NAMES: + self.assertTrue(inst._p_delattr(name)) + self.assertEqual(inst._p_status, 'ghost') + self.assertEqual(inst._p_changed, None) + self.assertEqual(list(jar._loaded), []) + self._checkMRU(jar, []) + + def test__p_delattr_w_normal_name(self): + class Derived(self._getTargetClass()): + normal = 'value' + inst, jar, OID = self._makeOneWithJar(Derived) + inst._p_deactivate() + self.assertFalse(inst._p_delattr('normal')) + # _p_delattr doesn't do the actual delete for normal names + self.assertEqual(inst._p_status, 'saved') + self.assertEqual(list(jar._loaded), [OID]) + self._checkMRU(jar, [OID]) + + def test_set__p_changed_w_broken_jar(self): + # When an object is modified, it registers with its data manager. 
+ # If that registration fails, the exception is propagated and the + # object stays in the up-to-date state. + # It shouldn't change to the modified state, because it won't + # be saved when the transaction commits. + class P(self._getTargetClass()): + def __init__(self): + self.x = 0 + + p = P() + p._p_oid = b'1' + p._p_jar = self._makeBrokenJar() + self.assertEqual(p._p_state, 0) + self.assertEqual(p._p_jar.called, 0) + def _try(): + p._p_changed = 1 + self.assertRaises(NotImplementedError, _try) + self.assertEqual(p._p_jar.called, 1) + self.assertEqual(p._p_state, 0) + + def test__p_activate_w_broken_jar(self): + # Make sure that exceptions that occur inside the data manager's + # ``setstate()`` method propagate out to the caller. + class P(self._getTargetClass()): + def __init__(self): + self.x = 0 + p = P() + p._p_oid = b'1' + p._p_jar = self._makeBrokenJar() + p._p_deactivate() + self.assertEqual(p._p_state, -1) + self.assertRaises(NotImplementedError, p._p_activate) + self.assertEqual(p._p_state, -1) + + def test__ancient_dict_layout_bug(self): + # We once had a bug in the `Persistent` class that calculated an + # incorrect offset for the ``__dict__`` attribute. It assigned + # ``__dict__`` and ``_p_jar`` to the same location in memory. + # This is a simple test to make sure they have different locations. 
+ class P(self._getTargetClass()): + def __init__(self): + self.x = 0 + def inc(self): + self.x += 1 + p = P() + p.inc() + p.inc() + self.assertTrue('x' in p.__dict__) + self.assertTrue(p._p_jar is None) + + def test_w_diamond_inheritance(self): + class A(self._getTargetClass()): + pass + class B(self._getTargetClass()): + pass + class C(A, B): + pass + class D(object): + pass + class E(D, B): + pass + # no raise + A(), B(), C(), D(), E() + + def test_w_alternate_metaclass(self): + class alternateMeta(type): + pass + class alternate(object): + __metaclass__ = alternateMeta + class mixedMeta(alternateMeta, type): + pass + # no raise + class mixed1(alternate, self._getTargetClass()): + pass + class mixed2(self._getTargetClass(), alternate): + pass + + def test_setattr_in_subclass_is_not_called_creating_an_instance(self): + class subclass(self._getTargetClass()): + _v_setattr_called = False + def __setattr__(self, name, value): + raise AssertionError("Should not be called") + inst = subclass() + self.assertEqual(object.__getattribute__(inst, '_v_setattr_called'), False) + + def test_can_set__p_attrs_if_subclass_denies_setattr(self): + # ZODB defines a PersistentBroken subclass that only lets us + # set things that start with _p, so make sure we can do that + class Broken(self._getTargetClass()): + def __setattr__(self, name, value): + if name.startswith('_p_'): + super(Broken, self).__setattr__(name, value) + else: + raise AssertionError("Can't change broken objects") + + KEY = b'123' + jar = self._makeJar() + + broken = Broken() + broken._p_oid = KEY + broken._p_jar = jar + + broken._p_changed = True + broken._p_changed = 0 + + def test_p_invalidate_calls_p_deactivate(self): + class P(self._getTargetClass()): + deactivated = False + def _p_deactivate(self): + self.deactivated = True + p = P() + p._p_invalidate() + self.assertTrue(p.deactivated) + + + def test_new_ghost_success_not_already_ghost_dict(self): + # https://github.com/zopefoundation/persistent/issues/49 + 
# calling new_ghost on an object that already has state just changes + # its flags, it doesn't destroy the state. + from persistent.interfaces import GHOST + from persistent.interfaces import UPTODATE + class TestPersistent(self._getTargetClass()): + pass + KEY = b'123' + jar = self._makeJar() + cache = self._makeRealCache(jar) + candidate = TestPersistent() + + candidate.set_by_new = 1 + self.assertEqual(candidate._p_state, UPTODATE) + cache.new_ghost(KEY, candidate) + + self.assertIs(cache.get(KEY), candidate) + self.assertEqual(candidate._p_oid, KEY) + self.assertEqual(candidate._p_state, GHOST) + self.assertEqual(candidate.set_by_new, 1) + + def test_new_ghost_success_not_already_ghost_slot(self): + # https://github.com/zopefoundation/persistent/issues/49 + # calling new_ghost on an object that already has state just changes + # its flags, it doesn't destroy the state. + from persistent.interfaces import GHOST + from persistent.interfaces import UPTODATE + class TestPersistent(self._getTargetClass()): + __slots__ = ('set_by_new', '__weakref__') + KEY = b'123' + jar = self._makeJar() + cache = self._makeRealCache(jar) + candidate = TestPersistent() + candidate.set_by_new = 1 + self.assertEqual(candidate._p_state, UPTODATE) + cache.new_ghost(KEY, candidate) + + self.assertIs(cache.get(KEY), candidate) + self.assertEqual(candidate._p_oid, KEY) + self.assertEqual(candidate._p_state, GHOST) + self.assertEqual(candidate.set_by_new, 1) + + # The number 12345678 as a p64, 8-byte string + _PACKED_OID = b'\x00\x00\x00\x00\x00\xbcaN' + # The number 12345678 printed in hex + _HEX_OID = '0xbc614e' + + def _normalize_repr(self, r): + # addresses + r = re.sub(r'at 0x[0-9a-fA-F]*', 'at 0xdeadbeef', r) + # Python 3.7 removed the trailing , in exception reprs + r = r.replace("',)", "')") + return r + + def _normalized_repr(self, o): + return self._normalize_repr(repr(o)) + + def test_repr_no_oid_no_jar(self): + p = self._makeOne() + result = self._normalized_repr(p) + 
self.assertEqual(result, '') + + def test_repr_no_oid_in_jar(self): + p = self._makeOne() + + class Jar(object): + def __repr__(self): + return '' + + p._p_jar = Jar() + + result = self._normalized_repr(p) + self.assertEqual( + result, + ">") + + def test_repr_oid_no_jar(self): + p = self._makeOne() + p._p_oid = self._PACKED_OID + + result = self._normalized_repr(p) + self.assertEqual( + result, + "") + + def test_64bit_oid(self): + import struct + p = self._makeOne() + oid_value = 2 << 62 + self.assertEqual(oid_value.bit_length(), 64) + oid = struct.pack(">Q", oid_value) + self.assertEqual(oid, b'\x80\x00\x00\x00\x00\x00\x00\x00') + + p._p_oid = oid + result = self._normalized_repr(p) + self.assertEqual( + result, + '' + ) + + def test_repr_no_oid_repr_jar_raises_exception(self): + p = self._makeOne() + + class Jar(object): + def __repr__(self): + raise Exception('jar repr failed') + + p._p_jar = Jar() + + result = self._normalized_repr(p) + self.assertEqual( + result, + "") + + + def test_repr_oid_raises_exception_no_jar(self): + p = self._makeOne() + + class BadOID(bytes): + def __repr__(self): + raise Exception("oid repr failed") + + # Our OID is bytes, 8 bytes long. We don't call its repr. + p._p_oid = BadOID(self._PACKED_OID) + + result = self._normalized_repr(p) + self.assertEqual( + result, + "") + + # Anything other than 8 bytes, though, we do. 
+ p._p_oid = BadOID(b'1234567') + + result = self._normalized_repr(p) + self.assertEqual( + result, + "") + + + def test_repr_oid_and_jar_raise_exception(self): + p = self._makeOne() + + class BadOID(bytes): + def __repr__(self): + raise Exception("oid repr failed") + p._p_oid = BadOID(b'1234567') + + class Jar(object): + def __repr__(self): + raise Exception('jar repr failed') + + p._p_jar = Jar() + + + result = self._normalized_repr(p) + self.assertEqual( + result, + "") + + def test_repr_no_oid_repr_jar_raises_baseexception(self): + p = self._makeOne() + + class Jar(object): + def __repr__(self): + raise BaseException('jar repr failed') + + p._p_jar = Jar() + with self.assertRaisesRegex(BaseException, 'jar repr failed'): + repr(p) + + def test_repr_oid_raises_baseexception_no_jar(self): + p = self._makeOne() + + class BadOID(bytes): + def __repr__(self): + raise BaseException("oid repr failed") + p._p_oid = BadOID(b'12345678') + + # An 8 byte byte string doesn't have repr called. + repr(p) + + # Anything other does. 
+ p._p_oid = BadOID(b'1234567') + with self.assertRaisesRegex(BaseException, 'oid repr failed'): + repr(p) + + def test_repr_oid_and_jar(self): + p = self._makeOne() + p._p_oid = self._PACKED_OID + + class Jar(object): + def __repr__(self): + return '' + + p._p_jar = Jar() + + result = self._normalized_repr(p) + self.assertEqual( + result, + ">") + + def test__p_repr(self): + class P(self._getTargetClass()): + def _p_repr(self): + return "Override" + p = P() + self.assertEqual("Override", repr(p)) + + def test__p_repr_exception(self): + class P(self._getTargetClass()): + def _p_repr(self): + raise Exception("_p_repr failed") + p = P() + result = self._normalized_repr(p) + self.assertEqual( + result, + "") + + p._p_oid = self._PACKED_OID + result = self._normalized_repr(p) + self.assertEqual( + result, + "") + + class Jar(object): + def __repr__(self): + return '' + + p._p_jar = Jar() + result = self._normalized_repr(p) + self.assertEqual( + result, + " _p_repr Exception('_p_repr failed')>") + + def test__p_repr_in_instance_ignored(self): + class P(self._getTargetClass()): + pass + p = P() + p._p_repr = lambda: "Instance" + result = self._normalized_repr(p) + self.assertEqual(result, + '') + + def test__p_repr_baseexception(self): + class P(self._getTargetClass()): + def _p_repr(self): + raise BaseException("_p_repr failed") + p = P() + with self.assertRaisesRegex(BaseException, '_p_repr failed'): + repr(p) + +class PyPersistentTests(unittest.TestCase, _Persistent_Base): + + def _getTargetClass(self): + from persistent.persistence import Persistent + return Persistent + + def _makeCache(self, jar): + + class _Cache(object): + def __init__(self, jar): + self._jar = jar + self._mru = [] + self._data = {} + def mru(self, oid): + self._mru.append(oid) + def new_ghost(self, oid, obj): + obj._p_jar = self._jar + obj._p_oid = oid + self._data[oid] = obj + def get(self, oid): + return self._data.get(oid) + def __delitem__(self, oid): + del self._data[oid] + def 
update_object_size_estimation(self, oid, new_size): + pass + + return _Cache(jar) + + def _makeRealCache(self, jar): + from persistent.picklecache import PickleCache + return PickleCache(jar, 10) + + def _checkMRU(self, jar, value): + self.assertEqual(list(jar._cache._mru), value) + + def _clearMRU(self, jar): + jar._cache._mru[:] = [] + + def test_accessed_with_jar_and_oid_but_not_in_cache(self): + # This scenario arises in ZODB: ZODB.serialize.ObjectWriter + # can assign a jar and an oid to newly seen persistent objects, + # but because they are newly created, they aren't in the + # pickle cache yet. + # Nothing should blow up when this happens + KEY = b'123' + jar = self._makeJar() + c1 = self._makeOne() + c1._p_oid = KEY + c1._p_jar = jar + + def mru(oid): + # Mimic what the real cache does + if oid not in jar._cache._mru: + raise KeyError(oid) + raise AssertionError("Should never get here") + jar._cache.mru = mru + c1._p_accessed() + self._checkMRU(jar, []) + + def test_accessed_invalidated_with_jar_and_oid_but_no_cache(self): + # This scenario arises in ZODB tests where the jar is faked + KEY = b'123' + class Jar(object): + accessed = False + def __getattr__(self, name): + if name == '_cache': + self.accessed = True + raise AttributeError(name) + def register(self, *args): + pass + c1 = self._makeOne() + + c1._p_oid = KEY + c1._p_jar = Jar() + c1._p_changed = True + self.assertEqual(c1._p_state, 1) + c1._p_accessed() + self.assertTrue(c1._p_jar.accessed) + + c1._p_jar.accessed = False + c1._p_invalidate_deactivate_helper() + self.assertTrue(c1._p_jar.accessed) + + c1._p_jar.accessed = False + c1._Persistent__flags = None # coverage + c1._p_invalidate_deactivate_helper() + self.assertTrue(c1._p_jar.accessed) + + def test_p_activate_with_jar_without_oid(self): + # Works, but nothing happens + inst = self._makeOne() + inst._p_jar = object() + inst._p_oid = None + object.__setattr__(inst, '_Persistent__flags', None) + inst._p_activate() + + def 
test_p_accessed_with_jar_without_oid(self): + # Works, but nothing happens + inst = self._makeOne() + inst._p_jar = object() + inst._p_accessed() + + def test_p_accessed_with_jar_with_oid_as_ghost(self): + # Works, but nothing happens + inst = self._makeOne() + inst._p_jar = object() + inst._p_oid = 42 + inst._Persistent__flags = None + inst._p_accessed() + + +@skipIfNoCExtension +class CPersistentTests(unittest.TestCase, _Persistent_Base): + + def _getTargetClass(self): + from persistent.cPersistence import Persistent + return Persistent + + def _checkMRU(self, jar, value): + pass # Figure this out later + + def _clearMRU(self, jar): + pass # Figure this out later + + def _makeCache(self, jar): + from persistent.cPickleCache import PickleCache + return PickleCache(jar) + + +@skipIfNoCExtension +class Test_simple_new(unittest.TestCase): + + def _callFUT(self, x): + from persistent.cPersistence import simple_new + return simple_new(x) + + def test_w_non_type(self): + self.assertRaises(TypeError, self._callFUT, '') + + def test_w_type(self): + TO_CREATE = [type, list, tuple, object, dict] + for typ in TO_CREATE: + self.assertTrue(isinstance(self._callFUT(typ), typ)) + +def test_suite(): + return unittest.defaultTestLoader.loadTestsFromName(__name__) diff --git a/thesisenv/lib/python3.6/site-packages/persistent/tests/test_picklecache.py b/thesisenv/lib/python3.6/site-packages/persistent/tests/test_picklecache.py new file mode 100644 index 0000000..87cf433 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/persistent/tests/test_picklecache.py @@ -0,0 +1,996 @@ +############################################################################## +# +# Copyright (c) 2009 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +import gc +import os +import platform +import sys +import unittest + +from persistent.interfaces import UPTODATE + +_is_pypy = platform.python_implementation() == 'PyPy' +_is_jython = 'java' in sys.platform + +_marker = object() + +class PickleCacheTests(unittest.TestCase): + + # py2/3 compat + assertRaisesRegex = getattr(unittest.TestCase, + 'assertRaisesRegex', + unittest.TestCase.assertRaisesRegexp) + + + def setUp(self): + import persistent.picklecache + self.orig_types = persistent.picklecache._CACHEABLE_TYPES + persistent.picklecache._CACHEABLE_TYPES += (DummyPersistent,) + + def tearDown(self): + import persistent.picklecache + persistent.picklecache._CACHEABLE_TYPES = self.orig_types + + def _getTargetClass(self): + from persistent.picklecache import PickleCache + return PickleCache + + def _makeOne(self, jar=None, target_size=10): + if jar is None: + jar = DummyConnection() + return self._getTargetClass()(jar, target_size) + + def _makePersist(self, state=None, oid=b'foo', jar=_marker): + from persistent.interfaces import GHOST + + if state is None: + state = GHOST + if jar is _marker: + jar = DummyConnection() + persist = DummyPersistent() + persist._p_state = state + persist._p_oid = oid + persist._p_jar = jar + return persist + + def test_class_conforms_to_IPickleCache(self): + from zope.interface.verify import verifyClass + from persistent.interfaces import IPickleCache + verifyClass(IPickleCache, self._getTargetClass()) + + def test_instance_conforms_to_IPickleCache(self): + from zope.interface.verify import verifyObject + from persistent.interfaces import IPickleCache + verifyObject(IPickleCache, self._makeOne()) + + def 
test_empty(self): + cache = self._makeOne() + + self.assertEqual(len(cache), 0) + self.assertEqual(_len(cache.items()), 0) + self.assertEqual(_len(cache.klass_items()), 0) + self.assertEqual(cache.ringlen(), 0) + self.assertEqual(len(cache.lru_items()), 0) + self.assertEqual(cache.cache_size, 10) + self.assertEqual(cache.cache_drain_resistance, 0) + self.assertEqual(cache.cache_non_ghost_count, 0) + self.assertEqual(dict(cache.cache_data), {}) + self.assertEqual(cache.cache_klass_count, 0) + + def test___getitem___nonesuch_raises_KeyError(self): + cache = self._makeOne() + + self.assertRaises(KeyError, lambda: cache['nonesuch']) + + def test_get_nonesuch_no_default(self): + cache = self._makeOne() + + self.assertEqual(cache.get('nonesuch'), None) + + def test_get_nonesuch_w_default(self): + cache = self._makeOne() + default = object + + self.assertTrue(cache.get('nonesuch', default) is default) + + def test___setitem___non_string_oid_raises_TypeError(self): + cache = self._makeOne() + + with self.assertRaises(TypeError): + cache[object()] = self._makePersist() + + def test___setitem___duplicate_oid_same_obj(self): + + KEY = b'original' + cache = self._makeOne() + original = self._makePersist(oid=KEY) + cache[KEY] = original + cache[KEY] = original + + def test___setitem___duplicate_oid_raises_ValueError(self): + + KEY = b'original' + cache = self._makeOne() + original = self._makePersist(oid=KEY) + cache[KEY] = original + duplicate = self._makePersist(oid=KEY) + + with self.assertRaises(ValueError): + cache[KEY] = duplicate + + def test___setitem___ghost(self): + from persistent.interfaces import GHOST + + KEY = b'ghost' + cache = self._makeOne() + ghost = self._makePersist(state=GHOST, oid=KEY) + + cache[KEY] = ghost + + self.assertEqual(len(cache), 1) + items = list(cache.items()) + self.assertEqual(len(items), 1) + self.assertEqual(_len(cache.klass_items()), 0) + self.assertEqual(items[0][0], KEY) + self.assertEqual(cache.ringlen(), 0) + 
self.assertTrue(items[0][1] is ghost) + self.assertTrue(cache[KEY] is ghost) + + def test___setitem___mismatch_key_oid(self): + KEY = b'uptodate' + cache = self._makeOne() + uptodate = self._makePersist(state=UPTODATE) + + with self.assertRaises(ValueError): + cache[KEY] = uptodate + + + def test___setitem___non_ghost(self): + KEY = b'uptodate' + cache = self._makeOne() + uptodate = self._makePersist(state=UPTODATE, oid=KEY) + + cache[KEY] = uptodate + + self.assertEqual(len(cache), 1) + items = list(cache.items()) + self.assertEqual(len(items), 1) + self.assertEqual(_len(cache.klass_items()), 0) + self.assertEqual(items[0][0], KEY) + self.assertEqual(cache.ringlen(), 1) + self.assertTrue(items[0][1] is uptodate) + self.assertTrue(cache[KEY] is uptodate) + self.assertTrue(cache.get(KEY) is uptodate) + + def test___setitem___persistent_class(self): + + KEY = b'pclass' + class pclass(object): + _p_oid = KEY + cache = self._makeOne() + + cache[KEY] = pclass + + kitems = list(cache.klass_items()) + self.assertEqual(len(cache), 1) + self.assertEqual(_len(cache.items()), 0) + self.assertEqual(len(kitems), 1) + self.assertEqual(kitems[0][0], KEY) + self.assertTrue(kitems[0][1] is pclass) + self.assertTrue(cache[KEY] is pclass) + self.assertTrue(cache.get(KEY) is pclass) + + def test___delitem___non_string_oid_raises_TypeError(self): + cache = self._makeOne() + + with self.assertRaises(TypeError): + del cache[object()] + + def test___delitem___nonesuch_raises_KeyError(self): + + cache = self._makeOne() + + with self.assertRaises(KeyError): + del cache[b'nonesuch'] + + def test___delitem___w_persistent_class(self): + + KEY = b'pclass' + cache = self._makeOne() + class pclass(object): + _p_oid = KEY + cache = self._makeOne() + + cache[KEY] = pclass + del cache[KEY] + self.assertTrue(cache.get(KEY, self) is self) + self.assertFalse(KEY in cache.persistent_classes) + self.assertEqual(cache.ringlen(), 0) + + def test___delitem___w_normal_object(self): + KEY = b'uptodate' + 
cache = self._makeOne() + uptodate = self._makePersist(state=UPTODATE, oid=KEY) + + cache[KEY] = uptodate + + del cache[KEY] + self.assertTrue(cache.get(KEY, self) is self) + + def test___delitem___w_ghost(self): + from persistent.interfaces import GHOST + + cache = self._makeOne() + KEY = b'ghost' + ghost = self._makePersist(state=GHOST, oid=KEY) + + cache[KEY] = ghost + + del cache[KEY] + self.assertTrue(cache.get(KEY, self) is self) + + def test___delitem___w_remaining_object(self): + cache = self._makeOne() + REMAINS = b'remains' + UPTODATE = b'uptodate' + remains = self._makePersist(state=UPTODATE, oid=REMAINS) + uptodate = self._makePersist(state=UPTODATE, oid=UPTODATE) + + cache[REMAINS] = remains + cache[UPTODATE] = uptodate + + del cache[UPTODATE] + self.assertTrue(cache.get(UPTODATE, self) is self) + self.assertTrue(cache.get(REMAINS, self) is remains) + + def test_lruitems(self): + cache = self._makeOne() + ONE = b'one' + TWO = b'two' + THREE = b'three' + cache[ONE] = self._makePersist(oid=b'one', state=UPTODATE) + cache[TWO] = self._makePersist(oid=b'two', state=UPTODATE) + cache[THREE] = self._makePersist(oid=b'three', state=UPTODATE) + + items = cache.lru_items() + self.assertEqual(_len(items), 3) + self.assertEqual(items[0][0], ONE) + self.assertEqual(items[1][0], TWO) + self.assertEqual(items[2][0], THREE) + + def test_mru_nonesuch_raises_KeyError(self): + cache = self._makeOne() + + self.assertRaises(KeyError, cache.mru, b'nonesuch') + + def test_mru_normal(self): + ONE = b'one' + TWO = b'two' + THREE = b'three' + cache = self._makeOne() + cache[ONE] = self._makePersist(oid=b'one', state=UPTODATE) + cache[TWO] = self._makePersist(oid=b'two', state=UPTODATE) + cache[THREE] = self._makePersist(oid=b'three', state=UPTODATE) + + cache.mru(TWO) + + self.assertEqual(cache.ringlen(), 3) + items = cache.lru_items() + self.assertEqual(_len(items), 3) + self.assertEqual(items[0][0], ONE) + self.assertEqual(items[1][0], THREE) + self.assertEqual(items[2][0], 
TWO) + + def test_mru_ghost(self): + from persistent.interfaces import GHOST + + ONE = b'one' + TWO = b'two' + THREE = b'three' + cache = self._makeOne() + cache[ONE] = self._makePersist(oid=b'one', state=UPTODATE) + two = cache[TWO] = self._makePersist(oid=b'two', state=GHOST) + # two must live to survive gc + self.assertIsNotNone(two) + cache[THREE] = self._makePersist(oid=b'three', state=UPTODATE) + + cache.mru(TWO) + + self.assertEqual(cache.ringlen(), 2) + items = cache.lru_items() + self.assertEqual(_len(items), 2) + self.assertEqual(items[0][0], ONE) + self.assertEqual(items[1][0], THREE) + + def test_mru_was_ghost_now_active(self): + from persistent.interfaces import GHOST + + ONE = b'one' + TWO = b'two' + THREE = b'three' + cache = self._makeOne() + cache[ONE] = self._makePersist(oid=b'one', state=UPTODATE) + two = cache[TWO] = self._makePersist(oid=b'two', state=GHOST) + cache[THREE] = self._makePersist(oid=b'three', state=UPTODATE) + + two._p_state = UPTODATE + cache.mru(TWO) + + self.assertEqual(cache.ringlen(), 3) + items = cache.lru_items() + self.assertEqual(_len(items), 3) + self.assertEqual(items[0][0], ONE) + self.assertEqual(items[1][0], THREE) + self.assertEqual(items[2][0], TWO) + + def test_mru_first(self): + ONE = b'one' + TWO = b'two' + THREE = b'three' + cache = self._makeOne() + cache[ONE] = self._makePersist(oid=b'one', state=UPTODATE) + cache[TWO] = self._makePersist(oid=b'two', state=UPTODATE) + cache[THREE] = self._makePersist(oid=b'three', state=UPTODATE) + + cache.mru(ONE) + + self.assertEqual(cache.ringlen(), 3) + items = cache.lru_items() + self.assertEqual(_len(items), 3) + self.assertEqual(items[0][0], TWO) + self.assertEqual(items[1][0], THREE) + self.assertEqual(items[2][0], ONE) + + def test_mru_last(self): + ONE = b'one' + TWO = b'two' + THREE = b'three' + cache = self._makeOne() + cache[ONE] = self._makePersist(oid=b'one', state=UPTODATE) + cache[TWO] = self._makePersist(oid=b'two', state=UPTODATE) + cache[THREE] = 
self._makePersist(oid=b'three', state=UPTODATE) + + cache.mru(THREE) + + self.assertEqual(cache.ringlen(), 3) + items = cache.lru_items() + self.assertEqual(_len(items), 3) + self.assertEqual(items[0][0], ONE) + self.assertEqual(items[1][0], TWO) + self.assertEqual(items[2][0], THREE) + + def _numbered_oid(self, i): + # Python 3.4 doesn't support % on bytes, + # so we go the long way + oid_s = 'oid_%04d' % i + return oid_s.encode('ascii') + + def _populate_cache(self, cache, count=100, + state_0=UPTODATE, + state_rest=UPTODATE): + + oids = [] + for i in range(100): + oid = self._numbered_oid(i) + oids.append(oid) + state = state_0 if i == 0 else state_rest + cache[oid] = self._makePersist(oid=oid, state=state) + return oids + + def test_incrgc_simple(self): + cache = self._makeOne() + oids = self._populate_cache(cache) + self.assertEqual(cache.cache_non_ghost_count, 100) + + cache.incrgc() + gc.collect() # banish the ghosts who are no longer in the ring + + self.assertEqual(cache.cache_non_ghost_count, 10) + items = cache.lru_items() + self.assertEqual(_len(items), 10) + self.assertEqual(items[0][0], b'oid_0090') + self.assertEqual(items[1][0], b'oid_0091') + self.assertEqual(items[2][0], b'oid_0092') + self.assertEqual(items[3][0], b'oid_0093') + self.assertEqual(items[4][0], b'oid_0094') + self.assertEqual(items[5][0], b'oid_0095') + self.assertEqual(items[6][0], b'oid_0096') + self.assertEqual(items[7][0], b'oid_0097') + self.assertEqual(items[8][0], b'oid_0098') + self.assertEqual(items[9][0], b'oid_0099') + + for oid in oids[:90]: + self.assertTrue(cache.get(oid) is None) + + for oid in oids[90:]: + self.assertFalse(cache.get(oid) is None) + + def test_incrgc_w_smaller_drain_resistance(self): + cache = self._makeOne() + cache.drain_resistance = 2 + self._populate_cache(cache) + self.assertEqual(cache.cache_non_ghost_count, 100) + + cache.incrgc() + + self.assertEqual(cache.cache_non_ghost_count, 10) + + def test_incrgc_w_larger_drain_resistance(self): + cache 
= self._makeOne() + cache.drain_resistance = 2 + cache.cache_size = 90 + self._populate_cache(cache) + + self.assertEqual(cache.cache_non_ghost_count, 100) + + cache.incrgc() + + self.assertEqual(cache.cache_non_ghost_count, 49) + + def test_full_sweep(self): + cache = self._makeOne() + oids = self._populate_cache(cache) + self.assertEqual(cache.cache_non_ghost_count, 100) + + cache.full_sweep() + gc.collect() # banish the ghosts who are no longer in the ring + + self.assertEqual(cache.cache_non_ghost_count, 0) + + for oid in oids: + self.assertTrue(cache.get(oid) is None) + + def test_full_sweep_w_sticky(self): + from persistent.interfaces import STICKY + + cache = self._makeOne() + oids = self._populate_cache(cache, state_0=STICKY) + self.assertEqual(cache.cache_non_ghost_count, 100) + + cache.full_sweep() + gc.collect() # banish the ghosts who are no longer in the ring + + self.assertEqual(cache.cache_non_ghost_count, 1) + + self.assertTrue(cache.get(oids[0]) is not None) + for oid in oids[1:]: + self.assertTrue(cache.get(oid) is None) + + def test_full_sweep_w_changed(self): + from persistent.interfaces import CHANGED + + cache = self._makeOne() + oids = self._populate_cache(cache, state_0=CHANGED) + self.assertEqual(cache.cache_non_ghost_count, 100) + + cache.full_sweep() + gc.collect() # banish the ghosts who are no longer in the ring + + self.assertEqual(cache.cache_non_ghost_count, 1) + + self.assertTrue(cache.get(oids[0]) is not None) + for oid in oids[1:]: + self.assertTrue(cache.get(oid) is None) + + def test_minimize(self): + + cache = self._makeOne() + oids = self._populate_cache(cache) + self.assertEqual(cache.cache_non_ghost_count, 100) + + cache.minimize() + gc.collect() # banish the ghosts who are no longer in the ring + + self.assertEqual(cache.cache_non_ghost_count, 0) + + for oid in oids: + self.assertTrue(cache.get(oid) is None) + + def test_minimize_turns_into_ghosts(self): + from persistent.interfaces import GHOST + + cache = self._makeOne() 
+ oid = self._numbered_oid(1) + obj = cache[oid] = self._makePersist(oid=oid, state=UPTODATE) + self.assertEqual(cache.cache_non_ghost_count, 1) + + cache.minimize() + gc.collect() # banish the ghosts who are no longer in the ring + + self.assertEqual(cache.cache_non_ghost_count, 0) + + self.assertEqual(obj._p_state, GHOST) + + def test_new_ghost_non_persistent_object(self): + + cache = self._makeOne() + with self.assertRaises(AttributeError): + cache.new_ghost(b'123', object()) + + def test_new_ghost_obj_already_has_oid(self): + + from persistent.interfaces import GHOST + candidate = self._makePersist(oid=b'123', state=GHOST) + cache = self._makeOne() + with self.assertRaises(ValueError): + cache.new_ghost(b'123', candidate) + + def test_new_ghost_obj_already_has_jar(self): + cache = self._makeOne() + candidate = self._makePersist(oid=None, jar=object()) + with self.assertRaises(ValueError): + cache.new_ghost(b'123', candidate) + + def test_new_ghost_obj_already_in_cache(self): + + KEY = b'123' + cache = self._makeOne() + candidate = self._makePersist(oid=KEY) + cache[KEY] = candidate + # Now, normally we can't get in the cache without an oid and jar + # (the C implementation doesn't allow it), so if we try to create + # a ghost, we get the value error + self.assertRaises(ValueError, cache.new_ghost, KEY, candidate) + candidate._p_oid = None + self.assertRaises(ValueError, cache.new_ghost, KEY, candidate) + # if we're sneaky and remove the OID and jar, then we get the duplicate + # key error + candidate._p_jar = None + self.assertRaises(KeyError, cache.new_ghost, KEY, candidate) + + def test_new_ghost_success_already_ghost(self): + from persistent.interfaces import GHOST + + KEY = b'123' + cache = self._makeOne() + candidate = self._makePersist(oid=None, jar=None) + cache.new_ghost(KEY, candidate) + self.assertTrue(cache.get(KEY) is candidate) + self.assertEqual(candidate._p_oid, KEY) + self.assertEqual(candidate._p_jar, cache.jar) + 
self.assertEqual(candidate._p_state, GHOST) + + def test_new_ghost_success_not_already_ghost(self): + from persistent.interfaces import GHOST + + KEY = b'123' + cache = self._makeOne() + candidate = self._makePersist(oid=None, jar=None, state=UPTODATE) + cache.new_ghost(KEY, candidate) + self.assertTrue(cache.get(KEY) is candidate) + self.assertEqual(candidate._p_oid, KEY) + self.assertEqual(candidate._p_jar, cache.jar) + self.assertEqual(candidate._p_state, GHOST) + + def test_new_ghost_w_pclass_non_ghost(self): + KEY = b'123' + class Pclass(object): + _p_oid = None + _p_jar = None + cache = self._makeOne() + cache.new_ghost(KEY, Pclass) + self.assertTrue(cache.get(KEY) is Pclass) + self.assertTrue(cache.persistent_classes[KEY] is Pclass) + self.assertEqual(Pclass._p_oid, KEY) + self.assertEqual(Pclass._p_jar, cache.jar) + + def test_new_ghost_w_pclass_ghost(self): + KEY = b'123' + class Pclass(object): + _p_oid = None + _p_jar = None + cache = self._makeOne() + cache.new_ghost(KEY, Pclass) + self.assertTrue(cache.get(KEY) is Pclass) + self.assertTrue(cache.persistent_classes[KEY] is Pclass) + self.assertEqual(Pclass._p_oid, KEY) + self.assertEqual(Pclass._p_jar, cache.jar) + + def test_reify_miss_single(self): + KEY = b'123' + cache = self._makeOne() + self.assertRaises(KeyError, cache.reify, KEY) + + def test_reify_miss_multiple(self): + KEY = b'123' + KEY2 = b'456' + cache = self._makeOne() + self.assertRaises(KeyError, cache.reify, [KEY, KEY2]) + + def test_reify_hit_single_ghost(self): + from persistent.interfaces import GHOST + + KEY = b'123' + cache = self._makeOne() + candidate = self._makePersist(oid=KEY, jar=cache.jar, state=GHOST) + cache[KEY] = candidate + self.assertEqual(cache.ringlen(), 0) + cache.reify(KEY) + self.assertEqual(cache.ringlen(), 1) + items = cache.lru_items() + self.assertEqual(items[0][0], KEY) + self.assertTrue(items[0][1] is candidate) + self.assertEqual(candidate._p_state, UPTODATE) + + def test_reify_hit_single_non_ghost(self): + 
KEY = b'123' + cache = self._makeOne() + candidate = self._makePersist(oid=KEY, jar=cache.jar, state=UPTODATE) + cache[KEY] = candidate + self.assertEqual(cache.ringlen(), 1) + cache.reify(KEY) + self.assertEqual(cache.ringlen(), 1) + self.assertEqual(candidate._p_state, UPTODATE) + + def test_reify_hit_multiple_mixed(self): + from persistent.interfaces import GHOST + + KEY = b'123' + KEY2 = b'456' + cache = self._makeOne() + c1 = self._makePersist(oid=KEY, jar=cache.jar, state=GHOST) + cache[KEY] = c1 + c2 = self._makePersist(oid=KEY2, jar=cache.jar, state=UPTODATE) + cache[KEY2] = c2 + self.assertEqual(cache.ringlen(), 1) + cache.reify([KEY, KEY2]) + self.assertEqual(cache.ringlen(), 2) + self.assertEqual(c1._p_state, UPTODATE) + self.assertEqual(c2._p_state, UPTODATE) + + def test_invalidate_miss_single(self): + KEY = b'123' + cache = self._makeOne() + cache.invalidate(KEY) # doesn't raise + + def test_invalidate_miss_multiple(self): + KEY = b'123' + KEY2 = b'456' + cache = self._makeOne() + cache.invalidate([KEY, KEY2]) # doesn't raise + + def test_invalidate_hit_single_ghost(self): + from persistent.interfaces import GHOST + + KEY = b'123' + cache = self._makeOne() + candidate = self._makePersist(oid=b'123', jar=cache.jar, state=GHOST) + cache[KEY] = candidate + self.assertEqual(cache.ringlen(), 0) + cache.invalidate(KEY) + self.assertEqual(cache.ringlen(), 0) + self.assertEqual(candidate._p_state, GHOST) + + def test_invalidate_hit_single_non_ghost(self): + from persistent.interfaces import GHOST + + KEY = b'123' + cache = self._makeOne() + candidate = self._makePersist(oid=b'123', jar=cache.jar, state=UPTODATE) + cache[KEY] = candidate + self.assertEqual(cache.ringlen(), 1) + cache.invalidate(KEY) + self.assertEqual(cache.ringlen(), 0) + self.assertEqual(candidate._p_state, GHOST) + + def test_invalidate_hit_multiple_mixed(self): + from persistent.interfaces import GHOST + + KEY = b'123' + KEY2 = b'456' + cache = self._makeOne() + c1 = 
self._makePersist(oid=KEY, jar=cache.jar, state=GHOST) + cache[KEY] = c1 + c2 = self._makePersist(oid=KEY2, jar=cache.jar, state=UPTODATE) + cache[KEY2] = c2 + self.assertEqual(cache.ringlen(), 1) + cache.invalidate([KEY, KEY2]) + self.assertEqual(cache.ringlen(), 0) + self.assertEqual(c1._p_state, GHOST) + self.assertEqual(c2._p_state, GHOST) + + def test_invalidate_hit_multiple_non_ghost(self): + from persistent.interfaces import GHOST + + KEY = b'123' + KEY2 = b'456' + cache = self._makeOne() + c1 = self._makePersist(oid=KEY, jar=cache.jar, state=UPTODATE) + cache[KEY] = c1 + c2 = self._makePersist(oid=KEY2, jar=cache.jar, state=UPTODATE) + cache[KEY2] = c2 + self.assertEqual(cache.ringlen(), 2) + # These should be in the opposite order of how they were + # added to the ring to ensure ring traversal works + cache.invalidate([KEY2, KEY]) + self.assertEqual(cache.ringlen(), 0) + self.assertEqual(c1._p_state, GHOST) + self.assertEqual(c2._p_state, GHOST) + + def test_invalidate_hit_pclass(self): + KEY = b'123' + class Pclass(object): + _p_oid = KEY + _p_jar = None + cache = self._makeOne() + cache[KEY] = Pclass + self.assertTrue(cache.persistent_classes[KEY] is Pclass) + cache.invalidate(KEY) + self.assertFalse(KEY in cache.persistent_classes) + + def test_debug_info_w_persistent_class(self): + KEY = b'pclass' + class pclass(object): + _p_oid = KEY + cache = self._makeOne() + pclass._p_state = UPTODATE + cache[KEY] = pclass + + gc.collect() # pypy vs. refcounting + info = cache.debug_info() + + self.assertEqual(len(info), 1) + oid, refc, typ, state = info[0] + self.assertEqual(oid, KEY) + self.assertEqual(refc, len(gc.get_referents(pclass))) + self.assertEqual(typ, 'type') + self.assertEqual(state, UPTODATE) + + def test_debug_info_w_normal_object(self): + KEY = b'uptodate' + cache = self._makeOne() + uptodate = self._makePersist(state=UPTODATE, oid=KEY) + cache[KEY] = uptodate + + gc.collect() # pypy vs. 
refcounting + info = cache.debug_info() + + self.assertEqual(len(info), 1) + oid, refc, typ, state = info[0] + self.assertEqual(oid, KEY) + self.assertEqual(refc, len(gc.get_referents(uptodate))) + self.assertEqual(typ, 'DummyPersistent') + self.assertEqual(state, UPTODATE) + + + def test_debug_info_w_ghost(self): + from persistent.interfaces import GHOST + + KEY = b'ghost' + cache = self._makeOne() + ghost = self._makePersist(state=GHOST, oid=KEY) + cache[KEY] = ghost + + gc.collect() # pypy vs. refcounting + info = cache.debug_info() + + self.assertEqual(len(info), 1) + oid, refc, typ, state = info[0] + self.assertEqual(oid, KEY) + self.assertEqual(refc, len(gc.get_referents(ghost))) + self.assertEqual(typ, 'DummyPersistent') + self.assertEqual(state, GHOST) + + def test_init_with_cacheless_jar(self): + # Sometimes ZODB tests pass objects that don't + # have a _cache + class Jar(object): + was_set = False + def __setattr__(self, name, value): + if name == '_cache': + object.__setattr__(self, 'was_set', True) + raise AttributeError(name) + + jar = Jar() + self._makeOne(jar) + self.assertTrue(jar.was_set) + + def test_setting_non_persistent_item(self): + cache = self._makeOne() + with self.assertRaisesRegex(TypeError, + "Cache values must be persistent objects."): + cache[None] = object() + + def test_setting_without_jar(self): + cache = self._makeOne() + p = self._makePersist(jar=None) + with self.assertRaisesRegex(ValueError, + "Cached object jar missing"): + cache[p._p_oid] = p + + def test_setting_already_cached(self): + cache1 = self._makeOne() + p = self._makePersist(jar=cache1.jar) + + cache1[p._p_oid] = p + + cache2 = self._makeOne() + with self.assertRaisesRegex(ValueError, + "Object already in another cache"): + cache2[p._p_oid] = p + + def test_cannot_update_mru_while_already_locked(self): + cache = self._makeOne() + cache._is_sweeping_ring = True + + updated = cache.mru(None) + self.assertFalse(updated) + + def 
test_update_object_size_estimation_simple(self): + cache = self._makeOne() + p = self._makePersist(jar=cache.jar) + + cache[p._p_oid] = p + # The cache accesses the private attribute directly to bypass + # the bit conversion. + # Note that the _p_estimated_size is set *after* + # the update call is made in ZODB's serialize + p._Persistent__size = 0 + + cache.update_object_size_estimation(p._p_oid, 2) + + self.assertEqual(cache.total_estimated_size, 64) + + # A missing object does nothing + cache.update_object_size_estimation(None, 2) + self.assertEqual(cache.total_estimated_size, 64) + + def test_cache_size(self): + size = 42 + cache = self._makeOne(target_size=size) + self.assertEqual(cache.cache_size, size) + + cache.cache_size = 64 + self.assertEqual(cache.cache_size, 64) + + def test_sweep_empty(self): + cache = self._makeOne() + self.assertEqual(cache.incrgc(), 0) + + def test_sweep_of_non_deactivating_object(self): + cache = self._makeOne() + p = self._makePersist(jar=cache.jar) + + p._p_state = 0 # non-ghost, get in the ring + cache[p._p_oid] = p + + + def bad_deactivate(): + "Doesn't call super, for it's own reasons, so can't be ejected" + + + p._p_deactivate = bad_deactivate + + import persistent.picklecache + sweep_types = persistent.picklecache._SWEEPABLE_TYPES + persistent.picklecache._SWEEPABLE_TYPES = DummyPersistent + try: + self.assertEqual(cache.full_sweep(), 0) + finally: + persistent.picklecache._SWEEPABLE_TYPES = sweep_types + + del p._p_deactivate + self.assertEqual(cache.full_sweep(), 1) + + if _is_jython: # pragma: no cover + def with_deterministic_gc(f): + def test(self): + old_flags = gc.getMonitorGlobal() + gc.setMonitorGlobal(True) + try: + f(self, force_collect=True) + finally: + gc.setMonitorGlobal(old_flags) + return test + else: + def with_deterministic_gc(f): + return f + + @with_deterministic_gc + def test_cache_garbage_collection_bytes_also_deactivates_object(self, + force_collect=_is_pypy or _is_jython): + cache = self._makeOne() 
+ cache.cache_size = 1000 + oids = [] + for i in range(100): + oid = self._numbered_oid(i) + oids.append(oid) + o = cache[oid] = self._makePersist(oid=oid, state=UPTODATE) + o._Persistent__size = 0 # must start 0, ZODB sets it AFTER updating the size + cache.update_object_size_estimation(oid, 64) + o._Persistent__size = 2 + + # mimic what the real persistent object does to update the cache + # size; if we don't get deactivated by sweeping, the cache size + # won't shrink so this also validates that _p_deactivate gets + # called when ejecting an object. + o._p_deactivate = lambda: cache.update_object_size_estimation(oid, -1) + self.assertEqual(cache.cache_non_ghost_count, 100) + + # A GC at this point does nothing + cache.incrgc() + self.assertEqual(cache.cache_non_ghost_count, 100) + self.assertEqual(len(cache), 100) + + # Now if we set a byte target: + + cache.cache_size_bytes = 1 + # verify the change worked as expected + self.assertEqual(cache.cache_size_bytes, 1) + # verify our entrance assumption is fulfilled + self.assertTrue(cache.cache_size > 100) + self.assertTrue(cache.total_estimated_size > 1) + # A gc shrinks the bytes + cache.incrgc() + self.assertEqual(cache.total_estimated_size, 0) + + # It also shrank the measured size of the cache; + # this would fail under PyPy if _SWEEP_NEEDS_GC was False + if force_collect: # pragma: no cover + gc.collect() + self.assertEqual(len(cache), 1) + + def test_invalidate_persistent_class_calls_p_invalidate(self): + KEY = b'pclass' + class pclass(object): + _p_oid = KEY + invalidated = False + @classmethod + def _p_invalidate(cls): + cls.invalidated = True + + + cache = self._makeOne() + + cache[KEY] = pclass + + cache.invalidate(KEY) + + self.assertTrue(pclass.invalidated) + + def test_ring_impl(self): + from .. 
import ring + + expected = (ring._CFFIRing + if _is_pypy or ring._CFFIRing is not None or os.environ.get('USING_CFFI') + else ring._DequeRing) + self.assertIs(ring.Ring, expected) + + +class DummyPersistent(object): + + def _p_invalidate(self): + from persistent.interfaces import GHOST + self._p_state = GHOST + + _p_deactivate = _p_invalidate + + def _p_invalidate_deactivate_helper(self, clear=True): + self._p_invalidate() + + def _p_activate(self): + self._p_state = UPTODATE + + +class DummyConnection(object): + pass + + +def _len(seq): + return len(list(seq)) + +def test_suite(): + return unittest.defaultTestLoader.loadTestsFromName(__name__) + +if __name__ == '__main__': + unittest.main() diff --git a/thesisenv/lib/python3.6/site-packages/persistent/tests/test_ring.py b/thesisenv/lib/python3.6/site-packages/persistent/tests/test_ring.py new file mode 100644 index 0000000..55464b1 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/persistent/tests/test_ring.py @@ -0,0 +1,157 @@ +############################################################################## +# +# Copyright (c) 2015 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +import unittest + +from .. 
import ring + +#pylint: disable=R0904,W0212,E1101 + +class DummyPersistent(object): + _p_oid = None + + __next_oid = 0 + + @classmethod + def _next_oid(cls): + cls.__next_oid += 1 + return cls.__next_oid + + def __init__(self, oid=None): + if oid is None: + self._p_oid = self._next_oid() + + def __repr__(self): # pragma: no cover + return "" % self._p_oid + +class _Ring_Base(object): + + def _getTargetClass(self): + """Return the type of the ring to test""" + raise NotImplementedError() + + def _makeOne(self): + return self._getTargetClass()() + + def test_empty_len(self): + self.assertEqual(0, len(self._makeOne())) + + def test_empty_contains(self): + r = self._makeOne() + self.assertFalse(DummyPersistent() in r) + + def test_empty_iter(self): + self.assertEqual([], list(self._makeOne())) + + def test_add_one_len1(self): + r = self._makeOne() + p = DummyPersistent() + r.add(p) + self.assertEqual(1, len(r)) + + def test_add_one_contains(self): + r = self._makeOne() + p = DummyPersistent() + r.add(p) + self.assertTrue(p in r) + + def test_delete_one_len0(self): + r = self._makeOne() + p = DummyPersistent() + r.add(p) + r.delete(p) + self.assertEqual(0, len(r)) + + def test_delete_one_multiple(self): + r = self._makeOne() + p = DummyPersistent() + r.add(p) + r.delete(p) + self.assertEqual(0, len(r)) + self.assertFalse(p in r) + + r.delete(p) + self.assertEqual(0, len(r)) + self.assertFalse(p in r) + + def test_delete_from_wrong_ring(self): + r1 = self._makeOne() + r2 = self._makeOne() + p1 = DummyPersistent() + p2 = DummyPersistent() + + r1.add(p1) + r2.add(p2) + + r2.delete(p1) + + self.assertEqual(1, len(r1)) + self.assertEqual(1, len(r2)) + + self.assertEqual([p1], list(r1)) + self.assertEqual([p2], list(r2)) + + def test_move_to_head(self): + r = self._makeOne() + p1 = DummyPersistent() + p2 = DummyPersistent() + p3 = DummyPersistent() + + r.add(p1) + r.add(p2) + r.add(p3) + + self.assertEqual([p1, p2, p3], list(r)) + self.assertEqual(3, len(r)) + + 
r.move_to_head(p1) + self.assertEqual([p2, p3, p1], list(r)) + + r.move_to_head(p3) + self.assertEqual([p2, p1, p3], list(r)) + + r.move_to_head(p3) + self.assertEqual([p2, p1, p3], list(r)) + + def test_delete_all(self): + r = self._makeOne() + p1 = DummyPersistent() + p2 = DummyPersistent() + p3 = DummyPersistent() + + r.add(p1) + r.add(p2) + r.add(p3) + self.assertEqual([p1, p2, p3], list(r)) + + r.delete_all([(0, p1), (2, p3)]) + self.assertEqual([p2], list(r)) + self.assertEqual(1, len(r)) + +class DequeRingTests(unittest.TestCase, _Ring_Base): + + def _getTargetClass(self): + return ring._DequeRing + +_add_to_suite = [DequeRingTests] + +if ring._CFFIRing: + class CFFIRingTests(unittest.TestCase, _Ring_Base): + + def _getTargetClass(self): + return ring._CFFIRing + + _add_to_suite.append(CFFIRingTests) + +def test_suite(): + return unittest.TestSuite([unittest.makeSuite(x) for x in _add_to_suite]) diff --git a/thesisenv/lib/python3.6/site-packages/persistent/tests/test_timestamp.py b/thesisenv/lib/python3.6/site-packages/persistent/tests/test_timestamp.py new file mode 100644 index 0000000..ff8b6a9 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/persistent/tests/test_timestamp.py @@ -0,0 +1,429 @@ +############################################################################## +# +# Copyright (c) 2011 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. 
+# +############################################################################## +import unittest +import sys + +MAX_32_BITS = 2 ** 31 - 1 +MAX_64_BITS = 2 ** 63 - 1 + +import persistent.timestamp + +class Test__UTC(unittest.TestCase): + + def _getTargetClass(self): + from persistent.timestamp import _UTC + return _UTC + + def _makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + def test_tzname(self): + utc = self._makeOne() + self.assertEqual(utc.tzname(), 'UTC') + + def test_utcoffset(self): + from datetime import timedelta + utc = self._makeOne() + self.assertEqual(utc.utcoffset(object()), timedelta(0)) + + def test_dst(self): + utc = self._makeOne() + self.assertEqual(utc.dst(), 0) + + def test_fromutc(self): + source = object() + utc = self._makeOne() + self.assertTrue(utc.fromutc(source) is source) + + +class pyTimeStampTests(unittest.TestCase): + + def _getTargetClass(self): + from persistent.timestamp import pyTimeStamp + return pyTimeStamp + + def _makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + def test_ctor_invalid_arglist(self): + BAD_ARGS = [(), + (1,), + (1, 2), + (1, 2, 3), + (1, 2, 3, 4), + (1, 2, 3, 4, 5), + ('1', '2', '3', '4', '5', '6'), + (1, 2, 3, 4, 5, 6, 7), + (b'123',), + ] + for args in BAD_ARGS: + with self.assertRaises((TypeError, ValueError)): + self._makeOne(*args) + + def test_ctor_from_invalid_strings(self): + BAD_ARGS = ['' + '\x00', + '\x00' * 2, + '\x00' * 3, + '\x00' * 4, + '\x00' * 5, + '\x00' * 7, + ] + for args in BAD_ARGS: + self.assertRaises((TypeError, ValueError), self._makeOne, *args) + + def test_ctor_from_string(self): + from persistent.timestamp import _makeUTC + ZERO = _makeUTC(1900, 1, 1, 0, 0, 0) + EPOCH = _makeUTC(1970, 1, 1, 0, 0, 0) + DELTA = ZERO - EPOCH + DELTA_SECS = DELTA.days * 86400 + DELTA.seconds + SERIAL = b'\x00' * 8 + ts = self._makeOne(SERIAL) + self.assertEqual(ts.raw(), SERIAL) + self.assertEqual(ts.year(), 1900) + self.assertEqual(ts.month(), 
1) + self.assertEqual(ts.day(), 1) + self.assertEqual(ts.hour(), 0) + self.assertEqual(ts.minute(), 0) + self.assertEqual(ts.second(), 0.0) + self.assertEqual(ts.timeTime(), DELTA_SECS) + + def test_ctor_from_string_non_zero(self): + before = self._makeOne(2011, 2, 16, 14, 37, 22.80544) + after = self._makeOne(before.raw()) + self.assertEqual(before.raw(), after.raw()) + self.assertEqual(before.timeTime(), 1297867042.80544) + + def test_ctor_from_elements(self): + from persistent.timestamp import _makeUTC + ZERO = _makeUTC(1900, 1, 1, 0, 0, 0) + EPOCH = _makeUTC(1970, 1, 1, 0, 0, 0) + DELTA = ZERO - EPOCH + DELTA_SECS = DELTA.days * 86400 + DELTA.seconds + SERIAL = b'\x00' * 8 + ts = self._makeOne(1900, 1, 1, 0, 0, 0.0) + self.assertEqual(ts.raw(), SERIAL) + self.assertEqual(ts.year(), 1900) + self.assertEqual(ts.month(), 1) + self.assertEqual(ts.day(), 1) + self.assertEqual(ts.hour(), 0) + self.assertEqual(ts.minute(), 0) + self.assertEqual(ts.second(), 0.0) + self.assertEqual(ts.timeTime(), DELTA_SECS) + + def test_laterThan_invalid(self): + ERRORS = (ValueError, TypeError) + SERIAL = b'\x01' * 8 + ts = self._makeOne(SERIAL) + self.assertRaises(ERRORS, ts.laterThan, None) + self.assertRaises(ERRORS, ts.laterThan, '') + self.assertRaises(ERRORS, ts.laterThan, ()) + self.assertRaises(ERRORS, ts.laterThan, []) + self.assertRaises(ERRORS, ts.laterThan, {}) + self.assertRaises(ERRORS, ts.laterThan, object()) + + def test_laterThan_self_is_earlier(self): + SERIAL1 = b'\x01' * 8 + SERIAL2 = b'\x02' * 8 + ts1 = self._makeOne(SERIAL1) + ts2 = self._makeOne(SERIAL2) + later = ts1.laterThan(ts2) + self.assertEqual(later.raw(), b'\x02' * 7 + b'\x03') + + def test_laterThan_self_is_later(self): + SERIAL1 = b'\x01' * 8 + SERIAL2 = b'\x02' * 8 + ts1 = self._makeOne(SERIAL1) + ts2 = self._makeOne(SERIAL2) + later = ts2.laterThan(ts1) + self.assertTrue(later is ts2) + + def test_repr(self): + SERIAL = b'\x01' * 8 + ts = self._makeOne(SERIAL) + self.assertEqual(repr(ts), 
repr(SERIAL)) + + def test_comparisons_to_non_timestamps(self): + import operator + from persistent._compat import PYTHON2 + # Check the corner cases when comparing non-comparable types + ts = self._makeOne(2011, 2, 16, 14, 37, 22.0) + + def check_common(op, passes): + if passes == 'neither': + self.assertFalse(op(ts, None)) + self.assertFalse(op(None, ts)) + return True + + if passes == 'both': + self.assertTrue(op(ts, None)) + self.assertTrue(op(None, ts)) + return True + return False + + def check_py2(op, passes): # pragma: no cover + if passes == 'first': + self.assertTrue(op(ts, None)) + self.assertFalse(op(None, ts)) + else: + self.assertFalse(op(ts, None)) + self.assertTrue(op(None, ts)) + + def check_py3(op, passes): + self.assertRaises(TypeError, op, ts, None) + self.assertRaises(TypeError, op, None, ts) + + check = check_py2 if PYTHON2 else check_py3 + + for op_name, passes in (('lt', 'second'), + ('gt', 'first'), + ('le', 'second'), + ('ge', 'first'), + ('eq', 'neither'), + ('ne', 'both')): + op = getattr(operator, op_name) + if not check_common(op, passes): + check(op, passes) + + +class TimeStampTests(pyTimeStampTests): + + def _getTargetClass(self): + from persistent.timestamp import TimeStamp + return TimeStamp + +@unittest.skipIf(persistent.timestamp.CTimeStamp is None, + "CTimeStamp not available") +class PyAndCComparisonTests(unittest.TestCase): + """ + Compares C and Python implementations. 
+ """ + + # A particular instant in time + now = 1229959248.3 + # That instant in time split as the result of this expression: + # (time.gmtime(now)[:5] + (now % 60,)) + now_ts_args = (2008, 12, 22, 15, 20, 48.299999952316284) + + def _make_many_instants(self): + # Given the above data, return many slight variations on + # it to test matching + yield self.now_ts_args + for i in range(2000): + yield self.now_ts_args[:-1] + (self.now_ts_args[-1] + (i % 60.0)/100.0, ) + + def _makeC(self, *args, **kwargs): + from persistent.timestamp import TimeStamp + return TimeStamp(*args, **kwargs) + + def _makePy(self, *args, **kwargs): + from persistent.timestamp import pyTimeStamp + return pyTimeStamp(*args, **kwargs) + + @property + def _is_jython(self): + import platform + py_impl = getattr(platform, 'python_implementation', lambda: None) + return py_impl() == 'Jython' + + def _make_C_and_Py(self, *args, **kwargs): + return self._makeC(*args, **kwargs), self._makePy(*args, **kwargs) + + def test_reprs_equal(self): + for args in self._make_many_instants(): + c, py = self._make_C_and_Py(*args) + self.assertEqual(repr(c), repr(py)) + + def test_strs_equal(self): + for args in self._make_many_instants(): + c, py = self._make_C_and_Py(*args) + self.assertEqual(str(c), str(py)) + + def test_raw_equal(self): + c, py = self._make_C_and_Py(*self.now_ts_args) + self.assertEqual(c.raw(), py.raw()) + + def test_equal(self): + c, py = self._make_C_and_Py(*self.now_ts_args) + self.assertEqual(c, py) + + def test_hash_equal(self): + c, py = self._make_C_and_Py(*self.now_ts_args) + self.assertEqual(hash(c), hash(py)) + + def test_py_hash_32_64_bit(self): + # We happen to know that on a 32-bit platform, the hashcode + # of the c version should be exactly + # -1419374591 + # and the 64-bit should be exactly: + # -3850693964765720575 + # Fake out the python version to think it's on a 32-bit + # platform and test the same; also verify 64 bit + from persistent import timestamp as MUT + 
bit_32_hash = -1419374591 + bit_64_hash = -3850693964765720575 + orig_maxint = MUT._MAXINT + + is_32_bit_hash = orig_maxint == MAX_32_BITS + + orig_c_long = None + c_int64 = None + c_int32 = None + if hasattr(MUT, 'c_long'): + import ctypes + orig_c_long = MUT.c_long + c_int32 = ctypes.c_int32 + c_int64 = ctypes.c_int64 + # win32, even on 64-bit long, has funny sizes + is_32_bit_hash = c_int32 == ctypes.c_long + + try: + MUT._MAXINT = MAX_32_BITS + MUT.c_long = c_int32 + + py = self._makePy(*self.now_ts_args) + self.assertEqual(hash(py), bit_32_hash) + + MUT._MAXINT = int(2 ** 63 - 1) + MUT.c_long = c_int64 + # call __hash__ directly to avoid interpreter truncation + # in hash() on 32-bit platforms + if not self._is_jython: + self.assertEqual(py.__hash__(), bit_64_hash) + else: # pragma: no cover + # Jython 2.7's ctypes module doesn't properly + # implement the 'value' attribute by truncating. + # (It does for native calls, but not visibly to Python). + # Therefore we get back the full python long. The actual + # hash() calls are correct, though, because the JVM uses + # 32-bit ints for its hashCode methods. + self.assertEqual( + py.__hash__(), + 384009219096809580920179179233996861765753210540033) + finally: + MUT._MAXINT = orig_maxint + if orig_c_long is not None: + MUT.c_long = orig_c_long + else: # pragma: no cover + del MUT.c_long + + # These are *usually* aliases, but aren't required + # to be (and aren't under Jython 2.7). 
+ expected_hash = bit_32_hash if is_32_bit_hash else bit_64_hash + self.assertEqual(py.__hash__(), expected_hash) + + def test_hash_equal_constants(self): + # The simple constants make it easier to diagnose + # a difference in algorithms + import persistent.timestamp as MUT + # We get 32-bit hash values on 32-bit platforms, or on the JVM + # OR on Windows (whether compiled in 64 or 32-bit mode) + is_32_bit = MUT._MAXINT == (2**31 - 1) or self._is_jython or sys.platform == 'win32' + + c, py = self._make_C_and_Py(b'\x00\x00\x00\x00\x00\x00\x00\x00') + self.assertEqual(hash(c), 8) + self.assertEqual(hash(c), hash(py)) + + c, py = self._make_C_and_Py(b'\x00\x00\x00\x00\x00\x00\x00\x01') + self.assertEqual(hash(c), 9) + self.assertEqual(hash(c), hash(py)) + + c, py = self._make_C_and_Py(b'\x00\x00\x00\x00\x00\x00\x01\x00') + self.assertEqual(hash(c), 1000011) + self.assertEqual(hash(c), hash(py)) + + # overflow kicks in here on 32-bit platforms + c, py = self._make_C_and_Py(b'\x00\x00\x00\x00\x00\x01\x00\x00') + expected = -721379967 if is_32_bit else 1000006000001 + self.assertEqual(hash(c), expected) + self.assertEqual(hash(c), hash(py)) + + c, py = self._make_C_and_Py(b'\x00\x00\x00\x00\x01\x00\x00\x00') + expected = 583896275 if is_32_bit else 1000009000027000019 + self.assertEqual(hash(c), expected) + self.assertEqual(hash(c), hash(py)) + + # Overflow kicks in at this point on 64-bit platforms + c, py = self._make_C_and_Py(b'\x00\x00\x00\x01\x00\x00\x00\x00') + expected = 1525764953 if is_32_bit else -4442925868394654887 + self.assertEqual(hash(c), expected) + self.assertEqual(hash(c), hash(py)) + + c, py = self._make_C_and_Py(b'\x00\x00\x01\x00\x00\x00\x00\x00') + expected = -429739973 if is_32_bit else -3993531167153147845 + self.assertEqual(hash(c), expected) + self.assertEqual(hash(c), hash(py)) + + c, py = self._make_C_and_Py(b'\x01\x00\x00\x00\x00\x00\x00\x00') + expected = 263152323 if is_32_bit else -3099646879006235965 + self.assertEqual(hash(c), expected) 
+ self.assertEqual(hash(c), hash(py)) + + def test_ordering(self): + small_c = self._makeC(b'\x00\x00\x00\x00\x00\x00\x00\x01') + small_py = self._makePy(b'\x00\x00\x00\x00\x00\x00\x00\x01') + + big_c = self._makeC(b'\x01\x00\x00\x00\x00\x00\x00\x00') + big_py = self._makePy(b'\x01\x00\x00\x00\x00\x00\x00\x00') + + self.assertTrue(small_py < big_py) + self.assertTrue(small_py <= big_py) + + self.assertTrue(small_py < big_c) + self.assertTrue(small_py <= big_c) + self.assertTrue(small_py <= small_c) + + self.assertTrue(small_c < big_c) + self.assertTrue(small_c <= big_c) + + self.assertTrue(small_c <= big_py) + self.assertTrue(big_c > small_py) + self.assertTrue(big_c >= big_py) + + self.assertFalse(big_c == small_py) + self.assertFalse(small_py == big_c) + + self.assertTrue(big_c != small_py) + self.assertTrue(small_py != big_c) + + def test_seconds_precision(self, seconds=6.123456789): + # https://github.com/zopefoundation/persistent/issues/41 + args = (2001, 2, 3, 4, 5, seconds) + c = self._makeC(*args) + py = self._makePy(*args) + + self.assertEqual(c, py) + self.assertEqual(c.second(), py.second()) + + py2 = self._makePy(c.raw()) + self.assertEqual(py2, c) + + c2 = self._makeC(c.raw()) + self.assertEqual(c2, c) + + def test_seconds_precision_half(self): + # make sure our rounding matches + self.test_seconds_precision(seconds=6.5) + self.test_seconds_precision(seconds=6.55) + self.test_seconds_precision(seconds=6.555) + self.test_seconds_precision(seconds=6.5555) + self.test_seconds_precision(seconds=6.55555) + self.test_seconds_precision(seconds=6.555555) + self.test_seconds_precision(seconds=6.5555555) + self.test_seconds_precision(seconds=6.55555555) + self.test_seconds_precision(seconds=6.555555555) + +def test_suite(): + return unittest.defaultTestLoader.loadTestsFromName(__name__) diff --git a/thesisenv/lib/python3.6/site-packages/persistent/tests/test_wref.py b/thesisenv/lib/python3.6/site-packages/persistent/tests/test_wref.py new file mode 100644 index 
0000000..db4c502 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/persistent/tests/test_wref.py @@ -0,0 +1,338 @@ +############################################################################## +# +# Copyright (c) 2003 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +import unittest + + +class WeakRefTests(unittest.TestCase): + + def _getTargetClass(self): + from persistent.wref import WeakRef + return WeakRef + + def _makeOne(self, ob): + return self._getTargetClass()(ob) + + def test_ctor_target_wo_jar(self): + target = _makeTarget() + wref = self._makeOne(target) + self.assertTrue(wref._v_ob is target) + self.assertEqual(wref.oid, b'OID') + self.assertTrue(wref.dm is None) + self.assertFalse('database_name' in wref.__dict__) + + def test_ctor_target_w_jar(self): + target = _makeTarget() + target._p_jar = jar = _makeJar() + wref = self._makeOne(target) + self.assertTrue(wref._v_ob is target) + self.assertEqual(wref.oid, b'OID') + self.assertTrue(wref.dm is jar) + self.assertEqual(wref.database_name, 'testing') + + def test___call___target_in_volatile(self): + target = _makeTarget() + target._p_jar = jar = _makeJar() + wref = self._makeOne(target) + self.assertTrue(wref() is target) + + def test___call___target_in_jar(self): + target = _makeTarget() + target._p_jar = jar = _makeJar() + jar[target._p_oid] = target + wref = self._makeOne(target) + del wref._v_ob + self.assertTrue(wref() is target) + + def test___call___target_not_in_jar(self): + target = _makeTarget() + 
target._p_jar = jar = _makeJar() + wref = self._makeOne(target) + del wref._v_ob + self.assertTrue(wref() is None) + + def test___hash___w_target(self): + target = _makeTarget() + target._p_jar = jar = _makeJar() + wref = self._makeOne(target) + self.assertEqual(hash(wref), hash(target)) + + def test___hash___wo_target(self): + target = _makeTarget() + target._p_jar = jar = _makeJar() + wref = self._makeOne(target) + del wref._v_ob + self.assertRaises(TypeError, hash, wref) + + def test___eq___w_non_weakref(self): + target = _makeTarget() + lhs = self._makeOne(target) + self.assertNotEqual(lhs, object()) + # Test belt-and-suspenders directly + self.assertFalse(lhs.__eq__(object())) + + def test___eq___w_both_same_target(self): + target = _makeTarget() + lhs = self._makeOne(target) + rhs_target = _makeTarget() + rhs = self._makeOne(target) + self.assertEqual(lhs, rhs) + + def test___eq___w_both_different_targets(self): + lhs_target = _makeTarget(oid='LHS') + lhs = self._makeOne(lhs_target) + rhs_target = _makeTarget(oid='RHS') + rhs = self._makeOne(rhs_target) + self.assertNotEqual(lhs, rhs) + + def test___eq___w_lhs_gone_target_not_in_jar(self): + target = _makeTarget() + target._p_jar = jar = _makeJar() + lhs = self._makeOne(target) + del lhs._v_ob + rhs = self._makeOne(target) + self.assertRaises(TypeError, lambda: lhs == rhs) + + def test___eq___w_lhs_gone_target_in_jar(self): + target = _makeTarget() + target._p_jar = jar = _makeJar() + jar[target._p_oid] = target + lhs = self._makeOne(target) + del lhs._v_ob + rhs_target = _makeTarget() + rhs = self._makeOne(target) + self.assertEqual(lhs, rhs) + + def test___eq___w_rhs_gone_target_not_in_jar(self): + target = _makeTarget() + target._p_jar = jar = _makeJar() + lhs = self._makeOne(target) + rhs = self._makeOne(target) + del rhs._v_ob + self.assertRaises(TypeError, lambda: lhs == rhs) + + def test___eq___w_rhs_gone_target_in_jar(self): + target = _makeTarget() + target._p_jar = jar = _makeJar() + 
jar[target._p_oid] = target + lhs = self._makeOne(target) + rhs = self._makeOne(target) + del rhs._v_ob + self.assertEqual(lhs, rhs) + + +class PersistentWeakKeyDictionaryTests(unittest.TestCase): + + def _getTargetClass(self): + from persistent.wref import PersistentWeakKeyDictionary + return PersistentWeakKeyDictionary + + def _makeOne(self, adict, **kw): + return self._getTargetClass()(adict, **kw) + + def test_ctor_w_adict_none_no_kwargs(self): + pwkd = self._makeOne(None) + self.assertEqual(pwkd.data, {}) + + def test_ctor_w_adict_as_dict(self): + jar = _makeJar() + key = jar['key'] = _makeTarget(oid='KEY') + key._p_jar = jar + value = jar['value'] = _makeTarget(oid='VALUE') + value._p_jar = jar + pwkd = self._makeOne({key: value}) + self.assertTrue(pwkd[key] is value) + + def test_ctor_w_adict_as_items(self): + jar = _makeJar() + key = jar['key'] = _makeTarget(oid='KEY') + key._p_jar = jar + value = jar['value'] = _makeTarget(oid='VALUE') + value._p_jar = jar + pwkd = self._makeOne([(key, value)]) + self.assertTrue(pwkd[key] is value) + + def test___getstate___empty(self): + pwkd = self._makeOne(None) + self.assertEqual(pwkd.__getstate__(), {'data': []}) + + def test___getstate___filled(self): + from persistent.wref import WeakRef + jar = _makeJar() + key = jar['key'] = _makeTarget(oid='KEY') + key._p_jar = jar + value = jar['value'] = _makeTarget(oid='VALUE') + value._p_jar = jar + pwkd = self._makeOne([(key, value)]) + self.assertEqual(pwkd.__getstate__(), + {'data': [(WeakRef(key), value)]}) + + def test___setstate___empty(self): + from persistent.wref import WeakRef + jar = _makeJar() + KEY = b'KEY' + KEY2 = b'KEY2' + KEY3 = b'KEY3' + VALUE = b'VALUE' + VALUE2 = b'VALUE2' + VALUE3 = b'VALUE3' + key = jar[KEY] = _makeTarget(oid=KEY) + key._p_jar = jar + kref = WeakRef(key) + value = jar[VALUE] = _makeTarget(oid=VALUE) + value._p_jar = jar + key2 = _makeTarget(oid=KEY2) + key2._p_jar = jar # not findable + kref2 = WeakRef(key2) + del kref2._v_ob # force a 
miss + value2 = jar[VALUE2] = _makeTarget(oid=VALUE2) + value2._p_jar = jar + key3 = jar[KEY3] = _makeTarget(oid=KEY3) # findable + key3._p_jar = jar + kref3 = WeakRef(key3) + del kref3._v_ob # force a miss, but win in the lookup + value3 = jar[VALUE3] = _makeTarget(oid=VALUE3) + value3._p_jar = jar + pwkd = self._makeOne(None) + pwkd.__setstate__({'data': + [(kref, value), (kref2, value2), (kref3, value3)]}) + self.assertTrue(pwkd[key] is value) + self.assertTrue(pwkd.get(key2) is None) + self.assertTrue(pwkd[key3] is value3) + + def test___setitem__(self): + jar = _makeJar() + key = jar['key'] = _makeTarget(oid='KEY') + key._p_jar = jar + value = jar['value'] = _makeTarget(oid='VALUE') + value._p_jar = jar + pwkd = self._makeOne(None) + pwkd[key] = value + self.assertTrue(pwkd[key] is value) + + def test___getitem___miss(self): + jar = _makeJar() + key = jar['key'] = _makeTarget(oid='KEY') + key._p_jar = jar + value = jar['value'] = _makeTarget(oid='VALUE') + value._p_jar = jar + pwkd = self._makeOne(None) + def _try(): + return pwkd[key] + self.assertRaises(KeyError, _try) + + def test___delitem__(self): + jar = _makeJar() + key = jar['key'] = _makeTarget(oid='KEY') + key._p_jar = jar + value = jar['value'] = _makeTarget(oid='VALUE') + value._p_jar = jar + pwkd = self._makeOne([(key, value)]) + del pwkd[key] + self.assertTrue(pwkd.get(key) is None) + + def test___delitem___miss(self): + jar = _makeJar() + key = jar['key'] = _makeTarget(oid='KEY') + key._p_jar = jar + value = jar['value'] = _makeTarget(oid='VALUE') + value._p_jar = jar + pwkd = self._makeOne(None) + def _try(): + del pwkd[key] + self.assertRaises(KeyError, _try) + + def test_get_miss_w_explicit_default(self): + jar = _makeJar() + key = jar['key'] = _makeTarget(oid='KEY') + key._p_jar = jar + value = jar['value'] = _makeTarget(oid='VALUE') + value._p_jar = jar + pwkd = self._makeOne(None) + self.assertTrue(pwkd.get(key, value) is value) + + def test___contains___miss(self): + jar = _makeJar() + 
key = jar['key'] = _makeTarget(oid='KEY') + key._p_jar = jar + pwkd = self._makeOne(None) + self.assertFalse(key in pwkd) + + def test___contains___hit(self): + jar = _makeJar() + key = jar['key'] = _makeTarget(oid='KEY') + key._p_jar = jar + value = jar['value'] = _makeTarget(oid='VALUE') + value._p_jar = jar + pwkd = self._makeOne([(key, value)]) + self.assertTrue(key in pwkd) + + def test___iter___empty(self): + jar = _makeJar() + pwkd = self._makeOne(None) + self.assertEqual(list(pwkd), []) + + def test___iter___filled(self): + jar = _makeJar() + key = jar['key'] = _makeTarget(oid='KEY') + key._p_jar = jar + value = jar['value'] = _makeTarget(oid='VALUE') + value._p_jar = jar + pwkd = self._makeOne([(key, value)]) + self.assertEqual(list(pwkd), [key]) + + def test_update_w_other_pwkd(self): + jar = _makeJar() + key = jar['key'] = _makeTarget(oid='KEY') + key._p_jar = jar + value = jar['value'] = _makeTarget(oid='VALUE') + value._p_jar = jar + source = self._makeOne([(key, value)]) + target = self._makeOne(None) + target.update(source) + self.assertTrue(target[key] is value) + + def test_update_w_dict(self): + jar = _makeJar() + key = jar['key'] = _makeTarget(oid='KEY') + key._p_jar = jar + value = jar['value'] = _makeTarget(oid='VALUE') + value._p_jar = jar + source = dict([(key, value)]) + target = self._makeOne(None) + target.update(source) + self.assertTrue(target[key] is value) + + +def _makeTarget(oid=b'OID'): + from persistent import Persistent + class Derived(Persistent): + def __hash__(self): + return hash(self._p_oid) + def __eq__(self, other): + return self._p_oid == other._p_oid + def __repr__(self): # pragma: no cover + return 'Derived: %s' % self._p_oid + derived = Derived() + derived._p_oid = oid + return derived + +def _makeJar(): + class _DB(object): + database_name = 'testing' + class _Jar(dict): + db = lambda self: _DB() + return _Jar() + +def test_suite(): + return unittest.defaultTestLoader.loadTestsFromName(__name__) diff --git 
a/thesisenv/lib/python3.6/site-packages/persistent/tests/utils.py b/thesisenv/lib/python3.6/site-packages/persistent/tests/utils.py new file mode 100644 index 0000000..41eb2ba --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/persistent/tests/utils.py @@ -0,0 +1,64 @@ + +class ResettingJar(object): + """Testing stub for _p_jar attribute. + """ + def __init__(self): + from persistent import PickleCache # XXX stub it! + from persistent.interfaces import IPersistentDataManager + from zope.interface import directlyProvides + self.cache = self._cache = PickleCache(self) + self.oid = 1 + self.registered = {} + directlyProvides(self, IPersistentDataManager) + + def add(self, obj): + import struct + obj._p_oid = struct.pack(">Q", self.oid) + self.oid += 1 + obj._p_jar = self + self.cache[obj._p_oid] = obj + + + # the following methods must be implemented to be a jar + + def setstate(self, obj): + # Trivial setstate() implementation that just re-initializes + # the object. This isn't what setstate() is supposed to do, + # but it suffices for the tests. + obj.__class__.__init__(obj) + +class RememberingJar(object): + """Testing stub for _p_jar attribute. + """ + def __init__(self): + from persistent import PickleCache # XXX stub it! + self.cache = PickleCache(self) + self.oid = 1 + self.registered = {} + + def add(self, obj): + import struct + obj._p_oid = struct.pack(">Q", self.oid) + self.oid += 1 + obj._p_jar = self + self.cache[obj._p_oid] = obj + # Remember object's state for later. + self.obj = obj + self.remembered = obj.__getstate__() + + + def fake_commit(self): + self.remembered = self.obj.__getstate__() + self.obj._p_changed = 0 + + # the following methods must be implemented to be a jar + + def register(self, obj): + self.registered[obj] = 1 + + def setstate(self, obj): + # Trivial setstate() implementation that resets the object's + # state as of the time it was added to the jar. 
+ # This isn't what setstate() is supposed to do, + # but it suffices for the tests. + obj.__setstate__(self.remembered) diff --git a/thesisenv/lib/python3.6/site-packages/persistent/timestamp.py b/thesisenv/lib/python3.6/site-packages/persistent/timestamp.py new file mode 100644 index 0000000..168db20 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/persistent/timestamp.py @@ -0,0 +1,220 @@ +############################################################################## +# +# Copyright (c) 2011 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +__all__ = ('TimeStamp',) + +import datetime +import math +import struct +import sys + +from persistent._compat import PURE_PYTHON + +_RAWTYPE = bytes +_MAXINT = sys.maxsize + +_ZERO = b'\x00' * 8 + +try: + # Make sure to overflow and wraparound just + # like the C code does. + from ctypes import c_long +except ImportError: # pragma: no cover + # XXX: This is broken on 64-bit windows, where + # sizeof(long) != sizeof(Py_ssize_t) + # sizeof(long) == 4, sizeof(Py_ssize_t) == 8 + # It can be fixed by setting _MAXINT = 2 ** 31 - 1 on all + # win32 platforms, but then that breaks PyPy3 64 bit for an unknown + # reason. 
+ def _wraparound(x): + return int(((x + (_MAXINT + 1)) & ((_MAXINT << 1) + 1)) - (_MAXINT + 1)) +else: + def _wraparound(x): + return c_long(x).value + +class _UTC(datetime.tzinfo): + def tzname(self): + return 'UTC' + def utcoffset(self, when): + return datetime.timedelta(0, 0, 0) + def dst(self): + return 0 + def fromutc(self, dt): + return dt + +def _makeUTC(y, mo, d, h, mi, s): + s = round(s, 6) # microsecond precision, to match the C implementation + usec, sec = math.modf(s) + sec = int(sec) + usec = int(usec * 1e6) + return datetime.datetime(y, mo, d, h, mi, sec, usec, tzinfo=_UTC()) + +_EPOCH = _makeUTC(1970, 1, 1, 0, 0, 0) + +_TS_SECOND_BYTES_BIAS = 60.0 / (1<<16) / (1<<16) + +def _makeRaw(year, month, day, hour, minute, second): + a = (((year - 1900) * 12 + month - 1) * 31 + day - 1) + a = (a * 24 + hour) * 60 + minute + b = int(second / _TS_SECOND_BYTES_BIAS) # Don't round() this; the C version does simple truncation + return struct.pack('>II', a, b) + +def _parseRaw(octets): + a, b = struct.unpack('>II', octets) + minute = a % 60 + hour = a // 60 % 24 + day = a // (60 * 24) % 31 + 1 + month = a // (60 * 24 * 31) % 12 + 1 + year = a // (60 * 24 * 31 * 12) + 1900 + second = b * _TS_SECOND_BYTES_BIAS + return (year, month, day, hour, minute, second) + + +class pyTimeStamp(object): + __slots__ = ('_raw', '_elements') + + def __init__(self, *args): + self._elements = None + if len(args) == 1: + raw = args[0] + if not isinstance(raw, _RAWTYPE): + raise TypeError('Raw octets must be of type: %s' % _RAWTYPE) + if len(raw) != 8: + raise TypeError('Raw must be 8 octets') + self._raw = raw + elif len(args) == 6: + self._raw = _makeRaw(*args) + # Note that we don't preserve the incoming arguments in self._elements, + # we derive them from the raw value. This is because the incoming + # seconds value could have more precision than would survive + # in the raw data, so we must be consistent. 
+ else: + raise TypeError('Pass either a single 8-octet arg ' + 'or 5 integers and a float') + + self._elements = _parseRaw(self._raw) + + def raw(self): + return self._raw + + def __repr__(self): + return repr(self._raw) + + def __str__(self): + return "%4.4d-%2.2d-%2.2d %2.2d:%2.2d:%09.6f" % ( + self.year(), self.month(), self.day(), + self.hour(), self.minute(), + self.second()) + + def year(self): + return self._elements[0] + + def month(self): + return self._elements[1] + + def day(self): + return self._elements[2] + + def hour(self): + return self._elements[3] + + def minute(self): + return self._elements[4] + + def second(self): + return self._elements[5] + + def timeTime(self): + """ -> seconds since epoch, as a float. + """ + delta = _makeUTC(*self._elements) - _EPOCH + return delta.days * 86400 + delta.seconds + delta.microseconds / 1e6 + + def laterThan(self, other): + """ Return a timestamp instance which is later than 'other'. + + If self already qualifies, return self. + + Otherwise, return a new instance one moment later than 'other'. + """ + if not isinstance(other, self.__class__): + raise ValueError() + if self._raw > other._raw: + return self + a, b = struct.unpack('>II', other._raw) + later = struct.pack('>II', a, b + 1) + return self.__class__(later) + + def __eq__(self, other): + try: + return self.raw() == other.raw() + except AttributeError: + return NotImplemented + + def __ne__(self, other): + try: + return self.raw() != other.raw() + except AttributeError: + return NotImplemented + + def __hash__(self): + # Match the C implementation + a = bytearray(self._raw) + x = a[0] << 7 + for i in a: + x = (1000003 * x) ^ i + x ^= 8 + + x = _wraparound(x) + + if x == -1: # pragma: no cover + # The C version has this condition, but it's not clear + # why; it's also not immediately obvious what bytestring + # would generate this---hence the no-cover + x = -2 + return x + + # Now the rest of the comparison operators + # Sigh. 
Python 2.6 doesn't have functools.total_ordering + # so we have to do it by hand + def __lt__(self, other): + try: + return self.raw() < other.raw() + except AttributeError: + return NotImplemented + + def __gt__(self, other): + try: + return self.raw() > other.raw() + except AttributeError: + return NotImplemented + + def __le__(self, other): + try: + return self.raw() <= other.raw() + except AttributeError: + return NotImplemented + + def __ge__(self, other): + try: + return self.raw() >= other.raw() + except AttributeError: + return NotImplemented + + +try: + from persistent._timestamp import TimeStamp as CTimeStamp +except ImportError: # pragma: no cover + CTimeStamp = None + +TimeStamp = pyTimeStamp if PURE_PYTHON or CTimeStamp is None else CTimeStamp diff --git a/thesisenv/lib/python3.6/site-packages/persistent/wref.py b/thesisenv/lib/python3.6/site-packages/persistent/wref.py new file mode 100644 index 0000000..2ebd608 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/persistent/wref.py @@ -0,0 +1,129 @@ +############################################################################## +# +# Copyright (c) 2003 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""ZODB-based persistent weakrefs +""" + +from persistent import Persistent + +WeakRefMarker = object() + +class WeakRef(object): + """Persistent weak references + + Persistent weak references are used much like Python weak + references. 
The major difference is that you can't specify an + object to be called when the object is removed from the database. + """ + # We set _p_oid to a marker so that the serialization system can + # provide special handling of weakrefs. + _p_oid = WeakRefMarker + + def __init__(self, ob): + self._v_ob = ob + self.oid = ob._p_oid + self.dm = ob._p_jar + if self.dm is not None: + self.database_name = self.dm.db().database_name + + def __call__(self): + try: + return self._v_ob + except AttributeError: + try: + self._v_ob = self.dm[self.oid] + except (KeyError, AttributeError): + return None + return self._v_ob + + def __hash__(self): + self = self() + if self is None: + raise TypeError('Weakly-referenced object has gone away') + return hash(self) + + def __eq__(self, other): + if not isinstance(other, WeakRef): + return False + self = self() + if self is None: + raise TypeError('Weakly-referenced object has gone away') + other = other() + if other is None: + raise TypeError('Weakly-referenced object has gone away') + + return self == other + + +class PersistentWeakKeyDictionary(Persistent): + """Persistent weak key dictionary + + This is akin to WeakKeyDictionaries. Note, however, that removal + of items is extremely lazy. + """ + # TODO: It's expensive trying to load dead objects from the database. + # It would be helpful if the data manager/connection cached these. + + def __init__(self, adict=None, **kwargs): + self.data = {} + if adict is not None: + keys = getattr(adict, "keys", None) + if keys is None: + adict = dict(adict) + self.update(adict) + # XXX 'kwargs' is pointless, because keys must be strings, but we + # are going to try (and fail) to wrap a WeakRef around them. 
+ if kwargs: # pragma: no cover + self.update(kwargs) + + def __getstate__(self): + state = Persistent.__getstate__(self) + state['data'] = list(state['data'].items()) + return state + + def __setstate__(self, state): + state['data'] = dict([ + (k, v) for (k, v) in state['data'] + if k() is not None + ]) + Persistent.__setstate__(self, state) + + def __setitem__(self, key, value): + self.data[WeakRef(key)] = value + + def __getitem__(self, key): + return self.data[WeakRef(key)] + + def __delitem__(self, key): + del self.data[WeakRef(key)] + + def get(self, key, default=None): + """D.get(k[, d]) -> D[k] if k in D, else d. + """ + return self.data.get(WeakRef(key), default) + + def __contains__(self, key): + return WeakRef(key) in self.data + + def __iter__(self): + for k in self.data: + yield k() + + def update(self, adict): + if isinstance(adict, PersistentWeakKeyDictionary): + self.data.update(adict.data) + else: + for k, v in adict.items(): + self.data[WeakRef(k)] = v + + # TODO: May need more methods and tests. 
diff --git a/thesisenv/lib/python3.6/site-packages/plone.uuid-1.0.5-py3.6-nspkg.pth b/thesisenv/lib/python3.6/site-packages/plone.uuid-1.0.5-py3.6-nspkg.pth new file mode 100644 index 0000000..882cb97 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/plone.uuid-1.0.5-py3.6-nspkg.pth @@ -0,0 +1 @@ +import sys, types, os;has_mfs = sys.version_info > (3, 5);p = os.path.join(sys._getframe(1).f_locals['sitedir'], *('plone',));importlib = has_mfs and __import__('importlib.util');has_mfs and __import__('importlib.machinery');m = has_mfs and sys.modules.setdefault('plone', importlib.util.module_from_spec(importlib.machinery.PathFinder.find_spec('plone', [os.path.dirname(p)])));m = m or sys.modules.setdefault('plone', types.ModuleType('plone'));mp = (m or []) and m.__dict__.setdefault('__path__',[]);(p not in mp) and mp.append(p) diff --git a/thesisenv/lib/python3.6/site-packages/plone.uuid-1.0.5-py3.6.egg-info/PKG-INFO b/thesisenv/lib/python3.6/site-packages/plone.uuid-1.0.5-py3.6.egg-info/PKG-INFO new file mode 100644 index 0000000..3a2acfb --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/plone.uuid-1.0.5-py3.6.egg-info/PKG-INFO @@ -0,0 +1,187 @@ +Metadata-Version: 2.1 +Name: plone.uuid +Version: 1.0.5 +Summary: UUIDs for content items +Home-page: https://github.com/plone/plone.uuid +Author: Martin Aspeli +Author-email: optilude@gmail.com +License: BSD +Description: plone.uuid + ========== + + This is a minimal package that can be used to obtain a universally unique + identifier (UUID) for an object. + + The default implementation uses the Python standard library ``uuid`` module + to generate an RFC 4122-compliant UUID, using the ``uuid4()`` function. It + will assign a UUID upon object creation (by subscribing to + ``IObjectCreatedEvent`` from ``zope.lifecycleevent``) and store it in an + attribute on the object. + + Why use an attribute and not annotations? The most common form of annotation + is the one provided by ``IAttributeAnnotations``. 
This stores annotations in + a BTree in an attribute called ``__annotations__``, which means that + annotation values do not end up in the same ZODB persistent object as the + parent. This is good for "large" values, but not very good for small ones + that are frequently required, as it requires a separate ZODB object load. + + Simple usage + ============ + + To automatically assign a UUID to your objects using the default + implementation outlined above, you should: + + * Make sure they implement ``plone.uuid.interfaces.IAttributeUUID``. You + can do this in code, via the ``implements()`` directive, or in ZCML, with + a statement like:: + + + + + + * Make sure that an ``IObjectCreatedEvent`` is fired for this object when it + is first created. + + Once the event handler has triggered, you can obtain a UUID by adapting the + object to the ``IUUID`` interface:: + + from plone.uuid.interfaces import IUUID + uuid = IUUID(context) + + The ``uuid`` variable will now be a (byte) string containing a UUID. If the + UUID has not yet been assigned, adaptation will fail with a ``TypeError``. + + If you would rather get ``None`` instead of a ``TypeError``, you can do:: + + uuid = IUUID(context, None) + + UUID view + ========= + + If you require a UUID in a page template or remotely, you can use the + ``@@uuid`` view, which is registered for all objects that provide the + ``IUUIDAware`` marker interface (which is a super-interface of the + ``IAttributeUUID`` marker seen above). + + For example:: + +
    + ... +
    + + The view simply returns the UUID string as looked up by the ``IUUID`` adapter. + + Customising behaviour + ===================== + + There are two primary customisation points for this package: + + * You can change the default UUID generating algorithm by overriding the + unnamed utility providing the ``IUUIDGenerator`` interface. The default + implementation simply calls ``uuid.uuid4()`` and casts the result to a + ``str``. + + * You can change the UUID storage by providing a custom ``IUUID`` adapter + implementation. If you do this, you must also provide a mechanism for + assigning UUIDs upon object creation, usually via an event handler. To + obtain a UUID, use the ``IUUIDGenerator`` interface:: + + from zope.component import getUtility + from plone.uuid.interfaces import IUUIDGenerator + + generator = getUtility(IUUIDGenerator) + uuid = generator() + + You should also make sure that instances with a UUID provide a sub-interface + of ``plone.uuid.interfaces.IUUIDAware``. + + Changelog + ========= + + 1.0.5 (2018-01-18) + ------------------ + + Bug fixes: + + - Fix package dependencies. + [gforcada] + + - Fix documentation and uuid generator class name to reflect the fact that we use the ``uuid4`` implementation instead of ``uuid1``. + [thet] + + + 1.0.4 (2016-06-02) + ------------------ + + Bug fixes: + + - Update setup.py url to point to github. + [esteele] + + - Fixed issues preventing tests passing on Python 3 + [datakurre] + + + 1.0.3 (2012-05-31) + ------------------ + + - Use zope.browserpage. + [hannosch] + + - Defensive UUID assignment in addAttributeUUID() handler: keep existing + UUID value if handler called more than once, except in case of object + copy event, where original and destination should have distinct UUID. + [seanupton] + + + 1.0.2 - 2011-10-18 + ------------------ + + - Generate UUID without dashes. + [elro] + + + 1.0.1 - 2011-05-20 + ------------------ + + - Relicense under modified BSD license. 
+ See http://plone.org/foundation/materials/foundation-resolutions/plone-framework-components-relicensing-policy + [davisagli] + + + 1.0 - 2011-05-13 + ---------------- + + - Release 1.0 Final + [esteele] + + - Add MANIFEST.in. + [WouterVH] + + + 1.0b2 - 2011-01-03 + ------------------ + + - Add MutableUUID component + [toutpt] + + + 1.0b1 - 2010-11-27 + ------------------ + + - Initial release + + +Keywords: plone uuid +Platform: UNKNOWN +Classifier: Framework :: Plone +Classifier: Framework :: Plone :: 4.3 +Classifier: Framework :: Plone :: 5.0 +Classifier: Framework :: Plone :: 5.1 +Classifier: Framework :: Plone :: 5.2 +Classifier: Programming Language :: Python +Classifier: License :: OSI Approved :: BSD License +Classifier: Programming Language :: Python :: 2.6 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3.5 +Provides-Extra: test diff --git a/thesisenv/lib/python3.6/site-packages/plone.uuid-1.0.5-py3.6.egg-info/SOURCES.txt b/thesisenv/lib/python3.6/site-packages/plone.uuid-1.0.5-py3.6.egg-info/SOURCES.txt new file mode 100644 index 0000000..8d1a196 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/plone.uuid-1.0.5-py3.6.egg-info/SOURCES.txt @@ -0,0 +1,25 @@ +CHANGES.rst +CONTRIBUTING.rst +MANIFEST.in +README.rst +setup.cfg +setup.py +docs/INSTALL.txt +docs/LICENSE.txt +plone/__init__.py +plone.uuid.egg-info/PKG-INFO +plone.uuid.egg-info/SOURCES.txt +plone.uuid.egg-info/dependency_links.txt +plone.uuid.egg-info/entry_points.txt +plone.uuid.egg-info/namespace_packages.txt +plone.uuid.egg-info/not-zip-safe +plone.uuid.egg-info/requires.txt +plone.uuid.egg-info/top_level.txt +plone/uuid/__init__.py +plone/uuid/adapter.py +plone/uuid/browser.py +plone/uuid/configure.zcml +plone/uuid/generator.py +plone/uuid/handlers.py +plone/uuid/interfaces.py +plone/uuid/tests.py \ No newline at end of file diff --git a/thesisenv/lib/python3.6/site-packages/plone.uuid-1.0.5-py3.6.egg-info/dependency_links.txt 
b/thesisenv/lib/python3.6/site-packages/plone.uuid-1.0.5-py3.6.egg-info/dependency_links.txt new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/plone.uuid-1.0.5-py3.6.egg-info/dependency_links.txt @@ -0,0 +1 @@ + diff --git a/thesisenv/lib/python3.6/site-packages/plone.uuid-1.0.5-py3.6.egg-info/entry_points.txt b/thesisenv/lib/python3.6/site-packages/plone.uuid-1.0.5-py3.6.egg-info/entry_points.txt new file mode 100644 index 0000000..b0dac65 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/plone.uuid-1.0.5-py3.6.egg-info/entry_points.txt @@ -0,0 +1,2 @@ + + \ No newline at end of file diff --git a/thesisenv/lib/python3.6/site-packages/plone.uuid-1.0.5-py3.6.egg-info/installed-files.txt b/thesisenv/lib/python3.6/site-packages/plone.uuid-1.0.5-py3.6.egg-info/installed-files.txt new file mode 100644 index 0000000..3ee5c38 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/plone.uuid-1.0.5-py3.6.egg-info/installed-files.txt @@ -0,0 +1,24 @@ +../plone.uuid-1.0.5-py3.6-nspkg.pth +../plone/uuid/__init__.py +../plone/uuid/__pycache__/__init__.cpython-36.pyc +../plone/uuid/__pycache__/adapter.cpython-36.pyc +../plone/uuid/__pycache__/browser.cpython-36.pyc +../plone/uuid/__pycache__/generator.cpython-36.pyc +../plone/uuid/__pycache__/handlers.cpython-36.pyc +../plone/uuid/__pycache__/interfaces.cpython-36.pyc +../plone/uuid/__pycache__/tests.cpython-36.pyc +../plone/uuid/adapter.py +../plone/uuid/browser.py +../plone/uuid/configure.zcml +../plone/uuid/generator.py +../plone/uuid/handlers.py +../plone/uuid/interfaces.py +../plone/uuid/tests.py +PKG-INFO +SOURCES.txt +dependency_links.txt +entry_points.txt +namespace_packages.txt +not-zip-safe +requires.txt +top_level.txt diff --git a/thesisenv/lib/python3.6/site-packages/plone.uuid-1.0.5-py3.6.egg-info/namespace_packages.txt b/thesisenv/lib/python3.6/site-packages/plone.uuid-1.0.5-py3.6.egg-info/namespace_packages.txt new file mode 100644 index 
0000000..1d2c38d --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/plone.uuid-1.0.5-py3.6.egg-info/namespace_packages.txt @@ -0,0 +1 @@ +plone diff --git a/thesisenv/lib/python3.6/site-packages/plone.uuid-1.0.5-py3.6.egg-info/not-zip-safe b/thesisenv/lib/python3.6/site-packages/plone.uuid-1.0.5-py3.6.egg-info/not-zip-safe new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/plone.uuid-1.0.5-py3.6.egg-info/not-zip-safe @@ -0,0 +1 @@ + diff --git a/thesisenv/lib/python3.6/site-packages/plone.uuid-1.0.5-py3.6.egg-info/requires.txt b/thesisenv/lib/python3.6/site-packages/plone.uuid-1.0.5-py3.6.egg-info/requires.txt new file mode 100644 index 0000000..07124c0 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/plone.uuid-1.0.5-py3.6.egg-info/requires.txt @@ -0,0 +1,11 @@ +Acquisition +setuptools +zope.component +zope.browserpage +zope.interface +zope.lifecycleevent +zope.publisher + +[test] +zope.configuration +zope.event diff --git a/thesisenv/lib/python3.6/site-packages/plone.uuid-1.0.5-py3.6.egg-info/top_level.txt b/thesisenv/lib/python3.6/site-packages/plone.uuid-1.0.5-py3.6.egg-info/top_level.txt new file mode 100644 index 0000000..1d2c38d --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/plone.uuid-1.0.5-py3.6.egg-info/top_level.txt @@ -0,0 +1 @@ +plone diff --git a/thesisenv/lib/python3.6/site-packages/plone/uuid/__init__.py b/thesisenv/lib/python3.6/site-packages/plone/uuid/__init__.py new file mode 100644 index 0000000..40a96af --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/plone/uuid/__init__.py @@ -0,0 +1 @@ +# -*- coding: utf-8 -*- diff --git a/thesisenv/lib/python3.6/site-packages/plone/uuid/adapter.py b/thesisenv/lib/python3.6/site-packages/plone/uuid/adapter.py new file mode 100644 index 0000000..dcc2946 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/plone/uuid/adapter.py @@ -0,0 +1,25 @@ +# -*- coding: utf-8 -*- +from plone.uuid import interfaces +from zope 
import component +from zope import interface + + +@interface.implementer(interfaces.IUUID) +@component.adapter(interfaces.IAttributeUUID) +def attributeUUID(context): + return getattr(context, interfaces.ATTRIBUTE_NAME, None) + + +@interface.implementer(interfaces.IMutableUUID) +@component.adapter(interfaces.IAttributeUUID) +class MutableAttributeUUID(object): + + def __init__(self, context): + self.context = context + + def get(self): + return getattr(self.context, interfaces.ATTRIBUTE_NAME, None) + + def set(self, uuid): + uuid = str(uuid) + setattr(self.context, interfaces.ATTRIBUTE_NAME, uuid) diff --git a/thesisenv/lib/python3.6/site-packages/plone/uuid/browser.py b/thesisenv/lib/python3.6/site-packages/plone/uuid/browser.py new file mode 100644 index 0000000..2052ada --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/plone/uuid/browser.py @@ -0,0 +1,19 @@ +# -*- coding: utf-8 -*- +from plone.uuid.interfaces import IUUID +from zope.publisher.browser import BrowserView + +import sys + + +if sys.version_info >= (3,): + text_type = str +else: + text_type = unicode + + +class UUIDView(BrowserView): + """A simple browser view that renders the UUID of its context + """ + + def __call__(self): + return text_type(IUUID(self.context, u"")) diff --git a/thesisenv/lib/python3.6/site-packages/plone/uuid/configure.zcml b/thesisenv/lib/python3.6/site-packages/plone/uuid/configure.zcml new file mode 100644 index 0000000..e399af5 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/plone/uuid/configure.zcml @@ -0,0 +1,27 @@ + + + + + + + + + + + + + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/plone/uuid/generator.py b/thesisenv/lib/python3.6/site-packages/plone/uuid/generator.py new file mode 100644 index 0000000..8387122 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/plone/uuid/generator.py @@ -0,0 +1,26 @@ +# -*- coding: utf-8 -*- +from plone.uuid.interfaces import IUUIDGenerator +from zope.deprecation import deprecate +from 
zope.interface import implementer + +import uuid + + +@implementer(IUUIDGenerator) +class UUID4Generator(object): + """Default UUID implementation. + + Uses uuid.uuid4() + """ + + def __call__(self): + return uuid.uuid4().hex + + +@deprecate( + 'UUID1Generator was renamed to UUID4Generator, as we use uuid4 instead of ' + 'uuid1. Please use UUID4Generator instead.' +) +class UUID1Generator(UUID4Generator): + """BBB. Remove with next major version. + """ diff --git a/thesisenv/lib/python3.6/site-packages/plone/uuid/handlers.py b/thesisenv/lib/python3.6/site-packages/plone/uuid/handlers.py new file mode 100644 index 0000000..f9999fe --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/plone/uuid/handlers.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +from plone.uuid.interfaces import ATTRIBUTE_NAME +from plone.uuid.interfaces import IAttributeUUID +from plone.uuid.interfaces import IUUIDGenerator +from zope.component import adapter +from zope.component import queryUtility +from zope.lifecycleevent.interfaces import IObjectCopiedEvent +from zope.lifecycleevent.interfaces import IObjectCreatedEvent + + +try: + from Acquisition import aq_base +except ImportError: + aq_base = lambda v: v # soft-dependency on Zope2, fallback + + +@adapter(IAttributeUUID, IObjectCreatedEvent) +def addAttributeUUID(obj, event): + + if not IObjectCopiedEvent.providedBy(event): + if getattr(aq_base(obj), ATTRIBUTE_NAME, None): + return # defensive: keep existing UUID on non-copy create + + generator = queryUtility(IUUIDGenerator) + if generator is None: + return + + uuid = generator() + if not uuid: + return + + setattr(obj, ATTRIBUTE_NAME, uuid) diff --git a/thesisenv/lib/python3.6/site-packages/plone/uuid/interfaces.py b/thesisenv/lib/python3.6/site-packages/plone/uuid/interfaces.py new file mode 100644 index 0000000..4bd4823 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/plone/uuid/interfaces.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +from zope.interface import Interface + 
+ +ATTRIBUTE_NAME = '_plone.uuid' + + +class IUUIDGenerator(Interface): + """Utility for generating UUIDs + """ + + def __call__(): + """Generate a new UUID. + """ + + +class IUUIDAware(Interface): + """Marker interface for objects that have UUIDs. These should be + adaptable to IUUID. + """ + + +class IAttributeUUID(IUUIDAware): + """Marker interface for objects that have UUIDs stored in a simple + attribute. + + This interface also confers an event handler that will add UUIDs when + objects are created (IObjectCreatedEvent). + """ + + +class IUUID(Interface): + """Abstract representation of a UUID. + + Adapt an object to this interface to obtain its UUID. Adaptation will + fail if the object does not have a UUID (yet). + """ + + +class IMutableUUID(Interface): + """Adapt an object to this interface to manage the UUID of an object + + Be sure of what you are doing. UUID is supposed to be stable and + widely used + """ + def get(): + """Return the UUID of the context""" + + def set(uuid): + """Set the unique id of the context with the uuid value. 
+ """ diff --git a/thesisenv/lib/python3.6/site-packages/plone/uuid/tests.py b/thesisenv/lib/python3.6/site-packages/plone/uuid/tests.py new file mode 100644 index 0000000..b433e4b --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/plone/uuid/tests.py @@ -0,0 +1,161 @@ +# -*- coding: utf-8 -*- +import sys +import unittest + + +if sys.version_info >= (3,): + text_type = str +else: + text_type = unicode + + +class TestUUID(unittest.TestCase): + + def setUp(self): + import zope.component.testing + import plone.uuid + + from zope.configuration import xmlconfig + + zope.component.testing.setUp() + xmlconfig.file('configure.zcml', plone.uuid) + + def tearDown(self): + import zope.component.testing + zope.component.testing.tearDown() + + def test_default_generator(self): + + from zope.component import getUtility + from plone.uuid.interfaces import IUUIDGenerator + + generator = getUtility(IUUIDGenerator) + + uuid1 = generator() + uuid2 = generator() + + self.assertNotEqual(uuid1, uuid2) + self.assertTrue(isinstance(uuid1, str)) + self.assertTrue(isinstance(uuid2, str)) + + def test_attribute_uuid_not_set(self): + + from zope.interface import implementer + + from plone.uuid.interfaces import IAttributeUUID + from plone.uuid.interfaces import IUUID + + @implementer(IAttributeUUID) + class Context(object): + pass + + context = Context() + + uuid = IUUID(context, None) + self.assertEqual(uuid, None) + + def test_attribute_uuid_create_handler(self): + + from zope.interface import implementer + from zope.event import notify + from zope.lifecycleevent import ObjectCreatedEvent + from zope.lifecycleevent import ObjectCopiedEvent + + from plone.uuid.interfaces import IAttributeUUID + from plone.uuid.interfaces import IUUID + from plone.uuid.interfaces import ATTRIBUTE_NAME + + @implementer(IAttributeUUID) + class Context(object): + pass + + context = Context() + notify(ObjectCreatedEvent(context)) + + uuid = IUUID(context, None) + self.assertNotEqual(uuid, None) + 
self.assertTrue(isinstance(uuid, str)) + + # calling handler again won't change if UUID already present: + notify(ObjectCreatedEvent(context)) + self.assertEqual(uuid, IUUID(context, None)) + + # ...except when the UUID attribute was the result of a copy + copied = Context() + setattr(copied, ATTRIBUTE_NAME, IUUID(context, None)) + self.assertNotEqual(IUUID(copied, None), None) # mimic copied state + self.assertEqual(uuid, IUUID(copied, None)) # before handler + notify(ObjectCopiedEvent(copied, original=context)) + self.assertNotEqual(uuid, None) + self.assertNotEqual(uuid, IUUID(copied, None)) # copy has new UID + + def test_uuid_view_not_set(self): + + from zope.interface import implementer + from zope.component import getMultiAdapter + from zope.publisher.browser import TestRequest + + from plone.uuid.interfaces import IAttributeUUID + + @implementer(IAttributeUUID) + class Context(object): + pass + + context = Context() + + request = TestRequest() + view = getMultiAdapter((context, request), name=u"uuid") + response = view() + + self.assertEquals(u"", response) + self.assertTrue(isinstance(response, text_type)) + + def test_uuid_view(self): + + from zope.interface import implementer + from zope.component import getMultiAdapter + from zope.event import notify + from zope.lifecycleevent import ObjectCreatedEvent + from zope.publisher.browser import TestRequest + + from plone.uuid.interfaces import IAttributeUUID + from plone.uuid.interfaces import IUUID + + @implementer(IAttributeUUID) + class Context(object): + pass + + context = Context() + notify(ObjectCreatedEvent(context)) + + uuid = IUUID(context, None) + + request = TestRequest() + view = getMultiAdapter((context, request), name=u"uuid") + response = view() + + self.assertEquals(text_type(uuid), response) + self.assertTrue(isinstance(response, text_type)) + + def test_uuid_mutable(self): + from zope import interface + from zope import lifecycleevent + from zope import event + from plone.uuid import 
interfaces + + @interface.implementer(interfaces.IAttributeUUID) + class Context(object): + pass + + context = Context() + event.notify(lifecycleevent.ObjectCreatedEvent(context)) + + mutable = interfaces.IMutableUUID(context) + + uuid1 = mutable.get() + mutable.set('a uuid to set') + uuid2 = mutable.get() + uuid3 = interfaces.IUUID(context) + + self.failUnless(uuid1 != uuid2) + self.failUnless(uuid2 == uuid3) diff --git a/thesisenv/lib/python3.6/site-packages/python_card_me-0.9.3-py3.6.egg-info/PKG-INFO b/thesisenv/lib/python3.6/site-packages/python_card_me-0.9.3-py3.6.egg-info/PKG-INFO new file mode 100644 index 0000000..73e94c8 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/python_card_me-0.9.3-py3.6.egg-info/PKG-INFO @@ -0,0 +1,33 @@ +Metadata-Version: 1.1 +Name: python-card-me +Version: 0.9.3 +Summary: UNKNOWN +Home-page: https://github.com/tBaxter/python-card-me +Author: Tim Baxter, Jeffrey Harris +Author-email: mail.baxter@gmail.com +License: Apache +Description: + Python-Card-Me parses iCalendar and vCard files into Python data structures, + decoding the relevant encodings. + It can also serialize vobject data structures to iCalendar, vCard, or hCalendar unicode strings. + + It is a substantially reworked version of Vobject, updated for python3, with other improvements. + + Requirements + ------------ + + Requires Python 2.7 or later, dateutil (http://labix.org/python-dateutil) 2.4 or later, and six. 
+ + For older changes, see + - http://vobject.skyhouseconsulting.com/history.html or + - http://websvn.osafoundation.org/listing.php?repname=vobject&path=/trunk/ + +Platform: any +Classifier: Development Status :: 5 - Production/Stable +Classifier: Environment :: Console +Classifier: License :: OSI Approved :: BSD License +Classifier: Intended Audience :: Developers +Classifier: Natural Language :: English +Classifier: Programming Language :: Python +Classifier: Operating System :: OS Independent +Classifier: Topic :: Text Processing diff --git a/thesisenv/lib/python3.6/site-packages/python_card_me-0.9.3-py3.6.egg-info/SOURCES.txt b/thesisenv/lib/python3.6/site-packages/python_card_me-0.9.3-py3.6.egg-info/SOURCES.txt new file mode 100644 index 0000000..7a7762e --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/python_card_me-0.9.3-py3.6.egg-info/SOURCES.txt @@ -0,0 +1,38 @@ +MANIFEST.in +README.md +setup.cfg +setup.py +card_me/__init__.py +card_me/base.py +card_me/behavior.py +card_me/change_tz.py +card_me/hcalendar.py +card_me/icalendar.py +card_me/ics_diff.py +card_me/vcard.py +card_me/win32tz.py +python_card_me.egg-info/PKG-INFO +python_card_me.egg-info/SOURCES.txt +python_card_me.egg-info/dependency_links.txt +python_card_me.egg-info/entry_points.txt +python_card_me.egg-info/requires.txt +python_card_me.egg-info/top_level.txt +python_card_me.egg-info/zip-safe +test_files/availablity.ics +test_files/badline.ics +test_files/badstream.ics +test_files/freebusy.ics +test_files/journal.ics +test_files/ms_tzid.ics +test_files/recurrence.ics +test_files/ruby_rrule.ics +test_files/silly_test.ics +test_files/simple_2_0_test.ics +test_files/simple_3_0_test.ics +test_files/simple_test.ics +test_files/standard_test.ics +test_files/timezones.ics +test_files/tzid_8bit.ics +test_files/utf8_test.ics +test_files/vcard_with_groups.ics +test_files/vtodo.ics \ No newline at end of file diff --git 
a/thesisenv/lib/python3.6/site-packages/python_card_me-0.9.3-py3.6.egg-info/dependency_links.txt b/thesisenv/lib/python3.6/site-packages/python_card_me-0.9.3-py3.6.egg-info/dependency_links.txt new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/python_card_me-0.9.3-py3.6.egg-info/dependency_links.txt @@ -0,0 +1 @@ + diff --git a/thesisenv/lib/python3.6/site-packages/python_card_me-0.9.3-py3.6.egg-info/entry_points.txt b/thesisenv/lib/python3.6/site-packages/python_card_me-0.9.3-py3.6.egg-info/entry_points.txt new file mode 100644 index 0000000..9f77988 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/python_card_me-0.9.3-py3.6.egg-info/entry_points.txt @@ -0,0 +1,4 @@ +[console_scripts] +change_tz = card_me.change_tz:main +ics_diff = card_me.ics_diff:main + diff --git a/thesisenv/lib/python3.6/site-packages/python_card_me-0.9.3-py3.6.egg-info/installed-files.txt b/thesisenv/lib/python3.6/site-packages/python_card_me-0.9.3-py3.6.egg-info/installed-files.txt new file mode 100644 index 0000000..b3c703a --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/python_card_me-0.9.3-py3.6.egg-info/installed-files.txt @@ -0,0 +1,27 @@ +../../../../bin/change_tz +../../../../bin/ics_diff +../card_me/__init__.py +../card_me/__pycache__/__init__.cpython-36.pyc +../card_me/__pycache__/base.cpython-36.pyc +../card_me/__pycache__/behavior.cpython-36.pyc +../card_me/__pycache__/change_tz.cpython-36.pyc +../card_me/__pycache__/hcalendar.cpython-36.pyc +../card_me/__pycache__/icalendar.cpython-36.pyc +../card_me/__pycache__/ics_diff.cpython-36.pyc +../card_me/__pycache__/vcard.cpython-36.pyc +../card_me/__pycache__/win32tz.cpython-36.pyc +../card_me/base.py +../card_me/behavior.py +../card_me/change_tz.py +../card_me/hcalendar.py +../card_me/icalendar.py +../card_me/ics_diff.py +../card_me/vcard.py +../card_me/win32tz.py +PKG-INFO +SOURCES.txt +dependency_links.txt +entry_points.txt +requires.txt +top_level.txt +zip-safe 
diff --git a/thesisenv/lib/python3.6/site-packages/python_card_me-0.9.3-py3.6.egg-info/requires.txt b/thesisenv/lib/python3.6/site-packages/python_card_me-0.9.3-py3.6.egg-info/requires.txt new file mode 100644 index 0000000..0f08daa --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/python_card_me-0.9.3-py3.6.egg-info/requires.txt @@ -0,0 +1 @@ +python-dateutil diff --git a/thesisenv/lib/python3.6/site-packages/python_card_me-0.9.3-py3.6.egg-info/top_level.txt b/thesisenv/lib/python3.6/site-packages/python_card_me-0.9.3-py3.6.egg-info/top_level.txt new file mode 100644 index 0000000..a1c0141 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/python_card_me-0.9.3-py3.6.egg-info/top_level.txt @@ -0,0 +1 @@ +card_me diff --git a/thesisenv/lib/python3.6/site-packages/python_card_me-0.9.3-py3.6.egg-info/zip-safe b/thesisenv/lib/python3.6/site-packages/python_card_me-0.9.3-py3.6.egg-info/zip-safe new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/python_card_me-0.9.3-py3.6.egg-info/zip-safe @@ -0,0 +1 @@ + diff --git a/thesisenv/lib/python3.6/site-packages/python_gettext-3.0-py3.6.egg-info/PKG-INFO b/thesisenv/lib/python3.6/site-packages/python_gettext-3.0-py3.6.egg-info/PKG-INFO new file mode 100644 index 0000000..79b1fa4 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/python_gettext-3.0-py3.6.egg-info/PKG-INFO @@ -0,0 +1,156 @@ +Metadata-Version: 1.1 +Name: python-gettext +Version: 3.0 +Summary: Python Gettext po to mo file compiler. +Home-page: http://pypi.python.org/pypi/python-gettext +Author: Hanno Schlichting +Author-email: hanno@hannosch.eu +License: BSD +Description: Overview + ======== + + This implementation of Gettext for Python includes a Msgfmt class which can be + used to generate compiled mo files from Gettext po files and includes support + for the newer msgctxt keyword. + + The idea for this project had been rather ambitious, but never lived up to what + is was supposed to do. 
Look at Babel (http://pypi.python.org/pypi/Babel) for a + project more worthy of this projects name. + + Development takes place at https://github.com/hannosch/python-gettext + + Contributors + ------------ + + * Hanno Schlichting + * Christian Heimes + * Andrei Polushin + * Michael Howitz + + + Changelog + ========= + + 3.0 - 2016-01-04 + ---------------- + + - Use `u''` strings again for easier cross Python 2/3 code. + [hannosch] + + - #5: Fix plural form support under Python 3.x. + [hannosch] + + - Break dependency on ``unittest2`` for Python 2.7. + [icemac] + + 2.1 - 2013-02-10 + ---------------- + + - Prefer `ast.literal_eval` over `eval` under Python 2, instead of just under + Python 3. We only support Python 2.6+ where the function is available. + [hannosch] + + - Tested successfully under Python 3.3. + [hannosch] + + 2.0 - 2011-12-22 + ---------------- + + - Tested successfully under PyPy 1.7. + [hannosch] + + - Handle non-latin-1 characters in the header correctly. + [hannosch] + + - Python 2 and 3 compatibility in the same codebase. + [hannosch] + + 1.2 - 2011-11-01 + ---------------- + + - Make sure empty `po` files don't break. + [Alexandru Plugaru] + + - Add support for messages with plural forms. + [Andrei Polushin] + + 1.1.1 - 2011-03-21 + ------------------ + + - Simplify test folder discovery. + [hannosch] + + 1.1 - 2011-03-20 + ---------------- + + - Handle Unicode Byte Order Mark at the beginning of files. This closes + http://dev.plone.org/plone/ticket/10813. + [hannosch, kleist] + + - Fixed potential UnicodeError in exception handling. This closes + http://dev.plone.org/plone/ticket/11058. + [hannosch, vincentfretin] + + 1.0 - 2009-05-23 + ---------------- + + - Updated package description and metadata, relicense from GPL to BSD. Note + about Babel which supersedes this package in all possible ways. + [hannosch] + + 0.6 - 2007-11-02 + ---------------- + + - Fixed header parsing. 
+ [hannosch] + + 0.5 - 2007-11-01 + ---------------- + + - Optimized file parsing by using more elif clauses and avoiding an insane + number of startswith and isinstance calls. + [hannosch] + + 0.4 - 2007-10-13 + ---------------- + + - Added header_only argument to the read method, that allows to only parse + the header of a file without reading and parsing all the messages. + [hannosch] + + 0.3 - 2007-08-25 + ---------------- + + - Added an explicit read method, which only parses the po file and stores + it in the messages dict, but does not compile it. + [hannosch] + + 0.2 - 2007-08-23 + ---------------- + + - Bumped version number to accommodate the number in the egg info. + [hannosch] + + 0.1 - 2007-06-19 + ---------------- + + - Initial implementation of a Msgfmt class which supports the generation of + Gettext mo files including support for the new msgctxt keyword. + [hannosch] + +Keywords: Python Gettext Msgctxt +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: BSD License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2.6 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Topic :: Software Development :: Internationalization +Classifier: Topic :: Software Development :: Localization diff --git a/thesisenv/lib/python3.6/site-packages/python_gettext-3.0-py3.6.egg-info/SOURCES.txt b/thesisenv/lib/python3.6/site-packages/python_gettext-3.0-py3.6.egg-info/SOURCES.txt new file mode 100644 index 0000000..f91a82a --- /dev/null +++ 
b/thesisenv/lib/python3.6/site-packages/python_gettext-3.0-py3.6.egg-info/SOURCES.txt @@ -0,0 +1,33 @@ +CHANGES.rst +LICENSE.rst +MANIFEST.in +README.rst +pip-delete-this-directory.txt +setup.cfg +setup.py +python_gettext.egg-info/PKG-INFO +python_gettext.egg-info/SOURCES.txt +python_gettext.egg-info/dependency_links.txt +python_gettext.egg-info/not-zip-safe +python_gettext.egg-info/top_level.txt +pythongettext/__init__.py +pythongettext/msgfmt.py +pythongettext/tests/__init__.py +pythongettext/tests/test.mo +pythongettext/tests/test.po +pythongettext/tests/test.pot +pythongettext/tests/test2.mo +pythongettext/tests/test2.po +pythongettext/tests/test3.mo +pythongettext/tests/test3.po +pythongettext/tests/test4.po +pythongettext/tests/test5.po +pythongettext/tests/test6.mo +pythongettext/tests/test6.po +pythongettext/tests/test_compile.py +pythongettext/tests/test_empty.mo +pythongettext/tests/test_empty.po +pythongettext/tests/test_escape.po +pythongettext/tests/test_plural.po +pythongettext/tests/test_unicode_bom.mo +pythongettext/tests/test_unicode_bom.po \ No newline at end of file diff --git a/thesisenv/lib/python3.6/site-packages/python_gettext-3.0-py3.6.egg-info/dependency_links.txt b/thesisenv/lib/python3.6/site-packages/python_gettext-3.0-py3.6.egg-info/dependency_links.txt new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/python_gettext-3.0-py3.6.egg-info/dependency_links.txt @@ -0,0 +1 @@ + diff --git a/thesisenv/lib/python3.6/site-packages/python_gettext-3.0-py3.6.egg-info/installed-files.txt b/thesisenv/lib/python3.6/site-packages/python_gettext-3.0-py3.6.egg-info/installed-files.txt new file mode 100644 index 0000000..9980116 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/python_gettext-3.0-py3.6.egg-info/installed-files.txt @@ -0,0 +1,30 @@ +../pythongettext/__init__.py +../pythongettext/__pycache__/__init__.cpython-36.pyc +../pythongettext/__pycache__/msgfmt.cpython-36.pyc 
+../pythongettext/msgfmt.py +../pythongettext/tests/__init__.py +../pythongettext/tests/__pycache__/__init__.cpython-36.pyc +../pythongettext/tests/__pycache__/test_compile.cpython-36.pyc +../pythongettext/tests/test.mo +../pythongettext/tests/test.po +../pythongettext/tests/test.pot +../pythongettext/tests/test2.mo +../pythongettext/tests/test2.po +../pythongettext/tests/test3.mo +../pythongettext/tests/test3.po +../pythongettext/tests/test4.po +../pythongettext/tests/test5.po +../pythongettext/tests/test6.mo +../pythongettext/tests/test6.po +../pythongettext/tests/test_compile.py +../pythongettext/tests/test_empty.mo +../pythongettext/tests/test_empty.po +../pythongettext/tests/test_escape.po +../pythongettext/tests/test_plural.po +../pythongettext/tests/test_unicode_bom.mo +../pythongettext/tests/test_unicode_bom.po +PKG-INFO +SOURCES.txt +dependency_links.txt +not-zip-safe +top_level.txt diff --git a/thesisenv/lib/python3.6/site-packages/python_gettext-3.0-py3.6.egg-info/not-zip-safe b/thesisenv/lib/python3.6/site-packages/python_gettext-3.0-py3.6.egg-info/not-zip-safe new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/python_gettext-3.0-py3.6.egg-info/not-zip-safe @@ -0,0 +1 @@ + diff --git a/thesisenv/lib/python3.6/site-packages/python_gettext-3.0-py3.6.egg-info/top_level.txt b/thesisenv/lib/python3.6/site-packages/python_gettext-3.0-py3.6.egg-info/top_level.txt new file mode 100644 index 0000000..486f023 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/python_gettext-3.0-py3.6.egg-info/top_level.txt @@ -0,0 +1 @@ +pythongettext diff --git a/thesisenv/lib/python3.6/site-packages/pythongettext/__init__.py b/thesisenv/lib/python3.6/site-packages/pythongettext/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/thesisenv/lib/python3.6/site-packages/pythongettext/msgfmt.py b/thesisenv/lib/python3.6/site-packages/pythongettext/msgfmt.py new file mode 100644 index 0000000..6641b52 --- 
/dev/null +++ b/thesisenv/lib/python3.6/site-packages/pythongettext/msgfmt.py @@ -0,0 +1,281 @@ +#! /usr/bin/env python +# -*- coding: iso-8859-1 -*- +# Written by Martin v. Loewis +# +# Changed by Christian 'Tiran' Heimes for the placeless +# translation service (PTS) of Zope +# +# Fixed some bugs and updated to support msgctxt +# by Hanno Schlichting + +"""Generate binary message catalog from textual translation description. + +This program converts a textual Uniforum-style message catalog (.po file) into +a binary GNU catalog (.mo file). This is essentially the same function as the +GNU msgfmt program, however, it is a simpler implementation. + +This file was taken from Python-2.3.2/Tools/i18n and altered in several ways. +Now you can simply use it from another python module: + + from msgfmt import Msgfmt + mo = Msgfmt(po).get() + +where po is path to a po file as string, an opened po file ready for reading or +a list of strings (readlines of a po file) and mo is the compiled mo file as +binary string. 
+ +Exceptions: + + * IOError if the file couldn't be read + + * msgfmt.PoSyntaxError if the po file has syntax errors +""" + +import array +from ast import literal_eval +import codecs +from email.parser import HeaderParser +import struct +import sys + +PY3 = sys.version_info[0] == 3 +if PY3: + def header_charset(s): + p = HeaderParser() + return p.parsestr(s).get_content_charset() + + import io + BytesIO = io.BytesIO + FILE_TYPE = io.IOBase +else: + def header_charset(s): + p = HeaderParser() + return p.parsestr(s.encode('utf-8', 'ignore')).get_content_charset() + + from cStringIO import StringIO as BytesIO + FILE_TYPE = file + + +class PoSyntaxError(Exception): + """ Syntax error in a po file """ + + def __init__(self, msg): + self.msg = msg + + def __str__(self): + return 'Po file syntax error: %s' % self.msg + + +class Msgfmt: + + def __init__(self, po, name='unknown'): + self.po = po + self.name = name + self.messages = {} + self.openfile = False + # Start off assuming latin-1, so everything decodes without failure, + # until we know the exact encoding + self.encoding = 'latin-1' + + def readPoData(self): + """ read po data from self.po and return an iterator """ + output = [] + if isinstance(self.po, str): + output = open(self.po, 'rb') + elif isinstance(self.po, FILE_TYPE): + self.po.seek(0) + self.openfile = True + output = self.po + elif isinstance(self.po, list): + output = self.po + if not output: + raise ValueError("self.po is invalid! %s" % type(self.po)) + if isinstance(output, FILE_TYPE): + # remove BOM from the start of the parsed input + first = output.readline() + if len(first) == 0: + return output.readlines() + if first.startswith(codecs.BOM_UTF8): + first = first.lstrip(codecs.BOM_UTF8) + return [first] + output.readlines() + return output + + def add(self, context, id, string, fuzzy): + "Add a non-empty and non-fuzzy translation to the dictionary." + if string and not fuzzy: + # The context is put before the id and separated by a EOT char. 
+ if context: + id = context + u'\x04' + id + if not id: + # See whether there is an encoding declaration + charset = header_charset(string) + if charset: + # decode header in proper encoding + string = string.encode(self.encoding).decode(charset) + if not PY3: + # undo damage done by literal_eval in Python 2.x + string = string.encode(self.encoding).decode(charset) + self.encoding = charset + self.messages[id] = string + + def generate(self): + "Return the generated output." + # the keys are sorted in the .mo file + keys = sorted(self.messages.keys()) + offsets = [] + ids = strs = b'' + for id in keys: + msg = self.messages[id].encode(self.encoding) + id = id.encode(self.encoding) + # For each string, we need size and file offset. Each string is + # NUL terminated; the NUL does not count into the size. + offsets.append((len(ids), len(id), len(strs), + len(msg))) + ids += id + b'\0' + strs += msg + b'\0' + output = b'' + # The header is 7 32-bit unsigned integers. We don't use hash tables, + # so the keys start right after the index tables. + keystart = 7 * 4 + 16 * len(keys) + # and the values start after the keys + valuestart = keystart + len(ids) + koffsets = [] + voffsets = [] + # The string table first has the list of keys, then the list of values. + # Each entry has first the size of the string, then the file offset. 
+ for o1, l1, o2, l2 in offsets: + koffsets += [l1, o1 + keystart] + voffsets += [l2, o2 + valuestart] + offsets = koffsets + voffsets + # Even though we don't use a hashtable, we still set its offset to be + # binary compatible with the gnu gettext format produced by: + # msgfmt file.po --no-hash + output = struct.pack("Iiiiiii", + 0x950412de, # Magic + 0, # Version + len(keys), # # of entries + 7 * 4, # start of key index + 7 * 4 + len(keys) * 8, # start of value index + 0, keystart) # size and offset of hash table + if PY3: + output += array.array("i", offsets).tobytes() + else: + output += array.array("i", offsets).tostring() + output += ids + output += strs + return output + + def get(self): + """ """ + self.read() + # Compute output + return self.generate() + + def read(self, header_only=False): + """ """ + ID = 1 + STR = 2 + CTXT = 3 + + section = None + fuzzy = 0 + msgid = msgstr = msgctxt = u'' + + # Parse the catalog + lno = 0 + for l in self.readPoData(): + l = l.decode(self.encoding) + lno += 1 + # If we get a comment line after a msgstr or a line starting with + # msgid or msgctxt, this is a new entry + if section == STR and (l[0] == '#' or (l[0] == 'm' and + (l.startswith('msgctxt') or l.startswith('msgid')))): + self.add(msgctxt, msgid, msgstr, fuzzy) + section = None + fuzzy = 0 + # If we only want the header we stop after the first message + if header_only: + break + # Record a fuzzy mark + if l[:2] == '#,' and 'fuzzy' in l: + fuzzy = 1 + # Skip comments + if l[0] == '#': + continue + # Now we are in a msgctxt section + if l.startswith('msgctxt'): + section = CTXT + l = l[7:] + msgctxt = u'' + # Now we are in a msgid section, output previous section + elif (l.startswith('msgid') and + not l.startswith('msgid_plural')): + if section == STR: + self.add(msgid, msgstr, fuzzy) + section = ID + l = l[5:] + msgid = msgstr = u'' + is_plural = False + # This is a message with plural forms + elif l.startswith('msgid_plural'): + if section != ID: + raise 
PoSyntaxError( + 'msgid_plural not preceeded by ' + 'msgid on line %d of po file %s' % + (lno, repr(self.name))) + l = l[12:] + msgid += u'\0' # separator of singular and plural + is_plural = True + # Now we are in a msgstr section + elif l.startswith('msgstr'): + section = STR + if l.startswith('msgstr['): + if not is_plural: + raise PoSyntaxError( + 'plural without msgid_plural ' + 'on line %d of po file %s' % + (lno, repr(self.name))) + l = l.split(']', 1)[1] + if msgstr: + # Separator of the various plural forms + msgstr += u'\0' + else: + if is_plural: + raise PoSyntaxError( + 'indexed msgstr required for ' + 'plural on line %d of po file %s' % + (lno, repr(self.name))) + l = l[6:] + # Skip empty lines + l = l.strip() + if not l: + continue + # TODO: Does this always follow Python escape semantics? + try: + l = literal_eval(l) + except Exception as msg: + raise PoSyntaxError( + '%s (line %d of po file %s): \n%s' % + (msg, lno, repr(self.name), l)) + if isinstance(l, bytes): + l = l.decode(self.encoding) + if section == CTXT: + msgctxt += l + elif section == ID: + msgid += l + elif section == STR: + msgstr += l + else: + raise PoSyntaxError( + 'error on line %d of po file %s' % + (lno, repr(self.name))) + + # Add last entry + if section == STR: + self.add(msgctxt, msgid, msgstr, fuzzy) + + if self.openfile: + self.po.close() + + def getAsFile(self): + return BytesIO(self.get()) diff --git a/thesisenv/lib/python3.6/site-packages/pythongettext/tests/__init__.py b/thesisenv/lib/python3.6/site-packages/pythongettext/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/thesisenv/lib/python3.6/site-packages/pythongettext/tests/test.mo b/thesisenv/lib/python3.6/site-packages/pythongettext/tests/test.mo new file mode 100644 index 0000000..810ed74 Binary files /dev/null and b/thesisenv/lib/python3.6/site-packages/pythongettext/tests/test.mo differ diff --git a/thesisenv/lib/python3.6/site-packages/pythongettext/tests/test.po 
b/thesisenv/lib/python3.6/site-packages/pythongettext/tests/test.po new file mode 100644 index 0000000..10734f3 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/pythongettext/tests/test.po @@ -0,0 +1,41 @@ +# Some comments +# Some more comments +msgid "" +msgstr "" +"Project-Id-Version: test\n" +"POT-Creation-Date: 2007-05-31 19:30+0100\n" +"Last-Translator: Hanno C. Schlichting \n" +"Language-Team: <>\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=utf-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Plural-Forms: nplurals=1; plural=0;\n" + +# comment1 +#. More comments: "default1" +#: folder1/file1 +#: folder2/file1 +msgid "msgid1" +msgstr "msgstr1" + +# comment2 +#: file2 +msgid "msgid2" +msgstr "msgstr2" + +# Default: "default3" +#: file3 +msgid "msgid3" +msgstr "msgstr3" + +#. Default: "default4" +msgid "msgid4" +msgstr "msgstr4" + +# comment5 +msgid "msgid5" +msgstr "msgstr5" + +#, fuzzy +msgid "msgid6" +msgstr "msgstr6" diff --git a/thesisenv/lib/python3.6/site-packages/pythongettext/tests/test.pot b/thesisenv/lib/python3.6/site-packages/pythongettext/tests/test.pot new file mode 100644 index 0000000..0463cf0 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/pythongettext/tests/test.pot @@ -0,0 +1,41 @@ +# Some comments +# Some more comments +msgid "" +msgstr "" +"Project-Id-Version: test\n" +"POT-Creation-Date: 2007-05-31 19:30+0100\n" +"Last-Translator: Hanno Schlichting \n" +"Language-Team: <>\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=utf-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Plural-Forms: nplurals=1; plural=0;\n" + +# comment1 +#. More comments: "default1" +#: folder1/file1 +#: folder2/file1 +msgid "msgid1" +msgstr "" + +# comment2 +#: file2 +msgid "msgid2" +msgstr "" + +# Default: "default3" +#: file3 +msgid "msgid3" +msgstr "" + +#. 
Default: "default4" +msgid "msgid4" +msgstr "" + +# comment5 +msgid "msgid5" +msgstr "" + +#, fuzzy +msgid "msgid6" +msgstr "" diff --git a/thesisenv/lib/python3.6/site-packages/pythongettext/tests/test2.mo b/thesisenv/lib/python3.6/site-packages/pythongettext/tests/test2.mo new file mode 100644 index 0000000..61c2f0a Binary files /dev/null and b/thesisenv/lib/python3.6/site-packages/pythongettext/tests/test2.mo differ diff --git a/thesisenv/lib/python3.6/site-packages/pythongettext/tests/test2.po b/thesisenv/lib/python3.6/site-packages/pythongettext/tests/test2.po new file mode 100644 index 0000000..2af7851 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/pythongettext/tests/test2.po @@ -0,0 +1,21 @@ +# Some comments +msgid "" +msgstr "" +"Project-Id-Version: test2\n" +"POT-Creation-Date: 2007-05-31 22:15+0100\n" +"Last-Translator: Hanno C. Schlichting \n" +"Language-Team: <>\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=utf-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Plural-Forms: nplurals=1; plural=0;\n" + +# comment1 +#. More comments: "default1" +#: folder1/file1 +msgid "msgid1" +msgstr "msgstr1" + +#, fuzzy +msgid "msgid2" +msgstr "msgstr2" diff --git a/thesisenv/lib/python3.6/site-packages/pythongettext/tests/test3.mo b/thesisenv/lib/python3.6/site-packages/pythongettext/tests/test3.mo new file mode 100644 index 0000000..67e1100 Binary files /dev/null and b/thesisenv/lib/python3.6/site-packages/pythongettext/tests/test3.mo differ diff --git a/thesisenv/lib/python3.6/site-packages/pythongettext/tests/test3.po b/thesisenv/lib/python3.6/site-packages/pythongettext/tests/test3.po new file mode 100644 index 0000000..3db1892 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/pythongettext/tests/test3.po @@ -0,0 +1,36 @@ +msgid "" +msgstr "" +"Project-Id-Version: test3\n" +"POT-Creation-Date: 2007-05-31 22:15+0100\n" +"Last-Translator: Hanno C. 
Schlichting \n" +"Language-Team: <>\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=utf-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Plural-Forms: nplurals=1; plural=0;\n" + +# comment1 +#. More comments: "default1" +#: folder1/file1 +msgid "msgid1" +msgstr "msgstr1" + +#, fuzzy +msgid "msgid2" +msgstr "msgstr2" + +msgctxt "msgctext3" +msgid "msgid3" +msgstr "msgstr3" + +# comment4 +#. More comments: "default4" +#: folder4/file4 +msgctxt "msgctext4" +msgid "msgid4" +msgstr "msgstr4" + +#, fuzzy +msgctxt "msgctext5" +msgid "msgid5" +msgstr "msgstr5" diff --git a/thesisenv/lib/python3.6/site-packages/pythongettext/tests/test4.po b/thesisenv/lib/python3.6/site-packages/pythongettext/tests/test4.po new file mode 100644 index 0000000..8d04d31 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/pythongettext/tests/test4.po @@ -0,0 +1,22 @@ +# Translation of foo.pot to Afrikaans +# Mighty translator, 2007 +msgid "" +msgstr "" +"Project-Id-Version: foo\n" +"POT-Creation-Date: 2007-09-01 12:34+0000\n" +"PO-Revision-Date: 2006-05-26 14:12+0200\n" +"Last-Translator: Mighty translator\n" +"Language-Team: Afrikaans \n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=iso-8859-1\n" +"Content-Transfer-Encoding: 8bit\n" +"Plural-Forms: nplurals=1; plural=0\n" +"Language-Code: af\n" +"Language-Name: Afrikaans\n" +"Preferred-Encodings: utf-8 latin1\n" +"Domain: foo\n" + +#. Default: "Message 1" +#: ./foo/bar.py:42 +msgid "message_1" +msgstr "Message 1" diff --git a/thesisenv/lib/python3.6/site-packages/pythongettext/tests/test5.po b/thesisenv/lib/python3.6/site-packages/pythongettext/tests/test5.po new file mode 100644 index 0000000..e55e6f5 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/pythongettext/tests/test5.po @@ -0,0 +1,17 @@ +msgid "" +msgstr "" +"Project-Id-Version: test\n" +"POT-Creation-Date: 2007-05-31 19:30+0100\n" +"Last-Translator: Hanno C. 
Schlichting \n" +"Language-Team: <>\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=utf-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Plural-Forms: nplurals=1; plural=0;\n" + +msgid "msgid1" +msgstr "føø" + +msgid "msgid2" +msgstr "føø +bår" diff --git a/thesisenv/lib/python3.6/site-packages/pythongettext/tests/test6.mo b/thesisenv/lib/python3.6/site-packages/pythongettext/tests/test6.mo new file mode 100644 index 0000000..42ea8d3 Binary files /dev/null and b/thesisenv/lib/python3.6/site-packages/pythongettext/tests/test6.mo differ diff --git a/thesisenv/lib/python3.6/site-packages/pythongettext/tests/test6.po b/thesisenv/lib/python3.6/site-packages/pythongettext/tests/test6.po new file mode 100644 index 0000000..b47480e --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/pythongettext/tests/test6.po @@ -0,0 +1,14 @@ +msgid "" +msgstr "" +"Project-Id-Version: Tøst 1.0\n" +"POT-Creation-Date: 2007-05-31 19:30+0100\n" +"PO-Revision-Date: 2007-05-31 19:30+0100\n" +"Last-Translator: Föø Bår \n" +"Language-Team: <>\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=utf-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Plural-Forms: nplurals=1; plural=0;\n" + +msgid "msgid1" +msgstr "føø" diff --git a/thesisenv/lib/python3.6/site-packages/pythongettext/tests/test_compile.py b/thesisenv/lib/python3.6/site-packages/pythongettext/tests/test_compile.py new file mode 100644 index 0000000..d406ff4 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/pythongettext/tests/test_compile.py @@ -0,0 +1,109 @@ +# -*- coding: utf-8 -*- +import os + +from pythongettext.msgfmt import Msgfmt +from pythongettext.msgfmt import PoSyntaxError +try: + import unittest2 as unittest +except ImportError: # Python 2.7 or newer + import unittest + +FOLDER = os.path.dirname(__file__) + + +class TestWriter(unittest.TestCase): + + def compare_po_mo(self, poname, moname): + po_file = None + mo_file = None + try: + po_file = open(os.path.join(FOLDER, poname), 'rb') + po = 
Msgfmt(po_file).get() + mo_file = open(os.path.join(FOLDER, moname), 'rb') + mo = b''.join(mo_file.readlines()) + finally: + if po_file is not None: + po_file.close() + if mo_file is not None: + mo_file.close() + + self.assertEqual(mo, po) + + def test_empty(self): + self.compare_po_mo('test_empty.po', 'test_empty.mo') + + def test_test(self): + self.compare_po_mo('test.po', 'test.mo') + + def test_test2(self): + self.compare_po_mo('test2.po', 'test2.mo') + + def test_msgctxt(self): + self.compare_po_mo('test3.po', 'test3.mo') + + def test_test4(self): + po_file = open(os.path.join(FOLDER, 'test4.po'), 'rb') + po = Msgfmt(po_file) + po.read(header_only=True) + po_file.close() + self.assertTrue( + po.messages[u''].startswith('Project-Id-Version: foo')) + self.assertEqual(po.encoding, u'iso-8859-1') + + def test_test5(self): + po_file = open(os.path.join(FOLDER, 'test5.po'), 'rb') + po = Msgfmt(po_file) + try: + with self.assertRaises(PoSyntaxError): + po.read() + finally: + po_file.close() + self.assertEqual(po.encoding, u'utf-8') + + def test_test5_unicode_name(self): + po_file = open(os.path.join(FOLDER, 'test5.po'), 'rb') + po = Msgfmt(po_file, name=u'dømain') + try: + with self.assertRaises(PoSyntaxError): + po.read() + finally: + po_file.close() + self.assertEqual(po.encoding, u'utf-8') + + def test_test6(self): + self.compare_po_mo('test6.po', 'test6.mo') + + def test_test6_unicode_header(self): + po_file = open(os.path.join(FOLDER, 'test6.po'), 'rb') + po = Msgfmt(po_file) + po.read(header_only=True) + po_file.close() + self.assertTrue(po.messages[u''].startswith( + u'Project-Id-Version: Tøst 1.0')) + self.assertEqual(po.encoding, u'utf-8') + + def test_escape(self): + po_file = open(os.path.join(FOLDER, 'test_escape.po'), 'rb') + po = Msgfmt(po_file) + try: + with self.assertRaises(PoSyntaxError) as e: + po.read() + self.assertTrue('line 19' in e.exception.msg) + self.assertEqual(po.encoding, u'utf-8') + finally: + po_file.close() + + def 
test_unicode_bom(self): + self.compare_po_mo('test_unicode_bom.po', 'test_unicode_bom.mo') + + def test_plural(self): + po_file = open(os.path.join(FOLDER, 'test_plural.po'), 'rb') + po = Msgfmt(po_file) + try: + po.read() + finally: + po_file.close() + self.assertEqual( + set(po.messages.keys()), + set([u'', u'm1', u'm2 ø\x00{d} ømsgid', + u'øcontext\x04m3 ø\x00{d} ømsgid context'])) diff --git a/thesisenv/lib/python3.6/site-packages/pythongettext/tests/test_empty.mo b/thesisenv/lib/python3.6/site-packages/pythongettext/tests/test_empty.mo new file mode 100644 index 0000000..7d18eb0 Binary files /dev/null and b/thesisenv/lib/python3.6/site-packages/pythongettext/tests/test_empty.mo differ diff --git a/thesisenv/lib/python3.6/site-packages/pythongettext/tests/test_empty.po b/thesisenv/lib/python3.6/site-packages/pythongettext/tests/test_empty.po new file mode 100644 index 0000000..e69de29 diff --git a/thesisenv/lib/python3.6/site-packages/pythongettext/tests/test_escape.po b/thesisenv/lib/python3.6/site-packages/pythongettext/tests/test_escape.po new file mode 100644 index 0000000..d87f9d9 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/pythongettext/tests/test_escape.po @@ -0,0 +1,19 @@ +msgid "" +msgstr "" +"Project-Id-Version: test\n" +"POT-Creation-Date: 2007-05-31 19:30+0100\n" +"Last-Translator: Hanno C. 
Schlichting \n" +"Language-Team: <>\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=utf-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Plural-Forms: nplurals=1; plural=0;\n" + +msgid "msgid1" +msgstr "Hello ${foo}" + +msgid "msgid2" +msgstr "Hellø \"bar\"" + +msgid "msgid3" +msgstr "Hellø "bar\"" diff --git a/thesisenv/lib/python3.6/site-packages/pythongettext/tests/test_plural.po b/thesisenv/lib/python3.6/site-packages/pythongettext/tests/test_plural.po new file mode 100644 index 0000000..5e655e8 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/pythongettext/tests/test_plural.po @@ -0,0 +1,26 @@ +msgid "" +msgstr "" +"Project-Id-Version: test_plural\n" +"POT-Creation-Date: 2016-01-04 16:20+0100\n" +"Last-Translator: Hanno C. Schlichting \n" +"Language-Team: <>\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=utf-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Plural-Forms: nplurals=3; plural=n%10==1 && n%100!=11 ? 0 : n%10>=2 && n%10<=4 && (n%100<10 || n%100>=20) ? 
1 : 2;\n" + +msgid "m1" +msgstr "msgstr" + +msgid "m2 ø" +msgid_plural "{d} ømsgid" +msgstr[0] "ø" +msgstr[1] "øø" +msgstr[2] "øøø" + +msgctxt "øcontext" +msgid "m3 ø" +msgid_plural "{d} ømsgid context" +msgstr[0] "ø" +msgstr[1] "øø" +msgstr[2] "øøø" diff --git a/thesisenv/lib/python3.6/site-packages/pythongettext/tests/test_unicode_bom.mo b/thesisenv/lib/python3.6/site-packages/pythongettext/tests/test_unicode_bom.mo new file mode 100644 index 0000000..278042d Binary files /dev/null and b/thesisenv/lib/python3.6/site-packages/pythongettext/tests/test_unicode_bom.mo differ diff --git a/thesisenv/lib/python3.6/site-packages/pythongettext/tests/test_unicode_bom.po b/thesisenv/lib/python3.6/site-packages/pythongettext/tests/test_unicode_bom.po new file mode 100644 index 0000000..3c4b1c1 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/pythongettext/tests/test_unicode_bom.po @@ -0,0 +1,13 @@ +msgid "" +msgstr "" +"Project-Id-Version: test\n" +"POT-Creation-Date: 2007-05-31 19:30+0100\n" +"Last-Translator: Hanno C. 
Schlichting \n" +"Language-Team: <>\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=utf-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Plural-Forms: nplurals=1; plural=0;\n" + +msgid "msgid1" +msgstr "føø" diff --git a/thesisenv/lib/python3.6/site-packages/sorl/__init__.py b/thesisenv/lib/python3.6/site-packages/sorl/__init__.py new file mode 100644 index 0000000..28da247 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/sorl/__init__.py @@ -0,0 +1,20 @@ +# encoding=utf-8 +from __future__ import unicode_literals + +import logging + +__author__ = "Mikko Hellsing" +__license__ = "BSD" +__version__ = '12.5.0' +__maintainer__ = "Jazzband" +__email__ = "mariocesar@humanzilla.com" + + +class NullHandler(logging.Handler): + def emit(self, record): + pass + + +# Add a logging handler that does nothing to silence messages with no logger +# configured +logging.getLogger('sorl').addHandler(NullHandler()) diff --git a/thesisenv/lib/python3.6/site-packages/sorl/thumbnail/__init__.py b/thesisenv/lib/python3.6/site-packages/sorl/thumbnail/__init__.py new file mode 100644 index 0000000..ac2da56 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/sorl/thumbnail/__init__.py @@ -0,0 +1,4 @@ +from sorl.thumbnail.fields import ImageField +from sorl.thumbnail.shortcuts import get_thumbnail, delete +from sorl import __version__ + diff --git a/thesisenv/lib/python3.6/site-packages/sorl/thumbnail/admin/__init__.py b/thesisenv/lib/python3.6/site-packages/sorl/thumbnail/admin/__init__.py new file mode 100644 index 0000000..cd51e2b --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/sorl/thumbnail/admin/__init__.py @@ -0,0 +1,4 @@ +from django.forms import ClearableFileInput +from .current import AdminImageMixin + +AdminInlineImageMixin = AdminImageMixin # backwards compatibility diff --git a/thesisenv/lib/python3.6/site-packages/sorl/thumbnail/admin/current.py b/thesisenv/lib/python3.6/site-packages/sorl/thumbnail/admin/current.py new file mode 100644 index 
0000000..42d43f0 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/sorl/thumbnail/admin/current.py @@ -0,0 +1,64 @@ +from __future__ import unicode_literals +import logging + +from django import forms +from django.utils.safestring import mark_safe +from sorl.thumbnail.fields import ImageField +from sorl.thumbnail.shortcuts import get_thumbnail + + +logger = logging.getLogger(__name__) + + +class AdminImageWidget(forms.ClearableFileInput): + """ + An ImageField Widget for django.contrib.admin that shows a thumbnailed + image as well as a link to the current one if it hase one. + """ + + template_with_initial = ( + '%(clear_template)s
    ' + '' + ) + template_with_clear = '' + + def render(self, name, value, attrs=None, **kwargs): + output = super(AdminImageWidget, self).render(name, value, attrs, **kwargs) + if value and hasattr(value, 'url'): + ext = 'JPEG' + try: + aux_ext = str(value).split('.') + if aux_ext[len(aux_ext) - 1].lower() == 'png': + ext = 'PNG' + elif aux_ext[len(aux_ext) - 1].lower() == 'gif': + ext = 'GIF' + except Exception: + pass + try: + mini = get_thumbnail(value, 'x80', upscale=False, format=ext) + except Exception as e: + logger.warning("Unable to get the thumbnail", exc_info=e) + else: + try: + output = ( + '
    ' + '' + '%s
    ' + ) % (mini.width, value.url, mini.url, output) + except (AttributeError, TypeError): + pass + return mark_safe(output) + + +class AdminImageMixin(object): + """ + This is a mix-in for InlineModelAdmin subclasses to make ``ImageField`` + show nicer form widget + """ + + def formfield_for_dbfield(self, db_field, **kwargs): + if isinstance(db_field, ImageField): + return db_field.formfield(widget=AdminImageWidget) + sup = super(AdminImageMixin, self) + return sup.formfield_for_dbfield(db_field, **kwargs) diff --git a/thesisenv/lib/python3.6/site-packages/sorl/thumbnail/base.py b/thesisenv/lib/python3.6/site-packages/sorl/thumbnail/base.py new file mode 100644 index 0000000..c41197f --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/sorl/thumbnail/base.py @@ -0,0 +1,208 @@ +from __future__ import unicode_literals + +import logging +import os +import re + +from django.utils.six import string_types + +from sorl.thumbnail.conf import settings, defaults as default_settings +from sorl.thumbnail.helpers import tokey, serialize +from sorl.thumbnail.images import ImageFile, DummyImageFile +from sorl.thumbnail import default +from sorl.thumbnail.parsers import parse_geometry + + +logger = logging.getLogger(__name__) + +EXTENSIONS = { + 'JPEG': 'jpg', + 'PNG': 'png', + 'GIF': 'gif', + 'WEBP': 'webp', +} + + +class ThumbnailBackend(object): + """ + The main class for sorl-thumbnail, you can subclass this if you for example + want to change the way destination filename is generated. 
+ """ + + default_options = { + 'format': settings.THUMBNAIL_FORMAT, + 'quality': settings.THUMBNAIL_QUALITY, + 'colorspace': settings.THUMBNAIL_COLORSPACE, + 'upscale': settings.THUMBNAIL_UPSCALE, + 'crop': False, + 'cropbox': None, + 'rounded': None, + 'padding': settings.THUMBNAIL_PADDING, + 'padding_color': settings.THUMBNAIL_PADDING_COLOR, + } + + extra_options = ( + ('progressive', 'THUMBNAIL_PROGRESSIVE'), + ('orientation', 'THUMBNAIL_ORIENTATION'), + ('blur', 'THUMBNAIL_BLUR'), + ) + + def file_extension(self, source): + return os.path.splitext(source.name)[1].lower() + + def _get_format(self, source): + file_extension = self.file_extension(source) + + if file_extension == '.jpg' or file_extension == '.jpeg': + return 'JPEG' + elif file_extension == '.png': + return 'PNG' + elif file_extension == '.gif': + return 'GIF' + elif file_extension == '.webp': + return 'WEBP' + else: + from django.conf import settings + + return getattr(settings, 'THUMBNAIL_FORMAT', default_settings.THUMBNAIL_FORMAT) + + def get_thumbnail(self, file_, geometry_string, **options): + """ + Returns thumbnail as an ImageFile instance for file with geometry and + options given. First it will try to get it from the key value store, + secondly it will create it. + """ + logger.debug('Getting thumbnail for file [%s] at [%s]', file_, geometry_string) + + if file_: + source = ImageFile(file_) + else: + raise ValueError('falsey file_ argument in get_thumbnail()') + + # preserve image filetype + if settings.THUMBNAIL_PRESERVE_FORMAT: + options.setdefault('format', self._get_format(source)) + + for key, value in self.default_options.items(): + options.setdefault(key, value) + + # For the future I think it is better to add options only if they + # differ from the default settings as below. This will ensure the same + # filenames being generated for new options at default. 
+ for key, attr in self.extra_options: + value = getattr(settings, attr) + if value != getattr(default_settings, attr): + options.setdefault(key, value) + + name = self._get_thumbnail_filename(source, geometry_string, options) + thumbnail = ImageFile(name, default.storage) + cached = default.kvstore.get(thumbnail) + + if cached: + return cached + + # We have to check exists() because the Storage backend does not + # overwrite in some implementations. + if settings.THUMBNAIL_FORCE_OVERWRITE or not thumbnail.exists(): + try: + source_image = default.engine.get_image(source) + except IOError as e: + logger.exception(e) + if settings.THUMBNAIL_DUMMY: + return DummyImageFile(geometry_string) + else: + # if S3Storage says file doesn't exist remotely, don't try to + # create it and exit early. + # Will return working empty image type; 404'd image + logger.warning( + 'Remote file [%s] at [%s] does not exist', + file_, geometry_string, + ) + return thumbnail + + # We might as well set the size since we have the image in memory + image_info = default.engine.get_image_info(source_image) + options['image_info'] = image_info + size = default.engine.get_image_size(source_image) + source.set_size(size) + + try: + self._create_thumbnail(source_image, geometry_string, options, + thumbnail) + self._create_alternative_resolutions(source_image, geometry_string, + options, thumbnail.name) + finally: + default.engine.cleanup(source_image) + + # If the thumbnail exists we don't create it, the other option is + # to delete and write but this could lead to race conditions so I + # will just leave that out for now. + default.kvstore.get_or_set(source) + default.kvstore.set(thumbnail, source) + return thumbnail + + def delete(self, file_, delete_file=True): + """ + Deletes file_ references in Key Value store and optionally the file_ + it self. 
+ """ + image_file = ImageFile(file_) + if delete_file: + image_file.delete() + default.kvstore.delete(image_file) + + def _create_thumbnail(self, source_image, geometry_string, options, + thumbnail): + """ + Creates the thumbnail by using default.engine + """ + logger.debug('Creating thumbnail file [%s] at [%s] with [%s]', + thumbnail.name, geometry_string, options) + ratio = default.engine.get_image_ratio(source_image, options) + geometry = parse_geometry(geometry_string, ratio) + image = default.engine.create(source_image, geometry, options) + default.engine.write(image, options, thumbnail) + # It's much cheaper to set the size here + size = default.engine.get_image_size(image) + thumbnail.set_size(size) + + def _create_alternative_resolutions(self, source_image, geometry_string, + options, name): + """ + Creates the thumbnail by using default.engine with multiple output + sizes. Appends @x to the file name. + """ + ratio = default.engine.get_image_ratio(source_image, options) + geometry = parse_geometry(geometry_string, ratio) + file_name, dot_file_ext = os.path.splitext(name) + + for resolution in settings.THUMBNAIL_ALTERNATIVE_RESOLUTIONS: + resolution_geometry = (int(geometry[0] * resolution), int(geometry[1] * resolution)) + resolution_options = options.copy() + if 'crop' in options and isinstance(options['crop'], string_types): + crop = options['crop'].split(" ") + for i in range(len(crop)): + s = re.match("(\d+)px", crop[i]) + if s: + crop[i] = "%spx" % int(int(s.group(1)) * resolution) + resolution_options['crop'] = " ".join(crop) + + image = default.engine.create(source_image, resolution_geometry, options) + thumbnail_name = '%(file_name)s%(suffix)s%(file_ext)s' % { + 'file_name': file_name, + 'suffix': '@%sx' % resolution, + 'file_ext': dot_file_ext + } + thumbnail = ImageFile(thumbnail_name, default.storage) + default.engine.write(image, resolution_options, thumbnail) + size = default.engine.get_image_size(image) + thumbnail.set_size(size) + + def 
_get_thumbnail_filename(self, source, geometry_string, options): + """ + Computes the destination filename. + """ + key = tokey(source.key, geometry_string, serialize(options)) + # make some subdirs + path = '%s/%s/%s' % (key[:2], key[2:4], key) + return '%s%s.%s' % (settings.THUMBNAIL_PREFIX, path, EXTENSIONS[options['format']]) diff --git a/thesisenv/lib/python3.6/site-packages/sorl/thumbnail/compat.py b/thesisenv/lib/python3.6/site-packages/sorl/thumbnail/compat.py new file mode 100644 index 0000000..644e283 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/sorl/thumbnail/compat.py @@ -0,0 +1,76 @@ +from __future__ import unicode_literals + +import sys + +__all__ = [ + 'BufferIO', + 'urlopen', + 'urlparse', + 'quote', + 'quote_plus', + 'URLError', +] + +PythonVersion = sys.version_info[0] + +PY2 = PythonVersion == 2 +PY3 = PythonVersion == 3 + +# -- Python 2 and 3 + +if PY3: + from urllib.error import URLError + from urllib.request import Request + from urllib.request import urlopen as _urlopen + from urllib.parse import quote, quote_plus + + import urllib.parse as urlparse + + from io import BytesIO as BufferIO + + + def b(s): + return s.encode("latin-1") + + + def encode(value, charset='utf-8', errors='ignore'): + if isinstance(value, bytes): + return value + return value.encode(charset, errors) + + + def urlsplit(url): + return urlparse.urlsplit(url.decode('ascii', 'ignore')) + +elif PY2: + from urllib2 import URLError, Request + from urllib2 import urlopen as _urlopen + from urllib import quote, quote_plus + + import urlparse + + from cStringIO import StringIO as BufferIO + + urlsplit = urlparse.urlsplit + + + def b(s): + return s + + + def encode(value, charset='utf-8', errors='ignore'): + if isinstance(value, unicode): + return value.encode(charset, errors) + return unicode(value, errors=errors).encode(charset) + + +# -- Urlopen with a proper default user agent + +def urlopen(url): + from sorl.thumbnail.conf import settings + + req = Request( + 
url, + headers={'User-Agent': "python-urllib%s/0.6" % PythonVersion} + ) + return _urlopen(req, timeout=settings.THUMBNAIL_URL_TIMEOUT) diff --git a/thesisenv/lib/python3.6/site-packages/sorl/thumbnail/conf/__init__.py b/thesisenv/lib/python3.6/site-packages/sorl/thumbnail/conf/__init__.py new file mode 100644 index 0000000..59b71a0 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/sorl/thumbnail/conf/__init__.py @@ -0,0 +1,19 @@ +from django.conf import settings as user_settings +from django.utils.functional import LazyObject +from sorl.thumbnail.conf import defaults + + +class Settings(object): + pass + + +class LazySettings(LazyObject): + def _setup(self): + self._wrapped = Settings() + for obj in (defaults, user_settings): + for attr in dir(obj): + if attr == attr.upper(): + setattr(self, attr, getattr(obj, attr)) + + +settings = LazySettings() diff --git a/thesisenv/lib/python3.6/site-packages/sorl/thumbnail/conf/defaults.py b/thesisenv/lib/python3.6/site-packages/sorl/thumbnail/conf/defaults.py new file mode 100644 index 0000000..6a5aa20 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/sorl/thumbnail/conf/defaults.py @@ -0,0 +1,128 @@ +from __future__ import unicode_literals +from django.conf import settings + +# When True ThumbnailNode.render can raise errors +THUMBNAIL_DEBUG = False + +# Backend +THUMBNAIL_BACKEND = 'sorl.thumbnail.base.ThumbnailBackend' + +# Key-value store, ships with: +# sorl.thumbnail.kvstores.cached_db_kvstore.KVStore +# sorl.thumbnail.kvstores.redis_kvstore.KVStore +# Redis requires some more work, see docs +THUMBNAIL_KVSTORE = 'sorl.thumbnail.kvstores.cached_db_kvstore.KVStore' + +# Change this to something else for MSSQL +THUMBNAIL_KEY_DBCOLUMN = 'key' + +# Engine, ships with: +# sorl.thumbnail.engines.convert_engine.Engine +# sorl.thumbnail.engines.pil_engine.Engine +# sorl.thumbnail.engines.pgmagick_engine.Engine +# convert is preferred but requires imagemagick or graphicsmagick, se docs +THUMBNAIL_ENGINE = 
'sorl.thumbnail.engines.pil_engine.Engine' + +# Path to Imagemagick or Graphicsmagick ``convert`` and ``identify``. +THUMBNAIL_CONVERT = 'convert' +THUMBNAIL_IDENTIFY = 'identify' + +# Path to ``vipsthumbnail`` and ``vipsheader`` +THUMBNAIL_VIPSTHUMBNAIL = 'vipsthumbnail' +THUMBNAIL_VIPSHEADER = 'vipsheader' + +# Storage for the generated thumbnails +THUMBNAIL_STORAGE = settings.DEFAULT_FILE_STORAGE + +# Redis settings +THUMBNAIL_REDIS_DB = 0 +THUMBNAIL_REDIS_PASSWORD = '' +THUMBNAIL_REDIS_HOST = 'localhost' +THUMBNAIL_REDIS_PORT = 6379 +THUMBNAIL_REDIS_UNIX_SOCKET_PATH = None +THUMBNAIL_REDIS_SSL = False +THUMBNAIL_REDIS_TIMEOUT = 3600 * 24 * 365 * 10 # 10 years + +# DBM settings +THUMBNAIL_DBM_FILE = "thumbnail_kvstore" +THUMBNAIL_DBM_MODE = 0o644 + +# Cache timeout for ``cached_db`` store. You should probably keep this at +# maximum or ``0`` if your caching backend can handle that as infinate. +THUMBNAIL_CACHE_TIMEOUT = 3600 * 24 * 365 * 10 # 10 years + +# The cache configuration to use for storing thumbnail data +THUMBNAIL_CACHE = 'default' + +# Key prefix used by the key value store +THUMBNAIL_KEY_PREFIX = 'sorl-thumbnail' + +# Thumbnail filename prefix +THUMBNAIL_PREFIX = 'cache/' + +# Image format, common formats are: JPEG, PNG, GIF +# Make sure the backend can handle the format you specify +THUMBNAIL_FORMAT = 'JPEG' + +THUMBNAIL_PRESERVE_FORMAT = False + +# Colorspace, backends are required to implement: RGB, GRAY +# Setting this to None will keep the original colorspace. 
+THUMBNAIL_COLORSPACE = 'RGB' + +# Should we upscale images by default +THUMBNAIL_UPSCALE = True + +# Quality, 0-100 +THUMBNAIL_QUALITY = 95 + +# Gaussian blur radius +THUMBNAIL_BLUR = 0 + +# Adds padding around the image to match the requested size without cropping +THUMBNAIL_PADDING = False +THUMBNAIL_PADDING_COLOR = '#ffffff' + +# Save as progressive when saving as jpeg +THUMBNAIL_PROGRESSIVE = True + +# Orientate the thumbnail with respect to source EXIF orientation tag +THUMBNAIL_ORIENTATION = True + +# This means sorl.thumbnail will generate and serve a generated dummy image +# regardless of the thumbnail source content +THUMBNAIL_DUMMY = False + +# Thumbnail dummy (placeholder) source. Some you might try are: +# http://placekitten.com/%(width)s/%(height)s +# http://placekitten.com/g/%(width)s/%(height)s +# http://placehold.it/%(width)sx%(height)s +THUMBNAIL_DUMMY_SOURCE = 'http://dummyimage.com/%(width)sx%(height)s' + +# Sets the source image ratio for dummy generation of images with only width +# or height given +THUMBNAIL_DUMMY_RATIO = 1.5 + +# Enables creation of multiple-resolution (aka "Retina") images. +# We don't create retina images by default to optimize performance. +THUMBNAIL_ALTERNATIVE_RESOLUTIONS = [] + +# Lazy fill empty thumbnail like THUMBNAIL_DUMMY +THUMBNAIL_LAZY_FILL_EMPTY = False + +# Timeout, in seconds, to use when retrieving images with urllib2 +THUMBNAIL_URL_TIMEOUT = None + +# Default width when using filters for texts +THUMBNAIL_FILTER_WIDTH = 500 + +# Should we flatten images by default (fixes a lot of transparency issues with +# imagemagick) +THUMBNAIL_FLATTEN = False + +# Whenever we will check an existing thumbnail exists and avoid to overwrite or not. +# Set this to true if you have an slow .exists() implementation on your storage backend of choice. +THUMBNAIL_FORCE_OVERWRITE = False + +# Should we remove GET arguments from URLs? 
(suggested for Amazon S3 image urls) +THUMBNAIL_REMOVE_URL_ARGS = True diff --git a/thesisenv/lib/python3.6/site-packages/sorl/thumbnail/default.py b/thesisenv/lib/python3.6/site-packages/sorl/thumbnail/default.py new file mode 100644 index 0000000..fbbccc4 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/sorl/thumbnail/default.py @@ -0,0 +1,30 @@ +from django.utils.functional import LazyObject + +from sorl.thumbnail.conf import settings +from sorl.thumbnail.helpers import get_module_class + + +class Backend(LazyObject): + def _setup(self): + self._wrapped = get_module_class(settings.THUMBNAIL_BACKEND)() + + +class KVStore(LazyObject): + def _setup(self): + self._wrapped = get_module_class(settings.THUMBNAIL_KVSTORE)() + + +class Engine(LazyObject): + def _setup(self): + self._wrapped = get_module_class(settings.THUMBNAIL_ENGINE)() + + +class Storage(LazyObject): + def _setup(self): + self._wrapped = get_module_class(settings.THUMBNAIL_STORAGE)() + + +backend = Backend() +kvstore = KVStore() +engine = Engine() +storage = Storage() diff --git a/thesisenv/lib/python3.6/site-packages/sorl/thumbnail/engines/__init__.py b/thesisenv/lib/python3.6/site-packages/sorl/thumbnail/engines/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/thesisenv/lib/python3.6/site-packages/sorl/thumbnail/engines/base.py b/thesisenv/lib/python3.6/site-packages/sorl/thumbnail/engines/base.py new file mode 100644 index 0000000..a3698bb --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/sorl/thumbnail/engines/base.py @@ -0,0 +1,258 @@ +# coding=utf-8 +from __future__ import division + +from sorl.thumbnail.conf import settings +from sorl.thumbnail.helpers import toint +from sorl.thumbnail.parsers import parse_crop +from sorl.thumbnail.parsers import parse_cropbox + + +class EngineBase(object): + """ + ABC for Thumbnail engines, methods are static + """ + + def create(self, image, geometry, options): + """ + Processing conductor, returns the thumbnail as an image 
engine instance + """ + image = self.cropbox(image, geometry, options) + image = self.orientation(image, geometry, options) + image = self.colorspace(image, geometry, options) + image = self.remove_border(image, options) + image = self.scale(image, geometry, options) + image = self.crop(image, geometry, options) + image = self.rounded(image, geometry, options) + image = self.blur(image, geometry, options) + image = self.padding(image, geometry, options) + return image + + def cropbox(self, image, geometry, options): + """ + Wrapper for ``_cropbox`` + """ + cropbox = options['cropbox'] + if not cropbox: + return image + x, y, x2, y2 = parse_cropbox(cropbox) + return self._cropbox(image, x, y, x2, y2) + + def orientation(self, image, geometry, options): + """ + Wrapper for ``_orientation`` + """ + if options.get('orientation', settings.THUMBNAIL_ORIENTATION): + return self._orientation(image) + self.reoriented = True + return image + + def flip_dimensions(self, image, geometry=None, options=None): + options = options or {} + reoriented = hasattr(self, 'reoriented') + if options.get('orientation', settings.THUMBNAIL_ORIENTATION) and not reoriented: + return self._flip_dimensions(image) + return False + + def colorspace(self, image, geometry, options): + """ + Wrapper for ``_colorspace`` + """ + colorspace = options['colorspace'] + return self._colorspace(image, colorspace) + + def remove_border(self, image, options): + + if options.get('remove_border', False): + x_image, y_image = self.get_image_size(image) + image = self._remove_border(image, x_image, y_image) + + return image + + def _calculate_scaling_factor(self, x_image, y_image, geometry, options): + crop = options['crop'] + factors = (geometry[0] / x_image, geometry[1] / y_image) + return max(factors) if crop else min(factors) + + def scale(self, image, geometry, options): + """ + Wrapper for ``_scale`` + """ + upscale = options['upscale'] + x_image, y_image = map(float, self.get_image_size(image)) + factor = 
self._calculate_scaling_factor(x_image, y_image, geometry, options) + + if factor < 1 or upscale: + width = toint(x_image * factor) + height = toint(y_image * factor) + image = self._scale(image, width, height) + + return image + + def crop(self, image, geometry, options): + """ + Wrapper for ``_crop`` + """ + crop = options['crop'] + x_image, y_image = self.get_image_size(image) + + if not crop or crop == 'noop': + return image + elif crop == 'smart': + # Smart cropping is suitably different from regular cropping + # to warrent it's own function + return self._entropy_crop(image, geometry[0], geometry[1], x_image, y_image) + + # Handle any other crop option with the backend crop function. + geometry = (min(x_image, geometry[0]), min(y_image, geometry[1])) + x_offset, y_offset = parse_crop(crop, (x_image, y_image), geometry) + return self._crop(image, geometry[0], geometry[1], x_offset, y_offset) + + def rounded(self, image, geometry, options): + """ + Wrapper for ``_rounded`` + """ + r = options['rounded'] + if not r: + return image + return self._rounded(image, int(r)) + + def blur(self, image, geometry, options): + """ + Wrapper for ``_blur`` + """ + if options.get('blur'): + return self._blur(image, int(options.get('blur'))) + return image + + def padding(self, image, geometry, options): + """ + Wrapper for ``_padding`` + """ + if options.get('padding') and self.get_image_size(image) != geometry: + return self._padding(image, geometry, options) + return image + + def write(self, image, options, thumbnail): + """ + Wrapper for ``_write`` + """ + format_ = options['format'] + quality = options['quality'] + image_info = options.get('image_info', {}) + # additional non-default-value options: + progressive = options.get('progressive', settings.THUMBNAIL_PROGRESSIVE) + raw_data = self._get_raw_data( + image, format_, quality, + image_info=image_info, + progressive=progressive + ) + thumbnail.write(raw_data) + + def cleanup(self, image): + """Some backends need to 
manually cleanup after thumbnails are created""" + pass + + def get_image_ratio(self, image, options): + """ + Calculates the image ratio. If cropbox option is used, the ratio + may have changed. + """ + cropbox = options['cropbox'] + + if cropbox: + x, y, x2, y2 = parse_cropbox(cropbox) + x = x2 - x + y = y2 - y + else: + x, y = self.get_image_size(image) + + return float(x) / y + + def get_image_info(self, image): + """ + Returns metadata of an ImageFile instance + """ + return {} + + # Methods which engines need to implement + # The ``image`` argument refers to a backend image object + def get_image(self, source): + """ + Returns the backend image objects from an ImageFile instance + """ + raise NotImplementedError() + + def get_image_size(self, image): + """ + Returns the image width and height as a tuple + """ + raise NotImplementedError() + + def is_valid_image(self, raw_data): + """ + Checks if the supplied raw data is valid image data + """ + raise NotImplementedError() + + def _orientation(self, image): + """ + Read orientation exif data and orientate the image accordingly + """ + return image + + def _colorspace(self, image, colorspace): + """ + `Valid colorspaces + `_. 
+ Backends need to implement the following:: + + RGB, GRAY + """ + raise NotImplementedError() + + def _remove_border(self, image, image_width, image_height): + """ + Remove borders around images + """ + raise NotImplementedError() + + def _entropy_crop(self, image, geometry_width, geometry_height, image_width, image_height): + """ + Crop the image to the correct aspect ratio + by removing the lowest entropy parts + """ + raise NotImplementedError() + + def _scale(self, image, width, height): + """ + Does the resizing of the image + """ + raise NotImplementedError() + + def _crop(self, image, width, height, x_offset, y_offset): + """ + Crops the image + """ + raise NotImplementedError() + + def _get_raw_data(self, image, format_, quality, image_info=None, progressive=False): + """ + Gets raw data given the image, format and quality. This method is + called from :meth:`write` + """ + raise NotImplementedError() + + def _padding(self, image, geometry, options): + """ + Pads the image + """ + raise NotImplementedError() + + def _cropbox(self, image, x, y, x2, y2): + raise NotImplementedError() + + def _rounded(self, image, r): + raise NotImplementedError() + + def _blur(self, image, radius): + raise NotImplementedError() diff --git a/thesisenv/lib/python3.6/site-packages/sorl/thumbnail/engines/convert_engine.py b/thesisenv/lib/python3.6/site-packages/sorl/thumbnail/engines/convert_engine.py new file mode 100644 index 0000000..eb346e7 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/sorl/thumbnail/engines/convert_engine.py @@ -0,0 +1,194 @@ +from __future__ import unicode_literals, with_statement +import re +import os +import subprocess +import logging +from collections import OrderedDict + +from django.utils.encoding import smart_str +from django.core.files.temp import NamedTemporaryFile + +from sorl.thumbnail.base import EXTENSIONS +from sorl.thumbnail.compat import b +from sorl.thumbnail.conf import settings +from sorl.thumbnail.engines.base import 
EngineBase + +logger = logging.getLogger(__name__) + +size_re = re.compile(r'^(?:.+) (?:[A-Z]+) (?P\d+)x(?P\d+)') + + +class Engine(EngineBase): + """ + Image object is a dict with source path, options and size + """ + + def write(self, image, options, thumbnail): + """ + Writes the thumbnail image + """ + if options['format'] == 'JPEG' and options.get( + 'progressive', settings.THUMBNAIL_PROGRESSIVE): + image['options']['interlace'] = 'line' + + image['options']['quality'] = options['quality'] + + args = settings.THUMBNAIL_CONVERT.split(' ') + args.append(image['source'] + '[0]') + + for k in image['options']: + v = image['options'][k] + args.append('-%s' % k) + if v is not None: + args.append('%s' % v) + + flatten = "on" + if 'flatten' in options: + flatten = options['flatten'] + + if settings.THUMBNAIL_FLATTEN and not flatten == "off": + args.append('-flatten') + + suffix = '.%s' % EXTENSIONS[options['format']] + + with NamedTemporaryFile(suffix=suffix, mode='rb') as fp: + args.append(fp.name) + args = map(smart_str, args) + p = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE) + returncode = p.wait() + out, err = p.communicate() + + if returncode: + raise EngineError( + "The command %r exited with a non-zero exit code and printed this to stderr: %s" + % (args, err) + ) + elif err: + logger.error("Captured stderr: %s", err) + + thumbnail.write(fp.read()) + + def cleanup(self, image): + os.remove(image['source']) # we should not need this now + + def get_image(self, source): + """ + Returns the backend image objects from a ImageFile instance + """ + with NamedTemporaryFile(mode='wb', delete=False) as fp: + fp.write(source.read()) + return {'source': fp.name, 'options': OrderedDict(), 'size': None} + + def get_image_size(self, image): + """ + Returns the image width and height as a tuple + """ + if image['size'] is None: + args = settings.THUMBNAIL_IDENTIFY.split(' ') + args.append(image['source'] + '[0]') + p = subprocess.Popen(args, 
stdout=subprocess.PIPE, stderr=subprocess.PIPE) + p.wait() + m = size_re.match(str(p.stdout.read())) + image['size'] = int(m.group('x')), int(m.group('y')) + return image['size'] + + def is_valid_image(self, raw_data): + """ + This is not very good for imagemagick because it will say anything is + valid that it can use as input. + """ + with NamedTemporaryFile(mode='wb') as fp: + fp.write(raw_data) + fp.flush() + args = settings.THUMBNAIL_IDENTIFY.split(' ') + args.append(fp.name + '[0]') + p = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE) + retcode = p.wait() + return retcode == 0 + + def _orientation(self, image): + # return image + # XXX need to get the dimensions right after a transpose. + + if settings.THUMBNAIL_CONVERT.endswith('gm convert'): + args = settings.THUMBNAIL_IDENTIFY.split() + args.extend(['-format', '%[exif:orientation]', image['source'] + '[0]']) + p = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE) + p.wait() + result = p.stdout.read().strip() + if result and result != b('unknown'): + result = int(result) + options = image['options'] + if result == 2: + options['flop'] = None + elif result == 3: + options['rotate'] = '180' + elif result == 4: + options['flip'] = None + elif result == 5: + options['rotate'] = '90' + options['flop'] = None + elif result == 6: + options['rotate'] = '90' + elif result == 7: + options['rotate'] = '-90' + options['flop'] = None + elif result == 8: + options['rotate'] = '-90' + else: + # ImageMagick also corrects the orientation exif data for + # destination + image['options']['auto-orient'] = None + return image + + def _flip_dimensions(self, image): + if settings.THUMBNAIL_CONVERT.endswith('gm convert'): + args = settings.THUMBNAIL_IDENTIFY.split() + args.extend(['-format', '%[exif:orientation]', image['source'] + '[0]']) + p = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE) + p.wait() + result = p.stdout.read().strip() + return result and result 
!= 'unknown' and int(result) in [5, 6, 7, 8] + else: + return False + + def _colorspace(self, image, colorspace): + """ + `Valid colorspaces + `_. + Backends need to implement the following:: + + RGB, GRAY + """ + image['options']['colorspace'] = colorspace + return image + + def _crop(self, image, width, height, x_offset, y_offset): + """ + Crops the image + """ + image['options']['crop'] = '%sx%s+%s+%s' % (width, height, x_offset, y_offset) + image['size'] = (width, height) # update image size + return image + + def _scale(self, image, width, height): + """ + Does the resizing of the image + """ + image['options']['scale'] = '%sx%s!' % (width, height) + image['size'] = (width, height) # update image size + return image + + def _padding(self, image, geometry, options): + """ + Pads the image + """ + # The order is important. The gravity option should come before extent. + image['options']['background'] = options.get('padding_color') + image['options']['gravity'] = 'center' + image['options']['extent'] = '%sx%s' % (geometry[0], geometry[1]) + return image + + +class EngineError(Exception): + pass diff --git a/thesisenv/lib/python3.6/site-packages/sorl/thumbnail/engines/pgmagick_engine.py b/thesisenv/lib/python3.6/site-packages/sorl/thumbnail/engines/pgmagick_engine.py new file mode 100644 index 0000000..443dfaf --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/sorl/thumbnail/engines/pgmagick_engine.py @@ -0,0 +1,93 @@ +from __future__ import unicode_literals + +from pgmagick import Blob, Geometry, Image, ImageType +from pgmagick import InterlaceType, OrientationType +from sorl.thumbnail.engines.base import EngineBase + +try: + from pgmagick._pgmagick import get_blob_data +except ImportError: + from base64 import b64decode + + def get_blob_data(blob): + return b64decode(blob.base64()) + + +class Engine(EngineBase): + def get_image(self, source): + blob = Blob() + blob.update(source.read()) + return Image(blob) + + def get_image_size(self, image): + geometry 
= image.size() + return geometry.width(), geometry.height() + + def is_valid_image(self, raw_data): + blob = Blob() + blob.update(raw_data) + im = Image(blob) + return im.isValid() + + def _cropbox(self, image, x, y, x2, y2): + geometry = Geometry(x2 - x, y2 - y, x, y) + image.crop(geometry) + return image + + def _orientation(self, image): + orientation = image.orientation() + if orientation == OrientationType.TopRightOrientation: + image.flop() + elif orientation == OrientationType.BottomRightOrientation: + image.rotate(180) + elif orientation == OrientationType.BottomLeftOrientation: + image.flip() + elif orientation == OrientationType.LeftTopOrientation: + image.rotate(90) + image.flop() + elif orientation == OrientationType.RightTopOrientation: + image.rotate(90) + elif orientation == OrientationType.RightBottomOrientation: + image.rotate(-90) + image.flop() + elif orientation == OrientationType.LeftBottomOrientation: + image.rotate(-90) + image.orientation(OrientationType.TopLeftOrientation) + + return image + + def flip_dimensions(self, image): + return image.orientation() in [ + OrientationType.LeftTopOrientation, + OrientationType.RightTopOrientation, + OrientationType.RightBottomOrientation, + OrientationType.LeftBottomOrientation, + ] + + def _colorspace(self, image, colorspace): + if colorspace == 'RGB': + image.type(ImageType.TrueColorMatteType) + elif colorspace == 'GRAY': + image.type(ImageType.GrayscaleMatteType) + else: + return image + return image + + def _scale(self, image, width, height): + geometry = Geometry(width, height) + image.scale(geometry) + return image + + def _crop(self, image, width, height, x_offset, y_offset): + geometry = Geometry(width, height, x_offset, y_offset) + image.crop(geometry) + return image + + def _get_raw_data(self, image, format_, quality, image_info=None, progressive=False): + image.magick(format_.encode('utf8')) + image.quality(quality) + if format_ == 'JPEG' and progressive: + 
image.interlaceType(InterlaceType.LineInterlace) + blob = Blob() + image.write(blob) + return get_blob_data(blob) diff --git a/thesisenv/lib/python3.6/site-packages/sorl/thumbnail/engines/pil_engine.py b/thesisenv/lib/python3.6/site-packages/sorl/thumbnail/engines/pil_engine.py new file mode 100644 index 0000000..c2197ae --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/sorl/thumbnail/engines/pil_engine.py @@ -0,0 +1,270 @@ +from __future__ import unicode_literals, division + +import math +from sorl.thumbnail.engines.base import EngineBase +from sorl.thumbnail.compat import BufferIO + +try: + from PIL import Image, ImageFile, ImageDraw, ImageFilter +except ImportError: + import Image + import ImageFile + import ImageDraw + +EXIF_ORIENTATION = 0x0112 + + +def round_corner(radius, fill): + """Draw a round corner""" + corner = Image.new('L', (radius, radius), 0) # (0, 0, 0, 0)) + draw = ImageDraw.Draw(corner) + draw.pieslice((0, 0, radius * 2, radius * 2), 180, 270, fill=fill) + return corner + + +def round_rectangle(size, radius, fill): + """Draw a rounded rectangle""" + width, height = size + rectangle = Image.new('L', size, 255) # fill + corner = round_corner(radius, 255) # fill + rectangle.paste(corner, (0, 0)) + rectangle.paste(corner.rotate(90), + (0, height - radius)) # Rotate the corner and paste it + rectangle.paste(corner.rotate(180), (width - radius, height - radius)) + rectangle.paste(corner.rotate(270), (width - radius, 0)) + return rectangle + + +class GaussianBlur(ImageFilter.Filter): + name = "GaussianBlur" + + def __init__(self, radius=2): + self.radius = radius + + def filter(self, image): + return image.gaussian_blur(self.radius) + + +class Engine(EngineBase): + def get_image(self, source): + buffer = BufferIO(source.read()) + return Image.open(buffer) + + def get_image_size(self, image): + return image.size + + def get_image_info(self, image): + return image.info or {} + + def is_valid_image(self, raw_data): + buffer = BufferIO(raw_data) + 
try: + trial_image = Image.open(buffer) + trial_image.verify() + except Exception: + return False + return True + + def colorspace(self, image, geometry, options): + """ + Wrapper for ``_colorspace`` + """ + colorspace = options['colorspace'] + format = options['format'] + + return self._colorspace(image, colorspace, format) + + def _cropbox(self, image, x, y, x2, y2): + return image.crop((x, y, x2, y2)) + + def _orientation(self, image): + try: + exif = image._getexif() + except Exception: + exif = None + + if exif: + orientation = exif.get(EXIF_ORIENTATION) + + if orientation == 2: + image = image.transpose(Image.FLIP_LEFT_RIGHT) + elif orientation == 3: + image = image.rotate(180) + elif orientation == 4: + image = image.transpose(Image.FLIP_TOP_BOTTOM) + elif orientation == 5: + image = image.rotate(-90, expand=1).transpose(Image.FLIP_LEFT_RIGHT) + elif orientation == 6: + image = image.rotate(-90, expand=1) + elif orientation == 7: + image = image.rotate(90, expand=1).transpose(Image.FLIP_LEFT_RIGHT) + elif orientation == 8: + image = image.rotate(90, expand=1) + + return image + + def _flip_dimensions(self, image): + try: + exif = image._getexif() + except (AttributeError, IOError, KeyError, IndexError): + exif = None + + if exif: + orientation = exif.get(0x0112) + return orientation in [5, 6, 7, 8] + + return False + + def _colorspace(self, image, colorspace, format): + if colorspace == 'RGB': + # Pillow JPEG doesn't allow RGBA anymore. It was converted to RGB before. 
+ if image.mode == 'RGBA' and format != 'JPEG': + return image # RGBA is just RGB + Alpha + if image.mode == 'LA' or (image.mode == 'P' and 'transparency' in image.info): + newimage = image.convert('RGBA') + transparency = image.info.get('transparency') + if transparency is not None: + mask = image.convert('RGBA').split()[-1] + newimage.putalpha(mask) + return newimage + return image.convert('RGB') + if colorspace == 'GRAY': + return image.convert('L') + return image + + def _remove_border(self, image, image_width, image_height): + borders = { + 'top': lambda iy, dy, y: (dy, dy + y), + 'right': lambda ix, dx, x: (ix - dx - x, ix - dx), + 'bottom': lambda iy, dy, y: (iy - dy - y, iy - dy), + 'left': lambda ix, dx, x: (dx, dx + x), + } + + offset = {'top': 0, 'right': 0, 'bottom': 0, 'left': 0, } + + for border in ['top', 'bottom']: + # Don't remove too much, the image may just be plain + while offset[border] < image_height / 3.5: + slice_size = min(image_width / 20, 10) + y_range = borders[border](image_height, offset[border], slice_size) + section = image.crop((0, y_range[0], image_width, y_range[1])) + # If this section is below the threshold; remove it + if self._get_image_entropy(section) < 2.0: + offset[border] += slice_size + else: + break + + for border in ['left', 'right']: + while offset[border] < image_width / 3.5: + slice_size = min(image_height / 20, 10) + x_range = borders[border](image_width, offset[border], slice_size) + section = image.crop((x_range[0], 0, x_range[1], image_height)) + if self._get_image_entropy(section) < 2.0: + offset[border] += slice_size + else: + break + + return image.crop((offset['left'], offset['top'], image_width - offset['right'], + image_height - offset['bottom'])) + + # Credit to chrisopherhan https://github.com/christopherhan/pycrop + # This is just a slight rework of pycrops implimentation + def _entropy_crop(self, image, geometry_width, geometry_height, image_width, image_height): + geometry_ratio = geometry_width / 
geometry_height + + # The is proportionally wider than it should be + while image_width / image_height > geometry_ratio: + + slice_width = max(image_width - geometry_width, 10) + + right = image.crop((image_width - slice_width, 0, image_width, image_height)) + left = image.crop((0, 0, slice_width, image_height)) + + if self._get_image_entropy(left) < self._get_image_entropy(right): + image = image.crop((slice_width, 0, image_width, image_height)) + else: + image = image.crop((0, 0, image_height - slice_width, image_height)) + + image_width -= slice_width + + # The image is proportionally taller than it should be + while image_width / image_height < geometry_ratio: + + slice_height = min(image_height - geometry_height, 10) + + bottom = image.crop((0, image_height - slice_height, image_width, image_height)) + top = image.crop((0, 0, image_width, slice_height)) + + if self._get_image_entropy(bottom) < self._get_image_entropy(top): + image = image.crop((0, 0, image_width, image_height - slice_height)) + else: + image = image.crop((0, slice_height, image_width, image_height)) + + image_height -= slice_height + + return image + + def _scale(self, image, width, height): + return image.resize((width, height), resample=Image.ANTIALIAS) + + def _crop(self, image, width, height, x_offset, y_offset): + return image.crop((x_offset, y_offset, + width + x_offset, height + y_offset)) + + def _rounded(self, image, r): + i = round_rectangle(image.size, r, "notusedblack") + image.putalpha(i) + return image + + def _blur(self, image, radius): + return image.filter(GaussianBlur(radius)) + + def _padding(self, image, geometry, options): + x_image, y_image = self.get_image_size(image) + left = int((geometry[0] - x_image) / 2) + top = int((geometry[1] - y_image) / 2) + color = options.get('padding_color') + im = Image.new(image.mode, geometry, color) + im.paste(image, (left, top)) + return im + + def _get_raw_data(self, image, format_, quality, image_info=None, progressive=False): + # 
Increase (but never decrease) PIL buffer size + ImageFile.MAXBLOCK = max(ImageFile.MAXBLOCK, image.size[0] * image.size[1]) + bf = BufferIO() + + params = { + 'format': format_, + 'quality': quality, + 'optimize': 1, + } + + # keeps icc_profile + if 'icc_profile' in image_info: + params['icc_profile'] = image_info['icc_profile'] + + raw_data = None + + if format_ == 'JPEG' and progressive: + params['progressive'] = True + try: + # Do not save unnecessary exif data for smaller thumbnail size + params.pop('exif', {}) + image.save(bf, **params) + except (IOError, OSError): + # Try without optimization. + params.pop('optimize') + image.save(bf, **params) + else: + raw_data = bf.getvalue() + finally: + bf.close() + + return raw_data + + def _get_image_entropy(self, image): + """calculate the entropy of an image""" + hist = image.histogram() + hist_size = sum(hist) + hist = [float(h) / hist_size for h in hist] + return -sum([p * math.log(p, 2) for p in hist if p != 0]) diff --git a/thesisenv/lib/python3.6/site-packages/sorl/thumbnail/engines/vipsthumbnail_engine.py b/thesisenv/lib/python3.6/site-packages/sorl/thumbnail/engines/vipsthumbnail_engine.py new file mode 100644 index 0000000..9ad8ccd --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/sorl/thumbnail/engines/vipsthumbnail_engine.py @@ -0,0 +1,122 @@ +from __future__ import unicode_literals, with_statement +import re +import os +import subprocess +from collections import OrderedDict + +from django.utils.encoding import smart_str +from django.core.files.temp import NamedTemporaryFile + +from sorl.thumbnail.base import EXTENSIONS +from sorl.thumbnail.conf import settings +from sorl.thumbnail.engines.base import EngineBase + + +size_re = re.compile(r'^(?:.+) (?P\d+)x(?P\d+)') + + +class Engine(EngineBase): + """ + Image object is a dict with source path, options and size + """ + + def write(self, image, options, thumbnail): + """ + Writes the thumbnail image + """ + + args = 
settings.THUMBNAIL_VIPSTHUMBNAIL.split(' ') + args.append(image['source']) + + for k in image['options']: + v = image['options'][k] + args.append('--%s' % k) + if v is not None: + args.append('%s' % v) + + suffix = '.%s' % EXTENSIONS[options['format']] + + write_options = [] + if options['format'] == 'JPEG' and options.get( + 'progressive', settings.THUMBNAIL_PROGRESSIVE): + write_options.append("interlace") + + if options['quality']: + if options['format'] == 'JPEG': + write_options.append("Q=%d" % options['quality']) + + with NamedTemporaryFile(suffix=suffix, mode='rb') as fp: + # older vipsthumbails used -o, this was renamed to -f in 8.0, use + # -o here for commpatibility + args.append("-o") + args.append(fp.name + "[%s]" % ",".join(write_options)) + + args = map(smart_str, args) + p = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE) + p.wait() + out, err = p.communicate() + + if err: + raise Exception(err) + + thumbnail.write(fp.read()) + + def cleanup(self, image): + os.remove(image['source']) # we should not need this now + + def get_image(self, source): + """ + Returns the backend image objects from a ImageFile instance + """ + with NamedTemporaryFile(mode='wb', delete=False) as fp: + fp.write(source.read()) + return {'source': fp.name, 'options': OrderedDict(), 'size': None} + + def get_image_size(self, image): + """ + Returns the image width and height as a tuple + """ + if image['size'] is None: + args = settings.THUMBNAIL_VIPSHEADER.split(' ') + args.append(image['source']) + p = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE) + p.wait() + m = size_re.match(str(p.stdout.read())) + image['size'] = int(m.group('x')), int(m.group('y')) + return image['size'] + + def is_valid_image(self, raw_data): + """ + vipsheader will try a lot of formats, including all those supported by + imagemagick if compiled with magick support, this can take a while + """ + with NamedTemporaryFile(mode='wb') as fp: + fp.write(raw_data) 
+ fp.flush() + args = settings.THUMBNAIL_VIPSHEADER.split(' ') + args.append(fp.name) + p = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE) + retcode = p.wait() + return retcode == 0 + + def _orientation(self, image): + # vipsthumbnail also corrects the orientation exif data for + # destination + image['options']['rotate'] = None + + return image + + def _colorspace(self, image, colorspace): + """ + vipsthumbnail does not support greyscaling of images, but pretend it + does + """ + return image + + def _scale(self, image, width, height): + """ + Does the resizing of the image + """ + image['options']['size'] = '%sx%s' % (width, height) + image['size'] = (width, height) # update image size + return image diff --git a/thesisenv/lib/python3.6/site-packages/sorl/thumbnail/engines/wand_engine.py b/thesisenv/lib/python3.6/site-packages/sorl/thumbnail/engines/wand_engine.py new file mode 100644 index 0000000..e59b7f9 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/sorl/thumbnail/engines/wand_engine.py @@ -0,0 +1,83 @@ +''' +Wand (>=v0.3.0) engine for Sorl-thumbnail +''' +from __future__ import unicode_literals + +from wand.image import Image +from wand import exceptions +from sorl.thumbnail.engines.base import EngineBase + + +class Engine(EngineBase): + def get_image(self, source): + return Image(blob=source.read()) + + def get_image_size(self, image): + return image.size + + def is_valid_image(self, raw_data): + ''' + Wand library makes sure when opening any image that is fine, when + the image is corrupted raises an exception. 
+ ''' + + try: + Image(blob=raw_data) + return True + except (exceptions.CorruptImageError, exceptions.MissingDelegateError): + return False + + def _orientation(self, image): + orientation = image.orientation + if orientation == 'top_right': + image.flop() + elif orientation == 'bottom_right': + image.rotate(degree=180) + elif orientation == 'bottom_left': + image.flip() + elif orientation == 'left_top': + image.rotate(degree=90) + image.flop() + elif orientation == 'right_top': + image.rotate(degree=90) + elif orientation == 'right_bottom': + image.rotate(degree=-90) + image.flop() + elif orientation == 'left_bottom': + image.rotate(degree=-90) + image.orientation = 'top_left' + return image + + def _flip_dimensions(self, image): + return image.orientation in ['left_top', 'right_top', 'right_bottom', 'left_bottom'] + + def _colorspace(self, image, colorspace): + if colorspace == 'RGB': + if image.alpha_channel: + image.type = 'truecolormatte' + else: + image.type = 'truecolor' + elif colorspace == 'GRAY': + if image.alpha_channel: + image.type = 'grayscalematte' + else: + image.type = 'grayscale' + else: + return image + return image + + def _scale(self, image, width, height): + image.resize(width, height) + return image + + def _crop(self, image, width, height, x_offset, y_offset): + image.crop(x_offset, y_offset, width=width, height=height) + return image + + def _get_raw_data(self, image, format_, quality, image_info=None, progressive=False): + image.compression_quality = quality + if format_ == 'JPEG' and progressive: + image.format = 'pjpeg' + else: + image.format = format_ + return image.make_blob() diff --git a/thesisenv/lib/python3.6/site-packages/sorl/thumbnail/fields.py b/thesisenv/lib/python3.6/site-packages/sorl/thumbnail/fields.py new file mode 100644 index 0000000..8c39452 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/sorl/thumbnail/fields.py @@ -0,0 +1,72 @@ +from __future__ import with_statement, unicode_literals + +from django.db 
import models +from django.db.models import Q +from django import forms +from django.utils.translation import ugettext_lazy as _ + +from sorl.thumbnail import default + + +__all__ = ('ImageField', 'ImageFormField') + + +class ImageField(models.ImageField): + def delete_file(self, instance, sender, **kwargs): + """ + Adds deletion of thumbnails and key value store references to the + parent class implementation. Only called in Django < 1.2.5 + """ + file_ = getattr(instance, self.attname) + + # If no other object of this type references the file, and it's not the + # default value for future objects, delete it from the backend. + query = Q(**{self.name: file_.name}) & ~Q(pk=instance.pk) + qs = sender._default_manager.filter(query) + + if (file_ and file_.name != self.default and not qs): + default.backend.delete(file_) + elif file_: + # Otherwise, just close the file, so it doesn't tie up resources. + file_.close() + + def formfield(self, **kwargs): + defaults = {'form_class': ImageFormField} + defaults.update(kwargs) + return super(ImageField, self).formfield(**defaults) + + def save_form_data(self, instance, data): + if data is not None: + setattr(instance, self.name, data or '') + + +class ImageFormField(forms.FileField): + default_error_messages = { + 'invalid_image': _("Upload a valid image. The file you uploaded was " + "either not an image or a corrupted image."), + } + + def to_python(self, data): + """ + Checks that the file-upload field data contains a valid image (GIF, + JPG, PNG, possibly others -- whatever the engine supports). + """ + f = super(ImageFormField, self).to_python(data) + if f is None: + return None + + # We need to get a file raw data to validate it. 
+ if hasattr(data, 'temporary_file_path'): + with open(data.temporary_file_path(), 'rb') as fp: + raw_data = fp.read() + elif hasattr(data, 'read'): + raw_data = data.read() + else: + raw_data = data['content'] + + if not default.engine.is_valid_image(raw_data): + raise forms.ValidationError(self.default_error_messages['invalid_image']) + if hasattr(f, 'seek') and callable(f.seek): + f.seek(0) + + return f diff --git a/thesisenv/lib/python3.6/site-packages/sorl/thumbnail/helpers.py b/thesisenv/lib/python3.6/site-packages/sorl/thumbnail/helpers.py new file mode 100644 index 0000000..1fd13f0 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/sorl/thumbnail/helpers.py @@ -0,0 +1,73 @@ +from __future__ import unicode_literals + +import hashlib +import json +import math +from importlib import import_module + +from django.core.exceptions import ImproperlyConfigured +from django.utils.encoding import force_text +from sorl.thumbnail.compat import encode + + +class ThumbnailError(Exception): + pass + + +class SortedJSONEncoder(json.JSONEncoder): + """ + A json encoder that sorts the dict keys + """ + + def __init__(self, **kwargs): + kwargs['sort_keys'] = True + super(SortedJSONEncoder, self).__init__(**kwargs) + + +def toint(number): + """ + Helper to return rounded int for a float or just the int it self. + """ + if isinstance(number, float): + if number > 1: + number = round(number, 0) + else: + # The following solves when image has small dimensions (like 1x54) + # then scale factor 1 * 0.296296 and `number` will store `0` + # that will later raise ZeroDivisionError. + number = round(math.ceil(number), 0) + return int(number) + + +def tokey(*args): + """ + Computes a unique key from arguments given. 
+ """ + salt = '||'.join([force_text(arg) for arg in args]) + hash_ = hashlib.md5(encode(salt)) + return hash_.hexdigest() + + +def serialize(obj): + return json.dumps(obj, cls=SortedJSONEncoder) + + +def deserialize(s): + if isinstance(s, bytes): + return json.loads(s.decode('utf-8')) + return json.loads(s) + + +def get_module_class(class_path): + """ + imports and returns module class from ``path.to.module.Class`` + argument + """ + mod_name, cls_name = class_path.rsplit('.', 1) + + try: + mod = import_module(mod_name) + except ImportError as e: + raise ImproperlyConfigured(('Error importing module %s: "%s"' % (mod_name, e))) + + return getattr(mod, cls_name) diff --git a/thesisenv/lib/python3.6/site-packages/sorl/thumbnail/images.py b/thesisenv/lib/python3.6/site-packages/sorl/thumbnail/images.py new file mode 100644 index 0000000..6e5de84 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/sorl/thumbnail/images.py @@ -0,0 +1,262 @@ +# encoding=utf-8 + +from __future__ import unicode_literals, division +import json +import os +import re + +from django.core.files.base import File, ContentFile +from django.core.files.storage import Storage # , default_storage +from django.utils.encoding import force_text, python_2_unicode_compatible +from django.utils.functional import LazyObject, empty +from sorl.thumbnail import default +from sorl.thumbnail.conf import settings +from sorl.thumbnail.compat import (urlopen, urlparse, urlsplit, + quote, quote_plus, URLError, encode) +from sorl.thumbnail.default import storage as default_storage +from sorl.thumbnail.helpers import ThumbnailError, tokey, get_module_class, deserialize +from sorl.thumbnail.parsers import parse_geometry + +url_pat = re.compile(r'^(https?|ftp):\/\/') + + +def serialize_image_file(image_file): + if image_file.size is None: + raise ThumbnailError('Trying to serialize an ``ImageFile`` with a ' + '``None`` size.') + data = { + 'name': image_file.name, + 'storage': image_file.serialize_storage(), + 
'size': image_file.size, + } + return json.dumps(data) + + +def deserialize_image_file(s): + data = deserialize(s) + + class LazyStorage(LazyObject): + def _setup(self): + self._wrapped = get_module_class(data['storage'])() + + image_file = ImageFile(data['name'], LazyStorage()) + image_file.set_size(data['size']) + return image_file + + +class BaseImageFile(object): + size = [] + + def exists(self): + raise NotImplementedError() + + @property + def width(self): + return self.size[0] + + x = width + + @property + def height(self): + return self.size[1] + + y = height + + def is_portrait(self): + return self.y > self.x + + @property + def ratio(self): + return float(self.x) / float(self.y) + + @property + def url(self): + raise NotImplementedError() + + src = url + + +@python_2_unicode_compatible +class ImageFile(BaseImageFile): + _size = None + + def __init__(self, file_, storage=None): + if not file_: + raise ThumbnailError('File is empty.') + + # figure out name + if hasattr(file_, 'name'): + self.name = file_.name + else: + self.name = force_text(file_) + + # TODO: Add a customizable naming method as a signal + + # Remove query args from names. Fixes cache and signature arguments + # from third party services, like Amazon S3 and signature args. 
+ if settings.THUMBNAIL_REMOVE_URL_ARGS: + self.name = self.name.split('?')[0] + + # Support for relative protocol urls + if self.name.startswith('//'): + self.name = 'http:' + self.name + + # figure out storage + if storage is not None: + self.storage = storage + elif hasattr(file_, 'storage'): + self.storage = file_.storage + elif url_pat.match(self.name): + self.storage = UrlStorage() + else: + self.storage = default_storage + + if hasattr(self.storage, 'location'): + location = self.storage.location + if not self.storage.location.endswith("/"): + location += "/" + if self.name.startswith(location): + self.name = self.name[len(location):] + + def __str__(self): + return self.name + + def exists(self): + return self.storage.exists(self.name) + + def set_size(self, size=None): + # set the size if given + if size is not None: + pass + # Don't try to set the size the expensive way if it already has a + # value. + elif self._size is not None: + return + elif hasattr(self.storage, 'image_size'): + # Storage backends can implement ``image_size`` method that + # optimizes this. + size = self.storage.image_size(self.name) + else: + # This is the worst case scenario + image = default.engine.get_image(self) + size = default.engine.get_image_size(image) + if self.flip_dimensions(image): + size = list(size) + size.reverse() + self._size = list(size) + + def flip_dimensions(self, image): + """ + Do not manipulate image, but ask engine whether we'd be doing a 90deg + rotation at some point. 
+ """ + return default.engine.flip_dimensions(image) + + @property + def size(self): + return self._size + + @property + def url(self): + return self.storage.url(self.name) + + def read(self): + f = self.storage.open(self.name) + try: + return f.read() + finally: + f.close() + + def write(self, content): + if not isinstance(content, File): + content = ContentFile(content) + + self._size = None + self.name = self.storage.save(self.name, content) + + return self.name + + def delete(self): + return self.storage.delete(self.name) + + def serialize_storage(self): + if isinstance(self.storage, LazyObject): + # if storage is wrapped in a lazy object we need to get the real + # thing. + if self.storage._wrapped is empty: + self.storage._setup() + cls = self.storage._wrapped.__class__ + else: + cls = self.storage.__class__ + return '%s.%s' % (cls.__module__, cls.__name__) + + @property + def key(self): + return tokey(self.name, self.serialize_storage()) + + def serialize(self): + return serialize_image_file(self) + + +class DummyImageFile(BaseImageFile): + def __init__(self, geometry_string): + self.size = parse_geometry( + geometry_string, + settings.THUMBNAIL_DUMMY_RATIO, + ) + + def exists(self): + return True + + @property + def url(self): + return settings.THUMBNAIL_DUMMY_SOURCE % ( + {'width': self.x, 'height': self.y} + ) + + +class UrlStorage(Storage): + def normalize_url(self, url, charset='utf-8'): + url = encode(url, charset, 'ignore') + scheme, netloc, path, qs, anchor = urlsplit(url) + + path = quote(path, b'/%') + qs = quote_plus(qs, b':&%=') + + return urlparse.urlunsplit((scheme, netloc, path, qs, anchor)) + + def open(self, name, mode='rb'): + return urlopen(self.normalize_url(name)) + + def exists(self, name): + try: + self.open(name) + except URLError: + return False + return True + + def url(self, name): + return name + + def delete(self, name): + pass + + +def delete_all_thumbnails(): + storage = default.storage + path = settings.THUMBNAIL_PREFIX + + 
def walk(path): + dirs, files = storage.listdir(path) + for f in files: + storage.delete(os.path.join(path, f)) + for d in dirs: + directory = os.path.join(path, d) + walk(directory) + try: + full_path = storage.path(directory) + except Exception: + continue + os.rmdir(full_path) + + walk(path) diff --git a/thesisenv/lib/python3.6/site-packages/sorl/thumbnail/kvstores/__init__.py b/thesisenv/lib/python3.6/site-packages/sorl/thumbnail/kvstores/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/thesisenv/lib/python3.6/site-packages/sorl/thumbnail/kvstores/base.py b/thesisenv/lib/python3.6/site-packages/sorl/thumbnail/kvstores/base.py new file mode 100644 index 0000000..dad35e1 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/sorl/thumbnail/kvstores/base.py @@ -0,0 +1,207 @@ +from __future__ import unicode_literals +from sorl.thumbnail.conf import settings +from sorl.thumbnail.helpers import serialize, deserialize, ThumbnailError +from sorl.thumbnail.images import serialize_image_file, deserialize_image_file + + +def add_prefix(key, identity='image'): + """ + Adds prefixes to the key + """ + return '||'.join([settings.THUMBNAIL_KEY_PREFIX, identity, key]) + + +def del_prefix(key): + """ + Removes prefixes from the key + """ + return key.split('||')[-1] + + +class KVStoreBase(object): + def get(self, image_file): + """ + Gets the ``image_file`` from store. Returns ``None`` if not found. + """ + return self._get(image_file.key) + + def set(self, image_file, source=None): + """ + Updates store for the `image_file`. Makes sure the `image_file` has a + size set. + """ + image_file.set_size() # make sure its got a size + self._set(image_file.key, image_file) + if source is not None: + if not self.get(source): + # make sure the source is in kvstore + raise ThumbnailError('Cannot add thumbnails for source: `%s` ' + 'that is not in kvstore.' % source.name) + + # Update the list of thumbnails for source. 
+ thumbnails = self._get(source.key, identity='thumbnails') or [] + thumbnails = set(thumbnails) + thumbnails.add(image_file.key) + + self._set(source.key, list(thumbnails), identity='thumbnails') + + def get_or_set(self, image_file): + cached = self.get(image_file) + if cached is not None: + return cached + self.set(image_file) + return image_file + + def delete(self, image_file, delete_thumbnails=True): + """ + Deletes the reference to the ``image_file`` and deletes the references + to thumbnails as well as thumbnail files if ``delete_thumbnails`` is + `True``. Does not delete the ``image_file`` is self. + """ + if delete_thumbnails: + self.delete_thumbnails(image_file) + self._delete(image_file.key) + + def delete_thumbnails(self, image_file): + """ + Deletes references to thumbnails as well as thumbnail ``image_files``. + """ + thumbnail_keys = self._get(image_file.key, identity='thumbnails') + if thumbnail_keys: + # Delete all thumbnail keys from store and delete the + # thumbnail ImageFiles. + + for key in thumbnail_keys: + thumbnail = self._get(key) + if thumbnail: + self.delete(thumbnail, False) + thumbnail.delete() # delete the actual file + + # Delete the thumbnails key from store + self._delete(image_file.key, identity='thumbnails') + + def delete_all_thumbnail_files(self): + for key in self._find_keys(identity='thumbnails'): + thumbnail_keys = self._get(key, identity='thumbnails') + if thumbnail_keys: + for key in thumbnail_keys: + thumbnail = self._get(key) + if thumbnail: + thumbnail.delete() + + def cleanup(self): + """ + Cleans up the key value store. In detail: + 1. Deletes all key store references for image_files that do not exist + and all key references for its thumbnails *and* their image_files. + 2. 
Deletes or updates all invalid thumbnail keys + """ + for key in self._find_keys(identity='image'): + image_file = self._get(key) + + if image_file and not image_file.exists(): + self.delete(image_file) + + for key in self._find_keys(identity='thumbnails'): + # We do not need to check for file existence in here since we + # already did that above for all image references + image_file = self._get(key) + + if image_file: + # if there is an image_file then we check all of its thumbnails + # for existence + thumbnail_keys = self._get(key, identity='thumbnails') or [] + thumbnail_keys_set = set(thumbnail_keys) + + for thumbnail_key in thumbnail_keys: + if not self._get(thumbnail_key): + thumbnail_keys_set.remove(thumbnail_key) + + thumbnail_keys = list(thumbnail_keys_set) + + if thumbnail_keys: + self._set(key, thumbnail_keys, identity='thumbnails') + continue + + # if there is no image_file then this thumbnails key is just + # hangin' loose, If the thumbnail_keys ended up empty there is no + # reason for keeping it either + self._delete(key, identity='thumbnails') + + def clear(self): + """ + Brutely clears the key value store for keys with THUMBNAIL_KEY_PREFIX + prefix. Use this in emergency situations. Normally you would probably + want to use the ``cleanup`` method instead. 
+ """ + all_keys = self._find_keys_raw(settings.THUMBNAIL_KEY_PREFIX) + if all_keys: + self._delete_raw(*all_keys) + + def _get(self, key, identity='image'): + """ + Deserializing, prefix wrapper for _get_raw + """ + value = self._get_raw(add_prefix(key, identity)) + + if not value: + return None + + if identity == 'image': + return deserialize_image_file(value) + + return deserialize(value) + + def _set(self, key, value, identity='image'): + """ + Serializing, prefix wrapper for _set_raw + """ + if identity == 'image': + s = serialize_image_file(value) + else: + s = serialize(value) + self._set_raw(add_prefix(key, identity), s) + + def _delete(self, key, identity='image'): + """ + Prefix wrapper for _delete_raw + """ + self._delete_raw(add_prefix(key, identity)) + + def _find_keys(self, identity='image'): + """ + Finds and returns all keys for identity, + """ + prefix = add_prefix('', identity) + raw_keys = self._find_keys_raw(prefix) or [] + for raw_key in raw_keys: + yield del_prefix(raw_key) + + # + # Methods which key-value stores need to implement + # + def _get_raw(self, key): + """ + Gets the value from keystore, returns `None` if not found. + """ + raise NotImplementedError() + + def _set_raw(self, key, value): + """ + Sets value associated to key. Key is expected to be shorter than 200 + chars. Value is a ``unicode`` object with an unknown (reasonable) + length. + """ + raise NotImplementedError() + + def _delete_raw(self, *keys): + """ + Deletes the keys. Silent failure for missing keys. 
+ """ + raise NotImplementedError() + + def _find_keys_raw(self, prefix): + """ + Finds all keys with prefix + """ + raise NotImplementedError() diff --git a/thesisenv/lib/python3.6/site-packages/sorl/thumbnail/kvstores/cached_db_kvstore.py b/thesisenv/lib/python3.6/site-packages/sorl/thumbnail/kvstores/cached_db_kvstore.py new file mode 100644 index 0000000..5b595e2 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/sorl/thumbnail/kvstores/cached_db_kvstore.py @@ -0,0 +1,63 @@ +from django.core.cache import cache, caches, InvalidCacheBackendError +from sorl.thumbnail.kvstores.base import KVStoreBase +from sorl.thumbnail.conf import settings +from sorl.thumbnail.models import KVStore as KVStoreModel + + +class EMPTY_VALUE(object): + pass + + +class KVStore(KVStoreBase): + def __init__(self): + super(KVStore, self).__init__() + + @property + def cache(self): + try: + kv_cache = caches[settings.THUMBNAIL_CACHE] + except InvalidCacheBackendError: + kv_cache = cache + return kv_cache + + def clear(self, delete_thumbnails=False): + """ + We can clear the database more efficiently using the prefix here rather + than calling :meth:`_delete_raw`. 
+ """ + prefix = settings.THUMBNAIL_KEY_PREFIX + for key in self._find_keys_raw(prefix): + self.cache.delete(key) + KVStoreModel.objects.filter(key__startswith=prefix).delete() + if delete_thumbnails: + self.delete_all_thumbnail_files() + + def _get_raw(self, key): + value = self.cache.get(key) + if value is None: + try: + value = KVStoreModel.objects.get(key=key).value + except KVStoreModel.DoesNotExist: + # we set the cache to prevent further db lookups + value = EMPTY_VALUE + self.cache.set(key, value, settings.THUMBNAIL_CACHE_TIMEOUT) + if value == EMPTY_VALUE: + return None + return value + + def _set_raw(self, key, value): + kvstore_value, created = KVStoreModel.objects.get_or_create( + key=key, defaults={'value': value}) + if not created: + kvstore_value.value = value + kvstore_value.save() + self.cache.set(key, value, settings.THUMBNAIL_CACHE_TIMEOUT) + + def _delete_raw(self, *keys): + KVStoreModel.objects.filter(key__in=keys).delete() + for key in keys: + self.cache.delete(key) + + def _find_keys_raw(self, prefix): + qs = KVStoreModel.objects.filter(key__startswith=prefix) + return qs.values_list('key', flat=True) diff --git a/thesisenv/lib/python3.6/site-packages/sorl/thumbnail/kvstores/dbm_kvstore.py b/thesisenv/lib/python3.6/site-packages/sorl/thumbnail/kvstores/dbm_kvstore.py new file mode 100644 index 0000000..8a645b6 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/sorl/thumbnail/kvstores/dbm_kvstore.py @@ -0,0 +1,95 @@ +from __future__ import unicode_literals +import os + +from sorl.thumbnail.kvstores.base import KVStoreBase +from sorl.thumbnail.conf import settings + + +try: + import anydbm as dbm +except KeyError: + import dbm +except ImportError: + # Python 3, hopefully + import dbm + +# +# OS filesystem locking primitives. 
TODO: Test Windows versions +# +if os.name == 'nt': + import msvcrt + + def lock(f, readonly): + msvcrt.locking(f.fileno(), msvcrt.LK_LOCK, 1) + + def unlock(f): + msvcrt.locking(f.fileno(), msvcrt.LK_UNLCK, 1) +else: + import fcntl + + def lock(f, readonly): + fcntl.lockf(f.fileno(), fcntl.LOCK_SH if readonly else fcntl.LOCK_EX) + + def unlock(f): + fcntl.lockf(f.fileno(), fcntl.LOCK_UN) + + +class DBMContext(object): + """ + A context manager to access the key-value store in a concurrent-safe manner. + """ + __slots__ = ('filename', 'mode', 'readonly', 'lockfile', 'db') + + def __init__(self, filename, mode, readonly): + self.filename = filename + self.mode = mode + self.readonly = readonly + self.lockfile = open(filename + ".lock", 'w+b') + + def __enter__(self): + lock(self.lockfile, self.readonly) + self.db = dbm.open(self.filename, 'c', self.mode) + return self.db + + def __exit__(self, exval, extype, tb): + self.db.close() + unlock(self.lockfile) + self.lockfile.close() + + +class KVStore(KVStoreBase): + # Please note that all the coding effort is devoted to provide correct + # semantics, not performance. Therefore, use this store only in development + # environments. 
+ + def __init__(self): + super(KVStore, self).__init__() + self.filename = settings.THUMBNAIL_DBM_FILE + self.mode = settings.THUMBNAIL_DBM_MODE + + def _cast_key(self, key): + return key if isinstance(key, bytes) else key.encode('utf-8') + + def _get_raw(self, key): + with DBMContext(self.filename, self.mode, True) as db: + try: + return db[self._cast_key(key)] + except KeyError: + return None + + def _set_raw(self, key, value): + with DBMContext(self.filename, self.mode, False) as db: + db[self._cast_key(key)] = value + + def _delete_raw(self, *keys): + with DBMContext(self.filename, self.mode, False) as db: + for key in keys: + try: + del db[self._cast_key(key)] + except KeyError: + pass + + def _find_keys_raw(self, prefix): + with DBMContext(self.filename, self.mode, True) as db: + p = self._cast_key(prefix) + return [k.decode('utf-8') for k in db.keys() if k.startswith(p)] diff --git a/thesisenv/lib/python3.6/site-packages/sorl/thumbnail/kvstores/dynamodb_kvstore.py b/thesisenv/lib/python3.6/site-packages/sorl/thumbnail/kvstores/dynamodb_kvstore.py new file mode 100644 index 0000000..013f21c --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/sorl/thumbnail/kvstores/dynamodb_kvstore.py @@ -0,0 +1,38 @@ +from __future__ import unicode_literals + +from boto.dynamodb2.table import Table +import boto +from sorl.thumbnail.kvstores.base import KVStoreBase +from sorl.thumbnail.conf import settings + + +class KVStore(KVStoreBase): + def __init__(self): + super(KVStore, self).__init__() + region = settings.AWS_REGION_NAME + access_key = settings.AWS_ACCESS_KEY_ID + secret = settings.AWS_SECRET_ACCESS_KEY + conn = boto.dynamodb2.connect_to_region(region, aws_access_key_id=access_key, + aws_secret_access_key=secret) + self.table = Table(settings.THUMBNAIL_DYNAMODB_NAME, connection=conn) + + def _get_raw(self, key): + try: + return self.table.get_item(key=key)['value'] + except boto.dynamodb2.exceptions.ItemNotFound: + pass + + def _set_raw(self, key, value): + 
try: + item = self.table.get_item(key=key) + except boto.dynamodb2.exceptions.ItemNotFound: + item = self.table.new_item() + item['key'] = key + item['value'] = value + item.save(overwrite=True) + + def _delete_raw(self, *keys): + [self.table.delete_item(key=k) for k in keys] + + def _find_keys_raw(self, prefix): + return [i['key'] for i in self.table.scan(key__beginswith=prefix)] diff --git a/thesisenv/lib/python3.6/site-packages/sorl/thumbnail/kvstores/redis_kvstore.py b/thesisenv/lib/python3.6/site-packages/sorl/thumbnail/kvstores/redis_kvstore.py new file mode 100644 index 0000000..4322277 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/sorl/thumbnail/kvstores/redis_kvstore.py @@ -0,0 +1,37 @@ +from __future__ import unicode_literals + +import redis +from sorl.thumbnail.kvstores.base import KVStoreBase +from sorl.thumbnail.conf import settings + + +class KVStore(KVStoreBase): + def __init__(self): + super(KVStore, self).__init__() + + if hasattr(settings, 'THUMBNAIL_REDIS_URL'): + self.connection = redis.from_url(settings.THUMBNAIL_REDIS_URL) + else: + self.connection = redis.Redis( + host=settings.THUMBNAIL_REDIS_HOST, + port=settings.THUMBNAIL_REDIS_PORT, + db=settings.THUMBNAIL_REDIS_DB, + ssl=settings.THUMBNAIL_REDIS_SSL, + password=settings.THUMBNAIL_REDIS_PASSWORD, + unix_socket_path=settings.THUMBNAIL_REDIS_UNIX_SOCKET_PATH, + ) + + def _get_raw(self, key): + return self.connection.get(key) + + def _set_raw(self, key, value): + return self.connection.set( + key, value, ex=settings.THUMBNAIL_REDIS_TIMEOUT) + + def _delete_raw(self, *keys): + return self.connection.delete(*keys) + + def _find_keys_raw(self, prefix): + pattern = prefix + '*' + return list(map(lambda key: key.decode('utf-8'), + self.connection.keys(pattern=pattern))) diff --git a/thesisenv/lib/python3.6/site-packages/sorl/thumbnail/log.py b/thesisenv/lib/python3.6/site-packages/sorl/thumbnail/log.py new file mode 100644 index 0000000..06beeb1 --- /dev/null +++ 
b/thesisenv/lib/python3.6/site-packages/sorl/thumbnail/log.py @@ -0,0 +1,39 @@ +from __future__ import unicode_literals +import logging + +from django.core.mail.message import EmailMessage + +from sorl.thumbnail.conf import settings + + +class ThumbnailLogHandler(logging.Handler): + """ + An exception log handler for thumbnail errors. + """ + + def emit(self, record): + import traceback + + if not settings.ADMINS: + return + try: + # Hack to try to get request from context + request = record.exc_info[2].tb_frame.f_locals['context']['request'] + request_repr = repr(request) + request_path = request.path + except Exception: + request_repr = "Request unavailable" + request_path = 'Unknown URL' + if record.exc_info: + stack_trace = '\n'.join(traceback.format_exception(*record.exc_info)) + else: + stack_trace = 'No stack trace available' + message = "%s\n\n%s" % (stack_trace, request_repr) + msg = EmailMessage( + '[sorl-thumbnail] %s: %s' % (record.levelname, request_path), + message, + settings.SERVER_EMAIL, + [a[1] for a in settings.ADMINS], + connection=None + ) + msg.send(fail_silently=True) diff --git a/thesisenv/lib/python3.6/site-packages/sorl/thumbnail/management/__init__.py b/thesisenv/lib/python3.6/site-packages/sorl/thumbnail/management/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/thesisenv/lib/python3.6/site-packages/sorl/thumbnail/management/commands/__init__.py b/thesisenv/lib/python3.6/site-packages/sorl/thumbnail/management/commands/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/thesisenv/lib/python3.6/site-packages/sorl/thumbnail/management/commands/thumbnail.py b/thesisenv/lib/python3.6/site-packages/sorl/thumbnail/management/commands/thumbnail.py new file mode 100644 index 0000000..2218c2d --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/sorl/thumbnail/management/commands/thumbnail.py @@ -0,0 +1,77 @@ +# encoding=utf-8 + +from __future__ import unicode_literals, print_function + +import sys + 
+from django.core.management.base import LabelCommand, CommandError + +from sorl.thumbnail import default +from sorl.thumbnail.images import delete_all_thumbnails + + +class Command(LabelCommand): + help = ( + 'Handles thumbnails and key value store' + ) + missing_args_message = 'Enter one of [cleanup, clear clear_delete_referenced clear_delete_all]' + + def handle(self, *labels, **options): + verbosity = int(options.get('verbosity')) + + # Django 1.4 compatibility fix + stdout = options.get('stdout', None) + stdout = stdout if stdout else sys.stdout + + stderr = options.get('stderr', None) + stderr = stderr if stderr else sys.stderr + + if not labels: + print(self.print_help('thumbnail', ''), file=stderr) + sys.exit(1) + + if len(labels) != 1: + raise CommandError('`%s` is not a valid argument' % labels) + + label = labels[0] + + if label not in ['cleanup', 'clear', 'clear_delete_referenced', 'clear_delete_all']: + raise CommandError('`%s` unknown action' % label) + + if label == 'cleanup': + if verbosity >= 1: + print("Cleanup thumbnails", end=' ... ', file=stdout) + + default.kvstore.cleanup() + + if verbosity >= 1: + print("[Done]", file=stdout) + + return + + if label == 'clear_delete_referenced': + if verbosity >= 1: + print("Delete all thumbnail files referenced in " + + "Key Value Store", end=' ... ', file=stdout) + + default.kvstore.delete_all_thumbnail_files() + + if verbosity >= 1: + print('[Done]', file=stdout) + + if verbosity >= 1: + print("Clear the Key Value Store", end=' ... ', file=stdout) + + default.kvstore.clear() + + if verbosity >= 1: + print('[Done]', file=stdout) + + if label == 'clear_delete_all': + if verbosity >= 1: + print("Delete all thumbnail files in THUMBNAIL_PREFIX", end=' ... 
', file=stdout) + + delete_all_thumbnails() + + if verbosity >= 1: + print('[Done]', file=stdout) diff --git a/thesisenv/lib/python3.6/site-packages/sorl/thumbnail/migrations/0001_initial.py b/thesisenv/lib/python3.6/site-packages/sorl/thumbnail/migrations/0001_initial.py new file mode 100644 index 0000000..bc4a976 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/sorl/thumbnail/migrations/0001_initial.py @@ -0,0 +1,23 @@ +# -*- coding: utf-8 -*- +from __future__ import unicode_literals + +from django.db import models, migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ] + + operations = [ + migrations.CreateModel( + name='KVStore', + fields=[ + ('key', models.CharField(serialize=False, + db_column='key', + max_length=200, + primary_key=True)), + ('value', models.TextField()), + ], + ), + ] diff --git a/thesisenv/lib/python3.6/site-packages/sorl/thumbnail/migrations/__init__.py b/thesisenv/lib/python3.6/site-packages/sorl/thumbnail/migrations/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/thesisenv/lib/python3.6/site-packages/sorl/thumbnail/models.py b/thesisenv/lib/python3.6/site-packages/sorl/thumbnail/models.py new file mode 100644 index 0000000..c147091 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/sorl/thumbnail/models.py @@ -0,0 +1,16 @@ +from django.db import models +from django.utils.encoding import python_2_unicode_compatible + +from sorl.thumbnail.conf import settings + + +@python_2_unicode_compatible +class KVStore(models.Model): + key = models.CharField( + max_length=200, primary_key=True, + db_column=settings.THUMBNAIL_KEY_DBCOLUMN + ) + value = models.TextField() + + def __str__(self): + return self.key diff --git a/thesisenv/lib/python3.6/site-packages/sorl/thumbnail/parsers.py b/thesisenv/lib/python3.6/site-packages/sorl/thumbnail/parsers.py new file mode 100644 index 0000000..89538d0 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/sorl/thumbnail/parsers.py @@ -0,0 +1,106 
@@ +# coding=utf-8 +import re + +from django.utils import six + +from sorl.thumbnail.helpers import ThumbnailError, toint + + +bgpos_pat = re.compile(r'^(?P\d+)(?P%|px)$') +geometry_pat = re.compile(r'^(?P\d+)?(?:x(?P\d+))?$') + + +class ThumbnailParseError(ThumbnailError): + pass + + +def parse_geometry(geometry, ratio=None): + """ + Parses a geometry string syntax and returns a (width, height) tuple + """ + m = geometry_pat.match(geometry) + + def syntax_error(): + return ThumbnailParseError('Geometry does not have the correct ' + 'syntax: %s' % geometry) + + if not m: + raise syntax_error() + x = m.group('x') + y = m.group('y') + if x is None and y is None: + raise syntax_error() + if x is not None: + x = int(x) + if y is not None: + y = int(y) + # calculate x or y proportionally if not set but we need the image ratio + # for this + if ratio is not None: + ratio = float(ratio) + if x is None: + x = toint(y * ratio) + elif y is None: + y = toint(x / ratio) + return x, y + + +def parse_crop(crop, xy_image, xy_window): + """ + Returns x, y offsets for cropping. 
The window area should fit inside + image but it works out anyway + """ + + x_alias_percent = { + 'left': '0%', + 'center': '50%', + 'right': '100%', + } + y_alias_percent = { + 'top': '0%', + 'center': '50%', + 'bottom': '100%', + } + xy_crop = crop.split(' ') + + if len(xy_crop) == 1: + if crop in x_alias_percent: + x_crop = x_alias_percent[crop] + y_crop = '50%' + elif crop in y_alias_percent: + y_crop = y_alias_percent[crop] + x_crop = '50%' + else: + x_crop, y_crop = crop, crop + elif len(xy_crop) == 2: + x_crop, y_crop = xy_crop + x_crop = x_alias_percent.get(x_crop, x_crop) + y_crop = y_alias_percent.get(y_crop, y_crop) + else: + raise ThumbnailParseError('Unrecognized crop option: %s' % crop) + + def get_offset(crop, epsilon): + m = bgpos_pat.match(crop) + if not m: + raise ThumbnailParseError('Unrecognized crop option: %s' % crop) + value = int(m.group('value')) # we only take ints in the regexp + unit = m.group('unit') + if unit == '%': + value = epsilon * value / 100.0 + + # return ∈ [0, epsilon] + return int(max(0, min(value, epsilon))) + + offset_x = get_offset(x_crop, xy_image[0] - xy_window[0]) + offset_y = get_offset(y_crop, xy_image[1] - xy_window[1]) + return offset_x, offset_y + + +def parse_cropbox(cropbox): + """ + Returns x, y, x2, y2 tuple for cropping. 
+ """ + if isinstance(cropbox, six.text_type): + return tuple([int(x.strip()) for x in cropbox.split(',')]) + else: + return tuple(cropbox) diff --git a/thesisenv/lib/python3.6/site-packages/sorl/thumbnail/shortcuts.py b/thesisenv/lib/python3.6/site-packages/sorl/thumbnail/shortcuts.py new file mode 100644 index 0000000..f5c6562 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/sorl/thumbnail/shortcuts.py @@ -0,0 +1,15 @@ +from sorl.thumbnail import default + + +def get_thumbnail(file_, geometry_string, **options): + """ + A shortcut for the Backend ``get_thumbnail`` method + """ + return default.backend.get_thumbnail(file_, geometry_string, **options) + + +def delete(file_, delete_file=True): + """ + A shortcut for the Backend ``delete`` method + """ + return default.backend.delete(file_, delete_file) diff --git a/thesisenv/lib/python3.6/site-packages/sorl/thumbnail/templatetags/__init__.py b/thesisenv/lib/python3.6/site-packages/sorl/thumbnail/templatetags/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/thesisenv/lib/python3.6/site-packages/sorl/thumbnail/templatetags/sorl_thumbnail.py b/thesisenv/lib/python3.6/site-packages/sorl/thumbnail/templatetags/sorl_thumbnail.py new file mode 100644 index 0000000..381707d --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/sorl/thumbnail/templatetags/sorl_thumbnail.py @@ -0,0 +1,8 @@ +""" +This allows usage of sorl-thumbnail in templates +by {% load sorl_thumbnail %} instead of traditional +{% load thumbnail %}. It's specifically useful in projects +that do make use of multiple thumbnailer libraries (for +instance `easy-thumbnails` alongside `sorl-thumbnail`). 
+""" +from .thumbnail import * # noqa diff --git a/thesisenv/lib/python3.6/site-packages/sorl/thumbnail/templatetags/thumbnail.py b/thesisenv/lib/python3.6/site-packages/sorl/thumbnail/templatetags/thumbnail.py new file mode 100644 index 0000000..0bb70c2 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/sorl/thumbnail/templatetags/thumbnail.py @@ -0,0 +1,294 @@ +# encoding=utf-8 + +from __future__ import unicode_literals +import decimal +import logging +import sys +import re +import os +from functools import wraps + +from django.template import Library, Node, NodeList, TemplateSyntaxError +from django.utils.encoding import smart_str +from django.utils.six import text_type +from django.conf import settings + +from sorl.thumbnail.conf import settings as sorl_settings +from sorl.thumbnail import default +from sorl.thumbnail.images import ImageFile, DummyImageFile +from sorl.thumbnail.parsers import parse_geometry +from sorl.thumbnail.shortcuts import get_thumbnail + + +register = Library() +kw_pat = re.compile(r'^(?P[\w]+)=(?P.+)$') +logger = logging.getLogger('sorl.thumbnail') + + +def safe_filter(error_output=''): + """ + A safe filter decorator only raising errors when ``THUMBNAIL_DEBUG`` is + ``True`` otherwise returning ``error_output``. 
+ """ + + def inner(f): + @wraps(f) + def wrapper(*args, **kwargs): + try: + return f(*args, **kwargs) + except Exception as err: + if sorl_settings.THUMBNAIL_DEBUG: + raise + logger.error('Thumbnail filter failed: %s' % str(err), + exc_info=sys.exc_info()) + return error_output + + return wrapper + + return inner + + +class ThumbnailNodeBase(Node): + """ + A Node that renders safely + """ + nodelist_empty = NodeList() + + def render(self, context): + + try: + return self._render(context) + except Exception: + if sorl_settings.THUMBNAIL_DEBUG: + raise + + error_message = 'Thumbnail tag failed' + + if context.template.engine.debug: + try: + error_message_template = ( + "Thumbnail tag failed " + "in template {template_name}, error at: " + "{tag_text}" + ) + template_origin, (position_start, position_end) = self.source + template_text = template_origin.reload() + tag_text = template_text[position_start:position_end] + + error_message = error_message_template.format( + template_name=template_origin.name, + tag_text=tag_text, + ) + except Exception: + pass + + logger.exception(error_message) + + return self.nodelist_empty.render(context) + + def _render(self, context): + raise NotImplementedError() + + +class ThumbnailNode(ThumbnailNodeBase): + child_nodelists = ('nodelist_file', 'nodelist_empty') + error_msg = ('Syntax error. Expected: ``thumbnail source geometry ' + '[key1=val1 key2=val2...] 
as var``') + + def __init__(self, parser, token): + bits = token.split_contents() + self.file_ = parser.compile_filter(bits[1]) + self.geometry = parser.compile_filter(bits[2]) + self.options = [] + self.as_var = None + self.nodelist_file = None + + if bits[-2] == 'as': + options_bits = bits[3:-2] + else: + options_bits = bits[3:] + + for bit in options_bits: + m = kw_pat.match(bit) + if not m: + raise TemplateSyntaxError(self.error_msg) + key = smart_str(m.group('key')) + expr = parser.compile_filter(m.group('value')) + self.options.append((key, expr)) + + if bits[-2] == 'as': + self.as_var = bits[-1] + self.nodelist_file = parser.parse(('empty', 'endthumbnail',)) + if parser.next_token().contents == 'empty': + self.nodelist_empty = parser.parse(('endthumbnail',)) + parser.delete_first_token() + + def _render(self, context): + file_ = self.file_.resolve(context) + geometry = self.geometry.resolve(context) + options = {} + for key, expr in self.options: + noresolve = {'True': True, 'False': False, 'None': None} + value = noresolve.get(text_type(expr), expr.resolve(context)) + if key == 'options': + options.update(value) + else: + options[key] = value + + thumbnail = None + if file_: + thumbnail = get_thumbnail(file_, geometry, **options) + elif sorl_settings.THUMBNAIL_DUMMY: + thumbnail = DummyImageFile(geometry) + + if not thumbnail or (isinstance(thumbnail, DummyImageFile) and self.nodelist_empty): + if self.nodelist_empty: + return self.nodelist_empty.render(context) + else: + return '' + + if self.as_var: + context.push() + context[self.as_var] = thumbnail + output = self.nodelist_file.render(context) + context.pop() + else: + output = thumbnail.url + + return output + + def __repr__(self): + return "" + + def __iter__(self): + for node in self.nodelist_file: + yield node + for node in self.nodelist_empty: + yield node + + +@register.filter +def resolution(file_, resolution_string): + """ + A filter to return the URL for the provided resolution of the 
thumbnail. + """ + if sorl_settings.THUMBNAIL_DUMMY: + dummy_source = sorl_settings.THUMBNAIL_DUMMY_SOURCE + source = dummy_source.replace('%(width)s', '(?P[0-9]+)') + source = source.replace('%(height)s', '(?P[0-9]+)') + source = re.compile(source) + try: + resolution = decimal.Decimal(resolution_string.strip('x')) + info = source.match(file_).groupdict() + info = {dimension: int(int(size) * resolution) for (dimension, size) in info.items()} + return dummy_source % info + except (AttributeError, TypeError, KeyError): + # If we can't manipulate the dummy we shouldn't change it at all + return file_ + + filename, extension = os.path.splitext(file_) + return '%s@%s%s' % (filename, resolution_string, extension) + + +@register.tag +def thumbnail(parser, token): + return ThumbnailNode(parser, token) + + +@safe_filter(error_output=False) +@register.filter +def is_portrait(file_): + """ + A very handy filter to determine if an image is portrait or landscape. + """ + if sorl_settings.THUMBNAIL_DUMMY: + return sorl_settings.THUMBNAIL_DUMMY_RATIO < 1 + if not file_: + return False + image_file = default.kvstore.get_or_set(ImageFile(file_)) + return image_file.is_portrait() + + +@safe_filter(error_output='auto') +@register.filter +def margin(file_, geometry_string): + """ + Returns the calculated margin for an image and geometry + """ + + if not file_ or (sorl_settings.THUMBNAIL_DUMMY or isinstance(file_, DummyImageFile)): + return 'auto' + + margin = [0, 0, 0, 0] + + image_file = default.kvstore.get_or_set(ImageFile(file_)) + + x, y = parse_geometry(geometry_string, image_file.ratio) + ex = x - image_file.x + margin[3] = ex / 2 + margin[1] = ex / 2 + + if ex % 2: + margin[1] += 1 + + ey = y - image_file.y + margin[0] = ey / 2 + margin[2] = ey / 2 + + if ey % 2: + margin[2] += 1 + + return ' '.join(['%dpx' % n for n in margin]) + + +@safe_filter(error_output='auto') +@register.filter +def background_margin(file_, geometry_string): + """ + Returns the calculated margin for a 
background image and geometry + """ + if not file_ or sorl_settings.THUMBNAIL_DUMMY: + return 'auto' + + margin = [0, 0] + image_file = default.kvstore.get_or_set(ImageFile(file_)) + x, y = parse_geometry(geometry_string, image_file.ratio) + ex = x - image_file.x + margin[0] = ex / 2 + ey = y - image_file.y + margin[1] = ey / 2 + + return ' '.join(['%spx' % n for n in margin]) + + +def text_filter(regex_base, value): + """ + Helper method to regex replace images with captions in different markups + """ + regex = regex_base % { + 're_cap': '[a-zA-Z0-9\.\,:;/_ \(\)\-\!\?\"]+', + 're_img': '[a-zA-Z0-9\.:/_\-\% ]+' + } + images = re.findall(regex, value) + + for i in images: + image = i[1] + if image.startswith(settings.MEDIA_URL): + image = image[len(settings.MEDIA_URL):] + + im = get_thumbnail(image, str(sorl_settings.THUMBNAIL_FILTER_WIDTH)) + value = value.replace(i[1], im.url) + + return value + + +@safe_filter(error_output='auto') +@register.filter +def markdown_thumbnails(value): + return text_filter('!\[(%(re_cap)s)?\][ ]?\((%(re_img)s)\)', value) + + +@safe_filter(error_output='auto') +@register.filter +def html_thumbnails(value): + return text_filter('`_ + +Developers +========== + +|jazzband| + +This is a `Jazzband `_ project. By contributing you agree to +abide by the `Contributor Code of Conduct `_ +and follow the `guidelines `_. + +Feel free to create a new Pull request if you want to propose a new feature. +If you need development support or want to discuss with other developers +join us in the channel #sorl-thumnbnail at freenode.net or Gitter. + +For releases updates and more in deep development discussion use our mailing list +in Google Groups. + +- IRC Channel: irc://irc.freenode.net/#sorl-thumbnail + +- Mailing List: sorl-thumbnail@googlegroups.com https://groups.google.com/d/forum/sorl-thumbnail + +Tests +----- +The tests should run with tox and pytest. Running `tox` will run all tests for all environments. 
+However, it is possible to run a certain environment with `tox -e `, a list of all environments +can be found with `tox -l`. These tests require the dependencies of the different engines defined in +the documentation. It is possible to install these dependencies into a vagrant image with the +Vagrantfile in the repo. + +User Support +============ + +If you need help using sorl-thumbnail browse http://stackoverflow.com/questions/tagged/sorl-thumbnail +and posts your questions with the `sorl-thumbnail` tag. + + +How to Use +========== + +Get the code +------------ + +Getting the code for the latest stable release use 'pip'. :: + + $ pip install sorl-thumbnail + +Install in your project +----------------------- + +Then register 'sorl.thumbnail', in the 'INSTALLED_APPS' section of +your project's settings. :: + + INSTALLED_APPS = ( + 'django.contrib.auth', + 'django.contrib.admin', + 'django.contrib.sites', + 'django.contrib.comments', + 'django.contrib.sessions', + 'django.contrib.messages', + 'django.contrib.contenttypes', + + 'sorl.thumbnail', + ) + + +Templates Usage +--------------- + +All of the examples assume that you first load the thumbnail template tag in +your template.:: + + {% load thumbnail %} + + +A simple usage. 
:: + + {% thumbnail item.image "100x100" crop="center" as im %} + + {% endthumbnail %} + +See more examples in the section `Template examples`_ in the Documentation + +Model Usage +----------- + +Using the ImageField that automatically deletes references to itself in the key +value store and its thumbnail references and the thumbnail files when deleted.:: + + from django.db import models + from sorl.thumbnail import ImageField + + class Item(models.Model): + image = ImageField(upload_to='whatever') + +See more examples in the section `Model examples`_ in the Documentation + +Low level API +------------- + +You can use the 'get_thumbnail':: + + from sorl.thumbnail import get_thumbnail + from sorl.thumbnail import delete + + im = get_thumbnail(my_file, '100x100', crop='center', quality=99) + delete(my_file) + +See more examples in the section `Low level API examples`_ in the Documentation + +Using in combination with other thumbnalers +------------------------------------------- + +Alternatively, you load the templatetags by {% load sorl_thumbnail %} +instead of traditional {% load thumbnail %}. It's especially useful in +projects that do make use of multiple thumbnailer libraries that use the +same name (``thumbnail``) for the templatetag module:: + + {% load sorl_thumbnail %} + {% thumbnail item.image "100x100" crop="center" as im %} + + {% endthumbnail %} + +Frequently asked questions +========================== + +Is so slow in Amazon S3 ! +------------------------- + +Possible related to the implementation of your Amazon S3 Backend, see the `issue #351`_ +due the storage backend reviews if there is an existing thumbnail when tries to +generate the thumbnail that makes an extensive use of the S3 API + +A fast workaround if you are not willing to tweak your storage backend is to set:: + + THUMBNAIL_FORCE_OVERWRITE = True + +So it will avoid to overly query the S3 API. + + +.. 
|travis| image:: https://travis-ci.org/jazzband/sorl-thumbnail.svg?branch=master + :target: https://travis-ci.org/jazzband/sorl-thumbnail +.. |pypi| image:: https://img.shields.io/pypi/v/sorl-thumbnail.svg + :target: https://pypi.python.org/pypi/sorl-thumbnail + :alt: sorl-thumbnail on PyPI +.. |coveralls| image:: https://coveralls.io/repos/jazzband//sorl-thumbnail/badge.png?branch=master + :target: https://coveralls.io/r/jazzband//sorl-thumbnail?branch=master +.. |jazzband| image:: https://jazzband.co/static/img/jazzband.svg + :target: https://jazzband.co/ + :alt: Jazzband + +.. _`Pillow`: http://pillow.readthedocs.org/en/latest/ +.. _`ImageMagick`: http://www.imagemagick.org/script/index.php +.. _`PIL`: http://www.pythonware.com/products/pil/ +.. _`Wand`: http://docs.wand-py.org/ +.. _`pgmagick`: http://pgmagick.readthedocs.org/en/latest/ +.. _`vipsthumbnail`: http://www.vips.ecs.soton.ac.uk/index.php?title=VIPS + +.. _`Template examples`: http://sorl-thumbnail.readthedocs.org/en/latest/examples.html#template-examples +.. _`Model examples`: http://sorl-thumbnail.readthedocs.org/en/latest/examples.html#model-examples +.. _`Low level API examples`: http://sorl-thumbnail.readthedocs.org/en/latest/examples.html#low-level-api-examples +.. _`issue #351`: https://github.com/jazzband/sorl-thumbnail/issues/351 +.. 
_`Django supported versions policy`: https://www.djangoproject.com/download/#supported-versions + + diff --git a/thesisenv/lib/python3.6/site-packages/sorl_thumbnail-12.5.0.dist-info/RECORD b/thesisenv/lib/python3.6/site-packages/sorl_thumbnail-12.5.0.dist-info/RECORD new file mode 100644 index 0000000..c2c7c1c --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/sorl_thumbnail-12.5.0.dist-info/RECORD @@ -0,0 +1,79 @@ +sorl/__init__.py,sha256=xB8BSJYcPEX42KQFygh9HOud_AFsxdrFV6fpC76bULI,438 +sorl/__pycache__/__init__.cpython-36.pyc,, +sorl/thumbnail/__init__.py,sha256=Os6kkV2BsrqcpmC-75nyOQS4G5YV-TwhlhTf9elu0EY,134 +sorl/thumbnail/__pycache__/__init__.cpython-36.pyc,, +sorl/thumbnail/__pycache__/base.cpython-36.pyc,, +sorl/thumbnail/__pycache__/compat.cpython-36.pyc,, +sorl/thumbnail/__pycache__/default.cpython-36.pyc,, +sorl/thumbnail/__pycache__/fields.cpython-36.pyc,, +sorl/thumbnail/__pycache__/helpers.cpython-36.pyc,, +sorl/thumbnail/__pycache__/images.cpython-36.pyc,, +sorl/thumbnail/__pycache__/log.cpython-36.pyc,, +sorl/thumbnail/__pycache__/models.cpython-36.pyc,, +sorl/thumbnail/__pycache__/parsers.cpython-36.pyc,, +sorl/thumbnail/__pycache__/shortcuts.cpython-36.pyc,, +sorl/thumbnail/admin/__init__.py,sha256=k-FeIo0ohAMPm1Ac21qxyrIMiOz0ADzKYAB7gtHfOWA,148 +sorl/thumbnail/admin/__pycache__/__init__.cpython-36.pyc,, +sorl/thumbnail/admin/__pycache__/current.cpython-36.pyc,, +sorl/thumbnail/admin/current.py,sha256=_1aYOAagrRkxMZe7qXnSc-X59YrC8V-yCAqRR1Rq3gg,2314 +sorl/thumbnail/base.py,sha256=dJ38kNaXei3M-y5dhpixIm8cjOEl2jZFu82X6_axNuo,8167 +sorl/thumbnail/compat.py,sha256=dGV8-MNUAMSXpVWoJSFkSYlewb_hGe3Sq7Dw5FhsMhc,1605 +sorl/thumbnail/conf/__init__.py,sha256=a1JYB0qSxeTLm8jjD3mYqCMD1eYhiE5gSkr6J0gi8EM,473 +sorl/thumbnail/conf/__pycache__/__init__.cpython-36.pyc,, +sorl/thumbnail/conf/__pycache__/defaults.cpython-36.pyc,, +sorl/thumbnail/conf/defaults.py,sha256=PK0mbm-hRbpTkF2i8ON5mY7l4WAIjTDcw2N9CJXgvKY,4168 
+sorl/thumbnail/default.py,sha256=A00SY_5RJ_wHVY2aXlA7Ed7UrAEjA2c1lR-2e7UPVkg,707 +sorl/thumbnail/engines/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +sorl/thumbnail/engines/__pycache__/__init__.cpython-36.pyc,, +sorl/thumbnail/engines/__pycache__/base.cpython-36.pyc,, +sorl/thumbnail/engines/__pycache__/convert_engine.cpython-36.pyc,, +sorl/thumbnail/engines/__pycache__/pgmagick_engine.cpython-36.pyc,, +sorl/thumbnail/engines/__pycache__/pil_engine.cpython-36.pyc,, +sorl/thumbnail/engines/__pycache__/vipsthumbnail_engine.cpython-36.pyc,, +sorl/thumbnail/engines/__pycache__/wand_engine.cpython-36.pyc,, +sorl/thumbnail/engines/base.py,sha256=5Xr8Pa1LG6pTsKfdXV0zD_fHv7_npxzHsd2CMGfbPHg,8093 +sorl/thumbnail/engines/convert_engine.py,sha256=t3RKynm5A-OH59PUWhPSI4JLrZ2kJfKCtabjLL3Glmg,6805 +sorl/thumbnail/engines/pgmagick_engine.py,sha256=tNLonUQvC6RfL5p72GTTz_kICY3ba29yUjxmiW4HprE,3048 +sorl/thumbnail/engines/pil_engine.py,sha256=np0ISHUN4QdSW4ey05EbNzCXkhNgpGwlXkZSxea3lGU,9579 +sorl/thumbnail/engines/vipsthumbnail_engine.py,sha256=gq9J00Fu8Qs63xodt8uwyGJ7jxed3sBbggJHVUBewbU,3958 +sorl/thumbnail/engines/wand_engine.py,sha256=Zn6B5sgASk5NkCpeyDQKs2ars_RHaw3gZ4CvKgOKRjU,2586 +sorl/thumbnail/fields.py,sha256=_YAFGFFaerjxUjyojk2VZudPoh1f_nzercZ7Co5Ddpk,2490 +sorl/thumbnail/helpers.py,sha256=iU79T677GFp_xaVXo8Vnk5Xrja3zcv_2QodFJQbtfCM,1814 +sorl/thumbnail/images.py,sha256=Yg6qzWDiZ0vf3yh7pmw1I1PezQa7ay_Aof2UvMoH7D4,7152 +sorl/thumbnail/kvstores/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +sorl/thumbnail/kvstores/__pycache__/__init__.cpython-36.pyc,, +sorl/thumbnail/kvstores/__pycache__/base.cpython-36.pyc,, +sorl/thumbnail/kvstores/__pycache__/cached_db_kvstore.cpython-36.pyc,, +sorl/thumbnail/kvstores/__pycache__/dbm_kvstore.cpython-36.pyc,, +sorl/thumbnail/kvstores/__pycache__/dynamodb_kvstore.cpython-36.pyc,, +sorl/thumbnail/kvstores/__pycache__/redis_kvstore.cpython-36.pyc,, 
+sorl/thumbnail/kvstores/base.py,sha256=n9vHXiFrWI2Uuu_8jb0gbcdTKJVJBv4LYG0Ha4sPvko,7074 +sorl/thumbnail/kvstores/cached_db_kvstore.py,sha256=jLN1vzb3YPhBbeF3ozp5hwSMg2AJWljGyU1QCXKUZ2E,2118 +sorl/thumbnail/kvstores/dbm_kvstore.py,sha256=lpe-tdEl37qUV5XglMgewT_3jUkwQZ9P--UTaWPLC5s,2703 +sorl/thumbnail/kvstores/dynamodb_kvstore.py,sha256=680m_xOtTPeOEm71EFLEr-pMLXrVCiRp9xlNrD-gP4Q,1323 +sorl/thumbnail/kvstores/redis_kvstore.py,sha256=92KQfvKLo5Dj_VtyONdHrOX7vB0zuhzYHzK6AR4QSBE,1255 +sorl/thumbnail/log.py,sha256=JMbr4FD8CQJFCwjTFQcGOHTW3sS-v_XhTG7PHshVXN0,1226 +sorl/thumbnail/management/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +sorl/thumbnail/management/__pycache__/__init__.cpython-36.pyc,, +sorl/thumbnail/management/commands/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +sorl/thumbnail/management/commands/__pycache__/__init__.cpython-36.pyc,, +sorl/thumbnail/management/commands/__pycache__/thumbnail.cpython-36.pyc,, +sorl/thumbnail/management/commands/thumbnail.py,sha256=E1th7SM4rqHMa-0T6PKD7qxz0GBOtFuwOfBPyHhvMSY,2272 +sorl/thumbnail/migrations/0001_initial.py,sha256=1MzjYjO-zzurEaC42E91aiMxUqQLAuQAEJ_qoiIj8eI,589 +sorl/thumbnail/migrations/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +sorl/thumbnail/migrations/__pycache__/0001_initial.cpython-36.pyc,, +sorl/thumbnail/migrations/__pycache__/__init__.cpython-36.pyc,, +sorl/thumbnail/models.py,sha256=MdeDaF0OXyC2gLOxsP27VavN2W0yU2V9YQTygraFYkI,398 +sorl/thumbnail/parsers.py,sha256=1JvTZCNHlQbI23HQaoa3z13adGK4neoJW9VI9VugAZU,2886 +sorl/thumbnail/shortcuts.py,sha256=K-230CYUlKbZ4hK4lfG8CvqVzqsdO4lVllDYb94buVs,397 +sorl/thumbnail/templatetags/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +sorl/thumbnail/templatetags/__pycache__/__init__.cpython-36.pyc,, +sorl/thumbnail/templatetags/__pycache__/sorl_thumbnail.cpython-36.pyc,, +sorl/thumbnail/templatetags/__pycache__/thumbnail.cpython-36.pyc,, 
+sorl/thumbnail/templatetags/sorl_thumbnail.py,sha256=kux3Knmq1KdD63cE7eW9ph6l-DnNLAyqaAZjKOVcv3U,313 +sorl/thumbnail/templatetags/thumbnail.py,sha256=TD2UUg_bmbsvwKxff0bmerbLhrhbtZPmHIjABVsEnZA,8804 +sorl_thumbnail-12.5.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +sorl_thumbnail-12.5.0.dist-info/METADATA,sha256=nxne0obnfaQFgJOXZNwox3tXmZYMD3RRaJeZS-bn0-E,7548 +sorl_thumbnail-12.5.0.dist-info/RECORD,, +sorl_thumbnail-12.5.0.dist-info/WHEEL,sha256=gduuPyBvFJQSQ0zdyxF7k0zynDXbIbvg5ZBHoXum5uk,110 +sorl_thumbnail-12.5.0.dist-info/top_level.txt,sha256=meXlMOpco9UKC9MPDiyAyN74ijLMBh6gaKZYcP5EFMQ,5 diff --git a/thesisenv/lib/python3.6/site-packages/sorl_thumbnail-12.5.0.dist-info/WHEEL b/thesisenv/lib/python3.6/site-packages/sorl_thumbnail-12.5.0.dist-info/WHEEL new file mode 100644 index 0000000..1316c41 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/sorl_thumbnail-12.5.0.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.31.1) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/thesisenv/lib/python3.6/site-packages/sorl_thumbnail-12.5.0.dist-info/top_level.txt b/thesisenv/lib/python3.6/site-packages/sorl_thumbnail-12.5.0.dist-info/top_level.txt new file mode 100644 index 0000000..b3993a1 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/sorl_thumbnail-12.5.0.dist-info/top_level.txt @@ -0,0 +1 @@ +sorl diff --git a/thesisenv/lib/python3.6/site-packages/surlex-0.2.0-py3.6.egg-info/PKG-INFO b/thesisenv/lib/python3.6/site-packages/surlex-0.2.0-py3.6.egg-info/PKG-INFO new file mode 100644 index 0000000..aaa062f --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/surlex-0.2.0-py3.6.egg-info/PKG-INFO @@ -0,0 +1,15 @@ +Metadata-Version: 1.1 +Name: surlex +Version: 0.2.0 +Summary: Simple URL expression translator: alternative to regular expressions for URL pattern matching and data extraction. 
+Home-page: http://github.com/codysoyland/surlex/tree/master +Author: Cody Soyland +Author-email: codysoyland@gmail.com +License: BSD +Description: UNKNOWN +Platform: UNKNOWN +Classifier: Development Status :: 3 - Alpha +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: BSD License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python diff --git a/thesisenv/lib/python3.6/site-packages/surlex-0.2.0-py3.6.egg-info/SOURCES.txt b/thesisenv/lib/python3.6/site-packages/surlex-0.2.0-py3.6.egg-info/SOURCES.txt new file mode 100644 index 0000000..bffbf1b --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/surlex-0.2.0-py3.6.egg-info/SOURCES.txt @@ -0,0 +1,15 @@ +MANIFEST.in +README.rst +setup.cfg +setup.py +tests.py +scripts/surlex2regex.py +src/surlex/__init__.py +src/surlex/dj.py +src/surlex/exceptions.py +src/surlex/grammar.py +src/surlex/macros.py +surlex.egg-info/PKG-INFO +surlex.egg-info/SOURCES.txt +surlex.egg-info/dependency_links.txt +surlex.egg-info/top_level.txt \ No newline at end of file diff --git a/thesisenv/lib/python3.6/site-packages/surlex-0.2.0-py3.6.egg-info/dependency_links.txt b/thesisenv/lib/python3.6/site-packages/surlex-0.2.0-py3.6.egg-info/dependency_links.txt new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/surlex-0.2.0-py3.6.egg-info/dependency_links.txt @@ -0,0 +1 @@ + diff --git a/thesisenv/lib/python3.6/site-packages/surlex-0.2.0-py3.6.egg-info/installed-files.txt b/thesisenv/lib/python3.6/site-packages/surlex-0.2.0-py3.6.egg-info/installed-files.txt new file mode 100644 index 0000000..fd12b99 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/surlex-0.2.0-py3.6.egg-info/installed-files.txt @@ -0,0 +1,15 @@ +../../../../bin/surlex2regex.py +../surlex/__init__.py +../surlex/__pycache__/__init__.cpython-36.pyc +../surlex/__pycache__/dj.cpython-36.pyc +../surlex/__pycache__/exceptions.cpython-36.pyc 
+../surlex/__pycache__/grammar.cpython-36.pyc +../surlex/__pycache__/macros.cpython-36.pyc +../surlex/dj.py +../surlex/exceptions.py +../surlex/grammar.py +../surlex/macros.py +PKG-INFO +SOURCES.txt +dependency_links.txt +top_level.txt diff --git a/thesisenv/lib/python3.6/site-packages/surlex-0.2.0-py3.6.egg-info/top_level.txt b/thesisenv/lib/python3.6/site-packages/surlex-0.2.0-py3.6.egg-info/top_level.txt new file mode 100644 index 0000000..ba5b343 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/surlex-0.2.0-py3.6.egg-info/top_level.txt @@ -0,0 +1 @@ +surlex diff --git a/thesisenv/lib/python3.6/site-packages/surlex/__init__.py b/thesisenv/lib/python3.6/site-packages/surlex/__init__.py new file mode 100644 index 0000000..fa4ba04 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/surlex/__init__.py @@ -0,0 +1,56 @@ +from surlex.grammar import Parser, RegexScribe, get_all_nodes, MacroTagNode +from surlex.macros import MacroRegistry, DefaultMacroRegistry +import re + +class Surlex(object): + def __init__(self, surlex, macro_registry=DefaultMacroRegistry()): + self.translated = False + self.surlex = surlex + self.macro_registry = macro_registry + + def translate(self): + self.parser = Parser(self.surlex) + self.node_list = self.parser.get_node_list() + self.scribe = RegexScribe( + self.node_list, + self.macro_registry, + ) + self.regex = self.scribe.translate() + return self.regex + + @property + def groupmacros(self): + macros = {} + if not self.translated: + self.translate() + for node in get_all_nodes(self.node_list): + if isinstance(node, MacroTagNode): + macros[node.name] = node.macro + return macros + + @property + def to_regex(self): + if not self.translated: + self.translate() + return self.regex + + def match(self, subject): + m = re.match(self.to_regex, subject) + if m: + return m.groupdict() + +# This allows "surlex.register_macro" to register to the default registry +register_macro = DefaultMacroRegistry.register + +def 
surlex_to_regex(surlex): + parser = Parser(surlex) + scribe = RegexScribe(parser.get_node_list()) + return scribe.translate() + +def parsed_surlex_object(surlex): + object = Surlex(surlex) + object.translate() + return object + +def match(surlex, subject): + return Surlex(surlex).match(subject) diff --git a/thesisenv/lib/python3.6/site-packages/surlex/dj.py b/thesisenv/lib/python3.6/site-packages/surlex/dj.py new file mode 100644 index 0000000..6563613 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/surlex/dj.py @@ -0,0 +1,5 @@ +from django.conf.urls import url +from surlex import Surlex + +def surl(surlex, *args, **kwargs): + return url(Surlex(surlex).translate(), *args, **kwargs) diff --git a/thesisenv/lib/python3.6/site-packages/surlex/exceptions.py b/thesisenv/lib/python3.6/site-packages/surlex/exceptions.py new file mode 100644 index 0000000..0b76f80 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/surlex/exceptions.py @@ -0,0 +1,19 @@ +class SurlexException(Exception): + """ + a generic surlex exception + """ + pass + +class MalformedSurlex(SurlexException): + """ + surlex parser error -- when read_until does not find + the expected character it'll throw this + """ + pass + +class MacroDoesNotExist(SurlexException): + """ + surlex parser error -- when a macro cannot be resolved + this will be thrown + """ + pass diff --git a/thesisenv/lib/python3.6/site-packages/surlex/grammar.py b/thesisenv/lib/python3.6/site-packages/surlex/grammar.py new file mode 100644 index 0000000..f2de531 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/surlex/grammar.py @@ -0,0 +1,185 @@ +import re +from surlex.exceptions import MalformedSurlex +from surlex.macros import MacroRegistry, DefaultMacroRegistry + +# Define the next function for python 2 and 3 compatibility +try: + if next: + pass +except NameError: + def next(iterable): + return iterable.next() + +class Node(object): + pass + +class TextNode(Node): + def __init__(self, token): + self.token = 
token + + def __eq__(self, other): + return (self.__class__ == other.__class__ and + self.token == other.token) + + def __repr__(self): + return '' % self.token + +class WildcardNode(Node): + def __init__(self): + pass + def __eq__(self, other): + return self.__class__ == other.__class__ + + def __repr__(self): + return '' + +class BlockNode(Node): + def __init__(self, node_list): + self.node_list = node_list + + def __eq__(self, other): + return (self.__class__ == other.__class__ and + self.node_list == other.node_list) + +class OptionalNode(BlockNode): + def __repr__(self): + return '' % self.node_list + +class TagNode(Node): + def __init__(self, name): + self.name = name + + def __eq__(self, other): + return (self.__class__ == other.__class__ and + self.name == other.name) + + def __repr__(self): + return '' % self.name + +class RegexTagNode(TagNode): + def __init__(self, name, regex): + self.name = name + self.regex = regex + + def __eq__(self, other): + return (self.__class__ == other.__class__ and + self.name == other.name and + self.regex == other.regex) + + def __repr__(self): + return '' % (self.name, self.regex) + +class MacroTagNode(TagNode): + def __init__(self, name, macro): + self.name = name + self.macro = macro + + def __eq__(self, other): + return (self.__class__ == other.__class__ and + self.name == other.name and + self.macro == other.macro) + + def __repr__(self): + return '' % (self.name, self.macro) + +class Parser(object): + def __init__(self, surlex): + self.surlex = surlex + self.chars = iter(surlex) + + def get_node_list(self): + return list(self.parse(self.chars)) + + def read_until(self, chars, char): + try: + next_char = next(chars) + except StopIteration: + raise MalformedSurlex('Malformed surlex. Expected %s.' 
% char) + if next_char == char: + return '' + if next_char == '\\': + # only escape what we are looking for + escaped_char = next(chars) + if escaped_char == char: + return escaped_char + self.read_until(chars, char) + else: + return '\\' + escaped_char + self.read_until(chars, char) + else: + return next_char + self.read_until(chars, char) + + def parse(self, chars): + token = '' + for char in chars: + if char in '<*(': + if token: + yield TextNode(token) + token = '' + if char == '\\': + # escape with backslash + token += next(chars) + elif char == '<': + tag_content = self.read_until(chars, '>') + name = '' + regex = None + macro = None + for char in tag_content: + if char == '=': + name, regex = tag_content.split('=', 1) + break + if char == ':': + name, macro = tag_content.split(':', 1) + break + if regex: + yield RegexTagNode(name, regex) + elif macro: + yield MacroTagNode(name, macro) + else: + yield TagNode(tag_content) + elif char == '*': + # wildcard + yield WildcardNode() + elif char == '(': + yield OptionalNode(list(self.parse(chars))) + elif char == ')': + # end of node list, stop parsing + break + else: + # literal output + token += char + if token: + yield TextNode(token) + +class RegexScribe(object): + def __init__(self, node_list, macro_registry=DefaultMacroRegistry()): + self.node_list = node_list + self.macro_registry = macro_registry + + def translate(self): + output = '' + for node in self.node_list: + if isinstance(node, TextNode): + output += node.token.replace('.', '\.') + elif isinstance(node, WildcardNode): + output += '.*' + elif isinstance(node, OptionalNode): + output += '(' + RegexScribe(node.node_list).translate() + ')?' 
+ elif isinstance(node, TagNode): + if isinstance(node, MacroTagNode): + regex = self.macro_registry.get(node.macro) + elif isinstance(node, RegexTagNode): + regex = node.regex + else: + regex = '.+' + if node.name: + output += '(?P<%s>%s)' % (node.name, regex) + else: + output += regex + return output + +def get_all_nodes(node_list): + for node in node_list: + if isinstance(node, BlockNode): + for node in get_all_nodes(node.node_list): + yield node + else: + yield node diff --git a/thesisenv/lib/python3.6/site-packages/surlex/macros.py b/thesisenv/lib/python3.6/site-packages/surlex/macros.py new file mode 100644 index 0000000..f092791 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/surlex/macros.py @@ -0,0 +1,47 @@ +from surlex.exceptions import MacroDoesNotExist + +class MacroRegistry(object): + macros = {} + def __init__(self, macros={}): + all_macros = {} + all_macros.update(self.macros) + all_macros.update(macros) + self.macros = all_macros + + def get(self, macro_name): + try: + return self.macros[macro_name] + except KeyError: + raise MacroDoesNotExist('Macro "%s" not defined' % macro_name) + + def set(self, macro_name, regex): + self.macros[macro_name] = regex + +class DefaultMacroRegistry(MacroRegistry): + global_macros = {} + + def __init__(self): + super(DefaultMacroRegistry, self).__init__({ + 'Y': r'\d{4}', # year, including century + 'y': r'\d{2}', # year, not including century + 'M': r'(jan|feb|mar|apr|may|jun|jul|aug|sep|oct|nov|dec)', # month, abbreviated + 'm': r'(0?([1-9])|10|11|12)', # month, 1 or 2 digit + 'd': r'((0|1|2)?([1-9])|[1-3]0|31)', # day, 1 or 2 digit + '#': r'\d+', # number, any length + 's': r'[\w-]+', # slug + 'u': r'[a-fA-F0-9]{8}-?[a-fA-F0-9]{4}-?[a-fA-F0-9]{4}-?[a-fA-F0-9]{4}-?[a-fA-F0-9]{12}', # uuid + }) + + @classmethod + def register(cls, macro, regex): + cls.global_macros[macro] = regex + + def get(self, macro_name): + try: + return super(DefaultMacroRegistry, self).get(macro_name) + except MacroDoesNotExist: + 
try: + return self.__class__.global_macros[macro_name] + except KeyError: + raise MacroDoesNotExist('Macro "%s" not defined' % macro_name) + diff --git a/thesisenv/lib/python3.6/site-packages/terryfy/__init__.py b/thesisenv/lib/python3.6/site-packages/terryfy/__init__.py new file mode 100644 index 0000000..20cd07a --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/terryfy/__init__.py @@ -0,0 +1 @@ +# Init for terryfy waf helpers diff --git a/thesisenv/lib/python3.6/site-packages/terryfy/bdist_wheel.py b/thesisenv/lib/python3.6/site-packages/terryfy/bdist_wheel.py new file mode 100644 index 0000000..76052a7 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/terryfy/bdist_wheel.py @@ -0,0 +1,33 @@ +#!/usr/bin/env python +""" Script to run bdist_wheel after setuptools import +""" + +import sys, os + +import setuptools + +def main(): + del sys.argv[0] + if not sys.argv: + sys.argv[:0] = ['setup.py'] + elif sys.argv[0].startswith('-'): + sys.argv[:0] = ['setup.py'] + sys.argv.insert(1, 'bdist_wheel') + if os.path.isdir(sys.argv[0]): + sys.argv[0] = os.path.join(sys.argv[0], 'setup.py') + path, name = os.path.split(os.path.abspath(sys.argv[0])) + if path: + os.chdir(path) + sys.path.insert(0, path) + sys.argv[0] = name + g = dict(globals()) + g['__file__'] = sys.argv[0] + g['__name__'] = '__main__' + if sys.version_info[0] < 3: + execfile(sys.argv[0], g, g) + else: + exec(open(sys.argv[0]).read(), g, g) + + +if __name__ == '__main__': + main() diff --git a/thesisenv/lib/python3.6/site-packages/terryfy/cp_suff_real_libs.py b/thesisenv/lib/python3.6/site-packages/terryfy/cp_suff_real_libs.py new file mode 100644 index 0000000..6584231 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/terryfy/cp_suff_real_libs.py @@ -0,0 +1,34 @@ +""" Copy real (not symlinks to) libraries with given suffix +""" + +from __future__ import print_function + +USAGE = """\ +cp_suff_real_libs.py +""" + +import os +from os.path import splitext, join as pjoin, islink +import 
sys +import shutil + +LIB_EXTS = ('.a', '.so', '.dylib') + + +def main(): + try: + lib_dir, suffix = sys.argv[1:] + except (IndexError, ValueError): + print(USAGE) + sys.exit(-1) + for fname in os.listdir(lib_dir): + if not splitext(fname)[1] in LIB_EXTS: + continue + old_path = pjoin(lib_dir, fname) + if islink(old_path): + continue + shutil.copyfile(old_path, old_path + suffix) + + +if __name__ == '__main__': + main() diff --git a/thesisenv/lib/python3.6/site-packages/terryfy/fuse_suff_real_libs.py b/thesisenv/lib/python3.6/site-packages/terryfy/fuse_suff_real_libs.py new file mode 100644 index 0000000..d0c4966 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/terryfy/fuse_suff_real_libs.py @@ -0,0 +1,42 @@ +""" Fuse real (not symlinks to) libraries with same name +""" + +from __future__ import print_function + +USAGE = """\ +fuse_suff_real_libs.py +""" + +import os +from os.path import (splitext, join as pjoin, split as psplit, islink, isfile, + abspath, realpath) +import sys +import shutil +from subprocess import check_call + +LIB_EXTS = ('.a', '.so', '.dylib') + + +def main(): + try: + lib_dir_out, lib_dir_in1, lib_dir_in2 = sys.argv[1:] + except (IndexError, ValueError): + print(USAGE) + sys.exit(-1) + for fname in os.listdir(lib_dir_in1): + if not splitext(fname)[1] in LIB_EXTS: + continue + lib_path = pjoin(lib_dir_in1, fname) + out_path = pjoin(lib_dir_out, fname) + if islink(lib_path): + continue + lib_path_2 = pjoin(lib_dir_in2, fname) + if not isfile(lib_path_2): + continue + # Fuse and copy library + check_call(['lipo', '-create', lib_path, lib_path_2, + '-output', out_path]) + + +if __name__ == '__main__': + main() diff --git a/thesisenv/lib/python3.6/site-packages/terryfy/monkeyexec.py b/thesisenv/lib/python3.6/site-packages/terryfy/monkeyexec.py new file mode 100644 index 0000000..424a1df --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/terryfy/monkeyexec.py @@ -0,0 +1,56 @@ +""" Monkey patch waf code to pipe command output direct 
to stdout + +As far as I know there is no way to do this using standard waf build commands +options. +""" + +import sys +from waflib import Utils, Errors, Logs, Context + + +def my_exec_command(self,cmd,**kw): + """ Copy of Context.exec_command that doesn't capture stdout / stderr + + This is necessary to prevent travis-ci timing out while waiting for + feedback from the scipy build process, in particular + """ + subprocess=Utils.subprocess + kw['shell']=isinstance(cmd,str) + Logs.debug('runner: %r'%cmd) + Logs.debug('runner_env: kw=%s'%kw) + if self.logger: + self.logger.info(cmd) + if'stdout'not in kw: + kw['stdout']=sys.stdout + if'stderr'not in kw: + kw['stderr']=sys.stderr + try: + if kw['stdout']or kw['stderr']: + p=subprocess.Popen(cmd,**kw) + (out,err)=p.communicate() + ret=p.returncode + else: + out,err=(None,None) + ret=subprocess.Popen(cmd,**kw).wait() + except Exception as e: + raise Errors.WafError('Execution failure: %s'%str(e),ex=e) + if out: + if not isinstance(out,str): + out=out.decode(sys.stdout.encoding or'iso8859-1') + if self.logger: + self.logger.debug('out: %s'%out) + else: + sys.stdout.write(out) + if err: + if not isinstance(err,str): + err=err.decode(sys.stdout.encoding or'iso8859-1') + if self.logger: + self.logger.error('err: %s'%err) + else: + sys.stderr.write(err) + return ret + + +def monkey_patch(): + """ Apply monkey patch to exec_command """ + Context.Context.exec_command = my_exec_command diff --git a/thesisenv/lib/python3.6/site-packages/terryfy/repath_lib_names.py b/thesisenv/lib/python3.6/site-packages/terryfy/repath_lib_names.py new file mode 100644 index 0000000..3cabe86 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/terryfy/repath_lib_names.py @@ -0,0 +1,36 @@ +""" Change install names and ids to reflect changed path +""" + +USAGE = """\ +USAGE: repath_lib_names.py [ 2: + basestring = str + + +class TravisError(Exception): + pass + + +def get_yaml_entry(yaml_dict, name): + """ Get entry `name` from dict 
`yaml_dict` + + Parameters + ---------- + yaml_dict : dict + dict or subdict from parsing .travis.yml file + name : str + key to analyze and return + + Returns + ------- + entry : None or list + If `name` not in `yaml_dict` return None. If key value is a string + return a single entry list. Otherwise return the key value. + """ + entry = yaml_dict.get(name) + if entry is None: + return None + if isinstance(entry, basestring): + return [entry] + return entry + + +def get_envs(yaml_dict): + """ Get first env combination from travis yaml dict + + Parameters + ---------- + yaml_dict : dict + dict or subdict from parsing .travis.yml file + + Returns + ------- + bash_str : str + bash scripting lines as string + """ + env = get_yaml_entry(yaml_dict, 'env') + if env is None: + return '' + # Bare string + if isinstance(env, basestring): + return env + '\n' + # Simple list defining matrix + if isinstance(env, (list, tuple)): + return env[0] + '\n' + # More complex dictey things + globals, matrix = [get_yaml_entry(env, name) + for name in ('global', 'matrix')] + if hasattr(matrix, 'keys'): + raise TravisError('Oops, envs too complicated') + lines = [] + if not globals is None: + if matrix is None: + raise TravisError('global section needs matrix section') + lines += globals + if not matrix is None: + lines.append(matrix[0]) + return '\n'.join(lines) + '\n' diff --git a/thesisenv/lib/python3.6/site-packages/terryfy/wafutils.py b/thesisenv/lib/python3.6/site-packages/terryfy/wafutils.py new file mode 100644 index 0000000..19e1504 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/terryfy/wafutils.py @@ -0,0 +1,254 @@ +from __future__ import print_function + +from os.path import split as psplit, join as pjoin +from subprocess import Popen, PIPE + +def back_tick(cmd, ret_err=False, as_str=True, shell=False): + """ Run command `cmd`, return stdout, or stdout, stderr if `ret_err` + + Roughly equivalent to ``check_output`` in Python 2.7 + + Parameters + ---------- + cmd : str 
+ command to execute + ret_err : bool, optional + If True, return stderr in addition to stdout. If False, just return + stdout + as_str : bool, optional + Whether to decode outputs to unicode string on exit. + + Returns + ------- + out : str or tuple + If `ret_err` is False, return stripped string containing stdout from + `cmd`. If `ret_err` is True, return tuple of (stdout, stderr) where + ``stdout`` is the stripped stdout, and ``stderr`` is the stripped + stderr. + + Raises + ------ + Raises RuntimeError if command returns non-zero exit code + """ + proc = Popen(cmd, stdout=PIPE, stderr=PIPE, shell=shell) + out, err = proc.communicate() + retcode = proc.returncode + if retcode is None: + proc.terminate() + raise RuntimeError(cmd + ' process did not terminate') + if retcode != 0: + raise RuntimeError(cmd + ' process returned code %d' % retcode) + out = out.strip() + if as_str: + out = out.decode('latin-1') + if not ret_err: + return out + err = err.strip() + if as_str: + err = err.decode('latin-1') + return out, err + + +def seq_to_list(seq): + """ Convert non-sequence to 1 element sequence, tuples to lists + """ + if not isinstance(seq, (list, tuple)): + return [seq] + return list(seq) + + +class FilePackageMaker(object): + # all packages + instances = {} + + def __init__(self, name, filename, build_cmd, + depends=(), + after=(), + patcher=None, + unpacked_sdir=None, + build_src_sdir='src', + ): + """ Initialize object for creating unpack, patch, build tasks + + Unpacking assumed to have no dependencies. + + Patching assumed to depend only on the unpacking. 
+ + Build depends on packing / patching and on given dependencies + + Parameters + ---------- + name : str + package name + filename : str + filename containing source archive to unpack + build_cmd : str or callable + command to build after extracting + depends : str or sequence, optional + depends for build + after : str or sequence, optional + names to set build to follow after (task name depends) + patcher : None or str or callable, optional + If str, a file containing a ``-p1`` patch for the sources. If + callable, then a rule to apply for patching. If None, don't patch + unpacked_sdir : str or None, optional + directory created by unpacking `filename`. If None we guess from + `filename` + build_src_sdir : str, optional + subdirectory in build directory into which to unpack + """ + self.name = name + self.filename = filename + self.build_cmd = build_cmd + _, fname = psplit(filename) + if fname.endswith('.tar.gz'): + self.unpack_cmd = 'tar zxf' + fname = fname[:-7] + elif fname.endswith('.tar.bz2'): + self.unpack_cmd = 'tar jxf' + fname = fname[:-8] + elif fname.endswith('.zip'): + self.unpack_cmd = 'unzip' + fname = fname[:-4] + else: + raise ValueError("Can't work out type of archive " + fname) + self.patcher = patcher + if unpacked_sdir is None: # Guess at output subdirectory + unpacked_sdir = fname + self.unpacked_sdir = unpacked_sdir + self.depends = seq_to_list(depends) + self.after = seq_to_list(after) + self.build_src_sdir = build_src_sdir + self._register_instance() + + def _register_instance(self): + """ Register instance to class dictionary """ + if self.name in self.instances: + raise ValueError('Name "{0}" already in instance ' + 'dict'.format(self.name)) + self.instances[self.name] = self + + def _unpack(self, bctx): + task_name = self.name + '.unpack' + dir_relpath = pjoin(self.build_src_sdir, self.unpacked_sdir) + dir_node = bctx.bldnode.make_node(dir_relpath) + archive_path = pjoin(bctx.srcnode.abspath(), self.filename) + rule = 'cd {dir_path} 
&& {unpack_cmd} {archive_path}'.format( + dir_path = pjoin(bctx.bldnode.abspath(), self.build_src_sdir), + unpack_cmd = self.unpack_cmd, + archive_path = archive_path) + bctx(rule = rule, + target = dir_node, + name = task_name) + return task_name, dir_node + + def unpack_patch_build(self, bctx): + """ Make task generators to unpack, patch and build + + Parameters + ---------- + bctx : build context + + Returns + ------- + build_name : str + name of build task, for other tasks to depend on if necessary + dir_node : :class:`Node` instance + node pointing to directory containing unpacked and built sources + """ + task_name, dir_node = self._unpack(bctx) + if not self.patcher is None: + if hasattr(self.patcher, '__call__'): # patch function + rule = self.patcher + else: # assume filename in source tree + patch_node = bctx.srcnode.find_node(self.patcher) + if patch_node is None: + bctx.fatal('Patch file {0} does not exist'.format( + self.patcher)) + rule = 'cd ${SRC} && patch -p1 < ' + patch_node.abspath() + task_name = self.name + '.patch' + bctx( + rule = rule, + source = dir_node, + name = task_name) + build_name = self.name + '.build' + bctx( + rule = self.build_cmd, + source = [dir_node] + self.depends, + after = [task_name] + self.after, + name = build_name) + return build_name, dir_node + + +class GitPackageMaker(FilePackageMaker): + # all packages + instances = {} + + def __init__(self, name, commit, build_cmd, + depends=(), + after=(), + patcher=None, + out_sdir=None, + git_sdir=None, + build_src_sdir='src', + ): + """ Initialize object for creating unpack, patch, build tasks + + * Unpacking assumed to have no dependencies. + * Patching assumed to depend only on the unpacking. 
+ * Build depends on packing / patching and on given dependencies + + Parameters + ---------- + name : str + package name + commit : str + identifier for commit to unarchive + build_cmd : str or callable + command to build after extracting + depends : str or sequence, optional + depends for build + after : str or sequence, optional + names to set build to follow after (task name depends) + patcher : None or str or callable, optional + If str, a file containing a ``-p1`` patch for the sources. If + callable, then a rule to apply for patching. If None, don't patch + out_sdir : None or str, optional + subdirectory in `build_src_sdir` in which to unpack. If None, use + `name` + git_sdir : str or None, optional + directory containing git repository. Defaults to `name` + build_src_sdir : str, optional + subdirectory in build directory into which to unpack + """ + self.name = name + self.commit = commit + self.build_cmd = build_cmd + self.patcher = patcher + if git_sdir is None: + git_sdir = name + self.git_sdir = git_sdir + self.depends = seq_to_list(depends) + self.after = seq_to_list(after) + self.build_src_sdir = build_src_sdir + self.out_sdir = name if out_sdir is None else out_sdir + self._register_instance() + + def _unpack(self, bctx): + src_path = bctx.srcnode.abspath() + bld_path = bctx.bldnode.abspath() + task_name = self.name + '.unpack' + dir_relpath = pjoin(self.build_src_sdir, self.out_sdir) + dir_node = bctx.bldnode.make_node(dir_relpath) + git_dir = pjoin(src_path, self.git_sdir) + bctx( + rule = ('cd {git_dir} && ' + 'git archive --prefix={dir_relpath}/ {commit} | ' + '( cd {bld_path} && tar x )'.format( + git_dir = git_dir, + dir_relpath = dir_relpath, + commit = self.commit, + bld_path = bld_path)), + target = dir_node, + name = task_name) + return task_name, dir_node diff --git a/thesisenv/lib/python3.6/site-packages/transaction-2.4.0.dist-info/INSTALLER b/thesisenv/lib/python3.6/site-packages/transaction-2.4.0.dist-info/INSTALLER new file mode 100644 
index 0000000..a1b589e --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/transaction-2.4.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/thesisenv/lib/python3.6/site-packages/transaction-2.4.0.dist-info/LICENSE.txt b/thesisenv/lib/python3.6/site-packages/transaction-2.4.0.dist-info/LICENSE.txt new file mode 100644 index 0000000..e1f9ad7 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/transaction-2.4.0.dist-info/LICENSE.txt @@ -0,0 +1,44 @@ +Zope Public License (ZPL) Version 2.1 + +A copyright notice accompanies this license document that identifies the +copyright holders. + +This license has been certified as open source. It has also been designated as +GPL compatible by the Free Software Foundation (FSF). + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +1. Redistributions in source code must retain the accompanying copyright +notice, this list of conditions, and the following disclaimer. + +2. Redistributions in binary form must reproduce the accompanying copyright +notice, this list of conditions, and the following disclaimer in the +documentation and/or other materials provided with the distribution. + +3. Names of the copyright holders must not be used to endorse or promote +products derived from this software without prior written permission from the +copyright holders. + +4. The right to distribute this software or to use it for any purpose does not +give you the right to use Servicemarks (sm) or Trademarks (tm) of the +copyright +holders. Use of them is covered by separate agreement with the copyright +holders. + +5. If any files are modified, you must cause the modified files to carry +prominent notices stating that you changed the files and the date of any +change. 
+ +Disclaimer + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS ``AS IS'' AND ANY EXPRESSED +OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES +OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO +EVENT SHALL THE COPYRIGHT HOLDERS BE LIABLE FOR ANY DIRECT, INDIRECT, +INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, +EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/thesisenv/lib/python3.6/site-packages/transaction-2.4.0.dist-info/METADATA b/thesisenv/lib/python3.6/site-packages/transaction-2.4.0.dist-info/METADATA new file mode 100644 index 0000000..e00861e --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/transaction-2.4.0.dist-info/METADATA @@ -0,0 +1,401 @@ +Metadata-Version: 2.1 +Name: transaction +Version: 2.4.0 +Summary: Transaction management for Python +Home-page: https://github.com/zopefoundation/transaction +Author: Zope Corporation +Author-email: zodb-dev@zope.org +License: ZPL 2.1 +Platform: any +Classifier: Development Status :: 6 - Mature +Classifier: License :: OSI Approved :: Zope Public License +Classifier: Programming Language :: Python +Classifier: Topic :: Database +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Classifier: Operating System :: Microsoft :: Windows +Classifier: Operating System :: Unix +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming 
Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Framework :: ZODB +Requires-Dist: zope.interface +Provides-Extra: docs +Requires-Dist: Sphinx; extra == 'docs' +Requires-Dist: repoze.sphinx.autointerface; extra == 'docs' +Provides-Extra: test +Requires-Dist: mock; extra == 'test' +Provides-Extra: testing +Requires-Dist: nose; extra == 'testing' +Requires-Dist: coverage; extra == 'testing' +Requires-Dist: mock; extra == 'testing' + +============ +Transactions +============ + +.. image:: https://travis-ci.org/zopefoundation/transaction.svg?branch=master + :target: https://travis-ci.org/zopefoundation/transaction + +.. image:: https://readthedocs.org/projects/transaction/badge/?version=latest + :target: http://transaction.readthedocs.org/en/latest/ + :alt: Documentation Status + +.. image:: https://img.shields.io/pypi/v/transaction.svg + :target: https://pypi.python.org/pypi/transaction + :alt: PyPI + +.. image:: https://img.shields.io/pypi/pyversions/transaction.svg + :target: https://pypi.python.org/pypi/transaction + :alt: Python versions + +This package contains a generic transaction implementation for Python. It is +mainly used by the ZODB. + +See http://transaction.readthedocs.org/en/latest for narrative documentation +on its usage. + + + +========= + Changes +========= + +2.4.0 (2018-10-23) +================== + +- Changed the implementation of ThreadTransactionManager to be a + thread.local that wraps a TransactionManager rather than a + thread.local that inherits from TransactionManager. It now exposes a + manager attribute that allows access to the wrapped transaction + manager to allow cross thread calls. See `issue 68 + `_. + + +2.3.0 (2018-10-19) +================== + +- Add support for Python 3.7. + +- Reach 100% test coverage. 
+ +- Fix ``transaction.manager.run`` formatting transaction notes when + given a mix of byte and text strings, such as can happen in Python 2 + with ``unicode_literals``. + +2.2.1 (2018-03-27) +================== + +- Make documentation index more user friendly; move old docs to developer + section. + +- Don't crash when printing tracebacks in IPython on Python 2. + (This addresses https://github.com/zopefoundation/transaction/issues/5.) + + +2.2.0 (2018-02-27) +================== + +- Add support for Python 3.6. + +- Drop support for Python 3.3. + +- Add ``isRetryableError`` to the + ``transaction.interfaces.ITransaction`` interface to allow external + systems to query whether an exception is retryable (transient) by + any of the attached data managers. Any + ``transaction.interfaces.TransientError`` is considered retryable + but a data manager may also consider other exceptions on a + per-instance basis. + + See https://github.com/zopefoundation/transaction/pull/38 + + +2.1.2 (2017-03-11) +================== + +- To avoid leaking memory, don't include unexpected value in warnings + about non-text transaction meta data. + + +2.1.1 (2017-03-11) +================== + +- For backward compatibility, relax the requirements that transaction + meta data (user or description) be text: + + - If None is assigned, the assignment is ignored. + + - If a non-text value is assigned, a warning is issued and the value + is converted to text. If the value is a binary string, it will be + decoded with the UTF-8 encoding the ``replace`` error policy. + + +2.1.0 (2017-02-08) +================== + +Added a transaction-manager explicit mode. Explicit mode makes some +kinds of application bugs easier to detect and potentially allows data +managers to manage resources more efficiently. + +(This addresses https://github.com/zopefoundation/transaction/issues/35.) + +2.0.3 (2016-11-17) +================== + +- The user and description fields must now be set with text (unicode) + data. 
Previously, if bytes were provided, they'd be decoded as + ASCII. It was decided that this would lead to bugs that were hard + to test for. + + Also, the transaction meta-data field, ``extended_info`` has been + renamed to ``extension``. + +2.0.2 (2016-11-13) +================== + +- Fixed: Some legacy applications expect the transaction _extension + attribute to be mutable and it wasn't. + +2.0.1 (2016-11-11) +================== + +- The transaction ``user`` and ``description`` attributes are now + defined to be text (unicode) as opposed to Python the ``str`` type. + +- Added the ``extended_info`` transaction attribute which contains + transaction meta data. (The ``_extension`` attribute is retained as + an alias for backward compatibility.) + + The transaction interface, ``ITransaction``, now requires + ``extended_info`` keys to be text (unicode) and values to be + JSON-serializable. + +- Removed setUser from ITransaction. We'll keep the method + indefinitely, but it's unseemly in ITransaction. :) + +The main purpose of these changes is to tighten up the text +specification of user, description and extended_info keys, and to give +us more flexibility in the future for serializing extended info. It's +possible that these changes will be breaking, so we're also increasing +the major version number. + +1.7.0 (2016-11-08) +================== + +- Added a transaction-manager ``run`` method for running a function as a + transaction, retrying as necessary on transient errors. + +- Fixed the transaction manager ``attempts`` method. It didn't stop + repeating when there wasn't an error. + +- Corrected ITransaction by removing beforeCommitHook (which is no longer + implemented) and removing 'self' from two methods. + +1.6.1 (2016-06-10) +================== + +- Fixed: Synchonizers that registered with transaction managers when + transactions were in progress didn't have their newTransaction + methods called to let them know of the in-progress transactions. 
+ +1.6.0 (2016-05-21) +================== + +- New transaction API for storing data on behalf of objects, such as + data managers. + +- Drop references to data managers joined to a transaction when it is + committed or aborted. + +1.5.0 (2016-05-05) +================== + +- Drop support for Python 2.6 and 3.2. + +- Add support for Python 3.5. + +- Added APIs for interogating and clearing internal state to support + client tests. + +1.4.4 (2015-05-19) +================== + +- Use the standard ``valuerefs()`` method rather than relying on + implementation details of ``WeakValueDictionary`` in ``WeakSet``. + +- Add support for PyPy3. + +- Require 100% branch coverage (in addition to 100% statement coverage). + +1.4.3 (2014-03-20) +================== + +- Add support for Python 3.4. + +1.4.2 (skipped) +=============== + +- Released in error as 1.4.3. + +1.4.1 (2013-02-20) +================== + +- Document that values returned by ``sortKey`` must be strings, in order + to guarantee total ordering. + +- Fix occasional RuntimeError: dictionary changed size during iteration errors + in transaction.weakset on Python 3. + +1.4.0 (2013-01-03) +================== + +- Updated Trove classifiers. + +1.4.0b1 (2012-12-18) +==================== + +- Converted existing doctests into Sphinx documentation (snippets are + exercised via 'tox'). + +- 100% unit test coverage. + +- Backward incompatibility: raise ValueError rather than AssertionError + for runtime errors: + + - In ``Transaction.doom`` if the transaction is in a non-doomable state. + + - In ``TransactionManager.attempts`` if passed a non-positive value. + + - In ``TransactionManager.free`` if passed a foreign transaction. + +- Declared support for Python 3.3 in ``setup.py``, and added ``tox`` testing. + +- When a non-retryable exception was raised as the result of a call to + ``transaction.manager.commit`` within the "attempts" machinery, the + exception was not reraised properly. 
Symptom: an unrecoverable exception + such as ``Unsupported: Storing blobs in is not supported.`` + would be swallowed inappropriately. + +1.3.0 (2012-05-16) +================== + +- Added Sphinx API docuementation. + +- Added explicit support for PyPy. + +- Dropped use of Python3-impatible ``zope.interface.implements`` class + advisor in favor of ``zope.interface.implementer`` class decorator. + +- Added support for continuous integration using ``tox`` and ``jenkins``. + +- Added ``setup.py docs`` alias (installs ``Sphinx`` and dependencies). + +- Added ``setup.py dev`` alias (runs ``setup.py develop`` plus installs + ``nose`` and ``coverage``). + +- Python 3.3 compatibility. + +- Fix "for attempt in transaction.attempts(x)" machinery, which would not + retry a transaction if its implicit call to ``.commit()`` itself raised a + transient error. Symptom: seeing conflict errors even though you thought + you were retrying some number of times via the "attempts" machinery (the + first attempt to generate an exception during commit would cause that + exception to be raised). + +1.2.0 (2011-12-05) +================== + +New Features: + +- Python 3.2 compatibility. + +- Dropped Python 2.4 and 2.5 compatibility (use 1.1.1 if you need to use + "transaction" under these Python versions). + +1.1.1 (2010-09-16) +================== + +Bug Fixes: + +- Code in ``_transaction.py`` held on to local references to traceback + objects after calling ``sys.exc_info()`` to get one, causing + potential reference leakages. + +- Fixed ``hexlify`` NameError in ``transaction._transaction.oid_repr`` + and add test. + +1.1.0 (1010-05-12) +================== + +New Features: + +- Transaction managers and the transaction module can be used with the + with statement to define transaction boundaries, as in:: + + with transaction: + ... do some things ... + + See transaction/tests/convenience.txt for more details. 
+ +- There is a new iterator function that automates dealing with + transient errors (such as ZODB confict errors). For example, in:: + + for attempt in transaction.attempts(5): + with attempt: + ... do some things .. + + If the work being done raises transient errors, the transaction will + be retried up to 5 times. + + See transaction/tests/convenience.txt for more details. + +Bugs fixed: + +- Fixed a bug that caused extra commit calls to be made on data + managers under certain special circumstances. + + https://mail.zope.org/pipermail/zodb-dev/2010-May/013329.html + +- When threads were reused, transaction data could leak accross them, + causing subtle application bugs. + + https://bugs.launchpad.net/zodb/+bug/239086 + +1.0.1 (2010-05-07) +================== + +- LP #142464: remove double newline between log entries: it makes doing + smarter formatting harder. + +- Updated tests to remove use of deprecated ``zope.testing.doctest``. + +1.0.0 (2009-07-24) +================== + +- Fix test that incorrectly relied on the order of a list that was generated + from a dict. + +- Remove crufty DEPENDENCIES.cfg left over from zpkg. + +1.0a1 (2007-12-18) +================== + += Initial release, branched from ZODB trunk on 2007-11-08 (aka + "3.9.0dev"). + +- Remove (deprecated) support for beforeCommitHook alias to + addBeforeCommitHook. + +- Add weakset tests. + +- Remove unit tests that depend on ZODB.tests.utils from + test_transaction (these are actually integration tests). 
+ + diff --git a/thesisenv/lib/python3.6/site-packages/transaction-2.4.0.dist-info/RECORD b/thesisenv/lib/python3.6/site-packages/transaction-2.4.0.dist-info/RECORD new file mode 100644 index 0000000..a9c3da3 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/transaction-2.4.0.dist-info/RECORD @@ -0,0 +1,37 @@ +transaction-2.4.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +transaction-2.4.0.dist-info/LICENSE.txt,sha256=PmcdsR32h1FswdtbPWXkqjg-rKPCDOo_r1Og9zNdCjw,2070 +transaction-2.4.0.dist-info/METADATA,sha256=vgk96frbP6y0yY5mZDtj_-8S98TqWEcpeCVHi-DuhPM,12067 +transaction-2.4.0.dist-info/RECORD,, +transaction-2.4.0.dist-info/WHEEL,sha256=8T8fxefr_r-A79qbOJ9d_AaEgkpCGmEPHc-gpCq5BRg,110 +transaction-2.4.0.dist-info/entry_points.txt,sha256=OZFBvh0wrCZW2J7tzw2NztqnLWwpv5WcriQ9x7FELPY,6 +transaction-2.4.0.dist-info/top_level.txt,sha256=SJxH-KO6MpNzese9uqTh8OccAhHs5dUJMJGpL6NYyFE,12 +transaction/__init__.py,sha256=d4bnAZTESTV3zepJifxxcc-CmHqCuzFOMMkm6_i9rHs,1411 +transaction/__pycache__/__init__.cpython-36.pyc,, +transaction/__pycache__/_compat.cpython-36.pyc,, +transaction/__pycache__/_manager.cpython-36.pyc,, +transaction/__pycache__/_transaction.cpython-36.pyc,, +transaction/__pycache__/interfaces.cpython-36.pyc,, +transaction/__pycache__/weakset.cpython-36.pyc,, +transaction/_compat.py,sha256=SjNDAJeeOeeTSZF1NaANkjE8FPBgADwyLfXE8WApCVE,1958 +transaction/_manager.py,sha256=VA0clKh3kfr3QNqWIJIkMqGSPgZWYODsKLYA2f7rIQc,9397 +transaction/_transaction.py,sha256=tWR1NAnFeeINO43dQs1DmijCkR4uM3drjzuHqJ-Wz7I,25511 +transaction/interfaces.py,sha256=GoEHckLg060eFY9811z-JMJUc0z1stxiYp5P09z678s,21709 +transaction/tests/__init__.py,sha256=MsSFjiLMLJZ7QhUPpVBWKiyDnCzryquRyr329NoCACI,2 +transaction/tests/__pycache__/__init__.cpython-36.pyc,, +transaction/tests/__pycache__/common.cpython-36.pyc,, +transaction/tests/__pycache__/examples.cpython-36.pyc,, +transaction/tests/__pycache__/savepointsample.cpython-36.pyc,, 
+transaction/tests/__pycache__/test__manager.cpython-36.pyc,, +transaction/tests/__pycache__/test__transaction.cpython-36.pyc,, +transaction/tests/__pycache__/test_register_compat.cpython-36.pyc,, +transaction/tests/__pycache__/test_savepoint.cpython-36.pyc,, +transaction/tests/__pycache__/test_weakset.cpython-36.pyc,, +transaction/tests/common.py,sha256=KWzj4QdahfaII6jRw4kXOm1FAZjN9e_YZOoyEldjfu8,2110 +transaction/tests/examples.py,sha256=wTZw7X1yQG91aSSqoSORtkzbuYEoWUYg6nHkulG6IXY,5514 +transaction/tests/savepointsample.py,sha256=39IsqAqZ4nGoFeUr1GVGWu7aJQM-j5vHuv-A4LuDz2w,7102 +transaction/tests/test__manager.py,sha256=1I8M8xwMiSLloFLa9c2p-PbTizKqR0KLtf-RdH4FTF4,31012 +transaction/tests/test__transaction.py,sha256=yyFrOq6jH894ehMh6xqOxvbEY7bAawq2K6sPS1GC8kI,63526 +transaction/tests/test_register_compat.py,sha256=VRmjyASySRhpSXAbrGqQwX76mqPoak8SmLB2mt6UG4s,4356 +transaction/tests/test_savepoint.py,sha256=jiQ0IROaRykXufXkhS96E1SMJs37Kb2QC5A0zntQOIw,2429 +transaction/tests/test_weakset.py,sha256=7bPsgD4RTJlWhccQ2sdL5K3V94EU5tCGDwQqXinmktE,3812 +transaction/weakset.py,sha256=ZH4q5pK5p1zAI3pfJr_-3hUYYN3o84MLV1WD9V7s_lA,3520 diff --git a/thesisenv/lib/python3.6/site-packages/transaction-2.4.0.dist-info/WHEEL b/thesisenv/lib/python3.6/site-packages/transaction-2.4.0.dist-info/WHEEL new file mode 100644 index 0000000..1001235 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/transaction-2.4.0.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.32.1) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/thesisenv/lib/python3.6/site-packages/transaction-2.4.0.dist-info/entry_points.txt b/thesisenv/lib/python3.6/site-packages/transaction-2.4.0.dist-info/entry_points.txt new file mode 100644 index 0000000..5c979ee --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/transaction-2.4.0.dist-info/entry_points.txt @@ -0,0 +1 @@ + \ No newline at end of file diff --git 
a/thesisenv/lib/python3.6/site-packages/transaction-2.4.0.dist-info/top_level.txt b/thesisenv/lib/python3.6/site-packages/transaction-2.4.0.dist-info/top_level.txt new file mode 100644 index 0000000..2aeb127 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/transaction-2.4.0.dist-info/top_level.txt @@ -0,0 +1 @@ +transaction diff --git a/thesisenv/lib/python3.6/site-packages/transaction/__init__.py b/thesisenv/lib/python3.6/site-packages/transaction/__init__.py new file mode 100644 index 0000000..d7687ef --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/transaction/__init__.py @@ -0,0 +1,37 @@ +############################################################################ +# +# Copyright (c) 2001, 2002, 2004 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################ +"""Exported transaction functions. + +$Id$ +""" + +from transaction._transaction import Transaction +from transaction._manager import TransactionManager +from transaction._manager import ThreadTransactionManager + +# NB: "with transaction:" does not work under Python 3 because they worked +# really hard to break looking up special methods like __enter__ and __exit__ +# via getattr and getattribute; see http://bugs.python.org/issue12022. On +# Python 3, you must use ``with transaction.manager`` instead. 
+ +manager = ThreadTransactionManager() +get = __enter__ = manager.get +begin = manager.begin +commit = manager.commit +abort = manager.abort +__exit__ = manager.__exit__ +doom = manager.doom +isDoomed = manager.isDoomed +savepoint = manager.savepoint +attempts = manager.attempts diff --git a/thesisenv/lib/python3.6/site-packages/transaction/_compat.py b/thesisenv/lib/python3.6/site-packages/transaction/_compat.py new file mode 100644 index 0000000..4a4ab0b --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/transaction/_compat.py @@ -0,0 +1,74 @@ +import sys + + +PY3 = sys.version_info[0] == 3 +JYTHON = sys.platform.startswith('java') + +if PY3: + text_type = str +else: # pragma: no cover + # py2 + text_type = unicode + +def bytes_(s, encoding='latin-1', errors='strict'): + if isinstance(s, text_type): + s = s.encode(encoding, errors) + return s + +def text_(s): + if not isinstance(s, text_type): + s = s.decode('utf-8') + return s + +if PY3: + def native_(s, encoding='latin-1', errors='strict'): + if isinstance(s, text_type): + return s + return str(s, encoding, errors) +else: # pragma: no cover + def native_(s, encoding='latin-1', errors='strict'): + if isinstance(s, text_type): + return s.encode(encoding, errors) + return str(s) + +if PY3: + from io import StringIO +else: # pragma: no cover + from io import BytesIO + # Prevent crashes in IPython when writing tracebacks if a commit fails + # ref: https://github.com/ipython/ipython/issues/9126#issuecomment-174966638 + class StringIO(BytesIO): + def write(self, s): + s = native_(s, encoding='utf-8') + super(StringIO, self).write(s) + + +if PY3: + def reraise(tp, value, tb=None): + if value.__traceback__ is not tb: # pragma: no cover + raise value.with_traceback(tb) + raise value + +else: # pragma: no cover + def exec_(code, globs=None, locs=None): + """Execute code in a namespace.""" + if globs is None: + frame = sys._getframe(1) + globs = frame.f_globals + if locs is None: + locs = frame.f_locals + del frame 
+ elif locs is None: + locs = globs + exec("""exec code in globs, locs""") + + exec_("""def reraise(tp, value, tb=None): + raise tp, value, tb +""") + + +try: + from threading import get_ident as get_thread_ident +except ImportError: # pragma: no cover + # py2 + from thread import get_ident as get_thread_ident diff --git a/thesisenv/lib/python3.6/site-packages/transaction/_manager.py b/thesisenv/lib/python3.6/site-packages/transaction/_manager.py new file mode 100644 index 0000000..24fcdd4 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/transaction/_manager.py @@ -0,0 +1,313 @@ +############################################################################ +# +# Copyright (c) 2004 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################ +"""A TransactionManager controls transaction boundaries. + +It coordinates application code and resource managers, so that they +are associated with the right transaction. +""" +import sys +import threading + +from zope.interface import implementer + +from transaction.interfaces import AlreadyInTransaction +from transaction.interfaces import ITransactionManager +from transaction.interfaces import NoTransaction +from transaction.interfaces import TransientError +from transaction.weakset import WeakSet +from transaction._compat import reraise +from transaction._compat import text_ +from transaction._transaction import Transaction + + +# We have to remember sets of synch objects, especially Connections. 
+# But we don't want mere registration with a transaction manager to +# keep a synch object alive forever; in particular, it's common +# practice not to explicitly close Connection objects, and keeping +# a Connection alive keeps a potentially huge number of other objects +# alive (e.g., the cache, and everything reachable from it too). +# Therefore we use "weak sets" internally. + +# Call the ISynchronizer newTransaction() method on every element of +# WeakSet synchs. +# A transaction manager needs to do this whenever begin() is called. +# Since it would be good if tm.get() returned the new transaction while +# newTransaction() is running, calling this has to be delayed until after +# the transaction manager has done whatever it needs to do to make its +# get() return the new txn. +def _new_transaction(txn, synchs): + if synchs: + synchs.map(lambda s: s.newTransaction(txn)) + +# Important: we must always pass a WeakSet (even if empty) to the Transaction +# constructor: synchronizers are registered with the TM, but the +# ISynchronizer xyzCompletion() methods are called by Transactions without +# consulting the TM, so we need to pass a mutable collection of synchronizers +# so that Transactions "see" synchronizers that get registered after the +# Transaction object is constructed. + + +@implementer(ITransactionManager) +class TransactionManager(object): + + def __init__(self, explicit=False): + self.explicit = explicit + self._txn = None + self._synchs = WeakSet() + + def begin(self): + """ See ITransactionManager. + """ + if self._txn is not None: + if self.explicit: + raise AlreadyInTransaction() + self._txn.abort() + txn = self._txn = Transaction(self._synchs, self) + _new_transaction(txn, self._synchs) + return txn + + __enter__ = lambda self: self.begin() + + def get(self): + """ See ITransactionManager. 
+ """ + if self._txn is None: + if self.explicit: + raise NoTransaction() + self._txn = Transaction(self._synchs, self) + return self._txn + + def free(self, txn): + if txn is not self._txn: + raise ValueError("Foreign transaction") + self._txn = None + + def registerSynch(self, synch): + """ See ITransactionManager. + """ + self._synchs.add(synch) + if self._txn is not None: + synch.newTransaction(self._txn) + + def unregisterSynch(self, synch): + """ See ITransactionManager. + """ + self._synchs.remove(synch) + + def clearSynchs(self): + """ See ITransactionManager. + """ + self._synchs.clear() + + def registeredSynchs(self): + """ See ITransactionManager. + """ + return bool(self._synchs) + + def isDoomed(self): + """ See ITransactionManager. + """ + return self.get().isDoomed() + + def doom(self): + """ See ITransactionManager. + """ + return self.get().doom() + + def commit(self): + """ See ITransactionManager. + """ + return self.get().commit() + + def abort(self): + """ See ITransactionManager. + """ + return self.get().abort() + + def __exit__(self, t, v, tb): + if v is None: + self.commit() + else: + self.abort() + + def savepoint(self, optimistic=False): + """ See ITransactionManager. 
+ """ + return self.get().savepoint(optimistic) + + def attempts(self, number=3): + if number <= 0: + raise ValueError("number must be positive") + while number: + number -= 1 + if number: + attempt = Attempt(self) + yield attempt + if attempt.success: + break + else: + yield self + + def _retryable(self, error_type, error): + if issubclass(error_type, TransientError): + return True + + for dm in self.get()._resources: + should_retry = getattr(dm, 'should_retry', None) + if (should_retry is not None) and should_retry(error): + return True + return False + + run_no_func_types = int, type(None) + def run(self, func=None, tries=3): + if isinstance(func, self.run_no_func_types): + if func is not None: + tries = func + return lambda func: self.run(func, tries) + + if tries <= 0: + raise ValueError("tries must be positive") + + # These are ordinarily native strings, but that's + # not required. A callable class could override them + # to anything, and a Python 2.7 file could have + # imported `from __future__ import unicode_literals` + # which gets unicode docstrings. + name = func.__name__ + doc = func.__doc__ + + name = text_(name) if name else u'' + doc = text_(doc) if doc else u'' + + if name != u'_': + if doc: + doc = name + u'\n\n' + doc + else: + doc = name + + for i in range(1, tries + 1): # pragma: no branch + txn = self.begin() + if doc: + txn.note(doc) + + try: + result = func() + txn.commit() + except Exception as v: + if i == tries: + raise # that was our last chance + retry = self._retryable(v.__class__, v) + txn.abort() + if not retry: + raise + else: + return result + + +@implementer(ITransactionManager) +class ThreadTransactionManager(threading.local): + """ + Thread-local transaction manager. + + A thread-local transaction manager can be used as a global + variable, but has a separate copy for each thread. 
+ + Advanced applications can use the `manager` attribute to get a + wrapped TransactionManager to allow cross-thread calls for + graceful shutdown of data managers. + """ + + def __init__(self): + self.manager = TransactionManager() + + @property + def explicit(self): + return self.manager.explicit + + @explicit.setter + def explicit(self, v): + self.manager.explicit = v + + def begin(self): + return self.manager.begin() + + def get(self): + return self.manager.get() + + def __enter__(self): + return self.manager.__enter__() + + def commit(self): + return self.manager.commit() + + def abort(self): + return self.manager.abort() + + def __exit__(self, t, v, tb): + return self.manager.__exit__(t, v, tb) + + def doom(self): + return self.manager.doom() + + def isDoomed(self): + return self.manager.isDoomed() + + def savepoint(self, optimistic=False): + return self.manager.savepoint(optimistic) + + def registerSynch(self, synch): + return self.manager.registerSynch(synch) + + def unregisterSynch(self, synch): + return self.manager.unregisterSynch(synch) + + def clearSynchs(self): + return self.manager.clearSynchs() + + def registeredSynchs(self): + return self.manager.registeredSynchs() + + def attempts(self, number=3): + return self.manager.attempts(number) + + def run(self, func=None, tries=3): + return self.manager.run(func, tries) + +class Attempt(object): + + success = False + + def __init__(self, manager): + self.manager = manager + + def _retry_or_raise(self, t, v, tb): + retry = self.manager._retryable(t, v) + self.manager.abort() + if retry: + return retry # suppress the exception if necessary + reraise(t, v, tb) # otherwise reraise the exception + + def __enter__(self): + return self.manager.__enter__() + + def __exit__(self, t, v, tb): + if v is None: + try: + self.manager.commit() + except: + return self._retry_or_raise(*sys.exc_info()) + else: + self.success = True + else: + return self._retry_or_raise(t, v, tb) diff --git 
a/thesisenv/lib/python3.6/site-packages/transaction/_transaction.py b/thesisenv/lib/python3.6/site-packages/transaction/_transaction.py new file mode 100644 index 0000000..7fcbe65 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/transaction/_transaction.py @@ -0,0 +1,769 @@ +############################################################################ +# +# Copyright (c) 2004 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################ +import binascii +import logging +import sys +import warnings +import weakref +import traceback + +from zope.interface import implementer + +from transaction.weakset import WeakSet +from transaction.interfaces import TransactionFailedError +from transaction import interfaces +from transaction._compat import reraise +from transaction._compat import get_thread_ident +from transaction._compat import native_ +from transaction._compat import bytes_ +from transaction._compat import StringIO +from transaction._compat import text_type + +_marker = object() + +_TB_BUFFER = None #unittests may hook +def _makeTracebackBuffer(): #pragma NO COVER + if _TB_BUFFER is not None: + return _TB_BUFFER + return StringIO() + +_LOGGER = None #unittests may hook +def _makeLogger(): #pragma NO COVER + if _LOGGER is not None: + return _LOGGER + return logging.getLogger("txn.%d" % get_thread_ident()) + + +# The point of this is to avoid hiding exceptions (which the builtin +# hasattr() does). 
+def myhasattr(obj, attr): + return getattr(obj, attr, _marker) is not _marker + +class Status: + # ACTIVE is the initial state. + ACTIVE = "Active" + + COMMITTING = "Committing" + COMMITTED = "Committed" + + DOOMED = "Doomed" + + # commit() or commit(True) raised an exception. All further attempts + # to commit or join this transaction will raise TransactionFailedError. + COMMITFAILED = "Commit failed" + +@implementer(interfaces.ITransaction, + interfaces.ITransactionDeprecated) +class Transaction(object): + + + + # Assign an index to each savepoint so we can invalidate later savepoints + # on rollback. The first index assigned is 1, and it goes up by 1 each + # time. + _savepoint_index = 0 + + # If savepoints are used, keep a weak key dict of them. This maps a + # savepoint to its index (see above). + _savepoint2index = None + + # Meta data. extended_info is also metadata, but is initialized to an + # emtpy dict in __init__. + _user = u"" + _description = u"" + + def __init__(self, synchronizers=None, manager=None): + self.status = Status.ACTIVE + # List of resource managers, e.g. MultiObjectResourceAdapters. + self._resources = [] + + # Weak set of synchronizer objects to call. + if synchronizers is None: + synchronizers = WeakSet() + self._synchronizers = synchronizers + + self._manager = manager + + # _adapters: Connection/_p_jar -> MultiObjectResourceAdapter[Sub] + self._adapters = {} + self._voted = {} # id(Connection) -> boolean, True if voted + # _voted and other dictionaries use the id() of the resource + # manager as a key, because we can't guess whether the actual + # resource managers will be safe to use as dict keys. + + # The user, description, and extension attributes are accessed + # directly by storages, leading underscore notwithstanding. + self.extension = {} + + self.log = _makeLogger() + self.log.debug("new transaction") + + # If a commit fails, the traceback is saved in _failure_traceback. 
+ # If another attempt is made to commit, TransactionFailedError is + # raised, incorporating this traceback. + self._failure_traceback = None + + # List of (hook, args, kws) tuples added by addBeforeCommitHook(). + self._before_commit = [] + + # List of (hook, args, kws) tuples added by addAfterCommitHook(). + self._after_commit = [] + + @property + def _extension(self): + # for backward compatibility, since most clients used this + # absent any formal API. + return self.extension + + @_extension.setter + def _extension(self, v): + self.extension = v + + @property + def user(self): + return self._user + + @user.setter + def user(self, v): + if v is None: + raise ValueError("user must not be None") + self._user = text_or_warn(v) + + @property + def description(self): + return self._description + + @description.setter + def description(self, v): + if v is not None: + self._description = text_or_warn(v) + + def isDoomed(self): + """ See ITransaction. + """ + return self.status is Status.DOOMED + + def doom(self): + """ See ITransaction. + """ + if self.status is not Status.DOOMED: + if self.status is not Status.ACTIVE: + # should not doom transactions in the middle, + # or after, a commit + raise ValueError('non-doomable') + self.status = Status.DOOMED + + # Raise TransactionFailedError, due to commit()/join()/register() + # getting called when the current transaction has already suffered + # a commit/savepoint failure. + def _prior_operation_failed(self): + assert self._failure_traceback is not None + raise TransactionFailedError("An operation previously failed, " + "with traceback:\n\n%s" % + self._failure_traceback.getvalue()) + + def join(self, resource): + """ See ITransaction. + """ + if self.status is Status.COMMITFAILED: + self._prior_operation_failed() # doesn't return + + if (self.status is not Status.ACTIVE and + self.status is not Status.DOOMED): + # TODO: Should it be possible to join a committing transaction? + # I think some users want it. 
+ raise ValueError("expected txn status %r or %r, but it's %r" % ( + Status.ACTIVE, Status.DOOMED, self.status)) + # TODO: the prepare check is a bit of a hack, perhaps it would + # be better to use interfaces. If this is a ZODB4-style + # resource manager, it needs to be adapted, too. + if myhasattr(resource, "prepare"): + # TODO: deprecate 3.6 + resource = DataManagerAdapter(resource) + self._resources.append(resource) + + if self._savepoint2index: + # A data manager has joined a transaction *after* a savepoint + # was created. A couple of things are different in this case: + # + # 1. We need to add its savepoint to all previous savepoints. + # so that if they are rolled back, we roll this one back too. + # + # 2. We don't actually need to ask the data manager for a + # savepoint: because it's just joining, we can just abort it to + # roll back to the current state, so we simply use an + # AbortSavepoint. + datamanager_savepoint = AbortSavepoint(resource, self) + for transaction_savepoint in self._savepoint2index.keys(): + transaction_savepoint._savepoints.append( + datamanager_savepoint) + + def _unjoin(self, resource): + # Leave a transaction because a savepoint was rolled back on a resource + # that joined later. + + # Don't use remove. We don't want to assume anything about __eq__. + self._resources = [r for r in self._resources if r is not resource] + + def savepoint(self, optimistic=False): + """ See ITransaction. + """ + if self.status is Status.COMMITFAILED: + self._prior_operation_failed() # doesn't return, it raises + + try: + savepoint = Savepoint(self, optimistic, *self._resources) + except: + self._cleanup(self._resources) + self._saveAndRaiseCommitishError() # reraises! 
+ + if self._savepoint2index is None: + self._savepoint2index = weakref.WeakKeyDictionary() + self._savepoint_index += 1 + self._savepoint2index[savepoint] = self._savepoint_index + + return savepoint + + # Remove and invalidate all savepoints we know about with an index + # larger than `savepoint`'s. This is what's needed when a rollback + # _to_ `savepoint` is done. + def _remove_and_invalidate_after(self, savepoint): + savepoint2index = self._savepoint2index + index = savepoint2index[savepoint] + # use list(items()) to make copy to avoid mutating while iterating + for savepoint, i in list(savepoint2index.items()): + if i > index: + savepoint.transaction = None # invalidate + del savepoint2index[savepoint] + + # Invalidate and forget about all savepoints. + def _invalidate_all_savepoints(self): + for savepoint in self._savepoint2index.keys(): + savepoint.transaction = None # invalidate + self._savepoint2index.clear() + + + def register(self, obj): + """ See ITransaction. + """ + # The old way of registering transaction participants. + # + # register() is passed either a persisent object or a + # resource manager like the ones defined in ZODB.DB. + # If it is passed a persistent object, that object should + # be stored when the transaction commits. For other + # objects, the object implements the standard two-phase + # commit protocol. + manager = getattr(obj, "_p_jar", obj) + if manager is None: + raise ValueError("Register with no manager") + adapter = self._adapters.get(manager) + if adapter is None: + adapter = MultiObjectResourceAdapter(manager) + adapter.objects.append(obj) + self._adapters[manager] = adapter + self.join(adapter) + else: + # TODO: comment out this expensive assert later + # Use id() to guard against proxies. + assert id(obj) not in map(id, adapter.objects) + adapter.objects.append(obj) + + def commit(self): + """ See ITransaction. 
+ """ + if self.status is Status.DOOMED: + raise interfaces.DoomedTransaction( + 'transaction doomed, cannot commit') + + if self._savepoint2index: + self._invalidate_all_savepoints() + + if self.status is Status.COMMITFAILED: + self._prior_operation_failed() # doesn't return + + self._callBeforeCommitHooks() + + self._synchronizers.map(lambda s: s.beforeCompletion(self)) + self.status = Status.COMMITTING + + try: + self._commitResources() + self.status = Status.COMMITTED + except: + t = None + v = None + tb = None + try: + t, v, tb = self._saveAndGetCommitishError() + self._callAfterCommitHooks(status=False) + reraise(t, v, tb) + finally: + del t, v, tb + else: + self._free() + self._synchronizers.map(lambda s: s.afterCompletion(self)) + self._callAfterCommitHooks(status=True) + self.log.debug("commit") + + def _saveAndGetCommitishError(self): + self.status = Status.COMMITFAILED + # Save the traceback for TransactionFailedError. + ft = self._failure_traceback = _makeTracebackBuffer() + t = None + v = None + tb = None + try: + t, v, tb = sys.exc_info() + # Record how we got into commit(). + traceback.print_stack(sys._getframe(1), None, ft) + # Append the stack entries from here down to the exception. + traceback.print_tb(tb, None, ft) + # Append the exception type and value. + ft.writelines(traceback.format_exception_only(t, v)) + return t, v, tb + finally: + del t, v, tb + + def _saveAndRaiseCommitishError(self): + t = None + v = None + tb = None + try: + t, v, tb = self._saveAndGetCommitishError() + reraise(t, v, tb) + finally: + del t, v, tb + + def getBeforeCommitHooks(self): + """ See ITransaction. + """ + return iter(self._before_commit) + + def addBeforeCommitHook(self, hook, args=(), kws=None): + """ See ITransaction. + """ + if kws is None: + kws = {} + self._before_commit.append((hook, tuple(args), kws)) + + def _callBeforeCommitHooks(self): + # Call all hooks registered, allowing further registrations + # during processing. 
Note that calls to addBeforeCommitHook() may + # add additional hooks while hooks are running, and iterating over a + # growing list is well-defined in Python. + for hook, args, kws in self._before_commit: + hook(*args, **kws) + self._before_commit = [] + + def getAfterCommitHooks(self): + """ See ITransaction. + """ + return iter(self._after_commit) + + def addAfterCommitHook(self, hook, args=(), kws=None): + """ See ITransaction. + """ + if kws is None: + kws = {} + self._after_commit.append((hook, tuple(args), kws)) + + def _callAfterCommitHooks(self, status=True): + # Avoid to abort anything at the end if no hooks are registred. + if not self._after_commit: + return + # Call all hooks registered, allowing further registrations + # during processing. Note that calls to addAterCommitHook() may + # add additional hooks while hooks are running, and iterating over a + # growing list is well-defined in Python. + for hook, args, kws in self._after_commit: + # The first argument passed to the hook is a Boolean value, + # true if the commit succeeded, or false if the commit aborted. + try: + hook(status, *args, **kws) + except: + # We need to catch the exceptions if we want all hooks + # to be called + self.log.error("Error in after commit hook exec in %s ", + hook, exc_info=sys.exc_info()) + # The transaction is already committed. It must not have + # further effects after the commit. + for rm in self._resources: + try: + rm.abort(self) + except: + # XXX should we take further actions here ? + self.log.error("Error in abort() on manager %s", + rm, exc_info=sys.exc_info()) + self._after_commit = [] + self._before_commit = [] + + def _commitResources(self): + # Execute the two-phase commit protocol. 
+ + L = list(self._resources) + L.sort(key=rm_key) + try: + for rm in L: + rm.tpc_begin(self) + for rm in L: + rm.commit(self) + self.log.debug("commit %r", rm) + for rm in L: + rm.tpc_vote(self) + self._voted[id(rm)] = True + + try: + for rm in L: + rm.tpc_finish(self) + except: + # TODO: do we need to make this warning stronger? + # TODO: It would be nice if the system could be configured + # to stop committing transactions at this point. + self.log.critical("A storage error occurred during the second " + "phase of the two-phase commit. Resources " + "may be in an inconsistent state.") + raise + except: + # If an error occurs committing a transaction, we try + # to revert the changes in each of the resource managers. + t, v, tb = sys.exc_info() + try: + try: + self._cleanup(L) + finally: + self._synchronizers.map(lambda s: s.afterCompletion(self)) + reraise(t, v, tb) + finally: + del t, v, tb + + def _cleanup(self, L): + # Called when an exception occurs during tpc_vote or tpc_finish. 
+ for rm in L: + if id(rm) not in self._voted: + try: + rm.abort(self) + except Exception: + self.log.error("Error in abort() on manager %s", + rm, exc_info=sys.exc_info()) + for rm in L: + try: + rm.tpc_abort(self) + except Exception: + self.log.error("Error in tpc_abort() on manager %s", + rm, exc_info=sys.exc_info()) + + def _free(self): + # Called when the transaction has been committed or aborted + # to break references---this transaction object will not be returned + # as the current transaction from its manager after this, and all + # IDatamanager objects joined to it will forgotten + if self._manager: + self._manager.free(self) + + if hasattr(self, '_data'): + delattr(self, '_data') + + del self._resources[:] + + def data(self, ob): + try: + data = self._data + except AttributeError: + raise KeyError(ob) + + try: + return data[id(ob)] + except KeyError: + raise KeyError(ob) + + def set_data(self, ob, ob_data): + try: + data = self._data + except AttributeError: + data = self._data = {} + + data[id(ob)] = ob_data + + def abort(self): + """ See ITransaction. + """ + if self._savepoint2index: + self._invalidate_all_savepoints() + + self._synchronizers.map(lambda s: s.beforeCompletion(self)) + + try: + + t = None + v = None + tb = None + + for rm in self._resources: + try: + rm.abort(self) + except: + if tb is None: + t, v, tb = sys.exc_info() + self.log.error("Failed to abort resource manager: %s", + rm, exc_info=sys.exc_info()) + + self._free() + + self._synchronizers.map(lambda s: s.afterCompletion(self)) + + self.log.debug("abort") + + if tb is not None: + reraise(t, v, tb) + finally: + del t, v, tb + + def note(self, text): + """ See ITransaction. + """ + if text is not None: + text = text_or_warn(text).strip() + if self.description: + self.description += u"\n" + text + else: + self.description = text + + def setUser(self, user_name, path=u"/"): + """ See ITransaction. 
+ """ + self.user = u"%s %s" % (text_or_warn(path), text_or_warn(user_name)) + + def setExtendedInfo(self, name, value): + """ See ITransaction. + """ + self.extension[name] = value + + def isRetryableError(self, error): + return self._manager._retryable(type(error), error) + + +# TODO: We need a better name for the adapters. + + +class MultiObjectResourceAdapter(object): + """Adapt the old-style register() call to the new-style join(). + + With join(), a resource manager like a Connection registers with + the transaction manager. With register(), an individual object + is passed to register(). + """ + def __init__(self, jar): + self.manager = jar + self.objects = [] + self.ncommitted = 0 + + def __repr__(self): + return "<%s for %s at %s>" % (self.__class__.__name__, + self.manager, id(self)) + + def sortKey(self): + return self.manager.sortKey() + + def tpc_begin(self, txn): + self.manager.tpc_begin(txn) + + def tpc_finish(self, txn): + self.manager.tpc_finish(txn) + + def tpc_abort(self, txn): + self.manager.tpc_abort(txn) + + def commit(self, txn): + for o in self.objects: + self.manager.commit(o, txn) + self.ncommitted += 1 + + def tpc_vote(self, txn): + self.manager.tpc_vote(txn) + + def abort(self, txn): + t = None + v = None + tb = None + try: + for o in self.objects: + try: + self.manager.abort(o, txn) + except: + # Capture the first exception and re-raise it after + # aborting all the other objects. + if tb is None: + t, v, tb = sys.exc_info() + txn.log.error("Failed to abort object: %s", + object_hint(o), exc_info=sys.exc_info()) + + if tb is not None: + reraise(t, v, tb) + finally: + del t, v, tb + + +def rm_key(rm): + func = getattr(rm, 'sortKey', None) + if func is not None: + return func() + +def object_hint(o): + """Return a string describing the object. + + This function does not raise an exception. + """ + # We should always be able to get __class__. + klass = o.__class__.__name__ + # oid would be great, but maybe this isn't a persistent object. 
+ oid = getattr(o, "_p_oid", _marker) + if oid is not _marker: + oid = oid_repr(oid) + else: + oid = 'None' + return "%s oid=%s" % (klass, oid) + +def oid_repr(oid): + if isinstance(oid, str) and len(oid) == 8: + # Convert to hex and strip leading zeroes. + as_hex = native_( + binascii.hexlify(bytes_(oid, 'ascii')), 'ascii').lstrip('0') + # Ensure two characters per input byte. + if len(as_hex) & 1: + as_hex = '0' + as_hex + elif as_hex == '': + as_hex = '00' + return '0x' + as_hex + else: + return repr(oid) + + +# TODO: deprecate for 3.6. +class DataManagerAdapter(object): + """Adapt zodb 4-style data managers to zodb3 style + + Adapt transaction.interfaces.IDataManager to + ZODB.interfaces.IPureDatamanager + """ + + # Note that it is pretty important that this does not have a _p_jar + # attribute. This object will be registered with a zodb3 TM, which + # will then try to get a _p_jar from it, using it as the default. + # (Objects without a _p_jar are their own data managers.) + + def __init__(self, datamanager): + self._datamanager = datamanager + + # TODO: I'm not sure why commit() doesn't do anything + + def commit(self, transaction): + # We don't do anything here because ZODB4-style data managers + # didn't have a separate commit step + pass + + def abort(self, transaction): + self._datamanager.abort(transaction) + + def tpc_begin(self, transaction): + # We don't do anything here because ZODB4-style data managers + # didn't have a separate tpc_begin step + pass + + def tpc_abort(self, transaction): + self._datamanager.abort(transaction) + + def tpc_finish(self, transaction): + self._datamanager.commit(transaction) + + def tpc_vote(self, transaction): + self._datamanager.prepare(transaction) + + def sortKey(self): + return self._datamanager.sortKey() + + +@implementer(interfaces.ISavepoint) +class Savepoint: + """Transaction savepoint. + + Transaction savepoints coordinate savepoints for data managers + participating in a transaction. 
+ """ + + def __init__(self, transaction, optimistic, *resources): + self.transaction = transaction + self._savepoints = savepoints = [] + + for datamanager in resources: + try: + savepoint = datamanager.savepoint + except AttributeError: + if not optimistic: + raise TypeError("Savepoints unsupported", datamanager) + savepoint = NoRollbackSavepoint(datamanager) + else: + savepoint = savepoint() + + savepoints.append(savepoint) + + @property + def valid(self): + return self.transaction is not None + + def rollback(self): + """ See ISavepoint. + """ + transaction = self.transaction + if transaction is None: + raise interfaces.InvalidSavepointRollbackError( + 'invalidated by a later savepoint') + transaction._remove_and_invalidate_after(self) + + try: + for savepoint in self._savepoints: + savepoint.rollback() + except: + # Mark the transaction as failed. + transaction._saveAndRaiseCommitishError() # reraises! + + +class AbortSavepoint: + + def __init__(self, datamanager, transaction): + self.datamanager = datamanager + self.transaction = transaction + + def rollback(self): + self.datamanager.abort(self.transaction) + self.transaction._unjoin(self.datamanager) + + +class NoRollbackSavepoint: + + def __init__(self, datamanager): + self.datamanager = datamanager + + def rollback(self): + raise TypeError("Savepoints unsupported", self.datamanager) + +def text_or_warn(s): + if isinstance(s, text_type): + return s + + warnings.warn("Expected text", DeprecationWarning, stacklevel=3) + if isinstance(s, bytes): + return s.decode('utf-8', 'replace') + else: + return text_type(s) diff --git a/thesisenv/lib/python3.6/site-packages/transaction/interfaces.py b/thesisenv/lib/python3.6/site-packages/transaction/interfaces.py new file mode 100644 index 0000000..b4f98ce --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/transaction/interfaces.py @@ -0,0 +1,586 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope 
Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## + +from zope.interface import Attribute +from zope.interface import Interface + +class ITransactionManager(Interface): + """An object that manages a sequence of transactions. + + Applications use transaction managers to establish transaction boundaries. + """ + + explicit = Attribute( + """Explicit mode indicator. + + This is true if the transaction manager is in explicit mode. + In explicit mode, transactions must be begun explicitly, by + calling ``begin()`` and ended explicitly by calling + ``commit()`` or ``abort()``. + """) + + + def begin(): + """Explicitly begin and return a new transaction. + + If an existing transaction is in progress and the transaction + manager not in explicit mode, the previous transaction will be + aborted. If an existing transaction is in progress and the + transaction manager is in explicit mode, an + ``AlreadyInTransaction`` exception will be raised.. + + The ``newTransaction`` method of registered synchronizers is called, + passing the new transaction object. + + Note that when not in explicit mode, transactions may be + started implicitly without calling ``begin``. In that case, + ``newTransaction`` isn't called because the transaction + manager doesn't know when to call it. The transaction is + likely to have begun long before the transaction manager is + involved. (Conceivably the ``commit`` and ``abort`` methods + could call ``begin``, but they don't.) 
+ """ + + def get(): + """Get the current transaction. + + In explicit mode, if a transaction hasn't begun, a + ``NoTransaction`` exception will be raised. + """ + + def commit(): + """Commit the current transaction. + + In explicit mode, if a transaction hasn't begun, a + ``NoTransaction`` exception will be raised. + """ + + def abort(): + """Abort the current transaction. + + In explicit mode, if a transaction hasn't begun, a + ``NoTransaction`` exception will be raised. + """ + + def doom(): + """Doom the current transaction. + + In explicit mode, if a transaction hasn't begun, a + ``NoTransaction`` exception will be raised. + """ + + def isDoomed(): + """Returns True if the current transaction is doomed, otherwise False. + + In explicit mode, if a transaction hasn't begun, a + ``NoTransaction`` exception will be raised. + """ + + def savepoint(optimistic=False): + """Create a savepoint from the current transaction. + + If the optimistic argument is true, then data managers that + don't support savepoints can be used, but an error will be + raised if the savepoint is rolled back. + + An ISavepoint object is returned. + + In explicit mode, if a transaction hasn't begun, a + ``NoTransaction`` exception will be raised. + """ + + def registerSynch(synch): + """Register an ISynchronizer. + + Synchronizers are notified about some major events in a transaction's + life. See ISynchronizer for details. + + If a synchronizer registers while there is an active + transaction, its newTransaction method will be called with the + active transaction. + """ + + def unregisterSynch(synch): + """Unregister an ISynchronizer. + + Synchronizers are notified about some major events in a transaction's + life. See ISynchronizer for details. + """ + + def clearSynchs(): + """Unregister all registered ISynchronizers. + + This exists to support test cleanup/initialization + """ + + def registeredSynchs(): + """Determine if any ISynchronizers are registered. 
+ + Return true if any are registered, and return False otherwise. + + This exists to support test cleanup/initialization + """ + +class ITransaction(Interface): + """Object representing a running transaction. + + Objects with this interface may represent different transactions + during their lifetime (.begin() can be called to start a new + transaction using the same instance, although that example is + deprecated and will go away in ZODB 3.6). + """ + + user = Attribute( + """A user name associated with the transaction. + + The format of the user name is defined by the application. The value + is text (unicode). Storages record the user value, as meta-data, + when a transaction commits. + + A storage may impose a limit on the size of the value; behavior is + undefined if such a limit is exceeded (for example, a storage may + raise an exception, or truncate the value). + """) + + description = Attribute( + """A textual description of the transaction. + + The value is text (unicode). Method note() is the intended + way to set the value. Storages record the description, as meta-data, + when a transaction commits. + + A storage may impose a limit on the size of the description; behavior + is undefined if such a limit is exceeded (for example, a storage may + raise an exception, or truncate the value). + """) + + extension = Attribute( + "A dictionary containing application-defined metadata.") + + def commit(): + """Finalize the transaction. + + This executes the two-phase commit algorithm for all + IDataManager objects associated with the transaction. + """ + + def abort(): + """Abort the transaction. + + This is called from the application. This can only be called + before the two-phase commit protocol has been started. + """ + + def doom(): + """Doom the transaction. + + Dooms the current transaction. This will cause + DoomedTransactionException to be raised on any attempt to commit the + transaction. + + Otherwise the transaction will behave as if it was active. 
+ """ + + def savepoint(optimistic=False): + """Create a savepoint. + + If the optimistic argument is true, then data managers that don't + support savepoints can be used, but an error will be raised if the + savepoint is rolled back. + + An ISavepoint object is returned. + """ + + def join(datamanager): + """Add a data manager to the transaction. + + `datamanager` must provide the transactions.interfaces.IDataManager + interface. + """ + + def note(text): + """Add text (unicode) to the transaction description. + + This modifies the `.description` attribute; see its docs for more + detail. First surrounding whitespace is stripped from `text`. If + `.description` is currently an empty string, then the stripped text + becomes its value, else two newlines and the stripped text are + appended to `.description`. + """ + + def setExtendedInfo(name, value): + """Add extension data to the transaction. + + name + is the text (unicode) name of the extension property to set + + value + must be picklable and json serializable (not an instance). + + Multiple calls may be made to set multiple extension + properties, provided the names are distinct. + + Storages record the extension data, as meta-data, when a transaction + commits. + + A storage may impose a limit on the size of extension data; behavior + is undefined if such a limit is exceeded (for example, a storage may + raise an exception, or remove `` pairs). + """ + + def addBeforeCommitHook(hook, args=(), kws=None): + """Register a hook to call before the transaction is committed. + + The specified hook function will be called after the transaction's + commit method has been called, but before the commit process has been + started. The hook will be passed the specified positional (`args`) + and keyword (`kws`) arguments. `args` is a sequence of positional + arguments to be passed, defaulting to an empty tuple (no positional + arguments are passed). 
`kws` is a dictionary of keyword argument + names and values to be passed, or the default None (no keyword + arguments are passed). + + Multiple hooks can be registered and will be called in the order they + were registered (first registered, first called). This method can + also be called from a hook: an executing hook can register more + hooks. Applications should take care to avoid creating infinite loops + by recursively registering hooks. + + Hooks are called only for a top-level commit. A + savepoint creation does not call any hooks. If the + transaction is aborted, hooks are not called, and are discarded. + Calling a hook "consumes" its registration too: hook registrations + do not persist across transactions. If it's desired to call the same + hook on every transaction commit, then addBeforeCommitHook() must be + called with that hook during every transaction; in such a case + consider registering a synchronizer object via a TransactionManager's + registerSynch() method instead. + """ + + def getBeforeCommitHooks(): + """Return iterable producing the registered addBeforeCommit hooks. + + A triple (hook, args, kws) is produced for each registered hook. + The hooks are produced in the order in which they would be invoked + by a top-level transaction commit. + """ + + def addAfterCommitHook(hook, args=(), kws=None): + """Register a hook to call after a transaction commit attempt. + + The specified hook function will be called after the transaction + commit succeeds or aborts. The first argument passed to the hook + is a Boolean value, true if the commit succeeded, or false if the + commit aborted. `args` specifies additional positional, and `kws` + keyword, arguments to pass to the hook. `args` is a sequence of + positional arguments to be passed, defaulting to an empty tuple + (only the true/false success argument is passed). `kws` is a + dictionary of keyword argument names and values to be passed, or + the default None (no keyword arguments are passed). 
+ + Multiple hooks can be registered and will be called in the order they + were registered (first registered, first called). This method can + also be called from a hook: an executing hook can register more + hooks. Applications should take care to avoid creating infinite loops + by recursively registering hooks. + + Hooks are called only for a top-level commit. A + savepoint creation does not call any hooks. Calling a + hook "consumes" its registration: hook registrations do not + persist across transactions. If it's desired to call the same + hook on every transaction commit, then addAfterCommitHook() must be + called with that hook during every transaction; in such a case + consider registering a synchronizer object via a TransactionManager's + registerSynch() method instead. + """ + + def getAfterCommitHooks(): + """Return iterable producing the registered addAfterCommit hooks. + + A triple (hook, args, kws) is produced for each registered hook. + The hooks are produced in the order in which they would be invoked + by a top-level transaction commit. + """ + + def set_data(ob, data): + """Hold data on behalf of an object + + For objects such as data managers or their subobjects that + work with multiple transactions, it's convenient to store + transaction-specific data on the transaction itself. The + transaction knows nothing about the data, but simply holds it + on behalf of the object. + + The object passed should be the object that needs the data, as + opposed to simple object like a string. (Internally, the id of + the object is used as the key.) + """ + + def data(ob): + """Retrieve data held on behalf of an object. + + See set_data. + """ + + def isRetryableError(error): + """Determine if the error is retryable. + + Return true if any joined IRetryDataManager considers the error + transient. Such errors may occur due to concurrency issues in the + underlying storage engine. 
+ + """ + +class ITransactionDeprecated(Interface): + """Deprecated parts of the transaction API.""" + + def begin(info=None): + """Begin a new transaction. + + If the transaction is in progress, it is aborted and a new + transaction is started using the same transaction object. + """ + + # TODO: deprecate this for 3.6. + def register(object): + """Register the given object for transaction control.""" + + +class IDataManager(Interface): + """Objects that manage transactional storage. + + These objects may manage data for other objects, or they may manage + non-object storages, such as relational databases. For example, + a ZODB.Connection. + + Note that when some data is modified, that data's data manager should + join a transaction so that data can be committed when the user commits + the transaction. + """ + + transaction_manager = Attribute( + """The transaction manager (TM) used by this data manager. + + This is a public attribute, intended for read-only use. The value + is an instance of ITransactionManager, typically set by the data + manager's constructor. + """) + + def abort(transaction): + """Abort a transaction and forget all changes. + + Abort must be called outside of a two-phase commit. + + Abort is called by the transaction manager to abort + transactions that are not yet in a two-phase commit. It may + also be called when rolling back a savepoint made before the + data manager joined the transaction. + + In any case, after abort is called, the data manager is no + longer participating in the transaction. If there are new + changes, the data manager must rejoin the transaction. + """ + + # Two-phase commit protocol. These methods are called by the ITransaction + # object associated with the transaction being committed. The sequence + # of calls normally follows this regular expression: + # tpc_begin commit tpc_vote (tpc_finish | tpc_abort) + + def tpc_begin(transaction): + """Begin commit of a transaction, starting the two-phase commit. 
+ + transaction is the ITransaction instance associated with the + transaction being committed. + """ + + def commit(transaction): + """Commit modifications to registered objects. + + Save changes to be made persistent if the transaction commits (if + tpc_finish is called later). If tpc_abort is called later, changes + must not persist. + + This includes conflict detection and handling. If no conflicts or + errors occur, the data manager should be prepared to make the + changes persist when tpc_finish is called. + """ + + def tpc_vote(transaction): + """Verify that a data manager can commit the transaction. + + This is the last chance for a data manager to vote 'no'. A + data manager votes 'no' by raising an exception. + + transaction is the ITransaction instance associated with the + transaction being committed. + """ + + def tpc_finish(transaction): + """Indicate confirmation that the transaction is done. + + Make all changes to objects modified by this transaction persist. + + transaction is the ITransaction instance associated with the + transaction being committed. + + This should never fail. If this raises an exception, the + database is not expected to maintain consistency; it's a + serious error. + """ + + def tpc_abort(transaction): + """Abort a transaction. + + This is called by a transaction manager to end a two-phase commit on + the data manager. Abandon all changes to objects modified by this + transaction. + + transaction is the ITransaction instance associated with the + transaction being committed. + + This should never fail. + """ + + def sortKey(): + """Return a key to use for ordering registered DataManagers. + + In order to guarantee a total ordering, keys must be strings. + + ZODB uses a global sort order to prevent deadlock when it commits + transactions involving multiple resource managers. The resource + manager must define a sortKey() method that provides a global ordering + for resource managers. 
+ """ + # Alternate version: + #"""Return a consistent sort key for this connection. + # + #This allows ordering multiple connections that use the same storage in + #a consistent manner. This is unique for the lifetime of a connection, + #which is good enough to avoid ZEO deadlocks. + #""" + +class ISavepointDataManager(IDataManager): + + def savepoint(): + """Return a data-manager savepoint (IDataManagerSavepoint). + """ + +class IRetryDataManager(IDataManager): + + def should_retry(exception): + """Return whether a given exception instance should be retried. + + A data manager can provide this method to indicate that a a + transaction that raised the given error should be retried. + This method may be called by an ITransactionManager when + considering whether to retry a failed transaction. + """ + +class IDataManagerSavepoint(Interface): + """Savepoint for data-manager changes for use in transaction savepoints. + + Datamanager savepoints are used by, and only by, transaction savepoints. + + Note that data manager savepoints don't have any notion of, or + responsibility for, validity. It isn't the responsibility of + data-manager savepoints to prevent multiple rollbacks or rollbacks after + transaction termination. Preventing invalid savepoint rollback is the + responsibility of transaction rollbacks. Application code should never + use data-manager savepoints. + """ + + def rollback(): + """Rollback any work done since the savepoint. + """ + +class ISavepoint(Interface): + """A transaction savepoint. + """ + + def rollback(): + """Rollback any work done since the savepoint. + + InvalidSavepointRollbackError is raised if the savepoint isn't valid. + """ + + valid = Attribute( + "Boolean indicating whether the savepoint is valid") + +class InvalidSavepointRollbackError(Exception): + """Attempt to rollback an invalid savepoint. + + A savepoint may be invalid because: + + - The surrounding transaction has committed or aborted. 
+ + - An earlier savepoint in the same transaction has been rolled back. + """ + +class ISynchronizer(Interface): + """Objects that participate in the transaction-boundary notification API. + """ + + def beforeCompletion(transaction): + """Hook that is called by the transaction at the start of a commit. + """ + + def afterCompletion(transaction): + """Hook that is called by the transaction after completing a commit. + """ + + def newTransaction(transaction): + """Hook that is called at the start of a transaction. + + This hook is called when, and only when, a transaction manager's + begin() method is called explictly. + """ + +class TransactionError(Exception): + """An error occurred due to normal transaction processing.""" + +class TransactionFailedError(TransactionError): + """Cannot perform an operation on a transaction that previously failed. + + An attempt was made to commit a transaction, or to join a transaction, + but this transaction previously raised an exception during an attempt + to commit it. The transaction must be explicitly aborted, either by + invoking abort() on the transaction, or begin() on its transaction + manager. + """ + +class DoomedTransaction(TransactionError): + """A commit was attempted on a transaction that was doomed.""" + +class TransientError(TransactionError): + """An error has occured when performing a transaction. + + It's possible that retrying the transaction will succeed. + """ + +class NoTransaction(TransactionError): + """No transaction has been defined + + An application called an operation on a transaction manager that + affects an exciting transaction, but no transaction was begun. + The transaction manager was in explicit mode, so a new transaction + was not explicitly created. 
+ """ + +class AlreadyInTransaction(TransactionError): + """Attempt to create a new transaction without ending a preceding one + + An application called ``begin()`` on a transaction manager in + explicit mode, without committing or aborting the previous + transaction. + """ diff --git a/thesisenv/lib/python3.6/site-packages/transaction/tests/__init__.py b/thesisenv/lib/python3.6/site-packages/transaction/tests/__init__.py new file mode 100644 index 0000000..792d600 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/transaction/tests/__init__.py @@ -0,0 +1 @@ +# diff --git a/thesisenv/lib/python3.6/site-packages/transaction/tests/common.py b/thesisenv/lib/python3.6/site-packages/transaction/tests/common.py new file mode 100644 index 0000000..ada7b51 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/transaction/tests/common.py @@ -0,0 +1,64 @@ +############################################################################## +# +# Copyright (c) 2012 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## + + +class DummyFile(object): + def __init__(self): + self._lines = [] + def write(self, text): + self._lines.append(text) + def writelines(self, lines): + self._lines.extend(lines) + + +class DummyLogger(object): + def __init__(self): + self._clear() + def _clear(self): + self._log = [] + def log(self, level, msg, *args, **kwargs): + if args: + self._log.append((level, msg % args)) + else: + self._log.append((level, msg)) + def debug(self, msg, *args, **kw): + self.log('debug', msg, *args, **kw) + def error(self, msg, *args, **kw): + self.log('error', msg, *args, **kw) + def critical(self, msg, *args, **kw): + self.log('critical', msg, *args, **kw) + + +class Monkey(object): + # context-manager for replacing module names in the scope of a test. 
+ def __init__(self, module, **kw): + self.module = module + self.to_restore = {key: getattr(module, key) for key in kw} + for key, value in kw.items(): + setattr(module, key, value) + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + for key, value in self.to_restore.items(): + setattr(self.module, key, value) + +def assertRaisesEx(e_type, checked, *args, **kw): + # Only used in doctests + try: + checked(*args, **kw) + except e_type as e: + return e + raise AssertionError("Didn't raise: %s" % e_type.__name__) diff --git a/thesisenv/lib/python3.6/site-packages/transaction/tests/examples.py b/thesisenv/lib/python3.6/site-packages/transaction/tests/examples.py new file mode 100644 index 0000000..ccfc8d5 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/transaction/tests/examples.py @@ -0,0 +1,181 @@ +############################################################################## +# +# Copyright (c) 2004 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Sample objects for use in tests + +""" + + +class DataManager(object): + """Sample data manager + + Used by the 'datamanager' chapter in the Sphinx docs. 
+ """ + def __init__(self): + self.state = 0 + self.sp = 0 + self.transaction = None + self.delta = 0 + self.prepared = False + + def inc(self, n=1): + self.delta += n + + def prepare(self, transaction): + if self.prepared: + raise TypeError('Already prepared') + self._checkTransaction(transaction) + self.prepared = True + self.transaction = transaction + self.state += self.delta + + def _checkTransaction(self, transaction): + if (transaction is not self.transaction + and self.transaction is not None): + raise TypeError("Transaction missmatch", + transaction, self.transaction) + + def abort(self, transaction): + self._checkTransaction(transaction) + if self.transaction is not None: + self.transaction = None + + if self.prepared: + self.state -= self.delta + self.prepared = False + + self.delta = 0 + + def commit(self, transaction): + if not self.prepared: + raise TypeError('Not prepared to commit') + self._checkTransaction(transaction) + self.delta = 0 + self.transaction = None + self.prepared = False + + def savepoint(self, transaction): + if self.prepared: + raise AssertionError("Can't get savepoint during two-phase commit") + self._checkTransaction(transaction) + self.transaction = transaction + self.sp += 1 + return Rollback(self) + + +class Rollback(object): + + def __init__(self, dm): + self.dm = dm + self.sp = dm.sp + self.delta = dm.delta + self.transaction = dm.transaction + + def rollback(self): + if self.transaction is not self.dm.transaction: + raise TypeError("Attempt to rollback stale rollback") + if self.dm.sp < self.sp: + raise TypeError("Attempt to roll back to invalid save point", + self.sp, self.dm.sp) + self.dm.sp = self.sp + self.dm.delta = self.delta + + +class ResourceManager(object): + """ Sample resource manager. + + Used by the 'resourcemanager' chapter in the Sphinx docs. 
+ """ + def __init__(self): + self.state = 0 + self.sp = 0 + self.transaction = None + self.delta = 0 + self.txn_state = None + + def _check_state(self, *ok_states): + if self.txn_state not in ok_states: + raise ValueError("txn in state %r but expected one of %r" % + (self.txn_state, ok_states)) + + def _checkTransaction(self, transaction): + if (transaction is not self.transaction + and self.transaction is not None): + raise TypeError("Transaction missmatch", + transaction, self.transaction) + + def inc(self, n=1): + self.delta += n + + def tpc_begin(self, transaction): + self._checkTransaction(transaction) + self._check_state(None) + self.transaction = transaction + self.txn_state = 'tpc_begin' + + def tpc_vote(self, transaction): + self._checkTransaction(transaction) + self._check_state('tpc_begin') + self.state += self.delta + self.txn_state = 'tpc_vote' + + def tpc_finish(self, transaction): + self._checkTransaction(transaction) + self._check_state('tpc_vote') + self.delta = 0 + self.transaction = None + self.prepared = False + self.txn_state = None + + def tpc_abort(self, transaction): + self._checkTransaction(transaction) + if self.transaction is not None: + self.transaction = None + + if self.txn_state == 'tpc_vote': + self.state -= self.delta + + self.txn_state = None + self.delta = 0 + + def savepoint(self, transaction): + if self.txn_state is not None: + raise AssertionError("Can't get savepoint during two-phase commit") + self._checkTransaction(transaction) + self.transaction = transaction + self.sp += 1 + return SavePoint(self) + + def discard(self, transaction): + "Does nothing" + + +class SavePoint(object): + + def __init__(self, rm): + self.rm = rm + self.sp = rm.sp + self.delta = rm.delta + self.transaction = rm.transaction + + def rollback(self): + if self.transaction is not self.rm.transaction: + raise TypeError("Attempt to rollback stale rollback") + if self.rm.sp < self.sp: + raise TypeError("Attempt to roll back to invalid save point", + 
self.sp, self.rm.sp) + self.rm.sp = self.sp + self.rm.delta = self.delta + + def discard(self): + "Does nothing." diff --git a/thesisenv/lib/python3.6/site-packages/transaction/tests/savepointsample.py b/thesisenv/lib/python3.6/site-packages/transaction/tests/savepointsample.py new file mode 100644 index 0000000..208afd1 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/transaction/tests/savepointsample.py @@ -0,0 +1,185 @@ +############################################################################## +# +# Copyright (c) 2004 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Savepoint data manager implementation example. + +Sample data manager implementation that illustrates how to implement +savepoints. + +Used by savepoint.rst in the Sphinx docs. +""" + +from zope.interface import implementer +import transaction.interfaces + +@implementer(transaction.interfaces.IDataManager) +class SampleDataManager(object): + """Sample implementation of data manager that doesn't support savepoints + + This data manager stores named simple values, like strings and numbers. 
+ """ + + def __init__(self, transaction_manager=None): + if transaction_manager is None: + # Use the thread-local transaction manager if none is provided: + import transaction + transaction_manager = transaction.manager + self.transaction_manager = transaction_manager + + # Our committed and uncommitted data: + self.committed = {} + self.uncommitted = self.committed.copy() + + # Our transaction state: + # + # If our uncommitted data is modified, we'll join a transaction + # and keep track of the transaction we joined. Any commit + # related messages we get should be for this same transaction + self.transaction = None + + # What phase, if any, of two-phase commit we are in: + self.tpc_phase = None + + + ####################################################################### + # Provide a mapping interface to uncommitted data. We provide + # a basic subset of the interface. DictMixin does the rest. + + def __getitem__(self, name): + return self.uncommitted[name] + + def __setitem__(self, name, value): + self._join() # join the current transaction, if we haven't already + self.uncommitted[name] = value + + def keys(self): + return self.uncommitted.keys() + + __iter__ = keys + + def __contains__(self, k): + return k in self.uncommitted + + def __repr__(self): + return repr(self.uncommitted) + + # + ####################################################################### + + ####################################################################### + # Transaction methods + + def _join(self): + # If this is the first change in the transaction, join the transaction + if self.transaction is None: + self.transaction = self.transaction_manager.get() + self.transaction.join(self) + + def _resetTransaction(self): + self.last_note = getattr(self.transaction, 'description', None) + self.transaction = None + self.tpc_phase = None + + def abort(self, transaction): + """Throw away changes made before the commit process has started + """ + assert ((transaction is self.transaction) or 
(self.transaction is None) + ), "Must not change transactions" + assert self.tpc_phase is None, "Must be called outside of tpc" + self.uncommitted = self.committed.copy() + self._resetTransaction() + + def tpc_begin(self, transaction): + """Enter two-phase commit + """ + assert transaction is self.transaction, "Must not change transactions" + assert self.tpc_phase is None, "Must be called outside of tpc" + self.tpc_phase = 1 + + def commit(self, transaction): + """Record data modified during the transaction + """ + assert transaction is self.transaction, "Must not change transactions" + assert self.tpc_phase == 1, "Must be called in first phase of tpc" + + # In our simple example, we don't need to do anything. + # A more complex data manager would typically write to some sort + # of log. + + def tpc_vote(self, transaction): + assert transaction is self.transaction, "Must not change transactions" + assert self.tpc_phase == 1, "Must be called in first phase of tpc" + # This particular data manager is always ready to vote. + # Real data managers will usually need to take some steps to + # make sure that the finish will succeed + self.tpc_phase = 2 + + def tpc_finish(self, transaction): + assert transaction is self.transaction, "Must not change transactions" + assert self.tpc_phase == 2, "Must be called in second phase of tpc" + self.committed = self.uncommitted.copy() + self._resetTransaction() + + def tpc_abort(self, transaction): + if self.transaction is not None: # pragma: no cover + # otherwise we're not actually joined. + assert self.tpc_phase is not None, "Must be called inside of tpc" + self.uncommitted = self.committed.copy() + self._resetTransaction() + + # + ####################################################################### + + ####################################################################### + # Other data manager methods + + def sortKey(self): + # Commit operations on multiple data managers are performed in + # sort key order. 
This important to avoid deadlock when data + # managers are shared among multiple threads or processes and + # use locks to manage that sharing. We aren't going to bother + # with that here. + return str(id(self)) + + # + ####################################################################### + +@implementer(transaction.interfaces.ISavepointDataManager) +class SampleSavepointDataManager(SampleDataManager): + """Sample implementation of a savepoint-supporting data manager + + This extends the basic data manager with savepoint support. + """ + + def savepoint(self): + # When we create the savepoint, we save the existing database state. + return SampleSavepoint(self, self.uncommitted.copy()) + + def _rollback_savepoint(self, savepoint): + # When we rollback the savepoint, we restore the saved data. + # Caution: without the copy(), further changes to the database + # could reflect in savepoint.data, and then `savepoint` would no + # longer contain the originally saved data, and so `savepoint` + # couldn't restore the original state if a rollback to this + # savepoint was done again. IOW, copy() is necessary. + self.uncommitted = savepoint.data.copy() + +@implementer(transaction.interfaces.IDataManagerSavepoint) +class SampleSavepoint: + + def __init__(self, data_manager, data): + self.data_manager = data_manager + self.data = data + + def rollback(self): + self.data_manager._rollback_savepoint(self) diff --git a/thesisenv/lib/python3.6/site-packages/transaction/tests/test__manager.py b/thesisenv/lib/python3.6/site-packages/transaction/tests/test__manager.py new file mode 100644 index 0000000..4c88238 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/transaction/tests/test__manager.py @@ -0,0 +1,991 @@ +############################################################################## +# +# Copyright (c) 2012 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). 
A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## +import mock +import unittest + +import zope.interface.verify + +from .. import interfaces + + +class TransactionManagerTests(unittest.TestCase): + + def _getTargetClass(self): + from transaction import TransactionManager + return TransactionManager + + def _makeOne(self): + return self._getTargetClass()() + + def _makePopulated(self): + mgr = self._makeOne() + sub1 = DataObject(mgr) + sub2 = DataObject(mgr) + sub3 = DataObject(mgr) + nosub1 = DataObject(mgr, nost=1) + return mgr, sub1, sub2, sub3, nosub1 + + def test_interface(self): + zope.interface.verify.verifyObject(interfaces.ITransactionManager, + self._makeOne()) + + def test_ctor(self): + tm = self._makeOne() + self.assertTrue(tm._txn is None) + self.assertEqual(len(tm._synchs), 0) + + def test_begin_wo_existing_txn_wo_synchs(self): + from transaction._transaction import Transaction + tm = self._makeOne() + tm.begin() + self.assertTrue(isinstance(tm._txn, Transaction)) + + def test_begin_wo_existing_txn_w_synchs(self): + from transaction._transaction import Transaction + tm = self._makeOne() + synch = DummySynch() + tm.registerSynch(synch) + tm.begin() + self.assertTrue(isinstance(tm._txn, Transaction)) + self.assertTrue(tm._txn in synch._txns) + + def test_begin_w_existing_txn(self): + class Existing(object): + _aborted = False + def abort(self): + self._aborted = True + tm = self._makeOne() + tm._txn = txn = Existing() + tm.begin() + self.assertFalse(tm._txn is txn) + self.assertTrue(txn._aborted) + + def test_get_wo_existing_txn(self): + from transaction._transaction import Transaction + tm = self._makeOne() + txn = tm.get() 
+ self.assertTrue(isinstance(txn, Transaction)) + + def test_get_w_existing_txn(self): + class Existing(object): + _aborted = False + def abort(self): + raise AssertionError("This is not actually called") + tm = self._makeOne() + tm._txn = txn = Existing() + self.assertIs(tm.get(), txn) + + def test_free_w_other_txn(self): + from transaction._transaction import Transaction + tm = self._makeOne() + txn = Transaction() + tm.begin() + self.assertRaises(ValueError, tm.free, txn) + + def test_free_w_existing_txn(self): + class Existing(object): + _aborted = False + def abort(self): + raise AssertionError("This is not actually called") + tm = self._makeOne() + tm._txn = txn = Existing() + tm.free(txn) + self.assertIsNone(tm._txn) + + def test_registerSynch(self): + tm = self._makeOne() + synch = DummySynch() + tm.registerSynch(synch) + self.assertEqual(len(tm._synchs), 1) + self.assertTrue(synch in tm._synchs) + + def test_unregisterSynch(self): + tm = self._makeOne() + synch1 = DummySynch() + synch2 = DummySynch() + self.assertFalse(tm.registeredSynchs()) + tm.registerSynch(synch1) + self.assertTrue(tm.registeredSynchs()) + tm.registerSynch(synch2) + self.assertTrue(tm.registeredSynchs()) + tm.unregisterSynch(synch1) + self.assertTrue(tm.registeredSynchs()) + self.assertEqual(len(tm._synchs), 1) + self.assertFalse(synch1 in tm._synchs) + self.assertTrue(synch2 in tm._synchs) + tm.unregisterSynch(synch2) + self.assertFalse(tm.registeredSynchs()) + + def test_clearSynchs(self): + tm = self._makeOne() + synch1 = DummySynch() + synch2 = DummySynch() + tm.registerSynch(synch1) + tm.registerSynch(synch2) + tm.clearSynchs() + self.assertEqual(len(tm._synchs), 0) + + def test_isDoomed_wo_existing_txn(self): + tm = self._makeOne() + self.assertFalse(tm.isDoomed()) + tm._txn.doom() + self.assertTrue(tm.isDoomed()) + + def test_isDoomed_w_existing_txn(self): + class Existing(object): + _doomed = False + def isDoomed(self): + return self._doomed + tm = self._makeOne() + tm._txn = 
txn = Existing() + self.assertFalse(tm.isDoomed()) + txn._doomed = True + self.assertTrue(tm.isDoomed()) + + def test_doom(self): + tm = self._makeOne() + txn = tm.get() + self.assertFalse(txn.isDoomed()) + tm.doom() + self.assertTrue(txn.isDoomed()) + self.assertTrue(tm.isDoomed()) + + def test_commit_w_existing_txn(self): + class Existing(object): + _committed = False + def commit(self): + self._committed = True + tm = self._makeOne() + tm._txn = txn = Existing() + tm.commit() + self.assertTrue(txn._committed) + + def test_abort_w_existing_txn(self): + class Existing(object): + _aborted = False + def abort(self): + self._aborted = True + tm = self._makeOne() + tm._txn = txn = Existing() + tm.abort() + self.assertTrue(txn._aborted) + + def test_as_context_manager_wo_error(self): + class _Test(object): + _committed = False + _aborted = False + def commit(self): + self._committed = True + def abort(self): + raise AssertionError("This should not be called") + tm = self._makeOne() + with tm: + tm._txn = txn = _Test() + self.assertTrue(txn._committed) + self.assertFalse(txn._aborted) + + def test_as_context_manager_w_error(self): + class _Test(object): + _committed = False + _aborted = False + def commit(self): + raise AssertionError("This should not be called") + def abort(self): + self._aborted = True + tm = self._makeOne() + + with self.assertRaises(ZeroDivisionError): + with tm: + tm._txn = txn = _Test() + raise ZeroDivisionError() + + self.assertFalse(txn._committed) + self.assertTrue(txn._aborted) + + def test_savepoint_default(self): + class _Test(object): + _sp = None + def savepoint(self, optimistic): + self._sp = optimistic + tm = self._makeOne() + tm._txn = txn = _Test() + tm.savepoint() + self.assertFalse(txn._sp) + + def test_savepoint_explicit(self): + class _Test(object): + _sp = None + def savepoint(self, optimistic): + self._sp = optimistic + tm = self._makeOne() + tm._txn = txn = _Test() + tm.savepoint(True) + self.assertTrue(txn._sp) + + def 
test_attempts_w_invalid_count(self): + tm = self._makeOne() + self.assertRaises(ValueError, list, tm.attempts(0)) + self.assertRaises(ValueError, list, tm.attempts(-1)) + self.assertRaises(ValueError, list, tm.attempts(-10)) + + def test_attempts_w_valid_count(self): + tm = self._makeOne() + found = list(tm.attempts(1)) + self.assertEqual(len(found), 1) + self.assertTrue(found[0] is tm) + + def test_attempts_stop_on_success(self): + tm = self._makeOne() + + i = 0 + for attempt in tm.attempts(): + with attempt: + i += 1 + + self.assertEqual(i, 1) + + def test_attempts_retries(self): + import transaction.interfaces + class Retry(transaction.interfaces.TransientError): + pass + + tm = self._makeOne() + i = 0 + for attempt in tm.attempts(4): + with attempt: + i += 1 + if i < 4: + raise Retry + + self.assertEqual(i, 4) + + def test_attempts_retries_but_gives_up(self): + import transaction.interfaces + class Retry(transaction.interfaces.TransientError): + pass + + tm = self._makeOne() + i = 0 + + with self.assertRaises(Retry): + for attempt in tm.attempts(4): + with attempt: + i += 1 + raise Retry + + self.assertEqual(i, 4) + + def test_attempts_propigates_errors(self): + tm = self._makeOne() + with self.assertRaises(ValueError): + for attempt in tm.attempts(4): + with attempt: + raise ValueError + + def test_attempts_defer_to_dm(self): + import transaction.tests.savepointsample + + class DM(transaction.tests.savepointsample.SampleSavepointDataManager): + def should_retry(self, e): + if 'should retry' in str(e): + return True + + ntry = 0 + dm = transaction.tests.savepointsample.SampleSavepointDataManager() + dm2 = DM() + with transaction.manager: + dm2['ntry'] = 0 + + for attempt in transaction.manager.attempts(): + with attempt: + ntry += 1 + dm['ntry'] = ntry + dm2['ntry'] = ntry + if ntry % 3: + raise ValueError('we really should retry this') + + self.assertEqual(ntry, 3) + + + def test_attempts_w_default_count(self): + from transaction._manager import Attempt + tm = 
self._makeOne() + found = list(tm.attempts()) + self.assertEqual(len(found), 3) + for attempt in found[:-1]: + self.assertTrue(isinstance(attempt, Attempt)) + self.assertTrue(attempt.manager is tm) + self.assertTrue(found[-1] is tm) + + def test_run(self): + import transaction.interfaces + class Retry(transaction.interfaces.TransientError): + pass + + tm = self._makeOne() + i = [0, None] + + @tm.run() + def meaning(): + "Nice doc" + i[0] += 1 + i[1] = tm.get() + if i[0] < 3: + raise Retry + return 42 + + self.assertEqual(i[0], 3) + self.assertEqual(meaning, 42) + self.assertEqual(i[1].description, "meaning\n\nNice doc") + + def test_run_no_name_explicit_tries(self): + import transaction.interfaces + class Retry(transaction.interfaces.TransientError): + pass + + tm = self._makeOne() + i = [0, None] + + @tm.run(4) + def _(): + "Nice doc" + i[0] += 1 + i[1] = tm.get() + if i[0] < 4: + raise Retry + + self.assertEqual(i[0], 4) + self.assertEqual(i[1].description, "Nice doc") + + def test_run_pos_tries(self): + tm = self._makeOne() + + with self.assertRaises(ValueError): + tm.run(0)(lambda : None) + with self.assertRaises(ValueError): + @tm.run(-1) + def _(): + raise AssertionError("Never called") + + def test_run_stop_on_success(self): + import transaction.interfaces + + tm = self._makeOne() + i = [0, None] + + @tm.run() + def meaning(): + i[0] += 1 + i[1] = tm.get() + return 43 + + self.assertEqual(i[0], 1) + self.assertEqual(meaning, 43) + self.assertEqual(i[1].description, "meaning") + + def test_run_retries_but_gives_up(self): + import transaction.interfaces + class Retry(transaction.interfaces.TransientError): + pass + + tm = self._makeOne() + i = [0] + + with self.assertRaises(Retry): + @tm.run() + def _(): + i[0] += 1 + raise Retry + + self.assertEqual(i[0], 3) + + def test_run_propigates_errors(self): + tm = self._makeOne() + with self.assertRaises(ValueError): + @tm.run + def _(): + raise ValueError + + def test_run_defer_to_dm(self): + import 
transaction.tests.savepointsample + + class DM(transaction.tests.savepointsample.SampleSavepointDataManager): + def should_retry(self, e): + if 'should retry' in str(e): + return True + + ntry = [0] + dm = transaction.tests.savepointsample.SampleSavepointDataManager() + dm2 = DM() + with transaction.manager: + dm2['ntry'] = 0 + + @transaction.manager.run + def _(): + ntry[0] += 1 + dm['ntry'] = ntry[0] + dm2['ntry'] = ntry[0] + if ntry[0] % 3: + raise ValueError('we really should retry this') + + self.assertEqual(ntry[0], 3) + + def test_run_callable_with_bytes_doc(self): + import transaction + class Callable(object): + + def __init__(self): + self.__doc__ = b'some bytes' + self.__name__ = b'more bytes' + + def __call__(self): + return 42 + + result = transaction.manager.run(Callable()) + self.assertEqual(result, 42) + + def test__retryable_w_transient_error(self): + from transaction.interfaces import TransientError + tm = self._makeOne() + self.assertTrue(tm._retryable(TransientError, object())) + + def test__retryable_w_transient_subclass(self): + from transaction.interfaces import TransientError + class _Derived(TransientError): + pass + tm = self._makeOne() + self.assertTrue(tm._retryable(_Derived, object())) + + def test__retryable_w_normal_exception_no_resources(self): + tm = self._makeOne() + self.assertFalse(tm._retryable(Exception, object())) + + def test__retryable_w_normal_exception_w_resource_voting_yes(self): + class _Resource(object): + def should_retry(self, err): + return True + tm = self._makeOne() + tm.get()._resources.append(_Resource()) + self.assertTrue(tm._retryable(Exception, object())) + + def test__retryable_w_multiple(self): + class _Resource(object): + _should = True + def should_retry(self, err): + return self._should + tm = self._makeOne() + res1 = _Resource() + res1._should = False + res2 = _Resource() + tm.get()._resources.append(res1) + tm.get()._resources.append(res2) + self.assertTrue(tm._retryable(Exception, object())) + + # basic 
tests with two sub trans jars + # really we only need one, so tests for + # sub1 should identical to tests for sub2 + def test_commit_normal(self): + + mgr, sub1, sub2, sub3, nosub1 = self._makePopulated() + sub1.modify() + sub2.modify() + + mgr.commit() + + assert sub1._p_jar.ccommit_sub == 0 + assert sub1._p_jar.ctpc_finish == 1 + + def test_abort_normal(self): + + mgr, sub1, sub2, sub3, nosub1 = self._makePopulated() + sub1.modify() + sub2.modify() + + mgr.abort() + + assert sub2._p_jar.cabort == 1 + + + # repeat adding in a nonsub trans jars + + def test_commit_w_nonsub_jar(self): + + mgr, sub1, sub2, sub3, nosub1 = self._makePopulated() + nosub1.modify() + + mgr.commit() + + assert nosub1._p_jar.ctpc_finish == 1 + + def test_abort_w_nonsub_jar(self): + + mgr, sub1, sub2, sub3, nosub1 = self._makePopulated() + nosub1.modify() + + mgr.abort() + + assert nosub1._p_jar.ctpc_finish == 0 + assert nosub1._p_jar.cabort == 1 + + + ### Failure Mode Tests + # + # ok now we do some more interesting + # tests that check the implementations + # error handling by throwing errors from + # various jar methods + ### + + # first the recoverable errors + + def test_abort_w_broken_jar(self): + from transaction import _transaction + from transaction.tests.common import DummyLogger + from transaction.tests.common import Monkey + logger = DummyLogger() + with Monkey(_transaction, _LOGGER=logger): + mgr, sub1, sub2, sub3, nosub1 = self._makePopulated() + sub1._p_jar = BasicJar(errors='abort') + nosub1.modify() + sub1.modify(nojar=1) + sub2.modify() + try: + mgr.abort() + except TestTxnException: + pass + + assert nosub1._p_jar.cabort == 1 + assert sub2._p_jar.cabort == 1 + + def test_commit_w_broken_jar_commit(self): + from transaction import _transaction + from transaction.tests.common import DummyLogger + from transaction.tests.common import Monkey + logger = DummyLogger() + with Monkey(_transaction, _LOGGER=logger): + mgr, sub1, sub2, sub3, nosub1 = self._makePopulated() + 
sub1._p_jar = BasicJar(errors='commit') + nosub1.modify() + sub1.modify(nojar=1) + try: + mgr.commit() + except TestTxnException: + pass + + assert nosub1._p_jar.ctpc_finish == 0 + assert nosub1._p_jar.ccommit == 1 + assert nosub1._p_jar.ctpc_abort == 1 + + def test_commit_w_broken_jar_tpc_vote(self): + from transaction import _transaction + from transaction.tests.common import DummyLogger + from transaction.tests.common import Monkey + logger = DummyLogger() + with Monkey(_transaction, _LOGGER=logger): + mgr, sub1, sub2, sub3, nosub1 = self._makePopulated() + sub1._p_jar = BasicJar(errors='tpc_vote') + nosub1.modify() + sub1.modify(nojar=1) + try: + mgr.commit() + except TestTxnException: + pass + + assert nosub1._p_jar.ctpc_finish == 0 + assert nosub1._p_jar.ccommit == 1 + assert nosub1._p_jar.ctpc_abort == 1 + assert sub1._p_jar.ctpc_abort == 1 + + def test_commit_w_broken_jar_tpc_begin(self): + # ok this test reveals a bug in the TM.py + # as the nosub tpc_abort there is ignored. + + # nosub calling method tpc_begin + # nosub calling method commit + # sub calling method tpc_begin + # sub calling method abort + # sub calling method tpc_abort + # nosub calling method tpc_abort + from transaction import _transaction + from transaction.tests.common import DummyLogger + from transaction.tests.common import Monkey + logger = DummyLogger() + with Monkey(_transaction, _LOGGER=logger): + mgr, sub1, sub2, sub3, nosub1 = self._makePopulated() + sub1._p_jar = BasicJar(errors='tpc_begin') + nosub1.modify() + sub1.modify(nojar=1) + try: + mgr.commit() + except TestTxnException: + pass + + assert nosub1._p_jar.ctpc_abort == 1 + assert sub1._p_jar.ctpc_abort == 1 + + def test_commit_w_broken_jar_tpc_abort_tpc_vote(self): + from transaction import _transaction + from transaction.tests.common import DummyLogger + from transaction.tests.common import Monkey + logger = DummyLogger() + with Monkey(_transaction, _LOGGER=logger): + mgr, sub1, sub2, sub3, nosub1 = 
self._makePopulated() + sub1._p_jar = BasicJar(errors=('tpc_abort', 'tpc_vote')) + nosub1.modify() + sub1.modify(nojar=1) + try: + mgr.commit() + except TestTxnException: + pass + + assert nosub1._p_jar.ctpc_abort == 1 + + def test_notify_transaction_late_comers(self): + # If a datamanager registers for synchonization after a + # transaction has started, we should call newTransaction so it + # can do necessry setup. + import mock + from .. import TransactionManager + manager = TransactionManager() + sync1 = mock.MagicMock() + manager.registerSynch(sync1) + sync1.newTransaction.assert_not_called() + t = manager.begin() + sync1.newTransaction.assert_called_with(t) + sync2 = mock.MagicMock() + manager.registerSynch(sync2) + sync2.newTransaction.assert_called_with(t) + + # for, um, completeness + t.commit() + for s in sync1, sync2: + s.beforeCompletion.assert_called_with(t) + s.afterCompletion.assert_called_with(t) + + def test_unregisterSynch_on_transaction_manager_from_serparate_thread(self): + # We should be able to get the underlying manager of the thread manager + # cand call methods from other threads. 
+ + import threading, transaction + + started = threading.Event() + stopped = threading.Event() + + synchronizer = self + + class Runner(threading.Thread): + + def __init__(self): + threading.Thread.__init__(self) + self.manager = transaction.manager.manager + self.setDaemon(True) + self.start() + + def run(self): + self.manager.registerSynch(synchronizer) + started.set() + stopped.wait() + + runner = Runner() + started.wait() + runner.manager.unregisterSynch(synchronizer) + stopped.set() + runner.join(1) + + +class TestThreadTransactionManager(unittest.TestCase): + + def test_interface(self): + import transaction + zope.interface.verify.verifyObject(interfaces.ITransactionManager, + transaction.manager) + + def test_sync_registration_thread_local_manager(self): + import transaction + + sync = mock.MagicMock() + sync2 = mock.MagicMock() + self.assertFalse(transaction.manager.registeredSynchs()) + transaction.manager.registerSynch(sync) + self.assertTrue(transaction.manager.registeredSynchs()) + transaction.manager.registerSynch(sync2) + self.assertTrue(transaction.manager.registeredSynchs()) + t = transaction.begin() + sync.newTransaction.assert_called_with(t) + transaction.abort() + sync.beforeCompletion.assert_called_with(t) + sync.afterCompletion.assert_called_with(t) + transaction.manager.unregisterSynch(sync) + self.assertTrue(transaction.manager.registeredSynchs()) + transaction.manager.unregisterSynch(sync2) + self.assertFalse(transaction.manager.registeredSynchs()) + sync.reset_mock() + transaction.begin() + transaction.abort() + sync.newTransaction.assert_not_called() + sync.beforeCompletion.assert_not_called() + sync.afterCompletion.assert_not_called() + + self.assertFalse(transaction.manager.registeredSynchs()) + transaction.manager.registerSynch(sync) + transaction.manager.registerSynch(sync2) + t = transaction.begin() + sync.newTransaction.assert_called_with(t) + self.assertTrue(transaction.manager.registeredSynchs()) + transaction.abort() + 
sync.beforeCompletion.assert_called_with(t) + sync.afterCompletion.assert_called_with(t) + transaction.manager.clearSynchs() + self.assertFalse(transaction.manager.registeredSynchs()) + sync.reset_mock() + transaction.begin() + transaction.abort() + sync.newTransaction.assert_not_called() + sync.beforeCompletion.assert_not_called() + sync.afterCompletion.assert_not_called() + + def test_explicit_thread_local_manager(self): + import transaction.interfaces + + self.assertFalse(transaction.manager.explicit) + transaction.abort() + transaction.manager.explicit = True + self.assertTrue(transaction.manager.explicit) + with self.assertRaises(transaction.interfaces.NoTransaction): + transaction.abort() + transaction.manager.explicit = False + transaction.abort() + + +class AttemptTests(unittest.TestCase): + + def _makeOne(self, manager): + from transaction._manager import Attempt + return Attempt(manager) + + def test___enter__(self): + manager = DummyManager() + inst = self._makeOne(manager) + inst.__enter__() + self.assertTrue(manager.entered) + + def test___exit__no_exc_no_commit_exception(self): + manager = DummyManager() + inst = self._makeOne(manager) + result = inst.__exit__(None, None, None) + self.assertFalse(result) + self.assertTrue(manager.committed) + + def test___exit__no_exc_nonretryable_commit_exception(self): + manager = DummyManager(raise_on_commit=ValueError) + inst = self._makeOne(manager) + self.assertRaises(ValueError, inst.__exit__, None, None, None) + self.assertTrue(manager.committed) + self.assertTrue(manager.aborted) + + def test___exit__no_exc_abort_exception_after_nonretryable_commit_exc(self): + manager = DummyManager(raise_on_abort=ValueError, + raise_on_commit=KeyError) + inst = self._makeOne(manager) + self.assertRaises(ValueError, inst.__exit__, None, None, None) + self.assertTrue(manager.committed) + self.assertTrue(manager.aborted) + + def test___exit__no_exc_retryable_commit_exception(self): + from transaction.interfaces import 
TransientError + manager = DummyManager(raise_on_commit=TransientError) + inst = self._makeOne(manager) + result = inst.__exit__(None, None, None) + self.assertTrue(result) + self.assertTrue(manager.committed) + self.assertTrue(manager.aborted) + + def test___exit__with_exception_value_retryable(self): + from transaction.interfaces import TransientError + manager = DummyManager() + inst = self._makeOne(manager) + result = inst.__exit__(TransientError, TransientError(), None) + self.assertTrue(result) + self.assertFalse(manager.committed) + self.assertTrue(manager.aborted) + + def test___exit__with_exception_value_nonretryable(self): + manager = DummyManager() + inst = self._makeOne(manager) + self.assertRaises(KeyError, inst.__exit__, KeyError, KeyError(), None) + self.assertFalse(manager.committed) + self.assertTrue(manager.aborted) + + def test_explicit_mode(self): + from .. import TransactionManager + from ..interfaces import AlreadyInTransaction, NoTransaction + + tm = TransactionManager() + self.assertFalse(tm.explicit) + + tm = TransactionManager(explicit=True) + self.assertTrue(tm.explicit) + for name in 'get', 'commit', 'abort', 'doom', 'isDoomed', 'savepoint': + with self.assertRaises(NoTransaction): + getattr(tm, name)() + + t = tm.begin() + with self.assertRaises(AlreadyInTransaction): + tm.begin() + + self.assertTrue(t is tm.get()) + + self.assertFalse(tm.isDoomed()) + tm.doom() + self.assertTrue(tm.isDoomed()) + tm.abort() + + for name in 'get', 'commit', 'abort', 'doom', 'isDoomed', 'savepoint': + with self.assertRaises(NoTransaction): + getattr(tm, name)() + + t = tm.begin() + self.assertFalse(tm.isDoomed()) + with self.assertRaises(AlreadyInTransaction): + tm.begin() + tm.savepoint() + tm.commit() + + + +class DummyManager(object): + entered = False + committed = False + aborted = False + + def __init__(self, raise_on_commit=None, raise_on_abort=None): + self.raise_on_commit = raise_on_commit + self.raise_on_abort = raise_on_abort + + def 
_retryable(self, t, v): + from transaction._manager import TransientError + return issubclass(t, TransientError) + + def __enter__(self): + self.entered = True + + def abort(self): + self.aborted = True + if self.raise_on_abort: + raise self.raise_on_abort + + def commit(self): + self.committed = True + if self.raise_on_commit: + raise self.raise_on_commit + + +class DataObject: + + def __init__(self, transaction_manager, nost=0): + self.transaction_manager = transaction_manager + self.nost = nost + self._p_jar = None + + def modify(self, nojar=0, tracing=0): + if not nojar: + if self.nost: + self._p_jar = BasicJar(tracing=tracing) + else: + self._p_jar = BasicJar(tracing=tracing) + self.transaction_manager.get().join(self._p_jar) + + +class TestTxnException(Exception): + pass + + +class BasicJar(object): + + def __init__(self, errors=(), tracing=0): + if not isinstance(errors, tuple): + errors = errors, + self.errors = errors + self.tracing = tracing + self.cabort = 0 + self.ccommit = 0 + self.ctpc_begin = 0 + self.ctpc_abort = 0 + self.ctpc_vote = 0 + self.ctpc_finish = 0 + self.cabort_sub = 0 + self.ccommit_sub = 0 + + def __repr__(self): + return "<%s %X %s>" % (self.__class__.__name__, + positive_id(self), + self.errors) + + def sortKey(self): + # All these jars use the same sort key, and Python's list.sort() + # is stable. 
These two + return self.__class__.__name__ + + def check(self, method): + if self.tracing: # pragma: no cover + print('%s calling method %s'%(str(self.tracing),method)) + + if method in self.errors: + raise TestTxnException("error %s" % method) + + ## basic jar txn interface + + def abort(self, *args): + self.check('abort') + self.cabort += 1 + + def commit(self, *args): + self.check('commit') + self.ccommit += 1 + + def tpc_begin(self, txn, sub=0): + self.check('tpc_begin') + self.ctpc_begin += 1 + + def tpc_vote(self, *args): + self.check('tpc_vote') + self.ctpc_vote += 1 + + def tpc_abort(self, *args): + self.check('tpc_abort') + self.ctpc_abort += 1 + + def tpc_finish(self, *args): + self.check('tpc_finish') + self.ctpc_finish += 1 + + +class DummySynch(object): + def __init__(self): + self._txns = set() + def newTransaction(self, txn): + self._txns.add(txn) + + +def positive_id(obj): + """Return id(obj) as a non-negative integer.""" + import struct + _ADDRESS_MASK = 256 ** struct.calcsize('P') + + result = id(obj) + if result < 0: # pragma: no cover + # Happens...on old 32-bit systems? + result += _ADDRESS_MASK + assert result > 0 + return result diff --git a/thesisenv/lib/python3.6/site-packages/transaction/tests/test__transaction.py b/thesisenv/lib/python3.6/site-packages/transaction/tests/test__transaction.py new file mode 100644 index 0000000..575a663 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/transaction/tests/test__transaction.py @@ -0,0 +1,1701 @@ +# -*- coding: utf-8 -*- +############################################################################## +# +# Copyright (c) 2001, 2002, 2005 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## +"""Test transaction behavior for variety of cases. + +I wrote these unittests to investigate some odd transaction +behavior when doing unittests of integrating non sub transaction +aware objects, and to insure proper txn behavior. these +tests test the transaction system independent of the rest of the +zodb. + +you can see the method calls to a jar by passing the +keyword arg tracing to the modify method of a dataobject. +the value of the arg is a prefix used for tracing print calls +to that objects jar. + +the number of times a jar method was called can be inspected +by looking at an attribute of the jar that is the method +name prefixed with a c (count/check). + +i've included some tracing examples for tests that i thought +were illuminating as doc strings below. + +TODO + + add in tests for objects which are modified multiple times, + for example an object that gets modified in multiple sub txns. 
+""" +import os +import warnings +import unittest + + +class TransactionTests(unittest.TestCase): + + def _getTargetClass(self): + from transaction._transaction import Transaction + return Transaction + + def _makeOne(self, synchronizers=None, manager=None): + return self._getTargetClass()(synchronizers, manager) + + def test_verifyImplements_ITransaction(self): + from zope.interface.verify import verifyClass + from transaction.interfaces import ITransaction + verifyClass(ITransaction, self._getTargetClass()) + + def test_verifyProvides_ITransaction(self): + from zope.interface.verify import verifyObject + from transaction.interfaces import ITransaction + verifyObject(ITransaction, self._makeOne()) + + def test_ctor_defaults(self): + from transaction.weakset import WeakSet + from transaction.tests.common import DummyLogger + from transaction.tests.common import Monkey + from transaction import _transaction + logger = DummyLogger() + with Monkey(_transaction, _LOGGER=logger): + txn = self._makeOne() + self.assertTrue(isinstance(txn._synchronizers, WeakSet)) + self.assertEqual(len(txn._synchronizers), 0) + self.assertTrue(txn._manager is None) + self.assertEqual(txn.user, u"") + self.assertEqual(txn.description, u"") + self.assertTrue(txn._savepoint2index is None) + self.assertEqual(txn._savepoint_index, 0) + self.assertEqual(txn._resources, []) + self.assertEqual(txn._adapters, {}) + self.assertEqual(txn._voted, {}) + self.assertEqual(txn.extension, {}) + self.assertTrue(txn._extension is txn.extension) # legacy + self.assertTrue(txn.log is logger) + self.assertEqual(len(logger._log), 1) + self.assertEqual(logger._log[0][0], 'debug') + self.assertEqual(logger._log[0][1], 'new transaction') + self.assertTrue(txn._failure_traceback is None) + self.assertEqual(txn._before_commit, []) + self.assertEqual(txn._after_commit, []) + + def test_ctor_w_syncs(self): + from transaction.weakset import WeakSet + synchs = WeakSet() + txn = self._makeOne(synchronizers=synchs) + 
self.assertTrue(txn._synchronizers is synchs) + + def test_isDoomed(self): + from transaction._transaction import Status + txn = self._makeOne() + self.assertFalse(txn.isDoomed()) + txn.status = Status.DOOMED + self.assertTrue(txn.isDoomed()) + + def test_doom_active(self): + from transaction._transaction import Status + txn = self._makeOne() + txn.doom() + self.assertTrue(txn.isDoomed()) + self.assertEqual(txn.status, Status.DOOMED) + + def test_doom_invalid(self): + from transaction._transaction import Status + txn = self._makeOne() + for status in Status.COMMITTING, Status.COMMITTED, Status.COMMITFAILED: + txn.status = status + self.assertRaises(ValueError, txn.doom) + + def test_doom_already_doomed(self): + from transaction._transaction import Status + txn = self._makeOne() + txn.status = Status.DOOMED + txn.doom() + self.assertTrue(txn.isDoomed()) + self.assertEqual(txn.status, Status.DOOMED) + + def test__prior_operation_failed(self): + from transaction.interfaces import TransactionFailedError + class _Traceback(object): + def getvalue(self): + return 'TRACEBACK' + txn = self._makeOne() + txn._failure_traceback = _Traceback() + with self.assertRaises(TransactionFailedError) as exc: + txn._prior_operation_failed() + err = exc.exception + self.assertTrue(str(err).startswith('An operation previously failed')) + self.assertTrue(str(err).endswith( "with traceback:\n\nTRACEBACK")) + + def test_join_COMMITFAILED(self): + from transaction.interfaces import TransactionFailedError + from transaction._transaction import Status + class _Traceback(object): + def getvalue(self): + return 'TRACEBACK' + txn = self._makeOne() + txn.status = Status.COMMITFAILED + txn._failure_traceback = _Traceback() + self.assertRaises(TransactionFailedError, txn.join, object()) + + def test_join_COMMITTING(self): + from transaction._transaction import Status + txn = self._makeOne() + txn.status = Status.COMMITTING + self.assertRaises(ValueError, txn.join, object()) + + def 
test_join_COMMITTED(self): + from transaction._transaction import Status + txn = self._makeOne() + txn.status = Status.COMMITTED + self.assertRaises(ValueError, txn.join, object()) + + def test_join_DOOMED_non_preparing_wo_sp2index(self): + from transaction._transaction import Status + txn = self._makeOne() + txn.status = Status.DOOMED + resource = object() + txn.join(resource) + self.assertEqual(txn._resources, [resource]) + + def test_join_ACTIVE_w_preparing_w_sp2index(self): + from transaction._transaction import AbortSavepoint + from transaction._transaction import DataManagerAdapter + class _TSP(object): + def __init__(self): + self._savepoints = [] + class _DM(object): + def prepare(self): + raise AssertionError("Not called") + txn = self._makeOne() + tsp = _TSP() + txn._savepoint2index = {tsp: object()} + dm = _DM + txn.join(dm) + self.assertEqual(len(txn._resources), 1) + dma = txn._resources[0] + self.assertTrue(isinstance(dma, DataManagerAdapter)) + self.assertTrue(txn._resources[0]._datamanager is dm) + self.assertEqual(len(tsp._savepoints), 1) + self.assertTrue(isinstance(tsp._savepoints[0], AbortSavepoint)) + self.assertTrue(tsp._savepoints[0].datamanager is dma) + self.assertTrue(tsp._savepoints[0].transaction is txn) + + def test__unjoin_miss(self): + txn = self._makeOne() + txn._unjoin(object()) #no raise + + def test__unjoin_hit(self): + txn = self._makeOne() + resource = object() + txn._resources.append(resource) + txn._unjoin(resource) + self.assertEqual(txn._resources, []) + + def test_savepoint_COMMITFAILED(self): + from transaction.interfaces import TransactionFailedError + from transaction._transaction import Status + class _Traceback(object): + def getvalue(self): + return 'TRACEBACK' + txn = self._makeOne() + txn.status = Status.COMMITFAILED + txn._failure_traceback = _Traceback() + self.assertRaises(TransactionFailedError, txn.savepoint) + + def test_savepoint_empty(self): + from weakref import WeakKeyDictionary + from transaction import 
_transaction + from transaction._transaction import Savepoint + from transaction.tests.common import DummyLogger + from transaction.tests.common import Monkey + logger = DummyLogger() + with Monkey(_transaction, _LOGGER=logger): + txn = self._makeOne() + sp = txn.savepoint() + self.assertTrue(isinstance(sp, Savepoint)) + self.assertTrue(sp.transaction is txn) + self.assertEqual(sp._savepoints, []) + self.assertEqual(txn._savepoint_index, 1) + self.assertTrue(isinstance(txn._savepoint2index, WeakKeyDictionary)) + self.assertEqual(txn._savepoint2index[sp], 1) + + def test_savepoint_non_optimistc_resource_wo_support(self): + from transaction import _transaction + from transaction._transaction import Status + from transaction._compat import StringIO + from transaction.tests.common import DummyLogger + from transaction.tests.common import Monkey + logger = DummyLogger() + with Monkey(_transaction, _LOGGER=logger): + txn = self._makeOne() + logger._clear() + resource = object() + txn._resources.append(resource) + self.assertRaises(TypeError, txn.savepoint) + self.assertEqual(txn.status, Status.COMMITFAILED) + self.assertTrue(isinstance(txn._failure_traceback, StringIO)) + self.assertTrue('TypeError' in txn._failure_traceback.getvalue()) + self.assertEqual(len(logger._log), 2) + self.assertEqual(logger._log[0][0], 'error') + self.assertTrue(logger._log[0][1].startswith('Error in abort')) + self.assertEqual(logger._log[1][0], 'error') + self.assertTrue(logger._log[1][1].startswith('Error in tpc_abort')) + + def test__remove_and_invalidate_after_miss(self): + from weakref import WeakKeyDictionary + txn = self._makeOne() + txn._savepoint2index = WeakKeyDictionary() + class _SP(object): + def __init__(self, txn): + self.transaction = txn + holdme = [] + for i in range(10): + sp = _SP(txn) + holdme.append(sp) #prevent gc + txn._savepoint2index[sp] = i + self.assertEqual(len(txn._savepoint2index), 10) + self.assertRaises(KeyError, txn._remove_and_invalidate_after, _SP(txn)) + 
self.assertEqual(len(txn._savepoint2index), 10) + + def test__remove_and_invalidate_after_hit(self): + from weakref import WeakKeyDictionary + txn = self._makeOne() + txn._savepoint2index = WeakKeyDictionary() + class _SP(object): + def __init__(self, txn, index): + self.transaction = txn + self._index = index + def __lt__(self, other): + return self._index < other._index + def __repr__(self): # pragma: no cover + return '_SP: %d' % self._index + holdme = [] + for i in range(10): + sp = _SP(txn, i) + holdme.append(sp) #prevent gc + txn._savepoint2index[sp] = i + self.assertEqual(len(txn._savepoint2index), 10) + txn._remove_and_invalidate_after(holdme[1]) + self.assertEqual(sorted(txn._savepoint2index), sorted(holdme[:2])) + + def test__invalidate_all_savepoints(self): + from weakref import WeakKeyDictionary + txn = self._makeOne() + txn._savepoint2index = WeakKeyDictionary() + class _SP(object): + def __init__(self, txn, index): + self.transaction = txn + self._index = index + def __repr__(self): # pragma: no cover + return '_SP: %d' % self._index + holdme = [] + for i in range(10): + sp = _SP(txn, i) + holdme.append(sp) #prevent gc + txn._savepoint2index[sp] = i + self.assertEqual(len(txn._savepoint2index), 10) + txn._invalidate_all_savepoints() + self.assertEqual(list(txn._savepoint2index), []) + + def test_register_wo_jar(self): + class _Dummy(object): + _p_jar = None + txn = self._makeOne() + self.assertRaises(ValueError, txn.register, _Dummy()) + + def test_register_w_jar(self): + class _Manager(object): + pass + mgr = _Manager() + class _Dummy(object): + _p_jar = mgr + txn = self._makeOne() + dummy = _Dummy() + txn.register(dummy) + resources = list(txn._resources) + self.assertEqual(len(resources), 1) + adapter = resources[0] + self.assertTrue(adapter.manager is mgr) + self.assertTrue(dummy in adapter.objects) + items = list(txn._adapters.items()) + self.assertEqual(len(items), 1) + self.assertTrue(items[0][0] is mgr) + self.assertTrue(items[0][1] is 
adapter) + + def test_register_w_jar_already_adapted(self): + class _Adapter(object): + def __init__(self): + self.objects = [] + class _Manager(object): + pass + mgr = _Manager() + class _Dummy(object): + _p_jar = mgr + txn = self._makeOne() + txn._adapters[mgr] = adapter = _Adapter() + dummy = _Dummy() + txn.register(dummy) + self.assertTrue(dummy in adapter.objects) + + def test_commit_DOOMED(self): + from transaction.interfaces import DoomedTransaction + from transaction._transaction import Status + txn = self._makeOne() + txn.status = Status.DOOMED + self.assertRaises(DoomedTransaction, txn.commit) + + def test_commit_COMMITFAILED(self): + from transaction._transaction import Status + from transaction.interfaces import TransactionFailedError + class _Traceback(object): + def getvalue(self): + return 'TRACEBACK' + txn = self._makeOne() + txn.status = Status.COMMITFAILED + txn._failure_traceback = _Traceback() + self.assertRaises(TransactionFailedError, txn.commit) + + def test_commit_wo_savepoints_wo_hooks_wo_synchronizers(self): + from transaction._transaction import Status + from transaction.tests.common import DummyLogger + from transaction.tests.common import Monkey + from transaction import _transaction + class _Mgr(object): + def __init__(self, txn): + self._txn = txn + def free(self, txn): + assert txn is self._txn + self._txn = None + logger = DummyLogger() + with Monkey(_transaction, _LOGGER=logger): + txn = self._makeOne() + logger._clear() + mgr = txn._manager = _Mgr(txn) + txn.commit() + self.assertEqual(txn.status, Status.COMMITTED) + self.assertTrue(mgr._txn is None) + self.assertEqual(logger._log[0][0], 'debug') + self.assertEqual(logger._log[0][1], 'commit') + + def test_commit_w_savepoints(self): + from weakref import WeakKeyDictionary + from transaction.tests.common import DummyLogger + from transaction.tests.common import Monkey + from transaction import _transaction + class _SP(object): + def __init__(self, txn, index): + self.transaction = 
txn + self._index = index + def __repr__(self): # pragma: no cover + return '_SP: %d' % self._index + logger = DummyLogger() + with Monkey(_transaction, _LOGGER=logger): + txn = self._makeOne() + txn._savepoint2index = WeakKeyDictionary() + holdme = [] + for i in range(10): + sp = _SP(txn, i) + holdme.append(sp) #prevent gc + txn._savepoint2index[sp] = i + logger._clear() + txn.commit() + self.assertEqual(list(txn._savepoint2index), []) + + def test_commit_w_beforeCommitHooks(self): + from transaction.tests.common import DummyLogger + from transaction.tests.common import Monkey + from transaction import _transaction + _hooked1, _hooked2 = [], [] + def _hook1(*args, **kw): + _hooked1.append((args, kw)) + def _hook2(*args, **kw): + _hooked2.append((args, kw)) + logger = DummyLogger() + with Monkey(_transaction, _LOGGER=logger): + txn = self._makeOne() + txn._before_commit.append((_hook1, ('one',), {'uno': 1})) + txn._before_commit.append((_hook2, (), {})) + logger._clear() + txn.commit() + self.assertEqual(_hooked1, [(('one',), {'uno': 1})]) + self.assertEqual(_hooked2, [((), {})]) + self.assertEqual(txn._before_commit, []) + + def test_commit_w_synchronizers(self): + from transaction.weakset import WeakSet + from transaction.tests.common import DummyLogger + from transaction.tests.common import Monkey + from transaction import _transaction + class _Synch(object): + _before = _after = False + def beforeCompletion(self, txn): + self._before = txn + def afterCompletion(self, txn): + self._after = txn + synchs = [_Synch(), _Synch(), _Synch()] + ws = WeakSet() + for synch in synchs: + ws.add(synch) + logger = DummyLogger() + with Monkey(_transaction, _LOGGER=logger): + txn = self._makeOne(synchronizers=ws) + logger._clear() + txn.commit() + for synch in synchs: + self.assertTrue(synch._before is txn) + self.assertTrue(synch._after is txn) + + def test_commit_w_afterCommitHooks(self): + from transaction.tests.common import DummyLogger + from transaction.tests.common 
import Monkey + from transaction import _transaction + _hooked1, _hooked2 = [], [] + def _hook1(*args, **kw): + _hooked1.append((args, kw)) + def _hook2(*args, **kw): + _hooked2.append((args, kw)) + logger = DummyLogger() + with Monkey(_transaction, _LOGGER=logger): + txn = self._makeOne() + txn._after_commit.append((_hook1, ('one',), {'uno': 1})) + txn._after_commit.append((_hook2, (), {})) + logger._clear() + txn.commit() + self.assertEqual(_hooked1, [((True, 'one',), {'uno': 1})]) + self.assertEqual(_hooked2, [((True,), {})]) + self.assertEqual(txn._after_commit, []) + self.assertEqual(txn._resources, []) + + def test_commit_error_w_afterCompleteHooks(self): + from transaction import _transaction + from transaction.tests.common import DummyLogger + from transaction.tests.common import Monkey + class BrokenResource(object): + def sortKey(self): + return 'zzz' + def tpc_begin(self, txn): + raise ValueError('test') + broken = BrokenResource() + resource = Resource('aaa') + _hooked1, _hooked2 = [], [] + def _hook1(*args, **kw): + _hooked1.append((args, kw)) + def _hook2(*args, **kw): + _hooked2.append((args, kw)) + logger = DummyLogger() + with Monkey(_transaction, _LOGGER=logger): + txn = self._makeOne() + txn._after_commit.append((_hook1, ('one',), {'uno': 1})) + txn._after_commit.append((_hook2, (), {})) + txn._resources.append(broken) + txn._resources.append(resource) + logger._clear() + self.assertRaises(ValueError, txn.commit) + self.assertEqual(_hooked1, [((False, 'one',), {'uno': 1})]) + self.assertEqual(_hooked2, [((False,), {})]) + self.assertEqual(txn._after_commit, []) + self.assertTrue(resource._b) + self.assertFalse(resource._c) + self.assertFalse(resource._v) + self.assertFalse(resource._f) + self.assertTrue(resource._a) + self.assertTrue(resource._x) + + def test_commit_error_w_synchronizers(self): + from transaction.weakset import WeakSet + from transaction.tests.common import DummyLogger + from transaction.tests.common import Monkey + from 
transaction import _transaction + class _Synch(object): + _before = _after = False + def beforeCompletion(self, txn): + self._before = txn + def afterCompletion(self, txn): + self._after = txn + synchs = [_Synch(), _Synch(), _Synch()] + ws = WeakSet() + for synch in synchs: + ws.add(synch) + class BrokenResource(object): + def sortKey(self): + return 'zzz' + def tpc_begin(self, txn): + raise ValueError('test') + broken = BrokenResource() + logger = DummyLogger() + with Monkey(_transaction, _LOGGER=logger): + txn = self._makeOne(synchronizers=ws) + logger._clear() + txn._resources.append(broken) + self.assertRaises(ValueError, txn.commit) + for synch in synchs: + self.assertTrue(synch._before is txn) + self.assertTrue(synch._after is txn) #called in _cleanup + + def test_commit_clears_resources(self): + class DM(object): + tpc_begin = commit = tpc_finish = tpc_vote = lambda s, txn: True + + dm = DM() + txn = self._makeOne() + txn.join(dm) + self.assertEqual(txn._resources, [dm]) + txn.commit() + self.assertEqual(txn._resources, []) + + def test_getBeforeCommitHooks_empty(self): + txn = self._makeOne() + self.assertEqual(list(txn.getBeforeCommitHooks()), []) + + def test_addBeforeCommitHook(self): + def _hook(*args, **kw): + raise AssertionError("Not called") + txn = self._makeOne() + txn.addBeforeCommitHook(_hook, ('one',), dict(uno=1)) + self.assertEqual(list(txn.getBeforeCommitHooks()), + [(_hook, ('one',), {'uno': 1})]) + + def test_addBeforeCommitHook_w_kws(self): + def _hook(*args, **kw): + raise AssertionError("Not called") + txn = self._makeOne() + txn.addBeforeCommitHook(_hook, ('one',)) + self.assertEqual(list(txn.getBeforeCommitHooks()), + [(_hook, ('one',), {})]) + + def test_getAfterCommitHooks_empty(self): + txn = self._makeOne() + self.assertEqual(list(txn.getAfterCommitHooks()), []) + + def test_addAfterCommitHook(self): + def _hook(*args, **kw): + raise AssertionError("Not called") + txn = self._makeOne() + txn.addAfterCommitHook(_hook, ('one',), 
dict(uno=1)) + self.assertEqual(list(txn.getAfterCommitHooks()), + [(_hook, ('one',), {'uno': 1})]) + + def test_addAfterCommitHook_wo_kws(self): + def _hook(*args, **kw): + raise AssertionError("Not called") + txn = self._makeOne() + txn.addAfterCommitHook(_hook, ('one',)) + self.assertEqual(list(txn.getAfterCommitHooks()), + [(_hook, ('one',), {})]) + + def test_callAfterCommitHook_w_error(self): + from transaction.tests.common import DummyLogger + from transaction.tests.common import Monkey + from transaction import _transaction + _hooked2 = [] + def _hook1(*args, **kw): + raise ValueError() + def _hook2(*args, **kw): + _hooked2.append((args, kw)) + logger = DummyLogger() + with Monkey(_transaction, _LOGGER=logger): + txn = self._makeOne() + logger._clear() + txn.addAfterCommitHook(_hook1, ('one',)) + txn.addAfterCommitHook(_hook2, ('two',), dict(dos=2)) + txn._callAfterCommitHooks() + # second hook gets called even if first raises + self.assertEqual(_hooked2, [((True, 'two',), {'dos': 2})]) + self.assertEqual(len(logger._log), 1) + self.assertEqual(logger._log[0][0], 'error') + self.assertTrue(logger._log[0][1].startswith( + "Error in after commit hook")) + + def test_callAfterCommitHook_w_abort(self): + from transaction.tests.common import DummyLogger + from transaction.tests.common import Monkey + from transaction import _transaction + _hooked2 = [] + def _hook1(*args, **kw): + raise ValueError() + def _hook2(*args, **kw): + _hooked2.append((args, kw)) + logger = DummyLogger() + with Monkey(_transaction, _LOGGER=logger): + txn = self._makeOne() + logger._clear() + txn.addAfterCommitHook(_hook1, ('one',)) + txn.addAfterCommitHook(_hook2, ('two',), dict(dos=2)) + txn._callAfterCommitHooks() + self.assertEqual(logger._log[0][0], 'error') + self.assertTrue(logger._log[0][1].startswith( + "Error in after commit hook")) + + def test__commitResources_normal(self): + from transaction.tests.common import DummyLogger + from transaction.tests.common import Monkey + from 
transaction import _transaction + resources = [Resource('bbb'), Resource('aaa')] + logger = DummyLogger() + with Monkey(_transaction, _LOGGER=logger): + txn = self._makeOne() + logger._clear() + txn._resources.extend(resources) + txn._commitResources() + self.assertEqual(len(txn._voted), 2) + for r in resources: + self.assertTrue(r._b and r._c and r._v and r._f) + self.assertFalse(r._a and r._x) + self.assertTrue(id(r) in txn._voted) + self.assertEqual(len(logger._log), 2) + self.assertEqual(logger._log[0][0], 'debug') + self.assertEqual(logger._log[0][1], 'commit Resource: aaa') + self.assertEqual(logger._log[1][0], 'debug') + self.assertEqual(logger._log[1][1], 'commit Resource: bbb') + + def test__commitResources_error_in_tpc_begin(self): + from transaction.tests.common import DummyLogger + from transaction.tests.common import Monkey + from transaction import _transaction + resources = [Resource('bbb', 'tpc_begin'), Resource('aaa')] + logger = DummyLogger() + with Monkey(_transaction, _LOGGER=logger): + txn = self._makeOne() + logger._clear() + txn._resources.extend(resources) + self.assertRaises(ValueError, txn._commitResources) + for r in resources: + if r._key == 'aaa': + self.assertTrue(r._b) + else: + self.assertFalse(r._b) + self.assertFalse(r._c and r._v and r._f) + self.assertTrue(r._a and r._x) + self.assertEqual(len(logger._log), 0) + + def test__commitResources_error_in_afterCompletion(self): + from transaction.tests.common import DummyLogger + from transaction.tests.common import Monkey + from transaction import _transaction + class _Synchrnonizers(object): + def __init__(self, res): + self._res = res + def map(self, func): + for res in self._res: + func(res) + resources = [Resource('bbb', 'tpc_begin'), + Resource('aaa', 'afterCompletion')] + sync = _Synchrnonizers(resources) + logger = DummyLogger() + with Monkey(_transaction, _LOGGER=logger): + txn = self._makeOne(sync) + logger._clear() + txn._resources.extend(resources) + 
self.assertRaises(ValueError, txn._commitResources) + for r in resources: + if r._key == 'aaa': + self.assertTrue(r._b) + else: + self.assertFalse(r._b) + self.assertFalse(r._c and r._v and r._f) + self.assertTrue(r._a and r._x) + self.assertEqual(len(logger._log), 0) + self.assertTrue(resources[0]._after) + self.assertFalse(resources[1]._after) + + def test__commitResources_error_in_commit(self): + from transaction.tests.common import DummyLogger + from transaction.tests.common import Monkey + from transaction import _transaction + resources = [Resource('bbb', 'commit'), Resource('aaa')] + logger = DummyLogger() + with Monkey(_transaction, _LOGGER=logger): + txn = self._makeOne() + logger._clear() + txn._resources.extend(resources) + self.assertRaises(ValueError, txn._commitResources) + for r in resources: + self.assertTrue(r._b) + if r._key == 'aaa': + self.assertTrue(r._c) + else: + self.assertFalse(r._c) + self.assertFalse(r._v and r._f) + self.assertTrue(r._a and r._x) + self.assertEqual(len(logger._log), 1) + self.assertEqual(logger._log[0][0], 'debug') + self.assertEqual(logger._log[0][1], 'commit Resource: aaa') + + def test__commitResources_error_in_tpc_vote(self): + from transaction.tests.common import DummyLogger + from transaction.tests.common import Monkey + from transaction import _transaction + resources = [Resource('bbb', 'tpc_vote'), Resource('aaa')] + logger = DummyLogger() + with Monkey(_transaction, _LOGGER=logger): + txn = self._makeOne() + logger._clear() + txn._resources.extend(resources) + self.assertRaises(ValueError, txn._commitResources) + self.assertEqual(len(txn._voted), 1) + for r in resources: + self.assertTrue(r._b and r._c) + if r._key == 'aaa': + self.assertTrue(id(r) in txn._voted) + self.assertTrue(r._v) + self.assertFalse(r._f) + self.assertFalse(r._a) + self.assertTrue(r._x) + else: + self.assertFalse(id(r) in txn._voted) + self.assertFalse(r._v) + self.assertFalse(r._f) + self.assertTrue(r._a and r._x) + 
self.assertEqual(len(logger._log), 2) + self.assertEqual(logger._log[0][0], 'debug') + self.assertEqual(logger._log[0][1], 'commit Resource: aaa') + self.assertEqual(logger._log[1][0], 'debug') + self.assertEqual(logger._log[1][1], 'commit Resource: bbb') + + def test__commitResources_error_in_tpc_finish(self): + from transaction.tests.common import DummyLogger + from transaction.tests.common import Monkey + from transaction import _transaction + resources = [Resource('bbb', 'tpc_finish'), Resource('aaa')] + logger = DummyLogger() + with Monkey(_transaction, _LOGGER=logger): + txn = self._makeOne() + logger._clear() + txn._resources.extend(resources) + self.assertRaises(ValueError, txn._commitResources) + for r in resources: + self.assertTrue(r._b and r._c and r._v) + self.assertTrue(id(r) in txn._voted) + if r._key == 'aaa': + self.assertTrue(r._f) + else: + self.assertFalse(r._f) + self.assertFalse(r._a and r._x) #no cleanup if tpc_finish raises + self.assertEqual(len(logger._log), 3) + self.assertEqual(logger._log[0][0], 'debug') + self.assertEqual(logger._log[0][1], 'commit Resource: aaa') + self.assertEqual(logger._log[1][0], 'debug') + self.assertEqual(logger._log[1][1], 'commit Resource: bbb') + self.assertEqual(logger._log[2][0], 'critical') + self.assertTrue(logger._log[2][1].startswith( + 'A storage error occurred')) + + def test_abort_wo_savepoints_wo_hooks_wo_synchronizers(self): + from transaction._transaction import Status + from transaction.tests.common import DummyLogger + from transaction.tests.common import Monkey + from transaction import _transaction + class _Mgr(object): + def __init__(self, txn): + self._txn = txn + def free(self, txn): + assert txn is self._txn + self._txn = None + logger = DummyLogger() + with Monkey(_transaction, _LOGGER=logger): + txn = self._makeOne() + logger._clear() + mgr = txn._manager = _Mgr(txn) + txn.abort() + self.assertEqual(txn.status, Status.ACTIVE) + self.assertTrue(mgr._txn is None) + 
self.assertEqual(logger._log[0][0], 'debug') + self.assertEqual(logger._log[0][1], 'abort') + + def test_abort_w_savepoints(self): + from weakref import WeakKeyDictionary + from transaction.tests.common import DummyLogger + from transaction.tests.common import Monkey + from transaction import _transaction + class _SP(object): + def __init__(self, txn, index): + self.transaction = txn + self._index = index + def __repr__(self): # pragma: no cover + return '_SP: %d' % self._index + logger = DummyLogger() + with Monkey(_transaction, _LOGGER=logger): + txn = self._makeOne() + txn._savepoint2index = WeakKeyDictionary() + holdme = [] + for i in range(10): + sp = _SP(txn, i) + holdme.append(sp) #prevent gc + txn._savepoint2index[sp] = i + logger._clear() + txn.abort() + self.assertEqual(list(txn._savepoint2index), []) + + def test_abort_w_beforeCommitHooks(self): + from transaction.tests.common import DummyLogger + from transaction.tests.common import Monkey + from transaction import _transaction + _hooked1, _hooked2 = [], [] + def _hook1(*args, **kw): + raise AssertionError("Not called") + def _hook2(*args, **kw): + raise AssertionError("Not called") + logger = DummyLogger() + with Monkey(_transaction, _LOGGER=logger): + txn = self._makeOne() + txn._before_commit.append((_hook1, ('one',), {'uno': 1})) + txn._before_commit.append((_hook2, (), {})) + logger._clear() + txn.abort() + self.assertEqual(_hooked1, []) + self.assertEqual(_hooked2, []) + # Hooks are neither called nor cleared on abort + self.assertEqual(list(txn.getBeforeCommitHooks()), + [(_hook1, ('one',), {'uno': 1}), (_hook2, (), {})]) + + def test_abort_w_synchronizers(self): + from transaction.weakset import WeakSet + from transaction.tests.common import DummyLogger + from transaction.tests.common import Monkey + from transaction import _transaction + class _Synch(object): + _before = _after = False + def beforeCompletion(self, txn): + self._before = txn + def afterCompletion(self, txn): + self._after = txn 
+ synchs = [_Synch(), _Synch(), _Synch()] + ws = WeakSet() + for synch in synchs: + ws.add(synch) + logger = DummyLogger() + with Monkey(_transaction, _LOGGER=logger): + txn = self._makeOne(synchronizers=ws) + logger._clear() + txn.abort() + for synch in synchs: + self.assertTrue(synch._before is txn) + self.assertTrue(synch._after is txn) + + def test_abort_w_afterCommitHooks(self): + from transaction.tests.common import DummyLogger + from transaction.tests.common import Monkey + from transaction import _transaction + _hooked1, _hooked2 = [], [] + def _hook1(*args, **kw): + raise AssertionError("Not called") + def _hook2(*args, **kw): + raise AssertionError("Not called") + logger = DummyLogger() + with Monkey(_transaction, _LOGGER=logger): + txn = self._makeOne() + txn._after_commit.append((_hook1, ('one',), {'uno': 1})) + txn._after_commit.append((_hook2, (), {})) + logger._clear() + txn.abort() + # Hooks are neither called nor cleared on abort + self.assertEqual(_hooked1, []) + self.assertEqual(_hooked2, []) + self.assertEqual(list(txn.getAfterCommitHooks()), + [(_hook1, ('one',), {'uno': 1}), (_hook2, (), {})]) + self.assertEqual(txn._resources, []) + + def test_abort_error_w_afterCompleteHooks(self): + from transaction import _transaction + from transaction.tests.common import DummyLogger + from transaction.tests.common import Monkey + class BrokenResource(object): + def sortKey(self): + raise AssertionError("Not called") + def abort(self, txn): + raise ValueError('test') + broken = BrokenResource() + aaa = Resource('aaa') + broken2 = BrokenResource() + _hooked1, _hooked2 = [], [] + def _hook1(*args, **kw): + raise AssertionError("Not called") + def _hook2(*args, **kw): + raise AssertionError("Not called") + logger = DummyLogger() + with Monkey(_transaction, _LOGGER=logger): + txn = self._makeOne() + txn._after_commit.append((_hook1, ('one',), {'uno': 1})) + txn._after_commit.append((_hook2, (), {})) + txn._resources.append(aaa) + txn._resources.append(broken) 
+ txn._resources.append(broken2) + logger._clear() + self.assertRaises(ValueError, txn.abort) + # Hooks are neither called nor cleared on abort + self.assertEqual(_hooked1, []) + self.assertEqual(_hooked2, []) + self.assertEqual(list(txn.getAfterCommitHooks()), + [(_hook1, ('one',), {'uno': 1}), (_hook2, (), {})]) + self.assertTrue(aaa._a) + self.assertFalse(aaa._x) + + def test_abort_error_w_synchronizers(self): + from transaction.weakset import WeakSet + from transaction.tests.common import DummyLogger + from transaction.tests.common import Monkey + from transaction import _transaction + class _Synch(object): + _before = _after = False + def beforeCompletion(self, txn): + self._before = txn + def afterCompletion(self, txn): + self._after = txn + synchs = [_Synch(), _Synch(), _Synch()] + ws = WeakSet() + for synch in synchs: + ws.add(synch) + class BrokenResource(object): + def sortKey(self): + raise AssertionError("Should not be called") + def abort(self, txn): + raise ValueError('test') + broken = BrokenResource() + logger = DummyLogger() + with Monkey(_transaction, _LOGGER=logger): + t = self._makeOne(synchronizers=ws) + logger._clear() + t._resources.append(broken) + self.assertRaises(ValueError, t.abort) + for synch in synchs: + self.assertTrue(synch._before is t) + self.assertTrue(synch._after is t) #called in _cleanup + + def test_abort_clears_resources(self): + class DM(object): + abort = lambda s, txn: True + + dm = DM() + txn = self._makeOne() + txn.join(dm) + self.assertEqual(txn._resources, [dm]) + txn.abort() + self.assertEqual(txn._resources, []) + + def test_note(self): + txn = self._makeOne() + try: + txn.note(u'This is a note.') + self.assertEqual(txn.description, u'This is a note.') + txn.note(u'Another.') + self.assertEqual(txn.description, u'This is a note.\nAnother.') + finally: + txn.abort() + + def test_note_bytes(self): + txn = self._makeOne() + with warnings.catch_warnings(record=True) as w: + warnings.simplefilter("always") + 
txn.note(b'haha') + self.assertNonTextDeprecationWarning(w) + self.assertEqual(txn.description, u'haha') + + def test_note_None(self): + txn = self._makeOne() + self.assertEqual(u'', txn.description) + with warnings.catch_warnings(record=True) as w: + warnings.simplefilter("always") + txn.note(None) + self.assertFalse(w) + self.assertEqual(txn.description, u'') + + def test_note_42(self): + txn = self._makeOne() + with warnings.catch_warnings(record=True) as w: + warnings.simplefilter("always") + txn.note(42) + self.assertNonTextDeprecationWarning(w) + self.assertEqual(txn.description, u'42') + + def assertNonTextDeprecationWarning(self, w): + [w] = w + self.assertEqual( + (DeprecationWarning, "Expected text", + os.path.splitext(__file__)[0]), + (w.category, str(w.message), os.path.splitext(w.filename)[0]), + ) + + def test_description_bytes(self): + txn = self._makeOne() + with warnings.catch_warnings(record=True) as w: + warnings.simplefilter("always") + txn.description = b'haha' + self.assertNonTextDeprecationWarning(w) + self.assertEqual(txn.description, u'haha') + + def test_description_42(self): + txn = self._makeOne() + with warnings.catch_warnings(record=True) as w: + warnings.simplefilter("always") + txn.description = 42 + self.assertNonTextDeprecationWarning(w) + self.assertEqual(txn.description, u'42') + + def test_description_None(self): + txn = self._makeOne() + self.assertEqual(u'', txn.description) + with warnings.catch_warnings(record=True) as w: + warnings.simplefilter("always") + txn.description = None + self.assertFalse(w) + self.assertEqual(txn.description, u'') + + def test_setUser_default_path(self): + txn = self._makeOne() + txn.setUser(u'phreddy') + self.assertEqual(txn.user, u'/ phreddy') + + def test_setUser_explicit_path(self): + txn = self._makeOne() + txn.setUser(u'phreddy', u'/bedrock') + self.assertEqual(txn.user, u'/bedrock phreddy') + + def test_user_w_none(self): + txn = self._makeOne() + txn.user = b'phreddy' + with 
self.assertRaises(ValueError): + txn.user = None # resets to empty text + self.assertEqual(txn.user, u'phreddy') + + def _test_user_non_text(self, user, path, expect, both=False): + txn = self._makeOne() + with warnings.catch_warnings(record=True) as w: + warnings.simplefilter("always") + if path: + txn.setUser(user, path) + else: + if path is None: + txn.setUser(user) + else: + txn.user = user + + if both: + self.assertNonTextDeprecationWarning(w[:1]) + self.assertNonTextDeprecationWarning(w[1:]) + else: + self.assertNonTextDeprecationWarning(w) + + self.assertEqual(expect, txn.user) + + def test_user_non_text(self, user=b'phreddy', path=b'/bedrock', + expect=u"/bedrock phreddy", both=True): + self._test_user_non_text(b'phreddy', b'/bedrock', + u"/bedrock phreddy", True) + self._test_user_non_text(b'phreddy', None, u'/ phreddy') + self._test_user_non_text(b'phreddy', False, u'phreddy') + self._test_user_non_text(b'phreddy', u'/bedrock', u'/bedrock phreddy') + self._test_user_non_text(u'phreddy', b'/bedrock', u'/bedrock phreddy') + self._test_user_non_text(u'phreddy', 2, u'2 phreddy') + self._test_user_non_text(1, u'/bedrock', u'/bedrock 1') + self._test_user_non_text(1, 2, u'2 1', True) + + def test_setExtendedInfo_single(self): + txn = self._makeOne() + txn.setExtendedInfo('frob', 'qux') + self.assertEqual(txn.extension, {u'frob': 'qux'}) + self.assertTrue(txn._extension is txn._extension) # legacy + + def test_setExtendedInfo_multiple(self): + txn = self._makeOne() + txn.setExtendedInfo('frob', 'qux') + txn.setExtendedInfo('baz', 'spam') + txn.setExtendedInfo('frob', 'quxxxx') + self.assertEqual(txn._extension, {u'frob': 'quxxxx', u'baz': 'spam'}) + self.assertTrue(txn._extension is txn._extension) # legacy + + def test__extension_settable(self): + # Because ZEO sets it. 
I'll fix ZEO, but maybe something else will break + txn = self._makeOne() + txn._extension = dict(baz='spam') + txn.setExtendedInfo('frob', 'qux') + self.assertEqual(txn.extension, {u'frob': 'qux', 'baz': 'spam'}) + + def test_data(self): + txn = self._makeOne() + + # Can't get data that wasn't set: + with self.assertRaises(KeyError) as c: + txn.data(self) + self.assertEqual(c.exception.args, (self,)) + + data = dict(a=1) + txn.set_data(self, data) + self.assertEqual(txn.data(self), data) + + # Can't get something we haven't stored. + with self.assertRaises(KeyError) as c: + txn.data(data) + self.assertEqual(c.exception.args, (data,)) + + # When the transaction ends, data are discarded: + txn.commit() + with self.assertRaises(KeyError) as c: + txn.data(self) + self.assertEqual(c.exception.args, (self,)) + + def test_isRetryableError_w_transient_error(self): + from transaction.interfaces import TransientError + from transaction._manager import TransactionManager + txn = self._makeOne(manager=TransactionManager()) + txn._manager._txn = txn + self.assertTrue(txn.isRetryableError(TransientError())) + + def test_isRetryableError_w_transient_subclass(self): + from transaction.interfaces import TransientError + from transaction._manager import TransactionManager + class _Derived(TransientError): + pass + txn = self._makeOne(manager=TransactionManager()) + txn._manager._txn = txn + self.assertTrue(txn.isRetryableError(_Derived())) + + def test_isRetryableError_w_normal_exception_no_resources(self): + from transaction._manager import TransactionManager + txn = self._makeOne(manager=TransactionManager()) + txn._manager._txn = txn + self.assertFalse(txn.isRetryableError(Exception())) + + def test_isRetryableError_w_normal_exception_w_resource_voting_yes(self): + from transaction._manager import TransactionManager + class _Resource(object): + def should_retry(self, err): + return True + txn = self._makeOne(manager=TransactionManager()) + txn._manager._txn = txn + 
txn._resources.append(_Resource()) + self.assertTrue(txn.isRetryableError(Exception())) + + def test_isRetryableError_w_multiple(self): + from transaction._manager import TransactionManager + class _Resource(object): + _should = True + def should_retry(self, err): + return self._should + txn = self._makeOne(manager=TransactionManager()) + txn._manager._txn = txn + res1 = _Resource() + res1._should = False + res2 = _Resource() + txn._resources.append(res1) + txn._resources.append(res2) + self.assertTrue(txn.isRetryableError(Exception())) + + + +class MultiObjectResourceAdapterTests(unittest.TestCase): + + def _getTargetClass(self): + from transaction._transaction import MultiObjectResourceAdapter + return MultiObjectResourceAdapter + + def _makeOne(self, jar): + return self._getTargetClass()(jar) + + def _makeJar(self, key): + class _Resource(Resource): + def __init__(self, key): + super(_Resource, self).__init__(key) + self._c = [] + self._a = [] + def commit(self, obj, txn): + self._c.append((obj, txn)) + def abort(self, obj, txn): + self._a.append((obj, txn)) + return _Resource(key) + + def _makeDummy(self, kind, name): + class _Dummy(object): + def __init__(self, kind, name): + self._kind = kind + self._name = name + def __repr__(self): # pragma: no cover + return '<%s: %s>' % (self._kind, self._name) + return _Dummy(kind, name) + + def test_ctor(self): + jar = self._makeJar('aaa') + mora = self._makeOne(jar) + self.assertTrue(mora.manager is jar) + self.assertEqual(mora.objects, []) + self.assertEqual(mora.ncommitted, 0) + + def test___repr__(self): + jar = self._makeJar('bbb') + mora = self._makeOne(jar) + self.assertEqual(repr(mora), + '' % id(mora)) + + def test_sortKey(self): + jar = self._makeJar('ccc') + mora = self._makeOne(jar) + self.assertEqual(mora.sortKey(), 'ccc') + + def test_tpc_begin(self): + jar = self._makeJar('ddd') + mora = self._makeOne(jar) + txn = object() + mora.tpc_begin(txn) + self.assertTrue(jar._b) + + def test_commit(self): + jar = 
self._makeJar('eee') + objects = [self._makeDummy('obj', 'a'), self._makeDummy('obj', 'b')] + mora = self._makeOne(jar) + mora.objects.extend(objects) + txn = self._makeDummy('txn', 'c') + mora.commit(txn) + self.assertEqual(jar._c, [(objects[0], txn), (objects[1], txn)]) + + def test_tpc_vote(self): + jar = self._makeJar('fff') + mora = self._makeOne(jar) + txn = object() + mora.tpc_vote(txn) + self.assertTrue(jar._v) + + def test_tpc_finish(self): + jar = self._makeJar('ggg') + mora = self._makeOne(jar) + txn = object() + mora.tpc_finish(txn) + self.assertTrue(jar._f) + + def test_abort(self): + jar = self._makeJar('hhh') + objects = [self._makeDummy('obj', 'a'), self._makeDummy('obj', 'b')] + mora = self._makeOne(jar) + mora.objects.extend(objects) + txn = self._makeDummy('txn', 'c') + mora.abort(txn) + self.assertEqual(jar._a, [(objects[0], txn), (objects[1], txn)]) + + def test_abort_w_error(self): + from transaction.tests.common import DummyLogger + jar = self._makeJar('hhh') + objects = [self._makeDummy('obj', 'a'), + self._makeDummy('obj', 'b'), + self._makeDummy('obj', 'c'), + ] + _old_abort = jar.abort + def _abort(obj, txn): + if obj._name in ('b', 'c'): + raise ValueError() + _old_abort(obj, txn) + jar.abort = _abort + mora = self._makeOne(jar) + mora.objects.extend(objects) + txn = self._makeDummy('txn', 'c') + txn.log = log = DummyLogger() + self.assertRaises(ValueError, mora.abort, txn) + self.assertEqual(jar._a, [(objects[0], txn)]) + + def test_tpc_abort(self): + jar = self._makeJar('iii') + mora = self._makeOne(jar) + txn = object() + mora.tpc_abort(txn) + self.assertTrue(jar._x) + + +class Test_rm_key(unittest.TestCase): + + def _callFUT(self, oid): + from transaction._transaction import rm_key + return rm_key(oid) + + def test_miss(self): + self.assertTrue(self._callFUT(object()) is None) + + def test_hit(self): + self.assertEqual(self._callFUT(Resource('zzz')), 'zzz') + + +class Test_object_hint(unittest.TestCase): + + def _callFUT(self, oid): 
+ from transaction._transaction import object_hint + return object_hint(oid) + + def test_miss(self): + class _Test(object): + pass + test = _Test() + self.assertEqual(self._callFUT(test), "_Test oid=None") + + def test_hit(self): + class _Test(object): + pass + test = _Test() + test._p_oid = 'OID' + self.assertEqual(self._callFUT(test), "_Test oid='OID'") + + +class Test_oid_repr(unittest.TestCase): + + def _callFUT(self, oid): + from transaction._transaction import oid_repr + return oid_repr(oid) + + def test_as_nonstring(self): + self.assertEqual(self._callFUT(123), '123') + + def test_as_string_not_8_chars(self): + self.assertEqual(self._callFUT('a'), "'a'") + + def test_as_string_z64(self): + s = '\0'*8 + self.assertEqual(self._callFUT(s), '0x00') + + def test_as_string_all_Fs(self): + s = '\1'*8 + self.assertEqual(self._callFUT(s), '0x0101010101010101') + + def test_as_string_xxx(self): + s = '\20'*8 + self.assertEqual(self._callFUT(s), '0x1010101010101010') + + +class DataManagerAdapterTests(unittest.TestCase): + + def _getTargetClass(self): + from transaction._transaction import DataManagerAdapter + return DataManagerAdapter + + def _makeOne(self, jar): + return self._getTargetClass()(jar) + + def _makeJar(self, key): + class _Resource(Resource): + _p = False + def prepare(self, txn): + self._p = True + return _Resource(key) + + def _makeDummy(self, kind, name): + class _Dummy(object): + def __init__(self, kind, name): + self._kind = kind + self._name = name + def __repr__(self): # pragma: no cover + return '<%s: %s>' % (self._kind, self._name) + return _Dummy(kind, name) + + def test_ctor(self): + jar = self._makeJar('aaa') + dma = self._makeOne(jar) + self.assertTrue(dma._datamanager is jar) + + def test_commit(self): + jar = self._makeJar('bbb') + mora = self._makeOne(jar) + txn = self._makeDummy('txn', 'c') + mora.commit(txn) + self.assertFalse(jar._c) #no-op + + def test_abort(self): + jar = self._makeJar('ccc') + mora = self._makeOne(jar) + txn = 
self._makeDummy('txn', 'c') + mora.abort(txn) + self.assertTrue(jar._a) + + def test_tpc_begin(self): + jar = self._makeJar('ddd') + mora = self._makeOne(jar) + txn = object() + mora.tpc_begin(txn) + self.assertFalse(jar._b) #no-op + + def test_tpc_abort(self): + jar = self._makeJar('eee') + mora = self._makeOne(jar) + txn = object() + mora.tpc_abort(txn) + self.assertFalse(jar._f) + self.assertTrue(jar._a) + + def test_tpc_finish(self): + jar = self._makeJar('fff') + mora = self._makeOne(jar) + txn = object() + mora.tpc_finish(txn) + self.assertFalse(jar._f) + self.assertTrue(jar._c) + + def test_tpc_vote(self): + jar = self._makeJar('ggg') + mora = self._makeOne(jar) + txn = object() + mora.tpc_vote(txn) + self.assertFalse(jar._v) + self.assertTrue(jar._p) + + def test_sortKey(self): + jar = self._makeJar('hhh') + mora = self._makeOne(jar) + self.assertEqual(mora.sortKey(), 'hhh') + + +class SavepointTests(unittest.TestCase): + + def _getTargetClass(self): + from transaction._transaction import Savepoint + return Savepoint + + def _makeOne(self, txn, optimistic, *resources): + return self._getTargetClass()(txn, optimistic, *resources) + + def test_ctor_w_savepoint_oblivious_resource_non_optimistic(self): + txn = object() + resource = object() + self.assertRaises(TypeError, self._makeOne, txn, False, resource) + + def test_ctor_w_savepoint_oblivious_resource_optimistic(self): + from transaction._transaction import NoRollbackSavepoint + txn = object() + resource = object() + sp = self._makeOne(txn, True, resource) + self.assertEqual(len(sp._savepoints), 1) + self.assertTrue(isinstance(sp._savepoints[0], NoRollbackSavepoint)) + self.assertTrue(sp._savepoints[0].datamanager is resource) + + def test_ctor_w_savepoint_aware_resources(self): + class _Aware(object): + def savepoint(self): + return self + txn = object() + one = _Aware() + another = _Aware() + sp = self._makeOne(txn, True, one, another) + self.assertEqual(len(sp._savepoints), 2) + 
self.assertTrue(isinstance(sp._savepoints[0], _Aware)) + self.assertTrue(sp._savepoints[0] is one) + self.assertTrue(isinstance(sp._savepoints[1], _Aware)) + self.assertTrue(sp._savepoints[1] is another) + + def test_valid_wo_transacction(self): + sp = self._makeOne(None, True, object()) + self.assertFalse(sp.valid) + + def test_valid_w_transacction(self): + sp = self._makeOne(object(), True, object()) + self.assertTrue(sp.valid) + + def test_rollback_w_txn_None(self): + from transaction.interfaces import InvalidSavepointRollbackError + txn = None + class _Aware(object): + def savepoint(self): + return self + resource = _Aware() + sp = self._makeOne(txn, False, resource) + self.assertRaises(InvalidSavepointRollbackError, sp.rollback) + + def test_rollback_w_sp_error(self): + class _TXN(object): + _sarce = False + _raia = None + def _saveAndRaiseCommitishError(self): + import sys + from transaction._compat import reraise + self._sarce = True + reraise(*sys.exc_info()) + def _remove_and_invalidate_after(self, sp): + self._raia = sp + class _Broken(object): + def rollback(self): + raise ValueError() + _broken = _Broken() + class _GonnaRaise(object): + def savepoint(self): + return _broken + txn = _TXN() + resource = _GonnaRaise() + sp = self._makeOne(txn, False, resource) + self.assertRaises(ValueError, sp.rollback) + self.assertTrue(txn._raia is sp) + self.assertTrue(txn._sarce) + + +class AbortSavepointTests(unittest.TestCase): + + def _getTargetClass(self): + from transaction._transaction import AbortSavepoint + return AbortSavepoint + + def _makeOne(self, datamanager, transaction): + return self._getTargetClass()(datamanager, transaction) + + def test_ctor(self): + dm = object() + txn = object() + asp = self._makeOne(dm, txn) + self.assertTrue(asp.datamanager is dm) + self.assertTrue(asp.transaction is txn) + + def test_rollback(self): + class _DM(object): + _aborted = None + def abort(self, txn): + self._aborted = txn + class _TXN(object): + _unjoined = None + 
def _unjoin(self, datamanager): + self._unjoin = datamanager + dm = _DM() + txn = _TXN() + asp = self._makeOne(dm, txn) + asp.rollback() + self.assertTrue(dm._aborted is txn) + self.assertTrue(txn._unjoin is dm) + + +class NoRollbackSavepointTests(unittest.TestCase): + + def _getTargetClass(self): + from transaction._transaction import NoRollbackSavepoint + return NoRollbackSavepoint + + def _makeOne(self, datamanager): + return self._getTargetClass()(datamanager) + + def test_ctor(self): + dm = object() + nrsp = self._makeOne(dm) + self.assertTrue(nrsp.datamanager is dm) + + def test_rollback(self): + dm = object() + nrsp = self._makeOne(dm) + self.assertRaises(TypeError, nrsp.rollback) + + +class MiscellaneousTests(unittest.TestCase): + + def test_BBB_join(self): + # The join method is provided for "backward-compatability" with ZODB 4 + # data managers. + from transaction import Transaction + from transaction.tests.examples import DataManager + from transaction._transaction import DataManagerAdapter + # The argument to join must be a zodb4 data manager, + # transaction.interfaces.IDataManager. + txn = Transaction() + dm = DataManager() + txn.join(dm) + # The end result is that a data manager adapter is one of the + # transaction's objects: + self.assertTrue(isinstance(txn._resources[0], DataManagerAdapter)) + self.assertTrue(txn._resources[0]._datamanager is dm) + + def test_bug239086(self): + # The original implementation of thread transaction manager made + # invalid assumptions about thread ids. 
+ import threading + import transaction + import transaction.tests.savepointsample as SPS + dm = SPS.SampleSavepointDataManager() + self.assertEqual(list(dm.keys()), []) + + class Sync(object): + def __init__(self, label): + self.label = label + self.log = [] + def beforeCompletion(self, txn): + raise AssertionError("Not called") + def afterCompletion(self, txn): + raise AssertionError("Not called") + def newTransaction(self, txn): + self.log.append('%s %s' % (self.label, 'new')) + + def run_in_thread(f): + txn = threading.Thread(target=f) + txn.start() + txn.join() + + sync = Sync(1) + @run_in_thread + def first(): + transaction.manager.registerSynch(sync) + transaction.manager.begin() + dm['a'] = 1 + self.assertEqual(sync.log, ['1 new']) + + @run_in_thread + def second(): + transaction.abort() # should do nothing. + self.assertEqual(sync.log, ['1 new']) + self.assertEqual(list(dm.keys()), ['a']) + + dm = SPS.SampleSavepointDataManager() + self.assertEqual(list(dm.keys()), []) + + @run_in_thread + def third(): + dm['a'] = 1 + self.assertEqual(sync.log, ['1 new']) + + transaction.abort() # should do nothing + self.assertEqual(list(dm.keys()), ['a']) + + def test_gh5(self): + from transaction import _transaction + from transaction._compat import native_ + + buffer = _transaction._makeTracebackBuffer() + + s = u'ąčę' + buffer.write(s) + + buffer.seek(0) + self.assertEqual(buffer.read(), native_(s, 'utf-8')) + +class Resource(object): + _b = _c = _v = _f = _a = _x = _after = False + def __init__(self, key, error=None): + self._key = key + self._error = error + def __repr__(self): + return 'Resource: %s' % self._key + def sortKey(self): + return self._key + def tpc_begin(self, txn): + if self._error == 'tpc_begin': + raise ValueError() + self._b = True + def commit(self, txn): + if self._error == 'commit': + raise ValueError() + self._c = True + def tpc_vote(self, txn): + if self._error == 'tpc_vote': + raise ValueError() + self._v = True + def tpc_finish(self, txn): + 
if self._error == 'tpc_finish': + raise ValueError() + self._f = True + def abort(self, txn): + if self._error == 'abort': + raise AssertionError("Not called in that state") + self._a = True + def tpc_abort(self, txn): + if self._error == 'tpc_abort': + raise AssertionError("Not called in that state") + self._x = True + def afterCompletion(self, txn): + if self._error == 'afterCompletion': + raise ValueError() + self._after = True diff --git a/thesisenv/lib/python3.6/site-packages/transaction/tests/test_register_compat.py b/thesisenv/lib/python3.6/site-packages/transaction/tests/test_register_compat.py new file mode 100644 index 0000000..83054d9 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/transaction/tests/test_register_compat.py @@ -0,0 +1,142 @@ +############################################################################## +# +# Copyright (c) 2004 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Test backwards compatibility for resource managers using register(). + +The transaction package supports several different APIs for resource +managers. The original ZODB3 API was implemented by ZODB.Connection. +The Connection passed persistent objects to a Transaction's register() +method. It's possible that third-party code also used this API, hence +these tests that the code that adapts the old interface to the current +API works. + +These tests use a TestConnection object that implements the old API. 
+They check that the right methods are called and in roughly the right +order. +""" +import unittest + + +class BBBTests(unittest.TestCase): + + def setUp(self): + from transaction import abort + abort() + tearDown = setUp + + def test_basic_commit(self): + import transaction + cn = TestConnection() + cn.register(Object()) + cn.register(Object()) + cn.register(Object()) + transaction.commit() + self.assertEqual(len(cn.committed), 3) + self.assertEqual(len(cn.aborted), 0) + self.assertEqual(cn.calls, ['begin', 'vote', 'finish']) + + def test_basic_abort(self): + # If the application calls abort(), then the transaction never gets + # into the two-phase commit. It just aborts each object. + import transaction + cn = TestConnection() + cn.register(Object()) + cn.register(Object()) + cn.register(Object()) + transaction.abort() + self.assertEqual(len(cn.committed), 0) + self.assertEqual(len(cn.aborted), 3) + self.assertEqual(cn.calls, []) + + def test_tpc_error(self): + # The tricky part of the implementation is recovering from an error + # that occurs during the two-phase commit. We override the commit() + # and abort() methods of Object to cause errors during commit. + + # Note that the implementation uses lists internally, so that objects + # are committed in the order they are registered. (In the presence + # of multiple resource managers, objects from a single resource + # manager are committed in order. I'm not sure if this is an + # accident of the implementation or a feature that should be + # supported by any implementation.) + + # The order of resource managers depends on sortKey(). 
+ import transaction + cn = TestConnection() + cn.register(Object()) + cn.register(CommitError()) + cn.register(Object()) + self.assertRaises(RuntimeError, transaction.commit) + self.assertEqual(len(cn.committed), 1) + self.assertEqual(len(cn.aborted), 3) + + +class Object(object): + + def commit(self): + pass + + def abort(self): + pass + + +class CommitError(Object): + + def commit(self): + raise RuntimeError("commit") + + +class AbortError(Object): + + def abort(self): + raise AssertionError("This should not actually be called") + + +class BothError(CommitError, AbortError): + pass + + +class TestConnection(object): + + def __init__(self): + self.committed = [] + self.aborted = [] + self.calls = [] + + def register(self, obj): + import transaction + obj._p_jar = self + transaction.get().register(obj) + + def sortKey(self): + return str(id(self)) + + def tpc_begin(self, txn): + self.calls.append("begin") + + def tpc_vote(self, txn): + self.calls.append("vote") + + def tpc_finish(self, txn): + self.calls.append("finish") + + def tpc_abort(self, txn): + self.calls.append("abort") + + def commit(self, obj, txn): + obj.commit() + self.committed.append(obj) + + def abort(self, obj, txn): + obj.abort() + self.aborted.append(obj) diff --git a/thesisenv/lib/python3.6/site-packages/transaction/tests/test_savepoint.py b/thesisenv/lib/python3.6/site-packages/transaction/tests/test_savepoint.py new file mode 100644 index 0000000..ba2b1cf --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/transaction/tests/test_savepoint.py @@ -0,0 +1,66 @@ +############################################################################## +# +# Copyright (c) 2004 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +import unittest + + +class SavepointTests(unittest.TestCase): + + def testRollbackRollsbackDataManagersThatJoinedLater(self): + # A savepoint needs to not just rollback it's savepoints, but needs + # to # rollback savepoints for data managers that joined savepoints + # after the savepoint: + import transaction + from transaction.tests import savepointsample + dm = savepointsample.SampleSavepointDataManager() + dm['name'] = 'bob' + sp1 = transaction.savepoint() + dm['job'] = 'geek' + sp2 = transaction.savepoint() + dm['salary'] = 'fun' + dm2 = savepointsample.SampleSavepointDataManager() + dm2['name'] = 'sally' + + self.assertTrue('name' in dm) + self.assertTrue('job' in dm) + self.assertTrue('salary' in dm) + self.assertTrue('name' in dm2) + + sp1.rollback() + + self.assertTrue('name' in dm) + self.assertFalse('job' in dm) + self.assertFalse('salary' in dm) + self.assertFalse('name' in dm2) + + def test_commit_after_rollback_for_dm_that_joins_after_savepoint(self): + # There was a problem handling data managers that joined after a + # savepoint. If the savepoint was rolled back and then changes + # made, the dm would end up being joined twice, leading to extra + # tpc calls and pain. 
+ import transaction + from transaction.tests import savepointsample + sp = transaction.savepoint() + dm = savepointsample.SampleSavepointDataManager() + dm['name'] = 'bob' + sp.rollback() + dm['name'] = 'Bob' + transaction.commit() + self.assertEqual(dm['name'], 'Bob') + + + +def test_suite(): + return unittest.TestSuite(( + unittest.makeSuite(SavepointTests), + )) diff --git a/thesisenv/lib/python3.6/site-packages/transaction/tests/test_weakset.py b/thesisenv/lib/python3.6/site-packages/transaction/tests/test_weakset.py new file mode 100644 index 0000000..e406fd1 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/transaction/tests/test_weakset.py @@ -0,0 +1,131 @@ +############################################################################## +# +# Copyright (c) 2007 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## +import unittest +from transaction._compat import JYTHON + +class WeakSetTests(unittest.TestCase): + def test_contains(self): + from transaction.weakset import WeakSet + w = WeakSet() + dummy = Dummy() + w.add(dummy) + self.assertEqual(dummy in w, True) + dummy2 = Dummy() + self.assertEqual(dummy2 in w, False) + + def test_len(self): + import gc + from transaction.weakset import WeakSet + w = WeakSet() + d1 = Dummy() + d2 = Dummy() + w.add(d1) + w.add(d2) + self.assertEqual(len(w), 2) + del d1 + gc.collect() + if not JYTHON: + # The Jython GC is non deterministic + self.assertEqual(len(w), 1) + + def test_remove(self): + from transaction.weakset import WeakSet + 
w = WeakSet() + dummy = Dummy() + w.add(dummy) + self.assertEqual(dummy in w, True) + w.remove(dummy) + self.assertEqual(dummy in w, False) + + def test_clear(self): + from transaction.weakset import WeakSet + w = WeakSet() + dummy = Dummy() + w.add(dummy) + dummy2 = Dummy() + w.add(dummy2) + self.assertEqual(dummy in w, True) + self.assertEqual(dummy2 in w, True) + w.clear() + self.assertEqual(dummy in w, False) + self.assertEqual(dummy2 in w, False) + + def test_as_weakref_list(self): + import gc + from transaction.weakset import WeakSet + w = WeakSet() + dummy = Dummy() + dummy2 = Dummy() + dummy3 = Dummy() + w.add(dummy) + w.add(dummy2) + w.add(dummy3) + del dummy3 + gc.collect() + refs = w.as_weakref_list() + self.assertTrue(isinstance(refs, list)) + L = [x() for x in refs] + # L is a list, but it does not have a guaranteed order. + self.assertTrue(list, type(L)) + self.assertEqual(set(L), set([dummy, dummy2])) + + def test_map(self): + from transaction.weakset import WeakSet + w = WeakSet() + dummy = Dummy() + dummy2 = Dummy() + dummy3 = Dummy() + w.add(dummy) + w.add(dummy2) + w.add(dummy3) + def poker(x): + x.poked = 1 + w.map(poker) + for thing in dummy, dummy2, dummy3: + self.assertEqual(thing.poked, 1) + + def test_map_w_gced_element(self): + import gc + from transaction.weakset import WeakSet + w = WeakSet() + dummy = Dummy() + dummy2 = Dummy() + dummy3 = [Dummy()] + w.add(dummy) + w.add(dummy2) + w.add(dummy3[0]) + + _orig = w.as_weakref_list + def _as_weakref_list(): + # simulate race condition during iteration of list + # object is collected after being iterated. 
+ result = _orig() + del dummy3[:] + gc.collect() + return result + w.as_weakref_list = _as_weakref_list + + def poker(x): + x.poked = 1 + w.map(poker) + for thing in dummy, dummy2: + self.assertEqual(thing.poked, 1) + + +class Dummy: + pass + + +def test_suite(): + return unittest.makeSuite(WeakSetTests) diff --git a/thesisenv/lib/python3.6/site-packages/transaction/weakset.py b/thesisenv/lib/python3.6/site-packages/transaction/weakset.py new file mode 100644 index 0000000..b94402c --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/transaction/weakset.py @@ -0,0 +1,85 @@ +############################################################################ +# +# Copyright (c) 2007 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################ + +import weakref + +# A simple implementation of weak sets, supplying just enough of Python's +# sets.Set interface for our needs. + +class WeakSet(object): + """A set of objects that doesn't keep its elements alive. + + The objects in the set must be weakly referencable. + The objects need not be hashable, and need not support comparison. + Two objects are considered to be the same iff their id()s are equal. + + When the only references to an object are weak references (including + those from WeakSets), the object can be garbage-collected, and + will vanish from any WeakSets it may be a member of at that time. + """ + + def __init__(self): + # Map id(obj) to obj. By using ids as keys, we avoid requiring + # that the elements be hashable or comparable. 
+ self.data = weakref.WeakValueDictionary() + + def __len__(self): + return len(self.data) + + def __contains__(self, obj): + return id(obj) in self.data + + # Same as a Set, add obj to the collection. + def add(self, obj): + self.data[id(obj)] = obj + + # Same as a Set, remove obj from the collection, and raise + # KeyError if obj not in the collection. + def remove(self, obj): + del self.data[id(obj)] + + def clear(self): + self.data.clear() + + # f is a one-argument function. Execute f(elt) for each elt in the + # set. f's return value is ignored. + def map(self, f): + for wr in self.as_weakref_list(): + elt = wr() + if elt is not None: + f(elt) + + # Return a list of weakrefs to all the objects in the collection. + # Because a weak dict is used internally, iteration is dicey (the + # underlying dict may change size during iteration, due to gc or + # activity from other threads). as_weakef_list() is safe. + # + # If we invoke self.data.values() instead, we get back a list of live + # objects instead of weakrefs. If gc occurs while this list is alive, + # all the objects move to an older generation (because they're strongly + # referenced by the list!). They can't get collected then, until a + # less frequent collection of the older generation. Before then, if we + # invoke self.data.values() again, they're still alive, and if gc occurs + # while that list is alive they're all moved to yet an older generation. + # And so on. Stress tests showed that it was easy to get into a state + # where a WeakSet grows without bounds, despite that almost all its + # elements are actually trash. By returning a list of weakrefs instead, + # we avoid that, although the decision to use weakrefs is now very + # visible to our clients. + + def as_weakref_list(self): + # The docstring of WeakValueDictionary.valuerefs() + # guarantees to return an actual list on all supported versions + # of Python. 
+ return self.data.valuerefs() diff --git a/thesisenv/lib/python3.6/site-packages/unicodecsv-0.14.1-py3.6.egg-info/PKG-INFO b/thesisenv/lib/python3.6/site-packages/unicodecsv-0.14.1-py3.6.egg-info/PKG-INFO new file mode 100644 index 0000000..2fc3a20 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/unicodecsv-0.14.1-py3.6.egg-info/PKG-INFO @@ -0,0 +1,48 @@ +Metadata-Version: 1.1 +Name: unicodecsv +Version: 0.14.1 +Summary: Python2's stdlib csv module is nice, but it doesn't support unicode. This module is a drop-in replacement which *does*. +Home-page: https://github.com/jdunck/python-unicodecsv +Author: Jeremy Dunck +Author-email: jdunck@gmail.com +License: BSD License +Description: unicodecsv + ========== + + The unicodecsv is a drop-in replacement for Python 2.7's csv module which supports unicode strings without a hassle. Supported versions are python 2.7, 3.3, 3.4, 3.5, and pypy 2.4.0. + + More fully + ---------- + + Python 2's csv module doesn't easily deal with unicode strings, leading to the dreaded "'ascii' codec can't encode characters in position ..." exception. + + You can work around it by encoding everything just before calling write (or just after read), but why not add support to the serializer? + + .. code-block:: pycon + + >>> import unicodecsv as csv + >>> from io import BytesIO + >>> f = BytesIO() + >>> w = csv.writer(f, encoding='utf-8') + >>> _ = w.writerow((u'é', u'ñ')) + >>> _ = f.seek(0) + >>> r = csv.reader(f, encoding='utf-8') + >>> next(r) == [u'é', u'ñ'] + True + + Note that unicodecsv expects a bytestream, not unicode -- so there's no need to use `codecs.open` or similar wrappers. Plain `open(..., 'rb')` will do. + + (Version 0.14.0 dropped support for python 2.6, but 0.14.1 added it back. See c0b7655248c4249 for the mistaken, breaking change.) 
+ +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: BSD License +Classifier: Natural Language :: English +Classifier: Programming Language :: Python :: 2.6 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3.3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Programming Language :: Python :: Implementation :: CPython diff --git a/thesisenv/lib/python3.6/site-packages/unicodecsv-0.14.1-py3.6.egg-info/SOURCES.txt b/thesisenv/lib/python3.6/site-packages/unicodecsv-0.14.1-py3.6.egg-info/SOURCES.txt new file mode 100644 index 0000000..bb7ea0b --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/unicodecsv-0.14.1-py3.6.egg-info/SOURCES.txt @@ -0,0 +1,12 @@ +MANIFEST.in +README.rst +setup.cfg +setup.py +unicodecsv/__init__.py +unicodecsv/py2.py +unicodecsv/py3.py +unicodecsv/test.py +unicodecsv.egg-info/PKG-INFO +unicodecsv.egg-info/SOURCES.txt +unicodecsv.egg-info/dependency_links.txt +unicodecsv.egg-info/top_level.txt \ No newline at end of file diff --git a/thesisenv/lib/python3.6/site-packages/unicodecsv-0.14.1-py3.6.egg-info/dependency_links.txt b/thesisenv/lib/python3.6/site-packages/unicodecsv-0.14.1-py3.6.egg-info/dependency_links.txt new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/unicodecsv-0.14.1-py3.6.egg-info/dependency_links.txt @@ -0,0 +1 @@ + diff --git a/thesisenv/lib/python3.6/site-packages/unicodecsv-0.14.1-py3.6.egg-info/installed-files.txt b/thesisenv/lib/python3.6/site-packages/unicodecsv-0.14.1-py3.6.egg-info/installed-files.txt new file mode 100644 index 0000000..5fbc33f --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/unicodecsv-0.14.1-py3.6.egg-info/installed-files.txt @@ -0,0 +1,12 @@ 
+../unicodecsv/__init__.py +../unicodecsv/__pycache__/__init__.cpython-36.pyc +../unicodecsv/__pycache__/py2.cpython-36.pyc +../unicodecsv/__pycache__/py3.cpython-36.pyc +../unicodecsv/__pycache__/test.cpython-36.pyc +../unicodecsv/py2.py +../unicodecsv/py3.py +../unicodecsv/test.py +PKG-INFO +SOURCES.txt +dependency_links.txt +top_level.txt diff --git a/thesisenv/lib/python3.6/site-packages/unicodecsv-0.14.1-py3.6.egg-info/top_level.txt b/thesisenv/lib/python3.6/site-packages/unicodecsv-0.14.1-py3.6.egg-info/top_level.txt new file mode 100644 index 0000000..fee1540 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/unicodecsv-0.14.1-py3.6.egg-info/top_level.txt @@ -0,0 +1 @@ +unicodecsv diff --git a/thesisenv/lib/python3.6/site-packages/unicodecsv/__init__.py b/thesisenv/lib/python3.6/site-packages/unicodecsv/__init__.py new file mode 100644 index 0000000..270bdee --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/unicodecsv/__init__.py @@ -0,0 +1,11 @@ +# -*- coding: utf-8 -*- +# http://semver.org/ +import sys + +if sys.version_info >= (3, 0): + from unicodecsv.py3 import * +else: + from unicodecsv.py2 import * + +VERSION = (0, 14, 1) +__version__ = ".".join(map(str, VERSION)) diff --git a/thesisenv/lib/python3.6/site-packages/unicodecsv/py2.py b/thesisenv/lib/python3.6/site-packages/unicodecsv/py2.py new file mode 100644 index 0000000..f436397 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/unicodecsv/py2.py @@ -0,0 +1,223 @@ +# -*- coding: utf-8 -*- +import csv +import numbers + +from itertools import izip + +pass_throughs = [ + 'register_dialect', + 'unregister_dialect', + 'get_dialect', + 'list_dialects', + 'field_size_limit', + 'Dialect', + 'excel', + 'excel_tab', + 'Sniffer', + 'QUOTE_ALL', + 'QUOTE_MINIMAL', + 'QUOTE_NONNUMERIC', + 'QUOTE_NONE', + 'Error' +] +__all__ = [ + 'reader', + 'writer', + 'DictReader', + 'DictWriter', +] + pass_throughs + +for prop in pass_throughs: + globals()[prop] = getattr(csv, prop) + + +def 
_stringify(s, encoding, errors): + if s is None: + return '' + if isinstance(s, unicode): + return s.encode(encoding, errors) + elif isinstance(s, numbers.Number): + pass # let csv.QUOTE_NONNUMERIC do its thing. + elif not isinstance(s, str): + s = str(s) + return s + + +def _stringify_list(l, encoding, errors='strict'): + try: + return [_stringify(s, encoding, errors) for s in iter(l)] + except TypeError as e: + raise csv.Error(str(e)) + + +def _unicodify(s, encoding): + if s is None: + return None + if isinstance(s, (unicode, int, float)): + return s + elif isinstance(s, str): + return s.decode(encoding) + return s + + +class UnicodeWriter(object): + """ + >>> import unicodecsv + >>> from cStringIO import StringIO + >>> f = StringIO() + >>> w = unicodecsv.writer(f, encoding='utf-8') + >>> w.writerow((u'é', u'ñ')) + >>> f.seek(0) + >>> r = unicodecsv.reader(f, encoding='utf-8') + >>> row = r.next() + >>> row[0] == u'é' + True + >>> row[1] == u'ñ' + True + """ + def __init__(self, f, dialect=csv.excel, encoding='utf-8', errors='strict', + *args, **kwds): + self.encoding = encoding + self.writer = csv.writer(f, dialect, *args, **kwds) + self.encoding_errors = errors + + def writerow(self, row): + return self.writer.writerow( + _stringify_list(row, self.encoding, self.encoding_errors)) + + def writerows(self, rows): + for row in rows: + self.writerow(row) + + @property + def dialect(self): + return self.writer.dialect +writer = UnicodeWriter + + +class UnicodeReader(object): + def __init__(self, f, dialect=None, encoding='utf-8', errors='strict', + **kwds): + + format_params = ['delimiter', 'doublequote', 'escapechar', + 'lineterminator', 'quotechar', 'quoting', + 'skipinitialspace'] + + if dialect is None: + if not any([kwd_name in format_params + for kwd_name in kwds.keys()]): + dialect = csv.excel + self.reader = csv.reader(f, dialect, **kwds) + self.encoding = encoding + self.encoding_errors = errors + self._parse_numerics = bool( + self.dialect.quoting & 
csv.QUOTE_NONNUMERIC) + + def next(self): + row = self.reader.next() + encoding = self.encoding + encoding_errors = self.encoding_errors + unicode_ = unicode + if self._parse_numerics: + float_ = float + return [(value if isinstance(value, float_) else + unicode_(value, encoding, encoding_errors)) + for value in row] + else: + return [unicode_(value, encoding, encoding_errors) + for value in row] + + def __iter__(self): + return self + + @property + def dialect(self): + return self.reader.dialect + + @property + def line_num(self): + return self.reader.line_num +reader = UnicodeReader + + +class DictWriter(csv.DictWriter): + """ + >>> from cStringIO import StringIO + >>> f = StringIO() + >>> w = DictWriter(f, ['a', u'ñ', 'b'], restval=u'î') + >>> w.writerow({'a':'1', u'ñ':'2'}) + >>> w.writerow({'a':'1', u'ñ':'2', 'b':u'ø'}) + >>> w.writerow({'a':u'é', u'ñ':'2'}) + >>> f.seek(0) + >>> r = DictReader(f, fieldnames=['a', u'ñ'], restkey='r') + >>> r.next() == {'a': u'1', u'ñ':'2', 'r': [u'î']} + True + >>> r.next() == {'a': u'1', u'ñ':'2', 'r': [u'\xc3\xb8']} + True + >>> r.next() == {'a': u'\xc3\xa9', u'ñ':'2', 'r': [u'\xc3\xae']} + True + """ + def __init__(self, csvfile, fieldnames, restval='', + extrasaction='raise', dialect='excel', encoding='utf-8', + errors='strict', *args, **kwds): + self.encoding = encoding + csv.DictWriter.__init__(self, csvfile, fieldnames, restval, + extrasaction, dialect, *args, **kwds) + self.writer = UnicodeWriter(csvfile, dialect, encoding=encoding, + errors=errors, *args, **kwds) + self.encoding_errors = errors + + def writeheader(self): + header = dict(zip(self.fieldnames, self.fieldnames)) + self.writerow(header) + + +class DictReader(csv.DictReader): + """ + >>> from cStringIO import StringIO + >>> f = StringIO() + >>> w = DictWriter(f, fieldnames=['name', 'place']) + >>> w.writerow({'name': 'Cary Grant', 'place': 'hollywood'}) + >>> w.writerow({'name': 'Nathan Brillstone', 'place': u'øLand'}) + >>> w.writerow({'name': u'Will ø. 
Unicoder', 'place': u'éSpandland'}) + >>> f.seek(0) + >>> r = DictReader(f, fieldnames=['name', 'place']) + >>> print r.next() == {'name': 'Cary Grant', 'place': 'hollywood'} + True + >>> print r.next() == {'name': 'Nathan Brillstone', 'place': u'øLand'} + True + >>> print r.next() == {'name': u'Will ø. Unicoder', 'place': u'éSpandland'} + True + """ + def __init__(self, csvfile, fieldnames=None, restkey=None, restval=None, + dialect='excel', encoding='utf-8', errors='strict', *args, + **kwds): + if fieldnames is not None: + fieldnames = _stringify_list(fieldnames, encoding) + csv.DictReader.__init__(self, csvfile, fieldnames, restkey, restval, + dialect, *args, **kwds) + self.reader = UnicodeReader(csvfile, dialect, encoding=encoding, + errors=errors, *args, **kwds) + if fieldnames is None and not hasattr(csv.DictReader, 'fieldnames'): + # Python 2.5 fieldnames workaround. + # See http://bugs.python.org/issue3436 + reader = UnicodeReader(csvfile, dialect, encoding=encoding, + *args, **kwds) + self.fieldnames = _stringify_list(reader.next(), reader.encoding) + + if self.fieldnames is not None: + self.unicode_fieldnames = [_unicodify(f, encoding) for f in + self.fieldnames] + else: + self.unicode_fieldnames = [] + + self.unicode_restkey = _unicodify(restkey, encoding) + + def next(self): + row = csv.DictReader.next(self) + result = dict((uni_key, row[str_key]) for (str_key, uni_key) in + izip(self.fieldnames, self.unicode_fieldnames)) + rest = row.get(self.restkey) + if rest: + result[self.unicode_restkey] = rest + return result diff --git a/thesisenv/lib/python3.6/site-packages/unicodecsv/py3.py b/thesisenv/lib/python3.6/site-packages/unicodecsv/py3.py new file mode 100644 index 0000000..13a8bf7 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/unicodecsv/py3.py @@ -0,0 +1,95 @@ +# -*- coding: utf-8 -*- +import csv +from csv import * + + +class _UnicodeWriteWrapper(object): + """Simple write() wrapper that converts unicode to bytes.""" + + def 
__init__(self, binary, encoding, errors): + self.binary = binary + self.encoding = encoding + self.errors = errors + + def write(self, string): + return self.binary.write(string.encode(self.encoding, self.errors)) + + +class UnicodeWriter(object): + def __init__(self, f, dialect=csv.excel, encoding='utf-8', errors='strict', + *args, **kwds): + if f is None: + raise TypeError + + f = _UnicodeWriteWrapper(f, encoding=encoding, errors=errors) + self.writer = csv.writer(f, dialect, *args, **kwds) + + def writerow(self, row): + return self.writer.writerow(row) + + def writerows(self, rows): + return self.writer.writerows(rows) + + @property + def dialect(self): + return self.writer.dialect + + +class UnicodeReader(object): + def __init__(self, f, dialect=None, encoding='utf-8', errors='strict', + **kwds): + + format_params = ['delimiter', 'doublequote', 'escapechar', + 'lineterminator', 'quotechar', 'quoting', + 'skipinitialspace'] + + if dialect is None: + if not any([kwd_name in format_params + for kwd_name in kwds.keys()]): + dialect = csv.excel + + f = (bs.decode(encoding, errors=errors) for bs in f) + self.reader = csv.reader(f, dialect, **kwds) + + def __next__(self): + return self.reader.__next__() + + def __iter__(self): + return self + + @property + def dialect(self): + return self.reader.dialect + + @property + def line_num(self): + return self.reader.line_num + + +writer = UnicodeWriter +reader = UnicodeReader + + +class DictWriter(csv.DictWriter): + def __init__(self, csvfile, fieldnames, restval='', + extrasaction='raise', dialect='excel', encoding='utf-8', + errors='strict', *args, **kwds): + super().__init__(csvfile, fieldnames, restval, + extrasaction, dialect, *args, **kwds) + self.writer = UnicodeWriter(csvfile, dialect, encoding=encoding, + errors=errors, *args, **kwds) + self.encoding_errors = errors + + def writeheader(self): + header = dict(zip(self.fieldnames, self.fieldnames)) + self.writerow(header) + + +class DictReader(csv.DictReader): + def 
__init__(self, csvfile, fieldnames=None, restkey=None, restval=None, + dialect='excel', encoding='utf-8', errors='strict', *args, + **kwds): + csv.DictReader.__init__(self, csvfile, fieldnames, restkey, restval, + dialect, *args, **kwds) + self.reader = UnicodeReader(csvfile, dialect, encoding=encoding, + errors=errors, *args, **kwds) diff --git a/thesisenv/lib/python3.6/site-packages/unicodecsv/test.py b/thesisenv/lib/python3.6/site-packages/unicodecsv/test.py new file mode 100644 index 0000000..f3d0377 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/unicodecsv/test.py @@ -0,0 +1,919 @@ +# -*- coding: utf-8 -*- +# Copyright (C) 2001,2002 Python Software Foundation +# csv package unit tests + +import array +import decimal +import os +import string +import sys +import tempfile +import unittest2 as unittest +from codecs import EncodedFile +from io import BytesIO + +import unicodecsv as csv + +try: + # Python 2 + chr = unichr +except: + pass + + +# pypy and cpython differ under which exception is raised under some +# circumstances e.g. whether a module is written in C or not. +py_compat_exc = (TypeError, AttributeError) + + +class Test_Csv(unittest.TestCase): + """ + Test the underlying C csv parser in ways that are not appropriate + from the high level interface. Further tests of this nature are done + in TestDialectRegistry. 
+ """ + def _test_arg_valid(self, ctor, arg): + self.assertRaises(py_compat_exc, ctor) + self.assertRaises(py_compat_exc, ctor, None) + self.assertRaises(py_compat_exc, ctor, arg, bad_attr=0) + self.assertRaises(py_compat_exc, ctor, arg, delimiter=0) + self.assertRaises(py_compat_exc, ctor, arg, delimiter='XX') + self.assertRaises(csv.Error, ctor, arg, 'foo') + self.assertRaises(py_compat_exc, ctor, arg, delimiter=None) + self.assertRaises(py_compat_exc, ctor, arg, delimiter=1) + self.assertRaises(py_compat_exc, ctor, arg, quotechar=1) + self.assertRaises(py_compat_exc, ctor, arg, lineterminator=None) + self.assertRaises(py_compat_exc, ctor, arg, lineterminator=1) + self.assertRaises(py_compat_exc, ctor, arg, quoting=None) + self.assertRaises(py_compat_exc, ctor, arg, + quoting=csv.QUOTE_ALL, quotechar='') + self.assertRaises(py_compat_exc, ctor, arg, + quoting=csv.QUOTE_ALL, quotechar=None) + + def test_reader_arg_valid(self): + self._test_arg_valid(csv.reader, []) + + def test_writer_arg_valid(self): + self._test_arg_valid(csv.writer, BytesIO()) + + def _test_default_attrs(self, ctor, *args): + obj = ctor(*args) + # Check defaults + self.assertEqual(obj.dialect.delimiter, ',') + self.assertEqual(obj.dialect.doublequote, True) + self.assertEqual(obj.dialect.escapechar, None) + self.assertEqual(obj.dialect.lineterminator, "\r\n") + self.assertEqual(obj.dialect.quotechar, '"') + self.assertEqual(obj.dialect.quoting, csv.QUOTE_MINIMAL) + self.assertEqual(obj.dialect.skipinitialspace, False) + self.assertEqual(obj.dialect.strict, False) + # Try deleting or changing attributes (they are read-only) + self.assertRaises(py_compat_exc, delattr, + obj.dialect, 'delimiter') + self.assertRaises(py_compat_exc, setattr, + obj.dialect, 'delimiter', ':') + self.assertRaises(py_compat_exc, delattr, + obj.dialect, 'quoting') + self.assertRaises(py_compat_exc, setattr, + obj.dialect, 'quoting', None) + + def test_reader_attrs(self): + self._test_default_attrs(csv.reader, []) + + def 
test_writer_attrs(self): + self._test_default_attrs(csv.writer, BytesIO()) + + def _test_kw_attrs(self, ctor, *args): + # Now try with alternate options + kwargs = dict(delimiter=':', doublequote=False, escapechar='\\', + lineterminator='\r', quotechar='*', + quoting=csv.QUOTE_NONE, skipinitialspace=True, + strict=True) + obj = ctor(*args, **kwargs) + self.assertEqual(obj.dialect.delimiter, ':') + self.assertEqual(obj.dialect.doublequote, False) + self.assertEqual(obj.dialect.escapechar, '\\') + self.assertEqual(obj.dialect.lineterminator, "\r") + self.assertEqual(obj.dialect.quotechar, '*') + self.assertEqual(obj.dialect.quoting, csv.QUOTE_NONE) + self.assertEqual(obj.dialect.skipinitialspace, True) + self.assertEqual(obj.dialect.strict, True) + + def test_reader_kw_attrs(self): + self._test_kw_attrs(csv.reader, []) + + def test_writer_kw_attrs(self): + self._test_kw_attrs(csv.writer, BytesIO()) + + def _test_dialect_attrs(self, ctor, *args): + # Now try with dialect-derived options + class dialect: + delimiter = '-' + doublequote = False + escapechar = '^' + lineterminator = '$' + quotechar = '#' + quoting = csv.QUOTE_ALL + skipinitialspace = True + strict = False + args = args + (dialect,) + obj = ctor(*args) + self.assertEqual(obj.dialect.delimiter, '-') + self.assertEqual(obj.dialect.doublequote, False) + self.assertEqual(obj.dialect.escapechar, '^') + self.assertEqual(obj.dialect.lineterminator, "$") + self.assertEqual(obj.dialect.quotechar, '#') + self.assertEqual(obj.dialect.quoting, csv.QUOTE_ALL) + self.assertEqual(obj.dialect.skipinitialspace, True) + self.assertEqual(obj.dialect.strict, False) + + def test_reader_dialect_attrs(self): + self._test_dialect_attrs(csv.reader, []) + + def test_writer_dialect_attrs(self): + self._test_dialect_attrs(csv.writer, BytesIO()) + + def _write_test(self, fields, expect, **kwargs): + fd, name = tempfile.mkstemp() + fileobj = os.fdopen(fd, "w+b") + try: + writer = csv.writer(fileobj, **kwargs) + writer.writerow(fields) 
+ fileobj.seek(0) + self.assertEqual(fileobj.read(), + expect + writer.dialect.lineterminator.encode('utf-8')) + finally: + fileobj.close() + os.unlink(name) + + def test_write_arg_valid(self): + import sys + pypy3 = hasattr(sys, 'pypy_version_info') and sys.version_info.major == 3 + + self.assertRaises(TypeError if pypy3 else csv.Error, self._write_test, None, '') + self._write_test((), b'') + self._write_test([None], b'""') + self.assertRaises(csv.Error, self._write_test, + [None], None, quoting=csv.QUOTE_NONE) + + # Check that exceptions are passed up the chain + class BadList: + def __len__(self): + return 10 + + def __getitem__(self, i): + if i > 2: + raise IOError + + self.assertRaises(IOError, self._write_test, BadList(), '') + + class BadItem: + def __str__(self): + raise IOError + + self.assertRaises(IOError, self._write_test, [BadItem()], '') + + def test_write_bigfield(self): + # This exercises the buffer realloc functionality + bigstring = 'X' * 50000 + self._write_test([bigstring, bigstring], + b','.join([bigstring.encode('utf-8')] * 2)) + + def test_write_quoting(self): + self._write_test(['a', 1, 'p,q'], b'a,1,"p,q"') + self.assertRaises(csv.Error, + self._write_test, + ['a', 1, 'p,q'], b'a,1,p,q', + quoting=csv.QUOTE_NONE) + self._write_test(['a', 1, 'p,q'], b'a,1,"p,q"', + quoting=csv.QUOTE_MINIMAL) + self._write_test(['a', 1, 'p,q'], b'"a",1,"p,q"', + quoting=csv.QUOTE_NONNUMERIC) + self._write_test(['a', 1, 'p,q'], b'"a","1","p,q"', + quoting=csv.QUOTE_ALL) + self._write_test(['a\nb', 1], b'"a\nb","1"', + quoting=csv.QUOTE_ALL) + + def test_write_decimal(self): + self._write_test(['a', decimal.Decimal("1.1"), 'p,q'], b'"a",1.1,"p,q"', + quoting=csv.QUOTE_NONNUMERIC) + + def test_write_escape(self): + self._write_test(['a', 1, 'p,q'], b'a,1,"p,q"', + escapechar='\\') + self.assertRaises(csv.Error, + self._write_test, + ['a', 1, 'p,"q"'], b'a,1,"p,\\"q\\""', + escapechar=None, doublequote=False) + self._write_test(['a', 1, 'p,"q"'], 
b'a,1,"p,\\"q\\""', + escapechar='\\', doublequote=False) + self._write_test(['"'], b'""""', + escapechar='\\', quoting=csv.QUOTE_MINIMAL) + self._write_test(['"'], b'\\"', + escapechar='\\', quoting=csv.QUOTE_MINIMAL, + doublequote=False) + self._write_test(['"'], b'\\"', + escapechar='\\', quoting=csv.QUOTE_NONE) + self._write_test(['a', 1, 'p,q'], b'a,1,p\\,q', + escapechar='\\', quoting=csv.QUOTE_NONE) + + def test_writerows(self): + class BrokenFile: + def write(self, buf): + raise IOError + + writer = csv.writer(BrokenFile()) + self.assertRaises(IOError, writer.writerows, [['a']]) + + fd, name = tempfile.mkstemp() + fileobj = os.fdopen(fd, "w+b") + try: + writer = csv.writer(fileobj) + self.assertRaises(TypeError, writer.writerows, None) + writer.writerows([['a', 'b'], ['c', 'd']]) + fileobj.seek(0) + self.assertEqual(fileobj.read(), b"a,b\r\nc,d\r\n") + finally: + fileobj.close() + os.unlink(name) + + def _read_test(self, input, expect, **kwargs): + reader = csv.reader(input, **kwargs) + result = list(reader) + self.assertEqual(result, expect) + + def test_read_oddinputs(self): + self._read_test([], []) + self._read_test([b''], [[]]) + self.assertRaises(csv.Error, self._read_test, + [b'"ab"c'], None, strict=1) + # cannot handle null bytes for the moment + self.assertRaises(csv.Error, self._read_test, + [b'ab\0c'], None, strict=1) + self._read_test([b'"ab"c'], [['abc']], doublequote=0) + + def test_read_eol(self): + self._read_test([b'a,b'], [['a', 'b']]) + self._read_test([b'a,b\n'], [['a', 'b']]) + self._read_test([b'a,b\r\n'], [['a', 'b']]) + self._read_test([b'a,b\r'], [['a', 'b']]) + self.assertRaises(csv.Error, self._read_test, [b'a,b\rc,d'], []) + self.assertRaises(csv.Error, self._read_test, [b'a,b\nc,d'], []) + self.assertRaises(csv.Error, self._read_test, [b'a,b\r\nc,d'], []) + + def test_read_escape(self): + self._read_test([b'a,\\b,c'], [['a', 'b', 'c']], escapechar='\\') + self._read_test([b'a,b\\,c'], [['a', 'b,c']], escapechar='\\') + 
self._read_test([b'a,"b\\,c"'], [['a', 'b,c']], escapechar='\\') + self._read_test([b'a,"b,\\c"'], [['a', 'b,c']], escapechar='\\') + self._read_test([b'a,"b,c\\""'], [['a', 'b,c"']], escapechar='\\') + self._read_test([b'a,"b,c"\\'], [['a', 'b,c\\']], escapechar='\\') + + def test_read_quoting(self): + self._read_test([b'1,",3,",5'], [['1', ',3,', '5']]) + self._read_test([b'1,",3,",5'], [['1', '"', '3', '"', '5']], + quotechar=None, escapechar='\\') + self._read_test([b'1,",3,",5'], [['1', '"', '3', '"', '5']], + quoting=csv.QUOTE_NONE, escapechar='\\') + # will this fail where locale uses comma for decimals? + self._read_test([b',3,"5",7.3, 9'], [['', 3, '5', 7.3, 9]], + quoting=csv.QUOTE_NONNUMERIC) + self._read_test([b'"a\nb", 7'], [['a\nb', ' 7']]) + self.assertRaises(ValueError, self._read_test, + [b'abc,3'], [[]], + quoting=csv.QUOTE_NONNUMERIC) + + def test_read_linenum(self): + for r in (csv.reader([b'line,1', b'line,2', b'line,3']), + csv.DictReader([b'line,1', b'line,2', b'line,3'], + fieldnames=['a', 'b', 'c'])): + self.assertEqual(r.line_num, 0) + next(r) + self.assertEqual(r.line_num, 1) + next(r) + self.assertEqual(r.line_num, 2) + next(r) + self.assertEqual(r.line_num, 3) + self.assertRaises(StopIteration, next, r) + self.assertEqual(r.line_num, 3) + + def test_roundtrip_quoteed_newlines(self): + fd, name = tempfile.mkstemp() + fileobj = os.fdopen(fd, "w+b") + try: + writer = csv.writer(fileobj) + self.assertRaises(TypeError, writer.writerows, None) + rows = [['a\nb', 'b'], ['c', 'x\r\nd']] + writer.writerows(rows) + fileobj.seek(0) + for i, row in enumerate(csv.reader(fileobj)): + self.assertEqual(row, rows[i]) + finally: + fileobj.close() + os.unlink(name) + + +class TestDialectRegistry(unittest.TestCase): + def test_registry_badargs(self): + self.assertRaises(TypeError, csv.list_dialects, None) + self.assertRaises(TypeError, csv.get_dialect) + self.assertRaises(csv.Error, csv.get_dialect, None) + self.assertRaises(csv.Error, csv.get_dialect, 
"nonesuch") + self.assertRaises(TypeError, csv.unregister_dialect) + self.assertRaises(csv.Error, csv.unregister_dialect, None) + self.assertRaises(csv.Error, csv.unregister_dialect, "nonesuch") + self.assertRaises(TypeError, csv.register_dialect, None) + self.assertRaises(TypeError, csv.register_dialect, None, None) + self.assertRaises(TypeError, csv.register_dialect, "nonesuch", 0, 0) + self.assertRaises(TypeError, csv.register_dialect, "nonesuch", + badargument=None) + self.assertRaises(TypeError, csv.register_dialect, "nonesuch", + quoting=None) + self.assertRaises(TypeError, csv.register_dialect, []) + + def test_registry(self): + class myexceltsv(csv.excel): + delimiter = "\t" + name = "myexceltsv" + expected_dialects = csv.list_dialects() + [name] + expected_dialects.sort() + csv.register_dialect(name, myexceltsv) + try: + self.assertEqual(csv.get_dialect(name).delimiter, '\t') + got_dialects = csv.list_dialects() + got_dialects.sort() + self.assertEqual(expected_dialects, got_dialects) + finally: + csv.unregister_dialect(name) + + def test_register_kwargs(self): + name = 'fedcba' + csv.register_dialect(name, delimiter=';') + try: + self.assertNotEqual(csv.get_dialect(name).delimiter, '\t') + self.assertEqual(list(csv.reader([b'X;Y;Z'], name)), [[u'X', u'Y', u'Z']]) + finally: + csv.unregister_dialect(name) + + def test_incomplete_dialect(self): + class myexceltsv(csv.Dialect): + delimiter = "\t" + self.assertRaises(csv.Error, myexceltsv) + + def test_space_dialect(self): + class space(csv.excel): + delimiter = " " + quoting = csv.QUOTE_NONE + escapechar = "\\" + + fd, name = tempfile.mkstemp() + fileobj = os.fdopen(fd, "w+b") + try: + fileobj.write(b"abc def\nc1ccccc1 benzene\n") + fileobj.seek(0) + rdr = csv.reader(fileobj, dialect=space()) + self.assertEqual(next(rdr), ["abc", "def"]) + self.assertEqual(next(rdr), ["c1ccccc1", "benzene"]) + finally: + fileobj.close() + os.unlink(name) + + def test_dialect_apply(self): + class testA(csv.excel): + delimiter 
= "\t" + + class testB(csv.excel): + delimiter = ":" + + class testC(csv.excel): + delimiter = "|" + + csv.register_dialect('testC', testC) + try: + fd, name = tempfile.mkstemp() + fileobj = os.fdopen(fd, "w+b") + try: + writer = csv.writer(fileobj) + writer.writerow([1, 2, 3]) + fileobj.seek(0) + self.assertEqual(fileobj.read(), b"1,2,3\r\n") + finally: + fileobj.close() + os.unlink(name) + + fd, name = tempfile.mkstemp() + fileobj = os.fdopen(fd, "w+b") + try: + writer = csv.writer(fileobj, testA) + writer.writerow([1, 2, 3]) + fileobj.seek(0) + self.assertEqual(fileobj.read(), b"1\t2\t3\r\n") + finally: + fileobj.close() + os.unlink(name) + + fd, name = tempfile.mkstemp() + fileobj = os.fdopen(fd, "w+b") + try: + writer = csv.writer(fileobj, dialect=testB()) + writer.writerow([1, 2, 3]) + fileobj.seek(0) + self.assertEqual(fileobj.read(), b"1:2:3\r\n") + finally: + fileobj.close() + os.unlink(name) + + fd, name = tempfile.mkstemp() + fileobj = os.fdopen(fd, "w+b") + try: + writer = csv.writer(fileobj, dialect='testC') + writer.writerow([1, 2, 3]) + fileobj.seek(0) + self.assertEqual(fileobj.read(), b"1|2|3\r\n") + finally: + fileobj.close() + os.unlink(name) + + fd, name = tempfile.mkstemp() + fileobj = os.fdopen(fd, "w+b") + try: + writer = csv.writer(fileobj, dialect=testA, delimiter=';') + writer.writerow([1, 2, 3]) + fileobj.seek(0) + self.assertEqual(fileobj.read(), b"1;2;3\r\n") + finally: + fileobj.close() + os.unlink(name) + + finally: + csv.unregister_dialect('testC') + + def test_bad_dialect(self): + # Unknown parameter + self.assertRaises(TypeError, csv.reader, [], bad_attr=0) + # Bad values + self.assertRaises(TypeError, csv.reader, [], delimiter=None) + self.assertRaises(TypeError, csv.reader, [], quoting=-1) + self.assertRaises(TypeError, csv.reader, [], quoting=100) + + +class TestCsvBase(unittest.TestCase): + def readerAssertEqual(self, input, expected_result): + fd, name = tempfile.mkstemp() + fileobj = os.fdopen(fd, "w+b") + try: + 
fileobj.write(input) + fileobj.seek(0) + reader = csv.reader(fileobj, dialect=self.dialect) + fields = list(reader) + self.assertEqual(fields, expected_result) + finally: + fileobj.close() + os.unlink(name) + + def writerAssertEqual(self, input, expected_result): + fd, name = tempfile.mkstemp() + fileobj = os.fdopen(fd, "w+b") + try: + writer = csv.writer(fileobj, dialect=self.dialect) + writer.writerows(input) + fileobj.seek(0) + self.assertEqual(fileobj.read(), expected_result) + finally: + fileobj.close() + os.unlink(name) + + +class TestDialectExcel(TestCsvBase): + dialect = 'excel' + + def test_single(self): + self.readerAssertEqual(b'abc', [['abc']]) + + def test_simple(self): + self.readerAssertEqual(b'1,2,3,4,5', [['1', '2', '3', '4', '5']]) + + def test_blankline(self): + self.readerAssertEqual(b'', []) + + def test_empty_fields(self): + self.readerAssertEqual(b',', [['', '']]) + + def test_singlequoted(self): + self.readerAssertEqual(b'""', [['']]) + + def test_singlequoted_left_empty(self): + self.readerAssertEqual(b'"",', [['', '']]) + + def test_singlequoted_right_empty(self): + self.readerAssertEqual(b',""', [['', '']]) + + def test_single_quoted_quote(self): + self.readerAssertEqual(b'""""', [['"']]) + + def test_quoted_quotes(self): + self.readerAssertEqual(b'""""""', [['""']]) + + def test_inline_quote(self): + self.readerAssertEqual(b'a""b', [['a""b']]) + + def test_inline_quotes(self): + self.readerAssertEqual(b'a"b"c', [['a"b"c']]) + + def test_quotes_and_more(self): + # Excel would never write a field containing '"a"b', but when + # reading one, it will return 'ab'. + self.readerAssertEqual(b'"a"b', [['ab']]) + + def test_lone_quote(self): + self.readerAssertEqual(b'a"b', [['a"b']]) + + def test_quote_and_quote(self): + # Excel would never write a field containing '"a" "b"', but when + # reading one, it will return 'a "b"'. 
+ self.readerAssertEqual(b'"a" "b"', [['a "b"']]) + + def test_space_and_quote(self): + self.readerAssertEqual(b' "a"', [[' "a"']]) + + def test_quoted(self): + self.readerAssertEqual(b'1,2,3,"I think, therefore I am",5,6', + [['1', '2', '3', + 'I think, therefore I am', + '5', '6']]) + + def test_quoted_quote(self): + value = b'1,2,3,"""I see,"" said the blind man","as he picked up his hammer and saw"' + self.readerAssertEqual(value, + [['1', '2', '3', + '"I see," said the blind man', + 'as he picked up his hammer and saw']]) + + def test_quoted_nl(self): + input = b'''\ +1,2,3,"""I see,"" +said the blind man","as he picked up his +hammer and saw" +9,8,7,6''' + self.readerAssertEqual(input, + [['1', '2', '3', + '"I see,"\nsaid the blind man', + 'as he picked up his\nhammer and saw'], + ['9', '8', '7', '6']]) + + def test_dubious_quote(self): + self.readerAssertEqual(b'12,12,1",', [['12', '12', '1"', '']]) + + def test_null(self): + self.writerAssertEqual([], b'') + + def test_single_writer(self): + self.writerAssertEqual([['abc']], b'abc\r\n') + + def test_simple_writer(self): + self.writerAssertEqual([[1, 2, 'abc', 3, 4]], + b'1,2,abc,3,4\r\n') + + def test_quotes(self): + self.writerAssertEqual([[1, 2, 'a"bc"', 3, 4]], + b'1,2,"a""bc""",3,4\r\n') + + def test_quote_fieldsep(self): + self.writerAssertEqual([['abc,def']], + b'"abc,def"\r\n') + + def test_newlines(self): + self.writerAssertEqual([[1, 2, 'a\nbc', 3, 4]], + b'1,2,"a\nbc",3,4\r\n') + + +class EscapedExcel(csv.excel): + quoting = csv.QUOTE_NONE + escapechar = '\\' + + +class TestEscapedExcel(TestCsvBase): + dialect = EscapedExcel() + + def test_escape_fieldsep(self): + self.writerAssertEqual([['abc,def']], b'abc\\,def\r\n') + + def test_read_escape_fieldsep(self): + self.readerAssertEqual(b'abc\\,def\r\n', [['abc,def']]) + + +class QuotedEscapedExcel(csv.excel): + quoting = csv.QUOTE_NONNUMERIC + escapechar = '\\' + + +class TestQuotedEscapedExcel(TestCsvBase): + dialect = QuotedEscapedExcel() + + def 
test_write_escape_fieldsep(self): + self.writerAssertEqual([['abc,def']], b'"abc,def"\r\n') + + def test_read_escape_fieldsep(self): + self.readerAssertEqual(b'"abc\\,def"\r\n', [['abc,def']]) + + +class TestDictFields(unittest.TestCase): + # "long" means the row is longer than the number of fieldnames + # "short" means there are fewer elements in the row than fieldnames + def test_write_simple_dict(self): + fd, name = tempfile.mkstemp() + fileobj = open(name, 'w+b') + try: + writer = csv.DictWriter(fileobj, fieldnames=["f1", "f2", "f3"]) + writer.writeheader() + fileobj.seek(0) + self.assertEqual(fileobj.readline(), b"f1,f2,f3\r\n") + writer.writerow({"f1": 10, "f3": "abc"}) + fileobj.seek(0) + fileobj.readline() # header + self.assertEqual(fileobj.read(), b"10,,abc\r\n") + finally: + fileobj.close() + os.unlink(name) + + def test_write_unicode_header_dict(self): + fd, name = tempfile.mkstemp() + fileobj = open(name, 'w+b') + try: + writer = csv.DictWriter(fileobj, fieldnames=[u"ñ", u"ö"]) + writer.writeheader() + fileobj.seek(0) + self.assertEqual(fileobj.readline().decode('utf-8'), u"ñ,ö\r\n") + finally: + fileobj.close() + os.unlink(name) + + def test_write_no_fields(self): + fileobj = BytesIO() + self.assertRaises(TypeError, csv.DictWriter, fileobj) + + def test_read_dict_fields(self): + fd, name = tempfile.mkstemp() + fileobj = os.fdopen(fd, "w+b") + try: + fileobj.write(b"1,2,abc\r\n") + fileobj.seek(0) + reader = csv.DictReader(fileobj, + fieldnames=["f1", "f2", "f3"]) + self.assertEqual(next(reader), + {"f1": '1', "f2": '2', "f3": 'abc'}) + finally: + fileobj.close() + os.unlink(name) + + def test_read_dict_no_fieldnames(self): + fd, name = tempfile.mkstemp() + fileobj = os.fdopen(fd, "w+b") + try: + fileobj.write(b"f1,f2,f3\r\n1,2,abc\r\n") + fileobj.seek(0) + reader = csv.DictReader(fileobj) + self.assertEqual(reader.fieldnames, + ["f1", "f2", "f3"]) + self.assertEqual(next(reader), + {"f1": '1', "f2": '2', "f3": 'abc'}) + finally: + fileobj.close() + 
os.unlink(name) + + # Two test cases to make sure existing ways of implicitly setting + # fieldnames continue to work. Both arise from discussion in issue3436. + def test_read_dict_fieldnames_from_file(self): + fd, name = tempfile.mkstemp() + f = os.fdopen(fd, "w+b") + try: + f.write(b"f1,f2,f3\r\n1,2,abc\r\n") + f.seek(0) + reader = csv.DictReader(f, fieldnames=next(csv.reader(f))) + self.assertEqual(reader.fieldnames, + ["f1", "f2", "f3"]) + self.assertEqual(next(reader), + {"f1": '1', "f2": '2', "f3": 'abc'}) + finally: + f.close() + os.unlink(name) + + def test_read_dict_fieldnames_chain(self): + import itertools + fd, name = tempfile.mkstemp() + f = os.fdopen(fd, "w+b") + try: + f.write(b"f1,f2,f3\r\n1,2,abc\r\n") + f.seek(0) + reader = csv.DictReader(f) + first = next(reader) + for row in itertools.chain([first], reader): + self.assertEqual(reader.fieldnames, ["f1", "f2", "f3"]) + self.assertEqual(row, {"f1": '1', "f2": '2', "f3": 'abc'}) + finally: + f.close() + os.unlink(name) + + def test_read_long(self): + fd, name = tempfile.mkstemp() + fileobj = os.fdopen(fd, "w+b") + try: + fileobj.write(b"1,2,abc,4,5,6\r\n") + fileobj.seek(0) + reader = csv.DictReader(fileobj, + fieldnames=["f1", "f2"]) + self.assertEqual(next(reader), {"f1": '1', "f2": '2', + None: ["abc", "4", "5", "6"]}) + finally: + fileobj.close() + os.unlink(name) + + def test_read_long_with_rest(self): + fd, name = tempfile.mkstemp() + fileobj = os.fdopen(fd, "w+b") + try: + fileobj.write(b"1,2,abc,4,5,6\r\n") + fileobj.seek(0) + reader = csv.DictReader(fileobj, + fieldnames=["f1", "f2"], restkey="_rest") + self.assertEqual(next(reader), {"f1": '1', "f2": '2', + "_rest": ["abc", "4", "5", "6"]}) + finally: + fileobj.close() + os.unlink(name) + + def test_read_long_with_rest_no_fieldnames(self): + fd, name = tempfile.mkstemp() + fileobj = os.fdopen(fd, "w+b") + try: + fileobj.write(b"f1,f2\r\n1,2,abc,4,5,6\r\n") + fileobj.seek(0) + reader = csv.DictReader(fileobj, restkey="_rest") + 
self.assertEqual(reader.fieldnames, ["f1", "f2"]) + self.assertEqual(next(reader), {"f1": '1', "f2": '2', + "_rest": ["abc", "4", "5", "6"]}) + finally: + fileobj.close() + os.unlink(name) + + def test_read_short(self): + fd, name = tempfile.mkstemp() + fileobj = os.fdopen(fd, "w+b") + try: + fileobj.write(b"1,2,abc,4,5,6\r\n1,2,abc\r\n") + fileobj.seek(0) + reader = csv.DictReader(fileobj, + fieldnames="1 2 3 4 5 6".split(), + restval="DEFAULT") + self.assertEqual(next(reader), {"1": '1', "2": '2', "3": 'abc', + "4": '4', "5": '5', "6": '6'}) + self.assertEqual(next(reader), {"1": '1', "2": '2', "3": 'abc', + "4": 'DEFAULT', "5": 'DEFAULT', + "6": 'DEFAULT'}) + finally: + fileobj.close() + os.unlink(name) + + def test_read_multi(self): + sample = [ + b'2147483648,43.0e12,17,abc,def\r\n', + b'147483648,43.0e2,17,abc,def\r\n', + b'47483648,43.0,170,abc,def\r\n' + ] + + reader = csv.DictReader(sample, + fieldnames="i1 float i2 s1 s2".split()) + self.assertEqual(next(reader), {"i1": '2147483648', + "float": '43.0e12', + "i2": '17', + "s1": 'abc', + "s2": 'def'}) + + def test_read_with_blanks(self): + reader = csv.DictReader([b"1,2,abc,4,5,6\r\n", b"\r\n", + b"1,2,abc,4,5,6\r\n"], + fieldnames="1 2 3 4 5 6".split()) + self.assertEqual(next(reader), {"1": '1', "2": '2', "3": 'abc', + "4": '4', "5": '5', "6": '6'}) + self.assertEqual(next(reader), {"1": '1', "2": '2', "3": 'abc', + "4": '4', "5": '5', "6": '6'}) + + def test_read_semi_sep(self): + reader = csv.DictReader([b"1;2;abc;4;5;6\r\n"], + fieldnames="1 2 3 4 5 6".split(), + delimiter=';') + self.assertEqual(next(reader), {"1": '1', "2": '2', "3": 'abc', + "4": '4', "5": '5', "6": '6'}) + + def test_empty_file(self): + csv.DictReader(BytesIO()) + +class TestArrayWrites(unittest.TestCase): + def test_int_write(self): + contents = [(20-i) for i in range(20)] + a = array.array('i', contents) + + fd, name = tempfile.mkstemp() + fileobj = os.fdopen(fd, "w+b") + try: + writer = csv.writer(fileobj, dialect="excel") + 
writer.writerow(a) + expected = b",".join([str(i).encode('utf-8') for i in a])+b"\r\n" + fileobj.seek(0) + self.assertEqual(fileobj.read(), expected) + finally: + fileobj.close() + os.unlink(name) + + def test_double_write(self): + contents = [(20-i)*0.1 for i in range(20)] + a = array.array('d', contents) + fd, name = tempfile.mkstemp() + fileobj = os.fdopen(fd, "w+b") + try: + writer = csv.writer(fileobj, dialect="excel") + writer.writerow(a) + float_repr = str + if sys.version_info >= (2, 7, 3): + float_repr = repr + expected = b",".join([float_repr(i).encode('utf-8') for i in a])+b"\r\n" + fileobj.seek(0) + self.assertEqual(fileobj.read(), expected) + finally: + fileobj.close() + os.unlink(name) + + def test_float_write(self): + contents = [(20-i)*0.1 for i in range(20)] + a = array.array('f', contents) + fd, name = tempfile.mkstemp() + fileobj = os.fdopen(fd, "w+b") + try: + writer = csv.writer(fileobj, dialect="excel") + writer.writerow(a) + float_repr = str + if sys.version_info >= (2, 7, 3): + float_repr = repr + expected = b",".join([float_repr(i).encode('utf-8') for i in a])+b"\r\n" + fileobj.seek(0) + self.assertEqual(fileobj.read(), expected) + finally: + fileobj.close() + os.unlink(name) + + def test_char_write(self): + a = string.ascii_letters + fd, name = tempfile.mkstemp() + fileobj = os.fdopen(fd, "w+b") + try: + writer = csv.writer(fileobj, dialect="excel") + writer.writerow(a) + expected = ",".join(a).encode('utf-8')+b"\r\n" + fileobj.seek(0) + self.assertEqual(fileobj.read(), expected) + finally: + fileobj.close() + os.unlink(name) + + +class TestUnicode(unittest.TestCase): + def test_unicode_read(self): + f = EncodedFile(BytesIO((u"Martin von Löwis," + u"Marc André Lemburg," + u"Guido van Rossum," + u"François Pinard\r\n").encode('iso-8859-1')), + data_encoding='iso-8859-1') + reader = csv.reader(f, encoding='iso-8859-1') + self.assertEqual(list(reader), [[u"Martin von Löwis", + u"Marc André Lemburg", + u"Guido van Rossum", + u"François 
Pinard"]]) + + +class TestUnicodeErrors(unittest.TestCase): + def test_encode_error(self): + fd = BytesIO() + writer = csv.writer(fd, encoding='cp1252', errors='xmlcharrefreplace') + writer.writerow(['hello', chr(2603)]) + self.assertEqual(fd.getvalue(), b'hello,ਫ\r\n') + + def test_encode_error_dictwriter(self): + fd = BytesIO() + dw = csv.DictWriter(fd, ['col1'], + encoding='cp1252', errors='xmlcharrefreplace') + dw.writerow({'col1': chr(2604)}) + self.assertEqual(fd.getvalue(), b'ਬ\r\n') + + def test_decode_error(self): + """Make sure the specified error-handling mode is obeyed on readers.""" + file = EncodedFile(BytesIO(u'Löwis,2,3'.encode('iso-8859-1')), + data_encoding='iso-8859-1') + reader = csv.reader(file, encoding='ascii', errors='ignore') + self.assertEqual(list(reader)[0][0], 'Lwis') + + def test_decode_error_dictreader(self): + """Make sure the error-handling mode is obeyed on DictReaders.""" + file = EncodedFile(BytesIO(u'name,height,weight\nLöwis,2,3'.encode('iso-8859-1')), + data_encoding='iso-8859-1') + reader = csv.DictReader(file, encoding='ascii', errors='ignore') + self.assertEqual(list(reader)[0]['name'], 'Lwis') diff --git a/thesisenv/lib/python3.6/site-packages/zc.lockfile-1.3.0-py3.6-nspkg.pth b/thesisenv/lib/python3.6/site-packages/zc.lockfile-1.3.0-py3.6-nspkg.pth new file mode 100644 index 0000000..25541e3 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zc.lockfile-1.3.0-py3.6-nspkg.pth @@ -0,0 +1 @@ +import sys, types, os;has_mfs = sys.version_info > (3, 5);p = os.path.join(sys._getframe(1).f_locals['sitedir'], *('zc',));importlib = has_mfs and __import__('importlib.util');has_mfs and __import__('importlib.machinery');m = has_mfs and sys.modules.setdefault('zc', importlib.util.module_from_spec(importlib.machinery.PathFinder.find_spec('zc', [os.path.dirname(p)])));m = m or sys.modules.setdefault('zc', types.ModuleType('zc'));mp = (m or []) and m.__dict__.setdefault('__path__',[]);(p not in mp) and mp.append(p) diff --git 
a/thesisenv/lib/python3.6/site-packages/zc.lockfile-1.3.0-py3.6.egg-info/PKG-INFO b/thesisenv/lib/python3.6/site-packages/zc.lockfile-1.3.0-py3.6.egg-info/PKG-INFO new file mode 100644 index 0000000..f9af51f --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zc.lockfile-1.3.0-py3.6.egg-info/PKG-INFO @@ -0,0 +1,185 @@ +Metadata-Version: 2.1 +Name: zc.lockfile +Version: 1.3.0 +Summary: Basic inter-process locks +Home-page: http://www.python.org/pypi/zc.lockfile +Author: Zope Foundation +Author-email: zope-dev@zope.org +License: ZPL 2.1 +Description: ************************* + Basic inter-process locks + ************************* + + The zc.lockfile package provides a basic portable implementation of + interprocess locks using lock files. The purpose if not specifically + to lock files, but to simply provide locks with an implementation + based on file-locking primitives. Of course, these locks could be + used to mediate access to *other* files. For example, the ZODB file + storage implementation uses file locks to mediate access to + file-storage database files. The database files and lock file files + are separate files. + + .. contents:: + + Detailed Documentation + ********************** + + Lock file support + ================= + + The ZODB lock_file module provides support for creating file system + locks. These are locks that are implemented with lock files and + OS-provided locking facilities. To create a lock, instantiate a + LockFile object with a file name: + + >>> import zc.lockfile + >>> lock = zc.lockfile.LockFile('lock') + + If we try to lock the same name, we'll get a lock error: + + >>> import zope.testing.loggingsupport + >>> handler = zope.testing.loggingsupport.InstalledHandler('zc.lockfile') + >>> try: + ... zc.lockfile.LockFile('lock') + ... except zc.lockfile.LockError: + ... print("Can't lock file") + Can't lock file + + .. We don't log failure to acquire. + + >>> for record in handler.records: # doctest: +ELLIPSIS + ... 
print(record.levelname+' '+record.getMessage()) + + To release the lock, use it's close method: + + >>> lock.close() + + The lock file is not removed. It is left behind: + + >>> import os + >>> os.path.exists('lock') + True + + Of course, now that we've released the lock, we can create it again: + + >>> lock = zc.lockfile.LockFile('lock') + >>> lock.close() + + .. Cleanup + + >>> import os + >>> os.remove('lock') + + Hostname in lock file + ===================== + + In a container environment (e.g. Docker), the PID is typically always + identical even if multiple containers are running under the same operating + system instance. + + Clearly, inspecting lock files doesn't then help much in debugging. To identify + the container which created the lock file, we need information about the + container in the lock file. Since Docker uses the container identifier or name + as the hostname, this information can be stored in the lock file in addition to + or instead of the PID. + + Use the ``content_template`` keyword argument to ``LockFile`` to specify a + custom lock file content format: + + >>> lock = zc.lockfile.LockFile('lock', content_template='{pid};{hostname}') + >>> lock.close() + + If you now inspected the lock file, you would see e.g.: + + $ cat lock + 123;myhostname + + + Change History + *************** + + 1.3.0 (2018-04-23) + ================== + + - Stop logging failure to acquire locks. Clients can do that if they wish. + + - Claim support for Python 3.4 and 3.5. + + - Drop Python 3.2 support because pip no longer supports it. + + 1.2.1 (2016-06-19) + ================== + + - Fixed: unlocking and locking didn't work when a multiprocessing + process was running (and presumably other conditions). + + 1.2.0 (2016-06-09) + ================== + + - Added the ability to include the hostname in the lock file content. + + - Code and ReST markup cosmetics. 
+ [alecghica] + + 1.1.0 (2013-02-12) + ================== + + - Added Trove classifiers and made setup.py zest.releaser friendly. + + - Added Python 3.2, 3.3 and PyPy 1.9 support. + + - Removed Python 2.4 and Python 2.5 support. + + 1.0.2 (2012-12-02) + ================== + + - Fixed: the fix included in 1.0.1 caused multiple pids to be written + to the lock file + + 1.0.1 (2012-11-30) + ================== + + - Fixed: when there was lock contention, the pid in the lock file was + lost. + + Thanks to Daniel Moisset reporting the problem and providing a fix + with tests. + + - Added test extra to declare test dependency on ``zope.testing``. + + - Using Python's ``doctest`` module instead of depreacted + ``zope.testing.doctest``. + + 1.0.0 (2008-10-18) + ================== + + - Fixed a small bug in error logging. + + 1.0.0b1 (2007-07-18) + ==================== + + - Initial release + + Download + ********************** + +Keywords: lock +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: Zope Public License +Classifier: Natural Language :: English +Classifier: Operating System :: POSIX +Classifier: Operating System :: Microsoft :: Windows +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.6 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Topic :: Software Development +Provides-Extra: test diff --git a/thesisenv/lib/python3.6/site-packages/zc.lockfile-1.3.0-py3.6.egg-info/SOURCES.txt 
b/thesisenv/lib/python3.6/site-packages/zc.lockfile-1.3.0-py3.6.egg-info/SOURCES.txt new file mode 100644 index 0000000..5408874 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zc.lockfile-1.3.0-py3.6.egg-info/SOURCES.txt @@ -0,0 +1,23 @@ +CHANGES.txt +COPYRIGHT.txt +LICENSE.txt +MANIFEST.in +README.txt +bootstrap.py +buildout.cfg +doc.txt +pip-delete-this-directory.txt +setup.cfg +setup.py +tox.ini +src/zc/__init__.py +src/zc.lockfile.egg-info/PKG-INFO +src/zc.lockfile.egg-info/SOURCES.txt +src/zc.lockfile.egg-info/dependency_links.txt +src/zc.lockfile.egg-info/namespace_packages.txt +src/zc.lockfile.egg-info/not-zip-safe +src/zc.lockfile.egg-info/requires.txt +src/zc.lockfile.egg-info/top_level.txt +src/zc/lockfile/README.txt +src/zc/lockfile/__init__.py +src/zc/lockfile/tests.py \ No newline at end of file diff --git a/thesisenv/lib/python3.6/site-packages/zc.lockfile-1.3.0-py3.6.egg-info/dependency_links.txt b/thesisenv/lib/python3.6/site-packages/zc.lockfile-1.3.0-py3.6.egg-info/dependency_links.txt new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zc.lockfile-1.3.0-py3.6.egg-info/dependency_links.txt @@ -0,0 +1 @@ + diff --git a/thesisenv/lib/python3.6/site-packages/zc.lockfile-1.3.0-py3.6.egg-info/installed-files.txt b/thesisenv/lib/python3.6/site-packages/zc.lockfile-1.3.0-py3.6.egg-info/installed-files.txt new file mode 100644 index 0000000..2995e9a --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zc.lockfile-1.3.0-py3.6.egg-info/installed-files.txt @@ -0,0 +1,13 @@ +../zc.lockfile-1.3.0-py3.6-nspkg.pth +../zc/lockfile/README.txt +../zc/lockfile/__init__.py +../zc/lockfile/__pycache__/__init__.cpython-36.pyc +../zc/lockfile/__pycache__/tests.cpython-36.pyc +../zc/lockfile/tests.py +PKG-INFO +SOURCES.txt +dependency_links.txt +namespace_packages.txt +not-zip-safe +requires.txt +top_level.txt diff --git 
a/thesisenv/lib/python3.6/site-packages/zc.lockfile-1.3.0-py3.6.egg-info/namespace_packages.txt b/thesisenv/lib/python3.6/site-packages/zc.lockfile-1.3.0-py3.6.egg-info/namespace_packages.txt new file mode 100644 index 0000000..7647cfa --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zc.lockfile-1.3.0-py3.6.egg-info/namespace_packages.txt @@ -0,0 +1 @@ +zc diff --git a/thesisenv/lib/python3.6/site-packages/zc.lockfile-1.3.0-py3.6.egg-info/not-zip-safe b/thesisenv/lib/python3.6/site-packages/zc.lockfile-1.3.0-py3.6.egg-info/not-zip-safe new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zc.lockfile-1.3.0-py3.6.egg-info/not-zip-safe @@ -0,0 +1 @@ + diff --git a/thesisenv/lib/python3.6/site-packages/zc.lockfile-1.3.0-py3.6.egg-info/requires.txt b/thesisenv/lib/python3.6/site-packages/zc.lockfile-1.3.0-py3.6.egg-info/requires.txt new file mode 100644 index 0000000..e93acfb --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zc.lockfile-1.3.0-py3.6.egg-info/requires.txt @@ -0,0 +1,4 @@ +setuptools + +[test] +zope.testing diff --git a/thesisenv/lib/python3.6/site-packages/zc.lockfile-1.3.0-py3.6.egg-info/top_level.txt b/thesisenv/lib/python3.6/site-packages/zc.lockfile-1.3.0-py3.6.egg-info/top_level.txt new file mode 100644 index 0000000..7647cfa --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zc.lockfile-1.3.0-py3.6.egg-info/top_level.txt @@ -0,0 +1 @@ +zc diff --git a/thesisenv/lib/python3.6/site-packages/zc/lockfile/README.txt b/thesisenv/lib/python3.6/site-packages/zc/lockfile/README.txt new file mode 100644 index 0000000..89ef33e --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zc/lockfile/README.txt @@ -0,0 +1,70 @@ +Lock file support +================= + +The ZODB lock_file module provides support for creating file system +locks. These are locks that are implemented with lock files and +OS-provided locking facilities. 
To create a lock, instantiate a +LockFile object with a file name: + + >>> import zc.lockfile + >>> lock = zc.lockfile.LockFile('lock') + +If we try to lock the same name, we'll get a lock error: + + >>> import zope.testing.loggingsupport + >>> handler = zope.testing.loggingsupport.InstalledHandler('zc.lockfile') + >>> try: + ... zc.lockfile.LockFile('lock') + ... except zc.lockfile.LockError: + ... print("Can't lock file") + Can't lock file + +.. We don't log failure to acquire. + + >>> for record in handler.records: # doctest: +ELLIPSIS + ... print(record.levelname+' '+record.getMessage()) + +To release the lock, use it's close method: + + >>> lock.close() + +The lock file is not removed. It is left behind: + + >>> import os + >>> os.path.exists('lock') + True + +Of course, now that we've released the lock, we can create it again: + + >>> lock = zc.lockfile.LockFile('lock') + >>> lock.close() + +.. Cleanup + + >>> import os + >>> os.remove('lock') + +Hostname in lock file +===================== + +In a container environment (e.g. Docker), the PID is typically always +identical even if multiple containers are running under the same operating +system instance. + +Clearly, inspecting lock files doesn't then help much in debugging. To identify +the container which created the lock file, we need information about the +container in the lock file. Since Docker uses the container identifier or name +as the hostname, this information can be stored in the lock file in addition to +or instead of the PID. 
+ +Use the ``content_template`` keyword argument to ``LockFile`` to specify a +custom lock file content format: + + >>> lock = zc.lockfile.LockFile('lock', content_template='{pid};{hostname}') + >>> lock.close() + +If you now inspected the lock file, you would see e.g.: + + $ cat lock + 123;myhostname + diff --git a/thesisenv/lib/python3.6/site-packages/zc/lockfile/__init__.py b/thesisenv/lib/python3.6/site-packages/zc/lockfile/__init__.py new file mode 100644 index 0000000..a0ac2ff --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zc/lockfile/__init__.py @@ -0,0 +1,104 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## + +import os +import errno +import logging +logger = logging.getLogger("zc.lockfile") + +class LockError(Exception): + """Couldn't get a lock + """ + +try: + import fcntl +except ImportError: + try: + import msvcrt + except ImportError: + def _lock_file(file): + raise TypeError('No file-locking support on this platform') + def _unlock_file(file): + raise TypeError('No file-locking support on this platform') + + else: + # Windows + def _lock_file(file): + # Lock just the first byte + try: + msvcrt.locking(file.fileno(), msvcrt.LK_NBLCK, 1) + except IOError: + raise LockError("Couldn't lock %r" % file.name) + + def _unlock_file(file): + try: + file.seek(0) + msvcrt.locking(file.fileno(), msvcrt.LK_UNLCK, 1) + except IOError: + raise 
LockError("Couldn't unlock %r" % file.name) + +else: + # Unix + _flags = fcntl.LOCK_EX | fcntl.LOCK_NB + + def _lock_file(file): + try: + fcntl.flock(file.fileno(), _flags) + except IOError: + raise LockError("Couldn't lock %r" % file.name) + + def _unlock_file(file): + fcntl.flock(file.fileno(), fcntl.LOCK_UN) + +class LazyHostName(object): + """Avoid importing socket and calling gethostname() unnecessarily""" + def __str__(self): + import socket + return socket.gethostname() + + +class LockFile: + + _fp = None + + def __init__(self, path, content_template='{pid}'): + self._path = path + try: + # Try to open for writing without truncation: + fp = open(path, 'r+') + except IOError: + # If the file doesn't exist, we'll get an IO error, try a+ + # Note that there may be a race here. Multiple processes + # could fail on the r+ open and open the file a+, but only + # one will get the the lock and write a pid. + fp = open(path, 'a+') + + try: + _lock_file(fp) + except: + fp.close() + raise + + # We got the lock, record info in the file. + self._fp = fp + fp.write(" %s\n" % content_template.format(pid=os.getpid(), + hostname=LazyHostName())) + fp.truncate() + fp.flush() + + def close(self): + if self._fp is not None: + _unlock_file(self._fp) + self._fp.close() + self._fp = None diff --git a/thesisenv/lib/python3.6/site-packages/zc/lockfile/tests.py b/thesisenv/lib/python3.6/site-packages/zc/lockfile/tests.py new file mode 100644 index 0000000..e9fcbff --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zc/lockfile/tests.py @@ -0,0 +1,193 @@ +############################################################################## +# +# Copyright (c) 2004 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +import os, re, sys, unittest, doctest +import zc.lockfile, time, threading +from zope.testing import renormalizing, setupstack +import tempfile +try: + from unittest.mock import Mock, patch +except ImportError: + from mock import Mock, patch + +checker = renormalizing.RENormalizing([ + # Python 3 adds module path to error class name. + (re.compile("zc\.lockfile\.LockError:"), + r"LockError:"), + ]) + +def inc(): + while 1: + try: + lock = zc.lockfile.LockFile('f.lock') + except zc.lockfile.LockError: + continue + else: + break + f = open('f', 'r+b') + v = int(f.readline().strip()) + time.sleep(0.01) + v += 1 + f.seek(0) + f.write(('%d\n' % v).encode('ASCII')) + f.close() + lock.close() + +def many_threads_read_and_write(): + r""" + >>> with open('f', 'w+b') as file: + ... _ = file.write(b'0\n') + >>> with open('f.lock', 'w+b') as file: + ... _ = file.write(b'0\n') + + >>> n = 50 + >>> threads = [threading.Thread(target=inc) for i in range(n)] + >>> _ = [thread.start() for thread in threads] + >>> _ = [thread.join() for thread in threads] + >>> with open('f', 'rb') as file: + ... 
saved = int(file.read().strip()) + >>> saved == n + True + + >>> os.remove('f') + + We should only have one pid in the lock file: + + >>> f = open('f.lock') + >>> len(f.read().strip().split()) + 1 + >>> f.close() + + >>> os.remove('f.lock') + + """ + +def pid_in_lockfile(): + r""" + >>> import os, zc.lockfile + >>> pid = os.getpid() + >>> lock = zc.lockfile.LockFile("f.lock") + >>> f = open("f.lock") + >>> _ = f.seek(1) + >>> f.read().strip() == str(pid) + True + >>> f.close() + + Make sure that locking twice does not overwrite the old pid: + + >>> lock = zc.lockfile.LockFile("f.lock") + Traceback (most recent call last): + ... + LockError: Couldn't lock 'f.lock' + + >>> f = open("f.lock") + >>> _ = f.seek(1) + >>> f.read().strip() == str(pid) + True + >>> f.close() + + >>> lock.close() + """ + + +def hostname_in_lockfile(): + r""" + hostname is correctly written into the lock file when it's included in the + lock file content template + + >>> import zc.lockfile + >>> with patch('socket.gethostname', Mock(return_value='myhostname')): + ... lock = zc.lockfile.LockFile("f.lock", content_template='{hostname}') + >>> f = open("f.lock") + >>> _ = f.seek(1) + >>> f.read().rstrip() + 'myhostname' + >>> f.close() + + Make sure that locking twice does not overwrite the old hostname: + + >>> lock = zc.lockfile.LockFile("f.lock", content_template='{hostname}') + Traceback (most recent call last): + ... 
+ LockError: Couldn't lock 'f.lock' + + >>> f = open("f.lock") + >>> _ = f.seek(1) + >>> f.read().rstrip() + 'myhostname' + >>> f.close() + + >>> lock.close() + """ + + +class TestLogger(object): + def __init__(self): + self.log_entries = [] + + def exception(self, msg, *args): + self.log_entries.append((msg,) + args) + + +class LockFileLogEntryTestCase(unittest.TestCase): + """Tests for logging in case of lock failure""" + def setUp(self): + self.here = os.getcwd() + self.tmp = tempfile.mkdtemp(prefix='zc.lockfile-test-') + os.chdir(self.tmp) + + def tearDown(self): + os.chdir(self.here) + setupstack.rmtree(self.tmp) + + def test_log_formatting(self): + # PID and hostname are parsed and logged from lock file on failure + with patch('os.getpid', Mock(return_value=123)): + with patch('socket.gethostname', Mock(return_value='myhostname')): + lock = zc.lockfile.LockFile('f.lock', + content_template='{pid}/{hostname}') + with open('f.lock') as f: + self.assertEqual(' 123/myhostname\n', f.read()) + + lock.close() + + def test_unlock_and_lock_while_multiprocessing_process_running(self): + import multiprocessing + + lock = zc.lockfile.LockFile('l') + q = multiprocessing.Queue() + p = multiprocessing.Process(target=q.get) + p.daemon = True + p.start() + + # release and re-acquire should work (obviously) + lock.close() + lock = zc.lockfile.LockFile('l') + self.assertTrue(p.is_alive()) + + q.put(0) + lock.close() + p.join() + + +def test_suite(): + suite = unittest.TestSuite() + suite.addTest(doctest.DocFileSuite( + 'README.txt', checker=checker, + setUp=setupstack.setUpDirectory, tearDown=setupstack.tearDown)) + suite.addTest(doctest.DocTestSuite( + setUp=setupstack.setUpDirectory, tearDown=setupstack.tearDown, + checker=checker)) + # Add unittest test cases from this module + suite.addTest(unittest.defaultTestLoader.loadTestsFromName(__name__)) + return suite diff --git a/thesisenv/lib/python3.6/site-packages/zdaemon-4.2.0-py3.6.egg-info/PKG-INFO 
b/thesisenv/lib/python3.6/site-packages/zdaemon-4.2.0-py3.6.egg-info/PKG-INFO new file mode 100644 index 0000000..45fcb61 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zdaemon-4.2.0-py3.6.egg-info/PKG-INFO @@ -0,0 +1,884 @@ +Metadata-Version: 2.1 +Name: zdaemon +Version: 4.2.0 +Summary: Daemon process control library and tools for Unix-based systems +Home-page: https://github.com/zopefoundation/zdaemon +Author: Zope Foundation and Contributors +Author-email: zope-dev@zope.org +License: ZPL 2.1 +Description: ***************************************************** + ``zdaemon`` process controller for Unix-based systems + ***************************************************** + + ``zdaemon`` is a Unix (Unix, Linux, Mac OS X) Python program that wraps + commands to make them behave as proper daemons. + + .. contents:: + + =============== + Using zdaemon + =============== + + zdaemon provides a script, zdaemon, that can be used to run other + programs as POSIX (Unix) daemons. (Of course, it is only usable on + POSIX-complient systems.) + + Using zdaemon requires specifying a number of options, which can be + given in a configuration file, or as command-line options. It also + accepts commands teling it what do do. The commands are: + + start + Start a process as a daemon + + stop + Stop a running daemon process + + restart + Stop and then restart a program + + status + Find out if the program is running + + foreground or fg + Run a program + + kill signal + Send a signal to the daemon process + + reopen_transcript + Reopen the transcript log. See the discussion of the transcript + log below. + + help command + Get help on a command + + + Commands can be given on a command line, or can be given using an + interactive interpreter. + + Let's start with a simple example. We'll use command-line options to + run the echo command: + + sh> ./zdaemon -p 'echo hello world' fg + echo hello world + hello world + + + Here we used the -p option to specify a program to run. 
We can + specify a program name and command-line options in the program + command. Note, however, that the command-line parsing is pretty + primitive. Quotes and spaces aren't handled correctly. Let's look at + a slightly more complex example. We'll run the sleep command as a + daemon :) + + sh> ./zdaemon -p 'sleep 100' start + . . + daemon process started, pid=819 + + This ran the sleep daemon. We can check whether it ran with the + status command: + + sh> ./zdaemon -p 'sleep 100' status + program running; pid=819 + + We can stop it with the stop command: + + sh> ./zdaemon -p 'sleep 100' stop + . . + daemon process stopped + + sh> ./zdaemon -p 'sleep 100' status + daemon manager not running + Failed: 3 + + Normally, we control zdaemon using a configuration file. Let's create + a typical configuration file:: + + + program sleep 100 + + + .. -> text + + >>> with open('conf', 'w') as file: + ... _ = file.write(text) + + Now, we can run with the -C option to read the configuration file: + + sh> ./zdaemon -Cconf start + . . + daemon process started, pid=1136 + + If we list the directory: + + sh> ls + conf + zdaemon + zdsock + + We'll see that a file, zdsock, was created. This is a unix-domain + socket used internally by ZDaemon. We'll normally want to control + where this goes. + + sh> ./zdaemon -Cconf stop + . . + daemon process stopped + + Here's an updated configuration:: + + + program sleep 100 + socket-name /tmp/demo.zdsock + + + .. -> text + + >>> with open('conf', 'w') as file: + ... _ = file.write(text.replace('/tmp', tmpdir)) + + Now, when we run zdaemon: + + sh> ./zdaemon -Cconf start + . . + daemon process started, pid=1139 + + sh> ls + conf + zdaemon + + .. test + + >>> import os + >>> os.path.exists("/tmp/demo.zdsock".replace('/tmp', tmpdir)) + True + + The socket file is created in the given directory. + + sh> ./zdaemon -Cconf stop + . . + daemon process stopped + + In the example, we included a command-line argument in the program + option. 
We can also provide options on the command line:: + + + program sleep + socket-name /tmp/demo.zdsock + + + .. -> text + + >>> with open('conf', 'w') as file: + ... _ = file.write(text.replace('/tmp', tmpdir)) + + Then we can pass the program argument on the command line: + + sh> ./zdaemon -Cconf start 100 + . . + daemon process started, pid=1149 + + sh> ./zdaemon -Cconf status + program running; pid=1149 + + sh> ./zdaemon -Cconf stop + . . + daemon process stopped + + Environment Variables + ===================== + + Sometimes, it is necessary to set environment variables before running + a program. Perhaps the most common case for this is setting + LD_LIBRARY_PATH so that dynamically loaded libraries can be found. + + :: + + + program env + socket-name /tmp/demo.zdsock + + + LD_LIBRARY_PATH /home/foo/lib + HOME /home/foo + + + .. -> text + + >>> with open('conf', 'w') as file: + ... _ = file.write(text.replace('/tmp', tmpdir)) + + Now, when we run the command, we'll see out environment settings reflected: + + sh> ./zdaemon -Cconf fg + env + USER=jim + HOME=/home/foo + LOGNAME=jim + USERNAME=jim + TERM=dumb + PATH=/home/jim/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin + EMACS=t + LANG=en_US.UTF-8 + SHELL=/bin/bash + EDITOR=emacs + LD_LIBRARY_PATH=/home/foo/lib + + Transcript log + ============== + + When zdaemon run a program in daemon mode, it disconnects the + program's standard input, standard output, and standard error from the + controlling terminal. It can optionally redirect the output to + standard error and standard output to a file. This is done with the + transcript option. This is, of course, useful for logging output from + long-running applications. + + Let's look at an example. We'll have a long-running process that + simple tails a data file: + + >>> f = open('data', 'w', 1) + >>> import os + >>> _ = f.write('rec 1\n'); f.flush(); os.fsync(f.fileno()) + + Now, here's out zdaemon configuration:: + + + program tail -f data + transcript log + + + .. 
-> text + + >>> with open('conf', 'w') as file: + ... _ = file.write(text) + + Now we'll start: + + sh> ./zdaemon -Cconf start + . . + daemon process started, pid=7963 + + .. Wait a little bit to make sure tail has a chance to work + + >>> import time + >>> time.sleep(0.1) + + After waiting a bit, if we look at the log file, it contains the tail output: + + >>> with open('log') as file: + ... file.read() + 'rec 1\n' + + We can rotate the transcript log by renaming it and telling zdaemon to + reopen it: + + >>> import os + >>> os.rename('log', 'log.1') + + If we generate more output: + + >>> _ = f.write('rec 2\n'); f.flush(); os.fsync(f.fileno()) + + .. Wait a little bit to make sure tail has a chance to work + + >>> time.sleep(1) + + The output will appear in the old file, because zdaemon still has it + open: + + >>> with open('log.1') as file: + ... file.read() + 'rec 1\nrec 2\n' + + Now, if we tell zdaemon to reopen the file: + + sh> ./zdaemon -Cconf reopen_transcript + + and generate some output: + + >>> _ = f.write('rec 3\n'); f.flush(); os.fsync(f.fileno()) + + .. Wait a little bit to make sure tail has a chance to work + + >>> time.sleep(1) + + the output will show up in the new file, not the old: + + >>> with open('log') as file: + ... file.read() + 'rec 3\n' + + >>> with open('log.1') as file: + ... file.read() + 'rec 1\nrec 2\n' + + Close files and clean up: + + >>> f.close() + + sh> ./zdaemon -Cconf stop + . . + daemon process stopped + + + Start test program and timeout + ============================== + + Normally, zdaemon considers a process to have started when the process + itself has been created. A process may take a while before it is + truly up and running. For example, a database server or a web server + may take time before they're ready to accept requests. 
+ + You can optionally supply a test program, via the ``start-test-program`` + configuration option, that is called repeatedly until it returns a 0 + exit status or until a time limit, ``start-timeout``, has been reached. + + Reference Documentation + ======================= + + The following options are available for use in the runner section of + configuration files and as command-line options. + + program + Command-line option: -p or --program + + This option gives the command used to start the subprocess + managed by zdaemon. This is currently a simple list of + whitespace-delimited words. The first word is the program + file, subsequent words are its command line arguments. If the + program file contains no slashes, it is searched using $PATH. + (Note that there is no way to to include whitespace in the program + file or an argument, and under certain circumstances other + shell metacharacters are also a problem.) + + socket-name + Command-line option: -s or --socket-name. + + The pathname of the Unix domain socket used for communication + between the zdaemon command-line tool and a daemon-management + process. The default is relative to the current directory in + which zdaemon is started. You want to specify + an absolute pathname here. + + This defaults to "zdsock", which is created in the directory + in which zdrun is started. + + daemon + Command-line option: -d or --daemon. + + If this option is true, zdaemon runs in the background as a + true daemon. It forks a child process which becomes the + subprocess manager, while the parent exits (making the shell + that started it believe it is done). The child process also + does the following: + + - if the directory option is set, change into that directory + + - redirect stdin, stdout and stderr to /dev/null + + - call setsid() so it becomes a session leader + + - call umask() with specified value + + The default for this option is on by default. The + command-line option therefore has no effect. 
To disable + daemon mode, you must use a configuration file:: + + + program sleep 1 + daemon off + + + directory + Command-line option: -z or --directory. + + If the daemon option is true (default), this option can + specify a directory into which zdrun.py changes as part of the + "daemonizing". If the daemon option is false, this option is + ignored. + + backoff-limit + Command-line option: -b or --backoff-limit. + + When the subprocess crashes, zdaemon inserts a one-second + delay before it restarts it. When the subprocess crashes + again right away, the delay is incremented by one second, and + so on. What happens when the delay has reached the value of + backoff-limit (in seconds), depends on the value of the + forever option. If forever is false, zdaemon gives up at + this point, and exits. An always-crashing subprocess will + have been restarted exactly backoff-limit times in this case. + If forever is true, zdaemon continues to attempt to restart + the process, keeping the delay at backoff-limit seconds. + + If the subprocess stays up for more than backoff-limit + seconds, the delay is reset to 1 second. + + This defaults to 10. + + forever + Command-line option: -f or --forever. + + If this option is true, zdaemon will keep restarting a + crashing subprocess forever. If it is false, it will give up + after backoff-limit crashes in a row. See the description of + backoff-limit for details. + + This is disabled by default. + + exit-codes + Command-line option: -x or --exit-codes. + + This defaults to 0,2. + + If the subprocess exits with an exit status that is equal to + one of the integers in this list, zdaemon will not restart + it. The default list requires some explanation. Exit status + 0 is considered a willful successful exit; the ZEO and Zope + server processes use this exit status when they want to stop + without being restarted. (Including in response to a + SIGTERM.) 
Exit status 2 is typically issued for command line + syntax errors; in this case, restarting the program will not + help! + + NOTE: this mechanism overrides the backoff-limit and forever + options; i.e. even if forever is true, a subprocess exit + status code in this list makes zdaemon give up. To disable + this, change the value to an empty list. + + start-test-program + A command that tests whether the program is up and running. + The command should exit with a zero exit statis if the program + is running and with a non-zero status otherwise. + + start-timeout + Command-line option: -T or --start-timeout. + + If the program takes more than ``start-timeout`` seconds to + start, then an error is printed and the control script will + exit with a non-zero exit status. + + stop-timeout + This defaults to 300 seconds (5 minutes). + + When a stop command is issued, a SIGTERM signal is sent to the + process. zdaemon waits for stop-timeout seconds for the + process to gracefully exit. If the process doesn't exit in + that time, a SIGKILL signal is sent. + + user + Command-line option: -u or --user. + + When zdaemon is started by root, this option specifies the + user as who the the zdaemon process (and hence the daemon + subprocess) will run. This can be a user name or a numeric + user id. Both the user and the group are set from the + corresponding password entry, using setuid() and setgid(). + This is done before zdaemon does anything else besides + parsing its command line arguments. + + NOTE: when zdaemon is not started by root, specifying this + option is an error. (XXX This may be a mistake.) + + XXX The zdaemon event log file may be opened *before* + setuid() is called. Is this good or bad? + + umask + Command-line option: -m or --umask. + + When daemon mode is used, this option specifies the octal umask + of the subprocess. 
+ + default-to-interactive + If this option is true, zdaemon enters interactive mode + when it is invoked without a positional command argument. If + it is false, you must use the -i or --interactive command line + option to zdaemon to enter interactive mode. + + This is enabled by default. + + logfile + Command-line option: -l or --logfile. + + This option specifies a log file that is the default target of + the "logtail" zdaemon command. + + NOTE: This is NOT the log file to which zdaemon writes its + logging messages! That log file is specified by the + section described below. + + transcript + Command-line option: -t or --transcript. + + The name of a file in which a transcript of all output from + the command being run will be written to when daemonized. + + If not specified, output from the command will be discarded. + + This only takes effect when the "daemon" option is enabled. + + prompt + The prompt shown by the controller program. The default must + be provided by the application. + + (Note that a few other options are available to support old + configuration files, but aren't needed any more and can generally be + ignored.) + + In addition to the runner section, you can use an eventlog section + that specified one or more logfile subsections:: + + + + path /var/log/foo/foo.log + + + + path STDOUT + + + + In this example, log output is sent to a file and to standard out. + Log output from zdaemon usually isn't very interesting but can be + handy for debugging. + + ========== + Change log + ========== + + 4.2.0 (2016-12-07) + ================== + + - Add support for Python 3.5. + + - Drop support for Python 2.6 and 3.2. + + + 4.1.0 (2015-04-16) + ================== + + - Add ``--version`` command line option (fixes + https://github.com/zopefoundation/zdaemon/issues/4). + + - ``kill`` now accepts signal names, not just numbers + (https://github.com/zopefoundation/zdaemon/issues/11). 
+ + - Restore ``logreopen`` as an alias for ``kill USR2`` (removed in version + 3.0.0 due to lack of tests): + https://github.com/zopefoundation/zdaemon/issues/10. + + - Make ``logreopen`` also reopen the transcript log: + https://github.com/zopefoundation/zdaemon/issues/9. + + - Reopen event log on ``logreopen`` or ``reopen_transcript``: + https://github.com/zopefoundation/zdaemon/issues/8. + + - Help message for ``reopen_transcript`` + (https://github.com/zopefoundation/zdaemon/issues/5). + + - Fix race condition where ``stop`` would be ignored if the daemon + manager was waiting before respawning a crashed program. + https://github.com/zopefoundation/zdaemon/issues/13. + + - Partially fix delayed deadlock when the transcript file runs into a + full disk (https://github.com/zopefoundation/zdaemon/issues/1). + + - Fix test suite leaving stale processes behind + (https://github.com/zopefoundation/zdaemon/issues/7). + + + 4.0.1 (2014-12-26) + ================== + + - Add support for PyPy. (PyPy3 is pending release of a fix for: + https://bitbucket.org/pypy/pypy/issue/1946) + + - Add support for Python 3.4. + + - Add ``-t/--transcript`` command line option. + + - zdaemon can now be invoked as a module as in ``python -m zdaemon ...`` + + 4.0.0 (2013-05-10) + ================== + + - Add support for Python 3.2. + + 4.0.0a1 (2013-02-15) + ==================== + + - Add tox support and MANIFEST.in for proper releasing. + + - Add Python 3.3 support. + + - Drop Python 2.4 and 2.5 support. + + 3.0.5 (2012-11-27) + ================== + + - Fixed: the status command didn't return a non-zero exit status when + the program wasn't running. This made it impossible for other + software (e.g. Puppet) to tell if a process was running. + + 3.0.4 (2012-07-30) + ================== + + - Fixed: The start command exited with a zero exit status even when + the program being started failed to start (or exited imediately). 
+ + 3.0.3 (2012-07-10) + ================== + + - Fixed: programs started with zdaemon couldn't, themselves, invoke + zdaemon. + + 3.0.2 (2012-07-10) + ================== + + Fail :( + + 3.0.1 (2012-06-08) + ================== + + - Fixed: + + The change in 2.0.6 to set a user's supplemental groups broke common + configurations in which the effective user was set via ``su`` or + ``sudo -u`` prior to invoking zdaemon. + + Now, zdaemon doesn't set groups or the effective user if the + effective user is already set to the configured user. + + 3.0.0 (2012-06-08) + ================== + + - Added an option, ``start-test-program`` to supply a test command to + test whether the program managed by zdaemon is up and operational, + rather than just running. When starting a program, the start + command doesn't return until the test passes. You could, for + example, use this to wait until a web server is actually accepting + connections. + + - Added a ``start-timeout`` option to error if a program takes too long to + start. This is especially useful in combination with the + ``start-test-program`` option. + + - Added an option, stop-timeout, to control how long to wait + for a graceful shutdown. + + Previously, this was controlled by backoff-limit, which didn't make + much sense. + + - Several undocumented, untested, and presumably unused features were removed. + + 2.0.6 (2012-06-07) + ================== + + - Fixed: When the ``user`` option was used to run as a particular + user, supplemental groups weren't set to the user's supplemental + groups. + + 2.0.5 (2012-06-07) + ================== + + (Accidental release. Please ignore.) + + 2.0.4 (2009-04-20) + ================== + + - Version 2.0.3 broke support for relative paths to the socket (``-s`` + option and ``socket-name`` parameter), now relative paths work again + as in version 2.0.2. + + - Fixed change log format, made table of contents nicer. + + - Fixed author's email address. + + - Removed zpkg stuff. 
+ + + 2.0.3 (2009-04-11) + ================== + + - Added support to bootstrap on Jython. + + - If the run directory does not exist it will be created. This allow to use + `/var/run/mydaemon` as run directory when /var/run is a tmpfs (LP #318118). + + Bugs Fixed + ---------- + + - No longer uses a hard-coded file name (/tmp/demo.zdsock) in unit tests. + This lets you run the tests on Python 2.4 and 2.5 simultaneously without + spurious errors. + + - make -h work again for both runner and control scripts. + Help is now taken from the __doc__ of the options class users by + the zdaemon script being run. + + 2.0.2 (2008-04-05) + ================== + + Bugs Fixed + ---------- + + - Fixed backwards incompatible change in handling of environment option. + + 2.0.1 (2007-10-31) + ================== + + Bugs Fixed + ---------- + + - Fixed test renormalizer that did not work in certain cases where the + environment was complex. + + 2.0.0 (2007-07-19) + ================== + + - Final release for 2.0.0. + + 2.0a6 (2007-01-11) + ================== + + Bugs Fixed + ---------- + + - When the user option was used, it only affected running the daemon. + + 2.0a3, 2.0a4, 2.0a5 (2007-01-10) + ================================ + + Bugs Fixed + ---------- + + - The new (2.0) mechanism used by zdaemon to start the daemon manager + broke some applications that extended zdaemon. + + - Added extra checks to deal with programs that extend zdaemon + and copy the schema and thus don't see updates to the ZConfig schema. + + 2.0a2 (2007-01-10) + ================== + + New Features + ------------ + + - Added support for setting environment variables in the configuration + file. This is useful when zdaemon is used to run programs that need + environment variables set (e.g. LD_LIBRARY_PATH). + + - Added a command to rotate the transcript log. 
+ + 2.0a1 (2006-12-21) + ================== + + Bugs Fixed + ---------- + + - In non-daemon mode, start hung, producing annoying dots + when the program exited. + + - The start command hung producing annoying dots if the daemon failed + to start. + + - foreground and start had different semantics because one used + os.system and another used os.spawn + + New Features + ------------ + + - Documentation + + - Command-line arguments can now be supplied to the start and + foreground (fg) commands + + - zdctl now invokes itself to run zdrun. This means that it's + no-longer necessary to generate a separate zdrun script. This + especially when the magic techniques to find and run zdrun using + directory sniffing fail to set the path correctly. + + - The daemon mode is now enabled by default. To get non-daemon mode, + you have to use a configuration file and set daemon to off + there. The old -d option is kept for backward compatibility, but is + a no-op. + + 1.4a1 (2005-11-21) + ================== + + - Fixed a bug in the distribution setup file. + + 1.4a1 (2005-11-05) + ================== + + - First semi-formal release. + + After some unknown release(???) + =============================== + + - Made 'zdaemon.zdoptions' not fail for --help when __main__.__doc__ + is None. + + After 1.1 + ========= + + - Updated test 'testRunIgnoresParentSignals': + + o Use 'mkdtemp' to create a temporary directory to hold the test socket + rather than creating the test socket in the test directory. + Hopefully this will be more robust. Sometimes the test directory + has a path so long that the test socket can't be created. + + o Changed management of 'donothing.sh'. This script is now created by + the test in the temporarily directory with the necessary + permissions. This is to avoids possible mangling of permissions + leading to spurious test failures. It also avoids management of a + file in the source tree, which is a bonus. 
+ + - Rearranged source tree to conform to more usual zpkg-based layout: + + o Python package lives under 'src'. + + o Dependencies added to 'src' as 'svn:externals'. + + o Unit tests can now be run from a checkout. + + - Made umask-based test failures due to running as root emit a more + forceful warning. + + 1.1 (2005-06-09) + ================ + + - SVN tag: svn://svn.zope.org/repos/main/zdaemon/tags/zdaemon-1.1 + + - Tagged to make better 'svn:externals' linkage possible. + + To-Dos + ====== + + More docs: + + - Document/demonstrate some important features, such as: + + - working directory + + Bugs: + + - help command + +Platform: UNKNOWN +Classifier: Intended Audience :: Developers +Classifier: Intended Audience :: System Administrators +Classifier: License :: OSI Approved :: Zope Public License +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Operating System :: POSIX +Classifier: Topic :: Utilities +Provides-Extra: test diff --git a/thesisenv/lib/python3.6/site-packages/zdaemon-4.2.0-py3.6.egg-info/SOURCES.txt b/thesisenv/lib/python3.6/site-packages/zdaemon-4.2.0-py3.6.egg-info/SOURCES.txt new file mode 100644 index 0000000..9e7d661 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zdaemon-4.2.0-py3.6.egg-info/SOURCES.txt @@ -0,0 +1,37 @@ +.coveragerc +CHANGES.rst +COPYRIGHT.txt +LICENSE.txt +MANIFEST.in +README.rst +bootstrap.py +buildout.cfg +pip-delete-this-directory.txt +setup.cfg +setup.py +tox.ini +src/zdaemon/README.rst +src/zdaemon/__init__.py +src/zdaemon/__main__.py +src/zdaemon/component.xml 
+src/zdaemon/sample.conf +src/zdaemon/schema.xml +src/zdaemon/zdctl.py +src/zdaemon/zdoptions.py +src/zdaemon/zdrun.py +src/zdaemon.egg-info/PKG-INFO +src/zdaemon.egg-info/SOURCES.txt +src/zdaemon.egg-info/dependency_links.txt +src/zdaemon.egg-info/entry_points.txt +src/zdaemon.egg-info/not-zip-safe +src/zdaemon.egg-info/pbr.json +src/zdaemon.egg-info/requires.txt +src/zdaemon.egg-info/top_level.txt +src/zdaemon/tests/__init__.py +src/zdaemon/tests/nokill.py +src/zdaemon/tests/parent.py +src/zdaemon/tests/tests.py +src/zdaemon/tests/testuser.py +src/zdaemon/tests/testzdctl.py +src/zdaemon/tests/testzdoptions.py +src/zdaemon/tests/testzdrun.py \ No newline at end of file diff --git a/thesisenv/lib/python3.6/site-packages/zdaemon-4.2.0-py3.6.egg-info/dependency_links.txt b/thesisenv/lib/python3.6/site-packages/zdaemon-4.2.0-py3.6.egg-info/dependency_links.txt new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zdaemon-4.2.0-py3.6.egg-info/dependency_links.txt @@ -0,0 +1 @@ + diff --git a/thesisenv/lib/python3.6/site-packages/zdaemon-4.2.0-py3.6.egg-info/entry_points.txt b/thesisenv/lib/python3.6/site-packages/zdaemon-4.2.0-py3.6.egg-info/entry_points.txt new file mode 100644 index 0000000..bca108a --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zdaemon-4.2.0-py3.6.egg-info/entry_points.txt @@ -0,0 +1,3 @@ + +[console_scripts] +zdaemon = zdaemon.zdctl:main diff --git a/thesisenv/lib/python3.6/site-packages/zdaemon-4.2.0-py3.6.egg-info/installed-files.txt b/thesisenv/lib/python3.6/site-packages/zdaemon-4.2.0-py3.6.egg-info/installed-files.txt new file mode 100644 index 0000000..ac0c33e --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zdaemon-4.2.0-py3.6.egg-info/installed-files.txt @@ -0,0 +1,39 @@ +../../../../bin/zdaemon +../zdaemon/README.rst +../zdaemon/__init__.py +../zdaemon/__main__.py +../zdaemon/__pycache__/__init__.cpython-36.pyc +../zdaemon/__pycache__/__main__.cpython-36.pyc 
+../zdaemon/__pycache__/zdctl.cpython-36.pyc +../zdaemon/__pycache__/zdoptions.cpython-36.pyc +../zdaemon/__pycache__/zdrun.cpython-36.pyc +../zdaemon/component.xml +../zdaemon/sample.conf +../zdaemon/schema.xml +../zdaemon/tests/__init__.py +../zdaemon/tests/__pycache__/__init__.cpython-36.pyc +../zdaemon/tests/__pycache__/nokill.cpython-36.pyc +../zdaemon/tests/__pycache__/parent.cpython-36.pyc +../zdaemon/tests/__pycache__/tests.cpython-36.pyc +../zdaemon/tests/__pycache__/testuser.cpython-36.pyc +../zdaemon/tests/__pycache__/testzdctl.cpython-36.pyc +../zdaemon/tests/__pycache__/testzdoptions.cpython-36.pyc +../zdaemon/tests/__pycache__/testzdrun.cpython-36.pyc +../zdaemon/tests/nokill.py +../zdaemon/tests/parent.py +../zdaemon/tests/tests.py +../zdaemon/tests/testuser.py +../zdaemon/tests/testzdctl.py +../zdaemon/tests/testzdoptions.py +../zdaemon/tests/testzdrun.py +../zdaemon/zdctl.py +../zdaemon/zdoptions.py +../zdaemon/zdrun.py +PKG-INFO +SOURCES.txt +dependency_links.txt +entry_points.txt +not-zip-safe +pbr.json +requires.txt +top_level.txt diff --git a/thesisenv/lib/python3.6/site-packages/zdaemon-4.2.0-py3.6.egg-info/not-zip-safe b/thesisenv/lib/python3.6/site-packages/zdaemon-4.2.0-py3.6.egg-info/not-zip-safe new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zdaemon-4.2.0-py3.6.egg-info/not-zip-safe @@ -0,0 +1 @@ + diff --git a/thesisenv/lib/python3.6/site-packages/zdaemon-4.2.0-py3.6.egg-info/pbr.json b/thesisenv/lib/python3.6/site-packages/zdaemon-4.2.0-py3.6.egg-info/pbr.json new file mode 100644 index 0000000..cc3a3f7 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zdaemon-4.2.0-py3.6.egg-info/pbr.json @@ -0,0 +1 @@ +{"is_release": true, "git_version": "88f559b"} \ No newline at end of file diff --git a/thesisenv/lib/python3.6/site-packages/zdaemon-4.2.0-py3.6.egg-info/requires.txt b/thesisenv/lib/python3.6/site-packages/zdaemon-4.2.0-py3.6.egg-info/requires.txt new file mode 100644 
index 0000000..3205176 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zdaemon-4.2.0-py3.6.egg-info/requires.txt @@ -0,0 +1,9 @@ +ZConfig +setuptools + +[test] +zope.testing +zope.testrunner +manuel +mock +zc.customdoctests diff --git a/thesisenv/lib/python3.6/site-packages/zdaemon-4.2.0-py3.6.egg-info/top_level.txt b/thesisenv/lib/python3.6/site-packages/zdaemon-4.2.0-py3.6.egg-info/top_level.txt new file mode 100644 index 0000000..efd190d --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zdaemon-4.2.0-py3.6.egg-info/top_level.txt @@ -0,0 +1 @@ +zdaemon diff --git a/thesisenv/lib/python3.6/site-packages/zdaemon/README.rst b/thesisenv/lib/python3.6/site-packages/zdaemon/README.rst new file mode 100644 index 0000000..bbbc093 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zdaemon/README.rst @@ -0,0 +1,526 @@ +=============== + Using zdaemon +=============== + +zdaemon provides a script, zdaemon, that can be used to run other +programs as POSIX (Unix) daemons. (Of course, it is only usable on +POSIX-complient systems.) + +Using zdaemon requires specifying a number of options, which can be +given in a configuration file, or as command-line options. It also +accepts commands teling it what do do. The commands are: + +start + Start a process as a daemon + +stop + Stop a running daemon process + +restart + Stop and then restart a program + +status + Find out if the program is running + +foreground or fg + Run a program + +kill signal + Send a signal to the daemon process + +reopen_transcript + Reopen the transcript log. See the discussion of the transcript + log below. + +help command + Get help on a command + + +Commands can be given on a command line, or can be given using an +interactive interpreter. + +Let's start with a simple example. We'll use command-line options to +run the echo command: + + sh> ./zdaemon -p 'echo hello world' fg + echo hello world + hello world + + +Here we used the -p option to specify a program to run. 
We can +specify a program name and command-line options in the program +command. Note, however, that the command-line parsing is pretty +primitive. Quotes and spaces aren't handled correctly. Let's look at +a slightly more complex example. We'll run the sleep command as a +daemon :) + + sh> ./zdaemon -p 'sleep 100' start + . . + daemon process started, pid=819 + +This ran the sleep daemon. We can check whether it ran with the +status command: + + sh> ./zdaemon -p 'sleep 100' status + program running; pid=819 + +We can stop it with the stop command: + + sh> ./zdaemon -p 'sleep 100' stop + . . + daemon process stopped + + sh> ./zdaemon -p 'sleep 100' status + daemon manager not running + Failed: 3 + +Normally, we control zdaemon using a configuration file. Let's create +a typical configuration file:: + + + program sleep 100 + + +.. -> text + + >>> with open('conf', 'w') as file: + ... _ = file.write(text) + +Now, we can run with the -C option to read the configuration file: + + sh> ./zdaemon -Cconf start + . . + daemon process started, pid=1136 + +If we list the directory: + + sh> ls + conf + zdaemon + zdsock + +We'll see that a file, zdsock, was created. This is a unix-domain +socket used internally by ZDaemon. We'll normally want to control +where this goes. + + sh> ./zdaemon -Cconf stop + . . + daemon process stopped + +Here's an updated configuration:: + + + program sleep 100 + socket-name /tmp/demo.zdsock + + +.. -> text + + >>> with open('conf', 'w') as file: + ... _ = file.write(text.replace('/tmp', tmpdir)) + +Now, when we run zdaemon: + + sh> ./zdaemon -Cconf start + . . + daemon process started, pid=1139 + + sh> ls + conf + zdaemon + +.. test + + >>> import os + >>> os.path.exists("/tmp/demo.zdsock".replace('/tmp', tmpdir)) + True + +The socket file is created in the given directory. + + sh> ./zdaemon -Cconf stop + . . + daemon process stopped + +In the example, we included a command-line argument in the program +option. 
We can also provide options on the command line:: + + + program sleep + socket-name /tmp/demo.zdsock + + +.. -> text + + >>> with open('conf', 'w') as file: + ... _ = file.write(text.replace('/tmp', tmpdir)) + +Then we can pass the program argument on the command line: + + sh> ./zdaemon -Cconf start 100 + . . + daemon process started, pid=1149 + + sh> ./zdaemon -Cconf status + program running; pid=1149 + + sh> ./zdaemon -Cconf stop + . . + daemon process stopped + +Environment Variables +===================== + +Sometimes, it is necessary to set environment variables before running +a program. Perhaps the most common case for this is setting +LD_LIBRARY_PATH so that dynamically loaded libraries can be found. + +:: + + + program env + socket-name /tmp/demo.zdsock + + + LD_LIBRARY_PATH /home/foo/lib + HOME /home/foo + + +.. -> text + + >>> with open('conf', 'w') as file: + ... _ = file.write(text.replace('/tmp', tmpdir)) + +Now, when we run the command, we'll see out environment settings reflected: + + sh> ./zdaemon -Cconf fg + env + USER=jim + HOME=/home/foo + LOGNAME=jim + USERNAME=jim + TERM=dumb + PATH=/home/jim/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin + EMACS=t + LANG=en_US.UTF-8 + SHELL=/bin/bash + EDITOR=emacs + LD_LIBRARY_PATH=/home/foo/lib + +Transcript log +============== + +When zdaemon run a program in daemon mode, it disconnects the +program's standard input, standard output, and standard error from the +controlling terminal. It can optionally redirect the output to +standard error and standard output to a file. This is done with the +transcript option. This is, of course, useful for logging output from +long-running applications. + +Let's look at an example. We'll have a long-running process that +simple tails a data file: + + >>> f = open('data', 'w', 1) + >>> import os + >>> _ = f.write('rec 1\n'); f.flush(); os.fsync(f.fileno()) + +Now, here's out zdaemon configuration:: + + + program tail -f data + transcript log + + +.. 
-> text + + >>> with open('conf', 'w') as file: + ... _ = file.write(text) + +Now we'll start: + + sh> ./zdaemon -Cconf start + . . + daemon process started, pid=7963 + +.. Wait a little bit to make sure tail has a chance to work + + >>> import time + >>> time.sleep(0.1) + +After waiting a bit, if we look at the log file, it contains the tail output: + + >>> with open('log') as file: + ... file.read() + 'rec 1\n' + +We can rotate the transcript log by renaming it and telling zdaemon to +reopen it: + + >>> import os + >>> os.rename('log', 'log.1') + +If we generate more output: + + >>> _ = f.write('rec 2\n'); f.flush(); os.fsync(f.fileno()) + +.. Wait a little bit to make sure tail has a chance to work + + >>> time.sleep(1) + +The output will appear in the old file, because zdaemon still has it +open: + + >>> with open('log.1') as file: + ... file.read() + 'rec 1\nrec 2\n' + +Now, if we tell zdaemon to reopen the file: + + sh> ./zdaemon -Cconf reopen_transcript + +and generate some output: + + >>> _ = f.write('rec 3\n'); f.flush(); os.fsync(f.fileno()) + +.. Wait a little bit to make sure tail has a chance to work + + >>> time.sleep(1) + +the output will show up in the new file, not the old: + + >>> with open('log') as file: + ... file.read() + 'rec 3\n' + + >>> with open('log.1') as file: + ... file.read() + 'rec 1\nrec 2\n' + +Close files and clean up: + + >>> f.close() + + sh> ./zdaemon -Cconf stop + . . + daemon process stopped + + +Start test program and timeout +============================== + +Normally, zdaemon considers a process to have started when the process +itself has been created. A process may take a while before it is +truly up and running. For example, a database server or a web server +may take time before they're ready to accept requests. 
+ +You can optionally supply a test program, via the ``start-test-program`` +configuration option, that is called repeatedly until it returns a 0 +exit status or until a time limit, ``start-timeout``, has been reached. + +Reference Documentation +======================= + +The following options are available for use in the runner section of +configuration files and as command-line options. + +program + Command-line option: -p or --program + + This option gives the command used to start the subprocess + managed by zdaemon. This is currently a simple list of + whitespace-delimited words. The first word is the program + file, subsequent words are its command line arguments. If the + program file contains no slashes, it is searched using $PATH. + (Note that there is no way to to include whitespace in the program + file or an argument, and under certain circumstances other + shell metacharacters are also a problem.) + +socket-name + Command-line option: -s or --socket-name. + + The pathname of the Unix domain socket used for communication + between the zdaemon command-line tool and a daemon-management + process. The default is relative to the current directory in + which zdaemon is started. You want to specify + an absolute pathname here. + + This defaults to "zdsock", which is created in the directory + in which zdrun is started. + +daemon + Command-line option: -d or --daemon. + + If this option is true, zdaemon runs in the background as a + true daemon. It forks a child process which becomes the + subprocess manager, while the parent exits (making the shell + that started it believe it is done). The child process also + does the following: + + - if the directory option is set, change into that directory + + - redirect stdin, stdout and stderr to /dev/null + + - call setsid() so it becomes a session leader + + - call umask() with specified value + + The default for this option is on by default. The + command-line option therefore has no effect. 
To disable + daemon mode, you must use a configuration file:: + + + program sleep 1 + daemon off + + +directory + Command-line option: -z or --directory. + + If the daemon option is true (default), this option can + specify a directory into which zdrun.py changes as part of the + "daemonizing". If the daemon option is false, this option is + ignored. + +backoff-limit + Command-line option: -b or --backoff-limit. + + When the subprocess crashes, zdaemon inserts a one-second + delay before it restarts it. When the subprocess crashes + again right away, the delay is incremented by one second, and + so on. What happens when the delay has reached the value of + backoff-limit (in seconds), depends on the value of the + forever option. If forever is false, zdaemon gives up at + this point, and exits. An always-crashing subprocess will + have been restarted exactly backoff-limit times in this case. + If forever is true, zdaemon continues to attempt to restart + the process, keeping the delay at backoff-limit seconds. + + If the subprocess stays up for more than backoff-limit + seconds, the delay is reset to 1 second. + + This defaults to 10. + +forever + Command-line option: -f or --forever. + + If this option is true, zdaemon will keep restarting a + crashing subprocess forever. If it is false, it will give up + after backoff-limit crashes in a row. See the description of + backoff-limit for details. + + This is disabled by default. + +exit-codes + Command-line option: -x or --exit-codes. + + This defaults to 0,2. + + If the subprocess exits with an exit status that is equal to + one of the integers in this list, zdaemon will not restart + it. The default list requires some explanation. Exit status + 0 is considered a willful successful exit; the ZEO and Zope + server processes use this exit status when they want to stop + without being restarted. (Including in response to a + SIGTERM.) 
Exit status 2 is typically issued for command line + syntax errors; in this case, restarting the program will not + help! + + NOTE: this mechanism overrides the backoff-limit and forever + options; i.e. even if forever is true, a subprocess exit + status code in this list makes zdaemon give up. To disable + this, change the value to an empty list. + +start-test-program + A command that tests whether the program is up and running. + The command should exit with a zero exit statis if the program + is running and with a non-zero status otherwise. + +start-timeout + Command-line option: -T or --start-timeout. + + If the program takes more than ``start-timeout`` seconds to + start, then an error is printed and the control script will + exit with a non-zero exit status. + +stop-timeout + This defaults to 300 seconds (5 minutes). + + When a stop command is issued, a SIGTERM signal is sent to the + process. zdaemon waits for stop-timeout seconds for the + process to gracefully exit. If the process doesn't exit in + that time, a SIGKILL signal is sent. + +user + Command-line option: -u or --user. + + When zdaemon is started by root, this option specifies the + user as who the the zdaemon process (and hence the daemon + subprocess) will run. This can be a user name or a numeric + user id. Both the user and the group are set from the + corresponding password entry, using setuid() and setgid(). + This is done before zdaemon does anything else besides + parsing its command line arguments. + + NOTE: when zdaemon is not started by root, specifying this + option is an error. (XXX This may be a mistake.) + + XXX The zdaemon event log file may be opened *before* + setuid() is called. Is this good or bad? + +umask + Command-line option: -m or --umask. + + When daemon mode is used, this option specifies the octal umask + of the subprocess. + +default-to-interactive + If this option is true, zdaemon enters interactive mode + when it is invoked without a positional command argument. 
If + it is false, you must use the -i or --interactive command line + option to zdaemon to enter interactive mode. + + This is enabled by default. + +logfile + Command-line option: -l or --logfile. + + This option specifies a log file that is the default target of + the "logtail" zdaemon command. + + NOTE: This is NOT the log file to which zdaemon writes its + logging messages! That log file is specified by the + section described below. + +transcript + Command-line option: -t or --transcript. + + The name of a file in which a transcript of all output from + the command being run will be written to when daemonized. + + If not specified, output from the command will be discarded. + + This only takes effect when the "daemon" option is enabled. + +prompt + The prompt shown by the controller program. The default must + be provided by the application. + +(Note that a few other options are available to support old +configuration files, but aren't needed any more and can generally be +ignored.) + +In addition to the runner section, you can use an eventlog section +that specified one or more logfile subsections:: + + + + path /var/log/foo/foo.log + + + + path STDOUT + + + +In this example, log output is sent to a file and to standard out. +Log output from zdaemon usually isn't very interesting but can be +handy for debugging. diff --git a/thesisenv/lib/python3.6/site-packages/zdaemon/__init__.py b/thesisenv/lib/python3.6/site-packages/zdaemon/__init__.py new file mode 100644 index 0000000..734b7b4 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zdaemon/__init__.py @@ -0,0 +1,14 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""zdaemon -- a package to manage a daemon application.""" diff --git a/thesisenv/lib/python3.6/site-packages/zdaemon/__main__.py b/thesisenv/lib/python3.6/site-packages/zdaemon/__main__.py new file mode 100644 index 0000000..31967ad --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zdaemon/__main__.py @@ -0,0 +1,2 @@ +from zdaemon.zdctl import main +main() diff --git a/thesisenv/lib/python3.6/site-packages/zdaemon/component.xml b/thesisenv/lib/python3.6/site-packages/zdaemon/component.xml new file mode 100644 index 0000000..903394a --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zdaemon/component.xml @@ -0,0 +1,328 @@ + + + + + + + + This section describes the options for zdctl.py and zdrun.py. + The only required option is "program". Many other options have + no default value specified in the schema; in some cases, the + program calculates a dynamic default, in others, the feature + associated with the option is disabled. + + For those options that also have corresponding command-line + options, the command line option (short and long form) are given + here too. + + +
    + + Log configuration for zdctl.py and zdrun.py. These + applications will normally use the eventlog section at the top + level of the configuration, but will use this eventlog section + if it exists. + + (This is done so that the combined schema for the runner and + the controlled application will write to the same logs by + default, but a separation of logs can be achieved if desired.) + +
    + + + + Command-line option: -p or --program (zdctl.py only). + + This option gives the command used to start the subprocess + managed by zdrun.py. This is currently a simple list of + whitespace-delimited words. The first word is the program + file, subsequent words are its command line arguments. If the + program file contains no slashes, it is searched using $PATH. + (XXX There is no way to to include whitespace in the program + file or an argument, and under certain circumstances other + shell metacharacters are also a problem, e.g. the "foreground" + command of zdctl.py.) + + NOTE: zdrun.py doesn't use this option; it uses its positional + arguments. Rather, zdctl.py uses this option to determine the + positional argument with which to invoke zdrun.py. (XXX This + could be better.) + + + + + + Path to the Python interpreter. Used by zdctl.py to start the + zdrun.py process. Defaults to sys.executable. + + + + + + Path to the zdrun.py script. Used by zdctl.py to start the + zdrun.py process. Defaults to a file named "zdrun.py" in the + same directory as zdctl.py. + + + + + + Command-line option: -s or --socket-name. + + The pathname of the Unix domain socket used for communication + between zdctl.py and zdrun.py. The default is relative to the + current directory in which zdctl.py and zdrun.py are started. + You want to specify an absolute pathname here. + + + + + + Command-line option: -d or --daemon. + + If this option is true, zdrun.py runs in the background as a + true daemon. It forks a child process which becomes the + subprocess manager, while the parent exits (making the shell + that started it believe it is done). The child process also + does the following: + + - if the directory option is set, change into that directory + + - redirect stdin, stdout and stderr to /dev/null + + - call setsid() so it becomes a session leader + + - call umask() with specified value + + + + + + Command-line option: -z or --directory. 
+ + If the daemon option is true, this option can specify a + directory into which zdrun.py changes as part of the + "daemonizing". If the daemon option is false, this option is + ignored. + + + + + + Command-line option: -b or --backoff-limit. + + When the subprocess crashes, zdrun.py inserts a one-second + delay before it restarts it. When the subprocess crashes + again right away, the delay is incremented by one second, and + so on. What happens when the delay has reached the value of + backoff-limit (in seconds), depends on the value of the + forever option. If forever is false, zdrun.py gives up at + this point, and exits. An always-crashing subprocess will + have been restarted exactly backoff-limit times in this case. + If forever is true, zdrun.py continues to attempt to restart + the process, keeping the delay at backoff-limit seconds. + + If the subprocess stays up for more than backoff-limit + seconds, the delay is reset to 1 second. + + + + + + Command-line option: -f or --forever. + + If this option is true, zdrun.py will keep restarting a + crashing subprocess forever. If it is false, it will give up + after backoff-limit crashes in a row. See the description of + backoff-limit for details. + + + + + + Command-line option: -x or --exit-codes. + + If the subprocess exits with an exit status that is equal to + one of the integers in this list, zdrun.py will not restart + it. The default list requires some explanation. Exit status + 0 is considered a willful successful exit; the ZEO and Zope + server processes use this exit status when they want to stop + without being restarted. (Including in response to a + SIGTERM.) Exit status 2 is typically issued for command line + syntax errors; in this case, restarting the program will not + help! + + NOTE: this mechanism overrides the backoff-limit and forever + options; i.e. even if forever is true, a subprocess exit + status code in this list makes zdrun.py give up. 
To disable + this, change the value to an empty list. + + + + + + Command-line option: -p or --program (zdctl.py only). + + This option gives the command used to start the subprocess + managed by zdrun.py. This is currently a simple list of + whitespace-delimited words. The first word is the program + file, subsequent words are its command line arguments. If the + program file contains no slashes, it is searched using $PATH. + (XXX There is no way to to include whitespace in the program + file or an argument, and under certain circumstances other + shell metacharacters are also a problem, e.g. the "foreground" + command of zdctl.py.) + + NOTE: zdrun.py doesn't use this option; it uses its positional + arguments. Rather, zdctl.py uses this option to determine the + positional argument with which to invoke zdrun.py. (XXX This + could be better.) + + + + + + When a start-test-program is supplied, a process won't be + considered to be started until the test program exits normally + or until start-timout seconds have passed. + + This defaults to 300 seconds (5 minutes). + + + + + + When a stop command is issued, a SIGTERM signal is sent to the + process. zdaemon waits for stop-timeout seconds for the + process to gracefully exit. If the process doesn't exit in + that time, a SIGKILL signal is sent. + + This defaults to 300 seconds (5 minutes). + + + + + + Command-line option: -u or --user. + + When zdrun.py is started by root, this option specifies the + user as who the the zdrun.py process (and hence the daemon + subprocess) will run. This can be a user name or a numeric + user id. Both the user and the group are set from the + corresponding password entry, using setuid() and setgid(). + This is done before zdrun.py does anything else besides + parsing its command line arguments. + + NOTE: when zdrun.py is not started by root, specifying this + option is an error. (XXX This may be a mistake.) 
+ + XXX The zdrun.py event log file may be opened *before* + setuid() is called. Is this good or bad? + + + + + + Command-line option: -m or --umask. + + When daemon mode is used, this option specifies the octal umask + of the subprocess. + + + + + + If this option is true, the zdrun.py process will remain even + when the daemon subprocess is stopped. In this case, zdctl.py + will restart zdrun.py as necessary. If this option is false, + zdrun.py will exit when the daemon subprocess is stopped + (unless zdrun.py intends to restart it). + + + + + + If this option is true, zdctl.py enters interactive mode + when it is invoked without a positional command argument. If + it is false, you must use the -i or --interactive command line + option to zdctl.py to enter interactive mode. + + + + + + This option specifies a log file that is the default target of + the "logtail" zdctl.py command. + + NOTE: This is NOT the log file to which zdrun.py writes its + logging messages! That log file is specified by the + <eventlog> section. + + + + + + The name of a file in which a transcript of all output from + the command being run will be written to when daemonized. + + If not specified, output from the command will be discarded. + + This only takes effect when the "daemon" option is enabled. + + + + + + The prompt shown by the controller program. The default must + be provided by the application. + + + +
    + + + + + +
    diff --git a/thesisenv/lib/python3.6/site-packages/zdaemon/sample.conf b/thesisenv/lib/python3.6/site-packages/zdaemon/sample.conf new file mode 100644 index 0000000..d76cef7 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zdaemon/sample.conf @@ -0,0 +1,24 @@ +# Sample config file for zdctl.py and zdrun.py (which share a schema). + + + # Harmless example + program sleep 100 + # Repeat the defaults + backoff-limit 10 + daemon True + forever True + socket-name zdsock + exit-codes 0,2 + # user has no default + umask 022 + directory . + default-to-interactive True + hang-around False + + + + level info + + path /tmp/zdrun.log + + diff --git a/thesisenv/lib/python3.6/site-packages/zdaemon/schema.xml b/thesisenv/lib/python3.6/site-packages/zdaemon/schema.xml new file mode 100644 index 0000000..3ca1d6e --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zdaemon/schema.xml @@ -0,0 +1,28 @@ + + + + This schema describes various options that control zdctl.py and + zdrun.py. zdrun.py is the "daemon process manager"; it runs a + subprocess in the background and restarts it when it crashes. + zdctl.py is the user interface to zdrun.py; it can tell zdrun.py + to start, stop or restart the subprocess, send it a signal, etc. + + There are two sections: <runner> defines options unique + zdctl.py and zdrun.py, and <eventlog> defines a standard + event logging section used by zdrun.py. + + More information about zdctl.py and zdrun.py can be found in the + file Doc/zdctl.txt. This all is specific to Unix/Linux. + + + + + + +
    + +
    + +
    + + diff --git a/thesisenv/lib/python3.6/site-packages/zdaemon/tests/__init__.py b/thesisenv/lib/python3.6/site-packages/zdaemon/tests/__init__.py new file mode 100644 index 0000000..46b66bb --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zdaemon/tests/__init__.py @@ -0,0 +1 @@ +# This file is needed to make this a package. diff --git a/thesisenv/lib/python3.6/site-packages/zdaemon/tests/nokill.py b/thesisenv/lib/python3.6/site-packages/zdaemon/tests/nokill.py new file mode 100644 index 0000000..a50976f --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zdaemon/tests/nokill.py @@ -0,0 +1,8 @@ +#! /usr/bin/env python + +import signal + +signal.signal(signal.SIGTERM, signal.SIG_IGN) + +while 1: + signal.pause() diff --git a/thesisenv/lib/python3.6/site-packages/zdaemon/tests/parent.py b/thesisenv/lib/python3.6/site-packages/zdaemon/tests/parent.py new file mode 100644 index 0000000..c1259a4 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zdaemon/tests/parent.py @@ -0,0 +1,58 @@ +import time +import os +import sys + +donothing_contents = """\ +#!/bin/sh +while [ "1" -ne "2" ]; do + sleep 10 +done +""" + + +def main(): + # dummy zdctl startup of zdrun + shutup() + file = os.path.normpath(os.path.abspath(sys.argv[0])) + tmp = sys.argv[1] + dir = os.path.dirname(file) + zctldir = os.path.dirname(dir) + zdrun = os.path.join(zctldir, 'zdrun.py') + donothing = os.path.join(tmp, 'donothing.sh') + fd = os.open(donothing, os.O_WRONLY | os.O_CREAT, 0o700) + os.write(fd, donothing_contents.encode()) + os.close(fd) + args = [sys.executable, zdrun] + args += ['-d', '-b', '10', '-s', os.path.join(tmp, 'testsock'), + '-x', '0,2', '-z', dir, donothing] + flag = os.P_NOWAIT + os.spawnvpe(flag, args[0], args, + dict(os.environ, PYTHONPATH=':'.join(sys.path)), + ) + while 1: + # wait to be signaled + time.sleep(1) + + +def shutup(): + os.close(0) + sys.stdin = sys.__stdin__ = open("/dev/null") + try: # PEP 446, Python >= 3.4 + 
os.set_inheritable(sys.stdin.fileno(), True) + except AttributeError: + pass + os.close(1) + sys.stdout = sys.__stdout__ = open("/dev/null", "w") + try: # PEP 446, Python >= 3.4 + os.set_inheritable(sys.stdout.fileno(), True) + except AttributeError: + pass + os.close(2) + sys.stderr = sys.__stderr__ = open("/dev/null", "w") + try: # PEP 446, Python >= 3.4 + os.set_inheritable(sys.stderr.fileno(), True) + except AttributeError: + pass + +if __name__ == '__main__': + main() diff --git a/thesisenv/lib/python3.6/site-packages/zdaemon/tests/tests.py b/thesisenv/lib/python3.6/site-packages/zdaemon/tests/tests.py new file mode 100644 index 0000000..dd09ae7 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zdaemon/tests/tests.py @@ -0,0 +1,530 @@ +############################################################################## +# +# Copyright (c) 2004 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.0 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. 
+# +############################################################################## +from __future__ import print_function + +import doctest +import glob +import os +import re +import shutil +import signal +import subprocess +import sys +import tempfile +import unittest +from contextlib import contextmanager + +import ZConfig +import manuel.capture +import manuel.doctest +import manuel.testing +import zc.customdoctests +import zdaemon +from zope.testing import renormalizing + +try: + import pkg_resources + zdaemon_loc = pkg_resources.working_set.find( + pkg_resources.Requirement.parse('zdaemon')).location + zconfig_loc = pkg_resources.working_set.find( + pkg_resources.Requirement.parse('ZConfig')).location +except (ImportError, AttributeError): + zdaemon_loc = os.path.dirname(os.path.dirname(zdaemon.__file__)) + zconfig_loc = os.path.dirname(os.path.dirname(ZConfig.__file__)) + + +def write(name, text): + with open(name, 'w') as f: + f.write(text) + + +def read(name): + with open(name) as f: + return f.read() + + +def make_sure_non_daemon_mode_doesnt_hang_when_program_exits(): + """ + The whole awhile bit that waits for a program to start + whouldn't be used on non-daemon mode. + + >>> write('conf', + ... ''' + ... + ... program sleep 1 + ... daemon off + ... + ... ''') + + >>> system("./zdaemon -Cconf start") + + """ + + +def dont_hang_when_program_doesnt_start(): + """ + If a program doesn't start, we don't want to wait for ever. + + >>> write('conf', + ... ''' + ... + ... program sleep + ... backoff-limit 2 + ... + ... ''') + + >>> system("./zdaemon -Cconf start") + . . + daemon manager not running + Failed: 1 + + """ + + +def allow_duplicate_arguments(): + """ + Wrapper scripts will often embed configuration arguments. This could + cause a problem when zdaemon reinvokes itself, passing it's own set of + configuration arguments. To deal with this, we'll allow duplicate + arguments that have the same values. + + >>> write('conf', + ... ''' + ... + ... 
program sleep 10 + ... + ... ''') + + >>> system("./zdaemon -Cconf -Cconf -Cconf start") + . . + daemon process started, pid=21446 + + >>> system("./zdaemon -Cconf -Cconf -Cconf stop") + . . + daemon process stopped + + """ + + +def test_stop_timeout(): + r""" + + >>> write('t.py', + ... ''' + ... import time, signal + ... signal.signal(signal.SIGTERM, lambda *a: None) + ... while 1: time.sleep(9) + ... ''') + + >>> write('conf', + ... ''' + ... + ... program %s t.py + ... stop-timeout 1 + ... + ... ''' % sys.executable) + + >>> system("./zdaemon -Cconf start") + . . + daemon process started, pid=21446 + + >>> import threading, time + >>> thread = threading.Thread( + ... target=system, args=("./zdaemon -Cconf stop",), + ... kwargs=dict(quiet=True)) + >>> thread.start() + >>> time.sleep(.2) + + >>> system("./zdaemon -Cconf status") + program running; pid=15372 + + >>> thread.join(2) + + >>> system("./zdaemon -Cconf status") + daemon manager not running + Failed: 3 + + """ + + +def test_kill(): + """ + + >>> write('conf', + ... ''' + ... + ... program sleep 100 + ... + ... ''') + + >>> system("./zdaemon -Cconf start") + . . + daemon process started, pid=1234 + + >>> system("./zdaemon -Cconf kill ded") + invalid signal 'ded' + + >>> system("./zdaemon -Cconf kill CONT") + kill(1234, 18) + signal SIGCONT sent to process 1234 + + >>> system("./zdaemon -Cconf stop") + . . + daemon process stopped + + >>> system("./zdaemon -Cconf kill") + daemon process not running + + """ + + +def test_logreopen(): + """ + + >>> write('conf', + ... ''' + ... + ... program sleep 100 + ... transcript transcript.log + ... + ... ''') + + >>> system("./zdaemon -Cconf start") + . . 
+ daemon process started, pid=1234 + + >>> os.rename('transcript.log', 'transcript.log.1') + + >>> system("./zdaemon -Cconf logreopen") + kill(1234, 12) + signal SIGUSR2 sent to process 1234 + + This also reopens the transcript.log: + + >>> sorted(os.listdir('.')) + ['conf', 'transcript.log', 'transcript.log.1', 'zdaemon', 'zdsock'] + + >>> system("./zdaemon -Cconf stop") + . . + daemon process stopped + + """ + + +def test_log_rotation(): + """ + + >>> write('conf', + ... ''' + ... + ... program sleep 100 + ... transcript transcript.log + ... + ... + ... + ... path event.log + ... + ... + ... ''') + + >>> system("./zdaemon -Cconf start") + . . + daemon process started, pid=1234 + + Pretend we did a logrotate: + + >>> os.rename('transcript.log', 'transcript.log.1') + >>> os.rename('event.log', 'event.log.1') + + >>> system("./zdaemon -Cconf reopen_transcript") # or logreopen + + This reopens both transcript.log and event.log: + + >>> sorted(glob.glob('transcript.log*')) + ['transcript.log', 'transcript.log.1'] + + >>> sorted(glob.glob('event.log*')) + ['event.log', 'event.log.1'] + + >>> system("./zdaemon -Cconf stop") + . . + daemon process stopped + + """ + + +def test_start_test_program(): + """ + >>> write('t.py', + ... ''' + ... import time + ... time.sleep(1) + ... open('x', 'w').close() + ... time.sleep(99) + ... ''') + + >>> write('conf', + ... ''' + ... + ... program %s t.py + ... start-test-program cat x + ... + ... ''' % sys.executable) + + >>> import os + + >>> system("./zdaemon -Cconf start") + . . + daemon process started, pid=21446 + + >>> os.path.exists('x') + True + >>> os.remove('x') + + >>> system("./zdaemon -Cconf restart") + . . . + daemon process restarted, pid=19622 + >>> os.path.exists('x') + True + + >>> system("./zdaemon -Cconf stop") + + daemon process stopped + """ + + +def test_start_timeout(): + """ + >>> write('t.py', + ... ''' + ... import time + ... time.sleep(9) + ... ''') + + >>> write('conf', + ... ''' + ... + ... 
program %s t.py + ... start-test-program cat x + ... start-timeout 1 + ... + ... ''' % sys.executable) + + >>> import time + >>> start = time.time() + + >>> system("./zdaemon -Cconf start") + + Program took too long to start + Failed: 1 + + >>> system("./zdaemon -Cconf stop") + + daemon process stopped + """ + + +def DAEMON_MANAGER_MODE_leak(): + """ + Zdaemon used an environment variable to flag that it's running in + daemon-manager mode, as opposed to UI mode. If this environment + variable is allowed to leak to the program, them the program will + be unable to invoke zdaemon correctly. + + >>> write('c', ''' + ... + ... program env + ... transcript t + ... + ... ''') + + >>> system('./zdaemon -b0 -T1 -Cc start', quiet=True) + Failed: 1 + >>> 'DAEMON_MANAGER_MODE' not in read('t') + True + """ + + +def nonzero_exit_on_program_failure(): + """ + >>> write('conf', + ... ''' + ... + ... backoff-limit 1 + ... program nosuch + ... + ... ''') + + >>> system("./zdaemon -Cconf start", echo=True) # doctest: +ELLIPSIS + ./zdaemon... + daemon manager not running + Failed: 1 + + >>> write('conf', + ... ''' + ... + ... backoff-limit 1 + ... program cat nosuch + ... + ... ''') + + >>> system("./zdaemon -Cconf start", echo=True) # doctest: +ELLIPSIS + ./zdaemon... + daemon manager not running + Failed: 1 + + >>> write('conf', + ... ''' + ... + ... backoff-limit 1 + ... program pwd + ... + ... ''') + + >>> system("./zdaemon -Cconf start", echo=True) # doctest: +ELLIPSIS + ./zdaemon... 
+ daemon manager not running + Failed: 1 + + """ + + +def setUp(test): + test.globs['_td'] = td = [] + here = os.getcwd() + td.append(lambda: os.chdir(here)) + tmpdir = tempfile.mkdtemp() + td.append(lambda: shutil.rmtree(tmpdir)) + test.globs['tmpdir'] = tmpdir + workspace = tempfile.mkdtemp() + td.append(lambda: shutil.rmtree(workspace)) + os.chdir(workspace) + write('zdaemon', zdaemon_template % dict( + python=sys.executable, + zdaemon=zdaemon_loc, + ZConfig=zconfig_loc, + )) + os.chmod('zdaemon', 0o755) + test.globs['system'] = system + + +def tearDown(test): + for f in test.globs['_td']: + f() + + +class Timeout(BaseException): + pass + + +@contextmanager +def timeout(seconds): + this_frame = sys._getframe() + + def raiseTimeout(signal, frame): + # the if statement here is meant to prevent an exception in the + # finally: clause before clean up can take place + if frame is not this_frame: + raise Timeout('timed out after %s seconds' % seconds) + + try: + prev_handler = signal.signal(signal.SIGALRM, raiseTimeout) + except ValueError: + # signal only works in main thread + # let's ignore the request for a timeout and hope the test doesn't hang + yield + else: + try: + signal.alarm(seconds) + yield + finally: + signal.alarm(0) + signal.signal(signal.SIGALRM, prev_handler) + + +def system(command, input='', quiet=False, echo=False): + if echo: + print(command) + p = subprocess.Popen( + command, shell=True, + stdin=subprocess.PIPE, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT) + with timeout(60): + data = p.communicate(input)[0] + if not quiet: + print(data.decode(), end='') + r = p.wait() + if r: + print('Failed:', r) + + +def checkenv(match): + match = [a for a in match.group(1).split('\n')[:-1] + if a.split('=')[0] in ('HOME', 'LD_LIBRARY_PATH')] + match.sort() + return '\n'.join(match) + '\n' + + +zdaemon_template = """#!%(python)s + +import sys +sys.path[0:0] = [ + %(zdaemon)r, + %(ZConfig)r, + ] + +try: + import coverage +except ImportError: + pass 
+else: + coverage.process_startup() + +import zdaemon.zdctl + +if __name__ == '__main__': + zdaemon.zdctl.main() +""" + + +def test_suite(): + README_checker = renormalizing.RENormalizing([ + (re.compile('pid=\d+'), 'pid=NNN'), + (re.compile('(\. )+\.?'), ''), + (re.compile('^env\n((?:.*\n)+)$'), checkenv), + ]) + + return unittest.TestSuite(( + doctest.DocTestSuite( + setUp=setUp, tearDown=tearDown, + checker=renormalizing.RENormalizing([ + (re.compile('pid=\d+'), 'pid=NNN'), + (re.compile('(\. )+\.?'), ''), + (re.compile('process \d+'), 'process NNN'), + (re.compile('kill\(\d+,'), 'kill(NNN,'), + ])), + manuel.testing.TestSuite( + manuel.doctest.Manuel( + parser=zc.customdoctests.DocTestParser( + ps1='sh>', + transform=lambda s: 'system("%s")\n' % s.rstrip() + ), + checker=README_checker, + ) + + manuel.doctest.Manuel(checker=README_checker) + + manuel.capture.Manuel(), + '../README.rst', + setUp=setUp, tearDown=tearDown), + )) diff --git a/thesisenv/lib/python3.6/site-packages/zdaemon/tests/testuser.py b/thesisenv/lib/python3.6/site-packages/zdaemon/tests/testuser.py new file mode 100644 index 0000000..f2999ba --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zdaemon/tests/testuser.py @@ -0,0 +1,144 @@ +############################################################################## +# +# Copyright (c) 2010 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. 
+# +############################################################################## + +# Test user and groups options + +from zope.testing import setupstack +import doctest +import mock +import os +import sys +import zdaemon.zdctl + + +def write(name, text): + with open(name, 'w') as f: + f.write(text) + + +class O: + def __init__(self, **kw): + self.__dict__.update(kw) + + +def test_user_fails_when_not_root(): + """ + + >>> write('conf', + ... ''' + ... + ... program sleep 9 + ... user zope + ... + ... ''') + + >>> with mock.patch('os.geteuid') as geteuid: + ... with mock.patch('sys.stderr'): + ... sys.stderr = sys.stdout + ... geteuid.return_value = 42 + ... try: + ... zdaemon.zdctl.main(['-C', 'conf', 'status']) + ... except SystemExit: + ... pass + ... else: + ... print('oops') + ... # doctest: +ELLIPSIS + Error: only root can use -u USER to change users + For help, use ... -h + + >>> import pwd + >>> pwd.getpwnam.assert_called_with('zope') + + """ + + +def test_user_sets_supplemtary_groups(): + """ + + >>> write('conf', + ... ''' + ... + ... program sleep 9 + ... user zope + ... + ... ''') + + >>> import grp + >>> grp.getgrall.return_value = [ + ... O(gr_gid=8, gr_mem =['g', 'zope', ]), + ... O(gr_gid=1, gr_mem =['a', 'x', ]), + ... O(gr_gid=2, gr_mem =['b', 'x', 'zope']), + ... O(gr_gid=5, gr_mem =['c', 'x', ]), + ... O(gr_gid=4, gr_mem =['d', 'x', ]), + ... O(gr_gid=3, gr_mem =['e', 'x', 'zope', ]), + ... O(gr_gid=6, gr_mem =['f', ]), + ... O(gr_gid=7, gr_mem =['h', ]), + ... ] + + >>> with mock.patch('sys.exit'): + ... 
zdaemon.zdctl.main(['-C', 'conf', 'status']) + daemon manager not running + + >>> import pwd, os + >>> os.geteuid.assert_called_with() + >>> pwd.getpwnam.assert_called_with('zope') + >>> grp.getgrall.assert_called_with() + >>> os.setuid.assert_called_with(99) + >>> os.setgid.assert_called_with(5) + >>> os.setgroups.assert_called_with([2, 3, 8]) + + """ + + +def test_do_nothing_if_effective_user_is_configured_user(): + """ + + >>> write('conf', + ... ''' + ... + ... program sleep 9 + ... user zope + ... + ... ''') + + >>> with mock.patch('os.geteuid') as geteuid: + ... with mock.patch('sys.exit'): + ... geteuid.return_value = 99 + ... zdaemon.zdctl.main(['-C', 'conf', 'status']) + ... os.geteuid.assert_called_with() + daemon manager not running + + >>> import pwd, os, grp + >>> pwd.getpwnam.assert_called_with('zope') + >>> _ = grp.getgrall.assert_not_called() + >>> _ = os.setuid.assert_not_called() + >>> _ = os.setgid.assert_not_called() + >>> _ = os.setgroups.assert_not_called() + + """ + + +def setUp(test): + setupstack.setUpDirectory(test) + getpwname = setupstack.context_manager(test, mock.patch('pwd.getpwnam')) + getpwname.return_value = O(pw_gid=5, pw_uid=99, pw_name='zope') + setupstack.context_manager(test, mock.patch('os.geteuid')).return_value = 0 + setupstack.context_manager(test, mock.patch('grp.getgrall')) + setupstack.context_manager(test, mock.patch('os.setgroups')) + setupstack.context_manager(test, mock.patch('os.setuid')) + setupstack.context_manager(test, mock.patch('os.setgid')) + + +def test_suite(): + return doctest.DocTestSuite(setUp=setUp, tearDown=setupstack.tearDown) diff --git a/thesisenv/lib/python3.6/site-packages/zdaemon/tests/testzdctl.py b/thesisenv/lib/python3.6/site-packages/zdaemon/tests/testzdctl.py new file mode 100644 index 0000000..70a6a96 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zdaemon/tests/testzdctl.py @@ -0,0 +1,79 @@ +"""Test suite for zdctl.py.""" + +import doctest + +from zdaemon import zdctl + + +def 
run(args): + options = zdctl.ZDCtlOptions() + options.realize(['-p', 'true'] + args.split()) + cmd = zdctl.ZDCmd(options) + cmd.onecmd(" ".join(options.args)) + + +def doctest_ZDCmd_help(): + """Test for ZDCmd.help_xxx + + >>> run("help") + + Documented commands (type help ): + ======================================== + fg help logreopen reopen_transcript show status wait + foreground kill logtail restart start stop + + + >>> run("help fg") + foreground -- Run the program in the forground. + fg -- an alias for foreground. + + >>> run("help help") + help -- Print a list of available actions. + help -- Print help for . + + >>> run("help kill") + kill [sig] -- Send signal sig to the daemon process. + The default signal is SIGTERM. + + >>> run("help logreopen") + logreopen -- Send a SIGUSR2 signal to the daemon process. + This is designed to reopen the log file. + Also reopens the transcript log file. + + >>> run("help logtail") + logtail [logfile] -- Run tail -f on the given logfile. + A default file may exist. + Hit ^C to exit this mode. + + >>> run("help reopen_transcript") + reopen_transcript -- Reopen the transcript log file. + Use after log rotation. + + >>> run("help restart") + restart -- Stop and then start the daemon process. + + >>> run("help show") + show options -- show zdctl options + show python -- show Python version and details + show all -- show all of the above + + >>> run("help start") + start -- Start the daemon process. + If it is already running, do nothing. + + >>> run("help status") + status [-l] -- Print status for the daemon process. + With -l, show raw status output as well. + + >>> run("help stop") + stop -- Stop the daemon process. + If it is not running, do nothing. + + >>> run("help wait") + wait -- Wait for the daemon process to exit. 
+ + """ + + +def test_suite(): + return doctest.DocTestSuite(optionflags=doctest.NORMALIZE_WHITESPACE) diff --git a/thesisenv/lib/python3.6/site-packages/zdaemon/tests/testzdoptions.py b/thesisenv/lib/python3.6/site-packages/zdaemon/tests/testzdoptions.py new file mode 100644 index 0000000..1f81812 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zdaemon/tests/testzdoptions.py @@ -0,0 +1,496 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## + +"""Test suite for zdaemon.zdoptions.""" + +import os +import sys +import tempfile +import shutil +import unittest +import doctest + +import ZConfig +import zdaemon +from zdaemon.zdoptions import ( + ZDOptions, RunnerOptions, list_of_ints, + existing_parent_directory, existing_parent_dirpath) + +try: + from StringIO import StringIO +except: + # Python 3 support. 
+ from io import StringIO + + +class ZDOptionsTestBase(unittest.TestCase): + + OptionsClass = ZDOptions + + def save_streams(self): + self.save_stdout = sys.stdout + self.save_stderr = sys.stderr + sys.stdout = self.stdout = StringIO() + sys.stderr = self.stderr = StringIO() + + def restore_streams(self): + sys.stdout = self.save_stdout + sys.stderr = self.save_stderr + + def check_exit_code(self, options, args, exit_code=2): + save_sys_stderr = sys.stderr + try: + sys.stderr = StringIO() + try: + options.realize(args) + except SystemExit as err: + self.assertEqual(err.code, exit_code) + else: + self.fail("SystemExit expected") + finally: + sys.stderr = save_sys_stderr + + +class TestZDOptions(ZDOptionsTestBase): + + input_args = ["arg1", "arg2"] + output_opts = [] + output_args = ["arg1", "arg2"] + + def test_basic(self): + progname = "progname" + doc = "doc" + options = self.OptionsClass() + options.positional_args_allowed = 1 + options.schemadir = os.path.dirname(zdaemon.__file__) + options.realize(self.input_args, progname, doc) + self.assertEqual(options.progname, "progname") + self.assertEqual(options.doc, "doc") + self.assertEqual(options.options, self.output_opts) + self.assertEqual(options.args, self.output_args) + + def test_configure(self): + configfile = os.path.join(os.path.dirname(zdaemon.__file__), + "sample.conf") + for arg in "-C", "--c", "--configure": + options = self.OptionsClass() + options.realize([arg, configfile]) + self.assertEqual(options.configfile, configfile) + + # The original intent was that the docstring of whatever module is + # __main__ would be used as help documentation. + # Because of the way buildout generates scripts, this will always + # be an empty string. + # So, we now use the __doc__ of the options class being used. 
+ + def help_test_helper(self, optionsclass, kw, expected): + for arg in "-h", "--h", "--help": + options = optionsclass() + try: + self.save_streams() + try: + options.realize([arg], **kw) + finally: + self.restore_streams() + except SystemExit as err: + self.assertEqual(err.code, 0) + else: + self.fail("%s didn't call sys.exit()" % repr(arg)) + helptext = self.stdout.getvalue() + self.assertEqual(helptext, expected) + + def test_default_help(self): + # test what happens if OptionsClass is used directly. + # Not sure this ever happens :-S + self.help_test_helper( + self.OptionsClass, {}, + self.OptionsClass.__doc__ or 'No help available.') + + def test_default_subclass_help(self): + # test what happens when the subclass doesn't do anything + # with __doc__ + class SubClass(self.OptionsClass): + pass + # __doc__ isn't inherited :-( + self.help_test_helper(SubClass, {}, 'No help available.') + + def test_default_help_with_doc_kw(self): + # test what happens when the subclass doesn't do anything + # with __doc__, but doc is supplied to realize + self.help_test_helper(self.OptionsClass, + {'doc': 'Example help'}, + 'Example help') + + def test_no_help(self): + # test what happens when the subclass has None for __doc__ + class NoHelp(self.OptionsClass): + __doc__ = None + self.help_test_helper(NoHelp, {}, 'No help available.') + + def test_no_help_with_doc_kw(self): + # test what happens when the subclass has None for __doc__, + # but doc is supplied to realize + class NoHelp(self.OptionsClass): + __doc__ = None + self.help_test_helper(NoHelp, {'doc': 'Example help'}, 'Example help') + + def test_help(self): + # test what happens when the subclass has None for __doc__ + class HasHelp(self.OptionsClass): + __doc__ = 'Some help for %s' + self.help_test_helper(HasHelp, {'progname': 'me'}, 'Some help for me') + + def test_has_help_with_doc_kw(self): + # test what happens when the subclass has something for __doc__, + # and doc is also supplied to realize + class 
HasHelp(self.OptionsClass): + __doc__ = 'Some help' + self.help_test_helper(HasHelp, {'doc': 'Example help'}, 'Example help') + + def test_version(self): + options = self.OptionsClass() + options.version = '2.4.frog-knows' + self.save_streams() + try: + self.check_exit_code(options, ['--version'], exit_code=0) + finally: + self.restore_streams() + self.assertNotEqual(self.stdout.getvalue(), "2.4.frog-knows") + + def test_unrecognized(self): + # Check that we get an error for an unrecognized option + self.check_exit_code(self.OptionsClass(), ["-/"]) + + +class TestBasicFunctionality(ZDOptionsTestBase): + + def test_no_positional_args(self): + # Check that we get an error for positional args when they + # haven't been enabled. + self.check_exit_code(self.OptionsClass(), ["A"]) + + def test_positional_args(self): + options = self.OptionsClass() + options.positional_args_allowed = 1 + options.realize(["A", "B"]) + self.assertEqual(options.args, ["A", "B"]) + + def test_positional_args_empty(self): + options = self.OptionsClass() + options.positional_args_allowed = 1 + options.realize([]) + self.assertEqual(options.args, []) + + def test_positional_args_unknown_option(self): + # Make sure an unknown option doesn't become a positional arg. + options = self.OptionsClass() + options.positional_args_allowed = 1 + self.check_exit_code(options, ["-o", "A", "B"]) + + def test_conflicting_flags(self): + # Check that we get an error for flags which compete over the + # same option setting. + options = self.OptionsClass() + options.add("setting", None, "a", flag=1) + options.add("setting", None, "b", flag=2) + self.check_exit_code(options, ["-a", "-b"]) + + def test_duplicate_flags(self): + # Check that we don't get an error for flags which reinforce the + # same option setting. 
+ options = self.OptionsClass() + options.add("setting", None, "a", flag=1) + options.realize(["-a", "-a"]) + + def test_handler_simple(self): + # Test that a handler is called; use one that doesn't return None. + options = self.OptionsClass() + options.add("setting", None, "a:", handler=int) + options.realize(["-a2"]) + self.assertEqual(options.setting, 2) + + def test_handler_side_effect(self): + # Test that a handler is called and conflicts are not + # signalled when it returns None. + options = self.OptionsClass() + L = [] + options.add("setting", None, "a:", "append=", handler=L.append) + options.realize(["-a2", "--append", "3"]) + self.assertTrue(options.setting is None) + self.assertEqual(L, ["2", "3"]) + + def test_handler_with_bad_value(self): + options = self.OptionsClass() + options.add("setting", None, "a:", handler=int) + self.check_exit_code(options, ["-afoo"]) + + def test_required_options(self): + # Check that we get an error if a required option is not specified + options = self.OptionsClass() + options.add("setting", None, "a:", handler=int, required=True) + self.check_exit_code(options, []) + + def test_overrides_without_config_file(self): + # Check that we get an error if we use -X without -C + options = self.OptionsClass() + self.check_exit_code(options, ["-Xfoo"]) + + def test_raise_getopt_errors(self): + options = self.OptionsClass() + # note that we do not add "a" to the list of options; + # if raise_getopt_errors was true, this test would error + options.realize(["-afoo"], raise_getopt_errs=False) + # check_exit_code realizes the options with raise_getopt_errs=True + self.check_exit_code(options, ['-afoo']) + + def test_list_of_ints(self): + self.assertEqual(list_of_ints(''), []) + self.assertEqual(list_of_ints('42'), [42]) + self.assertEqual(list_of_ints('42,43'), [42, 43]) + self.assertEqual(list_of_ints('42, 43'), [42, 43]) + + +class TestOptionConfiguration(ZDOptionsTestBase): + + def test_add_flag_or_handler_not_both(self): + options = 
self.OptionsClass() + self.assertRaises(ValueError, options.add, short="a", flag=1, + handler=lambda x: None) + + def test_flag_requires_command_line_flag(self): + options = self.OptionsClass() + self.assertRaises(ValueError, options.add, flag=1) + + def test_flag_cannot_accept_arguments(self): + options = self.OptionsClass() + self.assertRaises(ValueError, options.add, short='a:', flag=1) + self.assertRaises(ValueError, options.add, long='an-option=', flag=1) + + def test_arguments_must_be_consistent(self): + options = self.OptionsClass() + self.assertRaises(ValueError, options.add, short='a:', long='an-option') + self.assertRaises(ValueError, options.add, short='a', long='an-option=') + + def test_short_cmdline_syntax(self): + options = self.OptionsClass() + self.assertRaises(ValueError, options.add, short='-a') + self.assertRaises(ValueError, options.add, short='ab') + self.assertRaises(ValueError, options.add, short='abc') + + def test_long_cmdline_syntax(self): + options = self.OptionsClass() + self.assertRaises(ValueError, options.add, long='--an-option') + self.assertRaises(ValueError, options.add, long='-an-option') + + def test_duplicate_short_flags(self): + options = self.OptionsClass() + options.add(short='a') + options.add(short='b') + self.assertRaises(ValueError, options.add, short='a') + + def test_duplicate_long_flags(self): + options = self.OptionsClass() + options.add(long='an-option') + options.add(long='be-still-my-beating-heart') + self.assertRaises(ValueError, options.add, long='an-option') + + +class EnvironmentOptions(ZDOptionsTestBase): + + saved_schema = None + + class OptionsClass(ZDOptions): + def __init__(self): + ZDOptions.__init__(self) + self.add("opt", "opt", "o:", "opt=", + default=42, handler=int, env="OPT") + + def load_schema(self): + # Doing this here avoids needing a separate file for the schema: + if self.schema is None: + if EnvironmentOptions.saved_schema is None: + schema = ZConfig.loadSchemaFile(StringIO("""\ + + + + 
""")) + EnvironmentOptions.saved_schema = schema + self.schema = EnvironmentOptions.saved_schema + + def load_configfile(self): + if getattr(self, "configtext", None): + self.configfile = tempfile.mktemp() + f = open(self.configfile, 'w') + f.write(self.configtext) + f.close() + try: + ZDOptions.load_configfile(self) + finally: + os.unlink(self.configfile) + else: + ZDOptions.load_configfile(self) + + # Save and restore the environment around each test: + + def setUp(self): + self._oldenv = os.environ + env = {} + for k, v in os.environ.items(): + env[k] = v + os.environ = env + + def tearDown(self): + os.environ = self._oldenv + + def create_with_config(self, text): + options = self.OptionsClass() + zdpkgdir = os.path.dirname(os.path.abspath(zdaemon.__file__)) + options.schemadir = os.path.join(zdpkgdir, 'tests') + options.schemafile = "envtest.xml" + # configfile must be set for ZDOptions to use ZConfig: + if text: + options.configfile = "not used" + options.configtext = text + return options + + +class TestZDOptionsEnvironment(EnvironmentOptions): + + def test_with_environment(self): + os.environ["OPT"] = "2" + self.check_from_command_line() + options = self.OptionsClass() + options.realize([]) + self.assertEqual(options.opt, 2) + + def test_without_environment(self): + self.check_from_command_line() + options = self.OptionsClass() + options.realize([]) + self.assertEqual(options.opt, 42) + + def check_from_command_line(self): + for args in (["-o1"], ["--opt", "1"]): + options = self.OptionsClass() + options.realize(args) + self.assertEqual(options.opt, 1) + + def test_with_bad_environment(self): + os.environ["OPT"] = "Spooge!" 
+ # make sure the bad value is ignored if the command-line is used: + self.check_from_command_line() + options = self.OptionsClass() + try: + self.save_streams() + try: + options.realize([]) + finally: + self.restore_streams() + except SystemExit as e: + self.assertEqual(e.code, 2) + else: + self.fail("expected SystemExit") + + def test_environment_overrides_configfile(self): + options = self.create_with_config("opt 3") + options.realize([]) + self.assertEqual(options.opt, 3) + + os.environ["OPT"] = "2" + options = self.create_with_config("opt 3") + options.realize([]) + self.assertEqual(options.opt, 2) + + +class TestCommandLineOverrides(EnvironmentOptions): + + def test_simple_override(self): + options = self.create_with_config("# empty config") + options.realize(["-X", "opt=-2"]) + self.assertEqual(options.opt, -2) + + def test_error_propogation(self): + self.check_exit_code(self.create_with_config("# empty"), + ["-Xopt=1", "-Xopt=2"]) + self.check_exit_code(self.create_with_config("# empty"), + ["-Xunknown=foo"]) + + +class TestRunnerDirectory(ZDOptionsTestBase): + + OptionsClass = RunnerOptions + + def setUp(self): + super(TestRunnerDirectory, self).setUp() + # Create temporary directory to work in + self.root = tempfile.mkdtemp() + + def tearDown(self): + shutil.rmtree(self.root) + super(TestRunnerDirectory, self).tearDown() + + def test_not_existing_directory(self): + options = self.OptionsClass() + path = os.path.join(self.root, 'does-not-exist', 'really-not') + self.check_exit_code(options, ["-z", path]) + socket = os.path.join(path, 'socket') + self.check_exit_code(options, ["-s", socket]) + + def test_existing_directory(self): + options = self.OptionsClass() + options.realize(["-z", self.root]) + socket = os.path.join(self.root, 'socket') + self.check_exit_code(options, ["-s", socket]) + + def test_parent_is_created(self): + options = self.OptionsClass() + path = os.path.join(self.root, 'will-be-created') + options.realize(["-z", path]) + 
self.assertEqual(path, options.directory) + socket = os.path.join(path, 'socket') + options = self.OptionsClass() + options.realize(["-s", socket]) + # Directory will be created when zdaemon runs, not when the + # configuration is read + self.assertFalse(os.path.exists(path)) + + def test_existing_parent_directory(self): + self.assertTrue(existing_parent_directory(self.root)) + self.assertTrue(existing_parent_directory( + os.path.join(self.root, 'not-there'))) + self.assertRaises( + ValueError, existing_parent_directory, + os.path.join(self.root, 'not-there', 'this-also-not')) + + def test_existing_parent_dirpath(self): + self.assertTrue(existing_parent_dirpath( + os.path.join(self.root, 'sock'))) + self.assertTrue(existing_parent_dirpath( + os.path.join(self.root, 'not-there', 'sock'))) + self.assertTrue(existing_parent_dirpath( + os.path.join('not-there', 'sock'))) + self.assertRaises( + ValueError, existing_parent_dirpath, + os.path.join(self.root, 'not-there', 'this-also-not', 'sock')) + + +def test_suite(): + return unittest.TestSuite([ + doctest.DocTestSuite('zdaemon.zdoptions'), + unittest.defaultTestLoader.loadTestsFromName(__name__), + ]) + + +if __name__ == "__main__": + unittest.main(defaultTest='test_suite') diff --git a/thesisenv/lib/python3.6/site-packages/zdaemon/tests/testzdrun.py b/thesisenv/lib/python3.6/site-packages/zdaemon/tests/testzdrun.py new file mode 100644 index 0000000..594685d --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zdaemon/tests/testzdrun.py @@ -0,0 +1,461 @@ +"""Test suite for zdrun.py.""" +from __future__ import print_function + +import os +import sys +import time +import shutil +import signal +import tempfile +import unittest +import socket + +try: + from StringIO import StringIO +except: + # Python 3 support. + from io import StringIO + +import ZConfig + +from zdaemon import zdrun, zdctl + + +class ConfiguredOptions: + """Options class that loads configuration from a specified string. 
+ + This always loads from the string, regardless of any -C option + that may be given. + """ + + def set_configuration(self, configuration): + self.__configuration = configuration + self.configfile = "" + + def load_configfile(self): + sio = StringIO(self.__configuration) + cfg = ZConfig.loadConfigFile(self.schema, sio, self.zconfig_options) + self.configroot, self.confighandlers = cfg + + +class ConfiguredZDRunOptions(ConfiguredOptions, zdrun.ZDRunOptions): + + def __init__(self, configuration): + zdrun.ZDRunOptions.__init__(self) + self.set_configuration(configuration) + + +class ZDaemonTests(unittest.TestCase): + + python = os.path.abspath(sys.executable) + assert os.path.exists(python) + here = os.path.abspath(os.path.dirname(__file__)) + assert os.path.isdir(here) + nokill = os.path.join(here, "nokill.py") + assert os.path.exists(nokill) + parent = os.path.dirname(here) + zdrun = os.path.join(parent, "zdrun.py") + assert os.path.exists(zdrun) + + ppath = os.pathsep.join(sys.path) + + def setUp(self): + self.zdsock = tempfile.mktemp() + self.new_stdout = StringIO() + self.save_stdout = sys.stdout + sys.stdout = self.new_stdout + self.expect = "" + + def tearDown(self): + sys.stdout = self.save_stdout + for sig in (signal.SIGTERM, + signal.SIGHUP, + signal.SIGINT, + signal.SIGCHLD): + signal.signal(sig, signal.SIG_DFL) + try: + os.unlink(self.zdsock) + except os.error: + pass + output = self.new_stdout.getvalue() + self.assertEqual(self.expect, output) + + def quoteargs(self, args): + for i in range(len(args)): + if " " in args[i]: + args[i] = '"%s"' % args[i] + return " ".join(args) + + def rundaemon(self, args): + # Add quotes, in case some pathname contains spaces (e.g. 
Mac OS X) + args = self.quoteargs(args) + cmd = ('PYTHONPATH="%s" "%s" "%s" -d -s "%s" %s' % + (self.ppath, self.python, self.zdrun, self.zdsock, args)) + os.system(cmd) + # When the daemon crashes, the following may help debug it: + # os.system("PYTHONPATH=%s %s %s -s %s %s &" % + # (self.ppath, self.python, self.zdrun, self.zdsock, args)) + + def _run(self, args, cmdclass=None, module=zdctl): + if isinstance(args, str): + args = args.split() + kw = {} + if cmdclass: + kw['cmdclass'] = cmdclass + try: + module.main(["-s", self.zdsock] + args, **kw) + except SystemExit: + pass + + def testCmdclassOverride(self): + class MyCmd(zdctl.ZDCmd): + def do_sproing(self, rest): + print(rest) + self._run("-p echo sproing expected", cmdclass=MyCmd) + self.expect = "expected\n" + + def testSystem(self): + self.rundaemon(["echo", "-n"]) + self.expect = "" + + def test_help_zdrun(self): + self._run("-h", module=zdrun) + self.expect = zdrun.__doc__ + + def test_help_zdctl(self): + self._run("-h") + self.expect = zdctl.__doc__ + + def testOptionsSysArgv(self): + # Check that options are parsed from sys.argv by default + options = zdrun.ZDRunOptions() + save_sys_argv = sys.argv + try: + sys.argv = ["A", "B", "C"] + options.realize() + finally: + sys.argv = save_sys_argv + self.assertEqual(options.options, []) + self.assertEqual(options.args, ["B", "C"]) + + def testOptionsBasic(self): + # Check basic option parsing + options = zdrun.ZDRunOptions() + options.realize(["B", "C"], "foo") + self.assertEqual(options.options, []) + self.assertEqual(options.args, ["B", "C"]) + self.assertEqual(options.progname, "foo") + + def testOptionsHelp(self): + # Check that -h behaves properly + options = zdrun.ZDRunOptions() + try: + options.realize(["-h"], doc=zdrun.__doc__) + except SystemExit as err: + self.assertEqual(err.code, 0) + else: + self.fail("SystemExit expected") + self.expect = zdrun.__doc__ + + def testSubprocessBasic(self): + # Check basic subprocess management: spawn, kill, wait + 
options = zdrun.ZDRunOptions() + options.realize(["sleep", "100"]) + proc = zdrun.Subprocess(options) + self.assertEqual(proc.pid, 0) + pid = proc.spawn() + self.assertEqual(proc.pid, pid) + msg = proc.kill(signal.SIGTERM) + self.assertEqual(msg, None) + wpid, wsts = os.waitpid(pid, 0) + self.assertEqual(wpid, pid) + self.assertEqual(os.WIFSIGNALED(wsts), 1) + self.assertEqual(os.WTERMSIG(wsts), signal.SIGTERM) + proc.setstatus(wsts) + self.assertEqual(proc.pid, 0) + + def testEventlogOverride(self): + # Make sure runner.eventlog is used if it exists + options = ConfiguredZDRunOptions("""\ + + program /bin/true + + level 42 + + + + + level 35 + + """) + options.realize(["/bin/true"]) + self.assertEqual(options.config_logger.level, 42) + + def testEventlogWithoutOverride(self): + # Make sure eventlog is used if runner.eventlog doesn't exist + options = ConfiguredZDRunOptions("""\ + + program /bin/true + + + + level 35 + + """) + options.realize(["/bin/true"]) + self.assertEqual(options.config_logger.level, 35) + + def testRunIgnoresParentSignals(self): + # Spawn a process which will in turn spawn a zdrun process. + # We make sure that the zdrun process is still running even if + # its parent process receives an interrupt signal (it should + # not be passed to zdrun). + tmp = tempfile.mkdtemp() + zdrun_socket = os.path.join(tmp, 'testsock') + try: + zdctlpid = os.spawnvpe( + os.P_NOWAIT, + sys.executable, + [sys.executable, os.path.join(self.here, 'parent.py'), tmp], + dict(os.environ, + PYTHONPATH=":".join(sys.path), + ) + ) + # Wait for it to start, but no longer than a minute. + deadline = time.time() + 60 + is_started = False + while time.time() < deadline: + response = send_action('status\n', zdrun_socket) + if response is None: + time.sleep(0.05) + else: + is_started = True + break + self.assertTrue(is_started, + "spawned process failed to start in a minute") + # Kill it, and wait a little to ensure it's dead. 
+ os.kill(zdctlpid, signal.SIGINT) + time.sleep(0.25) + # Make sure the child is still responsive. + response = send_action('status\n', zdrun_socket, + raise_on_error=True) + self.assertTrue(b'\n' in response, + 'no newline in response: ' + repr(response)) + # Kill the process. + send_action('stop\n', zdrun_socket) + finally: + # Remove the tmp directory. + # Caution: this is delicate. The code here used to do + # shutil.rmtree(tmp), but that suffers a sometimes-fatal + # race with zdrun.py. The 'testsock' socket is created + # by zdrun in the tmp directory, and zdrun tries to + # unlink it. If shutil.rmtree sees 'testsock' too, it + # will also try to unlink it, but zdrun may complete + # unlinking it before shutil gets to it (there's more + # than one process here). So, in effect, we code a + # 1-level rmtree inline here, suppressing errors. + for fname in os.listdir(tmp): + try: + os.unlink(os.path.join(tmp, fname)) + except os.error: + pass + os.rmdir(tmp) + + def testUmask(self): + # people have a strange tendency to run the tests as root + if os.getuid() == 0: + self.fail(""" +I am root! +Do not run the tests as root. +Testing proper umask handling cannot be done as root. +Furthermore, it is not a good idea and strongly discouraged to run zope, the +build system (configure, make) or the tests as root. +In general do not run anything as root unless absolutely necessary. +""") + + path = tempfile.mktemp() + # With umask 666, we should create a file that we aren't able + # to write. If access says no, assume that umask works. 
+ try: + touch_cmd = "/bin/touch" + if not os.path.exists(touch_cmd): + touch_cmd = "/usr/bin/touch" # Mac OS X + self.rundaemon(["-m", "666", touch_cmd, path]) + for i in range(5): + if not os.path.exists(path): + time.sleep(0.1) + self.assertTrue(os.path.exists(path)) + self.assertTrue(not os.access(path, os.W_OK)) + finally: + if os.path.exists(path): + os.remove(path) + + +class TestRunnerDirectory(unittest.TestCase): + + def setUp(self): + super(TestRunnerDirectory, self).setUp() + self.root = tempfile.mkdtemp() + self.save_stdout = sys.stdout + self.save_stderr = sys.stdout + sys.stdout = StringIO() + sys.stderr = StringIO() + self.expect = '' + self.cmd = "/bin/true" + if not os.path.exists(self.cmd): + self.cmd = "/usr/bin/true" # Mac OS X + + def tearDown(self): + shutil.rmtree(self.root) + got = sys.stdout.getvalue() + err = sys.stderr.getvalue() + sys.stdout = self.save_stdout + sys.stderr = self.save_stderr + if err: + print(err, end='', file=sys.stderr) + self.assertEqual(self.expect, got) + super(TestRunnerDirectory, self).tearDown() + + def run_ctl(self, opts): + options = zdctl.ZDCtlOptions() + options.realize(opts + ['fg']) + self.expect = self.cmd + '\n' + proc = zdctl.ZDCmd(options) + proc.onecmd(" ".join(options.args)) + + def testCtlRunDirectoryCreation(self): + path = os.path.join(self.root, 'rundir') + self.run_ctl(['-z', path, '-p', self.cmd]) + self.assertTrue(os.path.exists(path)) + + def testCtlRunDirectoryCreationFromConfigFile(self): + path = os.path.join(self.root, 'rundir') + options = ['directory ' + path, + 'program ' + self.cmd] + config = self.writeConfig( + '\n%s\n' % '\n'.join(options)) + self.run_ctl(['-C', config]) + self.assertTrue(os.path.exists(path)) + + def testCtlRunDirectoryCreationOnlyOne(self): + path = os.path.join(self.root, 'rundir', 'not-created') + self.assertRaises(SystemExit, + self.run_ctl, ['-z', path, '-p', self.cmd]) + self.assertFalse(os.path.exists(path)) + got = sys.stderr.getvalue().strip() + sys.stderr 
= StringIO() + self.assertTrue(got.startswith('Error: invalid value for -z')) + + def testCtlSocketDirectoryCreation(self): + path = os.path.join(self.root, 'rundir', 'sock') + self.run_ctl(['-s', path, '-p', self.cmd]) + self.assertTrue(os.path.exists(os.path.dirname(path))) + + def testCtlSocketDirectoryCreationRelativePath(self): + path = os.path.join('rundir', 'sock') + self.run_ctl(['-s', path, '-p', self.cmd]) + self.assertTrue( + os.path.exists(os.path.dirname(os.path.join(os.getcwd(), path)))) + + def testCtlSocketDirectoryCreationOnlyOne(self): + path = os.path.join(self.root, 'rundir', 'not-created', 'sock') + self.assertRaises(SystemExit, + self.run_ctl, ['-s', path, '-p', self.cmd]) + self.assertFalse(os.path.exists(path)) + got = sys.stderr.getvalue().strip() + sys.stderr = StringIO() + self.assertTrue(got.startswith('Error: invalid value for -s')) + + def testCtlSocketDirectoryCreationFromConfigFile(self): + path = os.path.join(self.root, 'rundir') + options = ['socket-name %s/sock' % path, + 'program ' + self.cmd] + config = self.writeConfig( + '\n%s\n' % '\n'.join(options)) + self.run_ctl(['-C', config]) + self.assertTrue(os.path.exists(path)) + + def testCtlSocketDirectoryCreationFromConfigFileRelativePath(self): + path = 'rel-rundir' + options = ['socket-name %s/sock' % path, + 'program ' + self.cmd] + config = self.writeConfig( + '\n%s\n' % '\n'.join(options)) + self.run_ctl(['-C', config]) + self.assertTrue(os.path.exists(os.path.join(os.getcwd(), path))) + + def writeConfig(self, config): + config_file = os.path.join(self.root, 'config') + with open(config_file, 'w') as f: + f.write(config) + return config_file + + def testDirectoryChown(self): + path = os.path.join(self.root, 'foodir') + options = zdctl.ZDCtlOptions() + options.realize(['-p', self.cmd, 'status']) + cmd = zdctl.ZDCmd(options) + options.uid = 27 + options.gid = 28 + # Patch chown and geteuid, because we're not root + chown = os.chown + geteuid = os.geteuid + calls = [] + + def 
my_chown(*args): + calls.append(('chown',) + args) + + def my_geteuid(): + return 0 + + try: + os.chown = my_chown + os.geteuid = my_geteuid + cmd.create_directory(path) + finally: + os.chown = chown + os.geteuid = geteuid + self.assertEqual([('chown', path, 27, 28)], calls) + + +def send_action(action, sockname, raise_on_error=False): + """Send an action to the zdrun server and return the response. + + Return None if the server is not up or any other error happened. + """ + sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) + try: + sock.connect(sockname) + sock.send(action.encode() + b"\n") + sock.shutdown(1) # We're not writing any more + response = b"" + while 1: + data = sock.recv(1000) + if not data: + break + response += data + sock.close() + return response + except socket.error as msg: + if str(msg) == 'AF_UNIX path too long': + # MacOS has apparent small limits on the length of a UNIX + # domain socket filename, we want to make MacOS users aware + # of the actual problem + raise + if raise_on_error: + raise + return None + finally: + sock.close() + + +def test_suite(): + suite = unittest.TestSuite() + if os.name == "posix": + suite.addTest(unittest.makeSuite(ZDaemonTests)) + suite.addTest(unittest.makeSuite(TestRunnerDirectory)) + return suite + +if __name__ == '__main__': + __file__ = sys.argv[0] + unittest.main(defaultTest='test_suite') diff --git a/thesisenv/lib/python3.6/site-packages/zdaemon/zdctl.py b/thesisenv/lib/python3.6/site-packages/zdaemon/zdctl.py new file mode 100644 index 0000000..c8e6b3d --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zdaemon/zdctl.py @@ -0,0 +1,643 @@ +#!python +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""zdctl -- control an application run by zdaemon. + +Usage: python zdctl.py [-C URL] [-S schema.xml] [-h] [-p PROGRAM] + [zdrun-options] [action [arguments]] + +Options: +-b/--backoff-limit SECONDS -- set backoff limit to SECONDS (default 10) +-C/--configure URL -- configuration file or URL +-d/--daemon -- run as a proper daemon; fork a subprocess, close files etc. +-f/--forever -- run forever (by default, exit when backoff limit is exceeded) +-h/--help -- print this usage message and exit +-t/--transcript FILE -- log file where to redirect stdout and stderr +-l/--logfile -- log file to be read by logtail command +-p/--program PROGRAM -- the program to run +-S/--schema XML Schema -- XML schema for configuration file +-T/--start-timeout SECONDS -- Start timeout when a test program is used +-s/--socket-name SOCKET -- Unix socket name for client (default "zdsock") +-u/--user USER -- run as this user (or numeric uid) +-m/--umask UMASK -- use this umask for daemon subprocess (default is 022) +-x/--exit-codes LIST -- list of fatal exit codes (default "0,2") +--version -- print zdaemon version and exit +-z/--directory DIRECTORY -- directory to chdir to when using -d (default off) +action [arguments] -- see below + +Actions are commands like "start", "stop" and "status". Use the +action "help" to find out about available actions. 
+""" +from __future__ import print_function + +import os +import os.path +import re +import cmd +import sys +import time +import signal +import socket +import stat + +if __name__ == "__main__": + # Add the parent of the script directory to the module search path + # (but only when the script is run from inside the zdaemon package) + from os.path import dirname, basename, abspath, normpath + scriptdir = dirname(normpath(abspath(sys.argv[0]))) + if basename(scriptdir).lower() == "zdaemon": + sys.path.append(dirname(scriptdir)) + here = os.path.dirname(os.path.realpath(__file__)) + swhome = os.path.dirname(here) + for parts in [("src",), ("lib", "python"), ("Lib", "site-packages")]: + d = os.path.join(swhome, *(parts + ("zdaemon",))) + if os.path.isdir(d): + d = os.path.join(swhome, *parts) + sys.path.insert(0, d) + break + +from zdaemon.zdoptions import RunnerOptions, name2signal + + +def string_list(arg): + return arg.split() + + +class ZDCtlOptions(RunnerOptions): + + __doc__ = __doc__ + + positional_args_allowed = True + + def __init__(self): + RunnerOptions.__init__(self) + self.add("schemafile", short="S:", long="schema=", + default="schema.xml", + handler=self.set_schemafile) + self.add("program", "runner.program", "p:", "program=", + handler=string_list, + required="no program specified; use -p or -C") + self.add("logfile", "runner.logfile", "l:", "logfile=") + self.add("start_timeout", "runner.start_timeout", + "T:", "start-timeout=", int, default=300) + self.add("python", "runner.python") + self.add("zdrun", "runner.zdrun") + programname = os.path.basename(sys.argv[0]) + base, ext = os.path.splitext(programname) + if ext == ".py": + programname = base + self.add("prompt", "runner.prompt", default=(programname + ">")) + + def realize(self, *args, **kwds): + + RunnerOptions.realize(self, *args, **kwds) + + # Maybe the config file requires -i or positional args + if not self.args: + self.usage("an action argument is required") + + # Where's python? 
+ if not self.python: + self.python = sys.executable + + def set_schemafile(self, file): + self.schemafile = file + + +class ZDCmd(cmd.Cmd): + + def __init__(self, options): + self.options = options + self.prompt = self.options.prompt + ' ' + cmd.Cmd.__init__(self) + self.get_status() + if self.zd_status: + m = re.search("(?m)^args=(.*)$", self.zd_status) + if m: + s = m.group(1) + args = eval(s, {"__builtins__": {}}) + program = self.options.program + if args[:len(program)] != program: + print("WARNING! zdrun is managing a different program!") + print("our program =", program) + print("daemon's args =", args) + + if options.configroot is not None: + env = getattr(options.configroot, 'environment', None) + if env is not None: + if getattr(env, 'mapping', None) is not None: + for k, v in env.mapping.items(): + os.environ[k] = v + elif isinstance(env, dict): + for k, v in env.items(): + os.environ[k] = v + + self.create_rundir() + self.create_socket_dir() + self.set_uid() + + def create_rundir(self): + if self.options.directory is None: + return + self.create_directory(self.options.directory) + + def create_socket_dir(self): + dir = os.path.dirname(self.options.sockname) + if not dir: + return + self.create_directory(dir) + + def create_directory(self, directory): + if os.path.isdir(directory): + return + os.mkdir(directory) + uid = os.geteuid() + if uid == 0 and uid != self.options.uid: + # Change owner of directory to target + os.chown(directory, self.options.uid, self.options.gid) + + def set_uid(self): + user = self.options.user + if user is None: + return + + import pwd + try: + uid = int(user) + except ValueError: + try: + pwrec = pwd.getpwnam(user) + except KeyError: + self.options.usage("username %r not found" % user) + uid = pwrec.pw_uid + else: + try: + pwrec = pwd.getpwuid(uid) + except KeyError: + self.options.usage("uid %r not found" % user) + + # See if we're already that user: + euid = os.geteuid() + if euid != 0: + if euid != uid: + 
self.options.usage("only root can use -u USER to change users") + return + + # OK, we have to set user and groups: + os.setgid(pwrec.pw_gid) + + import grp + user = pwrec.pw_name + os.setgroups( + sorted(g.gr_gid for g in grp.getgrall() # sort for tests + if user in g.gr_mem) + ) + os.setuid(uid) + + def emptyline(self): + # We don't want a blank line to repeat the last command. + # Showing status is a nice alternative. + self.do_status() + + def send_action(self, action): + """Send an action to the zdrun server and return the response. + + Return None if the server is not up or any other error happened. + """ + sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) + try: + sock.connect(self.options.sockname) + sock.send(action.encode() + b"\n") + sock.shutdown(1) # We're not writing any more + response = b"" + while 1: + data = sock.recv(1000) + if not data: + break + response += data + return response.decode() + except socket.error: + return None + finally: + sock.close() + + zd_testing = 0 + + def get_status(self): + self.zd_up = 0 + self.zd_pid = 0 + self.zd_should_be_up = 0 + self.zd_status = None + resp = self.send_action("status") + if not resp: + return resp + m = re.search("(?m)^application=(\d+)$", resp) + if not m: + return resp + self.zd_up = 1 + self.zd_pid = int(m.group(1)) + self.zd_status = resp + m = re.search("(?m)^should_be_up=(\d+)$", resp) + if m: + self.zd_should_be_up = int(m.group(1)) + else: + self.zd_should_be_up = 1 + m = re.search("(?m)^testing=(\d+)$", resp) + if m: + self.zd_testing = int(m.group(1)) + else: + self.zd_testing = 0 + + return resp + + def awhile(self, cond, msg): + n = 0 + was_running = False + try: + if self.get_status(): + was_running = True + + while not cond(n): + sys.stdout.write(". 
") + sys.stdout.flush() + time.sleep(1) + n += 1 + if self.get_status(): + was_running = True + elif (was_running or n > 10) and not cond(n): + print("\ndaemon manager not running") + return 1 + + except KeyboardInterrupt: + print("^C") + print("\n" + msg % self.__dict__) + + def _start_cond(self, n): + if (n > self.options.start_timeout): + print('\nProgram took too long to start') + sys.exit(1) + return self.zd_pid and not self.zd_testing + + def do_start(self, arg): + self.get_status() + if not self.zd_up: + if self.options.zdrun: + args = [self.options.python, self.options.zdrun] + else: + args = [self.options.python, sys.argv[0]] + os.environ['DAEMON_MANAGER_MODE'] = '1' + + args += self._get_override("-S", "schemafile") + args += self._get_override("-C", "configfile") + args += self._get_override("-b", "backofflimit") + args += self._get_override("-f", "forever", flag=1) + args += self._get_override("-s", "sockname") + args += self._get_override("-u", "user") + args += self._get_override("-t", "transcript") + if self.options.umask: + args += self._get_override("-m", "umask", + oct(self.options.umask)) + args += self._get_override( + "-x", "exitcodes", ",".join(map(str, self.options.exitcodes))) + args += self._get_override("-z", "directory") + args.extend(self.options.program) + args.extend(self.options.args[1:]) + if self.options.daemon: + flag = os.P_NOWAIT + else: + flag = os.P_WAIT + os.spawnvp(flag, args[0], args) + elif not self.zd_pid: + self.send_action("start") + else: + print("daemon process already running; pid=%d" % self.zd_pid) + return + if self.options.daemon: + return self.awhile( + self._start_cond, "daemon process started, pid=%(zd_pid)d") + + def _get_override(self, opt, name, svalue=None, flag=0): + value = getattr(self.options, name) + if value is None: + return [] + configroot = self.options.configroot + if configroot is not None: + for n, cn in self.options.names_list: + if n == name and cn: + v = configroot + for p in cn.split("."): + 
v = getattr(v, p, None) + if v is None: + break + if v == value: # We didn't override anything + return [] + break + if flag: + if value: + args = [opt] + else: + args = [] + else: + if svalue is None: + svalue = str(value) + args = [opt, svalue] + return args + + def help_start(self): + print("start -- Start the daemon process.") + print(" If it is already running, do nothing.") + + def do_stop(self, arg): + self.get_status() + if not self.zd_up: + print("daemon manager not running") + elif not self.zd_pid and not self.zd_should_be_up: + print("daemon process not running") + else: + self.send_action("stop") + self.awhile(lambda n: not self.zd_pid, "daemon process stopped") + + def help_stop(self): + print("stop -- Stop the daemon process.") + print(" If it is not running, do nothing.") + + def do_reopen_transcript(self, arg): + if not self.zd_up: + print("daemon manager not running") + else: + self.send_action("reopen_transcript") + + def help_reopen_transcript(self): + print("reopen_transcript -- Reopen the transcript log file.") + print(" Use after log rotation.") + + def do_restart(self, arg): + self.get_status() + pid = self.zd_pid + if not pid: + self.do_start(arg) + else: + self.send_action("restart") + self.awhile(lambda n: (self.zd_pid != pid) and self._start_cond(n), + "daemon process restarted, pid=%(zd_pid)d") + + def help_restart(self): + print("restart -- Stop and then start the daemon process.") + + def do_kill(self, arg): + if not arg: + arg = 'SIGTERM' + try: + signame = name2signal(arg) + except ValueError: + print("invalid signal", repr(arg)) + return + self.get_status() + if not self.zd_pid: + print("daemon process not running") + return + sig = getattr(signal, signame) + print("kill(%d, %d)" % (self.zd_pid, sig)) + try: + os.kill(self.zd_pid, sig) + except os.error as msg: + print("Error:", msg) + else: + print("signal %s sent to process %d" % (signame, self.zd_pid)) + + def help_kill(self): + print("kill [sig] -- Send signal sig to the daemon 
process.") + print(" The default signal is SIGTERM.") + + def do_wait(self, arg): + self.awhile(lambda n: not self.zd_pid, "daemon process stopped") + self.do_status() + + def help_wait(self): + print("wait -- Wait for the daemon process to exit.") + + def do_status(self, arg=""): + status = 0 + if arg not in ["", "-l"]: + print("status argument must be absent or -l") + return 1 + self.get_status() + if not self.zd_up: + print("daemon manager not running") + status = 3 + elif not self.zd_pid: + print("daemon manager running; daemon process not running") + else: + print("program running; pid=%d" % self.zd_pid) + if arg == "-l" and self.zd_status: + print(self.zd_status) + return status + + def help_status(self): + print("status [-l] -- Print status for the daemon process.") + print(" With -l, show raw status output as well.") + + def do_show(self, arg): + if not arg: + arg = "options" + try: + method = getattr(self, "show_" + arg) + except AttributeError as err: + print(err) + self.help_show() + return + method() + + def show_options(self): + print("zdctl/zdrun options:") + print("schemafile: ", repr(self.options.schemafile)) + print("configfile: ", repr(self.options.configfile)) + print("zdrun: ", repr(self.options.zdrun)) + print("python: ", repr(self.options.python)) + print("program: ", repr(self.options.program)) + print("backofflimit:", repr(self.options.backofflimit)) + print("daemon: ", repr(self.options.daemon)) + print("forever: ", repr(self.options.forever)) + print("sockname: ", repr(self.options.sockname)) + print("exitcodes: ", repr(self.options.exitcodes)) + print("user: ", repr(self.options.user)) + umask = self.options.umask + if not umask: + # Here we're just getting the current umask so we can report it: + umask = os.umask(0o777) + os.umask(umask) + print("umask: ", oct(umask)) + print("directory: ", repr(self.options.directory)) + print("logfile: ", repr(self.options.logfile)) + print("transcript: ", repr(self.options.transcript)) + + def 
show_python(self): + print("Python info:") + version = sys.version.replace("\n", "\n ") + print("Version: ", version) + print("Platform: ", sys.platform) + print("Executable: ", repr(sys.executable)) + print("Arguments: ", repr(sys.argv)) + print("Directory: ", repr(os.getcwd())) + print("Path:") + for dir in sys.path: + print(" " + repr(dir)) + + def show_all(self): + self.show_options() + print() + self.show_python() + + def help_show(self): + print("show options -- show zdctl options") + print("show python -- show Python version and details") + print("show all -- show all of the above") + + def do_logreopen(self, arg): + self.do_reopen_transcript('') + self.do_kill('USR2') + + def help_logreopen(self): + print("logreopen -- Send a SIGUSR2 signal to the daemon process.") + print(" This is designed to reopen the log file.") + print(" Also reopens the transcript log file.") + + def do_logtail(self, arg): + if not arg: + arg = self.options.logfile + if not arg: + print("No default log file specified; use logtail ") + return + try: + helper = TailHelper(arg) + helper.tailf() + except KeyboardInterrupt: + print() + except IOError as msg: + print(msg) + except OSError as msg: + print(msg) + + def help_logtail(self): + print("logtail [logfile] -- Run tail -f on the given logfile.") + print(" A default file may exist.") + print(" Hit ^C to exit this mode.") + + def do_foreground(self, arg): + self.get_status() + pid = self.zd_pid + if pid: + print( + "To run the program in the foreground, please stop it first.") + return + + program = self.options.program + self.options.args[1:] + print(" ".join(program)) + sys.stdout.flush() + try: + os.spawnlp(os.P_WAIT, program[0], *program) + except KeyboardInterrupt: + print() + + def do_fg(self, arg): + self.do_foreground(arg) + + def help_foreground(self): + print("foreground -- Run the program in the forground.") + print("fg -- an alias for foreground.") + + def help_fg(self): + self.help_foreground() + + def help_help(self): + 
print("help -- Print a list of available actions.") + print("help -- Print help for .") + + +class TailHelper: + + MAX_BUFFSIZE = 1024 + + def __init__(self, fname): + self.f = open(fname, 'r') + + def tailf(self): + sz, lines = self.tail(10) + for line in lines: + sys.stdout.write(line) + sys.stdout.flush() + while 1: + newsz = self.fsize() + bytes_added = newsz - sz + if bytes_added < 0: + sz = 0 + print("==> File truncated <==") + bytes_added = newsz + if bytes_added > 0: + self.f.seek(-bytes_added, 2) + bytes = self.f.read(bytes_added) + sys.stdout.write(bytes) + sys.stdout.flush() + sz = newsz + time.sleep(1) + + def tail(self, max=10): + self.f.seek(0, 2) + pos = sz = self.f.tell() + + lines = [] + bytes = [] + num_bytes = 0 + + while 1: + if pos == 0: + break + self.f.seek(pos) + byte = self.f.read(1) + if byte == '\n': + if len(lines) == max: + break + bytes.reverse() + line = ''.join(bytes) + line and lines.append(line) + bytes = [] + bytes.append(byte) + num_bytes = num_bytes + 1 + if num_bytes > self.MAX_BUFFSIZE: + break + pos = pos - 1 + lines.reverse() + return sz, lines + + def fsize(self): + return os.fstat(self.f.fileno())[stat.ST_SIZE] + + +def main(args=None, options=None, cmdclass=ZDCmd): + if args is None: + args = sys.argv[1:] + + if os.environ.get('DAEMON_MANAGER_MODE'): + del os.environ['DAEMON_MANAGER_MODE'] + import zdaemon.zdrun + return zdaemon.zdrun.main(args) + + if options is None: + options = ZDCtlOptions() + options.realize(args) + c = cmdclass(options) + sys.exit(c.onecmd(" ".join(options.args))) + +if __name__ == "__main__": + main() diff --git a/thesisenv/lib/python3.6/site-packages/zdaemon/zdoptions.py b/thesisenv/lib/python3.6/site-packages/zdaemon/zdoptions.py new file mode 100644 index 0000000..2ba61bb --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zdaemon/zdoptions.py @@ -0,0 +1,528 @@ +############################################################################## +# +# Copyright (c) 2003 Zope Foundation and 
Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Option processing for zdaemon and related code.""" +from __future__ import print_function +import os +import sys +import getopt +import signal + +import pkg_resources +import ZConfig + + +class ZDOptions: + """a zdaemon script. + + Usage: python + + + + + + + + + + + + + + + + + + + + + + + + + Andorra + Forenede Arabiske Emirater + Afghanistan + Antigua og Barbuda + Anguilla + Albanien + Armenien + Hollandske Antiller + Angola + Antarktis + Argentina + Amerikansk Samoa + Østrig + Australien + Aruba + Aserbajdsjan + Bosnien-Hercegovina + Barbados + Bangladesh + Belgien + Burkina Faso + Bulgarien + Bahrain + Burundi + Benin + Bermuda + Brunei Darussalam + Bolivia + Brasilien + Bahamas + Bhutan + Bouvetø + Botswana + Hviderusland + Belize + Canada + Cocos-øerne (Keelingøerne) + Den Demokratiske Republik Congo + Centralafrikanske Republik + Congo + Schweiz + Elfenbenskysten + Cook-øerne + Chile + Cameroun + Kina + Colombia + Costa Rica + Cuba + Kap Verde + Juleøen + Cypern + Tjekkiet + Tyskland + Djibouti + Danmark + Dominica + Den Dominikanske Republik + Algeriet + Ecuador + Estland + Egypten + Vestsahara + Eritrea + Spanien + Etiopien + Finland + Fiji-øerne + Falklandsøerne + Mikronesiens Forenede Stater + Færøerne + Frankrig + en + Gabon + Storbritannien + Grenada + Georgien + Fransk Guyana + Ghana + Gibraltar + Grønland + Gambia + Guinea + Guadeloupe + Ækvatorialguinea + Grækenland + South Georgia og De Sydlige Sandwichøer + Guatemala + Guam + 
Guinea-Bissau + Guyana + SAR Hongkong + Heard- og McDonald-øerne + Honduras + Kroatien + Haiti + Ungarn + Indonesien + Irland + Israel + Indien + Det Britiske Territorium i Det Indiske Ocean + Irak + Iran + Island + Italien + Jamaica + Jordan + Japan + Kenya + Kirgisistan + Cambodja + Kiribati + Comorerne + Saint Kitts og Nevis + Nordkorea + Sydkorea + Kuwait + Caymanøerne + Kasakhstan + Laos + Libanon + Saint Lucia + Liechtenstein + Sri Lanka + Liberia + Lesotho + Litauen + Luxembourg + Letland + Libyen + Marokko + Monaco + Republikken Moldova + Madagaskar + Marshalløerne + Republikken Makedonien + Mali + Myanmar + Mongoliet + SAR Macao + Nordmarianerne + Martinique + Mauretanien + Montserrat + Malta + Mauritius + Maldiverne + Malawi + Mexico + Malaysia + Mozambique + Namibia + Ny Caledonien + Niger + Norfolk Island + Nigeria + Nicaragua + Holland + Norge + Nepal + Nauru + Niue + New Zealand + Oman + Panama + Peru + Fransk Polynesien + Papua Ny Guinea + Filippinerne + Pakistan + Polen + Saint Pierre og Miquelon + Pitcairn + Puerto Rico + De palæstinensiske områder + Portugal + Palau + Paraguay + Qatar + Reunion + Rumænien + Rusland + Rwanda + Saudi-Arabien + Salomonøerne + Seychellerne + Sudan + Sverige + Singapore + St. Helena + Slovenien + Svalbard og Jan Mayen + Slovakiet + Sierra Leone + San Marino + Senegal + Somalia + Serbien + Surinam + São Tomé og Príncipe + El Salvador + Syrien + Swaziland + Turks- og Caicosøerne + Tchad + Franske Besiddelser i Det Sydlige Indiske Ocean + Togo + Thailand + Tadsjikistan + Tokelau + Timor-Leste + Turkmenistan + Tunesien + Tonga + Tyrkiet + Trinidad og Tobago + Tuvalu + Taiwan + Tanzania + Ukraine + Uganda + De Mindre Amerikanske Oversøiske Øer + USA + Uruguay + Usbekistan + Vatikanstaten + St. 
Vincent og Grenadinerne + Venezuela + De britiske jomfruøer + De amerikanske jomfruøer + Vietnam + Vanuatu + Wallis og Futunaøerne + Samoa + Yemen + Mayotte + Jugoslavien + Sydafrika + Zambia + Zimbabwe + + + Kalender + Sortering + Valuta + + + Buddhistisk kalender + Kinesisk kalender + Gregoriansk kalender + Jødisk kalender + Islamisk kalender + Verdslig islamisk kalender + Japansk kalender + Direkte sorteringsrækkefølge + Sorteringsrækkefølge i telefonbøger + Pinyin-baseret sorteringsrækkefølge + Stroke-baseret sorteringsrækkefølge + Traditionel sorteringsrækkefølge + + + + [a-z æ å ø á é í ó ú ý] + + + GuMtkHmsSEDFwWahKzUeygAZ + + + + + + jan + feb + mar + apr + maj + jun + jul + aug + sep + okt + nov + dec + + + J + F + M + A + M + J + J + A + S + O + N + D + + + januar + februar + marts + april + maj + juni + juli + august + september + oktober + november + december + + + + + + + søn + man + tir + ons + tor + fre + lør + + + S + M + T + O + T + F + L + + + søndag + mandag + tirsdag + onsdag + torsdag + fredag + lørdag + + + + + + + + + + f.Kr. + e.Kr. + + + + + + + EEEE dd MMMM yyyy + + + + + d. 
MMM yyyy + + + + + dd-MM-yyyy + + + + + dd-MM-yy + + + + + + + + HH:mm:ss z + + + + + HH:mm:ss z + + + + + HH:mm:ss + + + + + HH:mm + + + + + + + {1} {0} + + + + + + + + + Pacific-normaltid + Pacific-sommertid + + + PST + PDT + + Los Angeles + + + + Pacific-normaltid + Pacific-sommertid + + + PST + PDT + + Los Angeles + + + + Mountain-normaltid + Mountain-sommertid + + + MST + MDT + + Denver + + + + Mountain-normaltid + Mountain-sommertid + + + MST + MDT + + Denver + + + + Mountain-normaltid + Mountain-normaltid + + + MST + MST + + Phoenix + + + + Mountain-normaltid + Mountain-normaltid + + + MST + MST + + Phoenix + + + + Central-normaltid + Central-sommertid + + + CST + CDT + + Chicago + + + + Central-normaltid + Central-sommertid + + + CST + CDT + + Chicago + + + + Eastern-normaltid + Eastern-sommertid + + + EST + EDT + + New York + + + + Eastern-normaltid + Eastern-sommertid + + + EST + EDT + + New York + + + + Eastern-normaltid + Eastern-normaltid + + + EST + EST + + Indianapolis + + + + Eastern-normaltid + Eastern-normaltid + + + EST + EST + + Indianapolis + + + + Hawaii-normaltid + Hawaii--normaltid + + + HST + HST + + Honolulu + + + + Hawaii-normaltid + Hawaii-normaltid + + + HST + HST + + Honolulu + + + + Alaska-normaltid + Alaska-sommertid + + + AST + ADT + + Anchorage + + + + Alaska-normaltid + Alaska-sommertid + + + AST + ADT + + Anchorage + + + + Atlantic-normaltid + Atlantic-sommertid + + + AST + ADT + + Halifax + + + + Newfoundland-normaltid + Newfoundland-sommertid + + + CNT + CDT + + St. Johns + + + + Newfoundland-normaltid + Newfoundland-sommertid + + + CNT + CDT + + St. 
Johns + + + + Mellemeuropæisk normaltid + Mellemeuropæisk sommertid + + + CET + CEST + + Paris + + + + Mellemeuropæisk normaltid + Mellemeuropæisk sommertid + + + CET + CEST + + Paris + + + + Verdenstid + Verdenstid + + + GMT + GMT + + London + + + + Verdenstid + Verdenstid + + + GMT + GMT + + Casablanca + + + + Israelsk normaltid + Israelsk sommertid + + + IST + IDT + + Jerusalem + + + + Japansk normaltid + Japansk normaltid + + + JST + JST + + Tokyo + + + + Japansk normaltid + Japansk normaltid + + + JST + JST + + Tokyo + + + + Østeuropæisk normaltid + Østeuropæisk sommertid + + + EET + EEST + + Bukarest + + + + Kinesisk normaltid + Kinesisk normaltid + + + CTT + CDT + + Shanghai + + + + Kinesisk normaltid + Kinesisk normaltid + + + CTT + CDT + + Shanghai + + + + + + , + . + ; + % + 0 + # + + + - + E + + + + + + + Andorransk diner + ADD + + + Andorransk peseta + ADP + + + Dirham fra de Forenede Arabiske Emirater + AED + + + Affars og Issas franc + AIF + + + Albansk lek (1946-1961) + ALK + + + Albansk lek + lek + + + Albansk lek + ALV + + + Armensk dram + dram + + + Gylden fra De Nederlandske Antiller + NA f. 
+ + + Angolansk kwanza + AOA + + + Angolansk kwanza (1977-1990) + AOK + + + Ny angolansk kwanza (1990-2000) + AON + + + Angolansk kwanza reajustado (1995-1999) + AOR + + + Angolansk escudo + AOS + + + Argentinsk austral + ARA + + + Argentinsk peso moneda nacional + ARM + + + Argentinsk peso (1983-1985) + ARP + + + Argentinsk peso + Arg$ + + + Østrigsk schilling + ATS + + + Australsk dollar + $A + + + Australsk pund + AUP + + + Arubansk gylden + AWG + + + Aserbajdsjansk manat + AZM + + + Bosnien-Hercegovinsk dinar + BAD + + + Bosnien-Hercegovinsk konvertibel mark + KM + + + Ny bosnien-hercegovinsk dinar + BAN + + + Barbadisk dollar + BDS$ + + + Bangladeshisk taka + Tk + + + Belgisk franc (konvertibel) + BEC + + + Belgisk franc + BF + + + Belgisk franc (financial) + BEL + + + Bulgarsk hard lev + lev + + + Bulgarsk socialist lev + BGM + + + Ny Bulgarsk lev + BGN + + + Bulgarsk lev (1879-1952) + BGO + + + Bahrainsk dinar + BD + + + Burundisk franc + Fbu + + + Bermudansk dollar + Ber$ + + + Bermudansk pund + BMP + + + Bruneisk dollar + BND + + + Boliviansk peso + BOP + + + Boliviansk mvdol + BOV + + + Brasiliansk cruzeiro novo (1967-1986) + BRB + + + Brasiliansk cruzado + BRC + + + Brasiliansk cruzeiro (1990-1993) + BRE + + + Brasiliansk real + R$ + + + Brasiliansk cruzado novo + BRN + + + Brasiliansk cruzeiro + BRR + + + Brasiliansk cruzeiro (1942-1967) + BRZ + + + Bahamansk dollar + BSD + + + Bahamansk pund + BSP + + + Bhutansk ngultrum + Nu + + + Bhutansk rupee + BTR + + + Burmesisk kyat + BUK + + + Burmesisk rupee + BUR + + + Botswansk pula + BWP + + + Ny hviderussisk rubel (1994-1999) + BYB + + + Hviderussisk rubel (1992-1994) + BYL + + + Hviderussisk rubel + Rbl + + + Belizisk dollar + BZ$ + + + Britisk Honduras dollar + BZH + + + Canadisk dollar + Can$ + + + Congolesisk franc congolais + CDF + + + Congolesisk franc + CDG + + + Congolesisk Zaire + CDL + + + CFA-franc fra den Centralafrikanske republik + CFF + + + Schweizisk franc + SwF + + + Dollar fra Cookøerne + 
CKD + + + Chilensk condor + CLC + + + Chilensk escudo + CLE + + + Chilensk unidades de fomento + CLF + + + Chilensk peso + Ch$ + + + Camerounsk CFA-franc + CMF + + + Kinesisk jen min piao yuan + CNP + + + Kinesisk yuan renminbi + Y + + + Colombiansk papirpeso + COB + + + Congolesisk CFA-franc + COF + + + Colombiansk peso + Col$ + + + Costaricansk colon + C + + + Tjekkoslovakisk koruna + CSC + + + Tjekkoslovakisk hard koruna + CSK + + + Cubansk peso + CUP + + + Kapverdisk escudo + CVEsc + + + Curacaosk gylden + CWG + + + Cypriotisk pund + £C + + + Tjekkisk koruna + CZK + + + Østtysk mark + DDM + + + Tysk mark + DEM + + + Tysk sperrmark + DES + + + Djiboutisk franc + DF + + + Dansk krone + kr + + + Dominikansk peso + RD$ + + + Algerisk dinar + DA + + + Ny algerisk franc + DZF + + + Algerisk franc germinal + DZG + + + Ecuadoriansk sucre + ECS + + + Estisk kroon + EEK + + + Egyptisk pund + EGP + + + Eritreisk nakfa + ERN + + + Spansk peseta + ESP + + + Etiopisk birr + Br + + + Etiopisk dollar + ETD + + + Euro + + + + Finsk mark + FIM + + + Finsk mark (1860-1962) + FIN + + + Fijiansk dollar + F$ + + + Fijiansk pund + FJP + + + Pund fra Falklandsøerne + FKP + + + Færøsk krone + FOK + + + Fransk franc + FRF + + + Gabonesisk CFA-franc + GAF + + + Britisk pund + £ + + + Georgisk kupon larit + GEK + + + Georgisk lari + lari + + + Ghanesisk cedi + GHC + + + Gammel ghanesisk cedi + GHO + + + Ghanesisk pund + GHP + + + Ghanesisk revalueret cedi + GHR + + + Gibraltarisk pund + GIP + + + Grønlandsk krone + GLK + + + Gambisk dalasi + GMD + + + Gambisk pund + GMP + + + Guineansk franc + GF + + + Guineansk franc (1960-1972) + GNI + + + Guineansk syli + GNS + + + Guadeloupsk franc + GPF + + + Ækvatorialguineask ekwele guineana + GQE + + + Ækvatorialguineask franco + GQF + + + Ækvatorialguineask peseta guineana + GQP + + + Græsk drachma + GRD + + + Ny græsk drachma + GRN + + + Guatemalansk quetzal + Q + + + Fransk-guyansk franc guiana + GUF + + + Portugisisk guinea escudo + GWE + + + 
Portugisisk guinea mil reis + GWM + + + Guineansk peso + GWP + + + Guyansk dollar + G$ + + + Honduransk lempira + L + + + Kroatisk dinar + HRD + + + Kroatisk kuna + HRK + + + Haitisk gourde + HTG + + + Ungarsk forint + Ft + + + Nordirsk pund + IBP + + + Indonesisk nica guilder + IDG + + + Indonesisk java rupiah + IDJ + + + Ny indonesisk rupiah + IDN + + + Indonesisk pupiah + Rp + + + Irsk pund + IR£ + + + Israelsk shekel + ILL + + + Israelsk pund + ILP + + + Ny israelsk shekel + ILS + + + Indisk rupee + =0#Rs.|1#Re.|1<Rs. + + + Irakisk dinar + ID + + + Iransk rial + RI + + + Islandsk krona + ISK + + + Italiensk lira + + + + Pund fra Jersey + JEP + + + Jamaicansk dollar + J$ + + + Jamaicansk pund + JMP + + + Jordansk dinar + JD + + + Japansk yen + ¥ + + + Kenyansk shilling + K Sh + + + Kirgisisk som + som + + + Gammel cambodjansk riel + KHO + + + Cambodjansk riel + CR + + + Comorisk franc + CF + + + Nordkoreansk won + KPW + + + Sydkoreansk hwan + KRH + + + Gammel sydkoreansk won + KRO + + + Sydkoreansk won + KRW + + + Kuwaitisk dinar + KD + + + Dollar fra Caymanøerne + KYD + + + Kasakhisk rubel + KZR + + + Kasakhisk tenge + T + + + Laotisk kip + LAK + + + Libanesisk pund + LL + + + Liechtensteinsk franc + LIF + + + Srilankansk rupee + SL Re + + + Ceylonesisk rupee + LNR + + + Liberisk dollar + LRD + + + Lesothisk loti + M + + + Litauisk lita + LTL + + + Litauisk talonas + LTT + + + Luxembourgsk franc + LUF + + + Lettisk lats + LVL + + + Lettisk rubel + LVR + + + Libysk dinar + LD + + + Libysk pund + LYP + + + Marokkansk dirham + MAD + + + Marokkansk franc + MAF + + + Ny monegaskisk franc + MCF + + + Monegaskisk franc germinal + MCG + + + Moldovisk leu cupon + MDC + + + Moldovisk leu + MDL + + + Moldovisk ruble cupon + MDR + + + Madagaskisk ariary + MGA + + + Madagaskisk franc + MGF + + + Dollar fra Marshalløerne + MHD + + + Makedonsk denar + MDen + + + Makedonsk denar (1992-1993) + MKN + + + Malisk franc + MLF + + + Myanmarsk kyat + MMK + + + Mongolsk tugrik + Tug + 
+ + Macaosk pataca + MOP + + + Martiniquisk franc + MQF + + + Mauritansk ouguiya + UM + + + Maltesisk lira + Lm + + + Maltesisk pund + MTP + + + Mauritisk rupee + MUR + + + Maldivisk rupee + MVP + + + Maldivisk rufiyaa + MVR + + + Malawisk kwacha + MK + + + Malawisk pund + MWP + + + Mexicansk peso + MEX$ + + + Mexicansk silver peso (1861-1992) + MXP + + + Malaysisk ringgit + RM + + + Mozambiquisk escudo + MZE + + + Mozambiquisk metical + Mt + + + Namibisk dollar + N$ + + + Nykaledonsk franc germinal + NCF + + + Nigeriansk naira + NGN + + + Nigeriansk pund + NGP + + + CFP-franc fra Ny-Hebriderne + NHF + + + Nicaraguansk cordoba + NIC + + + Nicaraguansk gold cordoba + NIG + + + Nicaraguansk cordoba oro + NIO + + + Hollandsk guilder + NLG + + + Norsk krone + NKr + + + Nepalesisk rupee + Nrs + + + New Zealandsk dollar + $NZ + + + New Zealandsk pund + NZP + + + Omansk rial + RO + + + Omansk rial saidi + OMS + + + Panamansk balboa + PAB + + + Transdniestrisk rubelkupon + PDK + + + Ny transdniestrisk rubel + PDN + + + Transdniestrisk rubel + PDR + + + Peruviansk inti + PEI + + + Peruviansk sol nuevo + PEN + + + Peruviansk sol + PES + + + Papuansk kina + PGK + + + Filippinsk peso + PHP + + + Pakistansk rupee + Pra + + + Polsk zloty + Zl + + + Polsk zloty (1950-1995) + PLZ + + + Palæstinensisk pund + PSP + + + Portugisisk conto + PTC + + + Portugisisk escudo + PTE + + + Paraguaysk guarani + PYG + + + Qatarsk rial + QR + + + Rumænsk leu + leu + + + Ny rumænsk leu + RON + + + Russisk rubel + RUB + + + Russisk rubel (1991-1998) + RUR + + + Rwandisk franc + RWF + + + Saudisk riyal + SRl + + + Saudisk sovereign riyal + SAS + + + Salomonsk dollar + SI$ + + + Seychellisk rupee + SR + + + Sudansk dinar + SDD + + + Sudansk pund + SDP + + + Svensk krona + SKr + + + Singaporeansk dollar + S$ + + + Pund fra Saint Helena + SHP + + + Slovensk tolar bons + SIB + + + Slovensk tolar + SIT + + + Slovakisk koruna + Sk + + + Sierraleonsk leone + SLL + + + Lira fra San Marino + SML + + + 
Somalisk shilling + So. Sh. + + + Somalilands shilling + SQS + + + Surinamsk guilder + Sf + + + Skotsk pund + SSP + + + Dobra fra Sao Tome og Principe + Db + + + Escudo fra Sao Tome og Principe + STE + + + Ny sovjetisk rubel + SUN + + + Sovjetisk rubel + SUR + + + Salvadoransk colon + SVC + + + Syrisk pund + LS + + + Swazilandsk lilangeni + E + + + Tchadisk CFA-franc + TDF + + + Thailandsk baht + THB + + + Tadsjikisk rubel + TJR + + + Tadsjikisk somoni + TJS + + + Turkmensk manat + TMM + + + Tunesisk dinar + TND + + + Tongask paʻanga + T$ + + + Tongask pund + TOS + + + Escudo fra Timor + TPE + + + Pataca fra Timor + TPP + + + Tyrkisk lira + TL + + + Dollar fra Trinidad og Tobago + TT$ + + + Gammel dollar fra Trinidad og Tobago + TTO + + + Tuvaluansk dollar + TVD + + + Ny taiwansk dollar + NT$ + + + Tanzanisk shilling + T Sh + + + Ukrainsk grynia + UAH + + + Ukrainsk karbovanetz + UAK + + + Ugandisk shilling (1966-1987) + UGS + + + Ugandisk shilling + U Sh + + + Amerikanske dollar + US$ + + + Amerikansk dollar (næste dag) + USN + + + Amerikansk dollar (samme dag) + USS + + + Uruguaysk peso fuerte + UYF + + + Uruguaysk peso (1975-1993) + UYP + + + Uruguaysk peso uruguayo + Ur$ + + + Usbekisk coupon som + UZC + + + Usbekisk sum + UZS + + + Vatikansk lira + VAL + + + Nordvietnamesisk piastre dong viet + VDD + + + Ny nordvietnamesisk dong + VDN + + + Nordvietnamesisk viet minh piastre dong viet + VDP + + + Venezuelansk bolivar + Be + + + Dollar fra De Britiske Jomfruøer + VGD + + + Vietnamesisk dong + VND + + + Ny vietnamesisk dong + VNN + + + Vietnamesisk republikansk dong + VNR + + + Vietnamesisk national dong + VNS + + + Vanuaisk vatu + VT + + + Samoansk pund + WSP + + + Samoansk tala + WST + + + Beninsk CFA-franc + XAF + + + Guld + XAU + + + Øst-karaibisk dollar + EC$ + + + Fransk guldfranc + XFO + + + Fransk UIC-franc + XFU + + + Islamisk dinar + XID + + + CFP-franc + CFPF + + + Yemenitisk dinar + YDD + + + Yemenitisk imadi riyal + YEI + + + Yemenitisk rial + YRl + 
+ + Jugoslavisk hard dinar + YUD + + + Jugoslavisk federation dinar + YUF + + + Jugoslavisk 1994 dinar + YUG + + + Jugoslavisk noviy dinar + YUM + + + Jugoslavisk convertible dinar + YUN + + + Jugoslavisk october dinar + YUO + + + Jugoslavisk reformed dinar + YUR + + + Sydafrikansk rand (financial) + ZAL + + + Sydafrikansk pund + ZAP + + + Sydafrikansk rand + R + + + Zambisk kwacha + ZMK + + + Zambisk pund + ZMP + + + Ny zairisk zaire + ZRN + + + Zairisk naire + ZRZ + + + Zimbabwisk dollar + Z$ + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/da_DK.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/da_DK.xml new file mode 100644 index 0000000..d2cb485 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/da_DK.xml @@ -0,0 +1,40 @@ + + + + + + + + + + + + + + #,##0.###;-#,##0.### + + + + + + + #E0 + + + + + + + #,##0% + + + + + + + ¤ #,##0.00;¤ -#,##0.00 + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/de.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/de.xml new file mode 100644 index 0000000..cd644b4 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/de.xml @@ -0,0 +1,2069 @@ + + + + + + + + + + + Afar + Abchasisch + Avestisch + Afrikaans + Akan + Amharisch + Aragonesisch + Arabisch + Assamesisch + Awarisch + Aymará-Sprache + Aserbaidschanisch + Baschkirisch + Weißrussisch + Bulgarisch + Biharisch + Bislama + Bambara-Sprache + Bengalisch + Tibetisch + Bretonisch + Bosnisch + Katalanisch + Tschetschenisch + Chamorro-Sprache + Cherokee + Korsisch + Cree + Tschechisch + Kirchenslawisch + Tschuwaschisch + Kymrisch + Dänisch + Deutsch + Maledivisch + Bhutanisch + Ewe-Sprache + Griechisch + Englisch + Mittelenglisch + Esperanto + Spanisch + Estnisch + Baskisch + Persisch + Ful + Finnisch + Fidschianisch + Färöisch + Französisch + Friesisch + Irisch + Schottisch-Gälisch + Altäthiopisch + Galizisch + Guarani + 
Gujarati + Manx + Hausa + Hawaiianisch + Hebräisch + Hindi + Hiri-Motu + Kroatisch + Kreolisch + Ungarisch + Armenisch + Herero-Sprache + Interlingua + Indonesisch + Interlingue + Igbo-Sprache + Inupiak + Isländisch + Italienisch + Inukitut + Japanisch + Javanisch + Georgisch + Kongo + Kikuyu-Sprache + Kwanyama + Kasachisch + Grönländisch + Kambodschanisch + Kannada + Koreanisch + Konkani + Kanuri-Sprache + Kaschmirisch + Kurdisch + Komi-Sprache + Kornisch + Kirgisisch + Latein + Luxemburgisch + Ganda-Sprache + Limburgisch + Lingala + Laotisch + Litauisch + Luba + Lettisch + Madagassisch + Marschallesisch + Maori + Mazedonisch + Malayalam + Mongolisch + Moldauisch + Marathi + Malaiisch + Maltesisch + Birmanisch + Nauruisch + Norwegisch Bokmål + Ndebele-Sprache (Nord) + Nepalesisch + Ndonga + Niederländisch + Norwegisch Nynorsk + Norwegisch + Ndebele-Sprache (Süd) + Navajo-Sprache + Chewa-Sprache + Okzitanisch + Ojibwa-Sprache + Oromo + Orija + Ossetisch + Pandschabisch + Pali + Polnisch + Afghanisch (Paschtu) + Portugiesisch + Quechua + Rätoromanisch + Rundi-Sprache + Rumänisch + Russisch + Rwanda-Sprache + Sanskrit + Sardisch + Sindhi + Nord-Samisch + Sango + Serbo-Kroatisch + Singhalesisch + Sidamo + Slowakisch + Slowenisch + Samoanisch + Shona + Somali + Albanisch + Serbisch + Swazi + Süd-Sotho-Sprache + Sudanesisch + Schwedisch + Suaheli + Syrisch + Tamilisch + Telugu + Tadschikisch + Thai + Tigrinja + Tigre + Turkmenisch + Tagalog + Tswana-Sprache + Tongaisch + Türkisch + Tsonga + Tatarisch + Twi + Tahitisch + Uigurisch + Ukrainisch + Urdu + Usbekisch + Venda-Sprache + Vietnamesisch + Volapük + Wallonisch + Wolof + Xhosa + Jiddisch + Joruba + Zhuang + Zapotekisch + Chinesisch + Zulu + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Andorra + Vereinigte Arabische Emirate + Afghanistan + Antigua und Barbuda + Anguilla + Albanien + Armenien + Niederländische Antillen + Angola + Antarktis + Argentinien + 
Amerikanisch-Samoa + Österreich + Australien + Aruba + Aserbaidschan + Bosnien und Herzegowina + Barbados + Bangladesch + Belgien + Burkina Faso + Bulgarien + Bahrain + Burundi + Benin + Bermuda + Brunei Darussalam + Bolivien + Brasilien + Bahamas + Bhutan + Bouvetinsel + Botsuana + Belarus + Belize + Kanada + Kokosinseln (Keeling) + Demokratische Republik Kongo + Zentralafrikanische Republik + Kongo + Schweiz + Côte d’Ivoire + Cookinseln + Chile + Kamerun + China + Kolumbien + Costa Rica + Kuba + Kap Verde + Weihnachtsinsel + Zypern + Tschechische Republik + Deutschland + Dschibuti + Dänemark + Dominica + Dominikanische Republik + Algerien + Ecuador + Estland + Ägypten + Westsahara + Eritrea + Spanien + Äthiopien + Finnland + Fidschi + Falklandinseln + Mikronesien + Färöer + Frankreich + en + Gabun + Vereinigtes Königreich + Grenada + Georgien + Französisch-Guayana + Ghana + Gibraltar + Grönland + Gambia + Guinea + Guadeloupe + Äquatorialguinea + Griechenland + Südgeorgien und die Südlichen Sandwichinseln + Guatemala + Guam + Guinea-Bissau + Guyana + Hong Kong S.A.R., China + Heard und McDonaldinseln + Honduras + Kroatien + Haiti + Ungarn + Indonesien + Irland + Israel + Indien + Britisches Territorium im Indischen Ozean + Irak + Iran + Island + Italien + Jamaika + Jordanien + Japan + Kenia + Kirgisistan + Kambodscha + Kiribati + Komoren + St. Kitts und Nevis + Demokratische Volksrepublik Korea + Republik Korea + Kuwait + Kaimaninseln + Kasachstan + Laos + Libanon + St. 
Lucia + Liechtenstein + Sri Lanka + Liberia + Lesotho + Litauen + Luxemburg + Lettland + Libyen + Marokko + Monaco + Republik Moldau + Madagaskar + Marshallinseln + Mazedonien + Mali + Myanmar + Mongolei + Macau S.A.R., China + Nördliche Marianen + Martinique + Mauretanien + Montserrat + Malta + Mauritius + Malediven + Malawi + Mexiko + Malaysia + Mosambik + Namibia + Neukaledonien + Niger + Norfolkinsel + Nigeria + Nicaragua + Niederlande + Norwegen + Nepal + Nauru + Niue + Neuseeland + Oman + Panama + Peru + Französisch-Polynesien + Papua-Neuguinea + Philippinen + Pakistan + Polen + St. Pierre und Miquelon + Pitcairn + Puerto Rico + Palästinensische Gebiete + Portugal + Palau + Paraguay + Katar + Réunion + Rumänien + Russische Föderation + Ruanda + Saudi-Arabien + Salomonen + Seychellen + Sudan + Schweden + Singapur + St. Helena + Slowenien + Svalbard und Jan Mayen + Slowakei + Sierra Leone + San Marino + Senegal + Somalia + Serbien + Suriname + São Tomé und Príncipe + El Salvador + Syrien + Swasiland + Turks- und Caicosinseln + Tschad + Französische Süd- und Antarktisgebiete + Togo + Thailand + Tadschikistan + Tokelau + Osttimor + Turkmenistan + Tunesien + Tonga + Türkei + Trinidad und Tobago + Tuvalu + Taiwan + Tansania + Ukraine + Uganda + Amerikanisch-Ozeanien + Vereinigte Staaten + Uruguay + Usbekistan + Vatikanstadt + St. 
Vincent und die Grenadinen + Venezuela + Britische Jungferninseln + Amerikanische Jungferninseln + Vietnam + Vanuatu + Wallis und Futuna + Samoa + Jemen + Mayotte + Jugoslawien + Südafrika + Sambia + Simbabwe + + + en + Posix + Revidiert + + + Kalender + Sortierung + Währung + + + Buddhistischer Kalender + Chinesischer Kalender + Gregorianischer Kalender + Hebräischer Kalender + Islamischer Kalender + Bürgerlicher islamischer Kalender + Japanischer Kalender + Direkte Sortierregeln + Telefonbuch-Sortierregeln + Pinyin-Sortierregeln + Strichfolge + Traditionelle Sortierregeln + + + + [a-z ä ö ü ß] + + + GjMtkHmsSEDFwWahKzJeugAZ + + + + + + Jan + Feb + Mrz + Apr + Mai + Jun + Jul + Aug + Sep + Okt + Nov + Dez + + + J + F + M + A + M + J + J + A + S + O + N + D + + + Januar + Februar + März + April + Mai + Juni + Juli + August + September + Oktober + November + Dezember + + + + + + + So + Mo + Di + Mi + Do + Fr + Sa + + + S + M + D + M + D + F + S + + + Sonntag + Montag + Dienstag + Mittwoch + Donnerstag + Freitag + Samstag + + + + + + + + vorm. + nachm. + + + v. Chr. + n. Chr. + + + + + + + EEEE, d. MMMM yyyy + + + + + d. MMMM yyyy + + + + + dd.MM.yyyy + + + + + dd.MM.yy + + + + + + + + H:mm' Uhr 'z + + + + + HH:mm:ss z + + + + + HH:mm:ss + + + + + HH:mm + + + + + + + {1} {0} + + + + + + + + + Mitteleuropäische Zeit + Mitteleuropäische Sommerzeit + + + MEZ + MESZ + + Berlin + + + + + + , + . + ; + % + 0 + # + + + - + E + + + + + + + Andorranischer Diner + ADD + + + Andorranische Pesete + ADP + + + UAE Dirham + AED + + + Afghani + AFA + + + Afghani + Af + + + Afar und Issa Franc + AIF + + + Albanischer Lek (1946-1961) + ALK + + + Lek + ALL + + + Albanischer Lek Valute + ALV + + + Dram + AMD + + + Niederl. 
Antillen Gulden + ANG + + + Kwanza + AOA + + + Angolanischer Kwanza (1977-1990) + AOK + + + Neuer Kwanza + AON + + + Kwanza Reajustado + AOR + + + Angolanischer Escudo + AOS + + + Argentinischer Austral + ARA + + + Argentinischer Peso Moneda Nacional + ARM + + + Argentinischer Peso (1983-1985) + ARP + + + Argentinischer Peso + ARS + + + Österreichischer Schilling + öS + + + Australischer Dollar + AUD + + + Australisches Pfund + AUP + + + Aruba Florin + AWG + + + Aserbeidschan Manat + AZM + + + Bosnien und Herzegowina Dinar + BAD + + + Konvertierbare Mark + BAM + + + Bosnien und Herzegowina Neuer Dinar + BAN + + + Barbados-Dollar + BBD + + + Taka + BDT + + + Belgischer Franc (konvertibel) + BEC + + + Belgischer Franc + BEF + + + Lew + BGL + + + Lew + BGN + + + Lew (1879-1952) + BGO + + + Bahrain-Dinar + BHD + + + Burundi-Franc + BIF + + + Bermuda-Dollar + BMD + + + Bermuda-Pfund + BMP + + + Brunei-Dollar + BND + + + Boliviano + BOB + + + Boliviano (1863-1962) + BOL + + + Bolivianischer Peso + BOP + + + Mvdol + BOV + + + Brasilianischer Cruzeiro Novo (1967-1986) + BRB + + + Brasilianischer Cruzado + BRC + + + Brasilianischer Cruzeiro (1990-1993) + BRE + + + Real + BRL + + + Brasilianischer Cruzado Novo + BRN + + + Brasilianischer Cruzeiro + BRR + + + Brasilianischer Cruzeiro (1942-1967) + BRZ + + + Bahama-Dollar + BSD + + + Bahama-Pfund + BSP + + + Ngultrum + BTN + + + Bhutan Rupie + BTR + + + Birmanischer Kyat + BUK + + + Birmanische Rupie + BUR + + + Pula + BWP + + + Belarus Rubel (alt) + BYB + + + Belarus Rubel (1992-1994) + BYL + + + Belarus Rubel (neu) + BYR + + + Belize-Dollar + BZD + + + Kanadischer Dollar + CAD + + + Franc congolais + CDF + + + Republik Kongo Franc + CDG + + + Kongolesische Zaire + CDL + + + Zentralafrikanische Republik CFA Franc + CFF + + + Schweizer Franken + SFr. 
+ + + Chilenischer Condor + CLC + + + Chilenischer Escudo + CLE + + + Unidades de Fomento + CLF + + + Chilenischer Peso + CLP + + + Kamerun CFA Franc + CMF + + + Chinesischer Jen Min Piao Yuan + CNP + + + Renminbi Yuan + CNY + + + Kolumbianischer Papier-Peso + COB + + + Kongo CFA Franc + COF + + + Kolumbianischer Peso + COP + + + Costa Rica Colon + CRC + + + Tschechoslowakische Krone + CSC + + + Kubanischer Peso + CUP + + + Kap Verde Escudo + CVE + + + Zypern Pfund + CYP + + + Tschechische Krone + CZK + + + Deutsche Mark + DM + + + Sperrmark + DES + + + Dschibuti-Franc + DJF + + + Dänische Krone + DKK + + + Dominikanischer Peso + DOP + + + Algerischer Dinar + DZD + + + Algerischer Neuer Franc + DZF + + + Algerischer Franc Germinal + DZG + + + Ecuadorianischer Sucre + ECS + + + Verrechnungseinheit für EC + ECV + + + Estnische Krone + EEK + + + Ägyptisches Pfund + EGP + + + Nakfa + ERN + + + Spanische Pesete + ESP + + + Birr + ETB + + + Äthiopischer Dollar + ETD + + + Euro + + + + Finnische Mark + FIM + + + Finnische Mark (1860-1962) + FIN + + + Fidschi Dollar + FJD + + + Fidschi Pfund + FJP + + + Falkland Pfund + FKP + + + Färöer Inseln Krone + FOK + + + Französischer Franc + FF + + + Französischer Franc Germinal/Franc Poincare + FRG + + + Pfund Sterling + £ + + + Georgischer Kupon Larit + GEK + + + Georgischer Lari + GEL + + + Cedi + GHC + + + Ghana Cedi (alt) + GHO + + + Ghana Pfund + GHP + + + Gibraltar Pfund + GIP + + + Grönland Krone + GLK + + + Dalasi + GMD + + + Gambia Pfund + GMP + + + Guinea Franc + GNF + + + Äquatorialguinea Ekwele Guineana + GQE + + + Äquatorialguinea Franco + GQF + + + Äquatorialguinea Peseta Guineana + GQP + + + Griechische Drachme + GRD + + + Neue Griechische Drachme + GRN + + + Quetzal + GTQ + + + Französisch Guayana Franc Guiana + GUF + + + Portugiesisch Guinea Escudo + GWE + + + Portugiesisch Guinea Mil Reis + GWM + + + Guinea Bissau Peso + GWP + + + Guyana Dollar + GYD + + + Hongkong Dollar + HKD + + + Lempira + HNL + + + 
Kroatischer Dinar + HRD + + + Kuna + HRK + + + Gourde + HTG + + + Forint + HUF + + + Nordirisches Pfund + IBP + + + Indonesischer Nica Guilder + IDG + + + Indonesische Java Rupiah + IDJ + + + Indonesische Neue Rupiah + IDN + + + Rupiah + IDR + + + Irisches Pfund + IEP + + + Schekel + ILL + + + Israelisches Pfund + ILP + + + Schekel + ILS + + + Indische Rupie + =0#Rs.|1#Re.|1<Rs. + + + Irak Dinar + IQD + + + Rial + IRR + + + Isländische Krone + ISK + + + Italienische Lire + + + + Jersey Pfund Sterling + JEP + + + Jamaika Dollar + JMD + + + Jamaika Pfund + JMP + + + Jordanischer Dinar + JOD + + + Yen + ¥ + + + Kenia Schilling + KES + + + Som + som + + + Riel (alt) + KHO + + + Riel + KHR + + + Komoren Franc + KMF + + + Nordkoreanischer Won (alt) + KPP + + + Nordkoreanischer Won + KPW + + + Südkoreanischer Hwan + KRH + + + Südkoreanischer Won (alt) + KRO + + + Südkoreanischer Won + KRW + + + Kuwait Dinar + KWD + + + Kaiman-Dollar + KYD + + + Kasachstan Rubel + KZR + + + Tenge + KZT + + + Kip + LAK + + + Libanesisches Pfund + LBP + + + Liechtenstein Franken + LIF + + + Sri Lanka Rupie + LKR + + + Ceylon Rupie + LNR + + + Liberianischer Dollar + LRD + + + Loti + LSL + + + Litauischer Litas + LTL + + + Litauischer Talonas + LTT + + + Luxemburgischer Franc + LUF + + + Lettischer Lats + LVL + + + Lettischer Rubel + LVR + + + Libyscher Dinar + LYD + + + Libysches Pfund + LYP + + + Marokkanischer Dirham + MAD + + + Marokkanischer Franc + MAF + + + Monaco Franc Nouveau + MCF + + + Monaco Franc Germinal + MCG + + + Moldau Leu Cupon + MDC + + + Moldau Leu + MDL + + + Moldau Rubel Cupon + MDR + + + Madagaskar Ariary + MGA + + + Madagaskar Franc + MGF + + + Marshall Inseln Dollar + MHD + + + Denar + MKD + + + Kyat + MMK + + + Tugrik + MNT + + + Pataca + MOP + + + Ouguiya + MRO + + + Maltesische Lira + MTL + + + Maltesisches Pfund + MTP + + + Mauritius Rupie + MUR + + + Malediven Rupie + MVP + + + Rufiyaa + MVR + + + Malawi Kwacha + MWK + + + Malawi Pfund + MWP + + + Mexikanischer 
Peso + MXN + + + Mexikanischer Silber-Peso (1861-1992) + MXP + + + Mexican Unidad de Inversion (UDI) + MXV + + + Malaysischer Ringgit + MYR + + + Metical + MZM + + + Namibia Dollar + NAD + + + Neukaledonien Franc Germinal + NCF + + + Naira + NGN + + + Nigerianisches Pfund + NGP + + + Neue Hebriden CFP Franc + NHF + + + Cordoba + NIC + + + Gold-Cordoba + NIG + + + Gold-Cordoba + NIO + + + Holländischer Gulden + NLG + + + Norwegische Krone + NOK + + + Nepalesische Rupie + NPR + + + Neuseeland Dollar + NZD + + + Neuseeland Pfund + NZP + + + Rial Omani + OMR + + + Balboa + PAB + + + Dnjestr-Republik Rubel Kupon + PDK + + + Dnjestr-Republik Rubel (neu) + PDN + + + Dnjestr-Republik Rubel + PDR + + + Peruanischer Inti + PEI + + + Neuer Sol + PEN + + + Sol + PES + + + Kina + PGK + + + Philippinischer Peso + PHP + + + Pakistanische Rupie + PKR + + + Zloty + PLN + + + Zloty (1950-1995) + PLZ + + + Palästina Pfund + PSP + + + Portugiesischer Conto + PTC + + + Portugiesischer Escudo + PTE + + + Guarani + PYG + + + Katar Riyal + QAR + + + Leu + ROL + + + Neuer Leu + RON + + + Russischer Rubel (neu) + RUB + + + Russischer Rubel (alt) + RUR + + + Ruanda Franc + RWF + + + Saudi Riyal + SAR + + + Salomonen Dollar + SBD + + + Seychellen Rupie + SCR + + + Sudanesischer Dinar + SDD + + + Sudanesisches Pfund + SDP + + + Schwedische Krone + SEK + + + Singapur Dollar + SGD + + + St. 
Helena Pfund + SHP + + + Tolar Bons + SIB + + + Tolar + SIT + + + Slowakische Krone + SKK + + + Leone + SLL + + + San Marino Lire + SML + + + Somalia Schilling + SOS + + + Somaliland Schilling + SQS + + + Suriname Gulden + SRG + + + Schottisches Pfund + SSP + + + Dobra + STD + + + Sao Tome und Principe Escudo + STE + + + Sowjetischer Neuer Rubel + SUN + + + Sowjetischer Rubel + SUR + + + El Salvador Colon + SVC + + + Syrisches Pfund + SYP + + + Lilangeni + SZL + + + Turks und Caicos Krone + TCC + + + Tschad CFA Franc + TDF + + + Baht + THB + + + Tadschikistan Rubel + TJR + + + Tadschikistan Somoni + TJS + + + Turkmenistan-Manat + TMM + + + Tunesischer Dinar + TND + + + Paʻanga + TOP + + + Tonga Pfund Sterling + TOS + + + Timor Escudo + TPE + + + Timor Pataca + TPP + + + Türkische Lira + TRL + + + Trinidad und Tobago Dollar + TTD + + + Trinidad und Tobago Dollar (alt) + TTO + + + Tuvalu Dollar + TVD + + + Neuer Taiwan Dollar + TWD + + + Tansania Schilling + TZS + + + Hryvnia + UAH + + + Ukrainischer Karbovanetz + UAK + + + Uganda Schilling (1966-1987) + UGS + + + Uganda Schilling + UGX + + + US Dollar + $ + + + US Dollar (Nächster Tag) + USN + + + US Dollar (Gleicher Tag) + USS + + + Uruguayischer Peso + UYU + + + Usbekistan Sum + UZS + + + Vatikanstadt Lire + VAL + + + Nordvietnam Piastre Dong Viet + VDD + + + Nordvietnam Neuer Dong + VDN + + + Nordvietnam Viet Minh Piastre Dong Viet + VDP + + + Bolivar + VEB + + + Britische Jungferninseln Dollar + VGD + + + Dong + VND + + + Vietnamesischer Neuer Dong + VNN + + + Republik Vietnam Dong + VNR + + + Vietnamesischer Dong + VNS + + + Vatu + VUV + + + West-Samoa Pfund + WSP + + + Tala + WST + + + CFA Franc (Äquatorial) + XAF + + + Gold + XAU + + + Europäische Rechnungseinheit + XBA + + + Europäische Rechnungseinheit (XBC) + XBC + + + Europäische Rechnungseinheit (XBD) + XBD + + + Ostkaribischer Dollar + EC$ + + + Sonderziehungsrechte + XDR + + + Französischer Gold-Franc + XFO + + + Französischer UIC-Franc + XFU + + + 
Islamischer Dinar + XID + + + Französischer Antillen CFA Franc + XNF + + + CFA Franc (West) + XOF + + + CFP Franc + XPF + + + COMECON Transferabler Rubel + XTR + + + Jemen Dinar + YDD + + + Jemen Imadi Riyal + YEI + + + Jemen Rial + YER + + + Jugoslawischer Dinar (1966-1990) + YUD + + + Jugoslawische Föderation Dinar + YUF + + + Jugoslawischer 1994-Dinar + YUG + + + Neuer Dinar + YUM + + + Jugoslawischer Dinar (konvertibel) + YUN + + + Jugoslawischer Oktober-Dinar + YUO + + + Jugoslawischer Reformierter Dinar + YUR + + + Südafrikanisches Pfund + ZAP + + + Rand + ZAR + + + Kwacha + ZMK + + + Sambisches Pfund + ZMP + + + Neuer Zaire + ZRN + + + Zaire + ZRZ + + + Simbabwe Dollar + ZWD + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/de_AT.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/de_AT.xml new file mode 100644 index 0000000..8b54952 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/de_AT.xml @@ -0,0 +1,105 @@ + + + + + + + + + + + + + + + + Jän + Feb + Mär + Apr + Mai + Jun + Jul + Aug + Sep + Okt + Nov + Dez + + + Jänner + Februar + März + April + Mai + Juni + Juli + August + September + Oktober + November + Dezember + + + + + + + + + + + + EEEE, dd. MMMM yyyy + + + + + dd. 
MMMM yyyy + + + + + dd.MM.yyyy + + + + + dd.MM.yy + + + + + + + + HH:mm' Uhr 'z + + + + + HH:mm:ss z + + + + + HH:mm:ss + + + + + HH:mm + + + + + + + {1} {0} + + + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/de_BE.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/de_BE.xml new file mode 100644 index 0000000..a6d9f40 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/de_BE.xml @@ -0,0 +1,140 @@ + + + + + + + + + + + + + + + + Jan + Feb + Mär + Apr + Mai + Jun + Jul + Aug + Sep + Okt + Nov + Dez + + + + + + + Son + Mon + Die + Mit + Don + Fre + Sam + + + + + + + + + + + + EEEE d MMMM yyyy + + + + + d MMMM yyyy + + + + + d-MMM-yy + + + + + d/MM/yy + + + + + + + + HH 'h' mm 'min' ss 's' z + + + + + HH:mm:ss z + + + + + HH:mm:ss + + + + + HH:mm + + + + + + + {1} {0} + + + + + + + + + + + #,##0.###;-#,##0.### + + + + + + + #E0 + + + + + + + #,##0% + + + + + + + #,##0.00 ¤;-#,##0.00 ¤ + + + + + + Franken + FF + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/de_CH.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/de_CH.xml new file mode 100644 index 0000000..e2ed5b2 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/de_CH.xml @@ -0,0 +1,69 @@ + + + + + + + + + + + + Bangladesh + Brunei + Botswana + Kapverden + Djibouti + Grossbritannien + Marshall-Inseln + Rwanda + Salomon-Inseln + Sao Tomé und Principe + Zimbabwe + + + + + . 
+ ' + ; + % + 0 + # + + + - + E + + + + + + + + #,##0.###;-#,##0.### + + + + + + + #E0 + + + + + + + #,##0% + + + + + + + ¤ #,##0.00;¤-#,##0.00 + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/de_DE.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/de_DE.xml new file mode 100644 index 0000000..f24f340 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/de_DE.xml @@ -0,0 +1,40 @@ + + + + + + + + + + + + + + #,##0.###;-#,##0.### + + + + + + + #E0 + + + + + + + #,##0% + + + + + + + #,##0.00 ¤;-#,##0.00 ¤ + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/de_LI.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/de_LI.xml new file mode 100644 index 0000000..f554efa --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/de_LI.xml @@ -0,0 +1,26 @@ + + + + + + + + + + + + . + ' + ; + % + 0 + # + + + - + E + + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/de_LU.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/de_LU.xml new file mode 100644 index 0000000..0aa5d6f --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/de_LU.xml @@ -0,0 +1,59 @@ + + + + + + + + + + + + + + + + + + + + + + + + #,##0.###;-#,##0.### + + + + + + + #E0 + + + + + + + #,##0% + + + + + + + #,##0.00 ¤;-#,##0.00 ¤ + + + + + + Luxemburgischer Franc + F + #,##0 ¤;-#,##0 ¤ + #,##0 ¤;-#,##0 ¤ + , + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/dv.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/dv.xml new file mode 100644 index 0000000..72b061c --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/dv.xml @@ -0,0 +1,70 @@ + + + + + + + + + + + ދިވެހިބަސް + + + ދިވެހި ރާއްޖެ + + + + [[:Thaa:]‌‍‏‎] + + + + . 
+ , + ، + % + ٠ + # + + + - + E + + + + + + + + #,##,##0.###;-#,##,##0.### + + + + + + + #E0 + + + + + + + #,##,##0% + + + + + + + ¤ #,##,##0.00;-¤ #,##,##0.00 + + + + + + MVR + ރ. + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/dv_MV.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/dv_MV.xml new file mode 100644 index 0000000..7fb2241 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/dv_MV.xml @@ -0,0 +1,73 @@ + + + + + + + + + + + + + + + + + + + + + EEEE d MMMM yyyy + + + + + d MMMM yyyy + + + + + dd-MM-yyyy + + + + + d-M-yy + + + + + + + + hh:mm:ss a z + + + + + hh:mm:ss a z + + + + + hh:mm:ss a + + + + + hh:mm a + + + + + + + {1} {0} + + + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/el.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/el.xml new file mode 100644 index 0000000..cc0c68c --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/el.xml @@ -0,0 +1,541 @@ + + + + + + + + + + + Αραβικά + Βουλγαρικά + Καταλανικά + Τσεχικά + Δανικά + Γερμανικά + Ελληνικά + Αγγλικά + Ισπανικά + Εσθονικά + Φινλανδικά + Γαλλικά + Εβραϊκά + Κροατικά + Ουγγρικά + Ιταλικά + Ιαπωνικά + Κορεατικά + Λιθουανικά + Λετονικά + Σλαβομακεδονικά + Ολλανδικά + Νορβηγικά + Πολωνικά + Πορτογαλικά + Ρουμανικά + Ρωσικά + Σλοβακικά + Σλοβενικά + Αλβανικά + Σερβικά + Σουηδικά + Τουρκικά + Κινεζικά + + + Ανδόρα + Ηνωμένα Αραβικά Εμιράτα + Αφγανιστάν + Αντίγκουα και Μπαρμπούντα + Ανγκουίλα + Αλβανία + Αρμενία + Ολλανδικές Αντίλλες + Ανγκόλα + Ανταρκτική + Αργεντινή + Αμερικανική Σαμόα + Αυστρία + Αυστραλία + Αρούμπα + Αζερμπαϊτζάν + Βοσνία - Ερζεγοβίνη + Μπαρμπάντος + Μπανγκλαντές + Βέλγιο + Μπουρκίνα Φάσο + Βουλγαρία + Μπαχρέιν + Μπουρούντι + Μπένιν + Βερμούδες + Μπρουνέι Νταρουσαλάμ + Βολιβία + Βραζιλία + Μπαχάμες + Μπουτάν + Νήσος Μπουβέ + Μποτσουάνα + Λευκορωσία + Μπελίζ + Καναδάς + Νήσοι Κόκος (Κήλινγκ) + Κονγκό, Λαϊκή Δημοκρατία 
του + Κεντροαφρικανική Δημοκρατία + Κονγκό + Ελβετία + Ακτή Ελεφαντόδοντος + Νήσοι Κουκ + Χιλή + Καμερούν + Κίνα + Κολομβία + Κόστα Ρίκα + Κούβα + Νήσοι Πράσινου Ακρωτηρίου + Νήσος Χριστουγέννων + Κύπρος + Τσεχία + Γερμανία + Τζιμπουτί + Δανία + Ντομίνικα + Δομινικανή Δημοκρατία + Αλγερία + Ισημερινός + Εσθονία + Αίγυπτος + Δυτική Σαχάρα + Ερυθραία + Ισπανία + Αιθιοπία + Φινλανδία + Φίτζι + Νήσοι Φώκλαντ + Μικρονησία, Ομόσπονδες Πολιτείες της + Νήσοι Φερόες + Γαλλία + Γκαμπόν + Ηνωμένο Βασίλειο + Γρενάδα + Γεωργία + Γαλλική Γουιάνα + Γκάνα + Γιβραλτάρ + Γροιλανδία + Γκάμπια + Γουινέα + Γουαδελούπη + Ισημερινή Γουινέα + Ελλάδα + Νότια Γεωργία και Νήσοι Νότιες Σάντουιτς + Γουατεμάλα + Γκουάμ + Γουινέα-Μπισάου + Γουιάνα + Χονγκ Κονγκ, Ειδική Διοικητική Περιφέρεια της Κίνας + Νήσοι Χερντ και Μακντόναλντ + Ονδούρα + Κροατία + Αϊτή + Ουγγαρία + Ινδονησία + Ιρλανδία + Ισραήλ + Ινδία + Βρετανικά Έδάφη Ινδικού Ωκεανού + Ιράκ + Ιράν, Ισλαμική Δημοκρατία του + Ισλανδία + Ιταλία + Τζαμάικα + Ιορδανία + Ιαπωνία + Κένυα + Κιργιζία + Καμπότζη + Κιριμπάτι + Κομόρες + Σαιντ Κιτς και Νέβις + Κορέα, Βόρεια + Κορέα, Νότια + Κουβέιτ + Νήσοι Κέιμαν + Καζακστάν + Λατινική Αμερική + Λίβανος + Αγία Λουκία + Λιχτενστάιν + Σρι Λάνκα + Λιβερία + Λεσότο + Λιθουανία + Λουξεμβούργο + Λετονία + Μαρόκο + Μονακό + Μολδαβία, Δημοκρατία της + Μαδαγασκάρη + Νήσοι Μάρσαλ + ΠΓΔ Μακεδονίας + Μάλι + Μιανμάρ + Μογγολία + Μακάο, Ειδική Διοικητική Περιφέρεια της Κίνας + Νήσοι Βόρειες Μαριάνες + Μαρτινίκα + Μαυριτανία + Μονσεράτ + Μάλτα + Μαυρίκιος + Μαλδίβες + Μαλάουι + Μεξικό + Μαλαισία + Μοζαμβίκη + Ναμίμπια + Νέα Καληδονία + Νίγηρ + Νήσος Νόρφολκ + Νιγηρία + Νικαράγουα + Ολλανδία + Νορβηγία + Νεπάλ + Ναούρου + Νιούε + Νέα Ζηλανδία + Ομάν + Παναμάς + Περού + Γαλλική Πολυνησία + Παπούα - Νέα Γουινέα + Φιλιππίνες + Πακιστάν + Πολωνία + Σαιντ Πιέρ και Μικελόν + Πίτκερν + Πουέρτο Ρίκο + Παλαιστινιακά Εδάφη + Πορτογαλία + Παλάου + Παραγουάη + Κατάρ + Ρεϋνιόν + Ρουμανία + Ρωσία + Ρουάντα + Σαουδική Αραβία + 
Νήσοι Σολομώντος + Σεϋχέλλες + Σουδάν + Σουηδία + Σιγκαπούρη + Αγία Ελένη + Σλοβενία + Νήσοι Σβάλμπαρ και Γιαν Μαγιέν + Σλοβακία + Σιέρα Λεόνε + Άγιος Μαρίνος + Σενεγάλη + Σομαλία + Σερβία + Σουρινάμ + Σάο Τομέ και Πρίνσιπε + Ελ Σαλβαδόρ + Συρία, Αραβική Δημοκρατία της + Σουαζιλάνδη + Νήσοι Τερκς και Κάικος + Τσαντ + Γαλλικά Νότια Εδάφη + Τόγκο + Ταϊλάνδη + Τατζικιστάν + Τοκελάου + Ανατολικό Τιμόρ + Τουρκμενιστάν + Τυνησία + Τόνγκα + Τουρκία + Τρινιδάδ και Τομπάγκο + Τουβαλού + Ταϊβάν (Δ.Κ.) + Τανζανία + Ουκρανία + Ουγκάντα + Απομακρυσμένες Νησίδες των Ηνωμένων Πολιτειών + Ηνωμένες Πολιτείες + Ουρουγουάη + Ουζμπεκιστάν + Αγία Έδρα (Βατικανό) + Άγιος Βικέντιος και Γρεναδίνες + Βενεζουέλα + Βρετανικές Παρθένοι Νήσοι + Αμερικανικές Παρθένοι Νήσοι + Βιετνάμ + Βανουάτου + Νήσοι Ουαλλίς και Φουτουνά + Σαμόα + Υεμένη + Μαγιότ + Γιουγκοσλαβία + Νότια Αφρική + Ζάμπια + Ζιμπάμπουε + + + + [ά-ώ] + + + GanjkHmsSEDFwWxhKzAeugXZ + + + + + + Ιαν + Φεβ + Μαρ + Απρ + Μαϊ + Ιουν + Ιουλ + Αυγ + Σεπ + Οκτ + Νοε + Δεκ + + + Ι + Φ + Μ + Α + Μ + Ι + Ι + Α + Σ + Ο + Ν + Δ + + + Ιανουαρίου + Φεβρουαρίου + Μαρτίου + Απριλίου + Μαΐου + Ιουνίου + Ιουλίου + Αυγούστου + Σεπτεμβρίου + Οκτωβρίου + Νοεμβρίου + Δεκεμβρίου + + + + + Ιαν + Φεβ + Μαρ + Απρ + Μαϊ + Ιουν + Ιουλ + Αυγ + Σεπ + Οκτ + Νοε + Δεκ + + + Ι + Φ + Μ + Α + Μ + Ι + Ι + Α + Σ + Ο + Ν + Δ + + + Ιανουάριος + Φεβρουάριος + Μάρτιος + Απρίλιος + Μάιος + Ιούνιος + Ιούλιος + Αύγουστος + Σεπτέμβριος + Οκτώβριος + Νοέμβριος + Δεκέμβριος + + + + + + + Κυρ + Δευ + Τρι + Τετ + Πεμ + Παρ + Σαβ + + + Κυριακή + Δευτέρα + Τρίτη + Τετάρτη + Πέμπτη + Παρασκευή + Σάββατο + + + + + + + + ΠΜ + ΜΜ + + + π.Χ. + μ.Χ. + + + + + + + EEEE, dd MMMM yyyy + + + + + dd MMMM yyyy + + + + + dd MMM yyyy + + + + + dd/MM/yyyy + + + + + + + + h:mm:ss a z + + + + + h:mm:ss a z + + + + + h:mm:ss a + + + + + h:mm a + + + + + + + {1} {0} + + + + + + + + + , + . 
+ ; + % + 0 + # + + + - + E + + + + + + + ΔΟΛΑΡΙΟ ΑΥΣΤΡΑΛΙΑΣ + AUD + + + ΔΟΛΑΡΙΟ ΚΑΝΑΔΑ + CAD + + + ΦΡΑΓΚΟ ΕΛΒΕΤΙΑΣ + CHF + + + ΛΙΡΑ ΚΥΠΡΟΥ + CYP + + + ΚΟΡΟΝΑ ΔΑΝΙΑΣ + DKK + + + ΕΥΡΩ + + + + ΛΙΡΑ ΑΓΓΛΙΑΣ + £ + + + Δρχ + Δρχ + + + ΓΙΕΝ ΙΑΠΩΝΙΑΣ + ¥ + + + ΚΟΡΟΝΑ ΝΟΡΒΗΓΙΑΣ + NOK + + + ΚΟΡΟΝΑ ΣΟΥΗΔΙΑΣ + SEK + + + ΔΟΛΑΡΙΟ ΗΠΑ + USD + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/el_GR.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/el_GR.xml new file mode 100644 index 0000000..9761012 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/el_GR.xml @@ -0,0 +1,49 @@ + + + + + + + + + + + + + + #,##0.###;-#,##0.### + + + + + + + #E0 + + + + + + + #,##0% + + + + + + + #,##0.00¤;-¤#,##0.00 + + + + + + Δρχ + Δρχ + #,##0.00 ¤;-#,##0.00 ¤ + #,##0.00 ¤;-#,##0.00 ¤ + . + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/en.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/en.xml new file mode 100644 index 0000000..286b905 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/en.xml @@ -0,0 +1,2800 @@ + + + + + + + + + + + Afar + Abkhazian + Achinese + Acoli + Adangme + Adyghe + Avestan + Afrikaans + Afro-Asiatic (Other) + Afrihili + Akan + Akkadian + Aleut + Algonquian Languages + Amharic + Aragonese + English, Old (ca.450-1100) + Apache Languages + Arabic + Aramaic + Araucanian + Arapaho + Artificial (Other) + Arawak + Assamese + Asturian + Athapascan Languages + Australian Languages + Avaric + Awadhi + Aymara + Azerbaijani + Bashkir + Banda + Bamileke Languages + Baluchi + Bambara + Balinese + Basa + Baltic (Other) + Belarusian + Beja + Bemba + Berber + Bulgarian + Bihari + Bhojpuri + Bislama + Bikol + Bini + Siksika + Bambara + Bengali + Bantu + Tibetan + Breton + Braj + Bosnian + Batak + Buriat + Buginese + Blin + Catalan + Caddo + Central American Indian (Other) + Carib + Caucasian (Other) + Chechen + Cebuano + Celtic 
(Other) + Chamorro + Chibcha + Chagatai + Chuukese + Mari + Chinook Jargon + Choctaw + Chipewyan + Cherokee + Cheyenne + Chamic Languages + Corsican + Coptic + Creoles and Pidgins, English-based (Other) + Creoles and Pidgins, French-based (Other) + Creoles and pidgins, Portuguese-based (Other) + Cree + Crimean Turkish; Crimean Tatar + Creoles and Pidgins (Other) + Czech + Kashubian + Church Slavic + Cushitic (Other) + Chuvash + Welsh + Danish + Dakota + Dargwa + Dayak + German + Delaware + Slave + Dogrib + Dinka + Dogri + Dravidian (Other) + Lower Sorbian + Duala + Dutch, Middle (ca. 1050-1350) + Divehi + Dyula + Dzongkha + Ewe + Efik + Egyptian (Ancient) + Ekajuk + Greek + Elamite + English + English, Middle (1100-1500) + Esperanto + Spanish + Estonian + Basque + Ewondo + Persian + Fang + Fanti + Fulah + Finnish + Finno - Ugrian (Other) + Fijian + Faroese + Fon + French + French, Middle (ca.1400-1600) + French, Old (842-ca.1400) + Friulian + Frisian + Irish + Ga + Gayo + Gbaya + Scottish Gaelic + Germanic (Other) + Geez + Gilbertese + Gallegan + German, Middle High (ca.1050-1500) + Guarani + German, Old High (ca.750-1050) + Gondi + Gorontalo + Gothic + Gerbo + Greek, Ancient (to 1453) + Gujarati + Manx + Gwichʻin + Hausa + Haida + Hawaiian + Hebrew + Hindi + Hiligaynon + Himachali + Hittite + Hmong + Hiri Motu + Croatian + Upper Sorbian + Haitian + Hungarian + Hupa + Armenian + Herero + Interlingua + Iban + Indonesian + Interlingue + Igbo + Sichuan Yi + Ijo + Inupiaq + Iloko + Indic (Other) + Indo-European (Other) + Ingush + Ido + Iranian + Iroquoian languages + Icelandic + Italian + Inuktitut + Japanese + Lojban + Judeo-Persian + Judeo-Arabic + Javanese + Georgian + Kara-Kalpak + Kabyle + Kachin + Kamba + Karen + Kawi + Kabardian + Kongo + Khasi + Khoisan (Other) + Khotanese + Kikuyu + Kuanyama + Kazakh + Kalaallisut + Khmer + Kimbundu + Kannada + Korean + Konkani + Kosraean + Kpelle + Kanuri + Karachay-Balkar + Kru + Kurukh + Kashmiri + Kurdish + Kumyk + Kutenai 
+ Komi + Cornish + Kirghiz + Latin + Ladino + Lahnda + Lamba + Luxembourgish + Lezghian + Ganda + Limburgish + Lingala + Lao + Mongo + Lozi + Lithuanian + Luba-Katanga + Luba-Lulua + Luiseno + Lunda + Luo + Lushai + Latvian + Madurese + Magahi + Maithili + Makasar + Mandingo + Austronesian + Masai + Moksha + Mandar + Mende + Malagasy + Irish, Middle (900-1200) + Marshallese + Maori + Micmac + Minangkabau + Miscellaneous Languages + Macedonian + Mon-Khmer (Other) + Malayalam + Mongolian + Manchu + Manipuri + Manobo Languages + Moldavian + Mohawk + Mossi + Marathi + Malay + Maltese + Multiple Languages + Munda Languages + Creek + Marwari + Burmese + Mayan + Erzya + Nauru + Nahuatl + North American Indian (Other) + Neapolitan + Norwegian Bokmål + Ndebele, North + Low German; Low Saxon + Nepali + Newari + Ndonga + Nias + Niger - Kordofanian (Other) + Niuean + Dutch + Norwegian Nynorsk + Norwegian + Nogai + Norse, Old + Ndebele, South + Sotho, Northern + Nubian Languages + Navajo + Nyanja; Chichewa; Chewa + Nyamwezi + Nyankole + Nyoro + Nzima + Occitan (post 1500); Provençal + Ojibwa + Oromo + Oriya + Ossetic + Osage + Turkish, Ottoman (1500-1928) + Otomian Languages + Punjabi + Papuan (Other) + Pangasinan + Pahlavi + Pampanga + Papiamento + Palauan + Persian Old (ca.600-400 B.C.) 
+ Philippine (Other) + Phoenician + Pali + Polish + Pohnpeian + Prakrit Languages + Provençal, Old (to 1500) + Pashto (Pushto) + Portuguese + Quechua + Rajasthani + Rapanui + Rarotongan + Rhaeto-Romance + Rundi + Romanian + Romance (Other) + Romany + Root + Russian + Kinyarwanda + Sanskrit + Sandawe + Yakut + South American Indian (Other) + Salishan languages + Samaritan Aramaic + Sasak + Santali + Sardinian + Scots + Sindhi + Northern Sami + Selkup + Semitic (Other) + Sango + Irish, Old (to 900) + Sign Languages + Serbo-Croatian + Shan + Sinhalese + Sidamo + Siouan Languages + Sino-Tibetan (Other) + Slovak + Slovenian + Slavic (Other) + Samoan + Southern Sami + Sami languages (Other) + Lule Sami + Inari Sami + Skolt Sami + Shona + Soninke + Somali + Sogdien + Songhai + Albanian + Serbian + Serer + Swati + Nilo-Saharam (Other) + Sotho, Southern + Sundanese + Sukuma + Susu + Sumerian + Swedish + Swahili + Syriac + Tamil + Tai (Other) + Telugu + Timne + Tereno + Tetum + Tajik + Thai + Tigrinya + Tigre + Tiv + Turkmen + Tokelau + Tagalog + Tlingit + Tamashek + Tswana + Tonga (Tonga Islands) + Tonga (Nyasa) + Tok Pisin + Turkish + Tsonga + Tsimshian + Tatar + Tumbuka + Tupi languages + Altaic (Other) + Tuvalu + Twi + Tahitian + Tuvinian + Udmurt + Uighur + Ugaritic + Ukrainian + Umbundu + Undetermined + Urdu + Uzbek + Vai + Venda + Vietnamese + Volapük + Votic + Walloon + Wakashan Languages + Walamo + Waray + Washo + Sorbian Languages + Wolof + Kalmyk + Xhosa + Yao + Yapese + Yiddish + Yoruba + Yupik Languages + Zhuang + Zapotec + Zenaga + Chinese + Zande + Zulu + Zuni + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Andorra + United Arab Emirates + Afghanistan + Antigua and Barbuda + Anguilla + Albania + Armenia + Netherlands Antilles + Angola + Antarctica + Argentina + American Samoa + Austria + Australia + Aruba + Azerbaijan + Bosnia and Herzegovina + Barbados + Bangladesh + Belgium + Burkina 
Faso + Bulgaria + Bahrain + Burundi + Benin + Bermuda + Brunei + Bolivia + Brazil + Bahamas + Bhutan + Bouvet Island + Botswana + Belarus + Belize + Canada + Cocos (Keeling) Islands + Democratic Republic of the Congo + Central African Republic + Congo + Switzerland + Côte d’Ivoire + Cook Islands + Chile + Cameroon + China + Colombia + Costa Rica + Cuba + Cape Verde + Christmas Island + Cyprus + Czech Republic + Germany + Djibouti + Denmark + Dominica + Dominican Republic + Algeria + Ecuador + Estonia + Egypt + Western Sahara + Eritrea + Spain + Ethiopia + Finland + Fiji + Falkland Islands + Micronesia + Faroe Islands + France + Gabon + United Kingdom + Grenada + Georgia + French Guiana + Ghana + Gibraltar + Greenland + Gambia + Guinea + Guadeloupe + Equatorial Guinea + Greece + South Georgia and the South Sandwich Islands + Guatemala + Guam + Guinea-Bissau + Guyana + Hong Kong S.A.R., China + Heard Island and McDonald Islands + Honduras + Croatia + Haiti + Hungary + Indonesia + Ireland + Israel + India + British Indian Ocean Territory + Iraq + Iran + Iceland + Italy + Jamaica + Jordan + Japan + Kenya + Kyrgyzstan + Cambodia + Kiribati + Comoros + Saint Kitts and Nevis + North Korea + South Korea + Kuwait + Cayman Islands + Kazakhstan + Laos + Lebanon + Saint Lucia + Liechtenstein + Sri Lanka + Liberia + Lesotho + Lithuania + Luxembourg + Latvia + Libya + Morocco + Monaco + Moldova + Madagascar + Marshall Islands + Macedonia + Mali + Myanmar + Mongolia + Macao S.A.R., China + Northern Mariana Islands + Martinique + Mauritania + Montserrat + Malta + Mauritius + Maldives + Malawi + Mexico + Malaysia + Mozambique + Namibia + New Caledonia + Niger + Norfolk Island + Nigeria + Nicaragua + Netherlands + Norway + Nepal + Nauru + Niue + New Zealand + Oman + Panama + Peru + French Polynesia + Papua New Guinea + Philippines + Pakistan + Poland + Saint Pierre and Miquelon + Pitcairn + Puerto Rico + Palestinian Territory + Portugal + Palau + Paraguay + Qatar + Réunion + Romania 
+ Russia + Rwanda + Saudi Arabia + Solomon Islands + Seychelles + Sudan + Sweden + Singapore + Saint Helena + Slovenia + Svalbard and Jan Mayen + Slovakia + Sierra Leone + San Marino + Senegal + Somalia + Serbia + Suriname + Sao Tome and Principe + El Salvador + Syria + Swaziland + Turks and Caicos Islands + Chad + French Southern Territories + Togo + Thailand + Tajikistan + Tokelau + Timor-Leste + Turkmenistan + Tunisia + Tonga + Turkey + Trinidad and Tobago + Tuvalu + Taiwan + Tanzania + Ukraine + Uganda + United States Minor Outlying Islands + United States + Uruguay + Uzbekistan + Vatican + Saint Vincent and the Grenadines + Venezuela + British Virgin Islands + U.S. Virgin Islands + Vietnam + Vanuatu + Wallis and Futuna + Samoa + Yemen + Mayotte + Yugoslavia + South Africa + Zambia + Zimbabwe + + + Posix + Revised Orthography + + + Calendar + Collation + Currency + + + Buddhist Calendar + Chinese Calendar + Gregorian Calendar + Hebrew Calendar + Islamic Calendar + Islamic-Civil Calendar + Japanese Calendar + Direct Order + Phonebook Order + Pinyin Order + Stroke Order + Traditional + + + + [a-z] + + + + + + + + Jan + Feb + Mar + Apr + May + Jun + Jul + Aug + Sep + Oct + Nov + Dec + + + January + February + March + April + May + June + July + August + September + October + November + December + + + + + + + Sun + Mon + Tue + Wed + Thu + Fri + Sat + + + Sunday + Monday + Tuesday + Wednesday + Thursday + Friday + Saturday + + + + + + BC + AD + + + + + + + EEEE, MMMM d, yyyy + + + + + MMMM d, yyyy + + + + + MMM d, yyyy + + + + + M/d/yy + + + + + + + + h:mm:ss a z + + + + + h:mm:ss a z + + + + + h:mm:ss a + + + + + h:mm a + + + + + + + {1} {0} + + + + + + + + + Pacific Standard Time + Pacific Daylight Time + + + PST + PDT + + Los Angeles + + + + Pacific Standard Time + Pacific Daylight Time + + + PST + PDT + + Los Angeles + + + + Mountain Standard Time + Mountain Daylight Time + + + MST + MDT + + Denver + + + + Mountain Standard Time + Mountain Daylight Time + + + 
MST + MDT + + Denver + + + + Mountain Standard Time + Mountain Standard Time + + + MST + MST + + Phoenix + + + + Mountain Standard Time + Mountain Standard Time + + + MST + MST + + Phoenix + + + + Central Standard Time + Central Daylight Time + + + CST + CDT + + Chicago + + + + Central Standard Time + Central Daylight Time + + + CST + CDT + + Chicago + + + + Eastern Standard Time + Eastern Daylight Time + + + EST + EDT + + New York + + + + Eastern Standard Time + Eastern Daylight Time + + + EST + EDT + + New York + + + + Eastern Standard Time + Eastern Standard Time + + + EST + EST + + Indianapolis + + + + Eastern Standard Time + Eastern Standard Time + + + EST + EST + + Indianapolis + + + + Hawaii Standard Time + Hawaii Standard Time + + + HST + HST + + Honolulu + + + + Hawaii Standard Time + Hawaii Standard Time + + + HST + HST + + Honolulu + + + + Alaska Standard Time + Alaska Daylight Time + + + AST + ADT + + Anchorage + + + + Alaska Standard Time + Alaska Daylight Time + + + AST + ADT + + Anchorage + + + + Atlantic Standard Time + Atlantic Daylight Time + + + AST + ADT + + Halifax + + + + Newfoundland Standard Time + Newfoundland Daylight Time + + + CNT + CDT + + St. Johns + + + + Newfoundland Standard Time + Newfoundland Daylight Time + + + CNT + CDT + + St. 
Johns + + + + Central European Standard Time + Central European Daylight Time + + + CET + CEST + + Paris + + + + Central European Standard Time + Central European Daylight Time + + + CET + CEST + + Paris + + + + Greenwich Mean Time + Greenwich Mean Time + + + GMT + GMT + + London + + + + Greenwich Mean Time + Greenwich Mean Time + + + GMT + GMT + + Casablanca + + + + Israel Standard Time + Israel Daylight Time + + + IST + IDT + + Jerusalem + + + + Japan Standard Time + Japan Standard Time + + + JST + JST + + Tokyo + + + + Japan Standard Time + Japan Standard Time + + + JST + JST + + Tokyo + + + + Eastern European Standard Time + Eastern European Daylight Time + + + EET + EEST + + Bucharest + + + + China Standard Time + China Standard Time + + + CTT + CDT + + Shanghai + + + + China Standard Time + China Standard Time + + + CTT + CDT + + Shanghai + + + + + + + + #,##0.###;-#,##0.### + + + + + + + #E0 + + + + + + + #,##0% + + + + + + + ¤#,##0.00;-¤#,##0.00 + + + + + + Andorran Diner + ADD + + + Andorran Peseta + ADP + + + United Arab Emirates Dirham + AED + + + Afghani (1927-2002) + AFA + + + Afghani + Af + + + Affars and Issas Franc + AIF + + + Albanian Lek (1946-1961) + ALK + + + Albanian Lek + lek + + + Albanian Lek Valute + ALV + + + Albanian Dollar Foreign Exchange Certificates + ALX + + + Armenian Dram + dram + + + Netherlands Antillan Guilder + NA f. 
+ + + Angolan Kwanza + AOA + + + Angolan Kwanza (1977-1990) + AOK + + + Angolan New Kwanza (1990-2000) + AON + + + Angolan Kwanza Reajustado (1995-1999) + AOR + + + Angolan Escudo + AOS + + + Argentine Austral + ARA + + + Argentine Peso Moneda Nacional + ARM + + + Argentine Peso (1983-1985) + ARP + + + Argentine Peso + Arg$ + + + Austrian Schilling + ATS + + + Australian Dollar + $A + + + Australian Pound + AUP + + + Aruban Guilder + AWG + + + Azerbaijanian Manat + AZM + + + Bosnia-Herzegovina Dinar + BAD + + + Bosnia-Herzegovina Convertible Mark + KM + + + Bosnia-Herzegovina New Dinar + BAN + + + Barbados Dollar + BDS$ + + + Bangladesh Taka + Tk + + + Belgian Franc (convertible) + BEC + + + Belgian Franc + BF + + + Belgian Franc (financial) + BEL + + + Bulgarian Hard Lev + lev + + + Bulgarian Socialist Lev + BGM + + + Bulgarian New Lev + BGN + + + Bulgarian Lev (1879-1952) + BGO + + + Bulgarian Lev Foreign Exchange Certificates + BGX + + + Bahraini Dinar + BD + + + Burundi Franc + Fbu + + + Bermudan Dollar + Ber$ + + + Bermudan Pound + BMP + + + Brunei Dollar + BND + + + Boliviano + Bs + + + Boliviano (1863-1962) + BOL + + + Bolivian Peso + BOP + + + Bolivian Mvdol + BOV + + + Brazilian Cruzeiro Novo (1967-1986) + BRB + + + Brazilian Cruzado + BRC + + + Brazilian Cruzeiro (1990-1993) + BRE + + + Brazilian Real + R$ + + + Brazilian Cruzado Novo + BRN + + + Brazilian Cruzeiro + BRR + + + Brazilian Cruzeiro (1942-1967) + BRZ + + + Bahamian Dollar + BSD + + + Bahamian Pound + BSP + + + Bhutan Ngultrum + Nu + + + Bhutan Rupee + BTR + + + Burmese Kyat + BUK + + + Burmese Rupee + BUR + + + Botswanan Pula + BWP + + + Belarussian New Ruble (1994-1999) + BYB + + + Belarussian Ruble (1992-1994) + BYL + + + Belarussian Ruble + Rbl + + + Belize Dollar + BZ$ + + + British Honduras Dollar + BZH + + + Canadian Dollar + Can$ + + + Congolese Franc Congolais + CDF + + + Congolese Republic Franc + CDG + + + Congolese Zaire + CDL + + + Central African Republic CFA Franc + CFF + + + 
Swiss Franc + SwF + + + Cook Islands Dollar + CKD + + + Chilean Condor + CLC + + + Chilean Escudo + CLE + + + Chilean Unidades de Fomento + CLF + + + Chilean Peso + Ch$ + + + Cameroon CFA Franc + CMF + + + Chinese Jen Min Piao Yuan + CNP + + + Chinese US Dollar Foreign Exchange Certificates + CNX + + + Chinese Yuan Renminbi + Y + + + Colombian Paper Peso + COB + + + Congo CFA Franc + COF + + + Colombian Peso + Col$ + + + Costa Rican Colon + C + + + Czechoslovak Koruna + CSC + + + Czechoslovak Hard Koruna + CSK + + + Cuban Peso + CUP + + + Cuban Foreign Exchange Certificates + CUX + + + Cape Verde Escudo + CVEsc + + + Curacao Guilder + CWG + + + Cyprus Pound + £C + + + Czech Republic Koruna + CZK + + + East German Ostmark + DDM + + + Deutsche Mark + DEM + + + German Sperrmark + DES + + + Djibouti Franc + DF + + + Danish Krone + DKr + + + Dominican Peso + RD$ + + + Algerian Dinar + DA + + + Algerian New Franc + DZF + + + Algerian Franc Germinal + DZG + + + Ecuador Sucre + ECS + + + Ecuador Unidad de Valor Constante (UVC) + ECV + + + Estonian Kroon + EEK + + + Egyptian Pound + EGP + + + Eritrean Nakfa + ERN + + + Spanish Peseta + + + + Ethiopian Birr + Br + + + Ethiopian Dollar + ETD + + + Euro + + + + Finnish Markka + FIM + + + Finnish Markka (1860-1962) + FIN + + + Fiji Dollar + F$ + + + Fiji Pound + FJP + + + Falkland Islands Pound + FKP + + + Faeroe Islands Kronur + FOK + + + French Franc + FRF + + + French Franc Germinal/Franc Poincare + FRG + + + Gabon CFA Franc + GAF + + + British Pound Sterling + £ + + + Georgian Kupon Larit + GEK + + + Georgian Lari + lari + + + Ghana Cedi + GHC + + + Ghana Old Cedi + GHO + + + Ghana Pound + GHP + + + Ghana Revalued Cedi + GHR + + + Gibraltar Pound + GIP + + + Greenland Krone + GLK + + + Gambia Dalasi + GMD + + + Gambia Pound + GMP + + + Guinea Franc + GF + + + Guinea Franc (1960-1972) + GNI + + + Guinea Syli + GNS + + + Guadeloupe Franc + GPF + + + Equatorial Guinea Ekwele Guineana + GQE + + + Equatorial Guinea Franco + GQF 
+ + + Equatorial Guinea Peseta Guineana + GQP + + + Greek Drachma + GRD + + + Greek New Drachma + GRN + + + Guatemala Quetzal + Q + + + French Guyana Franc Guiana + GUF + + + Portuguese Guinea Escudo + GWE + + + Portuguese Guinea Mil Reis + GWM + + + Guinea-Bissau Peso + GWP + + + Guyana Dollar + G$ + + + Hong Kong Dollar + HK$ + + + Hoduras Lempira + L + + + Croatian Dinar + HRD + + + Croatian Kuna + HRK + + + Haitian Gourde + HTG + + + Hungarian Forint + Ft + + + Northern Irish Pound + IBP + + + Indonesian Nica Guilder + IDG + + + Indonesian Java Rupiah + IDJ + + + Indonesian New Rupiah + IDN + + + Indonesian Rupiah + Rp + + + Irish Pound + IR£ + + + Israeli Sheqel + ILL + + + Israeli Pound + ILP + + + Israeli New Sheqel + ILS + + + Isle of Man Pound Sterling + IMP + + + Indian Rupee + =0#Rs.|1#Re.|1<Rs. + + + Iraqi Dinar + ID + + + Iranian Rial + RI + + + Icelandic Krona + ISK + + + Italian Lira + + + + Jersey Pound Sterling + JEP + + + Jamaican Dollar + J$ + + + Jamaican Pound + JMP + + + Jordanian Dinar + JD + + + Japanese Yen + ¥ + + + Kenyan Shilling + K Sh + + + Kyrgystan Som + som + + + Cambodian Old Riel + KHO + + + Cambodian Riel + CR + + + Kiribati Dollar + KID + + + Comoro Franc + CF + + + North Korean People’s Won + KPP + + + North Korean Won + KPW + + + South Korean Hwan + KRH + + + South Korean Old Won + KRO + + + South Korean Won + KRW + + + Kuwaiti Dinar + KD + + + Cayman Islands Dollar + KYD + + + Kazakhstan Ruble + KZR + + + Kazakhstan Tenge + T + + + Laotian Kip + LAK + + + Lebanese Pound + LL + + + Liechtenstein Franc + LIF + + + Sri Lanka Rupee + SL Re + + + Ceylon Rupee + LNR + + + Liberian Dollar + LRD + + + Lesotho Loti + M + + + Lithuanian Lita + LTL + + + Lithuanian Talonas + LTT + + + Luxembourg Franc + LUF + + + Latvian Lats + LVL + + + Latvian Ruble + LVR + + + Libyan British Military Authority Lira + LYB + + + Libyan Dinar + LD + + + Libyan Pound + LYP + + + Moroccan Dirham + MAD + + + Moroccan Franc + MAF + + + Monaco Franc Nouveau 
+ MCF + + + Monaco Franc Germinal + MCG + + + Moldovan Leu Cupon + MDC + + + Moldovan Leu + MDL + + + Moldovan Ruble Cupon + MDR + + + Madagascar Ariary + MGA + + + Madagascar Franc + MGF + + + Marshall Islands Dollar + MHD + + + Macedonian Denar + MDen + + + Macedonian Denar (1992-1993) + MKN + + + Mali Franc + MLF + + + Myanmar Kyat + MMK + + + Myanmar Dollar Foreign Exchange Certificates + MMX + + + Mongolian Tugrik + Tug + + + Macao Pataca + MOP + + + Martinique Franc + MQF + + + Mauritania Ouguiya + UM + + + Maltese Lira + Lm + + + Maltese Pound + MTP + + + Mauritius Rupee + MUR + + + Maldive Islands Rupee + MVP + + + Maldive Islands Rufiyaa + MVR + + + Malawi Kwacha + MK + + + Malawi Pound + MWP + + + Mexican Peso + MEX$ + + + Mexican Silver Peso (1861-1992) + MXP + + + Mexican Unidad de Inversion (UDI) + MXV + + + Malaysian Ringgit + RM + + + Mozambique Escudo + MZE + + + Mozambique Metical + Mt + + + Namibia Dollar + N$ + + + New Caledonia Franc Germinal + NCF + + + Nigerian Naira + NGN + + + Nigerian Pound + NGP + + + New Hebrides CFP Franc + NHF + + + Nicaraguan Cordoba + NIC + + + Nicaraguan Gold Cordoba + NIG + + + Nicaraguan Cordoba Oro + NIO + + + Netherlands Guilder + NLG + + + Norwegian Krone + NKr + + + Nepalese Rupee + Nrs + + + New Zealand Dollar + $NZ + + + New Zealand Pound + NZP + + + Oman Rial + RO + + + Oman Rial Saidi + OMS + + + Panamanian Balboa + PAB + + + Transdniestria Ruble Kupon + PDK + + + Transdniestria New Ruble + PDN + + + Transdniestria Ruble + PDR + + + Peruvian Inti + PEI + + + Peruvian Sol Nuevo + PEN + + + Peruvian Sol + PES + + + Papua New Guinea Kina + PGK + + + Philippine Peso + PHP + + + Pakistan Rupee + Pra + + + Polish Zloty + Zl + + + Polish US Dollar Foreign Exchange Certificates + PLX + + + Polish Zloty (1950-1995) + PLZ + + + Palestine Pound + PSP + + + Portuguese Conto + PTC + + + Portuguese Escudo + PTE + + + Paraguay Guarani + PYG + + + Qatari Rial + QR + + + Reunion Franc + REF + + + Romanian Leu + leu + + + 
Romanian New Leu + RON + + + Russian Ruble + RUB + + + Russian Ruble (1991-1998) + RUR + + + Rwandan Franc + RWF + + + Saudi Riyal + SRl + + + Saudi Sovereign Riyal + SAS + + + Solomon Islands Dollar + SI$ + + + Seychelles Rupee + SR + + + Sudanese Dinar + SDD + + + Sudanese Pound + SDP + + + Swedish Krona + SKr + + + Singapore Dollar + S$ + + + Saint Helena Pound + SHP + + + Slovenia Tolar Bons + SIB + + + Slovenia Tolar + SIT + + + Slovak Koruna + Sk + + + Sierra Leone Leone + SLL + + + San Marino Lira + SML + + + Somali Shilling + So. Sh. + + + Somaliland Shilling + SQS + + + Suriname Guilder + Sf + + + Scotland Pound + SSP + + + Sao Tome and Principe Dobra + Db + + + Sao Tome and Principe Escudo + STE + + + Soviet New Ruble + SUN + + + Soviet Rouble + SUR + + + El Salvador Colon + SVC + + + Syrian Pound + LS + + + Swaziland Lilangeni + E + + + Turks and Caicos Crown + TCC + + + Chad CFA Franc + TDF + + + Thai Baht + THB + + + Tajikistan Ruble + TJR + + + Tajikistan Somoni + TJS + + + Turkmenistan Manat + TMM + + + Tunisian Dinar + TND + + + Tonga Paʻanga + T$ + + + Tonga Pound Sterling + TOS + + + Timor Escudo + TPE + + + Timor Pataca + TPP + + + Turkish Lira + TL + + + Trinidad and Tobago Dollar + TT$ + + + Trinidad and Tobago Old Dollar + TTO + + + Tuvalu Dollar + TVD + + + Taiwan New Dollar + NT$ + + + Tanzanian Shilling + T Sh + + + Ukrainian Hryvnia + UAH + + + Ukrainian Karbovanetz + UAK + + + Ugandan Shilling (1966-1987) + UGS + + + Ugandan Shilling + U Sh + + + US Dollar + US$ + + + US Dollar (Next day) + USN + + + US Dollar (Same day) + USS + + + Uruguay Peso Fuerte + UYF + + + Uruguay Peso (1975-1993) + UYP + + + Uruguay Peso Uruguayo + Ur$ + + + Uzbekistan Coupon Som + UZC + + + Uzbekistan Sum + UZS + + + Vatican City Lira + VAL + + + North Vietnam Piastre Dong Viet + VDD + + + North Vietnam New Dong + VDN + + + North Vietnam Viet Minh Piastre Dong Viet + VDP + + + Venezuelan Bolivar + Be + + + British Virgin Islands Dollar + VGD + + + Vietnamese 
Dong + VND + + + Vietnamese New Dong + VNN + + + Vietnamese Republic Dong + VNR + + + Vietnamese National Dong + VNS + + + Vanuatu Vatu + VT + + + Western Samoa Pound + WSP + + + Western Samoa Tala + WST + + + Asian Dinar Unit of Account + XAD + + + CFA Franc BEAC + XAF + + + Asian Monetary Unit + XAM + + + Gold + XAU + + + European Composite Unit + XBA + + + European Monetary Unit + XBB + + + European Unit of Account (XBC) + XBC + + + European Unit of Account (XBD) + XBD + + + East Caribbean Dollar + EC$ + + + CFA Nouveau Franc + XCF + + + Special Drawing Rights + XDR + + + CFA Franc BCEAEC + XEF + + + European Currency Unit + XEU + + + French Gold Franc + XFO + + + French UIC-Franc + XFU + + + Islamic Dinar + XID + + + French Metropolitan Nouveau Franc + XMF + + + French Antilles CFA Franc + XNF + + + CFA Franc BCEAO + XOF + + + CFP Franc + CFPF + + + COMECON Transferable Ruble + XTR + + + Yemeni Dinar + YDD + + + Yemeni Imadi Riyal + YEI + + + Yemeni Rial + YRl + + + Yugoslavian Hard Dinar + YUD + + + Yugoslavian Federation Dinar + YUF + + + Yugoslavian 1994 Dinar + YUG + + + Yugoslavian Noviy Dinar + YUM + + + Yugoslavian Convertible Dinar + YUN + + + Yugoslavian October Dinar + YUO + + + Yugoslavian Reformed Dinar + YUR + + + South African Rand (financial) + ZAL + + + South African Pound + ZAP + + + South African Rand + R + + + Zambian Kwacha + ZMK + + + Zambian Pound + ZMP + + + Zairean New Zaire + ZRN + + + Zairean Zaire + ZRZ + + + Zimbabwe Dollar + Z$ + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/en_AS.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/en_AS.xml new file mode 100644 index 0000000..ff070fb --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/en_AS.xml @@ -0,0 +1,40 @@ + + + + + + + + + + + + + + #,##0.###;-#,##0.### + + + + + + + #E0 + + + + + + + #,##0% + + + + + + + ¤#,##0.00;(¤#,##0.00) + + + + + diff --git 
a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/en_AU.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/en_AU.xml new file mode 100644 index 0000000..09816ae --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/en_AU.xml @@ -0,0 +1,81 @@ + + + + + + + + + + + + + + + + + EEEE, d MMMM yyyy + + + + + d MMMM yyyy + + + + + dd/MM/yyyy + + + + + d/MM/yy + + + + + + + + h:mm:ss a z + + + + + h:mm:ss a + + + + + h:mm:ss a + + + + + h:mm a + + + + + + + {1} {0} + + + + + + + + + + Australian Dollar + $ + + + US Dollar + US$ + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/en_BE.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/en_BE.xml new file mode 100644 index 0000000..af6b2e0 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/en_BE.xml @@ -0,0 +1,126 @@ + + + + + + + + + + + + + + + + + + + + + EEEE d MMMM yyyy + + + + + EEE d MMM yyyy + + + + + dd MMM yyyy + + + + + dd/MM/yy + + + + + + + + HH' h 'mm' min 'ss' s 'z + + + + + HH:mm:ss z + + + + + HH:mm:ss + + + + + HH:mm + + + + + + + {1} {0} + + + + + + + + + , + . + ; + % + 0 + # + + + - + E + + + + + + + + #,##0.###;-#,##0.### + + + + + + + #E0 + + + + + + + #,##0% + + + + + + + #,##0.00 ¤;-#,##0.00 ¤ + + + + + + Belgian Franc + BF + #,##0.00 ¤;-#,##0.00 ¤ + #,##0.00 ¤;-#,##0.00 ¤ + . 
+ + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/en_BW.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/en_BW.xml new file mode 100644 index 0000000..8822934 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/en_BW.xml @@ -0,0 +1,99 @@ + + + + + + + + + + + + + + + + + EEEE dd MMMM yyyy + + + + + dd MMMM yyyy + + + + + MMM dd,yy + + + + + dd/MM/yy + + + + + + + + h:mm:ss a + + + + + h:mm:ss a + + + + + h:mm:ss a + + + + + h:mm a + + + + + + + {1} {0} + + + + + + + + + + + #,##0.###;-#,##0.### + + + + + + + #E0 + + + + + + + #,##0% + + + + + + + ¤#,##0.00;-¤#,##0.00 + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/en_BZ.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/en_BZ.xml new file mode 100644 index 0000000..dfa92c4 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/en_BZ.xml @@ -0,0 +1,73 @@ + + + + + + + + + + + + + + + + + + + + + dd MMMM yyyy + + + + + dd MMMM yyyy + + + + + dd-MMM-yy + + + + + dd/MM/yy + + + + + + + + HH:mm:ss z + + + + + HH:mm:ss z + + + + + HH:mm:ss + + + + + HH:mm + + + + + + + {1} {0} + + + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/en_CA.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/en_CA.xml new file mode 100644 index 0000000..08cbbe3 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/en_CA.xml @@ -0,0 +1,109 @@ + + + + + + + + + + + + + + + + + EEEE, MMMM d, yyyy + + + + + MMMM d, yyyy + + + + + d-MMM-yy + + + + + dd/MM/yy + + + + + + + + h:mm:ss a z + + + + + h:mm:ss a z + + + + + h:mm:ss a + + + + + h:mm a + + + + + + + {1} {0} + + + + + + + + + + + #,##0.###;-#,##0.### + + + + + + + #E0 + + + + + + + #,##0% + + + + + + + ¤#,##0.00;(¤#,##0.00) + + + + + + Canadian Dollar + $ + + + US Dollar + US$ + + + + diff --git 
a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/en_GB.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/en_GB.xml new file mode 100644 index 0000000..59552ef --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/en_GB.xml @@ -0,0 +1,85 @@ + + + + + + + + + + + + + + + + + + + + + EEEE, d MMMM yyyy + + + + + d MMMM yyyy + + + + + d MMM yyyy + + + + + dd/MM/yyyy + + + + + + + + HH:mm:ss z + + + + + HH:mm:ss z + + + + + HH:mm:ss + + + + + HH:mm + + + + + + + {1} {0} + + + + + + + + + Greenwich Mean Time + British Summer Time + + + GMT + BST + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/en_GU.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/en_GU.xml new file mode 100644 index 0000000..71d4133 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/en_GU.xml @@ -0,0 +1,40 @@ + + + + + + + + + + + + + + #,##0.###;-#,##0.### + + + + + + + #E0 + + + + + + + #,##0% + + + + + + + ¤#,##0.00;(¤#,##0.00) + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/en_HK.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/en_HK.xml new file mode 100644 index 0000000..a4bf28a --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/en_HK.xml @@ -0,0 +1,109 @@ + + + + + + + + + + + + + + + + + EEEE, d MMMM yyyy + + + + + d MMMM yyyy + + + + + d MMM yyyy + + + + + dd/MM/yyyy + + + + + + + + h:mm:ss a z + + + + + h:mm:ss a z + + + + + h:mm:ss a + + + + + h:mm a + + + + + + + {1} {0} + + + + + + + + + + + #,##0.###;-#,##0.### + + + + + + + #E0 + + + + + + + #,##0% + + + + + + + ¤#,##0.00;(¤#,##0.00) + + + + + + Hong Kong Dollar + $ + + + USD + US$ + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/en_IE.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/en_IE.xml new file mode 100644 index 0000000..8746d17 --- /dev/null +++ 
b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/en_IE.xml @@ -0,0 +1,120 @@ + + + + + + + + + + + + + a.m. + p.m. + + + + + EEEE d MMMM yyyy + + + + + d MMMM yyyy + + + + + d MMM yyyy + + + + + dd/MM/yyyy + + + + + + + + HH:mm:ss z + + + + + HH:mm:ss z + + + + + HH:mm:ss + + + + + HH:mm + + + + + + + {1} {0} + + + + + + + + + Greenwich Mean Time + Irish Summer Time + + + GMT + IST + + Dublin + + + + + + + + #,##0.###;-#,##0.### + + + + + + + #E0 + + + + + + + #,##0% + + + + + + + ¤#,##0.00;-¤#,##0.00 + + + + + + Irish Pound + £ + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/en_IN.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/en_IN.xml new file mode 100644 index 0000000..b6ea230 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/en_IN.xml @@ -0,0 +1,99 @@ + + + + + + + + + + + + + + + + + EEEE d MMMM yyyy + + + + + d MMMM yyyy + + + + + dd-MMM-yy + + + + + dd/MM/yy + + + + + + + + h:mm:ss a z + + + + + h:mm:ss a z + + + + + h:mm:ss a + + + + + h:mm a + + + + + + + {1} {0} + + + + + + + + + + + ##,##,##0.###;-##,##,##0.### + + + + + + + #E0 + + + + + + + ##,##,##0% + + + + + + + ¤ ##,##,##0.00;-¤ ##,##,##0.00 + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/en_JM.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/en_JM.xml new file mode 100644 index 0000000..d24e13f --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/en_JM.xml @@ -0,0 +1,10 @@ + + + + + + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/en_MH.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/en_MH.xml new file mode 100644 index 0000000..c3bad12 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/en_MH.xml @@ -0,0 +1,40 @@ + + + + + + + + + + + + + + #,##0.###;-#,##0.### + + + + + + + #E0 + + + + + + + #,##0% + + + + + + + ¤#,##0.00;(¤#,##0.00) + 
+ + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/en_MP.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/en_MP.xml new file mode 100644 index 0000000..6411b88 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/en_MP.xml @@ -0,0 +1,40 @@ + + + + + + + + + + + + + + #,##0.###;-#,##0.### + + + + + + + #E0 + + + + + + + #,##0% + + + + + + + ¤#,##0.00;(¤#,##0.00) + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/en_MT.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/en_MT.xml new file mode 100644 index 0000000..c64e89d --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/en_MT.xml @@ -0,0 +1,109 @@ + + + + + + + + + + + + + + + + + + + + + EEEE, d MMMM yyyy + + + + + dd MMMM yyyy + + + + + dd MMM yyyy + + + + + dd/MM/yyyy + + + + + + + + HH:mm:ss z + + + + + HH:mm:ss z + + + + + HH:mm:ss + + + + + HH:mm + + + + + + + {1} {0} + + + + + + + + + + + #,##0.###;-#,##0.### + + + + + + + #E0 + + + + + + + #,##0% + + + + + + + ¤#,##0.00;-¤#,##0.00 + + + + + + British Pound Sterling + GBP + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/en_NZ.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/en_NZ.xml new file mode 100644 index 0000000..8af0fac --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/en_NZ.xml @@ -0,0 +1,81 @@ + + + + + + + + + + + + + + + + + EEEE, d MMMM yyyy + + + + + d MMMM yyyy + + + + + d/MM/yyyy + + + + + d/MM/yy + + + + + + + + h:mm:ss a z + + + + + h:mm:ss a + + + + + h:mm:ss a + + + + + h:mm a + + + + + + + {1} {0} + + + + + + + + + + New Zealand Dollar + $ + + + US Dollar + US$ + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/en_PH.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/en_PH.xml new file mode 100644 index 0000000..011a3b7 --- /dev/null +++ 
b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/en_PH.xml @@ -0,0 +1,105 @@ + + + + + + + + + + + + + + + + + EEEE, MMMM d, yyyy + + + + + MMMM d, yyyy + + + + + MM d, yy + + + + + M/d/yy + + + + + + + + h:mm:ss a z + + + + + h:mm:ss a z + + + + + h:mm:ss a + + + + + h:mm a + + + + + + + {1} {0} + + + + + + + + + + + #,##0.###;-#,##0.### + + + + + + + #E0 + + + + + + + #,##0% + + + + + + + ¤#,##0.00;(¤#,##0.00) + + + + + + Peso + PHP + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/en_SG.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/en_SG.xml new file mode 100644 index 0000000..f6d3576 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/en_SG.xml @@ -0,0 +1,94 @@ + + + + + + + + + + + + + + + + + dd MMMM yyyy + + + + + dd MMM yyyy + + + + + dd-MMM-yy + + + + + dd/MM/yy + + + + + + + + a hh:mm:ss + + + + + a hh:mm:ss + + + + + a hh:mm + + + + + a hh:mm + + + + + + + {1} {0} + + + + + + + + + Singapore Standard Time + Singapore Standard Time + + + SST + SST + + Singapore + + + + + + + Singapore Dollar + $ + + + USD + US$ + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/en_TT.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/en_TT.xml new file mode 100644 index 0000000..2f89bc6 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/en_TT.xml @@ -0,0 +1,10 @@ + + + + + + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/en_UM.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/en_UM.xml new file mode 100644 index 0000000..56dd247 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/en_UM.xml @@ -0,0 +1,40 @@ + + + + + + + + + + + + + + #,##0.###;-#,##0.### + + + + + + + #E0 + + + + + + + #,##0% + + + + + + + ¤#,##0.00;(¤#,##0.00) + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/en_US.xml 
b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/en_US.xml new file mode 100644 index 0000000..ba14d94 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/en_US.xml @@ -0,0 +1,53 @@ + + + + + + + + + + + + + 279 + 216 + + + + + + + #,##0.###;-#,##0.### + + + + + + + #E0 + + + + + + + #,##0% + + + + + + + ¤#,##0.00;(¤#,##0.00) + + + + + + US Dollar + $ + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/en_US_POSIX.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/en_US_POSIX.xml new file mode 100644 index 0000000..4ae8ced --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/en_US_POSIX.xml @@ -0,0 +1,55 @@ + + + + + + + + + + + + + . + , + ; + % + 0 + # + + + - + E + 0/00 + INF + NaN + + + + + ###0.###;-###0.### + + + + + + + 0.000000E+000 + + + + + + + ###0% + + + + + + + ¤ ###0.00;-¤ ###0.00 + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/en_VI.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/en_VI.xml new file mode 100644 index 0000000..d5f85bc --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/en_VI.xml @@ -0,0 +1,40 @@ + + + + + + + + + + + + + + #,##0.###;-#,##0.### + + + + + + + #E0 + + + + + + + #,##0% + + + + + + + ¤#,##0.00;(¤#,##0.00) + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/en_ZA.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/en_ZA.xml new file mode 100644 index 0000000..eb8deb2 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/en_ZA.xml @@ -0,0 +1,99 @@ + + + + + + + + + + + + + + + + + EEEE dd MMMM yyyy + + + + + dd MMMM yyyy + + + + + dd MMM yyyy + + + + + yyyy/MM/dd + + + + + + + + h:mm:ss a + + + + + h:mm:ss a + + + + + h:mm:ss a + + + + + h:mm a + + + + + + + {1} {0} + + + + + + + + + + + #,##0.###;-#,##0.### + + + + + + + #E0 + + + + + + + #,##0% + + + + + 
+ + ¤#,##0.00;-¤#,##0.00 + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/en_ZW.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/en_ZW.xml new file mode 100644 index 0000000..8597c3e --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/en_ZW.xml @@ -0,0 +1,109 @@ + + + + + + + + + + + + + + + + + EEEE dd MMMM yyyy + + + + + dd MMMM yyyy + + + + + dd MMM,yy + + + + + d/M/yyyy + + + + + + + + h:mm:ss a + + + + + h:mm:ss a + + + + + h:mm:ss a + + + + + h:mm a + + + + + + + {1} {0} + + + + + + + + + + + #,##0.###;-#,##0.### + + + + + + + #E0 + + + + + + + #,##0% + + + + + + + ¤#,##0.00;-¤#,##0.00 + + + + + + USD + US$ + + + Zimbabwean Dollar + Z$ + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/eo.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/eo.xml new file mode 100644 index 0000000..e971317 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/eo.xml @@ -0,0 +1,523 @@ + + + + + + + + + + + afara + abĥaza + afrikansa + amhara + araba + asama + ajmara + azerbajĝana + baŝkira + belorusa + bulgara + bihara + bislamo + bengala + tibeta + bretona + kataluna + korsika + ĉeĥa + kimra + dana + germana + dzonko + greka + angla + esperanto + hispana + estona + eŭska + persa + finna + fiĝia + feroa + franca + frisa + irlanda + gaela + galega + gvarania + guĝarata + haŭsa + hebrea + hinda + kroata + hungara + armena + interlingvao + indonezia + okcidentalo + eskima + islanda + itala + inuita + japana + java + kartvela + kazaĥa + gronlanda + kmera + kanara + korea + kaŝmira + kurda + kirgiza + latino + lingala + laŭa + litova + latva + malagasa + maoria + makedona + malajalama + mongola + marata + malaja + malta + birma + naura + nepala + nederlanda + norvega + okcitana + oroma + orijo + panĝaba + pola + paŝtua + portugala + keĉua + romanĉa + burunda + rumana + rusa + ruanda + sanskrito + sinda + sangoa + serbo-Kroata + 
sinhala + slovaka + slovena + samoa + ŝona + somala + albana + serba + svazia + sota + sunda + sveda + svahila + tamila + telugua + taĝika + taja + tigraja + turkmena + filipina + cvana + tongaa + turka + conga + tatara + akana + ujgura + ukraina + urduo + uzbeka + vjetnama + volapuko + volofa + ksosa + jida + joruba + ĝuanga + ĉina + zulua + + + Andoro + Unuiĝintaj Arabaj Emirlandos + Afganujo + Antigvo-Barbudo + Angvilo + Albanujo + Armenujo + Nederlandaj Antiloj + Angolo + Antarkto + Argentino + Aŭstrujo + Aŭstralio + Arubo + Azerbajĝano + Bosnio-Hercegovino + Barbado + Bangladeŝo + Belgujo + Burkino + Bulgarujo + Barejno + Burundo + Benino + Bermudoj + Brunejo + Bolivio + Brazilo + Bahamoj + Butano + Bocvano + Belorusujo + Belizo + Kanado + Centr-Afrika Respubliko + Kongolo + Svisujo + Ebur-Bordo + Kukinsuloj + Ĉilio + Kameruno + Ĉinujo + Kolombio + Kostariko + Kubo + Kabo-Verdo + Kipro + Ĉeĥujo + Germanujo + Ĝibutio + Danujo + Dominiko + Domingo + Alĝerio + Ekvadoro + Estonujo + Egipto + Okcidenta Saharo + Eritreo + Hispanujo + Etiopujo + Finnlando + Fiĝoj + Mikronezio + Ferooj + Francujo + Gabono + Unuiĝinta Reĝlando + Grenado + Kartvelujo + Franca Gviano + Ganao + Ĝibraltaro + Gronlando + Gambio + Gvineo + Gvadelupo + Ekvatora Gvineo + Grekujo + Sud-Georgio kaj Sud-Sandviĉinsuloj + Gvatemalo + Gvamo + Gvineo-Bisaŭo + Gujano + Herda kaj Makdonaldaj Insuloj + Honduro + Kroatujo + Haitio + Hungarujo + Indonezio + Irlando + Israelo + Hindujo + Brita Hindoceana Teritorio + Irako + Irano + Islando + Italujo + Jamajko + Jordanio + Japanujo + Kenjo + Kirgizistano + Kamboĝo + Kiribato + Komoroj + Sent-Kristofo kaj Neviso + Nord-Koreo + Sud-Koreo + Kuvajto + Kejmanoj + Kazaĥstano + Laoso + Libano + Sent-Lucio + Liĥtenŝtejno + Sri-Lanko + Liberio + Lesoto + Litovujo + Luksemburgo + Latvujo + Libio + Maroko + Monako + Moldavujo + Madagaskaro + Marŝaloj + Makedonujo + Malio + Mjanmao + Mongolujo + Nord-Marianoj + Martiniko + Maŭritanujo + Malto + Maŭricio + Maldivoj + 
Malavio + Meksiko + Malajzio + Mozambiko + Namibio + Nov-Kaledonio + Niĝero + Norfolkinsulo + Niĝerio + Nikaragvo + Nederlando + Norvegujo + Nepalo + Nauro + Niuo + Nov-Zelando + Omano + Panamo + Peruo + Franca Polinezio + Papuo-Nov-Gvineo + Filipinoj + Pakistano + Pollando + Sent-Piero kaj Mikelono + Pitkarna Insulo + Puerto-Riko + Portugalujo + Belaŭo + Paragvajo + Kataro + Reunio + Rumanujo + Rusujo + Ruando + Saŭda Arabujo + Salomonoj + Sejŝeloj + Sudano + Svedujo + Singapuro + Sent-Heleno + Slovenujo + Svalbardo kaj Jan-Majen-insulo + Slovakujo + Siera-Leono + San-Marino + Senegalo + Somalujo + Serbujo + Surinamo + Sao-Tomeo kaj Principeo + Salvadoro + Sirio + Svazilando + Ĉado + Togolo + Tajlando + Taĝikujo + Turkmenujo + Tunizio + Tongo + Turkujo + Trinidado kaj Tobago + Tuvalo + Tajvano + Tanzanio + Ukrajno + Ugando + Usonaj malgrandaj insuloj + Usono + Urugvajo + Uzbekujo + Vatikano + Sent-Vincento kaj la Grenadinoj + Venezuelo + Britaj Virgulininsuloj + Usonaj Virgulininsuloj + Vjetnamo + Vanuatuo + Valiso kaj Futuno + Samoo + Jemeno + Majoto + Sud-Afriko + Zambio + Zimbabvo + + + + [a-z ŭ ĉ ĝ ĥ ĵ ŝ] + + + GjMtkHmslTDUSnahKzJdugAZ + + + + + + jan + feb + mar + apr + maj + jun + jul + aŭg + sep + okt + nov + dec + + + januaro + februaro + marto + aprilo + majo + junio + julio + aŭgusto + septembro + oktobro + novembro + decembro + + + + + + + di + lu + ma + me + ĵa + ve + sa + + + dimanĉo + lundo + mardo + merkredo + ĵaŭdo + vendredo + sabato + + + + + + + + atm + ptm + + + aK + pK + + + + + + + EEEE, d'-a de 'MMMM yyyy + + + + + yyyy-MMMM-dd + + + + + yyyy-MMM-dd + + + + + yy-MM-dd + + + + + + + + H'-a horo kaj 'm z + + + + + HH:mm:ss z + + + + + HH:mm:ss + + + + + HH:mm + + + + + + + {1} {0} + + + + + + + + + , +   + ; + % + 0 + # + + + - + E + + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/es.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/es.xml new file mode 100644 index 0000000..7b43f5f --- 
/dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/es.xml @@ -0,0 +1,2553 @@ + + + + + + + + + + + afar + abkhaziano + avéstico + afrikaans + akan + amárico + aragonés + árabe + asamés + avar + aymara + azerí + bashkir + bielorruso + búlgaro + bihari + bislama + bambara + bengalí + tibetano + bretón + bosnio + blin + catalán + checheno + chamorro + cherokee + corso + cree + checo + eslavo eclesiástico + chuvash + galés + danés + alemán + divehi + bhutaní + ewe + griego + inglés + esperanto + español + estonio + vasco + farsi + fula + finlandés + fidji + feroés + francés + frisón + irlandés + gaélico escocés + geez + gallego + guaraní + gujarati + gaélico manés + hausa + hawaiano + hebreo + hindi + hiri motu + croata + haitiano + húngaro + armenio + herero + interlingua + indonesio + interlingue + igbo + sichuan yi + inupiak + ido + islandés + italiano + inuktitut + japonés + javanés + georgiano + kongo + kikuyu + kuanyama + kazajo + groenlandés + jemer + canarés + coreano + konkani + kanuri + cachemiro + kurdo + komi + córnico + kirghiz + latín + luxemburgués + ganda + limburgués + lingala + laosiano + lituano + luba-katanga + letón + malgache + marshalés + maorí + macedonio + malayalam + mongol + moldavo + marathi + malayo + maltés + birmano + nauruano + bokmal noruego + ndebele septentrional + nepalí + ndonga + holandés + nynorsk noruego + noruego + ndebele meridional + navajo + nyanja + occitano (después del 1500) + ojibwa + oromo + oriya + osético + punjabí + pali + polaco + pashto + portugués + quechua + reto-romance + kiroundi + rumano + raíz + ruso + kinyarwanda + sánscrito + sardo + sindhi + sami septentrional + sango + serbo-croata + singalés + sidamo + eslovaco + esloveno + samoano + shona + somalí + albanés + serbio + siswati + sesotho + sundanés + sueco + swahili + siriaco + tamil + telugu + tayiko + tailandés + tigrinya + tigré + turkmeno + tagalo + setchwana + tonga (Islas Tonga) + turco + tsonga + tatar + twi + tahitiano + 
uigur + ucraniano + urdu + uzbeko + venda + vietnamita + volapuk + valón + uolof + xhosa + yidish + yoruba + zhuang + chino + zulú + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Andorra + Emiratos Árabes Unidos + Afganistán + Antigua y Barbuda + Anguila + Albania + Armenia + Antillas Neerlandesas + Angola + Antártida + Argentina + Samoa Americana + Austria + Australia + Aruba + Azerbaiyán + Bosnia-Herzegovina + Barbados + Bangladesh + Bélgica + Burkina Faso + Bulgaria + Bahráin + Burundi + Benín + Bermudas + Brunéi + Bolivia + Brasil + Bahamas + Bután + Isla Bouvet + Botsuana + Bielorrusia + Belice + Canadá + Islas Cocos (Keeling) + República Democrática del Congo + República Centroafricana + Congo + Suiza + Costa de Marfil + Islas Cook + Chile + Camerún + China + Colombia + Costa Rica + Cuba + Cabo Verde + Isla Navidad + Chipre + República Checa + Alemania + Yibuti + Dinamarca + Dominica + República Dominicana + Argelia + Ecuador + Estonia + Egipto + Sáhara Occidental + Eritrea + España + Etiopía + Finlandia + Fiyi + Islas Falkland (Malvinas) + Micronesia + Islas Feroe + Francia + en + Gabón + Reino Unido + Granada + Georgia + Guayana Francesa + Ghana + Gibraltar + Groenlandia + Gambia + Guinea + Guadalupe + Guinea Ecuatorial + Grecia + Islas Georgia del Sur y Sandwich del Sur + Guatemala + Guam + Guinea-Bissau + Guyana + Hong-Kong, Región administrativa especial de China + Islas Heard y McDonald + Honduras + Croacia + Haití + Hungría + Indonesia + Irlanda + Israel + India + Territorios Británico del Océano Índico + Iraq + Irán + Islandia + Italia + Jamaica + Jordania + Japón + Kenia + Kirguizistán + Camboya + Kiribati + Comoras + San Cristóbal y Nieves + Corea del Norte + Corea del Sur + Kuwait + Islas Caimán + Kazajstán + Laos + Líbano + Saint Lucia + Liechtenstein + Sri Lanka + Liberia + Lesotho + Lituania + Luxemburgo + Letonia + Libia + Marruecos + Mónaco + Moldova + Madagascar + Islas 
Marshall + Macedonia + Malí + Myanmar + Mongolia + Macao, Región administrativa especial de China + Islas Marianas del Norte + Martinica + Mauritania + Montserrat + Malta + Mauricio + Maldivas + Malawi + México + Malasia + Mozambique + Namibia + Nueva Caledonia + Níger + Isla Norfolk + Nigeria + Nicaragua + Países Bajos + Noruega + Nepal + Nauru + Isla Niue + Nueva Zelanda + Omán + Panamá + Perú + Polinesia Francesa + Papúa Nueva Guinea + Filipinas + Pakistán + Polonia + San Pedro y Miquelón + Pitcairn + Puerto Rico + Territorios Palestinos + Portugal + Palau + Paraguay + Qatar + Réunion + Rumanía + Rusia + Ruanda + Arabia Saudí + Islas Salomón + Seychelles + Sudán + Suecia + Singapur + Santa Elena + Eslovenia + Svalbard y Jan Mayen + Eslovaquia + Sierra Leona + San Marino + Senegal + Somalia + Serbia + Suriname + Santo Tomé y Príncipe + El Salvador + Siria + Suazilandia + Islas Turcas y Caicos + Chad + Territorios Australes Franceses + Togo + Tailandia + Tayikistán + Islas Tokelau + Timor Oriental + Turkmenistán + Túnez + Tonga + Turquía + Trinidad y Tabago + Tuvalu + Taiwán, República de China + Tanzania + Ucrania + Uganda + Islas menores alejadas de los Estados Unidos + Estados Unidos + Uruguay + Uzbekistán + Ciudad del Vaticano + San Vicente y las Granadinas + Venezuela + Islas Vírgenes Británicas + Islas Vírgenes de los Estados Unidos + Vietnam + Vanuatu + Wallis y Futuna + Samoa + Yemen + Mayotte + Yugoslavia + Sudáfrica + Zambia + Zimbabue + + + Revisado + + + calendario + intercalación + moneda + + + calendario budista + calendario chino + calendario gregoriano + calendario hebreo + calendario islámico + calendario civil islámico + calendario japonés + orden directo + orden de listín telefónico + orden pinyin + orden pincelada + orden tradicional + + + + [a-z ñ á é í ó ú ü] + + + GuMtkHmsSEDFwWahKzUeygAZ + + + + + + ene + feb + mar + abr + may + jun + jul + ago + sep + oct + nov + dic + + + E + F + M + A + M + J + J + A + S + O + N + D + + + enero + febrero 
+ marzo + abril + mayo + junio + julio + agosto + septiembre + octubre + noviembre + diciembre + + + + + + + dom + lun + mar + mié + jue + vie + sáb + + + D + L + M + M + J + V + S + + + domingo + lunes + martes + miércoles + jueves + viernes + sábado + + + + + + + + + + a.C. + d.C. + + + + + + + EEEE d' de 'MMMM' de 'yyyy + + + + + d' de 'MMMM' de 'yyyy + + + + + dd-MMM-yy + + + + + d/MM/yy + + + + + + + + HH'H'mm''ss" z + + + + + HH:mm:ss z + + + + + HH:mm:ss + + + + + HH:mm + + + + + + + {1} {0} + + + + + + + + + Hora estándar del Pacífico + Hora de verano del Pacífico + + + PST + PDT + + Los Ángeles + + + + Hora estándar del Pacífico + Hora de verano del Pacífico + + + PST + PDT + + Los Ángeles + + + + Hora estándar de Montaña + Hora de verano de Montaña + + + MST + MDT + + Denver + + + + Hora estándar de Montaña + Hora de verano de Montaña + + + MST + MDT + + Denver + + + + Hora estándar de Montaña + Hora estándar de Montaña + + + MST + MST + + Phoenix + + + + Hora estándar de Montaña + Hora estándar de Montaña + + + MST + MST + + Phoenix + + + + Hora estándar central + Hora de verano central + + + CST + CDT + + Chicago + + + + Hora estándar central + Hora de verano central + + + CST + CDT + + Chicago + + + + Hora estándar oriental + Hora de verano oriental + + + EST + EDT + + Nueva York + + + + Hora estándar oriental + Hora de verano oriental + + + EST + EDT + + Nueva York + + + + Hora estándar oriental + Hora estándar oriental + + + EST + EST + + Indianápolis + + + + Hora estándar oriental + Hora estándar oriental + + + EST + EST + + Indianápolis + + + + Hora estándar de Hawai + Hora estándar de Hawai + + + HST + HST + + Honolulu + + + + Hora estándar de Hawai + Hora estándar de Hawai + + + HST + HST + + Honolulu + + + + Hora estándar de Alaska + Hora de verano de Alaska + + + AST + ADT + + Anchorage + + + + Hora estándar de Alaska + Hora de verano de Alaska + + + AST + ADT + + Anchorage + + + + Hora estándar del Atlántico + Hora de verano del Atlántico + + 
+ AST + ADT + + Halifax + + + + Hora estándar de Newfoundland + Hora de verano de Newfoundland + + + CNT + CDT + + St. Johns + + + + Hora estándar de Newfoundland + Hora de verano de Newfoundland + + + CNT + CDT + + St. Johns + + + + Hora estándar de Europa Central + Hora de verano de Europa Central + + + CET + CEST + + París + + + + Hora estándar de Europa Central + Hora de verano de Europa Central + + + CET + CEST + + París + + + + Hora media de Greenwich + Hora media de Greenwich + + + GMT + GMT + + Londres + + + + Hora media de Greenwich + Hora media de Greenwich + + + GMT + GMT + + Casablanca + + + + Hora estándar de Israel + Hora de verano de Israel + + + IST + IDT + + Jerusalén + + + + Hora estándar de Japón + Hora estándar de Japón + + + JST + JST + + Tokio + + + + Hora estándar de Japón + Hora estándar de Japón + + + JST + JST + + Tokio + + + + Hora estándar de Europa del Este + Hora de verano de Europa del Este + + + EET + EEST + + Bucarest + + + + Hora estándar de China + Hora estándar de China + + + CTT + CDT + + Shanghai + + + + Hora estándar de China + Hora estándar de China + + + CTT + CDT + + Shanghai + + + + + + + + #,##0.###;-#,##0.### + + + + + + + #E0 + + + + + + + #,##0% + + + + + + + ¤#,##0.00;(¤#,##0.00) + + + + + + diner andorrano + ADD + + + peseta andorrana + ADP + + + dirham de los Emiratos Árabes Unidos + AED + + + afgani (1927-2002) + AFA + + + afgani + Af + + + franco de Affars e Issas + AIF + + + lek albanés (1946-1961) + ALK + + + lek albanés + lek + + + lek valute albanés + ALV + + + certificados de cambio albaneses en dólares + ALX + + + dram armenio + dram + + + florín de las Antillas Neerlandesas + NA f. 
+ + + kwanza angoleño + AOA + + + kwanza angoleño (1977-1990) + AOK + + + nuevo kwanza angoleño (1990-2000) + AON + + + kwanza reajustado angoleño (1995-1999) + AOR + + + escudo angoleño + AOS + + + austral argentino + ARA + + + peso moneda nacional argentino + ARM + + + peso argentino (1983-1985) + ARP + + + peso argentino + Arg$ + + + chelín austriaco + ATS + + + dólar australiano + $A + + + libra australiana + AUP + + + florín de Aruba + AWG + + + manat azerí + AZM + + + dinar bosnio + BAD + + + marco bosnio convertible + KM + + + nuevo dinar bosnio + BAN + + + dólar de Barbados + BDS$ + + + taka de Bangladesh + Tk + + + franco belga (convertible) + BEC + + + franco belga + BF + + + franco belga (financiero) + BEL + + + lev fuerte búlgaro + lev + + + lev socialista búlgaro + BGM + + + nuevo lev búlgaro + BGN + + + lev búlgaro (1879-1952) + BGO + + + certificados de cambio búlgaros en leva + BGX + + + dinar bahreiní + BD + + + franco de Burundi + Fbu + + + dólar de Bermudas + Ber$ + + + libra de Bermudas + BMP + + + dólar de Brunéi + BND + + + boliviano + Bs + + + boliviano (1863-1962) + BOL + + + peso boliviano + BOP + + + MVDOL boliviano + BOV + + + nuevo cruceiro brasileño (1967-1986) + BRB + + + cruzado brasileño + BRC + + + cruceiro brasileño (1990-1993) + BRE + + + real brasileño + R$ + + + nuevo cruzado brasileño + BRN + + + cruceiro brasileño + BRR + + + cruceiro brasileño (1942-1967) + BRZ + + + dólar de las Bahamas + BSD + + + libra de las Bahamas + BSP + + + ngultrum butanés + Nu + + + rupia butanesa + BTR + + + kyat birmano + BUK + + + rupia birmana + BUR + + + pula botsuano + BWP + + + nuevo rublo bielorruso (1994-1999) + BYB + + + rublo bielorruso (1992-1994) + BYL + + + rublo bielorruso + Rbl + + + dólar de Belice + BZ$ + + + dólar de Honduras Británica + BZH + + + dólar canadiense + Can$ + + + franco congoleño + CDF + + + franco de la República del Congo + CDG + + + zaire congoleño + CDL + + + franco CFA de la República Centroafricana + CFF + + + 
franco suizo + SwF + + + dólar de las Islas Cook + CKD + + + cóndor chileno + CLC + + + escudo chileno + CLE + + + unidad de fomento chilena + CLF + + + peso chileno + Ch$ + + + franco CFA de Camerún + CMF + + + jen min piao yuan chino + CNP + + + certificados de cambio chinos en dólares estadounidenses + CNX + + + yuan renminbi chino + Y + + + peso de papel colombiano + COB + + + franco CFA del Congo + COF + + + peso colombiano + Col$ + + + colón costarricense + C + + + corona checoslovaca + CSC + + + corona fuerte checoslovaca + CSK + + + peso cubano + CUP + + + certificados de cambio cubanos + CUX + + + escudo de Cabo Verde + CVEsc + + + florín de Curazao + CWG + + + libra chipriota + £C + + + corona checa + CZK + + + ostmark de Alemania del Este + DDM + + + marco alemán + DEM + + + sperrmark alemán + DES + + + franco de Yibuti + DF + + + corona danesa + DKr + + + peso dominicano + RD$ + + + dinar argelino + DA + + + nuevo franco argelino + DZF + + + franco germinal argelino + DZG + + + sucre ecuatoriano + ECS + + + unidad de valor constante (UVC) ecuatoriana + ECV + + + corona estonia + EEK + + + libra egipcia + EGP + + + nakfa eritreo + ERN + + + peseta española + + + + birr etíope + Br + + + dólar etíope + ETD + + + euro + + + + marco finlandés + FIM + + + marco finlandés (1860-1962) + FIN + + + dólar de las Islas Fiyi + F$ + + + libra de las Islas Fiyi + FJP + + + libra de las Islas Malvinas + FKP + + + corona de las Islas Feroe + FOK + + + franco francés + FRF + + + franco germinal/franco Poincaré francés + FRG + + + franco CFA de Gabón + GAF + + + libra esterlina británica + £ + + + kupon larit georgiano + GEK + + + lari georgiano + lari + + + cedi ghanés + GHC + + + antiguo cedi ghanés + GHO + + + libra ghanesa + GHP + + + cedi revaluado ghanés + GHR + + + libra de Gibraltar + GIP + + + corona de Groenlandia + GLK + + + dalasi gambiano + GMD + + + libra gambiana + GMP + + + franco guineo + GF + + + franco guineo (1960-1972) + GNI + + + syli guineano + GNS 
+ + + franco de Guadalupe + GPF + + + ekuele de Guinea Ecuatorial + GQE + + + franco de Guinea Ecuatorial + GQF + + + peseta guineana de Guinea Ecuatorial + GQP + + + dracma griego + GRD + + + nuevo dracma griego + GRN + + + quetzal guatemalteco + Q + + + franco guayanés de la Guayana Francesa + GUF + + + escudo de Guinea Portuguesa + GWE + + + mil reis de Guinea Portuguesa + GWM + + + peso de Guinea-Bissáu + GWP + + + dólar guyanés + G$ + + + dólar de Hong Kong + HK$ + + + lempira hondureño + L + + + dinar croata + HRD + + + kuna croata + HRK + + + gourde haitiano + HTG + + + forinto húngaro + Ft + + + libra de Irlanda del Norte + IBP + + + florín Nica indonesio + IDG + + + rupia Java indonesia + IDJ + + + nueva rupia indonesia + IDN + + + rupia indonesia + Rp + + + libra irlandesa + IR£ + + + sheqel israelí + ILL + + + libra israelí + ILP + + + nuevo sheqel israelí + ILS + + + libra esterlina de la Isla de Man + IMP + + + rupia india + =0#Rs.|1#Re.|1<Rs. + + + dinar iraquí + ID + + + rial iraní + RI + + + corona islandesa + ISK + + + lira italiana + + + + libra esterlina de Jersey + JEP + + + dólar de Jamaica + J$ + + + libra jamaicana + JMP + + + dinar jordano + JD + + + yen japonés + ¥ + + + chelín keniata + K Sh + + + som kirguís + som + + + antiguo riel camboyano + KHO + + + riel camboyano + CR + + + dólar de Kiribati + KID + + + franco comorense + CF + + + won del pueblo norcoreano + KPP + + + won norcoreano + KPW + + + hwan surcoreano + KRH + + + antiguo won surcoreano + KRO + + + won surcoreano + KRW + + + dinar kuwaití + KD + + + dólar de las Islas Caimán + KYD + + + rublo kazako + KZR + + + tenge kazako + T + + + kip laosiano + LAK + + + libra libanesa + LL + + + franco de Liechtenstein + LIF + + + rupia de Sri Lanka + SL Re + + + rupia cingalesa + LNR + + + dólar liberiano + LRD + + + loti lesothense + M + + + litas lituano + LTL + + + talonas lituano + LTT + + + franco luxemburgués + LUF + + + lats letón + LVL + + + rublo letón + LVR + + + lira libia 
de la Autoridad Militar Británica + LYB + + + dinar libio + LD + + + libra libia + LYP + + + dirham marroquí + MAD + + + franco marroquí + MAF + + + nuevo franco monegasco + MCF + + + franco germinal monegasco + MCG + + + cupón leu moldavo + MDC + + + leu moldavo + MDL + + + cupón rublo moldavo + MDR + + + ariary malgache + MGA + + + franco malgache + MGF + + + dólar de las Islas Marshall + MHD + + + dinar macedonio + MDen + + + dinar macedonio (1992-1993) + MKN + + + franco malí + MLF + + + kyat de Myanmar + MMK + + + certificados de cambio birmanos en dólares + MMX + + + tugrik mongol + Tug + + + pataca de Macao + MOP + + + franco de Martinica + MQF + + + ouguiya mauritano + UM + + + lira maltesa + Lm + + + libra maltesa + MTP + + + rupia mauriciana + MUR + + + rupia de Maldivas + MVP + + + rufiyaa de Maldivas + MVR + + + kwacha de Malawi + MK + + + libra de Malawi + MWP + + + peso mexicano + MEX$ + + + peso de plata mexicano (1861-1992) + MXP + + + unidad de inversión (UDI) mexicana + MXV + + + ringgit malasio + RM + + + escudo mozambiqueño + MZE + + + metical mozambiqueño + Mt + + + dólar de Namibia + N$ + + + franco germinal de Nueva Caledonia + NCF + + + naira nigeriano + NGN + + + libra nigeriana + NGP + + + franco CFP de las Nuevas Hébridas + NHF + + + córdoba nicaragüense + NIC + + + córdoba oro nicaragüense + NIG + + + córdoba oro nicaragüense + NIO + + + florín neerlandés + NLG + + + corona noruega + NKr + + + rupia nepalesa + Nrs + + + dólar neozelandés + $NZ + + + libra neozelandesa + NZP + + + rial omaní + RO + + + rial saidi omaní + OMS + + + balboa panameño + PAB + + + cupón rublo de Transdniestria + PDK + + + nuevo rublo de Transdniestria + PDN + + + rublo de Transdniestria + PDR + + + inti peruano + PEI + + + nuevo sol peruano + PEN + + + sol peruano + PES + + + kina de Papúa Nueva Guinea + PGK + + + peso filipino + PHP + + + rupia pakistaní + Pra + + + zloty polaco + Zl + + + certificados de cambio polacos en dólares estadounidenses + PLX + + + 
zloty polaco (1950-1995) + PLZ + + + libra palestina + PSP + + + conto portugués + PTC + + + escudo portugués + PTE + + + guaraní paraguayo + PYG + + + riyal de Qatar + QR + + + franco de Reunión + REF + + + leu rumano + leu + + + nuevo leu rumano + RON + + + rublo ruso + RUB + + + rublo ruso (1991-1998) + RUR + + + franco ruandés + RWF + + + riyal saudí + SRl + + + riyal soberano saudí + SAS + + + dólar de las Islas Salomón + SI$ + + + rupia de Seychelles + SR + + + dinar sudanés + SDD + + + libra sudanesa + SDP + + + corona sueca + SKr + + + dólar singapurense + S$ + + + libra de Santa Elena + SHP + + + tólar bons esloveno + SIB + + + tólar esloveno + SIT + + + corona eslovaca + Sk + + + leone de Sierra Leona + SLL + + + lira de San Marino + SML + + + chelín somalí + So. Sh. + + + chelín de Somalilandia + SQS + + + florín surinamés + Sf + + + libra escocesa + SSP + + + dobra de Santo Tomé y Príncipe + Db + + + escudo de Santo Tomé y Príncipe + STE + + + nuevo rublo soviético + SUN + + + rublo soviético + SUR + + + colón salvadoreño + SVC + + + libra siria + LS + + + lilangeni suazi + E + + + corona de las Islas Turcas y Caicos + TCC + + + franco CFA del Chad + TDF + + + baht tailandés + THB + + + rublo tayiko + TJR + + + somoni tayiko + TJS + + + manat turcomano + TMM + + + dinar tunecino + TND + + + paʻanga tongano + T$ + + + libra esterlina tongana + TOS + + + escudo timorense + TPE + + + pataca timorense + TPP + + + lira turca + TL + + + dólar de Trinidad y Tobago + TT$ + + + antiguo dólar de Trinidad y Tobago + TTO + + + dólar de Tuvalu + TVD + + + nuevo dólar taiwanés + NT$ + + + chelín tanzano + T Sh + + + grivna ucraniana + UAH + + + karbovanet ucraniano + UAK + + + chelín ugandés (1966-1987) + UGS + + + chelín ugandés + U Sh + + + dólar estadounidense + US$ + + + dólar estadounidense (día siguiente) + USN + + + dólar estadounidense (mismo día) + USS + + + peso fuerte uruguayo + UYF + + + peso uruguayo (1975-1993) + UYP + + + peso uruguayo + Ur$ + + + 
cupón som uzbeko + UZC + + + sum uzbeko + UZS + + + lira de la Ciudad del Vaticano + VAL + + + piastre dong viet de Vietnam del Norte + VDD + + + nuevo dong de Vietnam del Norte + VDN + + + viet minh piastre dong viet de Vietnam del Norte + VDP + + + bolívar venezolano + Be + + + dólar de las Islas Vírgenes Británicas + VGD + + + dong vietnamita + VND + + + nuevo dong vietnamita + VNN + + + dong de la República del vietnamita + VNR + + + dong nacional vietnamita + VNS + + + vatu vanuatuense + VT + + + libra samoana + WSP + + + tala samoano + WST + + + unidad de cuenta asiática en dinares + XAD + + + franco CFA BEAC + XAF + + + unidad monetaria asiática + XAM + + + oro + XAU + + + unidad compuesta europea + XBA + + + unidad monetaria europea + XBB + + + unidad de cuenta europea (XBC) + XBC + + + unidad de cuenta europea (XBD) + XBD + + + dólar del Caribe Oriental + EC$ + + + nuevo franco CFA + XCF + + + derechos especiales de giro + XDR + + + franco CFA BCEAEC + XEF + + + unidad de moneda europea + XEU + + + franco oro francés + XFO + + + franco UIC francés + XFU + + + dinar islámico + XID + + + nuevo franco metropolitano francés + XMF + + + franco CFA de las Antillas Francesas + XNF + + + franco CFA BCEAO + XOF + + + franco CFP + CFPF + + + rublo transferible del COMECON + XTR + + + dinar yemení + YDD + + + riyal Imadi yemení + YEI + + + rial yemení + YRl + + + dinar fuerte yugoslavo + YUD + + + dinar de la Federación Yugoslava + YUF + + + dinar 1994 yugoslavo + YUG + + + super dinar yugoslavo + YUM + + + dinar convertible yugoslavo + YUN + + + dinar de octubre yugoslavo + YUO + + + dinar reformado yugoslavo + YUR + + + rand sudafricano (financiero) + ZAL + + + libra sudafricana + ZAP + + + rand sudafricano + R + + + kwacha zambiano + ZMK + + + libra zambiana + ZMP + + + nuevo zaire zaireño + ZRN + + + zaire zaireño + ZRZ + + + dólar de Zimbabue + Z$ + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/es_AR.xml 
b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/es_AR.xml new file mode 100644 index 0000000..8d5b0cf --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/es_AR.xml @@ -0,0 +1,95 @@ + + + + + + + + + + + + + + + + + EEEE d' de 'MMMM' de 'yyyy + + + + + d' de 'MMMM' de 'yyyy + + + + + dd/MM/yyyy + + + + + dd/MM/yy + + + + + + + + HH'h'''mm z + + + + + H:mm:ss z + + + + + HH:mm:ss + + + + + HH:mm + + + + + + + {1} {0} + + + + + + + + + , + . + ; + % + 0 + # + + + - + E + + + + + + + Peso Argentino + $ + + + Dólar Americano + US$ + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/es_BO.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/es_BO.xml new file mode 100644 index 0000000..159873e --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/es_BO.xml @@ -0,0 +1,85 @@ + + + + + + + + + + + + + + + + + EEEE d' de 'MMMM' de 'yyyy + + + + + d' de 'MMMM' de 'yyyy + + + + + dd/MM/yyyy + + + + + dd/MM/yy + + + + + + + + hh:mm:ss a z + + + + + hh:mm:ss a z + + + + + hh:mm:ss a + + + + + hh:mm a + + + + + + + {1} {0} + + + + + + + + + , + . + ; + % + 0 + # + + + - + E + + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/es_CL.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/es_CL.xml new file mode 100644 index 0000000..779ed34 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/es_CL.xml @@ -0,0 +1,123 @@ + + + + + + + + + + + + + + + + + EEEE d' de 'MMMM' de 'yyyy + + + + + d' de 'MMMM' de 'yyyy + + + + + dd-MM-yyyy + + + + + dd-MM-yy + + + + + + + + HH:mm:ss z + + + + + H:mm:ss z + + + + + H:mm:ss + + + + + H:mm + + + + + + + {1} {0} + + + + + + + + + , + . 
+ ; + % + 0 + # + + + - + E + + + + + + + + #,##0.###;-#,##0.### + + + + + + + #E0 + + + + + + + #,##0% + + + + + + + ¤#,##0.00;¤-#,##0.00 + + + + + + Peso Chileno + $ + + + Dólar Americano + US$ + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/es_CO.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/es_CO.xml new file mode 100644 index 0000000..7359cb4 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/es_CO.xml @@ -0,0 +1,95 @@ + + + + + + + + + + + + + + + + + EEEE d' de 'MMMM' de 'yyyy + + + + + d' de 'MMMM' de 'yyyy + + + + + d/MM/yyyy + + + + + d/MM/yy + + + + + + + + HH:mm:ss z + + + + + H:mm:ss z + + + + + H:mm:ss + + + + + H:mm + + + + + + + {1} {0} + + + + + + + + + , + . + ; + % + 0 + # + + + - + E + + + + + + + Peso de Colombia + $ + + + Dólar Americano + US$ + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/es_CR.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/es_CR.xml new file mode 100644 index 0000000..a317215 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/es_CR.xml @@ -0,0 +1,85 @@ + + + + + + + + + + + + + + + + + EEEE d' de 'MMMM' de 'yyyy + + + + + d' de 'MMMM' de 'yyyy + + + + + dd/MM/yyyy + + + + + dd/MM/yy + + + + + + + + hh:mm:ss a z + + + + + hh:mm:ss a z + + + + + hh:mm:ss a + + + + + hh:mm a + + + + + + + {1} {0} + + + + + + + + + , + . 
+ ; + % + 0 + # + + + - + E + + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/es_DO.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/es_DO.xml new file mode 100644 index 0000000..631e5c0 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/es_DO.xml @@ -0,0 +1,69 @@ + + + + + + + + + + + + + + + + + EEEE d' de 'MMMM' de 'yyyy + + + + + d' de 'MMMM' de 'yyyy + + + + + dd/MM/yyyy + + + + + dd/MM/yy + + + + + + + + hh:mm:ss a z + + + + + hh:mm:ss a z + + + + + hh:mm:ss a + + + + + hh:mm a + + + + + + + {1} {0} + + + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/es_EC.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/es_EC.xml new file mode 100644 index 0000000..619533c --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/es_EC.xml @@ -0,0 +1,113 @@ + + + + + + + + + + + + + + + + + EEEE d' de 'MMMM' de 'yyyy + + + + + d' de 'MMMM' de 'yyyy + + + + + dd/MM/yyyy + + + + + dd/MM/yy + + + + + + + + HH:mm:ss z + + + + + H:mm:ss z + + + + + H:mm:ss + + + + + H:mm + + + + + + + {1} {0} + + + + + + + + + , + . + ; + % + 0 + # + + + - + E + + + + + + + + #,##0.###;-#,##0.### + + + + + + + #E0 + + + + + + + #,##0% + + + + + + + ¤#,##0.00;¤-#,##0.00 + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/es_ES.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/es_ES.xml new file mode 100644 index 0000000..d3e383b --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/es_ES.xml @@ -0,0 +1,122 @@ + + + + + + + + + + + + + + + + + EEEE d' de 'MMMM' de 'yyyy + + + + + d' de 'MMMM' de 'yyyy + + + + + dd/MM/yyyy + + + + + dd/MM/yy + + + + + + + + HH'H'mm''ss" z + + + + + HH:mm:ss z + + + + + H:mm:ss + + + + + H:mm + + + + + + + {1} {0} + + + + + + + + + , + . 
+ ; + % + 0 + # + + + - + E + + + + + + + + #,##0.###;-#,##0.### + + + + + + + #E0 + + + + + + + #,##0% + + + + + + + #,##0.00 ¤;-#,##0.00 ¤ + + + + + + peseta española + + #,##0 ¤;-#,##0 ¤ + #,##0 ¤;-#,##0 ¤ + . + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/es_GT.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/es_GT.xml new file mode 100644 index 0000000..75c8518 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/es_GT.xml @@ -0,0 +1,69 @@ + + + + + + + + + + + + + + + + + EEEE d' de 'MMMM' de 'yyyy + + + + + d' de 'MMMM' de 'yyyy + + + + + d/MM/yyyy + + + + + d/MM/yy + + + + + + + + hh:mm:ss a z + + + + + hh:mm:ss a z + + + + + hh:mm:ss a + + + + + hh:mm a + + + + + + + {1} {0} + + + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/es_HN.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/es_HN.xml new file mode 100644 index 0000000..17bca96 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/es_HN.xml @@ -0,0 +1,69 @@ + + + + + + + + + + + + + + + + + EEEE dd' de 'MMMM' de 'yyyy + + + + + dd' de 'MMMM' de 'yyyy + + + + + dd/MM/yyyy + + + + + dd/MM/yy + + + + + + + + hh:mm:ss a z + + + + + hh:mm:ss a z + + + + + hh:mm:ss a + + + + + hh:mm a + + + + + + + {1} {0} + + + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/es_MX.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/es_MX.xml new file mode 100644 index 0000000..4e24d72 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/es_MX.xml @@ -0,0 +1,81 @@ + + + + + + + + + + + + + + + + + EEEE d' de 'MMMM' de 'yyyy + + + + + d' de 'MMMM' de 'yyyy + + + + + dd/MM/yyyy + + + + + dd/MM/yy + + + + + + + + hh:mm:ss a z + + + + + hh:mm:ss a z + + + + + hh:mm:ss a + + + + + hh:mm a + + + + + + + {1} {0} + + + + + + + + + + MXN + $ + + + Dólar Americano + US$ + + + + diff --git 
a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/es_NI.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/es_NI.xml new file mode 100644 index 0000000..f954332 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/es_NI.xml @@ -0,0 +1,69 @@ + + + + + + + + + + + + + + + + + EEEE d' de 'MMMM' de 'yyyy + + + + + d' de 'MMMM' de 'yyyy + + + + + dd/MM/yyyy + + + + + dd/MM/yy + + + + + + + + hh:mm:ss a z + + + + + hh:mm:ss a z + + + + + hh:mm:ss a + + + + + hh:mm a + + + + + + + {1} {0} + + + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/es_PA.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/es_PA.xml new file mode 100644 index 0000000..c23c093 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/es_PA.xml @@ -0,0 +1,69 @@ + + + + + + + + + + + + + + + + + EEEE d' de 'MMMM' de 'yyyy + + + + + d' de 'MMMM' de 'yyyy + + + + + MM/dd/yyyy + + + + + MM/dd/yy + + + + + + + + hh:mm:ss a z + + + + + hh:mm:ss a z + + + + + hh:mm:ss a + + + + + hh:mm a + + + + + + + {1} {0} + + + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/es_PE.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/es_PE.xml new file mode 100644 index 0000000..aefba93 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/es_PE.xml @@ -0,0 +1,99 @@ + + + + + + + + + + + + + + + + + EEEE d' de 'MMMM' de 'yyyy + + + + + d' de 'MMMM' de 'yyyy + + + + + dd/MM/yyyy + + + + + dd/MM/yy + + + + + + + + hh:mm:ss a z + + + + + hh:mm:ss a z + + + + + hh:mm:ss a + + + + + hh:mm a + + + + + + + {1} {0} + + + + + + + + + + + #,##0.###;-#,##0.### + + + + + + + #E0 + + + + + + + #,##0% + + + + + + + ¤#,##0.00;¤-#,##0.00 + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/es_PR.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/es_PR.xml new file mode 100644 index 
0000000..9e039ee --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/es_PR.xml @@ -0,0 +1,77 @@ + + + + + + + + + + + + + + + + + EEEE d' de 'MMMM' de 'yyyy + + + + + d' de 'MMMM' de 'yyyy + + + + + MM/dd/yyyy + + + + + MM/dd/yy + + + + + + + + hh:mm:ss a z + + + + + hh:mm:ss a z + + + + + hh:mm:ss a + + + + + hh:mm a + + + + + + + {1} {0} + + + + + + + + + + Dólar Americano + $ + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/es_PY.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/es_PY.xml new file mode 100644 index 0000000..d11e124 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/es_PY.xml @@ -0,0 +1,113 @@ + + + + + + + + + + + + + + + + + EEEE d' de 'MMMM' de 'yyyy + + + + + d' de 'MMMM' de 'yyyy + + + + + dd/MM/yyyy + + + + + dd/MM/yy + + + + + + + + hh:mm:ss a z + + + + + hh:mm:ss a z + + + + + hh:mm:ss a + + + + + hh:mm a + + + + + + + {1} {0} + + + + + + + + + , + . + ; + % + 0 + # + + + - + E + + + + + + + + #,##0.###;-#,##0.### + + + + + + + #E0 + + + + + + + #,##0% + + + + + + + ¤ #,##0;¤ -#,##0 + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/es_SV.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/es_SV.xml new file mode 100644 index 0000000..1309dba --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/es_SV.xml @@ -0,0 +1,69 @@ + + + + + + + + + + + + + + + + + EEEE d' de 'MMMM' de 'yyyy + + + + + d' de 'MMMM' de 'yyyy + + + + + dd/MM/yyyy + + + + + dd/MM/yy + + + + + + + + hh:mm:ss a z + + + + + hh:mm:ss a z + + + + + hh:mm:ss a + + + + + hh:mm a + + + + + + + {1} {0} + + + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/es_US.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/es_US.xml new file mode 100644 index 0000000..a8ba7fc --- /dev/null +++ 
b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/es_US.xml @@ -0,0 +1,110 @@ + + + + + + + + + + + + + 279 + 216 + + + + + + + + + + + + + + EEEE d' de 'MMMM' de 'yyyy + + + + + d' de 'MMMM' de 'yyyy + + + + + MMM d, yyyy + + + + + M/d/yy + + + + + + + + h:mm:ss a z + + + + + h:mm:ss a z + + + + + h:mm:ss a + + + + + h:mm a + + + + + + + {1} {0} + + + + + + + + + + + #,##0.###;-#,##0.### + + + + + + + #E0 + + + + + + + #,##0% + + + + + + + ¤#,##0.00;(¤#,##0.00) + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/es_UY.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/es_UY.xml new file mode 100644 index 0000000..472e8f0 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/es_UY.xml @@ -0,0 +1,113 @@ + + + + + + + + + + + + + + + + + EEEE d' de 'MMMM' de 'yyyy + + + + + d' de 'MMMM' de 'yyyy + + + + + dd/MM/yyyy + + + + + dd/MM/yy + + + + + + + + hh:mm:ss a z + + + + + hh:mm:ss a z + + + + + hh:mm:ss a + + + + + hh:mm a + + + + + + + {1} {0} + + + + + + + + + , + . + ; + % + 0 + # + + + - + E + + + + + + + + #,##0.###;-#,##0.### + + + + + + + #E0 + + + + + + + #,##0% + + + + + + + ¤ #,##0.00;(¤#,##0.00) + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/es_VE.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/es_VE.xml new file mode 100644 index 0000000..76172ce --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/es_VE.xml @@ -0,0 +1,113 @@ + + + + + + + + + + + + + + + + + EEEE d' de 'MMMM' de 'yyyy + + + + + d' de 'MMMM' de 'yyyy + + + + + dd/MM/yyyy + + + + + dd/MM/yy + + + + + + + + hh:mm:ss a z + + + + + hh:mm:ss a z + + + + + hh:mm:ss a + + + + + hh:mm a + + + + + + + {1} {0} + + + + + + + + + , + . 
+ ; + % + 0 + # + + + - + E + + + + + + + + #,##0.###;-#,##0.### + + + + + + + #E0 + + + + + + + #,##0% + + + + + + + ¤#,##0.00;¤ -#,##0.00 + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/et.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/et.xml new file mode 100644 index 0000000..d43be3b --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/et.xml @@ -0,0 +1,435 @@ + + + + + + + + + + + Araabia + Bulgaaria + Tiehhi + Taani + Saksa + Kreeka + Inglise + Hispaania + Eesti + Soome + Prantsuse + Heebrea + Horvaadi + Ungari + Itaalia + Jaapani + Korea + Leedu + Läti + Hollandi + Norra + Poola + Portugali + Rumeenia + Vene + Slovaki + Sloveeni + Rootsi + Türgi + Hiina + + + Andorra + Araabia Ühendemiraadid + Afganistan + Antigua ja Barbuda + Anguilla + Albaania + Armeenia + Hollandi Antillid + Angola + Antarktika + Argentina + Ameerika Samoa + Austria + Austraalia + Aruba + AserbaidĪaan + Bosnia ja Hertsegoviina + Barbados + Bangladesh + Belgia + Burkina Faso + Bulgaaria + Bahrein + Burundi + Benin + Bermuda + Brunei + Boliivia + Brasiilia + Bahama saared + Bhutan + Bouvet’ saar + Botswana + Valgevene + Belize + Kanada + Kookossaared + Kongo DV + Kesk-Aafrika Vabariik + Kongo + Ĩveits + Cote d’Ivoire + Cooki saared + Tiiili + Kamerun + Hiina + Colombia + Costa Rica + Kuuba + Cabo Verde + Jõulusaar + Küpros + Tiehhi Vabariik + Saksamaa + Djibouti + Taani + Dominica + Dominikaani Vabariik + AlĪeeria + Ecuador + Eesti + Egiptus + Lääne-Sahara + Eritrea + Hispaania + Etioopia + Soome + FidĪi + Falklandi saared + Mikroneesia Liiduriigid + Fääri saared + Prantsusmaa + en + Gabon + Ühendkuningriik + Grenada + Gruusia + Prantsuse Guajaana + Ghana + Gibraltar + Gröönimaa + Gambia + Guinea + Guadeloupe + Ekvatoriaal-Guinea + Kreeka + Lõuna-Georgia ja Lõuna-Sandwichi saared + Guatemala + Guam + Guinea-Bissau + Guyana + Hongkongi erihalduspiirkond + Heard ja McDonald + Honduras + Horvaatia + Haiti + 
Ungari + Indoneesia + Iirimaa + Iisrael + India + Briti India ookeani ala + Iraak + Iraan + Island + Itaalia + Jamaica + Jordaania + Jaapan + Kenya + Kõrgõzstan + KambodĪa + Kiribati + Komoorid + Saint Kitts ja Nevis + Põhja-Korea + Lõuna-Korea + Kuveit + Kaimani saared + Kasahstan + Laose DRV + Liibanon + Saint Lucia + Liechtenstein + Sri Lanka + Libeeria + Lesotho + Leedu + Luksemburg + Läti + Liibüa + Maroko + Monaco + Moldova + Madagaskar + Marshalli Saared + Makedoonia Vabariik + Mali + Myanmar + Mongoolia + Aomeni Hiina erihalduspiirkond + Põhja-Mariaanid + Martinique + Mauritaania + Montserrat + Malta + Mauritius + Maldiivid + Malawi + Mehhiko + Malaisia + Mosambiik + Namiibia + Uus-Kaledoonia + Niger + Norfolk + Nigeeria + Nicaragua + Holland + Norra + Nepal + Nauru + Niue + Uus-Meremaa + Omaan + Panama + Peruu + Prantsuse Polüneesia + Paapua Uus-Guinea + Filipiinid + Pakistan + Poola + Saint-Pierre ja Miquelon + Pitcairn + Puerto Rico + Palestiina Omavalitsus + Portugal + Belau + Paraguay + Katar + Réunion + Rumeenia + Venemaa + Rwanda + Saudi Araabia + Saalomoni Saared + Seiiellid + Sudaan + Rootsi + Singapur + Saint Helena + Sloveenia + Svalbard ja Jan Mayen + Slovakkia + Sierra Leone + San Marino + Senegal + Somaalia + Serbia + Suriname + Sao Tomé ja Principe + El Salvador + Süüria + Svaasimaa + Turks ja Caicos + Tiaad + Prantsuse Lõunaalad + Togo + Tai + TadĪikistan + Tokelau + Ida-Timor + Türkmenistan + Tuneesia + Tonga + Türgi + Trinidad ja Tobago + Tuvalu + Taiwan + Tansaania + Ukraina + Uganda + Ühendriikide hajasaared + Ameerika Ühendriigid + Uruguay + Usbekistan + Püha Tool (Vatikan) + Saint Vincent ja Grenadiinid + Venezuela + Briti Neitsisaared + USA Neitsisaared + Vietnam + Vanuatu + Wallis ja Futuna + Samoa + Jeemen + Mayotte + Jugoslaavia + Lõuna-Aafrika Vabariik + Sambia + Zimbabwe + + + + [a-z õ ä ö ü š ž] + + + GanjkHmsSEDFwWxhKzAeugXZ + + + + + + jaan + veebr + märts + apr + mai + juuni + juuli + aug + sept + okt + nov + dets + + + 
jaanuar + veebruar + märts + aprill + mai + juuni + juuli + august + september + oktoober + november + detsember + + + + + + + P + E + T + K + N + R + L + + + pühapäev + esmaspäev + teisipäev + kolmapäev + neljapäev + reede + laupäev + + + + + + + + + + e.m.a. + m.a.j. + + + + + + + EEEE, d, MMMM yyyy + + + + + d MMMM yyyy + + + + + dd.MM.yyyy + + + + + dd.MM.yy + + + + + + + + H:mm:ss z + + + + + H:mm:ss z + + + + + H:mm:ss + + + + + H:mm + + + + + + + {1} {0} + + + + + + + + + , +   + ; + % + 0 + # + + + - + E + + + + + + + EEK + kr + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/et_EE.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/et_EE.xml new file mode 100644 index 0000000..f6da980 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/et_EE.xml @@ -0,0 +1,40 @@ + + + + + + + + + + + + + + #,##0.###;-#,##0.### + + + + + + + #E0 + + + + + + + #,##0% + + + + + + + #,##0.00 ¤;-#,##0.00 ¤ + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/eu.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/eu.xml new file mode 100644 index 0000000..5af6ea7 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/eu.xml @@ -0,0 +1,213 @@ + + + + + + + + + + + euskara + + + Arabiar Emirrerri Batuak + Afganistan + Antigua eta Barbuda + Antartika + Bosnia-Herzegovina + Belgika + Bolibia + Brasil + Bahamak + Bielorrusia + Kanada + Afrika Erdiko Errepublika + Kongo + Suitza + Boli Kosta + Txile + Kamerun + Txina + Kolonbia + Kuba + Cabo Verde + Zipre + Txekiar errepublika + Alemania + Djibuti + Danimarka + Dominika + Dominikar Errepublika + Aljeria + Ekuador + Egipto + Mendebaldeko Sahara + Espainia + Etiopia + Finlandia + Mikronesia + Frantzia + Ginea + Ekuatore Ginea + Grezia + Ginea-Bissau + Kroazia + Hungaria + Irlanda + Irak + Islandia + Italia + Jamaika + Jordania + Japonia + Kenia + Kirgizistan + Kanbodia + Komoreak + Saint 
Kitts eta Nevis + Ipar Korea + Hego Korea + Libano + Santa Luzia + Lituania + Luxenburgo + Letonia + Libia + Maroko + Monako + Moldavia + Madagaskar + Marshall uharteak + Mazedonia + Maurizio + Maldivak + Mexiko + Malasia + Mozambike + Nikaragua + Herbehereak + Norvegia + Zeelanda Berria + Papua Ginea Berria + Filipinak + Polonia + Paraguai + Katar + Errumania + Errusia + Ruanda + Salomon uharteak + Seychelleak + Suedia + Singapur + Eslovenia + Eslovakia + Sierra Leona + Serbia + Surinam + Sao Tomé eta Principe + Siria + Swazilandia + Txad + Tailandia + Tadjikistan + Turkia + Trinidad eta Tobago + Ukraina + Ameriketako Estatu Batuak + Uruguai + Vatikano + Saint Vincent eta Grenadinak + Hegoafrika + + + + [a-zñç] + + + + + + + + urt + ots + mar + api + mai + eka + uzt + abu + ira + urr + aza + abe + + + urtarrila + otsaila + martxoa + apirila + maiatza + ekaina + uztaila + abuztua + iraila + urria + azaroa + abendua + + + + + + + ig + al + as + az + og + or + lr + + + igandea + astelehena + asteartea + asteazkena + osteguna + ostirala + larunbata + + + + + + + + + , + . + ; + % + 0 + # + + + - + E + + + + + + + ESP + + ¤ #,##0;-¤ #,##0 + ¤ #,##0;-¤ #,##0 + . 
+ + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/eu_ES.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/eu_ES.xml new file mode 100644 index 0000000..26bdeaf --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/eu_ES.xml @@ -0,0 +1,103 @@ + + + + + + + + + + + + + + + + + + + + + EEEE, yyyy'eko' MMMM'ren' dd'a' + + + + + EEE, yyyy'eko' MMM'ren' dd'a' + + + + + yy'-'MMM'-'dd + + + + + yy'-'MM'-'dd + + + + + + + + HH:mm:ss z + + + + + HH:mm:ss z + + + + + HH:mm:ss + + + + + HH:mm + + + + + + + {1} {0} + + + + + + + + + + + #,##0.###;-#,##0.### + + + + + + + #E0 + + + + + + + #,##0% + + + + + + + #,##0.00 ¤;-#,##0.00 ¤ + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/fa.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/fa.xml new file mode 100644 index 0000000..5731e43 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/fa.xml @@ -0,0 +1,647 @@ + + + + + + + + + + + آفاری + آبخازی + اوستایی + آفریکانس + افریقا و آسیایی (غیره) + امهری + انگلیسی باستان (حدود ۴۵۰-۱۱۰۰ م.) + زبان‌های آپاچیایی + عربی + آرامی + ساخته‌گی (غیره) + آسامی + زبان‌های استرالیایی + آیمارایی + آذربایجانی + باشکیر + بلوچی + بالتیکی (غیره) + بلوروسی + بلغاری + بیهاری + بوجپوری + بیسلاما + بنگالی + تبتی + بوسنیایی + کاتالونیایی + سرخ‌پوستی امریکای مرکزی (غیره) + چچنی + سلتی (غیره) + چامورویی + چروکی + قبطی + چکی + اسلاوی کلیسایی + چوواشی + ویلزی + دانمارکی + داکوتایی + آلمانی + دراویدی (غیره) + هلندی میانه (حدود ۱۰۵۰-۱۳۵۰ م.) + بوتانی + مصری (باستانی) + یونانی + انگلیسی + انگلیسی میانه (۱۱۰۰ -۱۵۰۰ م.) + اسپرانتو + اسپانیایی + استونیایی + باسکی + فارسی + فنلاندی + فیجیایی + فارویی + فرانسوی + فرانسوی میانه (حدود ۱۴۰۰-۱۶۰۰ م.) + فرانسوی قدیم (۸۴۲-حدود ۱۴۰۰ م.) + ایرلندی + گا + ژرمنی (غیره) + آلمانی علیای میانه (حدود ۱۰۵۰-۱۵۰۰) + گوارانی + آلمانی علیای باستان (حدود ۷۵۰-۱۰۵۰) + گوتیک + یونانی کهن (تا ۱۴۵۳ م.) 
+ گجراتی + مانی + هوسیایی + هاوائیایی + عبری + هندی + هیتی + کرواتی + مجاری + ارمنی + هریرویی + میان‌زبان + اندونزیایی + اینوپیک + هندیک (غیره) + هندوارودپایی + ایرانی + ایسلندی + ایتالیایی + اینوکیتوت + ژاپنی + فارسی یهودی + عربی یهودی + جاوه‌ای + گرجی + قزاقی + خمری + کاناده‌ای + کره‌ای + کنکانی + کشمیری + کردی + کومیایی + کرنوالی + قرقیزی + لاتینی + لتسه‌بورگیش + لینگالا + لائوسی + لیتوانیایی + لتونیایی + آسترونیزیایی + مالاگاسیایی + ایرلندی میانه (۹۰۰-۱۲۰۰ م.) + مارشالی + مائوریایی + زبان‌های متفرقه + مقدونی + مالایالامی + مغولی + مولداویایی + موهاکی + مراتی + مالزیایی + مالتی + چندین زبان + برمه‌ای + مایاییک + نائورویی + سرخ‌پوستی امریکای شمالی (غیره) + بوکسمال نروژی + انده‌بله‌ای شمالی + آلمانی سفلی؛ ساکسون سفلب + نپالی + هلندی + نینورسک نروژی + نروژی + انده‌بله‌ای جنوبی + ناواهویی + اوریه‌ای + ترکی (امپراتوری عثمانی) + پنجابی + پاپوایی (غیره) + پهلوی + فارسی قدیم (حدود ۶۰۰-۴۰۰ ق.م.) + فیایپینی (غیره) + فنیقی + پالی + لهستانی + پشتو + پرتغالی + کچوایی + رومانیایی + ریشه + روسی + کینیارواندایی + سنسکریت + سرخ‌پوستی امریکای جنوبی (غیره) + ساردینیایی + اسکاتلندی + سندی + سامی + سانگویی + ایرلندی باستان (تا ۹۰۰ م.) 
+ زبان‌های نشانه‌ای + صرب و کرواتی + سینهالی + چین و تبتی (غیره) + اسلواکی + اسلووینیایی + ساموآیی + شونایی + سومالیایی + آلبانیایی + صربی + سوتویی جنوبی + سوندایی + سومری + سوئدی + سواحلی + سریانی + تامیلی + تلوگویی + تاجیکی + تایلندی + تیگرینیایی + ترکمتی + تاگالوگی + تسوانایی + تونگایی (جزایر تونگا) + ترکی + تسونگایی + تاتاری + توی‌یایی + تاهیتیایی + اویغوری + اوکراینی + نامشخص + اردو + ازبکی + ویتنامی + ولاپوک + ولوفی + خوسایی + یدی + یوروبایی + چینی + زولویی + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + آندورا + امارات متحده‌ی عربی + افغانستان + آنتیگوا و باربودا + آنگیل + آلبانی + ارمنستان + آنتیل هلند + آنگولا + جنوبگان + آرژانتین + ساموای امریکا + اتریش + استرالیا + آروبا + آذربایجان + بوسنی و هرزگوین + باربادوس + بنگلادش + بلژیک + بورکینافاسو + بلغارستان + بحرین + بوروندی + بنین + برمودا + برونئی + بولیوی + برزیل + باهاما + بوتان + جزیره‌ی بووت + بوتسوانا + بیلوروسی + بلیز + کانادا + جزایر کوکوس + جمهوری دموکراتیک کنگو + جمهوری افریقای مرکزی + کونگو + سوئیس + ساحل عاج + جزایر کوک + شیلی + کامرون + چین + کلمبیا + کاستاریکا + کوبا + کیپ ورد + جزیره‌ی کریسمس + قبرس + جمهوری چک + آلمان + جیبوتی + دانمارک + دومینیکا + جمهوری دومینیکن + الجزایر + اکوادر + استونی + مصر + صحرای غربی + اریتره + اسپانیا + اتیوپی + فنلاند + فیجی + جزایر فالکلند + میکرونزی + جزایر فارو + فرانسه + گابون + انگلستان + گرانادا + گرجستان + گویان فرانسه + غنا + گیبرالتار + گروئنلند + گامبیا + گینه + گوادلوپ + گینه‌ی استوایی + یونان + جورجیای جنوبی و جزایر ساندویچ جنوبی + گواتمالا + گوام + گینه‌ی بیسائو + گویان + هنگ‌کنگ + جزیره‌ی هرد و جزایر مک‌دونالد + هندوراس + کرواسی + هاییتی + مجارستان + اندونزی + ایرلند + اسرائیل + هند + مستعمره‌های انگلستان در اقیانوس هند + عراق + ایران + ایسلند + ایتالیا + جامائیکا + اردن + ژاپن + کنیا + قرقیزستان + کامبوج + کیریباتی + کومورو + سنت کیتس و نویس + کره‌ی شمالی + کره‌ی جنوبی + کویت + جزایر کِیمن + قزاقستان + لائوس + لبنان + سنت لوسیا + لیختن‌اشتاین + سری‌لانکا + لیبریا + لسوتو + لیتوانی + لوکزامبورگ + لتونی + لیبی + 
مراکش + موناکو + مولدووا + ماداگاسکار + جزایر مارشال + مقدونیه + مالی + میانمار + مغولستان + ماکائو + جزایر ماریانای شمالی + مارتینیک + موریتانی + مونت‌سرات + مالت + موریتیوس + مالدیو + مالاوی + مکزیک + مالزی + موزامبیک + نامیبیا + کالدونیای جدید + نیجر + جزیره‌ی نورفولک + نیجریه + نیکاراگوئه + هلند + نروژ + نپال + نائورو + نیوئه + زلاند نو + عمان + پاناما + پرو + پلی‌نزی فرانسه + پاپوا گینه‌ی نو + فیلیپین + پاکستان + لهستان + سنت پیر و میکلون + پیتکایرن + پورتو ریکو + پرتغال + پالائو + پاراگوئه + قطر + ریونیون + رومانی + روسیه + رواندا + عربستان سعودی + جزایر سلیمان + سیشل + سودان + سوئد + سنگاپور + سنت هلن + اسلوونی + اسوالبارد و جان ماین + اسلواکی + سیرالئون + سان مارینو + سنگال + سومالی + صربستان + سورینام + سائو تومه و پرینسیپه + السالوادور + سوریه + سوازیلند + جزایر ترک و کایکوس + چاد + مستعمره‌های جنوبی فرانسه + توگو + تایلند + تاجیکستان + توکلائو + تیمور شرقی + ترکمنستان + تونس + تونگا + ترکیه + ترینیداد و توباگو + تووالو + تایوان + تانزانیا + اوکراین + اوگاندا + جزایر کوچک دورافتاده‌ی ایالات متحده + ایالات متحده‌ی امریکا + اوروگوئه + ازبکستان + واتیکان + سنت وینسنت و گرنادین + ونزوئلا + جزایر ویرجین انگلستان + جزایر ویرجین ایالات متحده + ویتنام + وانواتو + والیس و فیوتونا + ساموا + یمن + مایوت + یوگسلاوی + افریقای جنوبی + زامبیا + زیمبابوه + + + + + + + [ء-ؤئ-غفقل-وً-ْٰٔپچژکگی‌‍‏‎] + + + + + + + + ژان + فور + مار + آور + مـه + ژون + ژوی + اوت + سپت + اکت + نوا + دسا + + + ژانویه + فوریه + مارس + آوریل + مه + ژوئن + ژوئیه + اوت + سپتامبر + اکتبر + نوامبر + دسامبر + + + + + + + ی. + د. + س. + چ. + پ. + ج. + ش. + + + یک‌شنبه + دوشنبه + سه‌شنبه + چهارشنبه + پنج‌شنبه + جمعه + شنبه + + + + ق.ظ. + ب.ظ. + + + ق.م. + م. 
+ + + + + + + EEEE، d MMMM yyyy + + + + + d MMMM yyyy + + + + + yyyy/MM/d + + + + + yyyy/MM/d + + + + + + + + HH:mm:ss (z) + + + + + HH:mm:ss (z) + + + + + HH:mm:ss + + + + + HH:mm + + + + + + + {1} {0} + + + + + + + + + وقت استاندارد تهران + وقت تابستانی تهران + + + IRST + IRDT + + تهران + + + + + + + افغانی + افغانی + + + IRR + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/fa_AF.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/fa_AF.xml new file mode 100644 index 0000000..3abf21d --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/fa_AF.xml @@ -0,0 +1,235 @@ + + + + + + + + + + + + هسپانوی + دری + فنلندی + آیرلندی + کروشیایی + اندونیزیایی + آیسلندی + ایتالوی + جاپانی + کوریایی + قرغزی + مغلی + نیپالی + هالندی + نارویژی + پولندی + پرتگالی + سویدنی + تاجکی + + + اندورا + امارات متحدهٔ عربی + انتیگوا و باربودا + البانیا + انگولا + ارجنتاین + آسترالیا + بوسنیا و هرزه‌گوینا + بنگله‌دیش + بلجیم + بلغاریا + برونی + بولیویا + برازیل + بهاماس + روسیهٔ سفید + جمهوری دموکراتیک کانگو + افریقای مرکزی + کانگو + سویس + چلی + کولمبیا + کاستریکا + کیوبا + دنمارک + اکوادور + استونیا + اریتریا + هسپانیه + ایتوپیا + فنلند + میکرونزیا + گرینادا + گینیا + گینیا استوایی + گواتیمالا + گینیا بیسائو + گیانا + هاندوراس + کروشیا + هایتی + اندونیزیا + آیرلند + آیسلند + جاپان + کینیا + قرغزستان + کمپوچیا + کومور + سنت کیتس و نیویس + کوریای شمالی + کوریای جنوبی + سریلانکا + لیسوتو + لتوانیا + لاتویا + لیبیا + مادغاسکر + منگولیا + موریتانیا + مالتا + مکسیکو + مالیزیا + موزمبیق + نیجریا + نیکاراگوا + هالند + ناروی + نیپال + زیلاند جدید + پانامه + پیرو + پاپوا نیو گینیا + پولند + پرتگال + پاراگوای + رومانیا + روآندا + جزایر سلومون + سویدن + سینگاپور + سلونیا + سلواکیا + سیرالیون + سینیگال + سومالیه + سائو تومه و پرینسیپ + السلوادور + تاجکستان + اکراین + ایالات متحدهٔ امریکا + یوروگوای + سنت وینسنت و گرینادین + ونزویلا + ساموآی غربی + یوگوسلاویا + افریقای جنوبی + زیمبابوی + + + + + + + + + + + + جنو + فبر + مار + 
اپر + مـی + جون + جول + اگس + سپت + اکت + نوم + دسم + + + جنوری + فبروری + مارچ + اپریل + می + جون + جولای + اگست + سپتمبر + اکتوبر + نومبر + دسمبر + + + + + + + + + + + + + وقت افغانستان + وقت افغانستان + + + AFT + AFT + + کابل + + + + + + ٫ + ٬ + ; + ٪ + ۰ + # + + + + ×۱۰^ + + + + + + + + #,##0.###;-#,##0.### + + + + + + + #E0 + + + + + + + #,##0% + + + + + + + #,##0 ¤;-#,##0 ¤ + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/fa_IR.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/fa_IR.xml new file mode 100644 index 0000000..41b085b --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/fa_IR.xml @@ -0,0 +1,67 @@ + + + + + + + + + + + + + + + + + + + + + + + + + ٫ + ٬ + ; + ٪ + ۰ + # + + + - + ×۱۰^ + + + + + + + + #,##0.###;−#,##0.### + + + + + + + #E0 + + + + + + + %#,##0 + + + + + + + #,##0 ¤;−#,##0 ¤ + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/fi.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/fi.xml new file mode 100644 index 0000000..8f93344 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/fi.xml @@ -0,0 +1,2876 @@ + + + + + + + + + + + afar + abhaasi + aceh + adangme + adyghe + avesta + afrikaans + muut afroaasialaiset kielet + afrihili + akan + akkadi + aleutti + algonkin-kielet + amhara + aragonia + muinaisenglanti + apassi + arabia + aramea + araukaani + arapaho + muut tekokielet + arawak + assami + astuuri + athabasca-kielet + australialaiset kielet + avaari + awadhi + aimara + azerbaizani + baškiiri + banda + bamileke + balutši + bambara + bali + basa + muut balttilaiset kielet + valkovenäjä + bedauje + bemba + berberi + bulgaria + bihari + bhodžpuri + bislama + bicol + bini + mustajalka + bambara + bengali + bantu + tiibet + bretoni + bradž + bosnia + batakki + burjaatti + bugi + blin + katalaani + caddo + muut Keski-Amerikan intiaanikielet + karib + muut kaukasialaiset kielet + 
tšetšeeni + sebuano + muut kelttiläiset kielet + chamorro + chibcha + tšagatai + truk + mari + chinook + choctaw + chipewya + cherokee + cheyenne + tšam + korsika + kopti + pidginenglanti + muut ranskaan perustuvat kreolit ja pidginit + muut portugaliin perustuvat kreolit ja pidginit + cree + krimin turkki; krimin tataari + muut kreolit ja pidginit + tsekki + kashubi + kirkkoslaavi + muut kuusilaiset kielet + tšuvassi + kymri + tanska + sioux + dargva + dajakki + saksa + delaware + slaavi + dogrib + dinka + dogri + muut dravidakielet + ala-sorbi + duala + keskihollanti + malediivi + djula + bhutani + ewe + efik + muinaisegypti + ekajuk + kreikka + elami + englanti + keskienglanti + esperanto + espanja + viro + baski + ewondo + farsi + fang + fanti + fulani + suomi + muut suomalais-ugrilaiset kielet + fidži + fääri + fong + ranska + keskiranska + muinaisranska + friuli + friisi + iiri + ga + gayo + gbaja + gaeli + muut germaaniset kielet + etiopia + kiribati + galicia + keskiyläsaksa + guarani + muinaisyläsaksa + gondi + gorontalo + gootti + grebo + muinaiskreikka + gujarati + manx + gwitşin + hausa + haida + havaiji + heprea + hindi + ilongo + himachali + heetti + hmong + hiri-motu + kroaatti + ylä-sorbi + haiti + unkari + hupa + armenia + herero + interlingua + iban + indonesia + interlingue + igbo + pohjois-ji + inupiak + iloko + muut intialaiset kielet + muut indoeurooppalaiset kielet + inguuši + ido + iran + irokeesi + islanti + italia + eskimo + japani + lojba + juutalaispersia + juutalaisarabia + jaava + georgia + karakalpakki + kabyyli + džingpho + kamba + karen + kavi + kabardi + kongo + khasi + muut khoisankielet + khotani + kikuju + kuanjama + kazakki + grönlanti + khmer + kimbundu + kannada + korea + konkani + kosrae + kpelle + kanuri + karachay-balkar + kru-kielet + kurukh + kašmiri + kurdi + kumukki + kutenai + komi + korni + kirgiisi + latina + juutalaisespanja + lahnda + lamba + luxemburg + lezgi + ganda + limburgi + lingala + lao + mongo + lozi + 
liettua + katangan luba + luba + luiseno + lunda + lushai + latvia + madura + magahi + maithili + makassar + mandingo + austronesia + maasai + mokša + mandar + mende + malagasi + keski-iiri + marshalli + maori + micmac + minangkabau + sekalaiset kielet + makedonia + muut mon-khmer-kielet + malajalam + mongoli + mantšu + manipuri + manobo-kielet + moldova + mohawk + mosi + marathi + malaiji + malta + monikielinen + mundakielet + muskogi + marwari + burma + maya + ersä + nauru + nahuatl + muut Pohjois-Amerikan intiaanikielet + napolin italia + kirjanorja + pohjoisndebele + alasaksa + nepali + newari + ndonga + nias + muut nigeriläis-kongolaiset kielet + niue + hollanti + uusnorja + norja + nogai + muinaisskandinaavi + eteländebele + pedi + nuubialaiset kielet + navaho + njandža, tšewa + nyamwezi + nyankole + nyoro + nzima + oksitaani, provensaali + ojibwa + oromo + orija + osseetti + osage + osmani + otomangelaiset kielet + punjabi + muut papualaiskielet + pangasinan + pahlavi + pampanga + papiamentu + palau + muinaispersia + muut filippiiniläiskielet + foinikia + pali + puola + pohnpei + prakriitit + muinaisprovensaali + pašto + portugali + ketšua + rajasthani + rapanui + rarotonga + retoromaani + rundi + romania + muut romaaniset kielet + romani + venäjä + ruanda + sanskriitti + sandawe + jakuutti + muut Etelä-Amerikan intiaanikielet + sališ + länsiaramea + sasak + santali + sardi + skotti + sindhi + pohjoissaame + selkuppi + muut seemiläiset kielet + sango + muinaisiiri + viittomakielet + serbokroatia + šan + singaleesi + sidamo + siouxilaiset kielet + muut sinotiibetiläiset kielet + slovakki + sloveeni + muut slaavilaiset kielet + samoa + eteläsaame + muut saamen kielet + luulajan saame + inarinsaame + koltansaame + shona + soninke + somali + sogdi + songhai + albania + serbia + serer + swazi + muut nilosaharalaiset kielet + eteläsotho + sunda + sukuma + susu + sumeri + ruotsi + suahili + syyria + tamili + muut thaikielet + telugu + temne + tereno + tetum + 
tadžikki + thai + tigrinja + tigre + turkmeeni + tokelau + tagalog + tlingit + tamašek + tswana + Tonga-saarten tonga + tonga + tok-pisin + turkki + tsonga + tsimshian + tataari + tumbuka + tupilaiset kielet + muut altailaiset kielet + tuvalu + twi + tahiti + tuviini + udmurtti + uiguuri + ugarit + ukraina + umbundu + määrittelemätön + urdu + uzbekki + venda + vietnam + volapük + vatja + valloni + wakasilaiset kielet + walamo + waray + washo + sorbi + wolof + kalmukki + kafferi + jao + jap + jiddi + joruba + juppik-kielet + zhuang + zapoteekki + zenaga + kiina + zande + zulu + zuni + + + + + + + + + + + + + + + + + + + + + + + + + + Andorra + Arabiemiirikunnat + Afganistan + Antigua ja Barbuda + Anguilla + Albania + Armenia + Alankomaiden Antillit + Angola + Antarktis + Argentiina + Amerikan Samoa + Itävalta + Australia + Aruba + Azerbaidzan + Bosnia ja Hertsegovina + Barbados + Bangladesh + Belgia + Burkina Faso + Bulgaria + Bahrain + Burundi + Benin + Bermuda + Brunei + Bolivia + Brasilia + Bahama + Bhutan + Bouvet’nsaari + Botswana + Valko-Venäjä + Belize + Kanada + Kookossaaret + Kongon demokraattinen tasavalta + Keski-Afrikan tasavalta + Kongo + Sveitsi + Norsunluurannikko + Cookinsaaret + Chile + Kamerun + Kiina + Kolumbia + Costa Rica + Kuuba + Kap Verde + Joulusaari + Kypros + Tsekin tasavalta + Saksa + Djibouti + Tanska + Dominica + Dominikaaninen tasavalta + Algeria + Ecuador + Viro + Egypti + Länsi-Sahara + Eritrea + Espanja + Etiopia + Suomi + Fidzi + Falklandinsaaret + Mikronesia + Färsaaret + Ranska + en + Gabon + Iso-Britannia + Grenada + Georgia + Ranskan Guayana + Ghana + Gibraltar + Grönlanti + Gambia + Guinea + Guadeloupe + Päiväntasaajan Guinea + Kreikka + Etelä-Georgia ja Eteläiset Sandwichsaaret + Guatemala + Guam + Guinea-Bissau + Guyana + Hongkongin erityishallintoalue + Heard ja McDonaldinsaaret + Honduras + Kroatia + Haiti + Unkari + Indonesia + Irlanti + Israel + Intia + Brittiläinen Intian valtameren alue + Irak + Iran + Islanti + Italia 
+ Jamaika + Jordania + Japani + Kenia + Kirgisia + Kambodza + Kiribati + Komorit + Saint Kitts ja Nevis + Pohjois-Korea + Korean tasavalta + Kuwait + Caymansaaret + Kazakstan + Laos + Libanon + Saint Lucia + Liechtenstein + Sri Lanka + Liberia + Lesotho + Liettua + Luxemburg + Latvia + Libya + Marokko + Monaco + Moldova + Madagaskar + Marshallinsaaret + Makedonia + Mali + Myanmar + Mongolia + Macaon erityishallintoalue + Pohjois-Mariaanit + Martinique + Mauritania + Montserrat + Malta + Mauritius + Malediivit + Malawi + Meksiko + Malesia + Mosambik + Namibia + Uusi-Kaledonia + Niger + Norfolkinsaari + Nigeria + Nicaragua + Alankomaat + Norja + Nepal + Nauru + Niue + Uusi-Seelanti + Oman + Panama + Peru + Ranskan Polynesia + Papua-Uusi-Guinea + Filippiinit + Pakistan + Puola + Saint-Pierre ja Miquelon + Pitcairn + Puerto Rico + Palestiina + Portugali + Palau + Paraguay + Qatar + Réunion + Romania + Venäjä + Ruanda + Saudi-Arabia + Salomonsaaret + Seychellit + Sudan + Ruotsi + Singapore + Saint Helena + Slovenia + Huippuvuoret ja Jan Mayen + Slovakia + Sierra Leone + San Marino + Senegal + Somalia + Serbia + Suriname + Sao Tome ja Principe + El Salvador + Syyria + Swazimaa + Turks- ja Caicossaaret + Tsad + Ranskan eteläiset alueet + Togo + Thaimaa + Tadzikistan + Tokelau + Timor-Leste + Turkmenistan + Tunisia + Tonga + Turkki + Trinidad ja Tobago + Tuvalu + Taiwan + Tansania + Ukraina + Uganda + Yhdysvaltain pienet erillissaaret + Yhdysvallat + Uruguay + Uzbekistan + Vatikaani + Saint Vincent ja Grenadiinit + Venezuela + Brittiläiset Neitsytsaaret + Yhdysvaltain Neitsytsaaret + Vietnam + Vanuatu + Wallis ja Futuna + Samoa + Jemen + Mayotte + Jugoslavia + Etelä-Afrikka + Sambia + Zimbabwe + + + Kalenteri + Kerääminen + Valuutta + + + Buddhalainen kalenteri + Kiinalainen kalenteri + Gregoriaaninen kalenteri + Juutalainen kalenteri + Islamilainen kalenteri + Islamilainen siviilikalenteri + Japanilainen kalenteri + Suora järjestys + Osoitekirjajärjestys + 
Pinyin-järjestys + Piirtojärjestys + Perinteinen + + + + [a-z ä ö] + + + GanjkHmsSEDFwWxhKzAeugXZ + + + + + + tammi + helmi + maalis + huhti + touko + kesä + heinä + elo + syys + loka + marras + joulu + + + T + H + M + H + T + K + H + E + S + L + M + J + + + tammikuu + helmikuu + maaliskuu + huhtikuu + toukokuu + kesäkuu + heinäkuu + elokuu + syyskuu + lokakuu + marraskuu + joulukuu + + + + + + + su + ma + ti + ke + to + pe + la + + + S + M + T + K + T + P + L + + + sunnuntai + maanantai + tiistai + keskiviikko + torstai + perjantai + lauantai + + + + + + + + ap. + ip. + + + eaa. + jaa. + + + + + + + d. MMMM'ta 'yyyy + + + + + d. MMMM'ta 'yyyy + + + + + d.M.yyyy + + + + + d.M.yyyy + + + + + + + + HH:mm:ss z + + + + + HH:mm:ss z + + + + + HH:mm:ss + + + + + HH:mm + + + + + + + {1} {0} + + + + + + + + + Tišrìkuu + Hešvánkuu + Kislévkuu + Tevétkuu + Ševatkuu + Adárkuu + Adárkuu II + Nisánkuu + Ijjárkuu + Sivánkuu + Tammúzkuu + Abkuu + Elúlkuu + + + Tišrìkuu + Hešvánkuu + Kislévkuu + Tevétkuu + Ševatkuu + Adárkuu + Adárkuu II + Nisánkuu + Ijjárkuu + Sivánkuu + Tammúzkuu + Abkuu + Elúlkuu + + + + + + + + + Muhárram + Sáfar + Rabíʻ al-áwwal + Rabíʻ al-ákhir + Džumada-l-úla + Džumada-l-ákhira + Radžab + Šaʻbán + Ramadán + Šawwal + Dhu-l-qada + Dhu-l-hiddža + + + Muhárram + Sáfar + Rabíʻ al-áwwal + Rabíʻ al-ákhir + Džumada-l-úla + Džumada-l-ákhira + Radžab + Šaʻbán + Ramadán + Šawwal + Dhu-l-qada + Dhu-l-hiddža + + + + + + + + + Muhárram + Sáfar + Rabíʻ al-áwwal + Rabíʻ al-ákhir + Džumada-l-úla + Džumada-l-ákhira + Radžab + Šaʻbán + Ramadán + Šawwal + Dhu-l-qada + Dhu-l-hiddža + + + Muhárram + Sáfar + Rabíʻ al-áwwal + Rabíʻ al-ákhir + Džumada-l-úla + Džumada-l-ákhira + Radžab + Šaʻbán + Ramadán + Šawwal + Dhu-l-qada + Dhu-l-hiddža + + + + + + + + + Tyynenmeren normaaliaika + Tyynenmeren kesäaika + + + PST + PDT + + Los Angeles + + + + Tyynenmeren normaaliaika + Tyynenmeren kesäaika + + + PST + PDT + + Los Angeles + + + + Kalliovuorten normaaliaika + Kalliovuorten kesäaika 
+ + + MST + MDT + + Denver + + + + Kalliovuorten normaaliaika + Kalliovuorten kesäaika + + + MST + MDT + + Denver + + + + Kalliovuorten normaaliaika + Kalliovuorten normaaliaika + + + MST + MST + + Phoenix + + + + Kalliovuorten normaaliaika + Kalliovuorten normaaliaika + + + MST + MST + + Phoenix + + + + Keskinen normaaliaika + Keskinen kesäaika + + + CST + CDT + + Chicago + + + + Keskinen normaaliaika + Keskinen kesäaika + + + CST + CDT + + Chicago + + + + Itäinen normaaliaika + Itäinen kesäaika + + + EST + EDT + + New York + + + + Itäinen normaaliaika + Itäinen kesäaika + + + EST + EDT + + New York + + + + Itäinen normaaliaika + Itäinen normaaliaika + + + EST + EST + + Indianapolis + + + + Itäinen normaaliaika + Itäinen normaaliaika + + + EST + EST + + Indianapolis + + + + Havaijin normaaliaika + Havaijin kesäaika + + + HST + HST + + Honolulu + + + + Havaijin normaaliaika + Havaijin normaaliaika + + + HST + HST + + Honolulu + + + + Alaskan normaaliaika + Alaskan kesäaika + + + AST + ADT + + Anchorage + + + + Alaskan normaaliaika + Alaskan kesäaika + + + AST + ADT + + Anchorage + + + + Atlantin normaaliaika + Atlantin kesäaika + + + AST + ADT + + Halifax + + + + Newfoundlandin normaaliaika + Newfoundlandin kesäaika + + + CNT + CDT + + St. Johns + + + + Newfoundlandin normaaliaika + Newfoundlandin kesäaika + + + CNT + CDT + + St. 
Johns + + + + Keski-Euroopan normaaliaika + Keski-Euroopan kesäaika + + + CET + CEST + + Pariisi + + + + Keski-Euroopan normaaliaika + Keski-Euroopan kesäaika + + + CET + CEST + + Pariisi + + + + Greenwichin aika + Greenwichin aika + + + GMT + GMT + + Lontoo + + + + Greenwichin aika + Greenwichin aika + + + GMT + GMT + + Casablanca + + + + Israelin normaaliaika + Israelin kesäaika + + + IST + IDT + + Jerusalem + + + + Japanin normaaliaika + Japanin normaaliaika + + + JST + JST + + Tokio + + + + Japanin normaaliaika + Japanin normaaliaika + + + JST + JST + + Tokio + + + + Itä-Euroopan normaaliaika + Itä-Euroopan kesäaika + + + EET + EEST + + Bukarest + + + + Kiinan normaaliaika + Kiinan normaaliaika + + + CTT + CDT + + Shanghai + + + + Kiinan normaaliaika + Kiinan normaaliaika + + + CTT + CDT + + Shanghai + + + + + + , +   + ; + % + 0 + # + + + - + E + + + + + + + Andorran dinaari + ADD + + + Andorran peseta + ADP + + + Arabiemiirikuntien dirhami + AED + + + Afgaani (1927-2002) + AFA + + + Afgaani + Af + + + Affarsin ja Issasin frangi + AIF + + + Albanian lek (1946-1961) + ALK + + + Albanian lek + lek + + + Albanian lek valute + ALV + + + Albanian dollarin FEC + ALX + + + Armenian dram + dram + + + Alankomaiden Antillien guldeni + NA f. 
+ + + Angolan kwanza + AOA + + + Angolan kwanza (1977-1990) + AOK + + + Angolan uusi kwanza (1990-2000) + AON + + + Angolan kwanza reajustado (1995-1999) + AOR + + + Angolan escudo + AOS + + + Argentiinan austral + ARA + + + Argentiinan peso moneda nacional + ARM + + + Argentiinan peso (1983-1985) + ARP + + + Argentiinan peso + Arg$ + + + Itävallan shillinki + ATS + + + Australian dollari + $A + + + Australian punta + AUP + + + Aruban guldeni + AWG + + + Azerbaidžanin manat + AZM + + + Bosnia-Hertsegovinan dinaari + BAD + + + Bosnia-Hertsegovinan vaihdettava markka + KM + + + Bosnia-Hertsegovinan uusi dinaari + BAN + + + Barbadosin dollari + BDS$ + + + Bangladeshin taka + Tk + + + Belgian frangi (vaihdettava) + BEC + + + Belgian frangi + BF + + + Belgian frangi (rahoitus) + BEL + + + Bulgarian kova leva + lev + + + Bulgarian sosialistinen lev + BGM + + + Bulgarian uusi lev + BGN + + + Bulgarian lev (1879-1952) + BGO + + + Bulgarian lev FEC + BGX + + + Bahrainin dinaari + BD + + + Burundin frangi + Fbu + + + Bermudan dollari + Ber$ + + + Bermudan punta + BMP + + + Brunein dollari + BND + + + Boliviano + Bs + + + Boliviano (1863-1962) + BOL + + + Bolivian peso + BOP + + + Bolivian mvdol + BOV + + + Brasilian uusi cruzeiro (1967-1986) + BRB + + + Brasilian cruzado + BRC + + + Brasilian cruzeiro (1990-1993) + BRE + + + Brasilian real + R$ + + + Brasilian uusi cruzado + BRN + + + Brasilian cruzeiro + BRR + + + Brazilian cruzeiro (1942-1967) + BRZ + + + Bahaman dollari + BSD + + + Bahaman punta + BSP + + + Bhutanin ngultrum + Nu + + + Bhutanin rupia + BTR + + + Burman kyat + BUK + + + Burman rupia + BUR + + + Botswanan pula + BWP + + + Valko-Venäjän uusi rupla (1994-1999) + BYB + + + Valko-Venäjän rupla (1992-1994) + BYL + + + Valko-Venäjän rupla + Rbl + + + Belizen dollari + BZ$ + + + Brittiläisen Hondurasin dollari + BZH + + + Kanadan dollari + Can$ + + + Kongon kongolainen frangi + CDF + + + Kongon tasavallan frangi + CDG + + + Kongon zaire + CDL + + + Keski-Afrikan 
tasavallan CFA-frangi + CFF + + + Sveitsin frangi + SwF + + + Cookinsaarten dollari + CKD + + + Chilen condor + CLC + + + Chilen escudo + CLE + + + Chilen unidades de fomento + CLF + + + Chilen peso + Ch$ + + + Kamerunin CFA-frangi + CMF + + + Kiinan jen min piao yuan + CNP + + + Kiinan US-dollarin FEC + CNX + + + Kiinan yuan renminbi + Y + + + Kolumbian paperipeso + COB + + + Kongon CFA-frangi + COF + + + Kolumbian peso + Col$ + + + Costa Rican colon + C + + + Tšekkoslovakian koruna + CSC + + + Tšekkoslovakian kova koruna + CSK + + + Kuuban peso + CUP + + + Kuuban FEC + CUX + + + Kap Verden escudo + CVEsc + + + Curacaon guldeni + CWG + + + Kyproksen punta + £C + + + Tšekin koruna + CZK + + + Itä-Saksan ostmark + DDM + + + Saksan markka + DEM + + + Saksan sperrmark + DES + + + Djiboutin frangi + DF + + + Tanskan kruunu + DKr + + + Dominikaanisen tasavallan peso + RD$ + + + Algerian dinaari + DA + + + Algerian uusi frangi + DZF + + + Algerian franc germinal + DZG + + + Ecuadorin sucre + ECS + + + Ecuadorin UVC + ECV + + + Viron kruunu + EEK + + + Egyptin punta + EGP + + + Eritrean nakfa + ERN + + + Espanjan peseta + ESP + + + Etiopian birr + Br + + + Etiopian dollari + ETD + + + euro + + + + Suomen markka + mk + + + Suomen markka (1860-1962) + FIN + + + Fidžin dollari + F$ + + + Fidžin punta + FJP + + + Falklandinsaarten punta + FKP + + + Färsaarten kruunu + FOK + + + Ranskan frangi + FRF + + + Ranskan franc germinal/franc poincare + FRG + + + Gabonin CFA-frangi + GAF + + + Iso-Britannian sterling-punta + £ + + + Georgian kuponkilari + GEK + + + Georgian lari + lari + + + Ghanan cedi + GHC + + + Ghanan vanha cedi + GHO + + + Ghanan punta + GHP + + + Ghanan revalvoitu cedi + GHR + + + Gibraltarin punta + GIP + + + Grönlannin kruunu + GLK + + + Gambian dalasi + GMD + + + Gambian punta + GMP + + + Guinean frangi + GF + + + Guinean frangi (1960-1972) + GNI + + + Guinean syli + GNS + + + Guadeloupen frangi + GPF + + + Päiväntasaajan Guinean ekwele guineana + GQE + + + 
Päiväntasaajan Guinean frangi + GQF + + + Päiväntasaajan Guinean peseta guineana + GQP + + + Kreikan drakhma + GRD + + + Kreikan uusi drakhma + GRN + + + Guatemalan quetzal + Q + + + Ranskan Guyanan franc guiana + GUF + + + Portugalin Guinean escudo + GWE + + + Portugalin Guinean mil reis + GWM + + + Guinea-Bissaun peso + GWP + + + Guyanan dollari + G$ + + + Hong Kongin dollari + HK$ + + + Hondurasin lempira + L + + + Kroatian dinaari + HRD + + + Kroatian kuna + HRK + + + Haitin gourde + HTG + + + Unkarin forintti + Ft + + + Pohjois-Irlannin punta + IBP + + + Indonesian nica guldeni + IDG + + + Indonesian java rupia + IDJ + + + Indonesian uusi rupia + IDN + + + Indonesian rupia + Rp + + + Irlannin punta + IR£ + + + Israelin sekeli + ILL + + + Israelin punta + ILP + + + Israelin uusi sekeli + ILS + + + Mansaaren sterling-punta + IMP + + + Intian rupia + =0#Rs.|1#Re.|1<Rs. + + + Irakin dinaari + ID + + + Iranin rial + RI + + + Islannin kruunu + ISK + + + Italian liira + + + + Jerseyn sterling-punta + JEP + + + Jamaikan dollari + J$ + + + Jamaikan punta + JMP + + + Jordanian dinaari + JD + + + Japanin jeni + ¥ + + + Kenian shillinki + K Sh + + + Kirgistanin som + som + + + Kambodžan vanha riel + KHO + + + Kambodžan riel + CR + + + Kiribatin dollari + KID + + + Komorien frangi + CF + + + Pohjois-Korean kansan won + KPP + + + Pohjois-Korean won + KPW + + + Etelä-Korean hwan + KRH + + + Etelä-Korean vanha won + KRO + + + Etelä-Korean won + KRW + + + Kuwaitin dinaari + KD + + + Caymansaarten dollari + KYD + + + Kazakhstanin rupla + KZR + + + Kazakhstanin tenge + T + + + Laosin kip + LAK + + + Libanonin punta + LL + + + Liechtensteinin frangi + LIF + + + Sri Lankan rupia + SL Re + + + Ceylonin rupia + LNR + + + Liberian dollari + LRD + + + Lesothon loti + M + + + Liettuan liti + LTL + + + Liettuan talonas + LTT + + + Luxemburgin frangi + LUF + + + Latvian lati + LVL + + + Latvian rupla + LVR + + + Libyan sotilasvallan liira + LYB + + + Libyan dinaari + LD + + + Libyan 
punta + LYP + + + Marokon dirhami + MAD + + + Marokon frangi + MAF + + + Monacon uusi frangi + MCF + + + Monacon franc germinal + MCG + + + Moldovan kuponkileu + MDC + + + Moldovan leu + MDL + + + Moldovan kuponkirupla + MDR + + + Madagaskarin ariary + MGA + + + Madagaskarin frangi + MGF + + + Marshallinsaarten dollari + MHD + + + Makedonian dinaari + MDen + + + Makedonian dinaari (1992-1993) + MKN + + + Malin frangi + MLF + + + Myanmarin kyat + MMK + + + Myanmarin dollarin FEC + MMX + + + Mongolian tugrik + Tug + + + Macaon pataca + MOP + + + Martiniquen frangi + MQF + + + Mauritanian ouguiya + UM + + + Maltan liira + Lm + + + Maltan punta + MTP + + + Mauritiuksen rupia + MUR + + + Malediivien rupia + MVP + + + Malediivien rufiyaa + MVR + + + Malawin kwacha + MK + + + Malawin punta + MWP + + + Meksikon peso + MEX$ + + + Meksikon hopeapeso (1861-1992) + MXP + + + Meksikon UDI + MXV + + + Malesian ringgit + RM + + + Mosambikin escudo + MZE + + + Mosambikin metical + Mt + + + Namibian dollari + N$ + + + Uuden-Kaledonian franc germinal + NCF + + + Nigerian naira + NGN + + + Nigerian punta + NGP + + + Uusien-Hebridien CFP-frangi + NHF + + + Nicaraguan cordoba + NIC + + + Nicaraguan kultacordoba + NIG + + + Nicaraguan cordoba oro + NIO + + + Alankomaiden guldeni + NLG + + + Norjan kruunu + NKr + + + Nepalin rupia + Nrs + + + Uuden-Seelannin dollari + $NZ + + + Uuden-Seelannin punta + NZP + + + Omanin rial + RO + + + Omanin rial saidi + OMS + + + Panaman balboa + PAB + + + Transdniestrian kuponkirupla + PDK + + + Transdniestrian uusi rupla + PDN + + + Transdniestrian rupla + PDR + + + Perun inti + PEI + + + Perun uusi sol + PEN + + + Perun sol + PES + + + Papua-Uuden-Guinean kina + PGK + + + Filippiinien peso + PHP + + + Pakistanin rupia + Pra + + + Puolan zloty + Zl + + + Puolan US-dollarin FEC + PLX + + + Puolan zloty (1950-1995) + PLZ + + + Palestiinan punta + PSP + + + Portugalin conto + PTC + + + Portugalin escudo + PTE + + + Paraguayn guarani + PYG + + + Qatarin 
rial + QR + + + Reunionin frangi + REF + + + Romanian lei + leu + + + Romanian uusi lei + RON + + + Venäjän rupla + RUB + + + Venäjän rupla (1991-1998) + RUR + + + Ruandan frangi + RWF + + + Saudi-Arabian rial + SRl + + + Saudi-Arabian itsenäinen rial + SAS + + + Salomonsaarten dollari + SI$ + + + Seychellien rupia + SR + + + Sudanin dinaari + SDD + + + Sudanin punta + SDP + + + Ruotsin kruunu + SKr + + + Singaporen dollari + S$ + + + Saint Helenan punta + SHP + + + Slovenian tolar bons + SIB + + + Slovenian tolar + SIT + + + Slovakin koruna + Sk + + + Sierra Leonen leone + SLL + + + San Marinon liira + SML + + + Somalin shillinki + So. Sh. + + + Somalimaan shillinki + SQS + + + Surinamin guldeni + Sf + + + Skotlannin punta + SSP + + + São Tomén ja Principén dobra + Db + + + São Tomén ja Principén escudo + STE + + + Neuvostoliiton uusi rupla + SUN + + + Neuvostoliiton rupla + SUR + + + El Salvadorin colon + SVC + + + Syyrian punta + LS + + + Swazimaan lilangeni + E + + + Turks- ja Caicossaarten crown + TCC + + + Tšadin CFA-frangi + TDF + + + Thaimaan baht + THB + + + Tadžikistanin rupla + TJR + + + Tadžikistanin somoni + TJS + + + Turkmenistanin manat + TMM + + + Tunisian dinaari + TND + + + Tongan paʻanga + T$ + + + Tongan sterling-punta + TOS + + + Timorin escudo + TPE + + + Timorin pataca + TPP + + + Turkin liira + TL + + + Trinidadin ja Tobagon dollari + TT$ + + + Trinidadin ja Tobagon vanha dollari + TTO + + + Tuvalun dollari + TVD + + + Taiwanin uusi dollari + NT$ + + + Tansanian shillinki + T Sh + + + Ukrainan hryvnia + UAH + + + Ukrainan karbovanetz + UAK + + + Ugandan shillinki (1966-1987) + UGS + + + Ugandan shillinki + U Sh + + + Yhdysvaltain dollari + US$ + + + Yhdysvaltain dollari (Seuraava päivä) + USN + + + Yhdysvaltain dollari (Sama päivä) + USS + + + Uruguayn peso fuerte + UYF + + + Uruguayn peso (1975-1993) + UYP + + + Uruguayn peso uruguayo + Ur$ + + + Uzbekistanin kuponkisom + UZC + + + Uzbekistanin som + UZS + + + Vatikaanin kaupungin liira + 
VAL + + + Pohjois-Vietnamin piastre dong viet + VDD + + + Pohjois-Vietnamin uusi dong + VDN + + + Pohjois-Vietnamin viet minh piastre dong viet + VDP + + + Venezuelan bolivar + Be + + + Brittiläisten Neitsytsaarten dollari + VGD + + + Vietnamin dong + VND + + + Vietnamin uusi dong + VNN + + + Vietnamin tasavallan dong + VNR + + + Vietnamin kansallinen dong + VNS + + + Vanuatun vatu + VT + + + Länsi-Samoan punta + WSP + + + Länsi-Samoan tala + WST + + + Aasian dinaarin UA + XAD + + + CFA-frangi BEAC + XAF + + + Aasian rahayksikkö (AMU) + XAM + + + Kulta + XAU + + + EURCO + XBA + + + Euroopan rahayksikkö (EMU) + XBB + + + EUA (XBC) + XBC + + + EUA (XBD) + XBD + + + Itä-Karibian dollari + EC$ + + + CFA uusi frangi + XCF + + + Erityiset nosto-oikeudet + XDR + + + CFA-frangi BCEAEC + XEF + + + Euroopan valuuttayksikkö + XEU + + + Ranskan kulta frangi + XFO + + + Ranskan UIC-frangi + XFU + + + Islamin dinaari + XID + + + Ranskan emämaan uusi frangi + XMF + + + Ranskan Antillien CFA-frangi + XNF + + + CFA-frangi BCEAO + XOF + + + CFP-frangi + CFPF + + + COMECONin siirrettävä rupla + XTR + + + Jemenin dinaari + YDD + + + Jemenin imadi rial + YEI + + + Jemenin rial + YRl + + + Jugoslavian kova dinaari + YUD + + + Jugoslavian liittovaltion dinaari + YUF + + + Jugoslavian 1994 dinaari + YUG + + + Jugoslavian uusi dinaari + YUM + + + Jugoslavian vaihdettava dinaari + YUN + + + Jugoslavian lokakuun dinaari + YUO + + + Jugoslavian uudistettu dinaari + YUR + + + Etelä-Afrikan randi (rahoitus) + ZAL + + + Etelä-Afrikan punta + ZAP + + + Etelä-Afrikan randi + R + + + Zambian kwacha + ZMK + + + Zambian punta + ZMP + + + Zairen uusi zaire + ZRN + + + Zairen zaire + ZRZ + + + Zimbabwen dollari + Z$ + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/fi_FI.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/fi_FI.xml new file mode 100644 index 0000000..113ef40 --- /dev/null +++ 
b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/fi_FI.xml @@ -0,0 +1,40 @@ + + + + + + + + + + + + + + #,##0.###;-#,##0.### + + + + + + + #E0 + + + + + + + #,##0% + + + + + + + #,##0.00 ¤;-#,##0.00 ¤ + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/fo.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/fo.xml new file mode 100644 index 0000000..71af55c --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/fo.xml @@ -0,0 +1,276 @@ + + + + + + + + + + + føroyskt + + + Sameindu Emirríkini + Afganistan + Antigua og Barbuda + Antarktis + Eysturríki + Avstralia + Aserbajdsjan + Bosnia-Hersegovina + Bangladesj + Belgia + Brasilia + Butan + Botsvana + Hvítarussland + Belis + Kanada + Miðafrikalýðveldið + Kongo + Sveis + Fílabeinsstrondin + Kili + Kamerun + Kina + Kolombia + Kosta Rika + Kuba + Grønhøvdaoyggjarnar + Kýpros + Kekkia + Týskland + Danmørk + Dominika + Domingo lýðveldið + Ekvador + Estland + Egyptaland + Spania + Etiopia + Finnland + Mikronesia + Føroyar + Frakland + Ekvator Guinea + Grikkaland + Guinea Bissau + Gujana + Kroatia + Ungarn + Írland + Ísrael + Irak + Ísland + Italia + Jameika + Kenja + Kirgisia + Kambodja + Komorooyggjarnar + Saint Kitts og Nevis + Norður-Korea + Suður-Korea + Kuvait + Kasakstan + Libanon + Saint Lusia + Liktenstein + Lesoto + Litava + Luksemborg + Lettland + Marokko + Monako + Madagaskar + Marshalloyggjarnar + Makedónia + Móritania + Móritius + Maldivuoyggjarnar + Malavi + Meksiko + Maleisia + Mosambik + Nikaragua + Niðurlond + Noreg + Ný Sæland + Perú + Papua Nýguinea + Filipsoyggjar + Pólland + Paraguei + Katar + Rumenia + Russland + Ruanda + Saudi-Arábia + Sálomonoyggjarnar + Seyskelloyggjarnar + Svøríki + Singapor + Surinam + Sao Tome og Prinsipi + Svasiland + Kjad + Tadsjikistan + Tunesia + Turkaland + Trinidad og Tobago + Teivan + Tansania + Ukreina + Sambandsríki Amerika + Uruguei + Usbekistan + Vatikan + Saint Vinsent og 
Grenadinoyggjar + Venesuela + Sámoa + Jemen + Suðurafrika + Sambia + Simbabvi + + + + [a-záæíðóøúý] + + + + + + + + jan + feb + mar + apr + mai + jun + jul + aug + sep + okt + nov + des + + + januar + februar + mars + apríl + mai + juni + juli + august + september + oktober + november + desember + + + + + + + sun + mán + týs + mik + hós + frí + ley + + + sunnudagur + mánadagur + týsdagur + mikudagur + hósdagur + fríggjadagur + leygardagur + + + + + + + + EEEE dd MMMM yyyy + + + + + d. MMM yyyy + + + + + dd-MM-yyyy + + + + + dd-MM-yy + + + + + + + + HH:mm:ss z + + + + + HH:mm:ss z + + + + + HH:mm:ss + + + + + HH:mm + + + + + + + {1} {0} + + + + + + + + + , + . + ; + % + 0 + # + + + - + E + + + + + + + DKK + kr + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/fo_FO.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/fo_FO.xml new file mode 100644 index 0000000..342b1e9 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/fo_FO.xml @@ -0,0 +1,40 @@ + + + + + + + + + + + + + + #,##0.###;-#,##0.### + + + + + + + #E0 + + + + + + + #,##0% + + + + + + + ¤#,##0.00;¤ -#,##0.00 + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/fr.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/fr.xml new file mode 100644 index 0000000..3f687e5 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/fr.xml @@ -0,0 +1,2651 @@ + + + + + + + + + + + afar + abkhaze + avestique + afrikaans + akan + amharique + aragonais + arabe + assamais + avar + aymara + azéri + bachkir + biélorusse + bulgare + bihari + bichlamar + bambara + bengali + tibétain + breton + bosniaque + blin + catalan + tchétchène + chamorro + cherokee + corse + cree + tchèque + slavon d’église + tchouvache + gallois + danois + allemand + maldivien + dzongkha + éwé + grec + anglais + espéranto + espagnol + estonien + basque + persan + peul + finnois + fidjien + féroïen + français + 
frison + irlandais + gaélique écossais + guèze + galicien + guarani + goudjrati + manx + haoussa + hawaïen + hébreu + hindi + hiri motu + croate + haïtien + hongrois + arménien + héréro + interlingua + indonésien + interlingue + igbo + yi de Sichuan + inupiaq + ido + islandais + italien + inuktitut + japonais + javanais + géorgien + kongo + kikuyu + kuanyama + kazakh + groenlandais + khmer + kannada + coréen + konkani + kanouri + kashmiri + kurde + komi + cornique + kirghize + latin + luxembourgeois + ganda + limbourgeois + lingala + lao + lituanien + luba-katanga + letton + malgache + marshall + maori + macédonien + malayalam + mongol + moldave + marathe + malais + maltais + birman + nauruan + bokmål norvégien + ndébélé du Nord + népalais + ndonga + néerlandais + nynorsk norvégien + norvégien + ndébélé du Sud + navaho + nyanja + occitan (après 1500) + ojibwa + galla + oriya + ossète + pendjabi + pali + polonais + pachto + portugais + quechua + rhéto-roman + roundi + roumain + racine + russe + rwanda + sanskrit + sarde + sindhi + sami du Nord + sango + serbo-croate + singhalais + sidamo + slovaque + slovène + samoan + shona + somali + albanais + serbe + swati + sotho du Sud + soundanais + suédois + swahili + syriaque + tamoul + télougou + tadjik + thaï + tigrigna + tigré + turkmène + tagalog + setswana + tongan (Îles Tonga) + turc + tsonga + tatar + twi + tahitien + ouïgour + ukrainien + ourdou + ouzbek + venda + vietnamien + volapük + wallon + wolof + xhosa + yiddish + yoruba + zhuang + chinois + zoulou + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Andorre + Émirats arabes unis + Afghanistan + Antigua-et-Barbuda + Anguilla + Albanie + Arménie + Antilles néerlandaises + Angola + Antarctique + Argentine + Samoa américaines + Autriche + Australie + Aruba + Azerbaïdjan + Bosnie-Herzégovine + Barbade + Bangladesh + Belgique + Burkina Faso + Bulgarie + Bahreïn + Burundi + Benin + Bermudes + Brunei 
+ Bolivie + Brésil + Bahamas + Bhoutan + Île Bouvet + Botswana + Bélarus + Belize + Canada + Îles Cocos + République démocratique du Congo + République centrafricaine + Congo + Suisse + Côte d’Ivoire + Îles Cook + Chili + Cameroun + Chine + Colombie + Costa Rica + Cuba + Cap Vert + Île Christmas + Chypre + République tchèque + Allemagne + Djibouti + Danemark + Dominique + République dominicaine + Algérie + Équateur + Estonie + Égypte + Sahara occidental + Érythrée + Espagne + Éthiopie + Finlande + Fidji + Îles Falkland (Malvinas) + Micronésie + Îles Féroé + France + en + Gabon + Royaume-Uni + Grenade + Géorgie + Guyane française + Ghana + Gibraltar + Groenland + Gambie + Guinée + Guadeloupe + Guinée équatoriale + Grèce + Géorgie du Sud, Îles Sandwich du Sud + Guatemala + Guam + Guinée-Bissau + Guyana + Hong-Kong R.A.S. + Îles Heard et MacDonald + Honduras + Croatie + Haïti + Hongrie + Indonésie + Irlande + Israël + Inde + Territoire britannique de l’océan indien + Iraq + Iran + Islande + Italie + Jamaïque + Jordanie + Japon + Kenya + Kirghizistan + Cambodge + Kiribati + Comores + Saint Kitts et Nevis + Corée du Nord + Corée du Sud + Koweït + Îles Caïmanes + Kazakhstan + Laos + Liban + Sainte-Lucie + Liechtenstein + Sri Lanka + Libéria + Lesotho + Lithuanie + Luxembourg + Lettonie + Libye + Maroc + Monaco + Moldova + Madagascar + Îles Marshall + Macédoine + Mali + Myanmar + Mongolie + Macao R.A.S. 
de Chine + Îles Mariannes du Nord + Martinique + Mauritanie + Montserrat + Malte + Maurice + Maldives + Malawi + Mexique + Malaisie + Mozambique + Namibie + Nouvelle-Calédonie + Niger + Île Norfolk + Nigéria + Nicaragua + Pays-Bas + Norvège + Népal + Nauru + Niué + Nouvelle-Zélande + Oman + Panama + Pérou + Polynésie française + Papouasie-Nouvelle-Guinée + Philippines + Pakistan + Pologne + Saint-Pierre-et-Miquelon + Pitcairn + Porto Rico + Territoire palestinien + Portugal + Palaos + Paraguay + Qatar + Réunion + Roumanie + Russie + Rwanda + Arabie saoudite + Îles Salomon + Seychelles + Soudan + Suède + Singapour + Sainte-Hélène + Slovénie + Svalbard et Île Jan Mayen + Slovaquie + Sierra Leone + Saint-Marin + Sénégal + Somalie + Serbie + Suriname + Sao Tomé-et-Principe + El Salvador + Syrie + Swaziland + Îles Turks et Caïques + Tchad + Terres australes françaises + Togo + Thaïlande + Tadjikistan + Tokelau + Timor-Leste + Turkmenistan + Tunisie + Tonga + Turquie + Trinité-et-Tobago + Tuvalu + Taïwan + Tanzanie + Ukraine + Ouganda + Îles Mineures Éloignées des États-Unis + États-Unis + Uruguay + Ouzbékistan + Saint-Siège (Etat de la Cité du Vatican) + Saint-Vincent-et-les Grenadines + Vénézuela + Îles Vierges Britanniques + Îles Vierges des États-Unis + Viet Nam + Vanuatu + Wallis et Futuna + Samoa + Yémen + Mayotte + Yougoslavie + Afrique du Sud + Zambie + Zimbabwe + + + Révisé + + + Calendrier + Ordonnancement + Devise + + + Calendrier bouddhiste + Calendrier chinois + Calendrier grégorien + Calendrier hébraïque + Calendrier musulman + Calendrier civil musulman + Calendrier japonais + Ordre direct + Ordre de l’annuaire + Ordre pinyin + Ordre des traits + Ordre traditionnel + + + + [a-zéèùçàâêîôûæœëïÿü] + + + GaMjkHmsSEDFwWxhKzAeugXZ + + + + + + janv. + févr. + mars + avr. + mai + juin + juil. + août + sept. + oct. + nov. + déc. 
+ + + J + F + M + A + M + J + J + A + S + O + N + D + + + janvier + février + mars + avril + mai + juin + juillet + août + septembre + octobre + novembre + décembre + + + + + + + dim. + lun. + mar. + mer. + jeu. + ven. + sam. + + + D + L + M + M + J + V + S + + + dimanche + lundi + mardi + mercredi + jeudi + vendredi + samedi + + + + + + av. J.-C. + ap. J.-C. + + + + + + + EEEE d MMMM yyyy + + + + + d MMMM yyyy + + + + + d MMM yy + + + + + dd/MM/yy + + + + + + + + HH' h 'mm z + + + + + HH:mm:ss z + + + + + HH:mm:ss + + + + + HH:mm + + + + + + + {1} {0} + + + + + + + + + Tisseri + Hesvan + Kislev + Tébeth + Schébat + Adar + Adar II + Nissan + Iyar + Sivan + Tamouz + Ab + Elloul + + + Tisseri + Hesvan + Kislev + Tébeth + Schébat + Adar + Adar II + Nissan + Iyar + Sivan + Tamouz + Ab + Elloul + + + + + + + + + Mouharram + Safar + Rabiʻ-oul-Aououal + Rabiʻ-out-Tani + Djoumada-l-Oula + Djoumada-t-Tania + Radjab + Chaʻban + Ramadan + Chaououal + Dou-l-Qaʻda + Dou-l-Hidjja + + + Mouharram + Safar + Rabiʻ-oul-Aououal + Rabiʻ-out-Tani + Djoumada-l-Oula + Djoumada-t-Tania + Radjab + Chaʻban + Ramadan + Chaououal + Dou-l-Qaʻda + Dou-l-Hidjja + + + + + + + + + Mouharram + Safar + Rabiʻ-oul-Aououal + Rabiʻ-out-Tani + Djoumada-l-Oula + Djoumada-t-Tania + Radjab + Chaʻban + Ramadan + Chaououal + Dou-l-Qaʻda + Dou-l-Hidjja + + + Mouharram + Safar + Rabiʻ-oul-Aououal + Rabiʻ-out-Tani + Djoumada-l-Oula + Djoumada-t-Tania + Radjab + Chaʻban + Ramadan + Chaououal + Dou-l-Qaʻda + Dou-l-Hidjja + + + + + + + + + Heure Normale du Pacifique + Heure Avancée du Pacifique + + + PST + PDT + + Los Angeles + + + + Heure Normale du Pacifique + Heure Avancée du Pacifique + + + PST + PDT + + Los Angeles + + + + Heure Normale des Rocheuses + Heure Avancée des Rocheuses + + + MST + MDT + + Denver + + + + Heure Normale des Rocheuses + Heure Avancée des Rocheuses + + + MST + MDT + + Denver + + + + Heure Normale des Rocheuses + Heure Normale des Rocheuses + + + MST + MST + + Phoenix + + + + Heure 
Normale des Rocheuses + Heure Normale des Rocheuses + + + MST + MST + + Phoenix + + + + Heure Normale du Centre + Heure Avancée du Centre + + + CST + CDT + + Chicago + + + + Heure Normale du Centre + Heure Avancée du Centre + + + CST + CDT + + Chicago + + + + Heure Normale de l’Est + Heure Avancée de l’Est + + + EST + EDT + + New York + + + + Heure Normale de l’Est + Heure Avancée de l’Est + + + EST + EDT + + New York + + + + Heure Normale de l’Est + Heure Normale de l’Est + + + EST + EST + + Indianapolis + + + + Heure Normale de l’Est + Heure Normale de l’Est + + + EST + EST + + Indianapolis + + + + Heure Normale de Hawaï + Heure Normale de Hawaï + + + HST + HST + + Honolulu + + + + Heure Normale de Hawaï + Heure Normale de Hawaï + + + HST + HST + + Honolulu + + + + Heure Normale de l’Alaska + Heure Avancée de l’Alaska + + + AST + ADT + + Anchorage + + + + Heure Normale de l’Alaska + Heure Avancée de l’Alaska + + + AST + ADT + + Anchorage + + + + Heure Normale de l’Atlantique + Heure Avancée de l’Atlantique + + + AST + ADT + + Halifax + + + + Heure Normale de Terre-Neuve + Heure Avancée de Terre-Neuve + + + CNT + CDT + + St. Johns + + + + Heure Normale de Terre-Neuve + Heure Avancée de Terre-Neuve + + + CNT + CDT + + St. 
Johns + + + + Heure Normale de l’Europe Centrale + Heure Avancée de l’Europe Centrale + + + CET + CEST + + Paris + + + + Heure Normale de l’Europe Centrale + Heure Avancée de l’Europe Centrale + + + CET + CEST + + Paris + + + + Greenwich Mean Time + Greenwich Mean Time + + + GMT + GMT + + London + + + + Temps Moyen de Greenwich + Temps Moyen de Greenwich + + + GMT + GMT + + Casablanca + + + + Heure Normale d’Israël + Heure Avancée d’Israël + + + IST + IDT + + Jerusalem + + + + Heure Normale du Japon + Heure Normale du Japon + + + JST + JST + + Tokyo + + + + Heure Normale du Japon + Heure Normale du Japon + + + JST + JST + + Tokyo + + + + Heure Normale de l’Europe de l’Est + Heure Avancée de l’Europe de l’Est + + + EET + EEST + + Bucharest + + + + Heure Normale de Chine + Heure Normale de Chine + + + CTT + CDT + + Shanghai + + + + Heure Normale de Chine + Heure Normale de Chine + + + CTT + CDT + + Shanghai + + + + + + , +   + ; + % + 0 + # + + + - + E + + + + + + + + #,##0.###;-#,##0.### + + + + + + + #E0 + + + + + + + #,##0% + + + + + + + #,##0.00 ¤;-#,##0.00 ¤ + + + + + + diner andorran + ADD + + + peseta andorrane + ADP + + + dirham des Émirats arabes unis + AED + + + afghani + AFA + + + afghani + Af + + + franc Affars et Issas + AIF + + + lek albanais (1946-1961) + ALK + + + lek albanais + lek + + + Albanian Lek Valute + ALV + + + dollar albanais (certificat de devises étrangères) + ALX + + + dram arménien + dram + + + florin des Antilles néerl. 
+ ANG + + + kwanza angolais + AOA + + + kwanza angolais (1977-1990) + AOK + + + nouveau kwanza angolais (1990-2000) + AON + + + kwanza angolais réajusté (1995-1999) + AOR + + + escudo angolais + AOS + + + austral + ARA + + + Argentine Peso Moneda Nacional + ARM + + + peso argentin (1983-1985) + ARP + + + peso argentin + Arg$ + + + schilling autrichien + ATS + + + dollar australien + AUD + + + livre australienne + AUP + + + florin d’Aruba + AWG + + + Azerbaijanian Manat + AZM + + + dinar de Bosnie-Herzegovine + BAD + + + mark convertible de Bosnie-Herzegovine + KM + + + nouveau dinar de Bosnie-Herzegovine + BAN + + + dollar de Barbade + BBD + + + taka + BDT + + + franc belge (convertible) + BEC + + + franc belge + FB + + + franc belge (financier) + BEL + + + lev + BGL + + + lev de Bulgarie socialiste + BGM + + + nouveau lef + BGN + + + lev (1879-1952) + BGO + + + lev (certificat de devises étrangères) + BGX + + + dinar de Bahrein + BHD + + + franc du Burundi + BIF + + + dollar des Bermudes + BMD + + + livre des Bermudes + BMP + + + dollar de Brunei + BND + + + boliviano + Bs + + + boliviano (1863-1962) + BOL + + + peso bolivien + BOP + + + Bolivian Mvdol + BOV + + + nouveau cruzeiro (1967-1986) + BRB + + + cruzeiro + BRC + + + cruzeiro (1990-1993) + BRE + + + réal + R$ + + + nouveau cruzado + BRN + + + cruzeiro + BRR + + + cruzeiro (1942-1967) + BRZ + + + dollar des Bahamas + BSD + + + livre des Bahamas + BSP + + + ngultrum + Nu + + + roupie de Bhoutan + BTR + + + kyat + BUK + + + roupie de Birmanie + BUR + + + pula + BWP + + + nouveau rouble biélorusse (1994-1999) + BYB + + + rouble biélorusse (1992-1994) + BYL + + + rouble biélorusse + Rbl + + + dollar de Bélize + BZD + + + dollar du Honduras britannique + BZH + + + dollar canadien + CAD + + + franc congolais + CDF + + + franc de la République Congolaise + CDG + + + Congolese Zaire + CDL + + + franc CFA de la République du Centre Afrique + CFF + + + franc suisse + sFr. 
+ + + dollar des îles Cook + CKD + + + condor chilien + CLC + + + escudo chilien + CLE + + + Chilean Unidades de Fomento + CLF + + + peso chilien + CLP + + + franc CFA camerounais + CMF + + + Chinese Jen Min Piao Yuan + CNP + + + dollar US chinois (certificat de devises étrangères) + CNX + + + Yuan Ren-min-bi + CNY + + + Colombian Paper Peso + COB + + + franc CFA congolais + COF + + + peso colombien + COP + + + colon + CRC + + + couronne tchèque + CSC + + + couronne tchèque + CSK + + + peso cubain + CUP + + + certificat de devises étrangères de Cuba + CUX + + + escudo du Cap-Vert + CVE + + + florin de Curacao + CWG + + + livre cypriote + CYP + + + couronne tchèque + CZK + + + mark est-allemand + DDM + + + deutsche mark + DEM + + + sperrmark allemand + DES + + + franc de Djibouti + DF + + + couronne danoise + DKK + + + peso dominicain + DOP + + + dinar algérien + DZD + + + nouveau franc algérien + DZF + + + franc germinal algérien + DZG + + + sucre + ECS + + + unité de valeur constante équatoriale (UVC) + ECV + + + couronne estonienne + EEK + + + livre égyptienne + EGP + + + Eritrean Nakfa + ERN + + + peseta espagnole + ESP + + + birr + ETB + + + dollar éthiopien + ETD + + + euro + + + + mark finlandais + FIM + + + mark finlandais (1860-1962) + FIN + + + dollar de Fidji + FJD + + + livre de Fiji + FJP + + + livre des Falkland (Malvinas) + FKP + + + Faeroe Islands Kronur + FOK + + + franc français + F + + + franc germinal/franc Poincaré + FRG + + + franc CFA gabonnais + GAF + + + livre sterling + £ + + + Georgian Kupon Larit + GEK + + + lari + lari + + + cédi + GHC + + + ancien cedi + GHO + + + livre ghanéenne + GHP + + + cedi revalorisé + GHR + + + livre de Gibraltar + GIP + + + couronne du Groenland + GLK + + + dalasie + GMD + + + livre de Gambie + GMP + + + franc guinéen + GF + + + franc guinéen (1960-1972) + GNI + + + syli + GNS + + + franc guadeloupéen + GPF + + + ekwélé + GQE + + + franco de Guinée Equatoriale + GQF + + + peseta de Guinée Equatoriale + GQP + + 
+ drachme + GRD + + + nouveau drachme + GRN + + + quetzal + GTQ + + + franc guyanais + GUF + + + Escudo de Guinée Portugaise + GWE + + + Portuguese Guinea Mil Reis + GWM + + + peso de Guinée-Bissau + GWP + + + dollar de Guyane + G$ + + + dollar de Hong Kong + HKD + + + lempira + HNL + + + dinar croate + HRD + + + kuna + HRK + + + gourde + HTG + + + forint + HUF + + + livre d’Irlande du Nord + IBP + + + florin de Nica + IDG + + + roupie de Java + IDJ + + + nouvelle roupie indonésienne + IDN + + + rupiah + IDR + + + livre irlandaise + IEP + + + shékel + ILL + + + livre israélienne + ILP + + + shékel + ILS + + + livre sterling de l’Ile de Man + IMP + + + roupie indienne + =0#Rs.|1#Re.|1<Rs. + + + dinar irakien + IQD + + + rial iranien + IRR + + + couronne islandaise + ISK + + + lire italienne + + + + livre sterling de Jersey + JEP + + + dollar jamaïcain + JMD + + + livre jamaïcaine + JMP + + + dinar jordanien + JOD + + + yen + ¥ + + + shilling du Kenya + KES + + + som du Kyrgystan + som + + + vieux riel + KHO + + + riel + KHR + + + dollar de Kiribati + KID + + + franc des Comores + KMF + + + won du peuple nord-coréen + KPP + + + won nord-coréen + KPW + + + hwan + KRH + + + vieux won + KRO + + + won sud-coréen + KRW + + + dinar koweitien + KWD + + + dollar des îles Caïmans + KYD + + + rouble du Kazakhstan + KZR + + + tenge du Kazakhstan + T + + + kip + LAK + + + livre libanaise + LBP + + + franc du Liechtenstein + LIF + + + roupie de Sri Lanka + LKR + + + roupie de Ceylan + LNR + + + dollar libérien + LRD + + + Lesotho Loti + M + + + Lita de Lithuanian + LTL + + + Talonas de Lithuanie + LTT + + + franc luxembourgeois + LUF + + + lats letton + LVL + + + rouble letton + LVR + + + lire de l’autorié militaire britannique de Libye + LYB + + + dinar Iibyen + LD + + + livre libyenne + LYP + + + dirham marocain + MAD + + + franc marocain + MAF + + + nouveau franc marocain + MCF + + + franc Germinal monégasque + MCG + + + Moldovan Leu Cupon + MDC + + + leu moldave + MDL + + + 
rouble moldave + MDR + + + ariary malgache + MGA + + + franc malgache + MGF + + + dollar des îles Marshall + MHD + + + dinar macédonien + MDen + + + dinar macédonien (1992-1993) + MKN + + + franc malien + MLF + + + Myanmar Kyat + MMK + + + dollar de Myanmar (certificat de devises étrangères) + MMX + + + tugrik + MNT + + + pataca + MOP + + + franc martiniquais + MQF + + + ouguija + MRO + + + lire maltaise + Lm + + + livre maltaise + MTP + + + roupie de l’île Maurice + MUR + + + roupie des Maldives + MVP + + + roupie des Maldives + MVR + + + kwacha + MWK + + + livre de Malawi + MWP + + + peso d’argent mexicain (1861-1992) + MXP + + + unité de conversion mexicaine (UDI) + MXV + + + ringgit + MYR + + + escudo du Mozambique + MZE + + + métical + MZM + + + dollar de Namibie + N$ + + + franc Germinal de Nouvelle Calédonie + NCF + + + naira + NGN + + + livre nigériane + NGP + + + franc CFP des Nouvelles Hébrides + NHF + + + cordoba + NIC + + + cordoba d’or + NIG + + + cordoba d’or + NIO + + + florin néerlandais + NLG + + + couronne norvégienne + NOK + + + roupie du Népal + NPR + + + dollar néo-zélandais + NZD + + + livre néo-zélandaise + NZP + + + rial omani + OMR + + + Oman Rial Saidi + OMS + + + balboa + PAB + + + Transdniestria Ruble Kupon + PDK + + + nouveau rouble moldave + PDN + + + rouble moldave + PDR + + + Inti péruvien + PEI + + + nouveau sol péruvien + PEN + + + sol péruvien + PES + + + kina + PGK + + + peso philippin + PHP + + + roupie du Pakistan + PKR + + + dollar US polonais (certificat de devises étrangères) + PLX + + + zloty (1950-1995) + PLZ + + + livre palestinienne + PSP + + + conto portugais + PTC + + + escudo portugais + PTE + + + guarani + PYG + + + rial du Qatar + QAR + + + franc de la Réunion + REF + + + leu + ROL + + + nouveau leu + RON + + + rouble de Russie (1991-1998) + RUR + + + franc du Rwanda + RWF + + + riyal séoudien + SAR + + + riyal saoudien + SAS + + + dollar de Salomon + SBD + + + roupie des Seychelles + SCR + + + dinar soudanais + SDD 
+ + + livre soudanaise + SDP + + + couronne suédoise + SEK + + + dollar de Singapour + SGD + + + livre de Sainte-Hélène + SHP + + + bons de tolar slovène + SIB + + + tolar slovène + SIT + + + couronne slovaque + SKK + + + léone + SLL + + + lire de Saint-Marin + SML + + + shilling de Somalie + SOS + + + shilling de Somalie + SQS + + + florin du Surinam + SRG + + + livre écossaise + SSP + + + dobra + STD + + + escudo de Sao Tomé et Principe + STE + + + nouveau rouble soviétique + SUN + + + rouble de C.E.I. + SUR + + + colon + SVC + + + livre syrienne + SYP + + + lilangeni + SZL + + + couronne des îles Turks et Caïques + TCC + + + franc CFA du Tchad + TDF + + + baht + THB + + + rouble du Tadjikistan + TJR + + + somoni du Tadjikistan + TJS + + + Turkmenistan Manat + TMM + + + dinar tunisien + TND + + + paʻanga + TOP + + + livre sterling du Tonga + TOS + + + escudo de Timor + TPE + + + pataca de Timor + TPP + + + livre turque + TL + + + dollar de la Trinité + TTD + + + vieux dollar de la Trinité + TTO + + + dollar du Tuvalu + TVD + + + dollar taïwanais + TWD + + + shilling de Tanzanie + TZS + + + hryvnia + UAH + + + karbovanetz + UAK + + + shilling ougandais (1966-1987) + UGS + + + shilling ougandais + U Sh + + + dollar des États-Unis + $ + + + dollar des Etats-Unis (jour suivant) + USN + + + dollar des Etats-Unis (jour même) + USS + + + peso fort uruguayen + UYF + + + peso uruguayen (1975-1993) + UYP + + + peso uruguayen + Ur$ + + + Uzbekistan Coupon Som + UZC + + + sum + UZS + + + lire du Vatican + VAL + + + North Vietnam Piastre Dong Viet + VDD + + + nouveau dong nord-vietnamien + VDN + + + North Vietnam Viet Minh Piastre Dong Viet + VDP + + + bolivar + VEB + + + dollar des îles Vierges britanniques + VGD + + + dong + VND + + + nouveau dong vietnamien + VNN + + + dong de la République Vietnamienne + VNR + + + dong national du Vietnam + VNS + + + vatu + VUV + + + livre du Samoa + WSP + + + tala + WST + + + dinar asiatique (unité de compte) + XAD + + + franc CFA (BEAC) 
+ XAF + + + unité monétaire asiatique + XAM + + + Or + XAU + + + unité composite européenne + XBA + + + unité monétaire européenne + XBB + + + unité de compte européenne (XBC) + XBC + + + unité de compte européenne (XBD) + XBD + + + dollar des Caraïbes + XCD + + + nouveau franc CFA + XCF + + + franc CFA (BCEAEC) + XEF + + + unité de compte européenne (ECU) + XEU + + + franc or + XFO + + + franc UIC + XFU + + + dinar musulman + XID + + + nouveau franc métropolitain + XMF + + + franc CFA antillais + XNF + + + franc CFA (BCEAO) + XOF + + + franc CFP + XPF + + + rouble transférable du COMECON + XTR + + + dinar du Yémen + YDD + + + riyal du Yémen + YEI + + + riyal du Yémen + YER + + + nouveau dinar yougoslave + YUD + + + dinar de la Fédération Yougoslave + YUF + + + dinar yougoslave 1994 + YUG + + + dinar yougoslave Noviy + YUM + + + dinar yougoslave convertible + YUN + + + dinar yougoslave d’Octobre + YUO + + + dinar yougoslave réformé + YUR + + + rand sud-africain (financier) + ZAL + + + livre sud-africaine + ZAP + + + rand + ZAR + + + kwacha + ZMK + + + livre zambienne + ZMP + + + nouveau zaïre + ZRN + + + zaïre + ZRZ + + + dollar du Zimbabwe + Z$ + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/fr_BE.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/fr_BE.xml new file mode 100644 index 0000000..b8d017d --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/fr_BE.xml @@ -0,0 +1,117 @@ + + + + + + + + + + + + + + + + + + + + + EEEE d MMMM yyyy + + + + + d MMMM yyyy + + + + + dd-MMM-yy + + + + + d/MM/yy + + + + + + + + H' h 'mm' min 'ss' s 'z + + + + + HH:mm:ss z + + + + + HH:mm:ss + + + + + HH:mm + + + + + + + {1} {0} + + + + + + + + + , + . 
+ ; + % + 0 + # + + + - + E + + + + + + + + #,##0.###;-#,##0.### + + + + + + + #E0 + + + + + + + #,##0% + + + + + + + #,##0.00 ¤;-#,##0.00 ¤ + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/fr_CA.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/fr_CA.xml new file mode 100644 index 0000000..fca33c7 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/fr_CA.xml @@ -0,0 +1,109 @@ + + + + + + + + + + + + + + + + + EEEE d MMMM yyyy + + + + + d MMMM yyyy + + + + + yy-MM-dd + + + + + yy-MM-dd + + + + + + + + HH' h 'mm' min 'ss' s 'z + + + + + HH:mm:ss z + + + + + HH:mm:ss + + + + + HH:mm + + + + + + + {1} {0} + + + + + + + + + + + #,##0.###;-#,##0.### + + + + + + + #E0 + + + + + + + #,##0% + + + + + + + #,##0.00 ¤;(#,##0.00¤) + + + + + + dollar canadien + $ + + + dollar des États-Unis + $ US + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/fr_CH.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/fr_CH.xml new file mode 100644 index 0000000..5109455 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/fr_CH.xml @@ -0,0 +1,117 @@ + + + + + + + + + + + + + + + + + + + + + EEEE, d MMMM yyyy + + + + + d MMMM yyyy + + + + + d MMM yy + + + + + dd.MM.yy + + + + + + + + HH.mm:ss' h' z + + + + + HH:mm:ss z + + + + + HH:mm:ss + + + + + HH:mm + + + + + + + {1} {0} + + + + + + + + + . 
+ ' + ; + % + 0 + # + + + - + E + + + + + + + + #,##0.###;-#,##0.### + + + + + + + #E0 + + + + + + + #,##0% + + + + + + + ¤ #,##0.00;¤-#,##0.00 + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/fr_FR.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/fr_FR.xml new file mode 100644 index 0000000..fac6afb --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/fr_FR.xml @@ -0,0 +1,20 @@ + + + + + + + + + + + + + + + + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/fr_LU.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/fr_LU.xml new file mode 100644 index 0000000..21827a6 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/fr_LU.xml @@ -0,0 +1,46 @@ + + + + + + + + + + + + + + + + + + + + + + , + . + ; + % + 0 + # + + + - + E + + + + + + + franc français + FRF + + + franc luxembourgeois + F + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/fr_MC.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/fr_MC.xml new file mode 100644 index 0000000..4f03e53 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/fr_MC.xml @@ -0,0 +1,36 @@ + + + + + + + + + + + + + + + + + + + + + + , +   + ; + % + 0 + # + + + - + E + + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/ga.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/ga.xml new file mode 100644 index 0000000..8d07a8b --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/ga.xml @@ -0,0 +1,2001 @@ + + + + + + + + + + + Afar + Abcáisis + Aivéistis + Afracáinis + Araibis + Asaimis + Asarbaiseáinis + Baiscíris + Bealarúisis + Bulgáiris + Beangálais + Tibéadais + Briotáinis + Boisnis + Catalóinis + Sisinis + Corsaicis + Craíais + Seicis + Slavais na hEaglaise + Suvaisis + Breatnais + Danmhairgis + Gearmáinis + Gréigis + Béarla + 
Esperanto + Spáinnis + Eastóinis + Bascais + Peirsis + Fionnlainnis + Fidsis + Faróis + Fraincis + Freaslainnais + Gaeilge + Gaeilge na hAlban + Gúisearáitis + Mannainis + Haváíais + Eabhrais + Hiondúis + Cróitis + Ungáiris + Airméinis + Interlingua + Indinéisis + Interlingue + Inupiaq + Ido + Íoslainnais + Iodáilis + Ionúitis + Seapáinis + Iávais + Seoirsis + Casachais + Cannadais + Cóiréis + Caismíris + Cornais + Cirgeasais + Laidin + Leitseabuirgis + Laosais + Liotuáinis + Laitvis + Malagásais + Maorais + Macadóinis + Mailéalaimis + Mongóilis + Moldáivis + Maraitis + Maltais + Burmais + Nárúis + Ioruais Bokmål + Neipealais + Ollainnais + Ioruais Nynorsk + Ioruais + Navachóis + Ocatáinis (tar éis 1500); Provençal + Óiséitis + Puinseaibis + Polainnis + Paisteo + Portaingéilis + Ceatsuais + Romáinis + Rúisis + Sanscrait + Sairdínis + Sindis + Sáimis Thuaidh + Seirbea-Chróitis + Slóvacais + Slóvéinis + Samóis + Somálais + Albáinis + Seirbis + Sualainnis + Svahaílis + Tamailis + Téalainnis + Tagálaigis + Tuircis + Tatarais + Taihítis + Úcráinis + Urdais + Úisbéicis + Vítneamais + Vallúnais + Giúdais + Sínis + Súlúis + + + Andóra + Aontas na nÉimíríochtaí Arabacha + An Afganastáin + Antigua agus Barbuda + Anguilla + An Albáin + An Airméin + Antillí na hÍsiltíre + Angóla + An Antartaice + An Airgintín + Samó Meiriceánach + An Ostair + An Astráil + Aruba + An Asarbaiseáin + An Bhoisnia-Heirseagaivéin + Barbadós + An Bhanglaidéis + An Bheilg + Buircíne Fasó + An Bhulgáir + Bairéin + An Bhurúin + Beinin + Beirmiúda + Brúiné + An Bholaiv + An Bhrasaíl + Na Bahámaí + An Bhútáin + Oileáin Bouvet + An Bhotsuáin + An Bhealarúis + An Bheilís + Ceanada + Oileáin Cocos (Keeling) + Poblacht Dhaonlathach an Chongó + Poblacht na hAfraice Láir + An Congó + An Eilvéis + An Cósta Eabhair + Oileáin Cook + An tSile + Camarún + An tSín + An Cholóim + Cósta Ríce + Cúba + Rinn Verde + Oileán na Nollag + An Chipir + Poblacht na Seice + An Ghearmáin + Djibouti + An Danmhairg + Doiminice + An 
Phoblacht Dhoiminiceach + An Ailgéir + Eacuadór + An Eastóin + An Éigipt + An Sahára Thiar + Eritrea + An Spáinn + An Aetóip + An Fhionlainn + Fidsí + Oileáin Fháclainne + An Mhicrinéis + Oileáin Fharó + An Fhrainc + An Ghabúin + An Ríocht Aontaithe + Grenada + An tSeoirsia + An Ghuáin Fhrancach + Gána + Giobráltar + An Ghraonlainn + An Ghaimbia + An Ghuine + Guadalúip + An Ghuine Mheánchriosach + An Ghréig + An tSeoirsia Theas agus Oileáin Sandwich Theas + Guatamala + Guam + An Ghuine-Bhissau + An Ghuáin + Hong Cong + Oileán Heard agus Oileáin McDonald + Hondúras + An Chróit + Háití + An Ungáir + An Indinéis + Éire + Iosrael + An India + Críocha Briotanacha an Aigéin Indiagh + An Iaráic + An Iaráin + An Íoslainn + An Iodáil + Iamáice + An Iordáin + An tSeapáin + An Chéinia + An Chirgeastáin + An Chambóid + Cireabaití + Oileáin Chomóra + Saint Kitts agus Nevis + An Chóiré Thuaidh + An Chóiré Theas + Cuáit + Oileáin Cayman + An Chasacstáin + Laos + An Liobáin + Saint Lucia + Lichtinstéin + Srí Lanca + An Libéir + Leosóta + An Liotuáin + Lucsamburg + An Laitvia + An Libia + Maracó + Monacó + An Mholdóiv + Madagascar + Oileáin Marshall + An Mhacadóin + Mailí + Maenmar + An Mhongóil + Macao + Oileáin Mariana Thuaidh + Martinique + An Mharatáin + Montsarat + Málta + Oileán Mhuirís + Mhaildiví + An Mhaláiv + Meicsiceo + An Mhalaeisia + Mósaimbíc + An Namaib + An Nua-Chaladóin + An Nígir + Oileán Norfolk + An Nigéir + Nicearagua + An Ísiltír + An Iorua + Neipeal + Nárú + Niue + An Nua-Shéalainn + Oman + Panama + Peiriú + An Pholainéis Fhrancach + Nua-Ghuine Phapua + Na hOileáin Fhilipíneacha + An Phacastáin + An Pholainn + Saint Pierre agus Miquelon + Pitcairn + Portó Ríce + Na Críocha Pailistíneacha + An Phortaingéil + Palau + Paragua + Catar + Réunion + An Rómáin + Cónaidhm na Rúise + Ruanda + An Araib Shádach + Oileáin Solomon + Na Séiséil + An tSúdáin + An tSualainn + Singeapór + San Héilin + An tSlóvéin + Svalbard agus Jan Mayen + An tSlóvaic + Siarra Leon + San 
Mairíne + An tSeineagáil + An tSomáil + An tSeirbia + Suranam + Sao Tome agus Principe + An tSalvadóir + An tSiria + An tSuasalainn + Oileáin Turks agus Caicos + Sead + Críocha Francacha Theas + Tóga + An Téalainn + An Táidsíceastáin + Tócalá + Tíomór-Leste + An Tuircméanastáin + An Túinéis + Tonga + An Tuirc + Oileáin na Tríonóide agus Tobága + Tuvalú + An Téaváin + An Tansáin + An Úcráin + Uganda + Mion-Oileáin Imeallacha S.A.M. + Stáit Aontaithe Mheiriceá + Urugua + Úisbéiceastáin + An Chathaoir Naofa (Stát Chathair na Vatacáine) + Saint Vincent agus na Grenadines + Veiniséala + Oileáin Bhriotanacha na Maighdean + Oileáin na Maighdean S.A.M. + Vítneam + Vanuatú + Oileáin Vailís agus Futúna + Samó + Éimin + Mayotte + An Iúgslaiv + An Afraic Theas + An tSaimbia + An tSiombáib + + + + [a-z á é í ó ú] + + + RbMLkUnsSElFtTauKcBeyrAC + + + + + + Ean + Feabh + Márta + Aib + Beal + Meith + Iúil + Lún + MFómh + DFómh + Samh + Noll + + + Eanáir + Feabhra + Márta + Aibreán + Bealtaine + Meitheamh + Iúil + Lúnasa + Meán Fómhair + Deireadh Fómhair + Samhain + Nollaig + + + + + + + Domh + Luan + Máirt + Céad + Déar + Aoine + Sath + + + Dé Domhnaigh + Dé Luain + Dé Máirt + Dé Céadaoin + Déardaoin + Dé hAoine + Dé Sathairn + + + + a.m. + p.m. 
+ + + RC + AD + + + + + + + + Meán-Am Greenwich + Am Samhraidh na hÉireann + + + MAG + ASÉ + + Baile Átha Cliath + + + + Meán-Am Greenwich + Am Samhraidh na Breataine + + + MAG + ASB + + Londain + + + + Meán-Am Greenwich + Am Samhraidh na Breataine + + + MAG + ASB + + Béal Feirste + + + + Meán-Am Greenwich + Meán-Am Greenwich + + + MAG + MAG + + Londain + + + + + + + Dínear Andóra + ADD + + + Peseta Andóra + ADP + + + Dirham Aontas na nÉimíríochtaí Arabacha + AED + + + Afgainí (1927-2002) + AFA + + + Afgainí + Af + + + Franc Affars agus Issas + AIF + + + Lek Albánach (1946-1961) + ALK + + + Lek Albánach + lek + + + Lek Valute Albánach + ALV + + + Teastais Airgeadraí Dollar na hAlbáine + ALX + + + Dram Airméanach + dram + + + Guilder na nAntillí Ísiltíreach + AÍ f. + + + Kwanza Angólach + AOA + + + Kwanza Angólach (1977-1990) + AOK + + + Kwanza Nua Angólach (1990-2000) + AON + + + Kwanza Reajustado Angólach (1995-1999) + AOR + + + Escudo Angólach + AOS + + + Austral Airgintíneach + ARA + + + Peso Moneda Nacional Airgintíneach + ARM + + + Peso na Airgintíne (1983-1985) + ARP + + + Peso na Airgintíne + Arg$ + + + Scilling Ostarach + ATS + + + Dollar Astrálach + A$ + + + Punt Astrálach + AUP + + + Guilder Aruba + AWG + + + Manat Asarbaiseánach + AZM + + + Dínear Bhoisnia-Heirseagaivéin + BAD + + + Marc Inathraithe Bhoisnia-Heirseagaivéin + KM + + + Dínear Nua Bhoisnia-Heirseagaivéin + BAN + + + Dollar Bharbadóis + BDS$ + + + Taka Bhanglaidéiseach + Tk + + + Franc Beilgeach (inathraithe) + BEC + + + Franc Beilgeach + BF + + + Franc Beilgeach (airgeadúil) + BEL + + + Lev Bulgárach Crua + lev + + + Lev Bulgárach Sóisialaíoch + BGM + + + Lev Nua Bulgárach + BGN + + + Lev Bulgárach (1879-1952) + BGO + + + Teastais Airgeadraí Lev Bulgárach + BGX + + + Dínear na Bairéine + BD + + + Franc na Burúine + Fbu + + + Dollar Bheirmiúda + Ber$ + + + Punt Bheirmiúda + BMP + + + Dollar Bhrúiné + BND + + + Boliviano + Bs + + + Boliviano (1863-1962) + BOL + + + Peso na Bolaive + BOP + + + 
Mvdol Bolavach + BOV + + + Cruzeiro Novo Brasaíleach (1967-1986) + BRB + + + Cruzado Brasaíleach + BRC + + + Cruzeiro Brasaíleach (1990-1993) + BRE + + + Real Brasaíleach + R$ + + + Cruzado Novo Brasaíleach + BRN + + + Cruzeiro Brasaíleach + BRR + + + Cruzeiro Brasaíleach (1942-1967) + BRZ + + + Dollar na mBahámaí + BSD + + + Punt na mBahámaí + BSP + + + Ngultrum Bútánach + Nu + + + Rúipí na Bútáine + BTR + + + Kyat Burmach + BUK + + + Rúipí Bhurma + BUR + + + Pula Botsuánach + BWP + + + Rúbal Nua Béalarúiseach (1994-1999) + BYB + + + Rúbal Béalarúiseach (1992-1994) + BYL + + + Rúbal Béalarúiseach + Rbl + + + Dollar na Beilíse + BZ$ + + + Dollar Hondúrais Bhriotanaigh + BZH + + + Dollar Ceanada + Can$ + + + Franc Congolais an Chongó + CDF + + + Franc Phoblacht an Chongó + CDG + + + Zaire an Chongó + CDL + + + CFA Franc Phoblacht na hAfraice Láir + CFF + + + Franc na hEilvéise + CHF + + + Dollar Oileáin Cook + CKD + + + Condor na Sile + CLC + + + Escudo na Sile + CLE + + + Unidades de Fomento na Sile + CLF + + + Peso na Sile + Ch$ + + + CFA Franc Chamarúin + CMF + + + Jen Min Piao Yuan Síneach + CNP + + + Teastais Airgeadraí Dollar SAM Síneach + CNX + + + Yuan Renminbi Síneach + Y + + + Peso Páipéir na Colóime + COB + + + CFA Franc Chongó + COF + + + Peso na Colóime + Col$ + + + Colon Chósta Ríce + C + + + Koruna na Seicslóvaice + CSC + + + Koruna Crua na Seicslóvaice + CSK + + + Peso Cúba + CUP + + + Teastais Airgeadraí Chúba + CUX + + + Escudo na Rinne Verde + CVEsc + + + Guilder Curacao + CWG + + + Punt na Cipire + £C + + + Koruna Phoblacht na Seice + CZK + + + Ostmark na hOirGhearmáine + DDM + + + Deutsche Mark + DEM + + + Sperrmark Gearmánach + DES + + + Franc Djibouti + DF + + + Krone Danmhargach + DKr + + + Peso Doimineacach + RD$ + + + Dínear na hAilgéire + DA + + + Franc Nua Ailgérach + DZF + + + Franc Germinal Ailgérach + DZG + + + Sucre Eacuadóir + ECS + + + Unidad de Valor Constante (UVC) Eacuadóir + ECV + + + Kroon na hEastóine + EEK + + + Punt na 
hÉigipte + EGP + + + Peseta Spáinneach + ESP + + + Birr na hAetóipe + Br + + + Dollar na hAetóipe + ETD + + + Euro + + + + Markka Fionnlannach + FIM + + + Markka Fionnlannach (1860-1962) + FIN + + + Dollar Fhidsí + F$ + + + Punt Fhidsí + FJP + + + Punt Oileáin Fháclainne + FKP + + + Kronur Oileáin Fharó + FOK + + + Franc Francach + FRF + + + Franc Germinal Francach/Franc Poincare + FRG + + + CFA Franc na Gabúine + GAF + + + Punt Steirling + £ + + + Kupon Larit na Grúise + GEK + + + Lari na Grúise + lari + + + Cedi Ghána + GHC + + + Sean-Cedi Ghána + GHO + + + Punt Ghána + GHP + + + Cedi Athluachtha Ghána + GHR + + + Punt Ghiobráltair + GIP + + + Krone na Graonlainne + GLK + + + Dalasi Gaimbia + GMD + + + Punt Gaimbia + GMP + + + Franc Guine + GF + + + Franc Guine (1960-1972) + GNI + + + Syli Guine + GNS + + + Franc Guadeloupe + GPF + + + Ekwele Guineana na Guine Meánchriosaí + GQE + + + Franco na Guine Meánchriosaí + GQF + + + Peseta Guineana na Guine Meánchriosaí + GQP + + + Drachma Gréagach + GRD + + + Drachma Nua Gréagach + GRN + + + Quetzal Guatamala + Q + + + Franc Guiana na Guáine Francaí + GUF + + + Escudo na Guine Portaingéalaí + GWE + + + Mil Reis na Guine Portaingéalaí + GWM + + + Peso Guine-Bhissau + GWP + + + Dollar na Guáine + G$ + + + Dollar Hong Cong + HK$ + + + Lempira Hondúrais + L + + + Dínear na Cróite + HRD + + + Kuna Crótach + HRK + + + Gourde Háití + HTG + + + Forint Ungárach + Ft + + + Punt Thuaisceart Éireann + IBP + + + Nica Guilder Indinéiseach + IDG + + + Java Rupiah Indinéiseach + IDJ + + + Rupiah Nua Indinéiseach + IDN + + + Rupiah Indinéiseach + Rp + + + Punt Éireannach + IR£ + + + Sheqel Iosraelach + ILL + + + Punt Iosraelach + ILP + + + Sheqel Nua Iosraelach + ILS + + + Punt Steirling Oileán Mhanann + IMP + + + Rúipí India + =0#Rs.|1#Re.|1<Rs. 
+ + + Dínear Irácach + ID + + + Rial Iaránach + RI + + + Krona Íoslannach + ISK + + + Lira Iodálach + + + + Punt Steirling Gheirsí + JEP + + + Dollar Iamácach + J$ + + + Punt Iamácach + JMP + + + Dínear Iordánach + JD + + + Yen Seapánach + ¥ + + + Scilling Céiniach + K Sh + + + Som na Cirgeastáine + som + + + Sean-Riel na Cambóide + KHO + + + Riel na Cambóide + CR + + + Dollar Chireabaití + KID + + + Franc Chomóra + CF + + + Won Na nDaoine na Cóiré Thuaidh + KPP + + + Won na Cóiré Thuaidh + KPW + + + Hwan na Cóiré Theas + KRH + + + Sean-Won na Cóiré Theas + KRO + + + Won na Cóiré Theas + KRW + + + Dínear Cuátach + KD + + + Dollar Oileáin Cayman + KYD + + + Rúbal Casacstánach + KZR + + + Tenge Casacstánach + T + + + Kip Laosach + LAK + + + Punt na Liobáine + LL + + + Franc Lichtinstéin + LIF + + + Rúipí Srí Lanca + SL Re + + + Rúipí na Siolióne + LNR + + + Dollar na Libéire + LRD + + + Loti Leosóta + M + + + Lita Liotuánach + LTL + + + Talonas Liotuánach + LTT + + + Franc Lucsamburg + LUF + + + Lats Laitviach + LVL + + + Rúbal Laitviach + LVR + + + Lira Údarás Míleata Briotanach Libia + LYB + + + Dínear Libia + LD + + + Punt Libia + LYP + + + Dirham Mharacó + MAD + + + Franc Mharacó + MAF + + + Franc Nouveau Mhonacó + MCF + + + Franc Germinal Mhonacó + MCG + + + Leu Cúpóin Moldóvach + MDC + + + Leu Moldóvach + MDL + + + Rúbal Cúpóin Moldóvach + MDR + + + Ariary Madagascar + MGA + + + Franc Madagascar + MGF + + + Dollar Oileáin Marshall + MHD + + + Denar na Macadóine + MDen + + + Denar na Macadóine (1992-1993) + MKN + + + Franc Mhailí + MLF + + + Kyat Mhaenmar + MMK + + + Teastais Airgeadra Dollar Mhaenmar + MMX + + + Tugrik Mongólach + Tug + + + Pataca Macao + MOP + + + Franc Martinique + MQF + + + Ouguiya na Maratáine + UM + + + Lira Maltach + Lm + + + Punt Maltach + MTP + + + Rúipí Oileán Mhuirís + MUR + + + Maldive Islands Rúipí + MVP + + + Maldive Islands Rufiyaa + MVR + + + Kwacha na Maláive + MK + + + Punt na Maláive + MWP + + + Peso Meicsiceo + MEX$ + + + 
Peso Airgid Meicsiceo (1861-1992) + MXP + + + Unidad de Inversion (UDI) Meicsiceo + MXV + + + Ringgit Malaeisia + RM + + + Escudo Mósaimbíce + MZE + + + Metical Mósaimbíce + Mt + + + Dollar na Namaibe + N$ + + + Franc Germinal na Nua-Chaladóine + NCF + + + Naira Nígéarach + NGN + + + Punt Nígéarach + NGP + + + CFP Franc Nua-Inse Ghall + NHF + + + Cordoba Nicearagua + NIC + + + Cordoba Ór Nicearagua + NIG + + + Cordoba Oro Nicearagua + NIO + + + Guilder Ísiltíreach + NLG + + + Krone Ioruach + NKr + + + Rúipí Neipeáil + Nrs + + + Dollar na Nua-Shéalainne + $NZ + + + Punt na Nua-Shéalainne + NZP + + + Rial Omain + RO + + + Rial Saidi Omain + OMS + + + Balboa Panamach + PAB + + + Rúbal Cupóin Transdniestria + PDK + + + Rúbal Nua Transdniestria + PDN + + + Transdniestria Rúbal + PDR + + + Inti Pheiriú + PEI + + + Sol Nuevo Pheiriú + PEN + + + Sol Pheiriú + PES + + + Kina Nua-Ghuine Phapua + PGK + + + Peso Filipíneach + PHP + + + Rúipí na Pacastáine + Pra + + + Zloty Polannach + Zl + + + Teastais Airgeadra Dollar SAM Polannach + PLX + + + Zloty Polannach (1950-1995) + PLZ + + + Punt Pailistíneach + PSP + + + Conto Portaingéalach + PTC + + + Escudo Portaingélach + PTE + + + Guarani Pharagua + PYG + + + Rial Catarach + QR + + + Franc Réunion + REF + + + Leu Rómánach + leu + + + Leu Nua Rómánach + RON + + + Rúbal Rúiseach + RUB + + + Rúbal Rúiseach (1991-1998) + RUR + + + Franc Ruanda + RWF + + + Riyal Sádach + SRl + + + Dollar Oileáin Solomon + SI$ + + + Rúipí na Séiséil + SR + + + Dínear na Súdáine + SDD + + + Punt na Súdáine + SDP + + + Krona Sualannach + SKr + + + Dollar Singeapóir + S$ + + + Punt San Héilin + SHP + + + Tolar Bons Slóvéanach + SIB + + + Tolar Slóvénach + SIT + + + Koruna na Slóvaice + Sk + + + Leone Shiarra Leon + SLL + + + Lira San Marino + SML + + + Scilling na Sómáile + So. Sh. 
+ + + Guilder Shuranaim + Sf + + + Punt Albanach + SSP + + + Dobra Sao Tome agus Principe + Db + + + Escudo Sao Tome agus Principe + STE + + + Rúbal Nua Sóvéadach + SUN + + + Rúbal Sóvéadach + SUR + + + Colon na Salvadóire + SVC + + + Punt Siria + LS + + + Lilangeni na Suasalainne + E + + + CFA Franc Sead + TDF + + + Baht na Téalainne + THB + + + Rúbal na Táidsíceastáine + TJR + + + Somoni na Táidsíceastáine + TJS + + + Manat na An Tuircméanastáine + TMM + + + Dínear na Túinéise + TND + + + Paʻanga Tonga + T$ + + + Punt Steirling Tonga + TOS + + + Escudo Tíomóir + TPE + + + Pataca Tíomóir + TPP + + + Lira Turcach + TL + + + Dollar Oileáin na Tríonóide agus Tobága + TT$ + + + Sean-Dollar Oileáin na Tríonóide agus Tobága + TTO + + + Dollar Tuvalu + TVD + + + Dollar Nua na Téaváine + NT$ + + + Scilling na Tansáine + T Sh + + + Hryvnia Úcránach + UAH + + + Karbovanetz Úcránach + UAK + + + Scilling Uganda (1966-1987) + UGS + + + Scilling Uganda + U Sh + + + Dollar S.A.M. + $ + + + Dollar S.A.M. (an chéad lá eile) + USN + + + Dollar S.A.M. 
(an la céanna) + USS + + + Peso Fuerte Uragua + UYF + + + Peso Uragua (1975-1993) + UYP + + + Peso Uruguayo Uragua + Ur$ + + + Som Cúpóin na hÚisbéiceastáine + UZC + + + Sum na hÚisbéiceastáine + UZS + + + Lira na Vatacáine + VAL + + + Piastre Dong Viet Vítneam Thuaidh + VDD + + + Dong Nua Vítneam Thuaidh + VDN + + + Viet Minh Piastre Dong Viet Vítneam Thuaidh + VDP + + + Bolivar Veiniséala + Be + + + Dollar Oileáin Bhriotanacha na Maighdean + VGD + + + Dong Vítneamach + VND + + + Dong Nua Vítneamach + VNN + + + Dong Phoblacht Vítneaim + VNR + + + Dong Náisiúnta Vítneamach + VNS + + + Vatu Vanuatú + VT + + + Punt Samó Thiar + WSP + + + Tala Samó Thiar + WST + + + Dínear Áiseach Unit of Account + XAD + + + CFA Franc BEAC + XAF + + + Aonad Airgeadaíochta na hÁise + XAM + + + Ór + XAU + + + Aonad Ilchodach Eorpach + XBA + + + Aonad Airgeadaíochta Eorpach + XBB + + + Aonad Cuntais Eorpach (XBC) + XBC + + + Aonad Cuntais Eorpach (XBD) + XBD + + + Dollar Oirthear na Cairibe + EC$ + + + CFA Nouveau Franc + XCF + + + Cearta Speisialta Tarraingthe + XDR + + + CFA Franc BCEAEC + XEF + + + Aonad Airgeadra Eorpach + XEU + + + Franc Ór Francach + XFO + + + UIC-Franc Francach + XFU + + + Dínear Ioslamach + XID + + + Nouveau Franc Ceannchathartha Francach + XMF + + + CFA Franc na nAntillí Francach + XNF + + + CFA Franc BCEAO + XOF + + + CFP Franc + CFPF + + + Rúbal Inaistrithe COMECON + XTR + + + Dínear Éimin + YDD + + + Imadi Riyal Éimin + YEI + + + Rial Éimin + YRl + + + Dínear Crua Iúgslavach + YUD + + + Dínear Chónaidhm na hIúgslaive + YUF + + + Dínear 1994 Iúgslavach + YUG + + + Noviy Dinar Iúgslavach + YUM + + + Dínear Inathraithe Iúgslavach + YUN + + + Dínear Dheireadh Fómhar Iúgslavach + YUO + + + Dínear Leasaithe Iúgslavach + YUR + + + Rand na hAfraice Theas (airgeadúil) + ZAL + + + Punt na hAfraice Theas + ZAP + + + Rand na hAfraice Theas + R + + + Kwacha Saimbiach + ZMK + + + Punt Saimbiach + ZMP + + + Zaire Nua Sáíreach + ZRN + + + Zaire Sáíreach + ZRZ + + + Dollar 
Siombábach + Z$ + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/ga_IE.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/ga_IE.xml new file mode 100644 index 0000000..18ee4d7 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/ga_IE.xml @@ -0,0 +1,105 @@ + + + + + + + + + + + + + + + + + EEEE d MMMM yyyy + + + + + d MMMM yyyy + + + + + d MMM yyyy + + + + + dd/MM/yyyy + + + + + + + + HH:mm:ss z + + + + + HH:mm:ss z + + + + + HH:mm:ss + + + + + HH:mm + + + + + + + {1} {0} + + + + + + + + + + + #,##0.###;-#,##0.### + + + + + + + #E0 + + + + + + + #,##0% + + + + + + + ¤#,##0.00;-¤#,##0.00 + + + + + + Punt Éireannach + £ + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/gez.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/gez.xml new file mode 100644 index 0000000..f10d9e0 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/gez.xml @@ -0,0 +1,374 @@ + + + + + + + + + + + am + አፋርኛ + አብሐዚኛ + አፍሪቃንስኛ + አምሐረኛ + ዐርቢኛ + አሳሜዛዊ + አያማርኛ + አዜርባይጃንኛ + ባስኪርኛ + ቤላራሻኛ + ቡልጋሪኛ + ቢሃሪ + ቢስላምኛ + በንጋሊኛ + ትበትንኛ + ብሬቶንኛ + ብሊን + ካታላንኛ + ኮርሲካኛ + ቼክኛ + ወልሽ + ዴኒሽ + ጀርመን + ድዞንግኻኛ + ግሪክኛ + እንግሊዝኛ + ኤስፐራንቶ + ስፓኒሽ + ኤስቶኒአን + ባስክኛ + ፐርሲያኛ + ፊኒሽ + ፊጂኛ + ፋሮኛ + ፈረንሳይኛ + ፍሪስኛ + አይሪሽ + እስኮትስ፡ጌልክኛ + ግዕዝኛ + ጋለጋኛ + ጓራኒኛ + ጉጃርቲኛ + ሃውሳኛ + ዕብራስጥ + ሐንድኛ + ክሮሽያንኛ + ሀንጋሪኛ + አርመናዊ + ኢንቴርሊንጓ + እንዶኒሲኛ + እንተርሊንግወ + እኑፒያቅኛ + አይስላንድኛ + ጣሊያንኛ + እኑክቲቱትኛ + ጃፓንኛ + ጃቫንኛ + ጊዮርጊያን + ካዛክኛ + ካላሊሱትኛ + ክመርኛ + ካናዳኛ + ኮሪያኛ + ካሽሚርኛ + ኩርድሽኛ + ኪርጊዝኛ + ላቲንኛ + ሊንጋላኛ + ላውስኛ + ሊቱአኒያን + ላትቪያን + ማላጋስኛ + ማዮሪኛ + ማከዶኒኛ + ማላያላምኛ + ሞንጎላዊኛ + ሞልዳቫዊና + ማራዚኛ + ማላይኛ + ማልቲስኛ + ቡርማኛ + ናኡሩ + ኔፓሊኛ + ደች + ኖርዌጂያን + ኦኪታንኛ + ኦሮምኛ + ኦሪያኛ + ፓንጃቢኛ + ፖሊሽ + ፑሽቶኛ + ፖርቱጋሊኛ + ኵቿኛ + ሮማንስ + ሩንዲኛ + ሮማኒያን + ራሽኛ + ኪንያርዋንድኛ + ሳንስክሪትኛ + ሲንድሂኛ + ሳንጎኛ + ስንሃልኛ + ሲዳምኛ + ስሎቫክኛ + ስሎቪኛ + ሳሞአኛ + ሾናኛ + ሱማልኛ + ልቤኒኛ + ሰርቢኛ + ስዋቲኛ + ሶዞኛ + ሱዳንኛ + ስዊድንኛ + ስዋሂሊኛ + ታሚልኛ + ተሉጉኛ + ታጂኪኛ + ታይኛ + ትግርኛ + ትግረ + ቱርክመንኛ + ታጋሎገኛ + ጽዋናዊኛ + ቶንጋ 
+ ቱርክኛ + ጾንጋኛ + ታታርኛ + ትዊኛ + ኡዊግሁርኛ + ዩክረኒኛ + ኡርዱኛ + ኡዝበክኛ + ቪትናምኛ + ቮላፑክኛ + ዎሎፍኛ + ዞሳኛ + ይዲሻዊኛ + ዮሩባዊኛ + ዡዋንግኛ + ቻይንኛ + ዙሉኛ + + + አንዶራ + የተባበሩት፡አረብ፡ኤምሬትስ + አልባኒያ + አርሜኒያ + ኔዘርላንድስ፡አንቲልስ + አርጀንቲና + ኦስትሪያ + አውስትሬሊያ + አዘርባጃን + ቦስኒያ፡እና፡ሄርዞጎቪኒያ + ባርቤዶስ + ቤልጄም + ቡልጌሪያ + ባህሬን + ቤርሙዳ + ቦሊቪያ + ብራዚል + ቡህታን + ቤላሩስ + ቤሊዘ + የመካከለኛው፡አፍሪካ፡ሪፐብሊክ + ስዊዘርላንድ + ቺሊ + ካሜሩን + ቻይና + ኮሎምቢያ + ኬፕ፡ቬርዴ + ሳይፕረስ + ቼክ፡ሪፑብሊክ + ጀርመን + ዴንማርክ + ዶሚኒካ + ዶሚኒክ፡ሪፑብሊክ + አልጄሪያ + ኢኳዶር + ኤስቶኒያ + ግብጽ + ምዕራባዊ፡ሳህራ + ኤርትራ + ስፔን + ኢትዮጵያ + ፊንላንድ + ፊጂ + ሚክሮኔዢያ + እንግሊዝ + ጆርጂያ + የፈረንሳይ፡ጉዊአና + ጋምቢያ + ጊኒ + ኢኳቶሪያል፡ጊኒ + ግሪክ + ቢሳዎ + ጉያና + ሆንግ፡ኮንግ + ክሮኤሽያ + ሀይቲ + ሀንጋሪ + ኢንዶኔዢያ + አየርላንድ + እስራኤል + ህንድ + ኢራቅ + አይስላንድ + ጣሊያን + ጃማይካ + ጆርዳን + ጃፓን + ካምቦዲያ + ኮሞሮስ + ደቡብ፡ኮሪያ + ሰሜን፡ኮሪያ + ክዌት + ሊባኖስ + ሊቱዌኒያ + ላትቪያ + ሊቢያ + ሞሮኮ + ሞልዶቫ + ማከዶኒያ + ሞንጎሊያ + ማካዎ + ሞሪቴኒያ + ማልታ + ማሩሸስ + ሜክሲኮ + ማሌዢያ + ናሚቢያ + ኒው፡ካሌዶኒያ + ናይጄሪያ + ኔዘርላንድ + ኖርዌ + ኔፓል + ኒው፡ዚላንድ + ፔሩ + የፈረንሳይ፡ፖሊኔዢያ + ፓፑዋ፡ኒው፡ጊኒ + ፖላንድ + ፖርታ፡ሪኮ + ሮሜኒያ + ራሺያ + ሳውድአረቢያ + ሱዳን + ስዊድን + ሲንጋፖር + ስሎቬኒያ + ስሎቫኪያ + ሴኔጋል + ሱማሌ + ሰርቢያ + ሲሪያ + ቻድ + የፈረንሳይ፡ደቡባዊ፡ግዛቶች + ታይላንድ + ታጃኪስታን + ምስራቅ፡ቲሞር + ቱኒዚያ + ቱርክ + ትሪኒዳድ፡እና፡ቶባጎ + ታንዛኒያ + ዩጋንዳ + አሜሪካ + ዩዝበኪስታን + ቬንዙዌላ + የእንግሊዝ፡ድንግል፡ደሴቶች + የአሜሪካ፡ቨርጂን፡ደሴቶች + የመን + ዩጎዝላቪያ + ደቡብ፡አፍሪካ + ዛምቢያ + + + + [:Ethi:] + + + + + + + + ጠሐረ + ከተተ + መገበ + አኀዘ + ግንባ + ሠንየ + ሐመለ + ነሐሰ + ከረመ + ጠቀመ + ኀደረ + ኀሠሠ + + + ጠሐረ + ከተተ + መገበ + አኀዘ + ግንባት + ሠንየ + ሐመለ + ነሐሰ + ከረመ + ጠቀመ + ኀደረ + ኀሠሠ + + + + + + + እኁድ + ሰኑይ + ሠሉስ + ራብዕ + ሐሙስ + ዓርበ + ቀዳሚ + + + እኁድ + ሰኑይ + ሠሉስ + ራብዕ + ሐሙስ + ዓርበ + ቀዳሚት + + + + + + + + ጽባሕ + ምሴት + + + ዓ/ዓ + ዓ/ም + + + + + + + + + ERN + ERN + + + ETB + ETB + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/gez_ER.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/gez_ER.xml new file mode 100644 index 0000000..dce7e4d --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/gez_ER.xml @@ -0,0 +1,105 @@ + + + + + + + + + + + + + + + + + EEEE፥ dd MMMM መዓልት yyyy G + + + + + dd MMMM yyyy + + + + + 
dd-MMM-yyyy + + + + + dd/MM/yy + + + + + + + + h:mm:ss a + + + + + h:mm:ss a + + + + + h:mm:ss a + + + + + h:mm a + + + + + + + {1} {0} + + + + + + + + + + + #ወ##0.###;-#ወ##0.### + + + + + + + #E0 + + + + + + + #ወ##0% + + + + + + + ¤#ወ##0.00;-¤#ወ##0.00 + + + + + + ERN + $ + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/gez_ET.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/gez_ET.xml new file mode 100644 index 0000000..98b8f09 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/gez_ET.xml @@ -0,0 +1,105 @@ + + + + + + + + + + + + + + + + + EEEE፥ dd MMMM መዓልት yyyy G + + + + + dd MMMM yyyy + + + + + dd-MMM-yyyy + + + + + dd/MM/yy + + + + + + + + h:mm:ss a + + + + + h:mm:ss a + + + + + h:mm:ss a + + + + + h:mm a + + + + + + + {1} {0} + + + + + + + + + + + #ወ##0.###;-#ወ##0.### + + + + + + + #E0 + + + + + + + #ወ##0% + + + + + + + ¤#ወ##0.00;-¤#ወ##0.00 + + + + + + ETB + $ + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/gl.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/gl.xml new file mode 100644 index 0000000..634ff41 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/gl.xml @@ -0,0 +1,105 @@ + + + + + + + + + + + galego + + + España + + + + [a-záéíóúüñ] + + + + + + + + Xan + Feb + Mar + Abr + Mai + Xuñ + Xul + Ago + Set + Out + Nov + Dec + + + Xaneiro + Febreiro + Marzo + Abril + Maio + Xuño + Xullo + Agosto + Setembro + Outubro + Novembro + Decembro + + + + + + + Dom + Lun + Mar + Mér + Xov + Ven + Sáb + + + Domingo + Luns + Martes + Mércores + Xoves + Venres + Sábado + + + + + + + + + , + . + ; + % + 0 + # + + + - + E + + + + + + + ESP + + ¤ #,##0;-¤ #,##0 + ¤ #,##0;-¤ #,##0 + . 
+ + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/gl_ES.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/gl_ES.xml new file mode 100644 index 0000000..5e9da8c --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/gl_ES.xml @@ -0,0 +1,103 @@ + + + + + + + + + + + + + + + + + + + + + EEEE dd MMMM yyyy + + + + + dd MMMM yyyy + + + + + MMM dd,yy + + + + + dd/MM/yy + + + + + + + + HH:mm:ss z + + + + + HH:mm:ss z + + + + + HH:mm:ss + + + + + HH:mm + + + + + + + {1} {0} + + + + + + + + + + + #,##0.###;-#,##0.### + + + + + + + #E0 + + + + + + + #,##0% + + + + + + + #,##0.00 ¤;-#,##0.00 ¤ + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/gu.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/gu.xml new file mode 100644 index 0000000..8febd5e --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/gu.xml @@ -0,0 +1,113 @@ + + + + + + + + + + + ગુજરાતી + + + ચીન + જમિની + મિસર + જ્યોર્જીયા + ભારત + નેપાળ + કરાંચી + તુર્ક્મનિસ્તાન + તુર્કસ્તાન + સંયુકત રાજ્ય અમેરિકા + + + + [[:Gujr:]‌‍] + + + + + + + + જાન્યુ + ફેબ્રુ + માર્ચ + એપ્રિલ + મે + જૂન + જુલાઈ + ઑગસ્ટ + સપ્ટે + ઑક્ટો + નવે + ડિસે + + + જાન્યુઆરી + ફેબ્રુઆરી + માર્ચ + એપ્રિલ + મે + જૂન + જુલાઈ + ઑગસ્ટ + સપ્ટેમ્બર + ઑક્ટ્બર + નવેમ્બર + ડિસેમ્બર + + + + + + + રવિ + સોમ + મંગળ + બુધ + ગુરુ + શુક્ર + શનિ + + + રવિવાર + સોમવાર + મંગળવાર + બુધવાર + ગુરુવાર + શુક્રવાર + શનિવાર + + + + પૂર્વ મધ્યાહ્ન + ઉત્તર મધ્યાહ્ન + + + + + + . 
+ , + ; + % + + # + + + - + E + + + + + + + INR + રુ + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/gu_IN.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/gu_IN.xml new file mode 100644 index 0000000..c650a78 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/gu_IN.xml @@ -0,0 +1,99 @@ + + + + + + + + + + + + + + + + + EEEE d MMMM yyyy + + + + + d MMMM yyyy + + + + + dd-MM-yyyy + + + + + d-MM-yy + + + + + + + + hh:mm:ss a z + + + + + hh:mm:ss a z + + + + + hh:mm:ss a + + + + + hh:mm a + + + + + + + {1} {0} + + + + + + + + + + + ##,##,##0.###;-##,##,##0.### + + + + + + + #E0 + + + + + + + ##,##,##0% + + + + + + + ¤ ##,##,##0.00;-¤ ##,##,##0.00 + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/gv.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/gv.xml new file mode 100644 index 0000000..79a50e8 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/gv.xml @@ -0,0 +1,88 @@ + + + + + + + + + + + Gaelg + + + Rywvaneth Unys + + + + [a-zç] + + + + + + + + J-guer + T-arree + Mayrnt + Avrril + Boaldyn + M-souree + J-souree + Luanistyn + M-fouyir + J-fouyir + M.Houney + M.Nollick + + + Jerrey-geuree + Toshiaght-arree + Mayrnt + Averil + Boaldyn + Mean-souree + Jerrey-souree + Luanistyn + Mean-fouyir + Jerrey-fouyir + Mee Houney + Mee ny Nollick + + + + + + + Jed + Jel + Jem + Jerc + Jerd + Jeh + Jes + + + Jedoonee + Jelhein + Jemayrt + Jercean + Jerdein + Jeheiney + Jesarn + + + + a.m. + p.m. 
+ + + RC + AD + + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/gv_GB.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/gv_GB.xml new file mode 100644 index 0000000..57e1ebb --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/gv_GB.xml @@ -0,0 +1,103 @@ + + + + + + + + + + + + + + + + + + + + + EEEE dd MMMM yyyy + + + + + dd MMMM yyyy + + + + + MMM dd,yy + + + + + dd/MM/yy + + + + + + + + HH:mm:ss z + + + + + HH:mm:ss + + + + + HH:mm:ss + + + + + HH:mm + + + + + + + {1} {0} + + + + + + + + + + + #,##0.###;-#,##0.### + + + + + + + #E0 + + + + + + + #,##0% + + + + + + + ¤#,##0.00;-¤#,##0.00 + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/haw.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/haw.xml new file mode 100644 index 0000000..7a175fe --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/haw.xml @@ -0,0 +1,99 @@ + + + + + + + + + + + ʻōlelo Hawaiʻi + + + Nūhōlani + Kanakā + Kina + Kelemānia + Kenemaka + Kepania + Palani + Aupuni Mōʻī Hui Pū ʻIa + Helene + ʻIlelani + ʻIseraʻela + ʻĪnia + ʻĪkālia + Iāpana + Mekiko + Hōlani + Aotearoa + ʻĀina Pilipino + Lūkia + ʻAmelika Hui Pū ʻIa + + + + [āēīōūaeiouhklmnpwʻ] + + + + + + + + Ian. + Pep. + Mal. + ʻAp. + Mei + Iun. + Iul. + ʻAu. + Kep. + ʻOk. + Now. + Kek. 
+ + + Ianuali + Pepeluali + Malaki + ʻApelila + Mei + Iune + Iulai + ʻAukake + Kepakemapa + ʻOkakopa + Nowemapa + Kekemapa + + + + + + + LP + P1 + P2 + P3 + P4 + P5 + P6 + + + Lāpule + Poʻakahi + Poʻalua + Poʻakolu + Poʻahā + Poʻalima + Poʻaono + + + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/haw_US.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/haw_US.xml new file mode 100644 index 0000000..9b5dd5c --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/haw_US.xml @@ -0,0 +1,106 @@ + + + + + + + + + + + + + 279 + 216 + + + + + + + + + + EEEE, d MMMM yyyy + + + + + d MMMM yyyy + + + + + d MMM yyyy + + + + + d/M/yy + + + + + + + + h:mm:ss a z + + + + + h:mm:ss a z + + + + + h:mm:ss a + + + + + h:mm a + + + + + + + {1} {0} + + + + + + + + + + + #,##0.###;-#,##0.### + + + + + + + #E0 + + + + + + + #,##0% + + + + + + + ¤#,##0.00;(¤#,##0.00) + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/he.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/he.xml new file mode 100644 index 0000000..0718262 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/he.xml @@ -0,0 +1,537 @@ + + + + + + + + + + + ערבית + בולגרית + צ׳כית + דנית + גרמנית + יוונית + אנגלית + ספרדית + אסטונית + פינית + צרפתית + עברית + קרואטית + הונגרית + איטלקית + יפנית + קוריאנית + ליטאית + לטבית + הולנדית + נורווגית + פולנית + פורטוגזית + רומנית + רוסית + סלובקית + סלובנית + שוודית + טורקית + סינית + + + אנדורה + איחוד האמירויות הערביות + אפגניסטן + אנטיגואה וברבודה + אנגילה + אלבניה + ארמניה + האינטילים ההולנדיים + אנגולה + אנטארקטיקה + ארגנטינה + סמואה האמריקנית + אוסטריה + אוסטרליה + ארובה + אזרבייג׳ן + בוסניה הרצגובינה + ברבדוס + בנגלדש + בלגיה + בורקינה פאסו + בולגריה + בחריין + בורונדי + בנין + ברמודה + ברוניי דארסלאם + בוליביה + ברזיל + בהאמה + בוטאן + האי בובה + בוטסוואנה + בלרוס + בליז + קנדה + איי קוקוס (קילינג) + קונגו, הרפובליקה הדמוקרטית 
של + הרפובליקה של מרכז אפריקה + קונגו + שווייץ + חוף השנהב + איי קוק + צ׳ילה + קמרון + סין + קולומביה + קוסטה ריקה + קובה + קייפ ורדה + איי כריסטמס + קפריסין + הרפובליקה הצ׳כית + גרמניה + ג׳יבוטי + דנמרק + דומיניקה + הרפובליקה הדומיניקנית + אלג׳יריה + אקוואדור + אסטוניה + מצרים + סהרה המערבית + אריתריאה + ספרד + אתיופיה + פינלנד + פיג׳י + איי פוקלנד + מאוריציוס, המדינות המאוגדות של + איי פארו + צרפת + גבון + בריטניה + גרנדה + גרוזיה + גיאנה הצרפתית + גאנה + גיברלטר + גרינלנד + גמביה + גיניאה + גוואדלופ + גיניאה המשוונית + יוון + האי ג׳ורג׳יה הדרומית ואיי סנדוויץ׳ הדרומיים + גווטמאלה + גואם + גיניאה-ביסאו + גיאנה + הונג קונג S.A.R. של סין + איי הרד ואיי מקדונלנד + הונדורס + קרואטיה + האיטי + הונגריה + אינדונזיה + אירלנד + ישראל + הודו + הטריטוריה הבריטית באוקינוס ההודי + עירק + איראן, הרפובליקה האיסלמית + איסלנד + איטליה + ג׳מייקה + ירדן + יפן + קניה + קירגיזסטן + קמבודיה + קיריבאטי + קומורוס + סנט קיטס וניבס + קוריאה, צפון + קוריאה, דרום + כווית + איי קיימאן + קזחסטן + לאוס, הרפובליקה הדמקורטית העממית + לבנון + סנט לושיה + ליכטנשטיין + סרי לנקה + ליבריה + לסוטו + ליטא + לוקסמבורג + לטביה + לוב + מרוקו + מונקו + מולדובה, הרפובליקה + מדגסקר + איי מרשל + מקדוניה, הרפובליקה של + מאלי + מינמאר + מונגוליה + מקאו S.A.R. 
של סין + איי מריאנה הצפוניים + מרטיניק + מאוריטניה + מונטסראט + מלטה + מאוריציוס + מלדיבאס + מלאווי + מכסיקו + מלזיה + מוזמביק + נמיביה + קלדוניה החדשה + ניז׳ר + איי נורפק + ניגריה + ניקראגווה + הולנד + נורווגיה + נפאל + נאורו + ניווה + ניו זילנד + עומן + פנמה + פרו + פולינזיה הצרפתית + פפואה גיניאה החדשה + פיליפינים + פקיסטן + פולין + סנט פייר ומיקלון + פיטקיירן + פורטו ריקו + הרשות הפלשתינית + פורטוגל + פלאו + פראגוואי + קטר + ראוניון + רומניה + חבר המדינות הרוסיות + רואנדה + ערב הסעודית + איי שלמה + איי סיישל + סודן + שוודיה + סינגפור + סיינט הלנה + סלובניה + סוולבארד וז׳אן מאיין + סלובקיה + סיירה לאונה + סן מרינו + סנגל + סומליה + סורינאם + סן תומה ופרינסיפה + אל סלבאדור + הרפובליקה הערבית הסורית + סווזילנד + איי טורקס וקאיקוס + צ׳אד + טריטוריות דרומיות של צרפת + טוגו + תאילנד + טג׳יקיסטן + טוקלאו + מזרח טימור + טורקמניסטן + טוניסיה + טונגה + טורקיה + טרינידד וטובגו + טובאלו + טיוואן + טנזניה + אוקראינה + אוגנדה + איים קטנים שלחוף ארצות הברית + ארצות הברית + אורוגוואי + אוזבקיסטן + הוותיקן + סנט וינסנט והגרנדינים + ונצואלה + איי הבתולה הבריטיים + איי הבתולה האמריקניים + וייטנאם + ואנואטו + ואליס ופוטונה + סמואה + תימן + מיוטה + יוגוסלביה + דרום אפריקה + זמביה + זימבבווה + + + + + + + [[:Hebr:]‏‎] + + + GanjkHmsSEDFwWxhKzAeugXZ + + + + + + ינו + פבר + מרץ + אפר + מאי + יונ + יול + אוג + ספט + אוק + נוב + דצמ + + + ינואר + פברואר + מרץ + אפריל + מאי + יוני + יולי + אוגוסט + ספטמבר + אוקטובר + נובמבר + דצמבר + + + + + + + א + ב + ג + ד + ה + ו + ש + + + יום ראשון + יום שני + יום שלישי + יום רביעי + יום חמישי + יום שישי + שבת + + + + + + לפנה״ס + לסה״נ + + + + + + + EEEE d MMMM yyyy + + + + + d MMMM yyyy + + + + + dd/MM/yyyy + + + + + dd/MM/yy + + + + + + + + HH:mm:ss z + + + + + HH:mm:ss z + + + + + HH:mm:ss + + + + + HH:mm + + + + + + + {0} {1} + + + + + + + + + תשרי + חשון + כסלו + טבת + שבט + אדר ראשון + אדר שני + ניסן + אייר + סיון + תמוז + אב + אלול + + + תשרי + חשון + כסלו + טבת + שבט + אדר ראשון + אדר שני + ניסן + אייר + סיון + תמוז + אב + אלול + + + + + + 
לבה"ע + + + + + + + + מוחרם + ספר + רביע אל-אוואל + רביע אל-תני + ג׳ומדה אל-אוואל + ג׳ומדה אל-תני + רג׳אב + שעבאן + ראמדן + שוואל + זו אל-QI'DAH + זו אל-חיג׳ה + + + מוחרם + ספר + רביע אל-אוואל + רביע אל-תני + ג׳ומדה אל-אוואל + ג׳ומדה אל-תני + רג׳אב + שעבאן + ראמדן + שוואל + זו אל-QI'DAH + זו אל-חיג׳ה + + + + + + שנת היג׳רה + + + + + + + + מוחרם + ספר + רביע אל-אוואל + רביע אל-תני + ג׳ומדה אל-אוואל + ג׳ומדה אל-תני + רג׳אב + שעבאן + ראמדן + שוואל + זו אל-QI'DAH + זו אל-חיג׳ה + + + מוחרם + ספר + רביע אל-אוואל + רביע אל-תני + ג׳ומדה אל-אוואל + ג׳ומדה אל-תני + רג׳אב + שעבאן + ראמדן + שוואל + זו אל-QI'DAH + זו אל-חיג׳ה + + + + + + שנת היג׳רה + + + + + + + + + ש"ח + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/he_IL.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/he_IL.xml new file mode 100644 index 0000000..c2fadb5 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/he_IL.xml @@ -0,0 +1,43 @@ + + + + + + + + + + + + + + + + + #,##0.###;-#,##0.### + + + + + + + #E0 + + + + + + + #,##0% + + + + + + + #,##0.00 ¤;-#,##0.00 ¤ + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/hi.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/hi.xml new file mode 100644 index 0000000..5ab8cea --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/hi.xml @@ -0,0 +1,430 @@ + + + + + + + + + + + अफ़ार + अब्खाज़ियन् + अफ्रीकी + अम्हारिक् + अरबी + असामी + आयमारा + अज़रबैंजानी + बशख़िर + बैलोरूशियन् + बल्गेरियन् + बिहारी + बिस्लामा + बँगाली + तिब्बती + ब्रेटन + कातालान + कोर्सीकन + चेक + वेल्श + डैनीश + ज़र्मन + भुटानी + ग्रीक + अंग्रेजी + एस्पेरान्तो + स्पेनिश + ऐस्तोनियन् + बास्क् + पर्शियन् + फिनिश + फ़ीजी + फिरोज़ी + फ्रेंच + फ्रीज़न् + आईरिश + स्काट्स् गायेलिक् + गैलिशियन् + गुआरानी + गुज़राती + होउसा + हिब्रीऊ + हिंदी + क्रोएशन् + हंगेरी + अरमेनियन् + ईन्टरलिंगुआ + इन्डोनेशियन् + ईन्टरलिंगुइ + इनुपियाक् + आईस्लैंडिक् + ईटालियन् + 
इनूकीटूत् + जापानी + जावानीस + जॉर्जीयन् + कज़ाख + ग्रीनलैंडिक + कैम्बोडियन् + कन्नड़ + कोरीयन् + कोंकणी + काश्मिरी + कुरदीश + किरघिज़ + लैटीन + लिंगाला + लाओथीयन् + लिथुनियन् + लाटवियन् (लेट्टीश) + मालागासी + मेओरी + मैसेडोनियन् + मलयालम + मोंगोलियन + मोलडावियन् + मराठी + मलय + मालटिस् + बर्लिस + नायरू + नेपाली + डच् + नार्वेजीयन् + ओसीटान + ओरोमो (अफ़ान) + उड़िया + पंजाबी + पॉलिश + पॉशतो (पुशतो) + पुर्तुगी + क्वेशुआ + राजेस्थानी + रहेय्टो-रोमान्स + किरून्दी + रूमानीयन् + शिखर + रुसी + किन्यारवाण्डा + संस्कृत + सिन्धी + साँग्रो + सेर्बो-क्रोएशन् + शिंघालीस् + स्लोवाक् + स्लोवेनियन् + सामोन + सोणा + सोमाली + अल्बेनियन् + सर्बियन् + सीस्वाटि + सेसोथो + सुन्दानीस + स्विडिश + स्वाहिली + तमिल + तेलेगु + ताजिक् + थाई + तिग्रीन्या + तुक्रमेन + तागालोग + सेत्स्वाना + टोंगा + तुक्रीश + सोंगा + टाटर + ट्वी + उईघुर + यूक्रेनियन् + ऊर्दु + उज़बेक् + वियेतनामी + वोलापुक + वोलोफ + षोसा + येहुदी + योरूबा + ज़ुआंग + चीनी + ज़ुलू + + + अन्डोरा + संयुक्त अरब अमीरात + अफगानिस्तान + एन्टिगुवा और बारबूडा + अल्बानिया + आर्मेनिया + अंगोला + अर्जेन्टीना + ऑस्ट्रिया + ऑस्ट्रेलिया + अजरबैजान + बोसनिया हर्जिगोविना + बारबाडोस + बंगलादेश + बेल्जियम + बर्किना फासो + बुल्गारिया + बहरैन + बुरुंडी + बेनिन + ब्रूनइ + बोलीविया + ब्राजील + बहामा + भूटान + बोत्स्वाना + बेलारूस + बेलिज + कनाडा + किंशासा + सेंट्रल अफ्रीकन रिपब्लिक + कांगो + स्विस + आईवरी कोस्ट + चिली + कामेरान + चीन + कोलम्बिया + कोस्टारीका + क्यूबा + कैप वर्त + साइप्रस + चेक गणराज्य + जर्मनी + जिबूती + डेनमार्क + डोमिनिका + डोमिनिकन गणराज्य + अल्जीरिया + इक्वाडोर + एस्टोनिया + मिस्र + पश्चिमी सहारा + इरिट्रिया + स्पेन + इथियोपिया + फिनलैंड + फिजी + फ्रांस + ग्रीस + गोतेदाला + गीनी-बिसाउ + गुयाना + हाण्डूरस + क्रोशिया + हाइती + हंगरी + इंडोनेशिया + आयरलैंड + इसराइल + भारत + इराक + ईरान + आइसलैंड + इटली + जमाइका + जोर्डन + जापान + केन्या + किर्गिज + कम्बोडिया + कोमोरस + सेंट किट्स और नेविस + उत्तर कोरिया + दक्षिण कोरिया + कुवैत + कजाखस्तान + लाओस + लेबनान + सेंट लूसिया + लिकटेंस्टीन + श्रीलंका + लाइबेरिया + लेसोथो + लिथुआनिया + 
लक्समबर्ग + लात्विया + लीबिया + मोरक्को + मोनाको + मोल्डाविया + मदागास्कर + मैसेडोनिया + माली + म्यानमार + मंगोलिया + मॉरिटानिया + माल्टा + मौरिस + मालदीव + मलावी + मेक्सिको + मलेशिया + मोजाम्बिक + पनामा + पेरू + पापुआ न्यू गिनी + फिलीपिंस + पाकिस्तान + पोलैंड + पुर्तगाल + पारागुए + कतर + रोमानिया + रूस + रूआण्डा + सऊदी अरब + सूडान + स्वीडन + सिंगापुर + स्लोवेनिया + स्लोवाकिया + सियरालेओन + सैन मेरीनो + सेनेगल + सोमालिया + सुरिनाम + साउ-तोम-प्रिंसिप + अल साल्वाडोर + सीरिया + सुआजीलैंड + चाड + टोगो + थाइलैंड + ताजिकिस्तान + तुर्कमेनिस्तान + तुनिशिया + टोंगा + तुर्की + ट्रिनिडाड और टोबैगो + तुवालु + ताइवान + तंजानिया + यूक्रेन + युगांडा + संयुक्त राज्य अमरिका + युरूगुए + उजबेकिस्तान + वैटिकन + वेनेजुएला + ब्रिटिश वर्जीन ऌईलैंडस् + ईउ, एस वर्जीन आईलैंडस् + वियतनाम + वानुअतु + यमन + दक्षिण अफ्रीका + जाम्बिया + जिम्बाब्वे + + + + [[:Deva:]‌‍] + + + + + + + + जनवरी + फरवरी + मार्च + अप्रैल + मई + जून + जुलाई + अगस्त + सितम्बर + अक्तूबर + नवम्बर + दिसम्बर + + + जनवरी + फरवरी + मार्च + अप्रैल + मई + जून + जुलाई + अगस्त + सितम्बर + अक्तूबर + नवम्बर + दिसम्बर + + + + + + + रवि + सोम + मंगल + बुध + गुरु + शुक्र + शनि + + + रविवार + सोमवार + मंगलवार + बुधवार + गुरुवार + शुक्रवार + शनिवार + + + + पूर्वाह्न + अपराह्न + + + ईसापूर्व + सन + + + + + + + + भारतीय समय + भारतीय समय + + + IST + IST + + + + + + + . 
+ , + ; + % + + # + + + - + E + + + + + + + INR + रु + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/hi_IN.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/hi_IN.xml new file mode 100644 index 0000000..712004c --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/hi_IN.xml @@ -0,0 +1,99 @@ + + + + + + + + + + + + + + + + + EEEE d MMMM yyyy + + + + + d MMMM yyyy + + + + + dd-MM-yyyy + + + + + d-M-yy + + + + + + + + h:mm:ss a z + + + + + h:mm:ss a z + + + + + h:mm:ss a + + + + + h:mm a + + + + + + + {1} {0} + + + + + + + + + + + ##,##,##0.###;-##,##,##0.### + + + + + + + #E0 + + + + + + + ##,##,##0% + + + + + + + ¤ ##,##,##0.00;-¤ ##,##,##0.00 + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/hr.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/hr.xml new file mode 100644 index 0000000..43e0134 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/hr.xml @@ -0,0 +1,487 @@ + + + + + + + + + + + arapski + bugarski + češki + danski + njemački + grčki + engleski + španjolski + estonijski + finski + francuski + hebrejski + hrvatski + mađarski + talijanski + japanski + korejski + litvanski + latvijski + nizozemski + norveški + poljski + portugalski + rumunjski + ruski + slovački + slovenski + švedski + turski + kineski + + + Andora + Ujedinjeni Arapski Emirati + Afganistan + Antigua i Barbuda + Anguila + Albanija + Armenija + Nizozemski Antili + Angola + Antarktik + Argentina + Američka Samoa + Austrija + Australija + Aruba + Azerbajdžan + Bosna i Hercegovina + Barbados + Bangladeš + Belgija + Burkina Faso + Bugarska + Bahrein + Burundi + Benin + Bermuda + Brunei Darussalam + Bolivija + Brazil + Bahami + Butan + Otok Bouvet + Bocvana + Bjelorusija + Belize + Kanada + Kokos (Keeling) Otoci + Kongo, Demokratska Republika + Srednjoafrička Republika + Kongo + Švicarska + Obala Bjelokosti + Kukovi Otoci + Čile + Kamerun + 
Kina + Kolumbija + Kostarika + Kuba + Zeleni Rt + Božićni Otoci + Cipar + Češka Republika + Njemačka + Džibuti + Danska + Dominika + Dominikanska Republika + Alžir + Ekvador + Estonija + Egipat + Zapadna Sahara + Eritreja + Španjolska + Etiopija + Finska + Fidži + Falklandski Otoci + Mikronezija, Federalne Države + Farski Otoci + Francuska + en + Gabon + Ujedinjena Kraljevina + Grenada + Gruzija + Francuska Gvajana + Gana + Gibraltar + Greenland + Gambija + Gvineja + Guadeloupe + Ekvatorska Gvineja + Grčka + Južna Gruzija i Južni Sendvič Otoci + Gvatemala + Guam + Gvineja Bisau + Gvajana + Hong Kong S.A.R. Kine + Heard Otok i McDonald Otoci + Honduras + Hrvatska + Haiti + Mađarska + Indonezija + Irska + Izrael + Indija + Britanski Teritorij Indijskog Oceana + Irak + Iran, Islamska Republika + Island + Italija + Jamajka + Jordan + Japan + Kenija + Kirgistan + Kambodža + Kiribati + Komori + Sveti Kristofor i Nevis + Koreja, Sjeverna + Koreja, Južna + Kuvajt + Kajmanski Otoci + Kazakstan + Laoska Narodna Demokratska Republika + Libanon + Sveta Lucija + Lihtenštajn + Šri Lanka + Liberija + Lesoto + Litva + Luksemburg + Latvija + Libijska Arapska Džamahirija + Maroko + Monako + Moldavija, Republika + Madagaskar + Maršalovi Otoci + Makedonija, Republika + Mali + Mijanma + Mongolija + Makao S.A.R. 
Kine + Sjeverni Marianski Otoci + Martinik + Mauritanija + Montserat + Malta + Mauricijus + Maldivi + Malavi + Meksiko + Malezija + Mozambik + Namibija + Nova Kaledonija + Niger + Norfolški Otoci + Nigerija + Nikaragva + Nizozemska + Norveška + Nepal + Nauru + Niue + Novi Zeland + Oman + Panama + Peru + Francuska Polinezija + Papua Nova Gvineja + Filipini + Pakistan + Poljska + Sveti Petar i Miguel + Pitcairn + Portoriko + Palestinska Teritoija + Portugal + Palau + Paragvaj + Katar + Reunion + Rumunjska + Ruska Federacija + Ruanda + Saudijska Arabija + Salamunovi Otoci + Sejšeli + Sudan + Švedska + Singapur + Sveta Helena + Slovenija + Svalbard i Jan Mayen + Slovačka + Sijera Leone + San Marino + Senegal + Somalija + Serbia + Surinam + Sveti Toma i Prinsipe + El Salvador + Sirija + Svazi + Turkski i Kaikos Otoci + Čad + Francuske Južne Teritorije + Togo + Tajland + Tadžikistan + Tokelau + Istočni Timor + Turkmenistan + Tunis + Tonga + Turska + Trinidad i Tobago + Tuvalu + Tajvan, Kineska Pokrajina + Tanzanija + Ukrajina + Uganda + Sjedinjene Države Manji Vanjski Otoci + Sjedinjene Države + Urugvaj + Uzbekistan + Sveta Stolica (Država Vatikanskog Grada) + Sveti Vincent i Grenadini + Venezuela + Britanski Djevičanski Otoci + U.S. 
Djevičanski Otoci + Vijetnam + Vanuatu + Wallis i Futuna + Samoa + Jemen + Majote + Jugoslavija + Južna Afrika + Zambija + Zimbabve + + + + [a-p r-v z đ ć č ž š {lj} {nj} {dž}] + + + GanjkHmsSEDFwWxhKzAeugXZ + + + + + + sij + vel + ožu + tra + svi + lip + srp + kol + ruj + lis + stu + pro + + + s + v + o + t + s + l + s + k + r + l + s + p + + + siječnja + veljače + ožujka + travnja + svibnja + lipnja + srpnja + kolovoza + rujna + listopada + studenoga + prosinca + + + + + sij + vel + ožu + tra + svi + lip + srp + kol + ruj + lis + stu + pro + + + s + v + o + t + s + l + s + k + r + l + s + p + + + siječanj + veljača + ožujak + travanj + svibanj + lipanj + srpanj + kolovoz + rujan + listopad + studeni + prosinac + + + + + + + ned + pon + uto + sri + čet + pet + sub + + + nedjelja + ponedjeljak + utorak + srijeda + četvrtak + petak + subota + + + + + + + + + + + + yyyy. MMMM dd + + + + + yyyy. MMMM dd + + + + + yyyy.MM.dd + + + + + yyyy.MM.dd + + + + + + + + HH:mm:ss z + + + + + HH:mm:ss z + + + + + HH:mm:ss + + + + + HH:mm + + + + + + + {1} {0} + + + + + + + + + , + . 
+ ; + % + 0 + # + + + - + E + + + + + + + HRK + Kn + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/hr_HR.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/hr_HR.xml new file mode 100644 index 0000000..6811413 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/hr_HR.xml @@ -0,0 +1,10 @@ + + + + + + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/hu.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/hu.xml new file mode 100644 index 0000000..e8c19f4 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/hu.xml @@ -0,0 +1,672 @@ + + + + + + + + + + + afar + abház + afrikai + amhara + arab + asszámi + ajmara + azerbajdzsáni + baskír + belorusz + bolgár + bihari + bislama + bengáli + tibeti + breton + katalán + korzikai + cseh + walesi + dán + német + butáni + görög + angol + eszperantó + spanyol + észt + baszk + perzsa + finn + fidzsi + feröeri + francia + fríz + ír + skót (gael) + galíciai + guarani + gudzsaráti + hausza + héber + hindi + horvát + magyar + örmény + interlingua + indonéz + interlingue + inupiak + izlandi + olasz + inuktitut + japán + jávai + grúz + kazah + grönlandi + kambodzsai + kannada + koreai + kasmíri + kurd + kirgiz + latin + lingala + laoszi + litván + lett + madagaszkári + maori + macedón + malajalam + mongol + moldvai + marati + maláj + máltai + burmai + naurui + nepáli + holland + norvég + okszitán + oromói + orija + pandzsábi + lengyel + pastu (afgán) + portugál + kecsua + rétoromán + kirundi + román + orosz + kiruanda + szanszkrit + szindi + sango + szerb-horvát + szingaléz + szlovák + szlovén + szamoai + sona + szomáli + albán + szerb + sziszuati + szeszotó + szundanéz + svéd + szuahéli + tamil + telugu + tadzsik + thai + tigrinya + türkmén + tagalog + szecsuáni + tonga + török + conga + tatár + tui + ujgur + ukrán + urdu + üzbég + vietnámi + volapük + volof + hosza + zsidó + joruba + 
zsuang + kínai + zulu + + + Andorra + Egyesült Arab Emirátus + Afganisztán + Antigua és Barbuda + Anguilla + Albánia + Örményország + Holland Antillák + Angola + Antarktisz + Argentína + Amerikai Szamoa + Ausztria + Ausztrália + Aruba + Azerbajdzsán + Bosznia-Hercegovina + Barbados + Banglades + Belgium + Burkina Faso + Bulgária + Bahrain + Burundi + Benin + Bermuda + Brunei Darussalam + Bolívia + Brazília + Bahamák + Bhután + Bouvet-sziget + Botswana + Fehéroroszország + Beliz + Kanada + Kókusz (Keeling)-szigetek + Kongó, Demokratikus köztársaság + Közép-afrikai Köztársaság + Kongó + Svájc + Elefántcsontpart + Cook-szigetek + Chile + Kamerun + Kína + Kolumbia + Costa Rica + Kuba + Zöld-foki Köztársaság + Karácsony-szigetek + Ciprus + Cseh Köztársaság + Németország + Dzsibuti + Dánia + Dominika + Dominikai Köztársaság + Algéria + Ecuador + Észtország + Egyiptom + Nyugat Szahara + Eritrea + Spanyolország + Etiópia + Finnország + Fidzsi + Falkland-szigetek + Mikronézia, Szövetségi Államok + Feröer-szigetek + Franciaország + en + Gabon + Egyesült Királyság + Grenada + Grúzia + Francia Guyana + Ghana + Gibraltár + Grönland + Gambia + Guinea + Guadeloupe + Egyenlítďi Guinea + Görögország + Dél-Georgia és Dél-Sandwich Szigetek + Guatemala + Guam + Guinea-Bissau + Guyana + Hong Kong S.A.R., China + Heard és McDonald Szigetek + Honduras + Horvátország + Haiti + Magyarország + Indonézia + Írország + Izrael + India + Brit Indiai Oceán + Irak + Irán + Izland + Olaszország + Jamaica + Jordánia + Japán + Kenya + Kirgizisztán + Kambodzsa + Kiribati + Comore-szigetek + Saint Kitts és Nevis + Korea, Észak + Korea, Dél + Kuwait + Kajmán-szigetek + Kazahsztán + Laoszi Népi Demokratikus Köztársaság + Libanon + Saint Lucia + Liechtenstein + Sri Lanka + Libéria + Lesotho + Litvánia + Luxemburg + Lettország + Líbiai Arab Jamahiriya + Marokkó + Monaco + Moldáv Köztársaság + Madagaszkár + Marshall-szigetek + Macedónia, Köztársaság + Mali + Mianmar + Mongólia + Macao S.A.R., China + Északi 
Mariana-szigetek + Martinique (francia) + Mauritánia + Montserrat + Málta + Mauritius + Maldív-szigetek + Malawi + Mexikó + Malajzia + Mozambik + Namíbia + Új Kaledónia (francia) + Niger + Norfolk-sziget + Nigéria + Nicaragua + Hollandia + Norvégia + Nepál + Nauru + Niue + Új-Zéland + Omán + Panama + Peru + Polinézia (francia) + Pápua Új-Guinea + Fülöp-szigetek + Pakisztán + Lengyelország + Saint Pierre és Miquelon + Pitcairn-sziget + Puerto Rico + Palesztín Terület + Portugália + Palau + Paraguay + Katar + Reunion (francia) + Románia + Orosz Köztársaság + Ruanda + Szaud-Arábia + Salamon-szigetek + Seychelles + Szudán + Svédország + Szingapúr + Saint Helena + Szlovénia + Svalbard és Jan Mayen + Szlovákia + Sierra Leone + San Marino + Szenegál + Szomália + Serbia + Suriname + Saint Tome és Principe + El Salvador + Szíriai Arab Köztársaság + Szváziföld + Török és Caicos Szigetek + Csád + Francia Déli Területek + Togo + Thaiföld + Tadzsikisztán + Tokelau + Kelet-Timor + Türkmenisztán + Tunézia + Tonga + Törökország + Trinidad és Tobago + Tuvalu + Taiwan + Tanzánia + Ukrajna + Uganda + United States Minor Outlying Islands + Egyesült Államok + Uruguay + Üzbegisztán + Vatikán + Saint Vincent és Grenadines + Venezuela + Brit Virgin-szigetek + U.S. Virgin-szigetek + Vietnám + Vanuatu + Wallis és Futuna Szigetek + Szamoa + Jemen + Mayotte + Jugoszlávia + Dél-Afrika + Zambia + Zimbabwe + + + + [a-z á é í ó ú ö ü ő ű {cs} {dz} {dzs} {gy} {ly} {ny} {sz} {ty} {zs} {ccs} {ddz} {ddzs} {ggy} {lly} {nny} {ssz} {tty} {zzs}] + + + GanjkHmsSEDFwWxhKzAeugXZ + + + + + BK + + + + + + + + jan. + febr. + márc. + ápr. + máj. + jún. + júl. + aug. + szept. + okt. + nov. + dec. + + + január + február + március + április + május + június + július + augusztus + szeptember + október + november + december + + + + + + + V + H + K + Sze + Cs + P + Szo + + + vasárnap + hétfő + kedd + szerda + csütörtök + péntek + szombat + + + + + + + + DE + DU + + + i.e. + i.u. + + + + + + + yyyy. MMMM d. 
+ + + + + yyyy. MMMM d. + + + + + yyyy.MM.dd. + + + + + yyyy.MM.dd. + + + + + + + + H:mm:ss z + + + + + H:mm:ss z + + + + + H:mm:ss + + + + + H:mm + + + + + + + {1} {0} + + + + + + + + + Tisri + Hesván + Kiszlév + Tévész + Svát + Ádár risón + Ádár séni + Niszán + Ijár + Sziván + Tamuz + Áv + Elul + + + Tisri + Hesván + Kiszlév + Tévész + Svát + Ádár risón + Ádár séni + Niszán + Ijár + Sziván + Tamuz + Áv + Elul + + + + + + + + + + + + + + Moharrem + Safar + Rébi el avvel + Rébi el accher + Dsemádi el avvel + Dsemádi el accher + Redseb + Sabán + Ramadán + Sevvál + Dsül kade + Dsül hedse + + + Moharrem + Safar + Rébi el avvel + Rébi el accher + Dsemádi el avvel + Dsemádi el accher + Redseb + Sabán + Ramadán + Sevvál + Dsül kade + Dsül hedse + + + + + + MF + + + + + + + + Moharrem + Safar + Rébi el avvel + Rébi el accher + Dsemádi el avvel + Dsemádi el accher + Redseb + Sabán + Ramadán + Sevvál + Dsül kade + Dsül hedse + + + Moharrem + Safar + Rébi el avvel + Rébi el accher + Dsemádi el avvel + Dsemádi el accher + Redseb + Sabán + Ramadán + Sevvál + Dsül kade + Dsül hedse + + + + + + MF + + + + + + + + , +   + ; + % + 0 + # + + + - + E + + + + + + + HUF + Ft + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/hu_HU.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/hu_HU.xml new file mode 100644 index 0000000..29e6aa9 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/hu_HU.xml @@ -0,0 +1,40 @@ + + + + + + + + + + + + + + #,##0.###;-#,##0.### + + + + + + + #E0 + + + + + + + #,##0% + + + + + + + #,##0.00 ¤;-#,##0.00 ¤ + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/hy.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/hy.xml new file mode 100644 index 0000000..21ffc5c --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/hy.xml @@ -0,0 +1,380 @@ + + + + + + + + + + + Հայերէն + + + Անդորա + Միացյալ Արաբական Էմիրաթներ 
+ Աֆղանստան + Անտիգուա-Բարբուդա + Ալբանիա + Հայաստանի Հանրապետութիւն + Անգոլա + Արգենտինա + Ավստրիա + Ավստրալիա + Ադրբեջան + Բոսնիա-Հերցեգովինա + Բարբադոս + Բանգլադեշ + Բելգիա + Բուրկինա Ֆասո + Բուլղարիա + Բահրեյն + Բուրունդի + Բենին + Բրունեյ + Բոլիվիա + Բրազիլիա + Բահամներ + Բուտան + Բոտսվանա + Բելոռուս + Բելիզ + Կանադա + Կենտրոնական Աֆրիկյան Հանրապետություն + Կոնգո + Շվեյցարիա + Փղոսկրի Ափ + Չիլի + Կամերուն + Չինաստան + Կոլումբիա + Կոստա-Ռիկա + Կուբա + Կաբո-Վերդե + Կիպրոս + Չեխիայի Հանրապետություն + Գերմանիա + Ջիբուտի + Դանիա + Դոմինիկա + Դոմինիկյան Հանրապետություն + Ալժիր + Էկվադոր + Էստոնիա + Եգիպտոս + Արեվմտյան Սահարա + Էրիտրեա + Իսպանիա + Եթովպիա + Ֆինլանդիա + Ֆիջի + Միկրոնեզիա + Ֆրանսիա + Գաբոն + Մեծ Բրիտանիա + Գրենադա + Վրաստան + Գանա + Գամբիա + Գվինեա + Հասարակածային Գվինեա + Հունաստան + Գվատեմալա + Գվինեա-Բիսաու + Գայանա + Հոնդուրաս + Հորվաթիա + Հաիթի + Հունգարիա + Ինդոնեզիա + Իռլանդիա + Իսրայել + Հնդկաստան + Իրաք + Իրան + Իսլանդիա + Իտալիա + Ջամայկա + Հորդանան + Ճապոնիա + Քենիա + Կիրգիզստան + Կամբոջա + Կիրիբատի + Կոմորոս + Սենտ Կիտս-Նեվիս + Հյուսիսային Կորեա + Հարավային Կորեա + Քուվեյթ + Ղազախստան + Լաոս + Լիբանան + Սանտա Լուչիա + Լիխտենշտեյն + Շրի Լանկա + Լիբերիա + Լեսոտո + Լիտվա + Լյուքսեմբուրգ + Լատվիա + Լիբիա + Մարոկո + Մոնակո + Մոլդովա + Մադագասկար + Մարշալյան կղզիներ + Մակեդոնիա + Մալի + Մյանմա + Մոնղոլիա + Մավրիտանիա + Մալթա + Մավրիտոս + Մալդիվներ + Մալավի + Մեքսիկա + Մալայզիա + Մոզամբիկ + Նամիբիա + Նիգեր + Նիգերիա + Նիկարագուա + Նիդերլանդեր + Նորվեգիա + Նեպալ + Նաուրու + Նոր Զելանդիա + Օման + Պանամա + Պերու + Պապուա Նոր Գվինեա + Ֆիլիպիններ + Պակիստան + Լեհաստան + Պորտուգալիա + Պալաու + Պարագվայ + Կատար + Ռումինիա + Ռուսաստան + Ռուանդա + Սաուդիան Արաբիա + Սոլոմոնյան կղզիներ + Սեյշելներ + Սուդան + Շվեդիա + Սինգապուր + Սլովենիա + Սլովակիա + Սյերա-Լեոնե + Սան Մարինո + Սենեգալ + Սոմալի + Սերբիա + Սուրինամ + Սան-Թոմե-Փրինսիպի + Սալվադոր + Սիրիա + Սվազիլենդ + Չադ + Տոգո + Թաիլանդ + Տաճիկստան + Թուրքմենստան + Թունիս + Տոնգա + Թուրքիա + Տրինիդադ-Տոբագո + 
Տուվալու + Թայվան + Տանզանիա + Ուկրաինա + Ուգանդա + Ամէրիկայի Միացյալ Նահանգնէր + Ուրուգվայ + Ուզբեկստան + Վատիկան + Սենտ Վիսենտ-Գրենադիններ + Վենեսուելա + Վիետնամ + Վանուատու + Սամոա + Եմեն + Հարավային Աֆրիկա + Զամբիա + Զիմբաբվե + + + + [:Armn:] + + + + + + + + Յնր + Փտր + Մրտ + Ապր + Մյս + Յնս + Յլս + Օգս + Սեպ + Հոկ + Նոյ + Դեկ + + + Յունուար + Փետրուար + Մարտ + Ապրիլ + Մայիս + Յունիս + Յուլիս + Օգոստոս + Սեպտեմբեր + Հոկտեմբեր + Նոյեմբեր + Դեկտեմբեր + + + + + + + Կիր + Երկ + Երք + Չոր + Հնգ + Ուր + Շաբ + + + Կիրակի + Երկուշաբթի + Երեքշաբթի + Չորեքշաբթի + Հինգշաբթի + Ուրբաթ + Շաբաթ + + + + + + + + Առ․ + Եր․ + + + Յ․Տ․ + Ն․Ք․ + + + + + + + EEEE, MMMM d, yyyy + + + + + MMMM dd, yyyy + + + + + MMM d, yyyy + + + + + MM/dd/yy + + + + + + + + HH:mm:ss z + + + + + HH:mm:ss z + + + + + HH:mm:ss + + + + + HH:mm + + + + + + + {1} {0} + + + + + + + + + , + . + ; + % + 0 + # + + + - + E + + + + + + + + #0.###;-#0.### + + + + + + + #E0 + + + + + + + #0% + + + + + + + #0.00 ¤;-#0.00 ¤ + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/hy_AM.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/hy_AM.xml new file mode 100644 index 0000000..f674042 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/hy_AM.xml @@ -0,0 +1,22 @@ + + + + + + + + + + + + + + + Ք․Ա․ + Ք․Ե․ + + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/hy_AM_REVISED.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/hy_AM_REVISED.xml new file mode 100644 index 0000000..969885d --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/hy_AM_REVISED.xml @@ -0,0 +1,57 @@ + + + + + + + + + + + + + + + + + Հնվ + Փտվ + Մրտ + Ապր + Մյս + Հնս + Հլս + Օգս + Սեպ + Հոկ + Նոյ + Դեկ + + + Հունվար + Փետրվար + Մարտ + Ապրիլ + Մայիս + Հունիս + Հուլիս + Օգոստոս + Սեպտեմբեր + Հոկտեմբեր + Նոյեմբեր + Դեկտեմբեր + + + + Առ․ + Կե․ + + + Մ․Թ․Ա․ + Մ․Թ․ + + + + + + diff --git 
a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/id.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/id.xml new file mode 100644 index 0000000..5e34866 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/id.xml @@ -0,0 +1,590 @@ + + + + + + + + + + + Afar + Abkhaz + Aceh + Avestan + Afrikaans + Afro-Asiatik (Lainnya) + Akan + Akkadien + Bahasa Algonquia + Amharik + Inggris Kuno (kl.450-1100) + Bahasa-bahasa Apache + Arab + Aram + Araucan + Buatan (Lainnya) + Assam + Astur + Bahasa-bahasa Athapaska + Bahasa-bahasa Australia + Avarik + Aymara + Azerbaijan + Bashkir + Bahasa-bahasa Bamileke + Balin + Baltik (Lainnya) + Belarusia + Bulgaria + Bihari + Bislama + Bambara + Bengal + Tibet + Breton + Bosnia + Bugis + Catalan + India Amerika Tengah (Lainnnya) + Karib + Kaukasia (Lainnya) + Chechen + Celtic (Lainnya) + Chamorro + Chuuke + Korsika + Koptik + Cree + Ceko + Chuvash + Welsh + Denmark + Jerman + Divehi + Dzongkha + Ewe + Mesir Kuno + Yunani + Inggris + Inggris, Abad Pertengahan (1100-1500) + Esperanto + Spanyol + Estonian + Basque + Persia + Fulah + Finlandia + Fiji + Faro + Perancis + Perancis, Abad Pertengahan (kl.1400-1600) + Perancis Kuno (842-kl.1400) + Friuli + Frisi + Irlandia + Gaelik Skotlandia + Jermanik (Lainnya) + Gilbert + Gallegan + Jerman, Abad Pertengahan (kl.1050-1500) + Guarani + Jerman Kuno (kl.750-1050) + Gothik + Yunani Kuno (sd 1453) + Gujarati + Manx + Hausa + Hawaii + Ibrani + Hindi + Hiri Motu + Kroasia + Hungaria + Armenia + Herero + Interlingua + Bahasa Indonesia + Interlingue + Igbo + Sichuan Yi + Inupiaq + Ido + Icelandic + Italian + Japanese + Judeo-Persia + Judeo-Arab + Jawa + Georgian + Kongo + Kikuyu + Kuanyama + Kazakh + Kalaallisut + Khmer + Kannada + Korea + Konkani + Kosrae + Kanuri + Kashmir + Kurdi + Komi + Cornish + Kirghiz + Latin + Luxembourg + Lezghia + Ganda + Limburg + Lingala + Lao + Lithuania + Luba-Katanga + Latvian + Madura + Makassar + Austronesia + 
Malagasi + Irlandia Abad Pertengahan (900-1200) + Marshall + Maori + Bahasa Lain-lain + Macedonian + Mon-Khmer (Lainnya) + Malayalam + Mongolian + Moldavian + Marathi + Malay + Maltese + Burma + Nauru + Norwegian Bokmål + Nepal + Ndonga + Belanda + Norwegian Nynorsk + Norwegian + Navajo + Nyanja; Chichewa; Chewa + Ojibwa + Oromo + Oriya + Ossetic + Punjabi + Papuan (Lainnya) + Persia Kuno (kl.600-400 SM.) + Filipina (Lainnya) + Pali + Polish + Pashto (Pushto) + Portugis + Quechua + Rhaeto-Romance + Rundi + Romanian + Russian + Kinyarwanda + Sanskrit + Sardinian + Sindhi + Northern Sami + Sango + Serbo-Croatian + Sinhalese + Slovak + Slovenian + Samoan + Shona + Somali + Albanian + Serbian + Swati + Sundan + Sumeria + Swedia + Swahili + Syria + Tamil + Telugu + Tajik + Thai + Tigrinya + Turkmen + Tagalog + Tswana + Turkish + Tsonga + Tatar + Twi + Tahitian + Uighur + Ukrainian + Urdu + Uzbek + Venda + Vietnamese + Volapük + Walloon + Wolof + Xhosa + Yiddish + Yoruba + Zhuang + Cina + Zulu + + + Andora + Uni Emirat Arab + Afghanistan + Antigua dan Barbuda + Anguilla + Albania + Armenia + Antilles Belanda + Angola + Antarktika + Argentina + Samoa Amerika + Austria + Australia + Aruba + Azerbaijan + Bosnia dan Herzegovina + Barbados + Bangladesh + Belgia + Burkina Faso + Bulgaria + Bahrain + Burundi + Benin + Bermuda + Brunei + Bolivia + Brazil + Bahamas + Bhutan + Kepulauan Bouvet + Botswana + Belarusia + Belize + Kanada + Kepulauan Cocos + Republik Demokratik Kongo + Republik Afrika Tengah + Kongo + Swiss + Pantai Gading + Kepulauan Cook + Chili + Kamerun + Cina + Kolombia + Kosta Rika + Kuba + Tanjung Verde + Pulau Christmas + Siprus + Republik Ceko + Jerman + Jibouti + Denmark + Dominika + Republik Dominika + Algeria + Ekuador + Estonia + Mesir + Sahara Barat + Eritrea + Spanyol + Ethiopia + Finlandia + Fiji + Kepulauan Falkland + Mikronesia + Kepulauan Faroe + Perancis + Gabon + Inggris Raya + Grenada + Georgia + Guyana Perancis + Ghana + Gibraltar + Greenland + 
Gambia + Guinea + Guadeloupe + Guinea Khatulistiwa + Yunani + Georgia Selatan dan Kepulauan Sandwich Selatan + Guatemala + Guam + Guinea-Bissau + Guyana + Hong Kong S.A.R., Cina + Pulau Heard dan Kepulauan McDonald + Honduras + Kroasia + Haiti + Hungaria + Indonesia + Irlandia + Israel + India + Iraq + Iran + Islandia + Itali + Jamaika + Yordania + Jepang + Kenya + Kyrgyzstan + Kamboja + Kiribati + Komoros + Saint Kitts dan Nevis + Korea Utara + Korea Selatan + Kuwait + Kepulauan Kayman + Kazakhstan + Laos + Lebanon + Santa Lusia + Liechtenstein + Sri Lanka + Liberia + Lesotho + Lithuania + Luxembourg + Latvia + Libya + Maroko + Monaco + Moldova + Madagaskar + Kepulauan Marshall + Macedonia + Mali + Myanmar + Mongolia + Makao S.A.R. Cina + Kepualuan Mariana Utara + Martinique + Mauritania + Montserrat + Malta + Mauritius + Maldives + Malawi + Mexico + Malaysia + Mozambique + Namibia + Kaledonia Baru + Niger + Kepulauan Norfolk + Nigeria + Nicaragua + Netherlands + Norwegia + Nepal + Nauru + Niue + Selandia Baru + Oman + Panama + Peru + Polynesia Perancis + Papua Nugini + Filipina + Pakistan + Polandia + Saint Pierre dan Miquelon + Pitcairn + Puerto Riko + Otoritas Palestina + Portugis + Palau + Paraguay + Qatar + Réunion + Romania + Rusia + Rwanda + Arab Saudi + Kepulauan Solomon + Seychelles + Sudan + Sweden + Singapura + Saint Helena + Slovenia + Svalbard dan Jan Mayen + Slovakia + Sierra Leone + San Marino + Senegal + Somalia + Serbia + Suriname + Sao Tome dan Principe + El Salvador + Syria + Swaziland + Chad + Togo + Thailand + Tajikistan + Tokelau + Turkmenistan + Tunisia + Tonga + Turkey + Trinidad dan Tobago + Tuvalu + Taiwan + Tanzania + Ukraina + Uganda + Amerika Serikat + Uruguay + Uzbekistan + Vatikan + Saint Vincent dan Grenadines + Venezuela + Kepulauan British Virgin + Kepulauan U.S. 
Virgin + Vietnam + Vanuatu + Wallis dan Futuna + Samoa + Yaman + Mayotte + Yugoslavia + Afrika Selatan + Zambia + Zimbabwe + + + + [a-z] + + + + + + + + Jan + Feb + Mar + Apr + Mei + Jun + Jul + Agu + Sep + Okt + Nov + Des + + + Januari + Februari + Maret + April + Mei + Juni + Juli + Agustus + September + Oktober + November + Desember + + + + + + + Min + Sen + Sel + Rab + Kam + Jum + Sab + + + Minggu + Senin + Selasa + Rabu + Kamis + Jumat + Sabtu + + + + + + + + + , + . + ; + % + 0 + # + + + - + E + + + + + + + + #,##0.###;-#,##0.### + + + + + + + #E0 + + + + + + + #,##0% + + + + + + + ¤#,##0.00;-¤#,##0.00 + + + + + + IDR + Rp + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/id_ID.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/id_ID.xml new file mode 100644 index 0000000..7937a80 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/id_ID.xml @@ -0,0 +1,69 @@ + + + + + + + + + + + + + + + + + EEEE dd MMMM yyyy + + + + + dd MMMM yyyy + + + + + dd MMM yy + + + + + dd/MM/yy + + + + + + + + H:mm:ss + + + + + H:mm:ss + + + + + H:mm:ss + + + + + H:mm + + + + + + + {1} {0} + + + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/is.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/is.xml new file mode 100644 index 0000000..f1470d3 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/is.xml @@ -0,0 +1,304 @@ + + + + + + + + + + + íslenska + + + Sameinte arabísku fyrstadæmin + Afganistan + Antigua og Barbuda + Albanía + Armenía + Angóla + Argentína + Austurríki + Ástralía + Bosnía-Hersegovína + Belgía + Búlgaría + Búrúndí + Bólivía + Brasilía + Bahama eyjar + Bhútan + Hvíta Rússland + Kanada + Mið-Afríku lyðveldið + Kongó + Sviss + Fílabeinsströndin + Kamerún + Kína + Kólumbía + Kostaríka + Kúba + Grænhöfðaeyjar + Kýpur + Tékkland + Þýskaland + Danmörk + Dóminíska lýðveldið + Alsír + Ekvador + Eistland + Egyptaland + 
Vestur-Sahara + Erítrea + Spánn + Eþíópía + Finnland + Mikrónesía + Frakkland + Stóra Bretland + Georgía + Gana + Gínea + Miðbaugs Gínea + Grikkland + Gvatemala + Gínea-Bissau + Hondúras + Króatía + Haítí + Ungverjaland + Indónesía + Írland + Ísrael + Indland + Írak + Íran + Ísland + Ítalía + Jamaíka + Jórdanía + Kenýa + Kirgisistan + Kambódía + Kiribatí + Comoro eyjar + Saint Kitts og Nevis + Norður-Kórea + Suður-Kórea + Kúveit + Kasakstan + Líbanon + Saint Lúsia + Líbería + Lesótó + Litháen + Lúxemborg + Lettland + Lýbía + Marokkó + Mónacó + Moldovía + Madagaskar + Marshalleyjar + Makedónía + Mongólía + Máritanía + Máritíus + Maldíveyjar + Mexíkó + Malasía + Mósambík + Namibía + Níger + Nígería + Níkaragúa + Niðurlönd + Noregur + Nýja-Sjáland + Óman + Perú + Papúa Nýja Gínea + Filipseyjar + Pólland + Portúgal + Palaueyjar + Paragvæ + Katar + Rúmenía + Rússland + Rúanda + Sádi Arabía + Salómons eyjar + Seychelle eyjar + Súdan + Svíþjoð + Slóvenía + Slóvakía + Sómalía + Serbía + Súrinam + Sao Tome og Prinsípe + Sýrland + Svasiland + Tæland + Tadjikistan + Túrkmenistan + Túnis + Tyrkland + Trinidad og Tobago + Túvalú + Tævan + Tansanía + Úkraína + Úganda + Bandaríki Norður-Ameríku + Úrúgvæ + Úsbekistan + Vatíkanið + Saint Vinsent og Grenadíneyjar + Venesúela + Samóa + Jemen + Suður-Afríka + + + + [a-záéíóúýöæðþ] + + + + + + + + jan + feb + mar + apr + maí + jún + júl + ágú + sep + okt + nóv + des + + + janúar + febrúar + mars + apríl + maí + júní + júlí + ágúst + september + október + nóvember + desember + + + + + + + sun + mán + þri + mið + fim + fös + lau + + + sunnudagur + mánudagur + þriðjudagur + miðvikudagur + fimmtudagur + föstudagur + laugardagur + + + + + + + + EEEE, d. MMMM yyyy + + + + + d. MMMM yyyy + + + + + d.M.yyyy + + + + + d.M.yyyy + + + + + + + + HH:mm:ss z + + + + + HH:mm:ss z + + + + + HH:mm:ss + + + + + HH:mm + + + + + + + {1} {0} + + + + + + + + + , + . + ; + % + 0 + # + + + - + E + + + + + + + ISK + kr. 
+ + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/is_IS.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/is_IS.xml new file mode 100644 index 0000000..0417aa9 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/is_IS.xml @@ -0,0 +1,40 @@ + + + + + + + + + + + + + + #,##0.###;-#,##0.### + + + + + + + #E0 + + + + + + + #,##0% + + + + + + + #,##0.00 ¤;-#,##0.00 ¤ + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/it.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/it.xml new file mode 100644 index 0000000..7305135 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/it.xml @@ -0,0 +1,2808 @@ + + + + + + + + + + + afar + abkhazian + accinese + acioli + adangme + adyghe + avestan + afrikaans + afro-asiatica (altra lingua) + afrihili + akan + accado + aleuto + lingue algonchine + amarico + aragonese + inglese, antico (ca.450-1100) + lingue apache + arabo + aramaico + araucano + arapaho + artificiale (altra lingua) + aruaco + assamese + asturiano + lingue athabaska + lingue australiane + avaro + awadhi + aymara + azerbaigiano + baschiro + banda + lingue bamileke + beluci + bambara + balinese + basa + baltica (altra lingua) + bielorusso + begia + wemba + berbero + bulgaro + bihari + bhojpuri + bislama + bicol + bini + siksika + bambara + bengalese + bantu + tibetano + bretone + braj + bosniaco + batak + buriat + bugi + blin + catalano + caddo + indiana dell’America Centrale (altra lingua) + caribico + caucasica (altra lingua) + ceceno + cebuano + celtica altra lingua + chamorro + chibcha + ciagataico + chuukese + mari + gergo chinook + choctaw + chipewyan + cherokee + cheyenne + lingue chamic + corso + copto + creolo e pidgins, basato sull’inglese (altra lingua) + creolo e pidgins, basato sul francese (altra lingua) + creolo e pidgins, basato sul portoghese (altra lingua) + cree + turco crimeo; tatar crimeo + creola e 
Pidgins (altra lingua) + ceco + kashubian + slavo della Chiesa + cuscitica (altra lingua) + chuvash + gallese + danese + dakota + dargwa + dayak + tedesco + delaware + slave + dogrib + dinca + dogri + dravidica (altra lingua) + basso sorabo + duala + olandese, medio (ca. 1050-1350) + divehi + diula + dzongkha + ewe + efik + egiziano (antico) + ekajuka + greco + elamitico + inglese + inglese, medio (1100-1500) + esperanto + spagnolo + estone + basco + ewondo + persiano + fanti + fulah + finlandese + ugrofinnica (altra lingua) + figiano + faroese + francese + francese, medio (ca.1400-1600) + francese, antico (842-ca.1400) + friulano + frisone + irlandese + ga + gayo + gbaya + gaelico scozzese + germanica (altra lingua) + geez + gilbertese + galiziano + tedesco, medio alto (ca.1050-1500) + guarana + tedesco, antico alto (ca.750-1050) + gondi + gorontalo + gotico + gerbo + greco, antico (fino al 1453) + gujarati + manx + gwichʻin + haussa + haida + hawaiano + ebraico + hindi + hiligayna + himachali + hittite + hmong + hiri motu + croato + alto sorabo + haitian + ungherese + hupa + armeno + herero + interlingua + iban + indonesiano + interlingue + igbo + sichuan yi + inupiak + ilocano + indiana (altra lingua) + indoeuropea (altra lingua) + ingush + ido + iraniana + lingue irochesi + islandese + italiano + inuktitut + giapponese + lojban + giudeo persiano + giudeo arabo + giavanese + georgiano + kara-kalpak + kabyle + kachin + kamba + karen + kawi + kabardia + kongo + khasi + khoisan (altra lingua) + khotanese + kikuyu + kuanyama + kazako + kalaallisut + khmer + kimbundu + kannada + coreano + konkani + Kosraean + kpelle + kanuri + karachay-Balkar + kru + kurukh + kashmiri + curdo + kumyk + kutenai + komi + cornico + kirghiso + latino + ladino + lahnda + lamba + lussemburghese + lezghian + ganda + limburgish + lingala + lao + lolo (bantu) + lozi + lituano + luba-katanga + luba-lulua + luiseno + lunda + luo (Kenia e Tanzania) + lushai + lettone + madurese + magahi + 
maithili + makasar + mandingo + austronesiano + masai + moksha + mandar + mende + malagasy + irlandese medio (900-1200) + marshallese + maori + micmac + menangkabau + lingue diverse + macedone + mon-khmer (altra lingua) + malayalam + mongolo + manchu + manipuri + manobo + moldavo + mohawk + mossi + marathi + malay + maltese + multilingua + munda (altra lingua) + creek + marwari + burmese + lingue maya + erzya + nauru + nahuatl + indiano del Nord America (altra lingua) + napoletano + norvegese bokmål + ndebele del nord + basso tedesco; basso sassone + nepali + newari + ndonga + niger - cordofan (altra lingua) + niue + olandese + norvegese nynorsk + norvegese + nogai + norse antico + ndebele del sud + sotho del nord + nubiano + navajo + nyanja; chichewa; chewa + nyamwezi + nyankole + nyoro + nzima + occitano (post 1500); provenzale + ojibwa + oromo + oriya + ossetico + osage + turco ottomano (1500-1928) + lingue otomi + punjabi + papuano-australiano (altra lingua) + pangasinan + pahlavi + pampanga + papiamento + palau + antico persiano (600-400 A.C.) 
+ filippino (altra lingua) + fenicio + pali + polacco + ponape + pracrito + provenzale, antico (fino al 1500) + pashto + portoghese + quechua + rajasthani + rapanui + rarotonga + lingua rhaeto-romance + rundi + romeno + lingua romanza (altra lingua) + romani + russo + kinyarwanda + sanscrito + sandawe + yakut + indiano del Sud America (altra lingua) + lingue salish + aramaico samaritano + sasak + santali + sardo + scozzese + sindhi + sami del nord + selkup + semitico (altra lingua) + sango + irlandese, antico (fino al ’900) + lingue sign + serbo-croato + shan + singalese + sidamo + lingue sioux + sino-tibetano (altra lingua) + slovacco + sloveno + slavo (altra lingua) + samoano + sami del sud + lingue sami (altra lingua) + sami lule + sami inari + sami skolt + shona + soninke + somalo + sogdiano + songhai + albanese + serbo + serer + swati + nilo-sahariana (altra lingua) + sotho del sud + sundanese + sukuma + susu + sumero + svedese + swahili + siriaco + tamil + tailandese (altra lingua) + telugu + temne + tereno + tetum + tagicco + thai + tigrinya + tigre + turcomanno + tokelau + tagalog + tlingit + tamashek + tswana + tonga (Isole Tonga) + tonga (nyasa) + tok pisin + turco + tsonga + tsimshian + tatarico + tumbuka + lingue tupi + turco-tatarica (altra lingua) + tuvalu + ci + taitiano + tuvinian + udmurt + uigurico + ugaritico + ucraino + mbundu + lingua imprecisata + urdu + usbeco + venda + vietnamita + volapük + voto + walloon + lingue wakash + walamo + waray + washo + sorabo + volof + kalmyk + xosa + yao (bantu) + Yapese + yiddish + yoruba + lingue yupik + zhuang + zapotec + zenaga + cinese + zande + zulu + zuni + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Andorra + Emirati Arabi Uniti + Afghanistan + Antigua e Barbuda + Anguilla + Albania + Armenia + Antille Olandesi + Angola + Antartide + Argentina + Samoa Americane + Austria + Australia + Aruba + Azerbaigian + Bosnia Erzegovina + 
Barbados + Bangladesh + Belgio + Burkina Faso + Bulgaria + Bahrein + Burundi + Benin + Bermuda + Brunei + Bolivia + Brasile + Bahamas + Bhutan + Isola Bouvet + Botswana + Bielorussia + Belize + Canada + Isole Cocos (Keeling) + Repubblica Democratica del Congo + Repubblica Centrafricana + Congo + Svizzera + Costa d’Avorio + Isole Cook + Cile + Camerun + Cina + Colombia + Costa Rica + Cuba + Capo Verde + Isola Christmas + Cipro + Repubblica Ceca + Germania + Gibuti + Danimarca + Dominica + Repubblica Dominicana + Algeria + Ecuador + Estonia + Egitto + Sahara Occidentale + Eritrea + Spagna + Etiopia + Finlandia + Fiji + Isole Falkland + Micronesia + Isole Faroe + Francia + en + Gabon + Regno Unito + Grenada + Georgia + Guiana Francese + Ghana + Gibilterra + Groenlandia + Gambia + Guinea + Guadalupa + Guinea Equatoriale + Grecia + Georgia del Sud e Isole Sandwich del Sud + Guatemala + Guam + Guinea-Bissau + Guyana + Regione Amministrativa Speciale di Hong Kong, Cina + Isole Heard ed Isole McDonald + Honduras + Croazia + Haiti + Ungheria + Indonesia + Irlanda + Israele + India + Territorio Britannico dell’Oceano Indiano + Iraq + Iran + Islanda + Italia + Giamaica + Giordania + Giappone + Kenya + Kirghizistan + Cambogia + Kiribati + Comore + Saint Kitts e Nevis + Corea del Nord + Corea del Sud + Kuwait + Isole Cayman + Kazakistan + Laos + Libano + Saint Lucia + Liechtenstein + Sri Lanka + Liberia + Lesotho + Lituania + Lussemburgo + Lettonia + Libia + Marocco + Monaco + Moldavia + Madagascar + Isole Marshall + Macedonia, Repubblica + Mali + Myanmar + Mongolia + Regione Amministrativa Speciale di Macao, Cina + Isole Marianne Settentrionali + Martinica + Mauritania + Montserrat + Malta + Mauritius + Maldive + Malawi + Messico + Malesia + Mozambico + Namibia + Nuova Caledonia + Niger + Isola Norfolk + Nigeria + Nicaragua + Paesi Bassi + Norvegia + Nepal + Nauru + Niue + Nuova Zelanda + Oman + Panama + Perù + Polinesia Francese + Papua Nuova Guinea + Filippine + Pakistan + 
Polonia + Saint Pierre e Miquelon + Pitcairn + Portorico + Palestina + Portogallo + Palau + Paraguay + Qatar + Réunion + Romania + Federazione Russa + Ruanda + Arabia Saudita + Isole Solomon + Seychelles + Sudan + Svezia + Singapore + Sant’Elena + Slovenia + Svalbard e Jan Mayen + Slovacchia + Sierra Leone + San Marino + Senegal + Somalia + Serbia + Suriname + São Tomé e Príncipe + El Salvador + Siria + Swaziland + Isole Turks e Caicos + Ciad + Territori australi francesi + Togo + Tailandia + Tagikistan + Tokelau + Timor Est + Turkmenistan + Tunisia + Tonga + Turchia + Trinidad e Tobago + Tuvalu + Taiwan (provincia della Cina) + Tanzania + Ucraina + Uganda + Isole Minori lontane dagli Stati Uniti + Stati Uniti + Uruguay + Uzbekistan + Vaticano + Saint Vincent e Grenadines + Venezuela + Isole Vergini Britanniche + Isole Vergini Americane + Vietnam + Vanuatu + Wallis e Futuna + Samoa + Yemen + Mayotte + Yugoslavia + Sud Africa + Zambia + Zimbabwe + + + Modificato + + + Calendario + Collazione + Valuta + + + Calendario Buddista + Calendario Cinese + Calendario Gregoriano + Calendario Ebraico + Calendario Islamico + Calendario Civile Islamico + Calendario Giapponese + Ordine Diretto + Ordine Elenco Telefonico + Ordine Pinyin + Ordine Segni + Ordine Tradizionale + + + + [a-zéóàèìòíúùï] + + + + + + + + gen + feb + mar + apr + mag + giu + lug + ago + set + ott + nov + dic + + + G + F + M + A + M + G + L + A + S + O + N + D + + + gennaio + febbraio + marzo + aprile + maggio + giugno + luglio + agosto + settembre + ottobre + novembre + dicembre + + + + + + + dom + lun + mar + mer + gio + ven + sab + + + D + L + M + M + G + V + S + + + domenica + lunedì + martedì + mercoledì + giovedì + venerdì + sabato + + + + + + + + m. + p. 
+ + + aC + dC + + + + + + + EEEE d MMMM yyyy + + + + + dd MMMM yyyy + + + + + dd/MMM/yy + + + + + dd/MM/yy + + + + + + + + HH:mm:ss z + + + + + HH:mm:ss z + + + + + HH:mm:ss + + + + + HH:mm + + + + + + + {1} {0} + + + + + + + + + Ora Standard Pacifico + Ora Legale Pacifico + + + PST + PDT + + Los Angeles + + + + Ora Standard Pacifico + Ora Legale Pacifico + + + PST + PDT + + Los Angeles + + + + Ora Standard Mountain + Ora Legale Mountain + + + MST + MDT + + Denver + + + + Ora Standard Mountain + Ora Legale Mountain + + + MST + MDT + + Denver + + + + Ora Standard Mountain + Ora Standard Mountain + + + MST + MST + + Phoenix + + + + Ora Standard Mountain + Ora Standard Mountain + + + MST + MST + + Phoenix + + + + Ora Standard Centrale + Ora Legale Centrale + + + CST + CDT + + Chicago + + + + Ora Standard Centrale + Ora Legale Centrale + + + CST + CDT + + Chicago + + + + Ora Standard Orientale + Ora Legale Orientale + + + EST + EDT + + New York + + + + Ora Standard Orientale + Ora Legale Orientale + + + EST + EDT + + New York + + + + Ora Standard Orientale + Ora Standard Orientale + + + EST + EST + + Indianapolis + + + + Ora Standard Orientale + Ora Standard Orientale + + + EST + EST + + Indianapolis + + + + Ora Standard Hawaii + Ora Standard Hawaii + + + HST + HST + + Honolulu + + + + Ora Standard Hawaii + Ora Standard Hawaii + + + HST + HST + + Honolulu + + + + Ora Standard Alaska + Ora Legale Alaska + + + AST + ADT + + Anchorage + + + + Ora Standard Alaska + Ora Legale Alaska + + + AST + ADT + + Anchorage + + + + Ora Standard Atlantico + Ora Legale Atlantico + + + AST + ADT + + Halifax + + + + Ora Standard Newfoundland + Ora Legale Newfoundland + + + CNT + CDT + + St. Johns + + + + Ora Standard Newfoundland + Ora Legale Newfoundland + + + CNT + CDT + + St. 
Johns + + + + Ora Standard Centrale Europeo + Ora Legale Centrale Europeo + + + CET + CEST + + Parigi + + + + Ora Standard Centrale Europeo + Ora Legale Centrale Europeo + + + CET + CEST + + Parigi + + + + Ora Meridiano Greenwich + Ora Meridiano Greenwich + + + GMT + GMT + + Londra + + + + Ora Meridiano Greenwich + Ora Meridiano Greenwich + + + GMT + GMT + + Casablanca + + + + Ora Standard Israele + Ora Legale Israele + + + IST + IDT + + Gerusalemme + + + + Ora Standard Giappone + Ora Standard Giappone + + + JST + JST + + Tokyo + + + + Ora Standard Giappone + Ora Standard Giappone + + + JST + JST + + Tokyo + + + + Ora Standard Europa Orientale + Ora Legale Europa Orientale + + + EET + EEST + + Bucarest + + + + Ora Standard Cina + Ora Standard Cina + + + CTT + CDT + + Shanghai + + + + Ora Standard Cina + Ora Standard Cina + + + CTT + CDT + + Shanghai + + + + + + , + . + ; + % + 0 + # + + + - + E + + + + + + + Diner Andorrano + ADD + + + Peseta Andorrana + ADP + + + Dirham degli Emirati Arabi Uniti + AED + + + Afgani (1927-2002) + AFA + + + Afgani + Af + + + Franco di Affars e Issas + AIF + + + Lek Albanese (1946-1961) + ALK + + + Lek Albanese + lek + + + Lek Valute Albanese + ALV + + + Certificati di cambio esteri albanesi in dollari + ALX + + + Dram Armeno + dram + + + Fiorino delle Antille Olandesi + NA f. + + + Kwanza Angolano + AOA + + + Kwanza Angolano (1977-1990) + AOK + + + Nuovo Kwanza Angolano (1990-2000) + AON + + + Kwanza Reajustado Angolano (1995-1999) + AOR + + + Escudo Angolano + AOS + + + Austral Argentino + ARA + + + Peso Moneda Nacional argentino + ARM + + + Peso Argentino (vecchio Cod.) + ARP + + + Peso Argentino + ARS + + + Scellino Austriaco + ATS + + + Dollaro Australiano + AUD + + + Sterlina Australiana + AUP + + + Fiorino di Aruba + AWG + + + Manat Azero + AZM + + + Dinar Bosnia-Herzegovina + BAD + + + Marco Conv. 
Bosnia-Erzegovina + KM + + + Nuovo Dinar Bosnia-Herzegovina + BAN + + + Dollaro di Barbados + BDS$ + + + Taka Bangladese + Tk + + + Franco Belga (convertibile) + BEC + + + Franco Belga + BEF + + + Franco Belga (finanziario) + BEL + + + Lev Bulgaro + BGL + + + Lev Bulgaro Socialista + BGM + + + Nuovo Lev Bulgaro + lev + + + Lev Bulgaro (1879-1952) + BGO + + + Certificati di cambio esteri in Lev bulgari + BGX + + + Dinaro del Bahraini + BD + + + Franco del Burundi + Fbu + + + Dollaro delle Bermuda + Ber$ + + + Sterlina delle Bermuda + BMP + + + Dollaro del Brunei + BND + + + Boliviano + BOB + + + Boliviano (1863-1962) + BOL + + + Peso Boliviano + BOP + + + Mvdol Boliviano + BOV + + + Cruzeiro Novo Brasiliano (1967-1986) + BRB + + + Cruzado Brasiliano + BRC + + + Cruzeiro Brasiliano (1990-1993) + BRE + + + Real Brasiliano + BRL + + + Cruzado Novo Brasiliano + BRN + + + Cruzeiro Brasiliano + BRR + + + Cruzeiro Brasiliano (1942-1967) + BRZ + + + Dollaro delle Bahamas + BSD + + + Libra delle Bahamas + BSP + + + Ngultrum Butanese + Nu + + + Rupia Butanese + BTR + + + Kyat Birmano + BUK + + + Rupia Birmana + BUR + + + Pula del Botswana + BWP + + + Nuovo Rublo Bielorussia (1994-1999) + BYB + + + Rublo Bielorussia (1992-1994) + BYL + + + Rublo Bielorussia + Rbl + + + Dollaro Belize + BZ$ + + + Dollaro Honduras Britannico + BZH + + + Dollaro Canadese + CAD + + + Franco Congolese + CDF + + + Franco della Repubblica Congolese + CDG + + + Zaire Congolese + CDL + + + Franco CFA della Repubblica dell’Africa Centrale + CFF + + + Franco Svizzero + SFr. 
+ + + Dollaro delle Isole Cook + CKD + + + Condor Cileno + CLC + + + Escudo Cileno + CLE + + + Unidades de Fomento Chilene + CLF + + + Peso Cileno + CLP + + + Franco CFA del Camerun + CMF + + + Jen Min Piao Yuan Cinese + CNP + + + Certificati di cambio esteri cinesi in dollari statunitensi + CNX + + + Renmimbi Cinese + CNY + + + Peso in banconote colombiano + COB + + + Franco CFA del Congo + COF + + + Peso Colombiano + Col$ + + + Colón Costaricano + C + + + Corona Cecoslovacca + CSC + + + Corona forte cecoslovacca + CSK + + + Peso Cubano + CUP + + + Certificati di cambio esteri cubani + CUX + + + Escudo del Capo Verde + CVEsc + + + Guilder del Curacao + CWG + + + Sterlina Cipriota + CYP + + + Corona Ceca + CZK + + + Ostmark della Germania Orientale + DDM + + + Marco Tedesco + DEM + + + Sperrmark Tedesco + DES + + + Franco Gibutiano + DF + + + Corona Danese + DKK + + + Peso Dominicano + RD$ + + + Dinaro Algerino + DA + + + Nuovo franco algerino + DZF + + + Franco germinale algerino + DZG + + + Sucre dell’Ecuador + ECS + + + Unidad de Valor Constante (UVC) dell’Ecuador + ECV + + + Corona dell’Estonia + EEK + + + Sterlina Egiziana + EGP + + + Nakfa Eritreo + ERN + + + Peseta Spagnola + ESP + + + Birr Etiopico + Br + + + Dollaro Etiopico + ETD + + + Euro + + + + Markka Finlandese + FIM + + + Markka Finlandese (1860-1962) + FIN + + + Dollaro delle Figi + F$ + + + Sterlina delle Figi + FJP + + + Sterlina delle Falkland + FKP + + + Kronur delle Isole Faeroe + FOK + + + Franco Francese + FRF + + + Franco germinale/franco Poincare francese + FRG + + + Franco CFA Gabon + GAF + + + Sterlina Inglese + £ + + + Kupon Larit Georgiano + GEK + + + Lari Georgiano + lari + + + Cedi del Ghana + GHC + + + Vecchi cedi del Ghana + GHO + + + Sterlina del Ghana + GHP + + + Cedi rivalutato del Ghana + GHR + + + Sterlina di Gibilterra + GIP + + + Corona della Groenlandia + GLK + + + Dalasi del Gambia + GMD + + + Sterlina del Gambia + GMP + + + Franco della Guinea + GF + + + Franco della 
Guinea (1960-1972) + GNI + + + Syli della Guinea + GNS + + + Franco della Guadalupa + GPF + + + Ekwele della Guinea Equatoriale + GQE + + + Franco della Guinea Equatoriale + GQF + + + Peseta Guineana della Guinea Equatoriale + GQP + + + Dracma Greca + GRD + + + Nuova dracma greca + GRN + + + Quetzal Guatemalteco + Q + + + Franco Guiana della Guyana francese + GUF + + + Escudo della Guinea portoghese + GWE + + + Mil Reis della Guinea portoghese + GWM + + + Peso della Guinea-Bissau + GWP + + + Dollaro della Guyana + G$ + + + Dollaro di Hong Kong + HKD + + + Lempira Hoduregno + L + + + Dinaro Croato + HRD + + + Kuna Croata + HRK + + + Gourde Haitiano + HTG + + + Fiorino Ungherese + HUF + + + Sterlina dell’Irlanda del Nord + IBP + + + Fiorino Nica indonesiano + IDG + + + Rupia indonesiana di Java + IDJ + + + Nuova rupia indonesiana + IDN + + + Rupia Indiana + Rp + + + Lira Irlandese + IR£ + + + Sheqel Israeliano + ILL + + + Sterlina Israeliana + ILP + + + Nuovo sheqel israeliano + ILS + + + Lira Sterlina dell’Isola di Man + IMP + + + Rupia Indiana + =0#Rs.|1#Re.|1<Rs. 
+ + + Dinaro Iracheno + ID + + + Rial Iraniano + RI + + + Corona Islandese + ISK + + + Lira Italiana + + + + Lira Sterlina di Jersey + JEP + + + Dollaro Giamaicano + J$ + + + Sterlina Giamaicana + JMP + + + Dinaro Giordano + JOD + + + Yen Giapponese + ¥ + + + Scellino Keniota + K Sh + + + Som Kirghiso + som + + + Vecchio riel cambogiano + KHO + + + Riel Cambogiano + CR + + + Dollaro di Kiribati + KID + + + Franco Comoriano + CF + + + Won della Repubblica popolare democratica nordcoreana + KPP + + + Won Nordcoreano + KPW + + + Hwan sudcoreano + KRH + + + Vecchi won sudcoreano + KRO + + + Won Sudcoreano + KRW + + + Dinaro Kuwaitiano + KD + + + Dollaro delle Isole Cayman + KYD + + + Rublo Kazaco + KZR + + + Tenge Kazaco + T + + + Kip Laotiano + LAK + + + Sterlina Libanese + LL + + + Franco del Liechtenstein + LIF + + + Rupia di Sri Lanka + SL Re + + + Rupia di Ceylon + LNR + + + Dollaro Liberiano + LRD + + + Loti del Lesotho + M + + + Lita Lituana + LTL + + + Talonas Lituani + LTT + + + Franco del Lussemburgo + LUF + + + Lat Lettone + LVL + + + Rublo Lettone + LVR + + + Lira libica della British Military Authority + LYB + + + Dinaro Libico + LD + + + Sterlina Libica + LYP + + + Dirham Marocchino + MAD + + + Franco Marocchino + MAF + + + Franc Nouveau di Monaco + MCF + + + Franco germinale di Monaco + MCG + + + Coupon Leu della Moldavia + MDC + + + Leu Moldavo + MDL + + + Coupon Rublo della Moldavia + MDR + + + Ariary Malgascio + MGA + + + Franco Malgascio + MGF + + + Dollaro delle Isole Marshall + MHD + + + Dinaro Macedone + MDen + + + Dinaro Macedone (1992-1993) + MKN + + + Franco di Mali + MLF + + + Kyat di Myanmar + MMK + + + Certificati di cambio esteri in dollari Myanmar + MMX + + + Tugrik Mongolo + Tug + + + Pataca di Macao + MOP + + + Franco della Martinica + MQF + + + Ouguiya della Mauritania + UM + + + Lira Maltese + Lm + + + Sterlina Maltese + MTP + + + Rupia Mauriziana + MUR + + + Rupia delle Maldive + MVP + + + Rufiyaa delle Maldive + MVR + + + Kwacha 
Malawiano + MK + + + Sterlina Malawiana + MWP + + + Peso Messicano + MEX$ + + + Peso messicano d’argento (1861-1992) + MXP + + + Unidad de Inversion (UDI) Messicana + MXV + + + Ringgit della Malesia + RM + + + Escudo del Mozambico + MZE + + + Metical del Mozambico + Mt + + + Dollaro Namibiano + N$ + + + Franco germinale della Nuova Caledonia + NCF + + + Naira Nigeriana + NGN + + + Sterlina Nigeriana + NGP + + + Franco CFP delle Nuove Ebridi + NHF + + + Cordoba Nicaraguense + NIC + + + Córdoba d’oro nicaraguense + NIG + + + Córdoba oro nicaraguense + NIO + + + Fiorino Olandese + NLG + + + Corona Norvegese + NOK + + + Rupia Nepalese + Nrs + + + Dollaro Neozelandese + $NZ + + + Sterlina Neozelandese + NZP + + + Rial Omanita + RO + + + Rial Saidi dell’Oman + OMS + + + Balboa di Panama + PAB + + + Coupon in rubli della Transdniestria + PDK + + + Nuovo rublo della Transdniestria + PDN + + + Rublo della Transdniestria + PDR + + + Inti Peruviano + PEI + + + Sol Nuevo Peruviano + PEN + + + Sol Peruviano + PES + + + Kina della Papua Nuova Guinea + PGK + + + Peso delle Filippine + PHP + + + Rupia del Pakistan + Pra + + + Zloty Polacco + Zl + + + Certificati di cambio esteri polacchi in dollari statunitensi + PLX + + + Zloty Polacco (1950-1995) + PLZ + + + Sterlina della Palestina + PSP + + + Conto Portoghese + PTC + + + Escudo Portoghese + PTE + + + Guarani del Paraguay + PYG + + + Rial del Qatar + QR + + + Franco di Reunion + REF + + + Leu della Romania + ROL + + + Nuovo Leu della Romania + RON + + + Rublo Russo + RUB + + + Rublo della CSI + RUR + + + Franco Ruandese + RWF + + + Ryal Saudita + SAR + + + Riyal sovrano saudita + SAS + + + Dollaro delle Isole Solomon + SI$ + + + Rupia delle Seychelles + SR + + + Dinaro Sudanese + SDD + + + Sterlina Sudanese + SDP + + + Corona Svedese + SEK + + + Dollaro di Singapore + SGD + + + Sterlina di Sant’Elena + SHP + + + Tallero Bons Sloveno + SIB + + + Tallero Sloveno + SIT + + + Corona Slovacca + Sk + + + Leone della Sierra Leone + 
SLL + + + Lira di San Marino + SML + + + Scellino Somalo + So. Sh. + + + Scellino del Somaliland + SQS + + + Fiorino del Suriname + Sf + + + Sterlina Scozzese + SSP + + + Dobra di São Tomé e Principe + Db + + + Escudo di São Tomé e Principe + STE + + + Nuovo rublo sovietico + SUN + + + Rublo Sovietico + SUR + + + Colón Salvadoregno + SVC + + + Sterlina Siriana + LS + + + Lilangeni dello Swaziland + E + + + Corona di Turks e Caicos + TCC + + + Franco CFA del Chad + TDF + + + Baht Tailandese + THB + + + Rublo del Tajikistan + TJR + + + Somoni del Tajikistan + TJS + + + Manat Turkmeno + TMM + + + Dinaro Tunisino + TND + + + Paʻanga di Tonga + T$ + + + Lira sterlina di Tonga + TOS + + + Escudo di Timor + TPE + + + Pataca di Timor + TPP + + + Lira Turca + TRL + + + Dollaro di Trinidad e Tobago + TT$ + + + Vecchio dollaro di Trinidad e Tobago + TTO + + + Dollaro di Tuvalu + TVD + + + Nuovo dollaro taiwanese + NT$ + + + Scellino della Tanzania + T Sh + + + Hrivna Ucraina + UAH + + + Karbovanetz Ucraino + UAK + + + Scellino Ugandese (1966-1987) + UGS + + + Scellino Ugandese + U Sh + + + Dollaro Statunitense + $ + + + Dollaro Statunitense (Next day) + USN + + + Dollaro Statunitense (Same day) + USS + + + Peso Fuerte dell’Uruguaiano + UYF + + + Peso Uruguaiano (1975-1993) + UYP + + + Peso Uruguayo uruguaiano + Ur$ + + + Som Coupon dell’Uzbekistan + UZC + + + Sum dell’Uzbekistan + UZS + + + Lira della Città del Vaticano + VAL + + + Piastre Dong Viet nordvietnamita + VDD + + + Nuovo Dong nordvietnamita + VDN + + + Viet Minh Piastre Dong Viet nordvietnamita + VDP + + + Bolivar Venezuelano + Be + + + Dollaro delle Isole Vergini britanniche + VGD + + + Dong Vietnamita + VND + + + Nuovo dong vietnamita + VNN + + + Dong della Repubblica Vietnamita + VNR + + + Dong Nazionale Vietnamita + VNS + + + Vatu di Vanuatu + VT + + + Sterlina della Samoa Occidentale + WSP + + + Tala della Samoa Occidentale + WST + + + Unità di conto in dinari asiatica + XAD + + + Franco CFA BEAC + XAF + + + 
Unità Monetaria Asiatica + XAM + + + Oro + XAU + + + Unità composita europea + XBA + + + Unità monetaria europea + XBB + + + Unità di acconto europea (XBC) + XBC + + + Unità di acconto europea (XBD) + XBD + + + Dollaro dei Caraibi Orientali + EC$ + + + Nuovo Franco CFA + XCF + + + Diritti Speciali di Incasso + XDR + + + Franco CFA BCEAEC + XEF + + + Unità Monetaria Europea + XEU + + + Franco Oro Francese + XFO + + + Franco UIC Francese + XFU + + + Dinaro Islamico + XID + + + Nouveau Franc francese metropolitano + XMF + + + Franco CFA delle Antille Francesi + XNF + + + Franco CFA BCEAO + XOF + + + Franco CFP + CFPF + + + Rublo Trasferibile COMECON + XTR + + + Dinaro dello Yemen + YDD + + + Riyal Imadi dello Yemen + YEI + + + Rial dello Yemen + YRl + + + Dinaro Forte Yugoslavo + YUD + + + Dinaro della Federazione Yugoslava + YUF + + + Dinaro 1994 Yugoslavo + YUG + + + Dinaro Noviy Yugoslavo + YUM + + + Dinaro Convertibile Yugoslavo + YUN + + + Dinaro di Ottobre Yugoslavo + YUO + + + Dinaro Riformato Yugoslavo + YUR + + + Rand Sudafricano (finanziario) + ZAL + + + Sterlina Sudafricana + ZAP + + + Rand Sudafricano + ZAR + + + Kwacha dello Zambia + ZMK + + + Sterlina dello Zambia + ZMP + + + Nuovo Zaire dello Zaire + ZRN + + + Zaire dello Zaire + ZRZ + + + Dollaro dello Zimbabwe + Z$ + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/it_CH.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/it_CH.xml new file mode 100644 index 0000000..ddea752 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/it_CH.xml @@ -0,0 +1,113 @@ + + + + + + + + + + + + + + + + + EEEE, d MMMM yyyy + + + + + d MMMM yyyy + + + + + d-MMM-yy + + + + + dd.MM.yy + + + + + + + + HH.mm:ss' h' z + + + + + HH:mm:ss z + + + + + HH:mm:ss + + + + + HH:mm + + + + + + + {1} {0} + + + + + + + + + . 
+ ' + ; + % + 0 + # + + + - + E + + + + + + + + #,##0.###;-#,##0.### + + + + + + + #E0 + + + + + + + #,##0% + + + + + + + ¤ #,##0.00;¤-#,##0.00 + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/it_IT.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/it_IT.xml new file mode 100644 index 0000000..13f29af --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/it_IT.xml @@ -0,0 +1,80 @@ + + + + + + + + + + + + + + + + + EEEE d MMMM yyyy + + + + + dd MMMM yyyy + + + + + dd/MMM/yy + + + + + dd/MM/yy + + + + + + + + HH:mm:ss z + + + + + H:mm:ss z + + + + + HH:mm:ss + + + + + HH:mm + + + + + + + {1} {0} + + + + + + + + + + Lira Italiana + + ¤ #,##0;-¤ #,##0 + ¤ #,##0;-¤ #,##0 + . + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/iu.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/iu.xml new file mode 100644 index 0000000..527d954 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/iu.xml @@ -0,0 +1,77 @@ + + + + + + + + + + + ᐃᓄᒃᑎᑐᑦ ᑎᑎᕋᐅᓯᖅ + + + + [ᐃ-ᐆᐊᐋᐱ-ᐴᐸᐹᑉᑎ-ᑑᑕᑖᑦᑭ-ᑰᑲᑳᒃᒋ-ᒎᒐ-ᒑᒡᒥ-ᒨᒪᒫᒻᓂ-ᓅᓇᓈᓐᓕ-ᓘᓚᓛᓪᓯ-ᓲᓴᓵᔅᔨ-ᔫᔭᔮᔾᕆ-ᕉᕋ-ᕌᕐᕕ-ᕚᕝᕿ-ᖃᖅᖏᖑ-ᖖᖠ-ᖦᙱ-ᙶ] + + + + + + + + ᔭᓐᓄᐊᓕ + ᕕᕝᕗᐊᓕ + ᒫᑦᓯ + ᐊᐃᑉᐳᓗ + ᒪᐃ + ᔫᓂ + ᔪᓚᐃ + ᐊᐅᒡᒍᓯ + ᓰᑦᑏᕝᕙ + ᐆᑦᑑᕝᕙ + ᓅᕙᐃᕝᕙ + ᑏᓰᕝᕙ + + + ᔭᓐᓄᐊᓕ + ᕕᕝᕗᐊᓕ + ᒫᑦᓯ + ᐊᐃᑉᐳᓗ + ᒪᐃ + ᔫᓂ + ᔪᓚᐃ + ᐊᐅᒡᒍᓯ + ᓰᑦᑏᕝᕙ + ᐆᑦᑑᕝᕙ + ᓅᕙᐃᕝᕙ + ᑏᓰᕝᕙ + + + + + + + ᓈᑦᓰᖑᔭ + ᓇᒡᒐᔾᔭᐅ + ᓇᒡᒐᔾᔭᐅᓕᖅᑭ + ᐱᖓᑦᓯᖅ + ᓯᑕᒻᒥᖅ + ᑕᓪᓕᕐᒥᖅ + ᓈᑦᓰᖑᔭᓕᖅᕿ + + + ᓈᑦᓰᖑᔭ + ᓇᒡᒐᔾᔭᐅ + ᓇᒡᒐᔾᔭᐅᓕᖅᑭ + ᐱᖓᑦᓯᖅ + ᓯᑕᒻᒥᖅ + ᑕᓪᓕᕐᒥᖅ + ᓈᑦᓰᖑᔭᓕᖅᕿ + + + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/iw.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/iw.xml new file mode 100644 index 0000000..52240a2 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/iw.xml @@ -0,0 +1,534 @@ + + + + + + + + + + + ערבית + בולגרית + צ׳כית + דנית + גרמנית + יוונית + אנגלית + ספרדית + אסטונית + פינית + צרפתית + עברית + קרואטית + הונגרית + 
איטלקית + יפנית + קוריאנית + ליטאית + לטבית + הולנדית + נורווגית + פולנית + פורטוגזית + רומנית + רוסית + סלובקית + סלובנית + שוודית + טורקית + סינית + + + אנדורה + איחוד האמירויות הערביות + אפגניסטן + אנטיגואה וברבודה + אנגילה + אלבניה + ארמניה + האינטילים ההולנדיים + אנגולה + אנטארקטיקה + ארגנטינה + סמואה האמריקנית + אוסטריה + אוסטרליה + ארובה + אזרבייג׳ן + בוסניה הרצגובינה + ברבדוס + בנגלדש + בלגיה + בורקינה פאסו + בולגריה + בחריין + בורונדי + בנין + ברמודה + ברוניי דארסלאם + בוליביה + ברזיל + בהאמה + בוטאן + האי בובה + בוטסוואנה + בלרוס + בליז + קנדה + איי קוקוס (קילינג) + קונגו, הרפובליקה הדמוקרטית של + הרפובליקה של מרכז אפריקה + קונגו + שווייץ + חוף השנהב + איי קוק + צ׳ילה + קמרון + סין + קולומביה + קוסטה ריקה + קובה + קייפ ורדה + איי כריסטמס + קפריסין + הרפובליקה הצ׳כית + גרמניה + ג׳יבוטי + דנמרק + דומיניקה + הרפובליקה הדומיניקנית + אלג׳יריה + אקוואדור + אסטוניה + מצרים + סהרה המערבית + אריתריאה + ספרד + אתיופיה + פינלנד + פיג׳י + איי פוקלנד + מאוריציוס, המדינות המאוגדות של + איי פארו + צרפת + גבון + בריטניה + גרנדה + גרוזיה + גיאנה הצרפתית + גאנה + גיברלטר + גרינלנד + גמביה + גיניאה + גוואדלופ + גיניאה המשוונית + יוון + האי ג׳ורג׳יה הדרומית ואיי סנדוויץ׳ הדרומיים + גווטמאלה + גואם + גיניאה-ביסאו + גיאנה + הונג קונג S.A.R. של סין + איי הרד ואיי מקדונלנד + הונדורס + קרואטיה + האיטי + הונגריה + אינדונזיה + אירלנד + ישראל + הודו + הטריטוריה הבריטית באוקינוס ההודי + עירק + איראן, הרפובליקה האיסלמית + איסלנד + איטליה + ג׳מייקה + ירדן + יפן + קניה + קירגיזסטן + קמבודיה + קיריבאטי + קומורוס + סנט קיטס וניבס + קוריאה, צפון + קוריאה, דרום + כווית + איי קיימאן + קזחסטן + לאוס, הרפובליקה הדמקורטית העממית + לבנון + סנט לושיה + ליכטנשטיין + סרי לנקה + ליבריה + לסוטו + ליטא + לוקסמבורג + לטביה + לוב + מרוקו + מונקו + מולדובה, הרפובליקה + מדגסקר + איי מרשל + מקדוניה, הרפובליקה של + מאלי + מינמאר + מונגוליה + מקאו S.A.R. 
של סין + איי מריאנה הצפוניים + מרטיניק + מאוריטניה + מונטסראט + מלטה + מאוריציוס + מלדיבאס + מלאווי + מכסיקו + מלזיה + מוזמביק + נמיביה + קלדוניה החדשה + ניז׳ר + איי נורפק + ניגריה + ניקראגווה + הולנד + נורווגיה + נפאל + נאורו + ניווה + ניו זילנד + עומן + פנמה + פרו + פולינזיה הצרפתית + פפואה גיניאה החדשה + פיליפינים + פקיסטן + פולין + סנט פייר ומיקלון + פיטקיירן + פורטו ריקו + הרשות הפלשתינית + פורטוגל + פלאו + פראגוואי + קטר + ראוניון + רומניה + חבר המדינות הרוסיות + רואנדה + ערב הסעודית + איי שלמה + איי סיישל + סודן + שוודיה + סינגפור + סיינט הלנה + סלובניה + סוולבארד וז׳אן מאיין + סלובקיה + סיירה לאונה + סן מרינו + סנגל + סומליה + סורינאם + סן תומה ופרינסיפה + אל סלבאדור + הרפובליקה הערבית הסורית + סווזילנד + איי טורקס וקאיקוס + צ׳אד + טריטוריות דרומיות של צרפת + טוגו + תאילנד + טג׳יקיסטן + טוקלאו + מזרח טימור + טורקמניסטן + טוניסיה + טונגה + טורקיה + טרינידד וטובגו + טובאלו + טיוואן + טנזניה + אוקראינה + אוגנדה + איים קטנים שלחוף ארצות הברית + ארצות הברית + אורוגוואי + אוזבקיסטן + הוותיקן + סנט וינסנט והגרנדינים + ונצואלה + איי הבתולה הבריטיים + איי הבתולה האמריקניים + וייטנאם + ואנואטו + ואליס ופוטונה + סמואה + תימן + מיוטה + יוגוסלביה + דרום אפריקה + זמביה + זימבבווה + + + + [[:Hebr:]‏‎] + + + GanjkHmsSEDFwWxhKzAeugXZ + + + + + + ינו + פבר + מרץ + אפר + מאי + יונ + יול + אוג + ספט + אוק + נוב + דצמ + + + ינואר + פברואר + מרץ + אפריל + מאי + יוני + יולי + אוגוסט + ספטמבר + אוקטובר + נובמבר + דצמבר + + + + + + + א + ב + ג + ד + ה + ו + ש + + + יום ראשון + יום שני + יום שלישי + יום רביעי + יום חמישי + יום שישי + שבת + + + + + + לפנה״ס + לסה״נ + + + + + + + EEEE d MMMM yyyy + + + + + d MMMM yyyy + + + + + dd/MM/yyyy + + + + + dd/MM/yy + + + + + + + + HH:mm:ss z + + + + + HH:mm:ss z + + + + + HH:mm:ss + + + + + HH:mm + + + + + + + {0} {1} + + + + + + + + + תשרי + חשון + כסלו + טבת + שבט + אדר ראשון + אדר שני + ניסן + אייר + סיון + תמוז + אב + אלול + + + תשרי + חשון + כסלו + טבת + שבט + אדר ראשון + אדר שני + ניסן + אייר + סיון + תמוז + אב + אלול + + + + + + לבה"ע 
+ + + + + + + + מוחרם + ספר + רביע אל-אוואל + רביע אל-תני + ג׳ומדה אל-אוואל + ג׳ומדה אל-תני + רג׳אב + שעבאן + ראמדן + שוואל + זו אל-QI'DAH + זו אל-חיג׳ה + + + מוחרם + ספר + רביע אל-אוואל + רביע אל-תני + ג׳ומדה אל-אוואל + ג׳ומדה אל-תני + רג׳אב + שעבאן + ראמדן + שוואל + זו אל-QI'DAH + זו אל-חיג׳ה + + + + + + שנת היג׳רה + + + + + + + + מוחרם + ספר + רביע אל-אוואל + רביע אל-תני + ג׳ומדה אל-אוואל + ג׳ומדה אל-תני + רג׳אב + שעבאן + ראמדן + שוואל + זו אל-QI'DAH + זו אל-חיג׳ה + + + מוחרם + ספר + רביע אל-אוואל + רביע אל-תני + ג׳ומדה אל-אוואל + ג׳ומדה אל-תני + רג׳אב + שעבאן + ראמדן + שוואל + זו אל-QI'DAH + זו אל-חיג׳ה + + + + + + שנת היג׳רה + + + + + + + + + ש"ח + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/iw_IL.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/iw_IL.xml new file mode 100644 index 0000000..dedd22b --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/iw_IL.xml @@ -0,0 +1,40 @@ + + + + + + + + + + + + + + #,##0.###;-#,##0.### + + + + + + + #E0 + + + + + + + #,##0% + + + + + + + #,##0.00 ¤;-#,##0.00 ¤ + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/ja.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/ja.xml new file mode 100644 index 0000000..e4d482c --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/ja.xml @@ -0,0 +1,2775 @@ + + + + + + + + + + + アファール語 + アブハズ語 + アヴェスタ語 + アフリカーンス語 + アカン語 + アムハラ語 + アラゴン語 + アラビア語 + アッサム語 + アヴァル語 + アイマラ語 + アゼルバイジャン語 + バシキール語 + ベラルーシ語 + ブルガリア語 + ビハ―ル語 + ビスラマ語 + バンバラ語 + ベンガル語 + チベット語 + ブルトン語 + ボスニア語 + ビリン語 + カタロニア語 + チェチェン語 + チャモロ語 + チェロキー語 + コルシカ語 + クリー語 + チェコ語 + 教会スラブ語 + チュヴァシュ語 + ウェールズ語 + デンマーク語 + ドイツ語 + ディヴェヒ語 + ブータン語 + エウェ語 + ギリシャ語 + 英語 + エスペラント語 + スペイン語 + エストニア語 + バスク語 + ペルシア語 + フラニ語 + フィンランド語 + フィジー語 + フェロー語 + フランス語 + フリジア語 + アイルランド語 + スコットランド・ゲール語 + ゲエズ語 + ガリシア語 + グワラニ語 + グジャラート語 + マン島語 + ハウサ語 + ハワイ語 + ヘブライ語 + ヒンディー語 + ヒリモトゥ語 + クロアチア語 + ハイチ語 + ハンガリー語 + アルメニア語 + 
ヘレロ語 + 国際語 + インドネシア語 + 国際語 + イボ語 + 四川イ語 + イヌピアック語 + イド語 + アイスランド語 + イタリア語 + イヌクウティトット語 + 日本語 + ジャワ語 + グルジア語 + コンゴ語 + キクユ語 + クアニャマ語 + カザフ語 + グリーンランド語 + カンボジア語 + カンナダ語 + 韓国語 + コンカニ語 + カヌリ語 + カシミール語 + クルド語 + コミ語 + コーンウォール語 + キルギス語 + ラテン語 + ルクセンブルク語 + ガンダ語 + リンブルフ語 + リンガラ語 + ラオ語 + リトアニア語 + ルバ・カタンガ語 + ラトビア語 + マラガシー語 + マーシャル語 + マオリ語 + マケドニア語 + マラヤ―ラム語 + モンゴル語 + モルダビア語 + マラーティー語 + マライ語 + マルタ語 + ビルマ語 + ナウル語 + ノルウェー語 (ボクモール) + 北ンデベレ語 + ネパール語 + ンドンガ語 + オランダ語 + ノルウェー語 (ニューノルスク) + ノルウェー語 + 南ンデベレ語 + ナバホ語 + ニャンジャ語、チチェワ語、チェワ語 + プロヴァンス語 + オブジワ語 + ガラ語 + オリヤー語 + オセト語 + パンジャブ語 + パーリ語 + ポーランド語 + パシュトー語 + ポルトガル語 + ケチュア語 + レト=ロマン語 + ルンジ語 + ルーマニア語 + ロシア語 + ルワンダ語 + サンスクリット語 + サルデーニャ語 + シンド語 + 北サーミ語 + サンゴ語 + セルボ=クロアチア語 + シンハラ語 + シダモ語 + スロバキア語 + スロベニア語 + サモア語 + ショナ語 + ソマリ語 + アルバニア語 + セルビア語 + シスワティ語 + 南セソト語 + スンダ語 + スウェーデン語 + スワヒリ語 + シリア語 + タミール語 + テルグ語 + タジク語 + タイ語 + ティグリニア語 + ティグレ語 + トルクメン語 + タガログ語 + ツワナ語 + トンガ語 + トルコ語 + ツォンガ語 + タタール語 + トゥイ語 + タヒチ語 + ウイグル語 + ウクライナ語 + ウルドゥー語 + ウズベク語 + ベンダ語 + ベトナム語 + ボラピュク語 + ワロン語 + ウォロフ語 + コサ語 + イディッシュ語 + ヨルバ語 + チワン語 + 中国語 + ズールー語 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + アンドラ + アラブ首長国連邦 + アフガニスタン + アンティグア・バーブーダ + アンギラ + アルバニア + アルメニア + オランダ領アンティル諸島 + アンゴラ + 南極大陸 + アルゼンチン + 米領サモア + オーストリア + オーストラリア + アルバ島 + アゼルバイジャン + ボスニア・ヘルツェゴビナ + バルバドス + バングラデシュ + ベルギー + ブルキナファソ + ブルガリア + バーレーン + ブルンジ + ベニン + バーミューダ + ブルネイ + ボリビア + ブラジル + バハマ + ブータン + ブーベ島 + ボツワナ + ベラルーシ + ベリーズ + カナダ + ココス (キーリング) 諸島 + コンゴ民主共和国 + 中央アフリカ共和国 + コンゴ + スイス + コートジボアール + クック諸島 + チリ + カメルーン + 中国 + コロンビア + コスタリカ + キューバ + カーボベルデ + クリスマス島 + キプロス + チェコ + ドイツ + ジブチ + デンマーク + ドミニカ国 + ドミニカ共和国 + アルジェリア + エクアドル + エストニア + エジプト + 西サハラ + エリトリア + スペイン + エチオピア + フィンランド + フィジー + フォークランド諸島 + ミクロネシア + フェロー諸島 + フランス + ガボン + イギリス + グレナダ + グルジア + 仏領ギアナ + ガーナ + ジブラルタル + グリーンランド + ガンビア + ギニア + グアドループ + 赤道ギニア + ギリシア + 南ジョージア島・南サンドイッチ諸島 + グアテマラ + グアム島 + ギニアビサウ + ガイアナ + 中華人民共和国香港特別行政区 + ハード・アンド・マクドナルド・アイランズ + ホンジュラス + クロアチア + ハイチ + ハンガリー + 
インドネシア + アイルランド + イスラエル + インド + 英領インド洋植民地 + イラク + イラン + アイスランド + イタリア + ジャマイカ + ヨルダン + 日本 + ケニア + キルギスタン + カンボジア + キリバス + コモロ + セントクリストファー・ネイビス + 朝鮮民主主義人民共和国 + 大韓民国 + クウェート + ケイマン諸島 + カザフスタン + ラオス + レバノン + セントルシア + リヒテンシュタイン + スリランカ + リベリア + レソト + リトアニア + ルクセンブルグ + ラトビア + リビア + モロッコ + モナコ + モルドバ + マダガスカル + マーシャル諸島共和国 + マケドニア + マリ + ミャンマー + モンゴル + 中華人民共和国マカオ特別行政区 + 北マリアナ諸島 + マルティニーク島 + モーリタニア + モントセラト島 + マルタ + モーリシャス + モルジブ + マラウィ + メキシコ + マレーシア + モザンビーク + ナミビア + ニューカレドニア + ニジェール + ノーフォーク島 + ナイジェリア + ニカラグア + オランダ + ノルウェー + ネパール + ナウル + ニウエ島 + ニュージーランド + オマーン + パナマ + ペルー + 仏領ポリネシア + パプアニューギニア + フィリピン + パキスタン + ポーランド + サンピエール・エ・ミクロン島 + ピトケアン島 + プエルトリコ + パレスチナ + ポルトガル + パラオ + パラグアイ + カタール + レユニオン島 + ルーマニア + ロシア + ルワンダ + サウジアラビア + ソロモン諸島 + セイシェル + スーダン + スウェーデン + シンガポール + セントヘレナ島 + スロベニア + スバールバル諸島・ヤンマイエン島 + スロバキア + シエラレオネ + サンマリノ + セネガル + ソマリア + セルビア + スリナム + サントメ・プリンシペ + エルサルバドル + シリア + スワジランド + タークス諸島・カイコス諸島 + チャド + 仏南方領 + トーゴ + タイ + タジキスタン + トケラウ諸島 + 東ティモール + トルクメニスタン + チュニジア + トンガ + トルコ + トリニダード・トバゴ + ツバル + 台湾 + タンザニア + ウクライナ + ウガンダ + 米領太平洋諸島 + アメリカ合衆国 + ウルグアイ + ウズベキスタン + ローマ教皇庁 (バチカン市国) + セントビンセント・グレナディーン諸島 + ベネズエラ + 英領バージン諸島 + 米領バージン諸島 + ベトナム + バヌアツ + ウォリス・フトーナ + 西サモア + イエメン + マヨット島 + ユーゴスラビア + 南アフリカ + ザンビア + ジンバブエ + + + 改訂版 + + + + 照合順番 + 通貨 + + + 仏暦 + 中国暦 + グレゴリオ暦 + ヘブライ暦 + イスラム暦 + 太陽イスラム暦 + 和暦 + 直接著錄 + 電話帳方式 + 拼音順 + 画数順 + 旧式 + + + + 
[一-丁七万-下不-与且世丘-丙両並中丸-丹主久乏乗乙九乱乳乾了予-争事-二互五-井亜亡交享-亭人仁今-介仏仕-他付-仙代-以仮仰仲件任企伏-休会伝伯伴伸伺似但位-佐体何余作佳併使例侍供依価侮-侯侵便係-促俊俗保信修俳俵俸倉個倍倒候借倣-値倫倹偉偏停健側-偶偽傍傑傘-備催債傷傾働像僕僚僧儀億儒償優元-兆先-光克免児党入全八-六共兵具-典兼内-円冊再冒冗写冠冬冷准凍凝凡処凶凸-出刀刃分-刈刊刑列初判-別利到制-券刺-刻則削前剖剛剣-剤副-剰割創劇力功-加劣助-努励労効劾勅勇勉動勘-務勝募勢勤勧勲勺匁包化-北匠匹-医匿十千升-午半卑-協南-単博占印-危即-卵卸厄厘厚原厳去参又及-収叔取-受叙口-句叫-召可-台史-右号-司各合-吉同-向君吟否含吸-吹呈-告周味呼-命和咲哀-品員哲唆-唇唐唯唱商問啓善喚喜-喝喪-喫営嗣嘆嘱器噴嚇囚-四回因団困囲-図固国圏園土圧-在地坂均坊坑坪垂型垣埋城域執培-基堀堂堅堕堤堪報場塀-塁塊塑塔塗塚塩塾境墓増墜墨墳墾壁壇壊壌士壮声-売変夏夕-外多夜夢大天-夫央失奇奉奏契奔奥奨奪奮女-奴好如-妄妊妙妥妨妹妻姉始姓-委姫姻姿威娘娠娯婆婚婦婿媒嫁嫌嫡嬢子孔字-存孝季-孤学孫宅宇-安完宗-定宜-宝実客-室宮宰害-家容宿寂寄密富寒寛寝察寡寧審寮寸寺対-寿封-専射将尉-尋導-小少尚就尺尼-局居屈届-屋展属層-履屯山岐岩岬岳岸峠-峡峰島崇崎崩川-州巡巣工-巨差己巻市-布帆希帝帥師席帯-帰帳常帽幅幕幣干-年幸-幹幻-幾庁広床序底店府度-座庫庭庶-庸廃廉-廊延-廷建弁弊式-弐弓-引弟弦-弧弱張強弾当形彩彫彰-影役彼往-征径-待律-後徐徒-従得御復-循微徳-徴徹心必忌-忍志-忙応忠快念怒怖思怠急性怪恋恐恒恥恨-恩恭息恵悔悟-悠患悦悩-悪悲悼情惑惜惨惰想愁愉意愚-愛感慈態-慌慎慕慢-慣慨慮慰慶憂憎憤憩憲憶憾懇懐懲懸成-戒戦戯戸戻房-所扇扉手才打払扱扶批承-技抄把抑投抗-折抜択披抱抵抹押-抽担拍拐拒-拓拘-拙招拝拠-拡括拷拾持指挑挙挟振挿捕捜捨据掃授掌排掘掛採-探接控-推措掲描-提揚-換握揮援揺損搬-搭携搾摂摘摩撃撤撮撲擁操擦擬支改攻放-政故敏救敗教敢-散敬数整-敵敷文斉斎斗料斜斤-斥断新方施旅旋族旗既日旧-早旬昆-昇明易-昔星-映春昨昭是昼時晩普-景晴晶暁暇暑暖-暗暦暫暮暴曇曜曲更書-曹替-最月-有服朕朗望朝期木未-札朱朴机朽杉材-村束条来杯東松-板析林枚果-枝枠枢枯架柄某染-柔柱柳査栄栓校株核-根格-栽桃案桑桜桟梅械棄棋棒棚棟森棺植検業極楼-楽概構様槽標模権-横樹橋機欄欠-次欧欲欺款歌歓止-正武歩歯歳-歴死殉-残殖殴-段殺-殻殿母-毎毒比毛氏民気水氷-永汁-求汗汚江-池決汽沈沖没-沢河沸-油治-沼沿況泉-泊泌法泡-泣泥注泰泳洋洗洞津洪活派流浄-浅浜浦浪浮浴海-浸消涙涯液涼淑淡深混添清渇-渉渋渓減渡渦温測港湖湯湾-満源準溝溶滅滋滑滝-滞滴漁-漂漆漏演漠漢漫-漬漸潔潜潟潤潮澄激-濁濃濫濯瀬火灯-灰災炉-炊炎炭点-為烈無焦然焼煙照煩煮熟熱燃燥爆爵-父片-版牛牧物牲特犠犬犯状狂狩独-狭猛猟猫献猶猿獄獣獲玄率玉王珍珠班現球理琴環璽瓶甘甚生産用田-申男町-画界畑畔留畜-畝略番異畳疎疑疫疲疾病症痘痛痢痴療癒癖発-登白-百的皆-皇皮皿盆益盗盛盟監-盤目盲直相盾省看-県真-眠眺眼着睡督瞬矛矢知短矯石砂研-砕砲破硝硫-硬碁碑確磁磨礁礎示礼社祈-祉祖祝-神祥票祭禁禅禍福秀-私秋科-秒秘租秩称移程税稚種稲稼稿-穀穂積穏穫穴究空突窃窒-窓窮-窯立竜章童端競竹笑笛符第筆等筋筒答策箇算管箱節範築篤簡簿籍米粉粋粒粗-粘粛粧精糖糧糸系糾紀約-紅紋納純紙-紛素-索紫累-細紳紹-紺終組経結絞絡給統絵-絶絹継-続維綱-網綿緊総緑-緒線締編-緩緯練縁縄縛縦縫縮績繁繊織-繕繭繰缶罪置罰署罷羅羊美群義羽翁翌習翻-翼老考者耐耕耗耳聖聞聴職肉肌肖肝肢肥肩-肪肯育肺胃胆背胎胞胴胸能脂脅脈脚脱脳脹腐腕腰腸-腹膚膜膨臓臣臨自臭至-致興舌舎舗舞-舟航般舶船艇艦良色芋芝花芳芸芽苗若-苦英茂茎茶草荒荘荷菊菌菓菜華落葉著葬蒸蓄蔵薄薦薪-薬藩藻虐虚虜虞虫蚊蚕蛇蛍蛮融血衆行術街衛衝衡衣表衰衷袋被裁-裂装裏裕補裸製複褐褒襟襲西要覆-覇見規視覚覧親観角解触言訂計討訓託-記訟訪設許訳-訴診証詐詔-評詞詠試詩詰-詳誇誉誌-認誓誕誘語誠誤説-読課調談請論諭-諮諸諾謀-謁謄謙講謝謡謹識譜警議譲護谷豆豊豚象豪貝-貞負-貢貧-販貫-責貯貴買-貸費貿-賀賃-賄資賊賓賛-賜賞賠賢賦質購贈赤赦走赴起超越趣足距跡路跳践踊踏躍身車軌-軍軒軟転軸軽較載輝輩-輪輸轄辛辞辱-農辺込迅迎近返迫迭述迷追退-送逃逆透-逐逓-途通逝速-造連逮週-進逸遂遅遇遊-運遍-過道-違遠遣適遭-遮遵遷-選遺避還邦邪邸郊郎郡部郭郵郷都酌-配酒酔酢酪酬酵酷-酸醜醸釈里-量金針釣鈍鈴鉄鉛鉢鉱銀銃銅銑銘銭鋭鋳鋼錘錠錬錯録鍛鎖鎮鏡鐘鑑長門閉開閑間関-閣閥
閲闘防阻附降限陛院-陥陪陰陳陵-陶陸険陽隅-隆隊階-随隔際-障隠隣隷隻雄-雇雌雑離-難雨雪雰雲零-雷電需震霊霜霧露青静非面革靴音韻響頂項-順預-頒領頭頻-頼題-額顔-顕願類顧風飛食飢飯飲飼-飾養餓館首香馬駄-駆駐騎騒-験騰驚骨髄高髪鬼魂魅魔魚鮮鯨鳥鳴鶏麗麦麻黄黒黙鼓鼻齢] + + + GanjkHmsSEDFwWxhKzAeugXZ + + + + + + 1月 + 2月 + 3月 + 4月 + 5月 + 6月 + 7月 + 8月 + 9月 + 10月 + 11月 + 12月 + + + + + + + + + + + + + + + + + + + + + + + + + 日曜日 + 月曜日 + 火曜日 + 水曜日 + 木曜日 + 金曜日 + 土曜日 + + + + 午前 + 午後 + + + 紀元前 + 西暦 + + + + + + + yyyy'年'M'月'd'日'EEEE + + + + + yyyy'年'M'月'd'日' + + + + + yyyy/MM/dd + + + + + yy/MM/dd + + + + + + + + H'時'mm'分'ss'秒'z + + + + + H:mm:ss:z + + + + + H:mm:ss + + + + + H:mm + + + + + + + {1} {0} + + + + + + + + 大化 + 白雉 + 白鳯 + 朱鳥 + 大宝 + 慶雲 + 和銅 + 霊亀 + 養老 + 神亀 + 天平 + 天平感宝 + 天平勝宝 + 天平宝字 + 天平神護 + 神護景雲 + 宝亀 + 天応 + 延暦 + 大同 + 弘仁 + 天長 + 承和 + 嘉祥 + 仁寿 + 斉衡 + 天安 + 貞観 + 元慶 + 仁和 + 寛平 + 昌泰 + 延喜 + 延長 + 承平 + 天慶 + 天暦 + 天徳 + 応和 + 康保 + 安和 + 天禄 + 天延 + 貞元 + 天元 + 永観 + 寛和 + 永延 + 永祚 + 正暦 + 長徳 + 長保 + 寛弘 + 長和 + 寛仁 + 治安 + 万寿 + 長元 + 長暦 + 長久 + 寛徳 + 永承 + 天喜 + 康平 + 治暦 + 延久 + 承保 + 承暦 + 永保 + 応徳 + 寛治 + 嘉保 + 永長 + 承徳 + 康和 + 長治 + 嘉承 + 天仁 + 天永 + 永久 + 元永 + 保安 + 天治 + 大治 + 天承 + 長承 + 保延 + 永治 + 康治 + 天養 + 久安 + 仁平 + 久寿 + 保元 + 平治 + 永暦 + 応保 + 長寛 + 永万 + 仁安 + 嘉応 + 承安 + 安元 + 治承 + 養和 + 寿永 + 元暦 + 文治 + 建久 + 正治 + 建仁 + 元久 + 建永 + 承元 + 建暦 + 建保 + 承久 + 貞応 + 元仁 + 嘉禄 + 安貞 + 寛喜 + 貞永 + 天福 + 文暦 + 嘉禎 + 暦仁 + 延応 + 仁治 + 寛元 + 宝治 + 建長 + 康元 + 正嘉 + 正元 + 文応 + 弘長 + 文永 + 建治 + 弘安 + 正応 + 永仁 + 正安 + 乾元 + 嘉元 + 徳治 + 延慶 + 応長 + 正和 + 文保 + 元応 + 元亨 + 正中 + 嘉暦 + 元徳 + 元弘 + 建武 + 延元 + 興国 + 正平 + 建徳 + 文中 + 天授 + 康暦 + 弘和 + 元中 + 至徳 + 嘉慶 + 康応 + 明徳 + 応永 + 正長 + 永享 + 嘉吉 + 文安 + 宝徳 + 享徳 + 康正 + 長禄 + 寛正 + 文正 + 応仁 + 文明 + 長享 + 延徳 + 明応 + 文亀 + 永正 + 大永 + 享禄 + 天文 + 弘治 + 永禄 + 元亀 + 天正 + 文禄 + 慶長 + 元和 + 寛永 + 正保 + 慶安 + 承応 + 明暦 + 万治 + 寛文 + 延宝 + 天和 + 貞享 + 元禄 + 宝永 + 正徳 + 享保 + 元文 + 寛保 + 延享 + 寛延 + 宝暦 + 明和 + 安永 + 天明 + 寛政 + 享和 + 文化 + 文政 + 天保 + 弘化 + 嘉永 + 安政 + 万延 + 文久 + 元治 + 慶応 + 明治 + 大正 + 昭和 + 平成 + + + + + + + Gy'年'M'月'd'日'EEEE + + + + + Gy'年'M'月'd'日' + + + + + G yy/MM/dd + + + + + G yy/MM/dd + + + + + + + + H'時'mm'分'ss'秒'z + + + + + H:mm:ss:z + + + + + H:mm:ss + + + + + H:mm + + + + + + + {1} {0} + + + + + + + + + 
太平洋標準時 + 太平洋夏時間 + + + PST + PDT + + ロサンゼルス + + + + 太平洋標準時 + 太平洋夏時間 + + + PST + PDT + + ロサンゼルス + + + + 山地標準時 + 山地夏時間 + + + MST + MDT + + デンバー + + + + 山地標準時 + 山地夏時間 + + + MST + MDT + + デンバー + + + + 山地標準時 + 山地標準時 + + + MST + MST + + フェニックス + + + + 山地標準時 + 山地標準時 + + + MST + MST + + フェニックス + + + + 中部標準時 + 中部夏時間 + + + CST + CDT + + シカゴ + + + + 中部標準時 + 中部夏時間 + + + CST + CDT + + シカゴ + + + + 東部標準時 + 東部夏時間 + + + EST + EDT + + ニューヨーク + + + + 東部標準時 + 東部夏時間 + + + EST + EDT + + ニューヨーク + + + + 東部標準時 + 東部標準時 + + + EST + EST + + インディアナポリス + + + + 東部標準時 + 東部標準時 + + + EST + EST + + インディアナポリス + + + + ハワイ標準時 + ハワイ標準時 + + + HST + HST + + ホノルル + + + + ハワイ標準時 + ハワイ標準時 + + + HST + HST + + ホノルル + + + + アラスカ標準時 + アラスカ夏時間 + + + AST + ADT + + アンカレッジ + + + + アラスカ標準時 + アラスカ夏時間 + + + AST + ADT + + アンカレッジ + + + + 大西洋標準時 + 大西洋夏時間 + + + AST + ADT + + ハリファクス + + + + ニューファンドランド島標準時 + ニューファンドランド島夏時間 + + + CNT + CDT + + セントジョンズ + + + + ニューファンドランド島標準時 + ニューファンドランド島夏時間 + + + CNT + CDT + + セントジョンズ + + + + 中欧標準時 + 中欧夏時間 + + + CET + CEST + + パリ + + + + 中欧標準時 + 中欧夏時間 + + + CET + CEST + + パリ + + + + グリニッジ標準時 + グリニッジ標準時 + + + GMT + GMT + + ロンドン + + + + グリニッジ標準時 + グリニッジ標準時 + + + GMT + GMT + + カサブランカ + + + + イスラエル標準時 + イスラエル夏時間 + + + IST + IDT + + エルサレム + + + + 日本標準時 + 日本標準時 + + + JST + JST + + 東京 + + + + 日本標準時 + 日本標準時 + + + JST + JST + + 東京 + + + + 東欧標準時 + 東欧夏時間 + + + EET + EEST + + ブカレスト + + + + 中国標準時 + 中国標準時 + + + CTT + CDT + + 上海 + + + + 中国標準時 + 中国標準時 + + + CTT + CDT + + 上海 + + + + + + + アンドラ ディナール + ADD + + + アンドラ ペセタ + ADP + + + UAE ディルハム + AED + + + アフガニー (1927-2002) + AFA + + + アフガニー + Af + + + アファールおよびイサス フラン + AIF + + + アルバニア レク (1946-1961) + ALK + + + アルバニア レク + ALL + + + アルバニア レク (Valute) + ALV + + + アルバニア レク (f) + ALX + + + アルメニア ドラム + AMD + + + オランダ領アンティル ギルダー + ANG + + + クワンザ (AOA) + AOA + + + クワンザ (1977-1990) + AOK + + + アンゴラ 新クワンザ (1990-2000) + AON + + + アンゴラ 旧クワンザ (1995-1999) + AOR + + + アンゴラ エスクード + AOS + + + アルゼンチン アゥストラール + ARA + + + アルゼンチン ペソ (MN) + ARM + + + アルゼンチン ペソ (1983-1985) + ARP + + 
+ アルゼンチン ペソ + ARS + + + オーストラリア ドル + AUD + + + オーストラリア ポンド + AUP + + + アルバ ギルダー + AWG + + + アゼルバイジャン マナト + AZM + + + ボスニア ディナール + BAD + + + ボスニア マルク (BAM) + BAM + + + ボスニア 新ディナール + BAN + + + バルバドス ドル + BBD + + + バングラデシュ タカ + BDT + + + ベルギー フラン (BEC) + BEC + + + ベルギー フラン + BEF + + + ベルギー フラン (BEL) + BEL + + + ブルガリア レフ (BGL) + BGL + + + ブルガリア レフ (BGM) + BGM + + + ブルガリア 新レフ + BGN + + + ブルガリア レフ (1879-1952) + BGO + + + ブルガリア レフ (FEC) + BGX + + + バーレーン ディナール + BHD + + + ブルンジ フラン + BIF + + + バミューダ ドル + BMD + + + バミューダ ポンド + BMP + + + ブルネイ ドル + BND + + + ボリビアノ + BOB + + + ボリビア ボリビアノ (1863-1962) + BOL + + + ボリビア ペソ + BOP + + + ボリビア Mvdol + BOV + + + ブラジル 新クルゼイロ (BRB、1967-1986) + BRB + + + ブラジル クルゼイロ (BRC) + BRC + + + ブラジル クルゼイロ (BRE、1990-1993) + BRE + + + ブラジル レアル + BRL + + + ブラジル 新クルゼイロ (BRN) + BRN + + + ブラジル クルゼイロ レアル + BRR + + + ブラジル クルゼイロ (BRZ、1942-1967) + BRZ + + + バハマ ドル + BSD + + + バハマ ポンド + BSP + + + ブータン ニュルタム + BTN + + + ブータン ルピー + BTR + + + ビルマ チャット + BUK + + + ビルマ ルピー + BUR + + + ボツワナ プラ + BWP + + + ベラルーシ ルーブル (BYB、1994-1999) + BYB + + + ベラルーシ ルーブル (BYL、1992-1994) + BYL + + + ベラルーシ ルーブル + BYR + + + ベリーズ ドル + BZD + + + 英領ホンジュラス ドル + BZH + + + カナダ ドル + CAD + + + コンゴ フラン + CDF + + + コンゴ共和国 フラン + CDG + + + コンゴ ザイール + CDL + + + 中央アフリカ共和国 CFA フラン + CFF + + + スイス フラン + CHF + + + クック諸島 ドル + CKD + + + チリ ペソ (CLC) + CLC + + + チリ エスクード + CLE + + + チリ ウニダ デ フォメント + CLF + + + チリ ペソ + CLP + + + カメルーン CFA フラン + CMF + + + 中国人民元 + CNP + + + 中国 米ドル (FEC) + CNX + + + 中国人民元 + CNY + + + コロンビア ペソ (COB) + COB + + + コンゴ CFA フラン + COF + + + コロンビア ペソ + COP + + + コスタリカ コロン + CRC + + + チェコスロバキア コルナ (CSC) + CSC + + + チェコスロバキア コルナ (CSK) + CSK + + + キューバ ペソ + CUP + + + キューバ (FEC) + CUX + + + カーボベルデ エスクード + CVE + + + キュラソー島 ギルダー + CWG + + + キプロス ポンド + CYP + + + チェコ コルナ + CZK + + + 東ドイツ マルク + DDM + + + ドイツ マルク + DEM + + + ドイツ マルク (Sperrmark) + DES + + + ジブチ フラン + DJF + + + デンマーク クローネ + DKK + + + ドミニカ ペソ + DOP + + + アルジェリア ディナール + DZD + + + アルジェリア 新フラン + DZF + + + アルジェリア フラン (DZG) + DZG + + + 
エクアドル スクレ + ECS + + + エクアドル UVC + ECV + + + エストニア クルーン + EEK + + + エジプト ポンド + EGP + + + エリトリア ナクファ + ERN + + + スペイン ペセタ + ESP + + + エチオピア ブル + ETB + + + エチオピア ドル + ETD + + + ユーロ + + + + フィンランド マルカ + FIM + + + フィンランド マルカ (1860-1962) + FIN + + + フィジー諸島 ドル + FJD + + + フィジー諸島 ポンド + FJP + + + フォークランド(マルビナス)諸島 ポンド + FKP + + + フェロー諸島 クローナ + FOK + + + フランス フラン + FRF + + + フランス フラン (Franc Germinal/Franc Poincare) + FRG + + + ガボン CFA フラン + GAF + + + 英国ポンド + £ + + + グルジア クーポン ラリ + GEK + + + グルジア ラリ + GEL + + + ガーナ セディ + GHC + + + ガーナ 旧セディ + GHO + + + ガーナ ポンド + GHP + + + ガーナ 新セディ + GHR + + + ジブラルタル ポンド + GIP + + + グリーンランド クローネ + GLK + + + ガンビア ダラシ + GMD + + + ガンビア ポンド + GMP + + + ギニア フラン + GNF + + + ギニア フラン (1960-1972) + GNI + + + ギニア シリー + GNS + + + グアドループ フラン + GPF + + + 赤道ギニア ギニー + GQE + + + 赤道ギニア フラン + GQF + + + 赤道ギニア ペセタ + GQP + + + ギリシャ ドラクマ + GRD + + + ギリシャ 新ドラクマ + GRN + + + グアテマラ ケツァル + GTQ + + + 仏領ギアナ フラン + GUF + + + ポルトガル領ギニア エスクード + GWE + + + ポルトガル領ギニア ミルレイス + GWM + + + ギニアビサウ ペソ + GWP + + + ガイアナ ドル + GYD + + + 香港ドル + HKD + + + ホンジュラス レンピラ + HNL + + + クロアチア ディナール + HRD + + + クロアチア クーナ + HRK + + + ハイチ グールド + HTG + + + ハンガリー フォリント + HUF + + + 北アイルランド ポンド + IBP + + + インドネシア ニカギルダー + IDG + + + インドネシア ジャワ ルピア + IDJ + + + インドネシア 新ルピア + IDN + + + インドネシア ルピア + IDR + + + アイリッシュ ポンド + IEP + + + イスラエル シェケル + ILL + + + イスラエル ポンド + ILP + + + イスラエル新シェケル + ILS + + + マン島 ポンド + IMP + + + インド ルピー + =0#Rs.|1#Re.|1<Rs. 
+ + + イラク ディナール + IQD + + + イラン リアル + IRR + + + アイスランド クローナ + ISK + + + イタリア リラ + + + + ジャージー島 ポンド + JEP + + + ジャマイカ ドル + JMD + + + ジャマイカ ポンド + JMP + + + ヨルダン ディナール + JOD + + + + + + + ケニア シリング + KES + + + キルギスタン ソム + KGS + + + カンボジア 旧リエル + KHO + + + カンボジア リエル + KHR + + + キリバス ドル + KID + + + コモロ フラン + KMF + + + 北朝鮮 人民ウォン + KPP + + + 北朝鮮 ウォン + KPW + + + 韓国 ホアン + KRH + + + 韓国 旧ウォン + KRO + + + 韓国 ウォン + + + + クウェート ディナール + KWD + + + ケイマン諸島 ドル + KYD + + + カザフスタン ルーブル + KZR + + + カザフスタン テンゲ + KZT + + + ラオス キープ + LAK + + + レバノン ポンド + LBP + + + リヒテンシュタイン フラン + LIF + + + スリランカ ルピー + LKR + + + セイロン ルピー + LNR + + + リベリア ドル + LRD + + + レソト ロティ + LSL + + + リトアニア リタス + LTL + + + リトアニア タロナ + LTT + + + ルクセンブルグ フラン + LUF + + + ラトビア ラッツ + LVL + + + ラトビア ルーブル + LVR + + + リビア リラ (British Military Authority) + LYB + + + リビア ディナール + LYD + + + リピア ポンド + LYP + + + モロッコ ディルハム + MAD + + + モロッコ フラン + MAF + + + モナコ フラン + MCF + + + モナコ フラン (MCG) + MCG + + + モルドバ レイ クーポン + MDC + + + モルドバ レイ + MDL + + + モルドバ レイ クーポン + MDR + + + マダガスカル アリアリ + MGA + + + マダガスカル フラン + MGF + + + マーシャル諸島 ドル + MHD + + + マケドニア デナル + MKD + + + マケドニア デナル (1992-1993) + MKN + + + マリ フラン + MLF + + + ミャンマー チャット + MMK + + + ミャンマー ドル (FEC) + MMX + + + モンゴル トグログ + MNT + + + マカオ パタカ + MOP + + + マルティニーク島 フラン + MQF + + + モーリタニア ウギア + MRO + + + マルタ リラ + MTL + + + マルタ ポンド + MTP + + + モーリシャス ルピー + MUR + + + モルディブ諸島 ルピー + MVP + + + モルディブ諸島 ルフィア + MVR + + + マラウィ クワチャ + MK + + + マラウィ ポンド + MWP + + + メキシコ ペソ + MXN + + + メキシコ ペソ (MXP、1861-1992) + MXP + + + メキシコ UDI + MXV + + + マレーシア リンギット + MYR + + + モザンピーク エスクード + MZE + + + モザンピーク メティカル + MZM + + + ナミビア ドル + NAD + + + ニューカレドニア CFP フラン + NCF + + + ナイジェリア ナイラ + NGN + + + ナイジェリア ポンド + NGP + + + ニューヘブリディーズ諸島 CFP フラン + NHF + + + ニカラグア コルドバ + NIC + + + ニカラグア コルドバ (NIG) + NIG + + + ニカラグア コルドバ オロ + NIO + + + オランダ ギルダー + NLG + + + ノルウェー クローネ + NOK + + + ネパール ルピー + NPR + + + ニュージーランド ドル + NZD + + + ニュージーランド ポンド + NZP + + + オマーン リアル + OMR + + + オマーン リアル (OMS) + OMS + + + パナマ バルボア + PAB + + + 沿ドニエストル 
ルーブル (PDK) + PDK + + + 沿ドニエストル 新ルーブル (PDN) + PDN + + + 沿ドニエストル ルーブル (PDR) + PDR + + + ペルー インティ + PEI + + + ペルー 新ソル + PEN + + + ペルー ソル + PES + + + パプアニューギニア キナ + PGK + + + フィリピン ペソ + PHP + + + パキスタン ルピー + PKR + + + ポーランド ズウォティ + PLN + + + ポーランド 米ドル (FEC) + PLX + + + ポーランド ズウォティ (1950-1995) + PLZ + + + パレスチナ ポンド + PSP + + + ポルトガル コント + PTC + + + ポルトガル エスクード + PTE + + + パラグアイ グアラニ + PYG + + + カタール リアル + QAR + + + レユニオン島 フラン + REF + + + ルーマニア レイ + ROL + + + ルーマニア 新レイ + RON + + + ロシア ルーブル (1991-1998) + RUR + + + ルワンダ フラン + RWF + + + サウジ リヤル + SAR + + + サウジ リヤル (SAS) + SAS + + + ソロモン諸島 ドル + SBD + + + セイシェル ルピー + SCR + + + スーダン ディナール + SDD + + + スーダン ポンド + SDP + + + スウェーデン クローナ + SEK + + + シンガポール ドル + SGD + + + セントヘレナ島 ポンド + SHP + + + スロベニア トラール (SIB) + SIB + + + スロベニア トラール + SIT + + + スロバキア コルナ + SKK + + + シエラレオネ レオン + SLL + + + サンマリノ リラ + SML + + + ソマリア シリング + SOS + + + ソマリランド シリング + SQS + + + スリナム ギルダー + SRG + + + スコットランド ポンド + SSP + + + サントメ・プリンシペ ドブラ + STD + + + サントメ・プリンシペ エスクード + STE + + + ソ連 新ルーブル + SUN + + + ソ連 ルーブル + SUR + + + エルサルバドル コロン + SVC + + + シリア ポンド + SYP + + + スワジランド リランゲニ + SZL + + + タークス・カイコス諸島 クローン + TCC + + + チャド CFA フラン + TDF + + + タイ バーツ + THB + + + タジキスタン ルーブル + TJR + + + タジキスタン ソモニ + TJS + + + トルクメニスタン マナト + TMM + + + チュニジア ディナール + TND + + + トンガ パ・アンガ + TOP + + + トンガ ポンド + TOS + + + ティモール エスクード + TPE + + + ティモール パタカ + TPP + + + トルコ リラ + TRL + + + トリニダードトバゴ ドル + TTD + + + トリニダードトバゴ 旧ドル + TTO + + + ツバル ドル + TVD + + + 新台湾ドル + TWD + + + タンザニア シリング + TZS + + + ウクライナ グリブナ + UAH + + + ウクライナ カルボバネツ + UAK + + + ウガンダ シリング (1966-1987) + UGS + + + ウガンダ シリング + UGX + + + 米ドル + $ + + + 米ドル (翌日) + USN + + + 米ドル (当日) + USS + + + ウルグアイ ペソ (UYF) + UYF + + + ウルグアイ ペソ (1975-1993) + UYP + + + ウルグアイ ペソ + UYU + + + ウズベキスタン スム (UZC) + UZC + + + ウズベキスタン スム + UZS + + + バチカン リラ + VAL + + + 北ベトナム ドン (VDD) + VDD + + + 北ベトナム 新ドン + VDN + + + 北ベトナム ドン (VDP) + VDP + + + ベネズエラ ボリバル + VEB + + + 英領バージン諸島 ドル + VGD + + + ベトナム ドン + đ + + + ベトナム 新ドン + VNN + + + ベトナム共和国 ドン + VNR + + + 
ベトナム自治区 ドン + VNS + + + バヌアツ バツ + VUV + + + 西サモア ポンド + WSP + + + 西サモア タラ + WST + + + アジア ディナール勘定単位 + XAD + + + CFA フラン BEAC + XAF + + + アジア通貨単位 (AMU) + XAM + + + + XAU + + + ヨーロッパ混合単位 (EURCO) + XBA + + + ヨーロッパ通貨単位 (EMU-6) + XBB + + + ヨーロッパ勘定単位 (EUA-9) + XBC + + + ヨーロッパ勘定単位 (EUA-17) + XBD + + + 東カリブ ドル + EC$ + + + CFA 新フラン + XCF + + + 特別引き出し権 (Special Drawing Rights) + XDR + + + CFA フラン (BCEAEC) + XEF + + + ヨーロッパ通貨単位 + XEU + + + フランス フラン (XFO) + XFO + + + フランス UIC フラン + XFU + + + イスラム ディナール + XID + + + フランス 新フラン (XMF) + XMF + + + 仏領アンティル諸島 CFA フラン + XNF + + + CFA フラン BCEAO + XOF + + + CFP フラン + XPF + + + コメコン振替ルーブル + XTR + + + イエメン ディナール + YDD + + + イエメン リアル (YEI) + YEI + + + イエメン リアル + YER + + + ユーゴスラビア ディナール (YUD) + YUD + + + ユーゴスラビア ディナール (YUF) + YUF + + + ユーゴスラビア ディナール (YUG) + YUG + + + ユーゴスラビア スーパー ディナール + YUM + + + ユーゴスラビア 新ディナール (YUN) + YUN + + + ユーゴスラビア ディナール (YUO) + YUO + + + ユーゴスラビア ディナール (YUR) + YUR + + + 南アフリカ ランド (ZAL) + ZAL + + + 南アフリカ ポンド + ZAP + + + 南アフリカ ランド + ZAR + + + ザンビア ポンド + ZMP + + + ザイール 新ザイール + ZRN + + + ザイール ザイール + ZRZ + + + ジンバブエ ドル + ZWD + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/ja_JP.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/ja_JP.xml new file mode 100644 index 0000000..f98e9d6 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/ja_JP.xml @@ -0,0 +1,40 @@ + + + + + + + + + + + + + + #,##0.###;-#,##0.### + + + + + + + #E0 + + + + + + + #,##0% + + + + + + + ¤#,##0;-¤#,##0 + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/ka.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/ka.xml new file mode 100644 index 0000000..fe685ba --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/ka.xml @@ -0,0 +1,232 @@ + + + + + + + + + + + ქართული + + + ანდორა + არაბეთის გაერთიანებული ემირატები + ავღანეთი + ანტიგუა და ბარბუდა + ალბანეთი + სასომხეთი + ანგოლა + არგენტინა + ავსტრია + 
ავსტრალია + აზერბაიჯანი + ბოსნია და ჰერცეგოვინა + ბარბადოსი + ბანგლადეში + ბელგია + ბურკინა-ფასო + ბულგარეთი + ბაჰრეინი + ბურუნდი + ბენინი + ბრუნეი + ბოლივია + ბრაზილია + ბაჰამის კუნძულები + ბუტანი + ბოტსვანა + ბელორუსია + ბელიზი + კანადა + ცენტრალური აფრიკის რესპუბლიკა + კონგო + შვეიცარია + სპილოს ძვლის სანაპირო + ჩილი + კამერუნი + ჩინეთი + კოლუმბია + კოსტა-რიკა + კუბა + კაბო-ვერდე + კვიპროსი + ჩეხეთის რესპუბლიკა + გერმანია + ჯიბუტი + დანია + დომინიკა + დომინიკანის რესპუბლიკა + ალჟირი + ეკვადორი + ესტონეთი + ეგვიპტე + დასავლეთი საჰარა + ერიტრეა + ესპანეთი + ეთიოპია + ფინეთი + ფიჯი + მიკრონეზია + საფრანგეთი + გაბონი + გრენადა + საქართველო + განა + გამბია + გვინეა + ეკვატორული გვინეა + საბერძნეთი + გვატემალა + გვინეა-ბისაუ + გაიანა + ჰონდურასი + ჰორვატია + ჰაიტი + უნგრეთი + ინდონეზია + ირლანდია + ისრაელი + ინდოეთი + ერაყი + ირანი + ისლანდია + იტალია + იამაიკა + იორდანია + იაპონია + კენია + ყირგიზეთი + კამბოჯა + კირიბატი + კომორის კუნძულები + სენტ-კიტსი და ნევისი + ჩრდილოეთ კორეა + სამხრეთ კორეა + კუვეიტი + ყაზახეთი + ლაოსი + ლიბანი + სენტ-ლუსია + ლიხტენშტეინი + შრი-ლანკა + ლიბერია + ლესოტო + ლიტვა + ლუქსემბურგი + ლატვია + ლიბია + მაროკო + მონაკო + მოლდოვა + მადაგასკარი + მარშალის კუნძულები + მაკედონია + მალი + მიანმარი + მონღოლეთი + მავრიტანია + მალტა + მავრიკია + მალდივის კუნძულები + მალავი + მექსიკა + მალაიზია + მოზამბიკი + ნამიბია + ნიგერი + ნიგერია + ნიკარაგუა + ნიდერლანდები + ნორვეგია + ნეპალი + ნაურუ + ახალი ზელანდია + ომანი + პანამა + პერუ + პაპუა-ახალი გვინეა + ფილიპინები + პაკისტანი + პოლონეთი + პორტუგალია + პალაუ + პარაგვაი + კატარი + რუმინეთი + რუსეთი + რუანდა + საუდის არაბეთი + სოლომონის კუნძულები + სეიშელის კუნძულები + სუდანი + შვეცია + სინგაპური + სლოვენია + სლოვაკეთი + სიერა-ლეონე + სან-მარინო + სენეგალი + სომალი + სერბია + სურინამი + საო-ტომე და პრინსიპი + სალვადორი + სირია + სვაზილენდი + ჩადი + ტოგო + ტაილანდი + ტაჯიკეთი + თურქმენეთი + ტუნისი + ტონგა + თურქეთი + ტრინიდადი და ტობაგო + ტუვალუ + ტაივანი + ტანზანია + უკრაინა + უგანდა + ამერიკის 
შეერთებული შტატები + ურუგვაი + უზბაკეთი + ვატიკანი + სენტ-ვინსენტი და გრენადინები + ვენესუელა + ვიეტნამი + ვანუატუ + სამოა + იემენი + სამხრეთ აფრიკა + ზამბია + ზიმბაბვე + + + + [:Geor:] + + + + , +   + ; + % + 0 + # + + + - + E + + + + + + + GEL + Lari + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/ka_GE.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/ka_GE.xml new file mode 100644 index 0000000..9635dd1 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/ka_GE.xml @@ -0,0 +1,10 @@ + + + + + + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/kk.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/kk.xml new file mode 100644 index 0000000..cb83c18 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/kk.xml @@ -0,0 +1,159 @@ + + + + + + + + + + + Қазақ + + + Қазақстан + + + + [а-яыэёіқңүұә] + + + + + + + + қаң. + ақп. + нау. + сәу. + мам. + мау. + шіл. + там. + қыр. + қаз. + қар. + желт. + + + қаңтар + ақпан + наурыз + сәуір + мамыр + маусым + шілде + тамыз + қыркүйек + қазан + қараша + желтоқсан + + + + + + + жс. + дс. + сс. + ср. + бс. + жм. + сһ. + + + жексені + дуйсенбі + сейсенбі + сәренбі + бейсенбі + жұма + сенбі + + + + + + + + + + + + EEEE, d MMMM yyyy 'ж.' + + + + + d MMMM yyyy 'ж.' + + + + + dd.MM.yyyy + + + + + dd.MM.yy + + + + + + + + HH:mm:ss z + + + + + HH:mm:ss z + + + + + HH:mm:ss + + + + + HH:mm + + + + + + + {1} {0} + + + + + + + + + , +   + ; + % + 0 + # + + + - + E + + + + + + + KZT + тңг. 
+ + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/kk_KZ.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/kk_KZ.xml new file mode 100644 index 0000000..a4a556f --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/kk_KZ.xml @@ -0,0 +1,40 @@ + + + + + + + + + + + + + + #,##0.###;-#,##0.### + + + + + + + #E0 + + + + + + + #,##0% + + + + + + + #,##0.00 ¤;-¤ #,##0.00 + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/kl.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/kl.xml new file mode 100644 index 0000000..5b03845 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/kl.xml @@ -0,0 +1,102 @@ + + + + + + + + + + + kalaallisut + + + Kalaallit Nunaat + + + + [a-záéíúâêîôûæåøãĩũĸ] + + + + + + + + jan + feb + mar + apr + maj + jun + jul + aug + sep + okt + nov + dec + + + januari + februari + martsi + aprili + maji + juni + juli + augustusi + septemberi + oktoberi + novemberi + decemberi + + + + + + + sab + ata + mar + pin + sis + tal + arf + + + sabaat + ataasinngorneq + marlunngorneq + pingasunngorneq + sisamanngorneq + tallimanngorneq + arfininngorneq + + + + + + + + + , + . 
+ ; + % + 0 + # + + + - + E + + + + + + + DKK + kr + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/kl_GL.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/kl_GL.xml new file mode 100644 index 0000000..126eb09 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/kl_GL.xml @@ -0,0 +1,99 @@ + + + + + + + + + + + + + + + + + EEEE dd MMMM yyyy + + + + + dd MMMM yyyy + + + + + MMM dd,yy + + + + + dd/MM/yy + + + + + + + + h:mm:ss a + + + + + h:mm:ss a + + + + + h:mm:ss a + + + + + h:mm a + + + + + + + {1} {0} + + + + + + + + + + + #,##0.###;-#,##0.### + + + + + + + #E0 + + + + + + + #,##0% + + + + + + + ¤#,##0.00;¤ -#,##0.00 + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/kn.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/kn.xml new file mode 100644 index 0000000..5a90ad0 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/kn.xml @@ -0,0 +1,97 @@ + + + + + + + + + + + ಕನ್ನಡ + + + ಆಸ್ಟ್ರೆಲಿಯ + ಚೀನ + ಎಸ್ತೊನಿಯ + ಭಾರತ + ಮಾಲ್ಡಿವ ದ್ವೀಪಗಳು + ನೆಪಾಳ + ಸಿಂಗಪುರ + ತುರ್ಕಿ + + + + [:Knda:] + + + + + + + + ಜನವರೀ + ಫೆಬ್ರವರೀ + ಮಾರ್ಚ್ + ಎಪ್ರಿಲ್ + ಮೆ + ಜೂನ್ + ಜುಲೈ + ಆಗಸ್ಟ್ + ಸಪ್ಟೆಂಬರ್ + ಅಕ್ಟೋಬರ್ + ನವೆಂಬರ್ + ಡಿಸೆಂಬರ್ + + + ಜನವರೀ + ಫೆಬ್ರವರೀ + ಮಾರ್ಚ್ + ಎಪ್ರಿಲ್ + ಮೆ + ಜೂನ್ + ಜುಲೈ + ಆಗಸ್ಟ್ + ಸಪ್ಟೆಂಬರ್ + ಅಕ್ಟೋಬರ್ + ನವೆಂಬರ್ + ಡಿಸೆಂಬರ್ + + + + + + + ರ. + ಸೋ. + ಮಂ. + ಬು. + ಗು. + ಶು. + ಶನಿ. 
+ + + ರವಿವಾರ + ಸೋಮವಾರ + ಮಂಗಳವಾರ + ಬುಧವಾರ + ಗುರುವಾರ + ಶುಕ್ರವಾರ + ಶನಿವಾರ + + + + ಪೂರ್ವಾಹ್ನ + ಅಪರಾಹ್ನ + + + + + + + INR + रु + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/kn_IN.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/kn_IN.xml new file mode 100644 index 0000000..8bf3869 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/kn_IN.xml @@ -0,0 +1,99 @@ + + + + + + + + + + + + + + + + + EEEE d MMMM yyyy + + + + + d MMMM yyyy + + + + + dd-MM-yyyy + + + + + d-M-yy + + + + + + + + hh:mm:ss a z + + + + + hh:mm:ss a z + + + + + hh:mm:ss a + + + + + hh:mm a + + + + + + + {1} {0} + + + + + + + + + + + ##,##,##0.###;-##,##,##0.### + + + + + + + #E0 + + + + + + + ##,##,##0% + + + + + + + ¤ ##,##,##0.00;-¤ ##,##,##0.00 + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/ko.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/ko.xml new file mode 100644 index 0000000..846440d --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/ko.xml @@ -0,0 +1,2399 @@ + + + + + + + + + + + 아파르어 + 압카즈어 + 남아공 공용어 + 아칸어 + 암하라어 + 아라곤어 + 아랍어 + 아샘어 + 아바릭어 + 아이마라어 + 아제르바이잔어 + 바슈키르어 + 벨로루시어 + 불가리아어 + 비하르어 + 비슬라마어 + 밤바라어 + 벵골어 + 티베트어 + 브르타뉴어 + 보스니아어 + 브린어 + 카탈로니아어 + 차모로어 + 체로키어 + 코르시카어 + 크리어 + 체코어 + 교회슬라브어 + 추바시어 + 웨일스어 + 덴마크어 + 독일어 + 디베히어 + 부탄어 + 에웨어 + 그리스어 + 영어 + 에스페란토어 + 스페인어 + 에스토니아어 + 바스크어 + 이란어 + 풀라어 + 핀란드어 + 피지어 + 페로스어 + 프랑스어 + 프리지아어 + 아일랜드어 + 스코갤릭어 + 게이즈어 + 갈리시아어 + 구아라니어 + 구자라트어 + 맹크스어 + 하우자어 + 하와이어 + 히브리어 + 힌디어 + 히리 모투어 + 크로아티아어 + 아이티어 + 헝가리어 + 아르메니아어 + 인터링거 + 인도네시아어 + 인터링게어 + 이그보어 + 시츄안 이어 + 이누피아크어 + 이도어 + 아이슬란드어 + 이탈리아어 + 이눅티투트어 + 일본어 + 자바어 + 그루지야어 + 콩고어 + 키쿠유어 + 쿠안야마어 + 카자흐어 + 그린랜드어 + 캄보디아어 + 카나다어 + 한국어 + 코카니어 + 칸누리어 + 카슈미르어 + 크르드어 + 코미어 + 콘월어 + 키르기스어 + 라틴어 + 룩셈부르크어 + 간다어 + 림버거어 + 링갈라어 + 라오어 + 리투아니아어 + 루바-카탄가어 + 라트비아어 + 마다가스카르어 + 마셜제도어 + 마오리어 + 마케도니아어 + 말라얄람어 + 몽골어 + 몰다비아어 + 마라티어 + 말레이어 + 몰타어 + 버마어 + 나우루어 + 보크말 노르웨이어 + 은데벨레어, 북부 + 
네팔어 + 느동가어 + 네덜란드어 + 뉘노르스크 노르웨이어 + 노르웨이어 + 은데벨레어, 남부 + 나바호어 + 니안자어; 치츄어; 츄어 + 옥시트어 + 오지브웨이어 + 오로모어 (아판) + 오리야어 + 오세트어 + 펀잡어 + 팔리어 + 폴란드어 + 파시토어 (푸시토) + 포르투칼어 + 케추아어 + 레토로만어 + 반투어(부룬디) + 루마니아어 + 러시아어 + 반투어(루완다) + 산스크리트어 + 사르디니아어 + 신디어 + 북부 사미어 + 산고어 + 세르보크로아티아어 + 스리랑카어 + 시다모어 + 슬로바키아어 + 슬로베니아어 + 사모아어 + 쇼나어 + 소말리아어 + 알바니아어 + 세르비아어 + 시스와티어 + 세소토어 + 순단어 + 스웨덴어 + 스와힐리어 + 시리아어 + 타밀어 + 텔루구어 + 타지키스탄어 + 태국어 + 티그리냐어 + 티그레어 + 투르크멘어 + 타갈로그어 + 세츠와나어 + 통가어 + 터키어 + 통가어 + 타타르어 + 트위어 + 타히티어 + 위구르어 + 우크라이나어 + 우르두어 + 우즈베크어 + 벤다어 + 베트남어 + 볼라퓌크어 + 왈론어 + 올로프어 + 반투어(남아프리카) + 이디시어 + 요루바어 + 주앙어 + 중국어 + 줄루어 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 안도라 + 아랍에미리트 + 아프가니스탄 + 앤티가 바부다 + 안길라 + 알바니아 + 아르메니아 + 네덜란드령 안틸레스 + 앙골라 + 남극 대륙 + 아르헨티나 + 아메리칸 사모아 + 오스트리아 + 오스트레일리아 + 아루바 + 아제르바이잔 + 보스니아 헤르체고비나 + 바베이도스 + 방글라데시 + 벨기에 + 부르키나파소 + 불가리아 + 바레인 + 부룬디 + 베넹 + 버뮤다 + 브루나이 + 볼리비아 + 브라질 + 바하마 + 부탄 + 부베 + 보츠와나 + 벨라루스 + 벨리즈 + 캐나다 + 코코스제도 + 콩고민주공화국 + 중앙 아프리카 + 콩고 + 스위스 + 코트디부와르 + 쿡제도 + 칠레 + 카메룬 + 중국 + 콜롬비아 + 코스타리카 + 쿠바 + 까뽀베르데 + 크리스마스섬 + 사이프러스 + 체코 + 독일 + 지부티 + 덴마크 + 도미니카 + 도미니카 공화국 + 알제리 + 에쿠아도르 + 에스토니아 + 이집트 + 서사하라 + 에리트리아 + 스페인 + 이디오피아 + 핀란드 + 피지 + 포클랜드제도 + 마이크로네시아 + 페로제도 + 프랑스 + 가봉 + 영국 + 그레나다 + 그루지야 + 프랑스령 기아나 + 가나 + 지브롤터 + 그린란드 + 감비아 + 기니 + 과달로프 + 적도 기니 + 그리스 + 사우스조지아-사우스샌드위치제도 + 과테말라 + + 기네비쏘 + 가이아나 + 홍콩, 중국 특별행정구 + 허드섬-맥도널드제도 + 온두라스 + 크로아티아 + 하이티 + 헝가리 + 인도네시아 + 아일랜드 + 이스라엘 + 인도 + 영국령인도양식민지 + 이라크 + 이란 + 아이슬란드 + 이탈리아 + 자메이카 + 요르단 + 일본 + 케냐 + 키르기스스탄 + 캄보디아 + 키리바시 + 코모르 + 세인트크리스토퍼 네비스 + 조선 민주주의 인민 공화국 + 대한민국 + 쿠웨이트 + 케이맨제도 + 카자흐스탄 + 라오스 + 레바논 + 세인트루시아 + 리히텐슈타인 + 스리랑카 + 라이베리아 + 레소토 + 리투아니아 + 룩셈부르크 + 라트비아 + 리비아 + 모로코 + 모나코 + 몰도바 + 마다가스카르 + 마샬 군도 + 마케도니아어 + 말리 + 미얀마 + 몽골 + 마카오, 중국 특별행정구 + 북마리아나제도 + 말티니크 + 모리타니 + 몬트세라트 + 몰타 + 모리셔스 + 몰디브 + 말라위 + 멕시코 + 말레이지아 + 모잠비크 + 나미비아 + 뉴 칼레도니아 + 니제르 + 노퍽섬 + 나이지리아 + 니카라과 + 네덜란드 + 노르웨이 + 네팔 + 나우루 + 니우에 + 뉴질랜드 + 오만 + 파나마 + 페루 + 프랑스령 폴리네시아 + 파푸아뉴기니 + 필리핀 + 파키스탄 + 폴란드 + 세인트피에르-미케롱 + 핏케언섬 + 푸에르토리코 + 
팔레스타인 지구 + 포르트칼 + 팔라우 + 파라과이 + 카타르 + 리유니온 + 루마니아 + 러시아 + 르완다 + 사우디아라비아 + 솔로몬 + 쉐이쉘 + 수단 + 스웨덴 + 싱가포르 + 세인트헬레나 + 슬로베니아 + 스발바르제도-얀마웬섬 + 슬로바키아 + 시에라리온 + 산마리노 + 세네갈 + 소말리아 + 세르비아 + 수리남 + 상투메 프린시페 + 엘살바도르 + 시리아 + 스와질랜드 + 터크스케이커스제도 + 차드 + 프랑스 남부 지방 + 토고 + 태국 + 타지키스탄 + 토켈라우 + 동티모르 + 투르크메니스탄 + 튀니지 + 통가 + 터키 + 트리니다드 토바고 + 투발루 + 대만 + 탄자니아 + 우크라이나 + 우간다 + 미국령 해외 제도 + 미국 + 우루과이 + 우즈베키스탄 + 바티칸 + 세인트빈센트그레나딘 + 베네수엘라 + 영국령 버진 아일랜드 + 미국령 버진 아일랜드 + 베트남 + 바누아투 + 왈리스-푸투나 제도 + 사모아 + 예멘 + 마요티 + 유고슬라비아 + 남아프리카 + 잠비아 + 짐바브웨 + + + 개정 + + + 달력 + 조합 + 통화 + + + 불교력 + 중국력 + 태양력 + 히브리력 + 이슬람력 + 이슬람 상용력 + 일본력 + 순서 + 전화번호부순 + 병음순 + 자획순 + 전통 역법 + + + + [가-힣] + + + GanjkHmsSEDFwWxhKzAeugXZ + + + + + + 1월 + 2월 + 3월 + 4월 + 5월 + 6월 + 7월 + 8월 + 9월 + 10월 + 11월 + 12월 + + + 1월 + 2월 + 3월 + 4월 + 5월 + 6월 + 7월 + 8월 + 9월 + 10월 + 11월 + 12월 + + + 1월 + 2월 + 3월 + 4월 + 5월 + 6월 + 7월 + 8월 + 9월 + 10월 + 11월 + 12월 + + + + + + + + + + + + + + + + + + + + + + + + + 일요일 + 월요일 + 화요일 + 수요일 + 목요일 + 금요일 + 토요일 + + + + 오전 + 오후 + + + 기원전 + 서기 + + + + + + + yyyy'년' M'월' d'일' EEEE + + + + + yyyy'년' M'월' d'일' EE + + + + + yyyy-MM-dd + + + + + yy-MM-dd + + + + + + + + a hh'시' mm'분' ss'초' z + + + + + a hh'시' mm'분' ss'초' + + + + + a hh'시' mm'분' + + + + + a hh'시' mm'분' + + + + + + + {1} {0} + + + + + + + + + 태평양 표준시 + 태평양 기준시 + + + PST + PDT + + 로스앤젤레스 + + + + 태평양 표준시 + 태평양 기준시 + + + PST + PDT + + 로스앤젤레스 + + + + 산악 표준시 + 산악 기준시 + + + MST + MDT + + 덴버 + + + + 산악 표준시 + 산악 기준시 + + + MST + MDT + + 덴버 + + + + 산악 표준시 + 산악 표준시 + + + MST + MST + + 피닉스 + + + + 산악 표준시 + 산악 표준시 + + + MST + MST + + 피닉스 + + + + 중부 표준시 + 중부 기준시 + + + CST + CDT + + 시카고 + + + + 중부 표준시 + 중부 기준시 + + + CST + CDT + + 시카고 + + + + 동부 표준시 + 동부 기준시 + + + EST + EDT + + 뉴욕 + + + + 동부 표준시 + 동부 기준시 + + + EST + EDT + + 뉴욕 + + + + 동부 표준시 + 동부 표준시 + + + EST + EST + + 인디애나폴리스 + + + + 동부 표준시 + 동부 표준시 + + + EST + EST + + 인디애나폴리스 + + + + 하와이 표준시 + 하와이 표준시 + + + HST + HST + + 호놀룰루 + + + + 하와이 표준시 + 하와이 표준시 + + + HST + HST + + 호놀룰루 + + + + 알래스카 표준시 + 알래스카 기준시 + + + AST + ADT + + 앵커리지 + + + 
+ 알래스카 표준시 + 알래스카 기준시 + + + AST + ADT + + 앵커리지 + + + + 대서양 표준시 + 대서양 기준시 + + + AST + ADT + + 핼리팩스 + + + + 뉴펀들랜드 표준시 + 뉴펀들랜드 기준시 + + + CNT + CDT + + St. Johns + + + + 뉴펀들랜드 표준시 + 뉴펀들랜드 기준시 + + + CNT + CDT + + St. Johns + + + + 중부유럽 표준시 + 중부유럽 기준시 + + + CET + CEST + + 파리 + + + + 중부유럽 표준시 + 중부유럽 기준시 + + + CET + CEST + + 파리 + + + + 그리니치 표준시 + 그리니치 표준시 + + + GMT + GMT + + 런던 + + + + 그리니치 표준시 + 그리니치 표준시 + + + GMT + GMT + + 카사블랑카 + + + + 이스라엘 표준시 + 이스라엘 기준시 + + + IST + IDT + + 예루살렘 + + + + 일본 표준시 + 일본 표준시 + + + JST + JST + + 도쿄 + + + + 일본 표준시 + 일본 표준시 + + + JST + JST + + 도쿄 + + + + 한국표준시 + 한국표준시 + + + KST + KST + + + + + 동부유럽 표준시 + 동부유럽 기준시 + + + EET + EEST + + 부쿠레슈티 + + + + 중국 표준시 + 중국 표준시 + + + CTT + CDT + + 상하이 + + + + 중국 표준시 + 중국 표준시 + + + CTT + CDT + + 상하이 + + + + + + + 안도라 디네르 + ADD + + + 안도라 페세타 + ADP + + + 아랍에미레이트 디나르 + AED + + + 아프가니 (1927-2002) + AFA + + + 아프가니 + AFN + + + 아파르와 이사스의 프랑 + AIF + + + 알바니아 레크 (1946-1961) + ALK + + + 알바니아 레크 + ALL + + + 알바니아 레크 발루트 + ALV + + + 알바니아 달러 태환권 + ALX + + + 아르메니아 드람 + AMD + + + 네델란드 안틸레스 굴덴 + ANG + + + 앙골라 콴자 + AOA + + + 앙골라 콴자 (1977-1990) + AOK + + + 앙골라 신귄 콴자 (1990-2000) + AON + + + 앙골라 콴자 Reajustado (1995-1999) + AOR + + + 앙골라 에스쿠도 + AOS + + + 아르헨티나 오스트랄 + ARA + + + 아르헨티나 페소 모네다 국영 + ARM + + + 아르헨티나 페소 (1983-1985) + ARP + + + 아르헨티나 페소 + ARS + + + 호주 실링 + ATS + + + 호주 달러 + AUD + + + 호주 파운드 + AUP + + + 아루바 길더 + AWG + + + 아제르바이젠 마나트 + AZM + + + 보스니아-헤르체고비나 디나르 + BAD + + + 보스니아-헤르체고비나 태환 마르크 + BAM + + + 보스니아-헤르체고비나 신 디나르 + BAN + + + 바베이도스 달러 + BBD + + + 방글라데시 타카 + BDT + + + 벨기에 프랑 (태환) + BEC + + + 벨기에 프랑 + BEF + + + 벨기에 프랑 (금융) + BEL + + + 불가리아 동전 렛 + BGL + + + 불가리아 사회주의 렛 + BGM + + + 불가리아 신권 렛 + BGN + + + 불가리아 렛 (1879-1952) + BGO + + + 불가리아 렛 태환권 + BGX + + + 바레인 디나르 + BHD + + + 부룬디 프랑 + BIF + + + 버뮤다 달러 + BMD + + + 버뮤다 파운드 + BMP + + + 부루나이 달러 + BND + + + 볼리비아노 + BOB + + + 볼리비아노 (1863-1962) + BOL + + + 볼리비아노 페소 + BOP + + + 볼리비아노 크루제이루 노보 (1967-1986) + BRB + + + 브라질 크루자두 + BRC + + + 브라질 크루제이루 (1990-1993) + BRE + + + 브라질 레알 + 
BRL + + + 브라질 크루자두 노보 + BRN + + + 브라질 크루제이루 + BRR + + + 브라질 크루제이루 (1942-1967) + BRZ + + + 바하마 달러 + BSD + + + 바하마 달러 파운드 + BSP + + + 부탄 눌투눔 + BTN + + + 부탄 루피 + BTR + + + 버마 차트 + BUK + + + 버마 루피 + BUR + + + 보츠와나 폴라 + BWP + + + 벨라루스 신권 루블 (1994-1999) + BYB + + + 벨라루스 루블 (1992-1994) + BYL + + + 벨라루스 루블 + BYR + + + 벨리즈 달러 + BZD + + + 영국령 혼두라스 달러 + BZH + + + 캐나다 달러 + CAD + + + 콩고 프랑 콩골라스 + CDF + + + 콩고 공화국 프랑 + CDG + + + 콩고 자이르 + CDL + + + 스위스 프랑달러 + CHF + + + 쿡 제도 달러 + CKD + + + 칠레 콘도르 + CLC + + + 칠레 에스쿠도 + CLE + + + 칠레 페소 + CLP + + + 중국 위안 인민폐 + CNY + + + 콜롬비아 지폐 페소 + COB + + + 콩고 CFA 프랑 + COF + + + 콜롬비아 페소 + COP + + + 코스타리카 콜론 + CRC + + + 체코슬로바키아 코루나 + CSC + + + 체코슬로바키아 동전 코루나 + CSK + + + 쿠바 페소 + CUP + + + 쿠바 태환권 마르크 + CUX + + + 카보베르데 에스쿠도 + CVE + + + 쿠라카오 길더 + CWG + + + 싸이프러스 파운드 + CYP + + + 체코 공화국 코루나 + CZK + + + 동독 오스트마르크 + DDM + + + 독일 마르크 + DEM + + + 독일 스퍼마르크 + DES + + + 지부티 프랑 + DJF + + + 덴마크 크로네 + DKK + + + 도미니카 페소 + DOP + + + 알제리 디나르 + DZD + + + 알제리 신권 프랑 + DZF + + + 알제리 프랑 제르미날 + DZG + + + 에쿠아도르 수크레 + ECS + + + 에스토니아 크룬 + EEK + + + 이집트 파운드 + EGP + + + 에리트리아 나크파 + ERN + + + 스페인 페세타 + ESP + + + 이디오피아 비르 + ETB + + + 이디오피아 달러 + ETD + + + 유로화 + + + + 핀란드 마르카 + FIM + + + 핀란드 마르카 (1860-1962) + FIN + + + 피지 달러 + FJD + + + 피지 파운드 + FJP + + + 포클랜드제도 파운드 + FKP + + + 페로제도 크로너 + FOK + + + 프랑스 프랑 + FRF + + + 프랑스 프랑 제르미날/프랑 포앙카레 + FRG + + + 가봉 CFA 프랑 + GAF + + + 영국령 파운드 스털링 + £ + + + 그루지야 지폐 라리트 + GEK + + + 그루지야 라리 + GEL + + + 가나 시디 + GHC + + + 가나 구권 시디 + GHO + + + 가나 파운드 + GHP + + + 가나 재평가 시디 + GHR + + + 지브롤터 파운드 + GIP + + + 그린란드 크로네 + GLK + + + 감비아 달라시 + GMD + + + 감비아 파운드 + GMP + + + 기니 프랑 + GNF + + + 기니 프랑 (1960-1972) + GNI + + + 기니 시리 + GNS + + + 과달로프 프랑 + GPF + + + 적도 기니 프랑 + GQF + + + 적도 기니 페세타 기니아나 + GQP + + + 그리스 드라크마 + GRD + + + 그리스 신권 드라크마 + GRN + + + 과테말라 케트살 + GTQ + + + 프랑스령 가이아나 프랑 기아나 + GUF + + + 포르투갈령 기니 에스쿠도 + GWE + + + 포르투갈령 기니 밀 레이스 + GWM + + + 기네비쏘 페소 + GWP + + + 가이아나 달러 + GYD + + + 홍콩 달러 + HKD + + + 온두라스 렘피라 + HNL + + + 크로아티아 디나르 + HRD + + + 크로아티아 쿠나 + 
HRK + + + 하이티 구르드 + HTG + + + 헝가리 포린트 + HUF + + + 북부 아일랜드 파운드 + IBP + + + 인도네시아 니카 길더 + IDG + + + 인도네시아 자바 루피아 + IDJ + + + 인도네시아 신권 루피아 + IDN + + + 인도네시아 루피아 + IDR + + + 아일랜드 파운드 + IR£ + + + 이스라엘 세켈 + ILL + + + 이스라엘 파운드 + ILP + + + 이스라엘 신권 세켈 + ILS + + + 맨도 파운드 스털링 + IMP + + + 인도 루피 + =0#Rs.|1#Re.|1<Rs. + + + 이라크 디나르 + IQD + + + 이란 리얄 + IRR + + + 아이슬란드 크로나 + ISK + + + 이탈리아 리라 + ITL + + + 저지 파운드 스털링 + JEP + + + 자메이카 달러 + JMD + + + 자메이카 파운드 + JMP + + + 요르단 디나르 + JOD + + + 일본 엔화 + + + + 케냐 실링 + KES + + + 키르기스스탄 솜 + KGS + + + 캄보디아 구권 리얄 + KHO + + + 캄보디아 리얄 + KHR + + + 키리바시 달러 + KID + + + 코모르 프랑 + KMF + + + 조선 민주주의 인민 공화국 원 + KPP + + + 조선 민주주의 인민 공화국 원 + KPW + + + 대한민국 환 + KRH + + + 대한민국 구권 원 + KRO + + + 대한민국 원 + + + + 쿠웨이트 디나르 + KWD + + + 케이맨 제도 달러 + KYD + + + 카자흐스탄 루블 + KZR + + + 카자흐스탄 텐게 + KZT + + + 라오스 키프 + LAK + + + 레바논 파운드 + LBP + + + 리히텐슈타인 프랑 + LIF + + + 스리랑카 루피 + LKR + + + 실론 루피 + LNR + + + 라이베리아 달러 + LRD + + + 레소토 로티 + LSL + + + 리투아니아 리타 + LTL + + + 룩셈부르크 타로나 + LTT + + + 룩셈부르크 프랑 + LUF + + + 라트비아 라트 + LVL + + + 라트비아 루블 + LVR + + + 리비아 영국 군사령 리라 + LYB + + + 리비아 디나르 + LYD + + + 리비아 파운드 + LYP + + + 모로코 디렘 + MAD + + + 모로코 프랑 + MAF + + + 모나코 프랑 누보 + MCF + + + 모나코 프랑 제르미날 + MCG + + + 몰도바 레이 지폐 + MDC + + + 몰도바 레이 + MDL + + + 몰도바 루블 지폐 + MDR + + + 마다가스카르 아리아리 + MGA + + + 마다가스카르 프랑 + MGF + + + 마샬 군도 달러 + MHD + + + 마케도니아 디나르 + MKD + + + 마케도니아 디나르 (1992-1993) + MKN + + + 말리 프랑 + MLF + + + 미얀마 키얏 + MMK + + + 미얀마 달러 태환권 + MMX + + + 몽골 투그릭 + MNT + + + 마카오 파타카 + MOP + + + 말티니크 프랑 + MQF + + + 모리타니 우기야 + UM + + + 몰타 리라 + MTL + + + 몰타 파운드 + MTP + + + 모리셔스 루피 + MUR + + + 몰디브 제도 루피 + MVP + + + 몰디브 제도 루피아 + MVR + + + 말라위 콰쳐 + MWK + + + 말라위 파운드 + MWP + + + 멕시코 페소 + MXN + + + 멕시코 실버 페소 (1861-1992) + MXP + + + 말레이지아 링기트 + MYR + + + 모잠비크 에스쿠도 + MZE + + + 모잠비크 메티칼 + MZM + + + 나미비아 달러 + NAD + + + 뉴 칼레도니아 프랑 제르미날 + NCF + + + 니제르 나이라 + NGN + + + 니제르 파운드 + NGP + + + 니카라과 코르도바 + NIC + + + 니카라과 코르도바 오로 + NIO + + + 네델란드 길더 + NLG + + + 노르웨이 크로네 + NOK + + + 네팔 루피 + NPR + + + 뉴질랜드 달러 + NZD + + + 
뉴질랜드 파운드 + NZP + + + 오만 리얄 + OMR + + + 오만 리얄 사이디 + OMS + + + 파나마 발보아 + PAB + + + 페루 인티 + PEI + + + 페루 솔 누에보 + PEN + + + 페루 솔 + PES + + + 파푸아뉴기니 키나 + PGK + + + 필리핀 페소 + PHP + + + 파키스탄 루피 + PKR + + + 폴란드 즐로티 + PLN + + + 폴란드 즐로티 (1950-1995) + PLZ + + + 팔레스타인 파운드 + PSP + + + 포르투갈 콘토 + PTC + + + 포르투갈 에스쿠도 + PTE + + + 파라과이 과라니 + PYG + + + 카타르 리얄 + QAR + + + 리유니온 프랑 + REF + + + 루마니아 레이 + ROL + + + 루마니아 신권 레이 + RON + + + 러시아 루블 + RUB + + + 러시아 루블 (1991-1998) + RUR + + + 르완다 프랑 + RWF + + + 사우디아라비아 리얄 + SAR + + + 사우디아라비아 자치령 리얄 + SAS + + + 솔로몬 제도 달러 + SBD + + + 세이쉴 루피 + SCR + + + 수단 디나르 + SDD + + + 수단 파운드 + SDP + + + 스웨덴 크로나 + SEK + + + 싱가폴 달러 + SGD + + + 세인트헬레나 파운드 + SHP + + + 슬로베니아 톨라르 본스 + SIB + + + 슬로베니아 톨라르 + SIT + + + 슬로바키아 코루나 + SKK + + + 시에라리온 리온 + SLL + + + 산마리노 리라 + SML + + + 소말리아 실링 + SOS + + + 소말리랜드 실링 + SQS + + + 수리남 길더 + SRG + + + 스코틀랜드 파운드 + SSP + + + 상투메 프린시페 도브라 + STD + + + 상투메 프린시페 에스쿠도 + STE + + + 소련 신권 루블 + SUN + + + 소련 루블 + SUR + + + 엘살바도르 콜론 + SVC + + + 시리아 파운드 + SYP + + + 스와질란드 릴랑게니 + SZL + + + 터크스케이커스 크라운 + TCC + + + 태국 바트 + THB + + + 타지키스탄 루블 + TJR + + + 타지키스탄 소모니 + TJS + + + 투르크메니스탄 마나트 + TMM + + + 튀니지 디나르 + TND + + + 통가 파운드 스털링 + TOS + + + 티모르 에스쿠도 + TPE + + + 티모르 파타카 + TPP + + + 터기 리라 + TRL + + + 트리니다드 토바고 달러 + TTD + + + 트리니다드 토바고 구권 달러 + TTO + + + 투발루 달러 + TVD + + + 대만 신권 달러 + TWD + + + 탄자니아 실링 + TZS + + + 우크라이나 그리브나 + UAH + + + 우간다 실링 (1966-1987) + UGS + + + 우간다 실링 + UGX + + + 미국 달러 + US$ + + + 우루과이 페소 푸에르떼 + UYF + + + 우루과이 페소 (1975-1993) + UYP + + + 우루과이 페소 우루과요 + UYU + + + 우즈베키스탄 숨 쿠폰 + UZC + + + 우즈베키스탄 숨 + UZS + + + 바티칸 리라 + VAL + + + 북 베트남 피아스타 동 베트 + VDD + + + 북 베트남 신권 동 + VDN + + + 북 베트남 베트민 피아스타 동 베트 + VDP + + + 베네주엘라 볼리바르 + VEB + + + 영국령 버진 아일랜드 달러 + VGD + + + 베트남 동 + VND + + + 베트남 신권 동 + VNN + + + 베트남 공화국 동 + VNR + + + 베트남 국영 동 + VNS + + + 바누아투 바투 + VUV + + + 서 사모아 파운드 + WSP + + + 서 사모아 탈라 + WST + + + 아시아 디나르 계산 단위 + XAD + + + 아시아 기금 연합 통화 단위 + XAM + + + + XAU + + + 동카리브 달러 + XCD + + + 특별인출권 + XDR + + + 유럽 환율 단위 + XEU + + + 프랑스 Gold 프랑 
+ XFO + + + 프랑스 UIC-프랑 + XFU + + + 이슬람 디나르 + XID + + + 프랑스 메트로폴리탄 누보 프랑 + XMF + + + 예멘 디나르 + YDD + + + 예멘 리알 + YER + + + 유고슬라비아 동전 디나르 + YUD + + + 유고슬라비아 연합 디나르 + YUF + + + 유고슬라비아 1994 디나르 + YUG + + + 유고슬라비아 노비 디나르 + YUM + + + 유고슬라비아 전환 디나르 + YUN + + + 유고슬라비아 10월 디나르 + YUO + + + 유고슬라비아 개량 디나르 + YUR + + + 남아프리카 랜드 (금융) + ZAL + + + 남아프리카 파운드 + ZAP + + + 남아프리카 랜드 + ZAR + + + 쟘비아 콰쳐 + ZMK + + + 쟘비아 파운드 + ZMP + + + 자이르 신권 자이르 + ZRN + + + 자이르 자이르 + ZRZ + + + 짐비브웨 달러 + ZWD + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/ko_KR.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/ko_KR.xml new file mode 100644 index 0000000..9bd4456 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/ko_KR.xml @@ -0,0 +1,99 @@ + + + + + + + + + + + + + + + + + yyyy'년' M'월' d'일' EEEE + + + + + yyyy'년' M'월' d'일' + + + + + yyyy. MM. dd + + + + + yy. MM. dd + + + + + + + + a hh'시' mm'분' ss'초' z + + + + + a hh'시' mm'분' ss'초' + + + + + a h:mm:ss + + + + + a h:mm + + + + + + + {1} {0} + + + + + + + + + + + #,##0.###;-#,##0.### + + + + + + + #E0 + + + + + + + #,##0% + + + + + + + ¤#,##0.00;-¤#,##0.00 + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/kok.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/kok.xml new file mode 100644 index 0000000..3e5ab97 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/kok.xml @@ -0,0 +1,247 @@ + + + + + + + + + + + अफार + अबखेज़ियन + अफ्रिकान्स + अमहारिक् + अरेबिक् + असामी + ऐमरा + अज़रबैजानी + बष्किर + बैलोरुसियन् + बल्गेरियन + बीहारी + बिसलमा + बंगाली + तिबेतियन + ब्रेटन + कटलान + कोर्शियन + ज़ेक् + वेळ्ष् + डानिष + जर्मन + भूटानी + ग्रीक् + आंग्ल + इस्परान्टो + स्पानिष + इस्टोनियन् + बास्क + पर्षियन् + फिन्निष् + फिजी + फेरोस् + फ्रेन्च + फ्रिशियन् + ऐरिष + स्काटस् गेलिक् + गेलीशियन + गौरानी + गुजराती + हौसा + हेब्रु + हिन्दी + क्रोयेषियन् + हंगेरियन् + आर्मीनियन् + इन्टरलिंग्वा + इन्डोनेषियन + इन्टरलिंग् 
+ इनूपेयाक् + आईस्लान्डिक + इटालियन + इन्युकट्ट + जापनीस् + जावनीस् + जार्जियन् + कज़ख् + ग्रीनलान्डिक + कंबोडियन + कन्नडा + कोरियन् + कोंकणी + कश्मीरी + कुर्दिष + किर्गिज़ + लाटिन + लिंगाला + लाओतियन् + लिथुआनियन् + लाट्वियन् (लेट्टिष्) + मलागसी + माओरी + मसीडोनियन् + मळियाळम + मंगोलियन् + मोल्डावियन् + मराठी + मलय + मालतीस् + बर्मीज़् + नौरो + नेपाळी + डच् + नोर्वेजियन + ओसिटान् + ओरोमो (अफान) + ओरिया + पंजाबी + पोलिष + पाष्टो (पुष्टो) + पोर्चुगीज़् + क्वेच्वा + रहटो-रोमान्स् + किरुन्दी + रोमानियन् + रष्यन् + किन्यार्वान्डा + संस्कृत + सिंधी + सांग्रो + सेर्बो-क्रोयेषियन् + सिन्हलीस् + स्लोवाक + स्लोवेनियन् + समोन + शोना + सोमाळी + आल्बेनियन् + सेर्बियन् + सिस्वाती + सेसोथो + सुंदनीस + स्वीदीष + स्वाहिली + तमिळ + तेलुगू + तजिक + थाई + तिग्रिन्या + तुर्कमन + तगालोग + सेत्स्वाना + तोंगा + तुर्किष + त्सोगा + तटार + त्वि + उधूर + युक्रेनियन् + उर्दू + उज़बेक + वियत्नामीज़ + ओलापुक + उलोफ़ + झ़ौसा + इद्दिष् + यूरुबा + झ्हुन्ग + चीनीस् + जुलू + + + भारत + + + + [[:Deva:]‌‍] + + + + + + + + जानेवारी + फेबृवारी + मार्च + एप्रिल + मे + जून + जुलै + ओगस्ट + सेप्टेंबर + ओक्टोबर + नोव्हेंबर + डिसेंबर + + + जानेवारी + फेब्रुवारी + मार्च + एप्रिल + मे + जून + जुलै + ओगस्ट + सेप्टेंबर + ओक्टोबर + नोव्हेंबर + डिसेंबर + + + + + + + रवि + सोम + मंगळ + बुध + गुरु + शुक्र + शनि + + + आदित्यवार + सोमवार + मंगळार + बुधवार + गुरुवार + शुक्रवार + शनिवार + + + + म.पू. + म.नं. 
+ + + क्रिस्तपूर्व + क्रिस्तशखा + + + + + + + + भारतीय समय + भारतीय समय + + + IST + IST + + + + + + + + INR + रु + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/kok_IN.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/kok_IN.xml new file mode 100644 index 0000000..fc79dbf --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/kok_IN.xml @@ -0,0 +1,99 @@ + + + + + + + + + + + + + + + + + EEEE d MMMM yyyy + + + + + d MMMM yyyy + + + + + dd-MM-yyyy + + + + + d-M-yy + + + + + + + + h:mm:ss a z + + + + + h:mm:ss a z + + + + + h:mm:ss a + + + + + h:mm a + + + + + + + {1} {0} + + + + + + + + + + + ##,##,##0.###;-##,##,##0.### + + + + + + + #E0 + + + + + + + ##,##,##0% + + + + + + + ¤ ##,##,##0.00;-¤ ##,##,##0.00 + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/kw.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/kw.xml new file mode 100644 index 0000000..0858971 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/kw.xml @@ -0,0 +1,88 @@ + + + + + + + + + + + kernewek + + + Rywvaneth Unys + + + + [a-z] + + + + + + + + Gen + Whe + Mer + Ebr + Me + Efn + Gor + Est + Gwn + Hed + Du + Kev + + + Mys Genver + Mys Whevrel + Mys Merth + Mys Ebrel + Mys Me + Mys Efan + Mys Gortheren + Mye Est + Mys Gwyngala + Mys Hedra + Mys Du + Mys Kevardhu + + + + + + + Sul + Lun + Mth + Mhr + Yow + Gwe + Sad + + + De Sul + De Lun + De Merth + De Merher + De Yow + De Gwener + De Sadorn + + + + a.m. + p.m. 
+ + + RC + AD + + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/kw_GB.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/kw_GB.xml new file mode 100644 index 0000000..ff74ca4 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/kw_GB.xml @@ -0,0 +1,103 @@ + + + + + + + + + + + + + + + + + + + + + EEEE d MMMM yyyy + + + + + d MMMM yyyy + + + + + d MMM yyyy + + + + + dd/MM/yyyy + + + + + + + + HH:mm:ss z + + + + + HH:mm:ss + + + + + HH:mm:ss + + + + + HH:mm + + + + + + + {1} {0} + + + + + + + + + + + #,##0.###;-#,##0.### + + + + + + + #E0 + + + + + + + #,##0% + + + + + + + ¤#,##0.00;-¤#,##0.00 + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/ky.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/ky.xml new file mode 100644 index 0000000..9519283 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/ky.xml @@ -0,0 +1,42 @@ + + + + + + + + + + + Кыргыз + + + Кыргызстан + + + + [а-я і є ї ґ] + + + + , +   + ; + % + 0 + # + + + - + E + + + + + + + KGS + сом + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/ky_KG.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/ky_KG.xml new file mode 100644 index 0000000..fd076da --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/ky_KG.xml @@ -0,0 +1,10 @@ + + + + + + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/license.html b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/license.html new file mode 100644 index 0000000..f645068 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/license.html @@ -0,0 +1,49 @@ + + + +UNICODE, INC. LICENSE AGREEMENT - DATA FILES AND SOFTWARE + + + +

    UNICODE, INC. LICENSE AGREEMENT - DATA FILES AND SOFTWARE

    +
    COPYRIGHT AND PERMISSION NOTICE
    +
    +Copyright © 1991-2008 Unicode, Inc. All rights reserved. Distributed
    +under the Terms of Use in http://www.unicode.org/copyright.html.
    +
    +Permission is hereby granted, free of charge, to any person obtaining
    +a copy of the Unicode data files and any associated documentation
    +(the "Data Files") or Unicode software and any associated documentation
    +(the "Software") to deal in the Data Files or Software without
    +restriction, including without limitation the rights to use, copy,
    +modify, merge, publish, distribute, and/or sell copies of the Data
    +Files or Software, and to permit persons to whom the Data Files or
    +Software are furnished to do so, provided that (a) the above copyright
    +notice(s) and this permission notice appear with all copies of the Data
    +Files or Software, (b) both the above copyright notice(s) and this
    +permission notice appear in associated documentation, and (c) there is
    +clear notice in each modified Data File or in the Software as well as
    +in the documentation associated with the Data File(s) or Software that
    +the data or software has been modified.
    +
    +THE DATA FILES AND SOFTWARE ARE PROVIDED "AS IS", WITHOUT WARRANTY OF
    +ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
    +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT
    +OF THIRD PARTY RIGHTS. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR HOLDERS
    +INCLUDED IN THIS NOTICE BE LIABLE FOR ANY CLAIM, OR ANY SPECIAL INDIRECT
    +OR CONSEQUENTIAL DAMAGES, OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
    +OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
    +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
    +PERFORMANCE OF THE DATA FILES OR SOFTWARE.
    +
    +Except as contained in this notice, the name of a copyright holder shall
    +not be used in advertising or otherwise to promote the sale, use or other
    +dealings in these Data Files or Software without prior written authorization
    +of the copyright holder.
    +
    +Unicode and the Unicode logo are trademarks of Unicode, Inc., and may be
    +registered in some jurisdictions. All other trademarks and registered
    +trademarks mentioned herein are the property of their respective owners.
    +
    + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/lt.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/lt.xml new file mode 100644 index 0000000..52fc3f4 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/lt.xml @@ -0,0 +1,435 @@ + + + + + + + + + + + Arabų + Bulgarų + Čekijos + Danų + Vokiečių + Graikų + Anglų + Ispanų + Estų + Suomių + Prancūzų + Hebrajų + Kroatų + Vengrų + Italų + Japonų + Korėjiečių + Lietuvių + Latvių + Olandų + Norvegų + Lenkų + Portugalų + Rumunų + Rusų + Slovakų + Slovėnų + Švedų + Turkų + Kinų + + + Andora + Jungtiniai Arabų Emyratai + Afganistanas + Antigva ir Barbuda + Angvila + Albanija + Armėnija + Nyderlandų Antilai + Angola + Antarktika + Argentina + Amerikos Samoa + Austrija + Australija + Aruba + Azerbaidžanas + Bosnija ir Hercegovina + Barbadosas + Bangladešas + Belgija + Burkina Fasas + Bulgarija + Bahreinas + Burundis + Beninas + Bermudai + Brunėjus + Bolivija + Brazilija + Bahamai + Butanas + Bouvet sala + Botsvana + Baltarusija + Belizas + Kanada + Kokosų salos + Kongo Demokratinė Respublika + Centrinės Afrikos Respublika + Kongas + Šveicarija + Kot d’Ivuar + Kuko salos + Čilė + Kamerūnas + Kinija + Kolumbija + Kosta Rika + Kuba + Žaliojo Kyšulio salos + Kalėdų sala + Kipras + Čekijos respublika + Vokietija + Džibutis + Danija + Dominika + Dominikos Respublika + Alžyras + Ekvadoras + Estija + Egiptas + Vakarų Sachara + Eritrėja + Ispanija + Etiopija + Suomija + Fidžis + Folklendo salos (Malvinai) + Mikronezijos Federacinės Valstijos + Farerų salos + Prancūzija + en + Gabonas + Didžioji Britanija + Grenada + Gruzija + Prancūzijos Gviana + Gana + Gibraltaras + Grenlandija + Gambija + Gvinėja + Gvadelupė + Ekvatorinė Gvinėja + Graikija + Rytų Džordžija ir Rytų Sandwich salos + Gvatemala + Guamas + Gvinėja-Bisau + Gviana + Kinijos S.A.R.Honkongas + Heard ir McDonald salos + Hondūras + Kroatija + Haitis + Vengrija + Indonezija + Airija + Izraelis + 
Indija + Britų Indijos vandenyno teritorija + Irakas + Irano + Islandija + Italija + Jamaika + Jordanija + Japonija + Kenija + Kirgiztanas + Kambodža + Kiribatis + Komorai + Sent Kitsas ir Nevis + Šiaurės Korėja + Pietų Korėja + Kuveitas + Kaimanų salos + Kazachstanas + Laoso Liaudies Demokratinė Respublika + Libanas + Šventoji Liucija + Lichtenšteinas + Šri Lanka + Liberija + Lesotas + Lietuva + Liuksemburgas + Latvija + Libijos Arabų Džamahirija + Marokas + Monakas + Moldovos Respublika + Madagaskaras + Maršalo salos + Makedonijos Respublika + Malis + Mjanma + Mongolija + Kinijos S.A.R. Makao + Šiaurinės Marianos salos + Martinika + Mauritanija + Montserat + Malta + Mauricijus + Maldivai + Malavis + Meksika + Malaizija + Mozambikas + Namibija + Naujoji Kaledonija + Nigeris + Norfolko sala + Nigerija + Nikaragva + Nyderlandai + Norvegija + Nepalas + Nauru + Niujė + Naujoji Zelandija + Omanas + Panama + Peru + Prancūzų Polinezija + Papua Naujoji Gvinėja + Filipinai + Pakistanas + Lenkija + Sen Pjeras ir Mikelonas + Pitkernas + Puerto Rikas + Palestinos teritorija + Portugalija + Palau + Paragvajus + Kataras + Rejunjonas + Rumunija + Rusijos Federacija + Ruanda + Saudo Arabija + Saliamono salos + Seišeliai + Sudanas + Švedija + Singapūras + Šventoji Elena + Slovėnija + Svalbardo ir Jan Majen salos + Slovakia + Siera Leonė + San Marinas + Senegalas + Somalis + Serbia + Surinamas + San Tomė ir Principė + El Salvadoras + Sirija + Svazilendas + Turks ir Kaikos salos + Čadas + Prancūzų pietinės teritorijos + Togas + Tailandas + Tadžikija + Tokelau + Rytų Timoras + Turkmėnistanas + Tunisas + Tonga + Turkija + Trinidadas ir Tobagas + Tuvalu + Taivanis, Kinijos provincija + Tanzanija + Ukraina + Uganda + JAV antraeilės teritorijos salos + United States + Urugvajus + Uzbekija + Vatikano Miesto Valstija + Šventasis Vincentas ir Grenadinai + Venesuela + Britų Virginijos salos + JAV Virginijos salos + Vietnamas + Vanuatu + Valiso ir Futuna salos + Samoa + Jemenas + Majotė + 
Jugoslavija + Pietų Afrika + Zambija + Zimbabvė + + + + [a-z ą ę į ų ė ū č š ž] + + + GanjkHmsSEDFwWxhKzAeugXZ + + + + + + Sau + Vas + Kov + Bal + Geg + Bir + Lie + Rgp + Rgs + Spa + Lap + Grd + + + Sausio + Vasario + Kovo + Balandžio + Gegužės + Birželio + Liepos + Rugpjūčio + Rugsėjo + Spalio + Lapkričio + Gruodžio + + + + + + + Sk + Pr + An + Tr + Kt + Pn + Št + + + Sekmadienis + Pirmadienis + Antradienis + Trečiadienis + Ketvirtadienis + Penktadienis + Šeštadienis + + + + + + + + + + pr.Kr. + po.Kr. + + + + + + + yyyy 'm.' MMMM d 'd.',EEEE + + + + + yyyy 'm.' MMMM d 'd.' + + + + + yyyy.MM.dd + + + + + yyyy.MM.dd + + + + + + + + HH:mm:ss z + + + + + HH:mm:ss z + + + + + HH:mm:ss + + + + + HH:mm + + + + + + + {1} {0} + + + + + + + + + , + . + ; + % + 0 + # + + + - + E + + + + + + + LTL + Lt + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/lt_LT.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/lt_LT.xml new file mode 100644 index 0000000..28bd602 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/lt_LT.xml @@ -0,0 +1,40 @@ + + + + + + + + + + + + + + ##,##0.##;-##,##0.## + + + + + + + #E0 + + + + + + + #,##% + + + + + + + #,##0.00 ¤;-#,##0.00 ¤ + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/lv.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/lv.xml new file mode 100644 index 0000000..11ed8f0 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/lv.xml @@ -0,0 +1,435 @@ + + + + + + + + + + + arābu + bulgāru + čehu + dāņu + vācu + grieķu + angļu + spāņu + igauņu + somu + franču + ivrits + horvātu + ungāru + itāliešu + japāņu + korejiešu + lietuviešu + latviešu + holandiešu + norvēģu + poļu + portugāļu + rumāņu + krievu + slovāku + slovēņu + zviedru + turku + ķīniešu + + + Andora + Apvienotie Arābu Emirāti + Afganistāna + Antigva un Barbuda + Angilja + Albānija + Armēnija + Antiļas + Angola + Antarktika + 
Argentīna + Amerikāņu Samoa + Austrija + Austrālija + Aruba + Azerbaidžāna + Bosnija un Hercegovina + Barbadosa + Bangladeša + Beļģija + Burkinafaso + Bulgārija + Bahreina + Burundi + Benina + Bermudu salas + Bruneja + Bolīvija + Brazīlija + Bahamas + Butāna + Buvē sala + Botsvāna + Baltkrievija + Beliza + Kanāda + Kokosu (Kīlinga) salas + Kongo Demokrātiskā Republika + Centrālāfrikas Republika + Kongo + Šveice + Kotdivuāra + Kuka salas + Čīle + Kamerūna + Ķīna + Kolumbija + Kostarika + Kuba + Kaboverde + Ziemsvētku sala + Kipra + Čehija + Vācija + Džibutija + Dānija + Dominika + Dominikānas Republika + Alžīrija + Ekvadora + Igaunija + Ēģipte + Rietumsahāra + Eritreja + Spānija + Etiopija + Somija + Fidži + Folklenda salas + Mikronēzijas Federatīvās Valstis + Farēru salas + Francija + en + Gabona + Lielbritānija + Grenāda + Gruzija + Franču Gviāna + Gana + Gibraltārs + Grenlande + Gambija + Gvineja + Gvadelupa + Ekvatoriālā Gvineja + Grieķija + Dienviddžordžija un Dienvidsendviču salas + Gvatemala + Guama + Gvineja-Bisava + Gajāna + Honkonga, Ķīnas īpašās pārvaldes apgabals + Hērda un Makdonalda salas + Hondurasa + Horvātija + Haiti + Ungārija + Indonēzija + Īrija + Izraēla + Indija + Britu Indijas okeāna teritorija + Irāka + Irāna + Islande + Itālija + Jamaika + Jordānija + Japāna + Kenija + Kirgīzija + Kambodža + Kiribati + Komoru salas + Sentkitsa un Nevisa + Ziemeļkoreja + Dienvidkoreja + Kuveita + Kaimanu salas + Kazahstāna + Laosa + Libāna + Sentlūsija + Lihtenšteina + Šrilanka + Libērija + Lesoto + Lietuva + Luksemburga + Latvija + Lībija + Maroka + Monako + Moldova + Madagaskara + Māršala salas + Maķedonija + Mali + Mjanma + Mongolija + Makao, Ķīnas īpašās pārvaldes apgabals + Ziemeļu Marianas + Martinika + Mauritānija + Montserrata + Malta + Maurīcija + Maldīvija + Malāvija + Meksika + Malaizija + Mozambika + Namībija + Jaunkaledonija + Nigēra + Norfolka + Nigērija + Nikaragva + Nīderlande + Norvēģija + Nepāla + Nauru + Niue + Jaunzēlande + Omāna + Panama 
+ Peru + Franču Polinēzija + Papua-Jaungvineja + Filipīnas + Pakistāna + Polija + Senpjēra un Mikelona + Pitkērna + Puertoriko + Palestīniešu pašpārvaldes teritorija + Portugāle + Palau + Paragvaja + Katara + Reinjona + Rumānija + Krievija + Ruanda + Saūda Arābija + Zālamana salas + Seišeļu salas + Sudāna + Zviedrija + Singapūra + Sv. Helēnas sala + Slovēnija + Svalbāra un Jana Majena sala + Slovākija + Sjerraleone + Sanmarīno + Senegāla + Somālija + Serbia + Surinama + Santome un Prinsipi + Salvadora + Sīrija + Svazilenda + Tērksas un Kaikosas salas + Čada + Franču dienvidu teritorijas + Togo + Taizeme + Tadžikistāna + Tokelau + Austrumtimora + Turkmenistāna + Tunisija + Tonga + Turcija + Trinidāda un Tobāgo + Tuvalu + Taivāna, Ķīnas province + Tanzānija + Ukraina + Uganda + ASV mazās aizjūras teritorijas + United States + Urugvaja + Uzbekistāna + Vatikāns + Sentvinsenta un Grenadīnas + Venecuēla + Britu Virdžīnu salas + Amerikāņu Virdžīnu salas + Vjetnama + Vanuatu + Volisa un Futuna + Samoa + Jemena + Majota + Dienvidslāvija + Dienvidāfrika + Zambija + Zimbabve + + + + [a-z ā ē ī ō ū ģ ķ ļ ņ ŗ č š ž] + + + GanjkHmsSEDFwWxhKzAeugXZ + + + + + + Jan + Feb + Mar + Apr + Mai + Jūn + Jūl + Aug + Sep + Okt + Nov + Dec + + + janvāris + februāris + marts + aprīlis + maijs + jūnijs + jūlijs + augusts + septembris + oktobris + novembris + decembris + + + + + + + Sv + P + O + T + C + Pk + S + + + svētdiena + pirmdiena + otrdiena + trešdiena + ceturtdiena + piektdiena + sestdiena + + + + + + + + + + pmē + + + + + + + + EEEE, yyyy'. gada 'd. MMMM + + + + + yyyy'. gada 'd. 
MMMM + + + + + yyyy.d.M + + + + + yy.d.M + + + + + + + + HH:mm:ss z + + + + + HH:mm:ss z + + + + + HH:mm:ss + + + + + HH:mm + + + + + + + {1} {0} + + + + + + + + + , +   + ; + % + 0 + # + + + - + E + + + + + + + LVL + Ls + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/lv_LV.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/lv_LV.xml new file mode 100644 index 0000000..b7687a7 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/lv_LV.xml @@ -0,0 +1,40 @@ + + + + + + + + + + + + + + #,##0.###;-#,##0.### + + + + + + + #E0 + + + + + + + #,##0% + + + + + + + #,##0.00 ¤;-#,##0.00 ¤ + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/mk.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/mk.xml new file mode 100644 index 0000000..6797eeb --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/mk.xml @@ -0,0 +1,166 @@ + + + + + + + + + + + македонски + + + Македонија + + + + [а-и к-ш ѐ ѓ ѕ ј љ њ ќ ѝ џ] + + + GuMtkHmsSEDFwWahKzUeygAZ + + + + + + јан. + фев. + мар. + апр. + мај. + јун. + јул. + авг. + септ. + окт. + ноем. + декем. + + + јануари + февруари + март + април + мај + јуни + јули + август + септември + октомври + ноември + декември + + + + + + + нед. + пон. + вт. + сре. + чет. + пет. + саб. + + + недела + понеделник + вторник + среда + четврток + петок + сабота + + + + + + + + + + пр.н.е. + ае. + + + + + + + EEEE, dd MMMM yyyy + + + + + dd MMMM yyyy + + + + + dd.M.yyyy + + + + + dd.M.yy + + + + + + + + HH:mm:ss z + + + + + HH:mm:ss z + + + + + HH:mm:ss + + + + + HH:mm + + + + + + + {1} {0} + + + + + + + + + , + . 
+ ; + % + 0 + # + + + - + E + + + + + + + MKD + Den + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/mk_MK.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/mk_MK.xml new file mode 100644 index 0000000..02db683 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/mk_MK.xml @@ -0,0 +1,40 @@ + + + + + + + + + + + + + + #,##0.###;(#,##0.###) + + + + + + + #E0 + + + + + + + #,##0% + + + + + + + ¤ #,##0.00;-¤ #,##0.00 + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/mn.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/mn.xml new file mode 100644 index 0000000..364b5a9 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/mn.xml @@ -0,0 +1,36 @@ + + + + + + + + + + + Монгол хэл + + + Монгол улс + + + + [а-яієїґү] + + + + , +   + ; + % + 0 + # + + + - + E + + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/mn_MN.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/mn_MN.xml new file mode 100644 index 0000000..a6f5436 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/mn_MN.xml @@ -0,0 +1,18 @@ + + + + + + + + + + + + + MNT + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/mr.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/mr.xml new file mode 100644 index 0000000..0ffe691 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/mr.xml @@ -0,0 +1,242 @@ + + + + + + + + + + + अफार + अबखेजियन + अफ्रिकान्स + अमहारिक + अरेबिक + असामी + ऐमरा + अज़रबाइजानी + बष्किर + बैलोरुसियन + बल्गेरियन + बीहारी + बिसलमा + बंगाली + तिबेटियन + ब्रेटन + कटलन + कोर्सिकन + ज़ेक + वेल्ष + डानिष + जर्मन + भूटानी + ग्रीक + इंग्रेजी + इस्परान्टो + स्पानिष + इस्टोनियन् + बास्क + पर्षियन् + फिन्निष + फिजी + फेरोस् + फ्रेन्च + फ्रिसियन् + ऐरिष + स्काटस् गेलिक + गेलीशियन + गौरानी + गुजराती + हौसा + 
हेबृ + हिन्दी + क्रोयेषियन् + हंगेरियन् + आर्मीनियन् + इन्टरलिंग्वा + इन्डोनेषियन + इन्टरलिंग + इनूपियाक + आईसलान्डिक + इटालियन + इनुकिटुट् + जापनीस् + जावनीस् + जार्जियन् + कज़क + ग्रीनलान्डिक + कंबोडियन + कन्नड + कोरियन् + कोंकणी + कश्मीरी + कुर्दिष + किर्गिज़ + लाटिन + लिंगाला + लाओतियन् + लिथुआनियन् + लाट्वियन् (लेट्टिष) + मलागसी + माओरी + मसीडोनियन् + मलियालम + मंगोलियन् + मोल्डावियन् + मराठी + मलय + मालतीस् + बर्मीस् + नौरो + नेपाली + डच + नोर्वेजियन + ओसिटान् + ओरोमो (अफान) + ओरिया + पंजाबी + पोलिष + पष्टो (पुष्टो) + पोर्चुगीस् + क्वेचओ + रहटो-रोमान्स् + किरुन्दी + रोमानियन् + मराठी + रष्यन् + किन्यार्वान्डा + संस्कृत + सिंधी + सांग्रो + सेर्बो-क्रोयेषियन् + सिन्हलीस् + स्लोवाक + स्लोवेनियन् + समोन + शोना + सोमाली + आल्बेनियन् + सेर्बियन् + सिस्वती + सेसोथो + सुंदनीस् + स्वीडिष + स्वाहिली + तमिळ + तेलंगू + तजिक + थाई + तिग्रिन्या + तुर्कमेन + तगालोग + सेत्स्वाना + तोंगा + तुर्किष + त्सोगा + टटार + त्वि + उधूर + युक्रेनियन् + उर्दू + उज़बेक + वियत्नामीज़ + ओलापुक + उलोफ + क्स्होसा + इद्दिष + यूरुबा + झ्हुन्ग + चिनीस् + जुलू + + + भारत + + + + [[:Deva:]-[क़-य़]‌‍] + + + + + + + + जानेवारी + फेबृवारी + मार्च + एप्रिल + मे + जून + जुलै + ओगस्ट + सेप्टेंबर + ओक्टोबर + नोव्हेंबर + डिसेंबर + + + जानेवारी + फेबृवारी + मार्च + एप्रिल + मे + जून + जुलै + ओगस्ट + सेप्टेंबर + ओक्टोबर + नोव्हेंबर + डिसेंबर + + + + + + + रवि + सोम + मंगळ + बुध + गुरु + शुक्र + शनि + + + रविवार + सोमवार + मंगळवार + बुधवार + गुरुवार + शुक्रवार + शनिवार + + + + म.पू. + म.नं. 
+ + + + + + भारतीय समय + भारतीय समय + + + IST + IST + + + + + + + + INR + रु + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/mr_IN.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/mr_IN.xml new file mode 100644 index 0000000..2f2b043 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/mr_IN.xml @@ -0,0 +1,99 @@ + + + + + + + + + + + + + + + + + EEEE d MMMM yyyy + + + + + d MMMM yyyy + + + + + dd-MM-yyyy + + + + + d-M-yy + + + + + + + + h:mm:ss a z + + + + + h:mm:ss a z + + + + + h:mm:ss a + + + + + h:mm a + + + + + + + {1} {0} + + + + + + + + + + + ##,##,##0.###;-##,##,##0.### + + + + + + + #E0 + + + + + + + ##,##,##0% + + + + + + + ¤ ##,##,##0.00;-¤ ##,##,##0.00 + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/ms.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/ms.xml new file mode 100644 index 0000000..d231fba --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/ms.xml @@ -0,0 +1,328 @@ + + + + + + + + + + + Bahasa Melayu + + + Andorra + Emiriah Arab Bersatu + Afghanistan + Antigua dan Barbuda + Anguilla + Albania + Armenia + Netherlands Antilles + Angola + Antarctica + Argentina + American Samoa + Austria + Australia + Aruba + Azerbaijan + Bosnia dan Herzegovina + Barbados + Bangladesh + Belgium + Burkina Faso + Bulgaria + Bahrain + Burundi + Benin + Bermuda + Brunei + Bolivia + Brazil + Bahamas + Bhutan + Bouvet Island + Botswana + Belarus + Belize + Kanada + Cocos (Keeling) Islands + Democratic Republic of the Congo + Republik Afrika Tengah + Congo + Switzerland + Pantai Gading + Cook Islands + Cile + Kamerun + Cina + Colombia + Kosta Rika + Cuba + Cape Verde + Christmas Island + Kibris + Republik Czech + Jerman + Jibouti + Denmark + Dominica + Republik Dominican + Aljazair + Ecuador + Estonia + Mesir + Sahara Barat + Eritrea + Sepanyol + Ethiopia + Finland + Fiji + Falkland Islands + Micronesia + 
Faroe Islands + Perancis + en + Gabon + United Kingdom + Grenada + Georgia + French Guiana + Ghana + Gibraltar + Greenland + Gambia + Guinea + Guadeloupe + Equatorial Guinea + Yunani + South Georgia and the South Sandwich Islands + Guatemala + Guam + Guinea Bissau + Guyana + Hong Kong S.A.R., China + Heard Island and McDonald Islands + Honduras + Croatia + Haiti + Hungari + Indonesia + Ireland + Israel + Hindia + British Indian Ocean Territory + Iraq + Iran + Iceland + Itali + Jamaika + Jordan + Jepun + Kenya + Kyrgyzstan + Kemboja + Kiribati + Comoros + Saint Kitts dan Nevis + Utara Korea + Selatan Korea + Kuwait + Cayman Islands + Kazakhstan + Laos + Lubnan + Saint Lucia + Liechtenstein + Sri Lanka + Liberia + Lesotho + Lithuania + Luksembourg + Latvia + Libya + Maghribi + Monaco + Moldova + Madagaskar + Kepulauan Marshall + Macedonia + Mali + Myanmar + Mongolia + Macao S.A.R., China + Northern Mariana Islands + Martinique + Mauritania + Montserrat + Malta + Mauritius + Maldiv + Malawi + Meksiko + Malaysia + Mozambik + Namibia + New Caledonia + Niger + Norfolk Island + Nigeria + Nicaragua + Belanda + Norway + Nepal + Nauru + Niue + New Zealand + Oman + Panama + Peru + French Polynesia + Papua New Guinea + Filipina + Pakistan + Poland + Saint Pierre and Miquelon + Pitcairn + Puerto Rico + Palestinian Territory + Feringgi + Palau + Paraguay + Qatar + Réunion + Romania + Russia + Rwanda + Arab Saudi + Kepulauan Solomon + Seychelles + Sudan + Sweden + Singapura + Saint Helena + Slovenia + Svalbard and Jan Mayen + Slovakia + Siera Leon + San Marino + Senegal + Somalia + Serbia + Surinam + Sao Tome dan Principe + El Salvador + Syria + Swaziland + Turks and Caicos Islands + Cad + French Southern Territories + Togo + Thailand + Tadjikistan + Tokelau + Timor-Leste + Turkmenistan + Tunisia + Tonga + Turki + Trinidad dan Tobago + Tuvalu + Taiwan + Tanzania + Ukraine + Uganda + United States Minor Outlying Islands + Amerika Syarikat + Uruguay + Uzbekistan + Vatican + Saint 
Vincent dan Grenadines + Venezuela + British Virgin Islands + U.S. Virgin Islands + Vietnam + Vanuatu + Wallis and Futuna + Samoa + Yaman + Mayotte + Yugoslavia + Afrika Selatan + Zambia + Zimbabwe + + + + [a-z] + + + + + + + + Jan + Feb + Mac + Apr + Mei + Jun + Jul + Ogos + Sep + Okt + Nov + Dis + + + Januari + Februari + Mac + April + Mei + Jun + Julai + Ogos + September + Oktober + November + Disember + + + + + + + Ahd + Isn + Sel + Rab + Kha + Jum + Sab + + + Ahad + Isnin + Selasa + Rabu + Khamis + Jumaat + Sabtu + + + + + + + + + + Ringgit Malaysia + RM + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/ms_BN.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/ms_BN.xml new file mode 100644 index 0000000..eac4564 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/ms_BN.xml @@ -0,0 +1,95 @@ + + + + + + + + + + + + + + + + + dd MMMM yyyy + + + + + dd MMMM yyyy + + + + + dd/MM/yyyy + + + + + dd/MM/yyyy + + + + + + + + h:mm:ss aa + + + + + H:mm:ss + + + + + H:mm:ss + + + + + H:mm:ss + + + + + + + {1} {0} + + + + + + + + + , + . 
+ ; + % + 0 + # + + + - + E + + + + + + + BND + $ + + + USD + US$ + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/ms_MY.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/ms_MY.xml new file mode 100644 index 0000000..84832e8 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/ms_MY.xml @@ -0,0 +1,99 @@ + + + + + + + + + + + + + + + + + EEEE dd MMM yyyy + + + + + dd MMMM yyyy + + + + + dd MMMM yyyy + + + + + dd/MM/yyyy + + + + + + + + h:mm:ss a z + + + + + h:mm:ss a z + + + + + h:mm:ss a + + + + + h:mm + + + + + + + {1} {0} + + + + + + + + + + + #,##0.###;-#,##0.### + + + + + + + #E0 + + + + + + + #,##0% + + + + + + + ¤#,##0.00;(¤#,##0.00) + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/mt.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/mt.xml new file mode 100644 index 0000000..55221b2 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/mt.xml @@ -0,0 +1,873 @@ + + + + + + + + + + + en + Afar + Abkażjan + Aċiniż + Akoli + Adangme + Adyghe + Avestan + Afrikans + Afro-Asjatiku (Oħra) + Afriħili + Akan + Akkadjen + Aleut + Lingwi Algonqwinjani + Amħariku + Aragonese + Ingliż, Antik (ca.450-1100) + Lingwi Apaċi + Għarbi + Aramajk + Arawkanjan + Arapaħo + Artifiċjali (Oħra) + Arawak + Assamese + Asturian + Lingwi Atabaskani + Lingwi Awstraljani + Avarik + Awadħi + Ajmara + Ażerbajġani + Baxkir + Banda + Lingwi Bamileke + Baluċi + Bambara + Baliniż + Basa + Baltiku (Oħra) + Belarussu + Beja + Bemba + Beber + Bulgaru + Biħari + Bojpuri + Bislama + Bikol + Bini + Siksika + Bambara + Bengali + Bantu + Tibetjan + Brenton + Braj + Bosnijan + Batak + Burjat + Buginiż + Blin + Katalan + Kaddo + Amerika Ċentrali (Oħra) + Karib + Kawkasu (Oħra) + Ċeċen + Sibwano + Keltiku (Oħra) + Ċamorro + Ċibċa + Ċagataj + Ċukese + Mari + Ġargon taċ-Ċinuk + Ċostaw + Ċipewjan + Ċerokij + Xajenn + Lingwi Ċamiki + Korsiku + Koptiku + 
Kreoli u Piġini, Bbażat fuq l-Ingliż (Oħra) + Kreoli u Piġini, Bbażat fuq il-Franċiż (Oħra) + Creoles and pidgins, Portuguese-based (Other) + Krij + Crimean Turkish; Crimean Tatar + Kreoli u Piġini (Oħra) + Ċek + Kashubian + Slaviku tal-Knisja + Kuxtiku (Oħra) + Ċuvax + Welx + Daniż + Dakota + Dargwa + Dajak + Ġermaniż + Delawerjan + Slav + Dogrib + Dinka + Dogri + Dravidjan (Oħra) + Lower Sorbian + Dwala + Olandiż, Medjevali (ca. 1050-1350) + Diveħi + Djula + Dżongka + Ewe + Efik + Eġizzjan (Antik) + Ekajuk + Grieg + Elamit + Ingliż + Ingliż, Medjevali (1100-1500) + Esperanto + Spanjol + Estonjan + Bask + Ewondo + Persjan + Fang + Fanti + Fulaħ + Finlandiż + Finno - Ugrijan + Fiġi + Fawriż + Fon + Franċiż + Franċiż, Medjevali (ca. 1400-1600) + Franċiż, Antik (842-ca. 1400) + Frijuljan + Friżjan + Irlandiż + Ga + Gajo + Gbaja + Galliku Skoċċiż + Ġermaniku (Oħra) + Geez + Gilbertjan + Gallegjan + Ġermaniku, Medjevali Pulit (ca. 1050-1500) + Gwarani + Ġermaniku, Antik Pulit (ca. 750-1050) + Gondi + Gorontalo + Gotiku + Ġerbo + Grieg, Antik (to 1453) + Guġarati + Manks + Gwiċin + Ħawsa + Ħajda + Ħawajjan + Ebrajk + Ħindi + Hiligaynon + Ħimaċali + Ħittit + Ħmong + Ħiri Motu + Kroat + Upper Sorbian + Haitian + Ungeriż + Ħupa + Armenjan + Ħerero + Interlingua + Iban + Indoneżjan + Interlingue + Igbo + Sichuan Yi + Iġo + Inupjak + Iloko + Indjan (Oħra) + Indo-Ewropew + Ingush + Ido + Iranjan + Lingwi Irogwjani + Iżlandiż + Taljan + Inukitut + Ġappuniż + Lojban + Lhudi-Persjan + Lhudi-Għarbi + Ġavaniż + Ġorġjan + Kara-Kalpak + Kabuljan + Kaċin + Kamba + Karen + Kawi + Kabardian + Kongo + Kasi + Kojsan + Kotaniż + Kikuju + Kuanyama + Każak + Kalallisut + Kmer + Kimbundu + Kannada + Korejan + Konkani + Kosrejan + Kpelle + Kanuri + Karachay-Balkar + Kru + Kurusk + Kaxmiri + Kurdiż + Kumiku + Kutenaj + Komi + Korniku + Kirgiż + Latin + Ladino + Landa + Lamba + Letżburgiż + Leżgjan + Ganda + Limburgish + Lingaljan + Lao + Mongo + Lożi + Litwanjan + Luba-Katanga + Luba-Luluwa + 
Luwisinuż + Lunda + Luwa + Luxaj + Latvjan (Lettix) + Maduriż + Magaħi + Majtili + Makasar + Mandingwan + Awstronesjan + Masaj + Moksha + Mandar + Mende + Malagażi + Irlandiż, Medjevali (900-1200) + Marxall + Maori + Mikmek + Minangkabaw + Lingwi Oħra + Maċedonjan + Mon-Kmer (Oħra) + Malajalam + Mongoljan + Manċurjan + Manipuri + Lingwi Manobo + Moldavjan + Moħak + Mossi + Marati + Malajan + Malti + Lingwi Diversi + Lingwi tal-Munda + Kriek + Marwari + Burmiż + Majan + Erzya + Nawuru + Naħwatil + Indjan tal-Amerika ta' Fuq (Oħra) + Neapolitan + Bokmahal Norveġiż + Ndebele, ta' Fuq + Ġermaniż Komuni; Sassonu Komuni + Nepaliż + Newari + Ndonga + Nijas + Niġerjan - Kordofanjan + Nijuwejan + Olandiż + Ninorsk Norveġiż + Norveġiż + Nogai + Skandinav, Antik + Ndebele, t'Isfel + Soto, ta' Fuq + Lingwi Nubjani + Navaħo + Ċiċewa; Njanġa + Njamweżi + Nyankole + Njoro + Nżima + Provenzal (wara 1500) + Oġibwa + Oromo (Afan) + Orija + Ossettiku + Osaġjan + Tork (Imperu Ottoman) + Lingwi Otomjani + Punġabi + Papwan (Oħra) + Pangasinjan + Paħlavi + Pampamga + Papjamento + Palawjan + Persjan Antik (ca. 600-400 Q.K.) 
+ Filippin (Oħra) + Feniċju + Pali + Pollakk + Ponpejan + Lingwi Prakriti + Provenzal, Antik (sa l-1500) + Paxtun + Portugiż + Keċwa + Raġastani + Rapanwi + Rarotongani + Reto-Romanz + Rundi + Rumen + Romanz (Oħra) + Żingaru + Għerq + Russu + Kinjarwanda + Sanskrit + Sandawe + Jakut + Indjan tal-Amerika t'Isfel (Oħra) + Salixan + Samritan + Saska + Santali + Sardinjan + Skoċċiż + Sindi + Sami ta' Fuq + Selkup + Semitiku + Sango + Irlandiż, Antik (sa l-900) + Lingwa tas-Sinjali + Serbo-Kroat + Xan + Sinħaliż + Sidamo + Lingwi Suwjani + Sino-Tibetjani (Oħra) + Slovakk + Slav + Slavic (Other) + Samojan + Southern Sami + Sami languages (Other) + Lule Sami + Inari Sami + Skolt Sami + Xona + Soninke + Somali + Sogdien + Songaj + Albaniż + Serb + Serer + Swati + Nilo-Saħaram + Soto, t'Isfel + Sundaniż + Sukuma + Susu + Sumerjan + Svediż + Swaħili + Sirjan + Tamil + Tai (Oħra) + Telugu + Timne + Tereno + Tetum + Taġik + Tajlandiż + Tigrinja + Tigre + Tiv + Turkmeni + Tokelau + Tagalog + Tlingit + Tamaxek + Zwana + Tongan (Gżejjer ta' Tonga) + Tonga (Njasa) + Tok Pisin + Tork + Tsonga + Zimxjan + Tatar + Tumbuka + Tupi languages + Altajk (Oħra) + Tuvalu + Twi + Taħitjan + Tuvinjan + Udmurt + Wigur + Ugaritiku + Ukranjan + Umbundu + Indeterminat + Urdu + Użbek + Vai + Venda + Vjetnamiż + Volapuk + Votik + Walloon + Lingwi Wakaxani + Walamo + Waraj + Waxo + Lingwi Sorbjani + Wolof + Kalmyk + Ħoża + Jao + Japese + Jiddix + Joruba + Lingwi Jupiċi + Żwang + Żapotek + Żenaga + Ċiniż + Żande + Żulu + Żuni + + + Andorra + Emirati Għarab Maqgħuda + Afganistan + Antigua and Barbuda + Angwilla + Albanija + Armenja + Antilles Olandiżi + Angola + Antarctica + Arġentina + Samoa Amerikana + Awstrija + Awstralja + Aruba + Ażerbajġan + Bożnija Ħerżegovina + Barbados + Bangladexx + Belġju + Burkina Faso + Bulgarija + Baħrajn + Burundi + Benin + Bermuda + Brunej + Bolivja + Brażil + Baħamas + Butan + Bouvet Island + Botswana + Bjelorussja + Beliże + Kanada + Cocos (Keeling) Islands + 
Democratic Republic of the Congo + Repubblika Afrikana Ċentrali + Kongo + Svizzera + Kosta ta' l-Avorju + Cook Islands + Ċili + Kamerun + Ċina + Kolumbja + Kosta Rika + Kuba + Kape Verde + Christmas Island + Ċipru + Repubblika Ċeka + Ġermanja + Ġibuti + Danimarka + Dominika + Republikka Domenikana + Alġerija + Ekwador + Estonja + Eġittu + Sahara tal-Punent + Eritreja + Spanja + Etijopja + Finlandja + Fiġi + Falkland Islands + Mikronesja + Gżejjer Faroe + Franza + en + Gabon + Ingilterra + Grenada + Ġorġja + Gujana Franċiża + Gana + Gibraltar + Grinlandja + Gambja + Gineja + Gwadelupe + Ginea Ekwatorjali + Greċja + South Georgia and the South Sandwich Islands + Gwatemala + Gwam + Ginea-Bissaw + Gujana + Ħong Kong S.A.R., Ċina + Heard Island and McDonald Islands + Ħonduras + Kroazja + Ħaiti + Ungerija + Indoneżja + Irlanda + Iżrael + Indja + British Indian Ocean Territory + Iraq + Iran + Islanda + Italja + Ġamajka + Ġordan + Ġappun + Kenja + Kirgistan + Kambodja + Kiribati + Komoros + Saint Kitts and Nevis + Koreja ta' Fuq + Koreja t'Isfel + Kuwajt + Cayman Islands + Każakstan + Laos + Libanu + Saint Lucia + Liechtenstein + Sri Lanka + Liberja + Lesoto + Litwanja + Lussemburgu + Latvja + Libja + Marokk + Monako + Maldova + Madagaskar + Gżejjer ta' Marshall + Maċedonja + Mali + Mjanmar + Mongolja + Macao S.A.R., China + Gżejjer Marjana ta' Fuq + Martinik + Mawritanja + Montserrat + Malta + Mawrizju + Maldives + Malawi + Messiku + Malasja + Możambik + Namibja + New Caledonia + Niġer + Norfolk Island + Niġerja + Nikaragwa + Olanda + Norveġja + Nepal + Nauru + Niue + New Zealand + Oman + Panama + Peru + Polinesja Franċiża + Papwa-Ginea Ġdida + Filippini + Pakistan + Polonja + Saint Pierre and Miquelon + Pitcairn + Puerto Rico + Palestinian Territory + Portugall + Palau + Paragwaj + Qatar + Réunion + Rumanija + Russja + Rwanda + Għarabja Sawdita + Solomon Islands + Seychelles + Sudan + Żvezja + Singapor + Saint Helena + Slovenja + Svalbard and Jan Mayen + Slovakkja + 
Sierra Leone + San Marino + Senegal + Somalja + Serbja + Surinam + Sao Tome and Principe + El Salvador + Sirja + Sważiland + Turks and Caicos Islands + Ċad + Territorji Franċiżi ta' Nofsinhar + Togo + Tajlandja + Taġikistan + Tokelaw + Timor tal-Lvant + Turkmenistan + Tuneż + Tonga + Turkija + Trinidad u Tobago + Tuvalu + Tajwan + Tanżanija + Ukraina + Uganda + United States Minor Outlying Islands + Stati Uniti + Urugwaj + Użbekistan + Vatikan + Saint Vincent and the Grenadines + Venezwela + British Virgin Islands + U.S. Virgin Islands + Vjetnam + Vanwatu + Wallis and Futuna + Samoa + Jemen + Majotte + Jugoslavja + Afrika t'Isfel + Żambja + Żimbabwe + + + + [[a-z à ì ù è ò ċ ġ ħ {għ} ż]-[cy]] + + + + + + + + Jan + Fra + Mar + Apr + Mej + Ġun + Lul + Awi + Set + Ott + Nov + Diċ + + + Jannar + Frar + Marzu + April + Mejju + Ġunju + Lulju + Awissu + Settembru + Ottubru + Novembru + Diċembru + + + + + + + Ħad + Tne + Tli + Erb + Ħam + Ġim + Sib + + + Il-Ħadd + It-Tnejn + It-Tlieta + L-Erbgħa + Il-Ħamis + Il-Ġimgħa + Is-Sibt + + + + + + + + + + QK + WK + + + + + + + EEEE, d 'ta''' MMMM yyyy + + + + + d 'ta''' MMMM yyyy + + + + + dd MMM yyyy + + + + + dd/MM/yyyy + + + + + + + + HH:mm:ss z + + + + + HH:mm:ss z + + + + + HH:mm:ss + + + + + HH:mm + + + + + + + {1} {0} + + + + + + + + + Ħin Ċentrali Ewropew + Ħin Ċentrali Ewropew tas-Sajf + + + CET + CEST + + Valletta + + + + + + + Lira Maltija + Lm + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/mt_MT.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/mt_MT.xml new file mode 100644 index 0000000..b99d965 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/mt_MT.xml @@ -0,0 +1,40 @@ + + + + + + + + + + + + + + #,##0.###;-#,##0.### + + + + + + + #E0 + + + + + + + #,##0% + + + + + + + ¤#,##0.00;-¤#,##0.00 + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/nb.xml 
b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/nb.xml new file mode 100644 index 0000000..00ecb1b --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/nb.xml @@ -0,0 +1,2229 @@ + + + + + + + + + + + afar + abkhasisk + avestisk + afrikaans + akan + amharisk + aragonsk + arabisk + assamisk + avarisk + aymara + aserbajdsjansk + basjkirsk + hviterussisk + bulgarsk + bihari + bislama + bambara + bengali + tibetansk + bretonsk + bosnisk + blin + katalansk + tsjetsjensk + chamorro + cherokee + korsikansk + cree + tsjekkisk + kirkeslavisk + tsjuvansk + walisisk + dansk + tysk + divehi + dzongkha + ewe + gresk + engelsk + esperanto + spansk + estisk + baskisk + persisk + fulani + finsk + fijiansk + færøysk + fransk + frisisk + irsk + skotsk gælisk + ges + galicisk + guarani + gujarati + manx + hawaiisk + hebraisk + hindi + hiri motu + kroatisk + haitisk + ungarsk + armensk + herero + interlingua + indonesisk + interlingue + ibo + sichuan-yi + unupiak + ido + islandsk + italiensk + inuktitut + japansk + javanesisk + georgisk + kikongo + kikuyu + kuanyama + kasakhisk + kalaallisut + khmer + kannada + koreansk + konkani + kanuri + kasjmiri + kurdisk + komi + kornisk + kirgisisk + latin + luxemburgsk + ganda + limburgisk + lingala + laotisk + litauisk + luba-katanga + latvisk + madagassisk + marshallesisk + maori + makedonsk + malayalam + mongolsk + moldavisk + marathi + malayisk + maltesisk + burmesisk + nauru + norsk bokmål + ndebele (nord) + nepalsk + ndonga + nederlandsk + norsk nynorsk + norsk + ndebele, sør + navajo + nyanja + oksitansk (etter 1500) + ojibwa + oromo + oriya + ossetisk + panjabi + pali + polsk + pashto + portugisisk + quechua + retoromansk + rundi + rumensk + russisk + kinjarwanda + sanskrit + sardinsk + sindhi + nordsamisk + sango + serbokroatisk + singalesisk + sidamo + slovakisk + slovensk + samoansk + shona + somalisk + albansk + serbisk + swati + sotho (sørlig) + sundanesisk + svensk + swahili + syrisk + 
tamil + telugu + tatsjikisk + thai + tigrinja + tigré + turkmensk + tagalog + tswana + tonga (Tonga-øyene) + tyrkisk + tsonga + tatarisk + twi + tahitisk + uigurisk + ukrainsk + urdu + usbekisk + venda + vietnamesisk + volapyk + vallonsk + wolof + xhosa + jiddisk + joruba + zhuang + kinesisk + zulu + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Andorra + De forente arabiske emiratene + Afghanistan + Antigua og Barbuda + Anguilla + Albania + Armenia + De nederlandske antiller + Angola + Antarktis + Argentina + Amerikansk Samoa + Østerrike + Australia + Aruba + Aserbajdsjan + Bosnia og Hercegovina + Barbados + Bangladesh + Belgia + Burkina Faso + Bulgaria + Bahrain + Burundi + Benin + Bermuda + Brunei Darussalam + Bolivia + Brasil + Bahamas + Bhutan + Bouvetøya + Botswana + Hviterussland + Belize + Canada + Kokosøyene (Keelingøyene) + Kongo, Den demokratiske republikken + Den sentralafrikanske republikk + Kongo + Sveits + Elfenbenskysten + Cookøyene + Chile + Kamerun + Kina + Colombia + Costa Rica + Cuba + Kapp Verde + Christmasøya + Kypros + Tsjekkia + Tyskland + Djibouti + Danmark + Dominica + Den dominikanske republikk + Algerie + Ecuador + Estland + Egypt + Vest-Sahara + Eritrea + Spania + Etiopia + Finland + Fiji + Falklandsøyene (Malvinas) + Mikronesiaføderasjonen + Færøyene + Frankrike + en + Gabon + Storbritannia + Grenada + Georgia + Fransk Guyana + Ghana + Gibraltar + Grønland + Gambia + Guinea + Guadeloupe + Ekvatorial-Guinea + Hellas + Sør-Georgia og Sør-Sandwich-øyene + Guatemala + Guam + Guinea-Bissau + Guyana + Hong Kong S.A.R. (Kina) + Heard- og McDonaldsøyene + Honduras + Kroatia + Haiti + Ungarn + Indonesia + Irland + Israel + India + Britiske områder i det indiske hav + Irak + Iran + Island + Italia + Jamaica + Jordan + Japan + Kenya + Kirgisistan + Kambodsja + Kiribati + Komorene + St. 
Christopher og Nevis + Nord-Korea + Sør-Korea + Kuwait + Caymanøyene + Kasakhstan + Laos, Den folkedemokratiske republikken + Libanon + St. Lucia + Liechtenstein + Sri Lanka + Liberia + Lesotho + Litauen + Luxembourg + Latvia + Libya + Marokko + Monaco + Moldova + Madagaskar + Marshalløyene + Makedonia, Republikken + Mali + Myanmar + Mongolia + Macao S.A.R. (Kina) + Nord-Marianene + Martinique + Mauritania + Montserrat + Malta + Mauritius + Maldivene + Malawi + Mexico + Malaysia + Mosambik + Namibia + Ny-Caledonia + Niger + Norfolkøyene + Nigeria + Nicaragua + Nederland + Norge + Nepal + Nauru + Niue + New Zealand + Oman + Panama + Peru + Fransk Polynesia + Papua Ny-Guinea + Filippinene + Pakistan + Polen + St. Pierre og Miquelon + Pitcairn + Puerto Rico + Palestinsk territorium + Portugal + Palau + Paraguay + Qatar + Reunion + Romania + Den russiske føderasjon + Rwanda + Saudi Arabia + Salomonøyene + Seychellene + Sudan + Sverige + Singapore + Saint Helena + Slovenia + Svalbard og Jan Mayen + Slovakia + Sierra Leone + San Marino + Senegal + Somalia + Serbia + Surinam + Sao Tome og Principe + El Salvador + Syria + Swaziland + Turks- og Caicosøyene + Tchad + Franske sørområder + Togo + Thailand + Tadsjikistan + Tokelau + Øst-Timor + Turkmenistan + Tunisia + Tonga + Tyrkia + Trinidad og Tobago + Tuvalu + Taiwan + Tanzania + Ukraina + Uganda + USAs mindre øyer + USA + Uruguay + Usbekistan + Vatikanstaten + St. 
Vincent og Grenadinene + Venezuela + Jomfruøyene (britisk) + Jomfruøyene (USA) + Vietnam + Vanuatu + Wallis og Futuna + Samoa + Yemen + Mayotte + Jugoslavia + Sør-Afrika + Zambia + Zimbabwe + + + Revidert + + + Kalendar + Kollasjon + Valuta + + + Buddhistisk kalender + Kinesisk kalender + Gregoriansk kalender + Hebraisk kalender + Islamsk kalender + Islamsk sivil kalender + Japansk kalender + Direkte rekkefølge + Telefonkatalogrekkefølge + Pinyin-rekkefølge + Strekrekkefølge + Tradisjonell rekkefølge + + + + [a-zæåøéóôàüǎ] + + + + + + + + jan + feb + mar + apr + mai + jun + jul + aug + sep + okt + nov + des + + + J + F + M + A + M + J + J + A + S + O + N + D + + + januar + februar + mars + april + mai + juni + juli + august + september + oktober + november + desember + + + + + + + + ma + ti + on + to + fr + + + + S + M + T + O + T + F + L + + + søndag + mandag + tirsdag + onsdag + torsdag + fredag + lørdag + + + + + + + + + + f.Kr. + e.Kr. + + + + + + + EEEE d. MMMM yyyy + + + + + d. MMMM yyyy + + + + + d. MMM. yyyy + + + + + dd.MM.yy + + + + + + + + 'kl. 'HH.mm.ss z + + + + + HH.mm.ss z + + + + + HH.mm.ss + + + + + HH.mm + + + + + + + {1} {0} + + + + + + + + + Eastern European Standard Time + Eastern European Daylight Time + + + EET + EEST + + Bucuresti + + + + + + , +   + ; + % + 0 + # + + + - + E + + + + + + + Andorranske dinarer + ADD + + + Andorranske pesetas + ADP + + + UAE dirham + AED + + + Afghani (1927-2002) + AFA + + + Afghani + Af + + + Affar og Issa franc + AIF + + + Albanske lek (1946-1961) + ALK + + + Albanske lek + lek + + + Albanske lek valute + ALV + + + Albanske dollar (FEC) + ALX + + + Armenske dram + dram + + + Nederlandske antillegylden + NA f. 
+ + + Angolanske kwanza + AOA + + + Angolanske kwanza (1977-1990) + AOK + + + Angolanske ny kwanza (1990-2000) + AON + + + Angolan Kwanza Reajustado (1995-1999) + AOR + + + Angolanske escudo + AOS + + + Argentinske australer + ARA + + + Argentinske Peso Moneda Nacional + ARM + + + Argentinske pesos (1983-1985) + ARP + + + Argentinske pesos + Arg$ + + + Østerrikske shilling + ATS + + + Australske dollar + $A + + + Australske pund + AUP + + + Arubiske gylden + AWG + + + Aserbajdsjanske Manat + AZM + + + Bosnia-Hercegovina dinarer + BAD + + + Bosnia-Hercegovina mark (konvertible) + KM + + + Bosnia-Hercegovina nye dinarer + BAN + + + Barbadisk dollar + BDS$ + + + Bangladeshiske taka + Tk + + + Belgiske franc (konvertible) + BEC + + + Belgiske franc + BF + + + Belgiske franc (økonomiske) + BEL + + + Bulgarske lev (hard) + lev + + + Bulgarske sosialist-lev + BGM + + + Bulgarske lev + BGN + + + Bulgarske lev (1879-1952) + BGO + + + Bulgarske lev (FEC) + BGX + + + Bahrainske dinarer + BD + + + Burundiske franc + Fbu + + + Bermudiske dollar + Ber$ + + + Bermudiske pund + BMP + + + Bruneiske dollar + BND + + + Boliviano + Bs + + + Boliviano (1863-1962) + BOL + + + Boliviansk pesos + BOP + + + Boliviansk mvdol + BOV + + + Brasiliansk cruzeiro novo (1967-1986) + BRB + + + Brasilianske cruzado + BRC + + + Brasilianske cruzeiro (1990-1993) + BRE + + + Brasilianske realer + R$ + + + Brasilianske cruzado novo + BRN + + + Brasilianske cruzeiro + BRR + + + Brasilianske cruzeiro (1942-1967) + BRZ + + + Bahamske dollar + BSD + + + Bahamske pund + BSP + + + Bhutanske ngultrum + Nu + + + Bhutanske rupier + BTR + + + Burmesiske kyat + BUK + + + Burmesiske rupier + BUR + + + Botswanske pula + BWP + + + Hviterussiske nye rubler (1994-1999) + BYB + + + Hviterussiske rubler (1992-1994) + BYL + + + Hviterussiske rubler + Rbl + + + Beliziske dollar + BZ$ + + + Britisk Honduras-dollar + BZH + + + Kanadiske dollar + Can$ + + + Kongolesiske franc (congolais) + CDF + + + Kongolesiske 
republikk-franc + CDG + + + Congolesiske zaire + CDL + + + Sentralafrikanske franc (CFA) + CFF + + + Sveitsiske franc + SwF + + + Cookøyene dollar + CKD + + + Chilenske condor + CLC + + + Chilenske escudo + CLE + + + Chilenske Unidades de Fomento + CLF + + + Chilenske pesos + Ch$ + + + Kamerunske franc (CFA) + CMF + + + Kinesiske Jen Min Piao Yuan + CNP + + + Kinesiske US dollar (FEC) + CNX + + + Kinesiske Yuan Renminbi + Y + + + Colombianske papir-pesos + COB + + + Kongolesiske franc (CFA) + COF + + + Colombianske pesos + Col$ + + + Costaricanske colon + C + + + Tsjekkoslovakiske koruna + CSC + + + Tsjekkoslovakiske koruna (hard) + CSK + + + Kubanske pesos + CUP + + + Kubanske Foreign Exchange Certificates + CUX + + + Kappverdiske escudo + CVEsc + + + Curacao-gylden + CWG + + + Kypriotiske pund + £C + + + Tsjekkiske koruna + CZK + + + Østtyske ostmark + DDM + + + Tyske mark + DEM + + + Tyske sperrmark + DES + + + Djiboutiske franc + DF + + + Danske kroner + DKr + + + Dominikanske pesos + RD$ + + + Algeriske dinarer + DA + + + Algeriske nye franc + DZF + + + Algeriske franc germinal + DZG + + + Ecuadorianske sucre + ECS + + + Ecuadorianske Unidad de Valor Constante (UVC) + ECV + + + Estiske kroon + EEK + + + Egyptiske pund + EGP + + + Eritreiske nakfa + ERN + + + Spanske peseta + ESP + + + Etiopiske birr + Br + + + Etiopiske dollar + ETD + + + Euro + + + + Finske mark + FIM + + + Finske mark (1860-1962) + FIN + + + Fijianske dollar + F$ + + + Fijianske pund + FJP + + + Falklandsøyene-pund + FKP + + + Færøyske kronur + FOK + + + Franske franc + FRF + + + Franske franc (Germinal/Franc Poincare) + FRG + + + Gabonske franc (CFA) + GAF + + + Britiske pund sterling + £ + + + Georgiske kupon larit + GEK + + + Georgiske lari + lari + + + Ghanesiske cedi + GHC + + + Ghanesiske gamle cedi + GHO + + + Ghanesiske pund + GHP + + + Ghanesiske revaluerte cedi + GHR + + + Gibraltarske pund + GIP + + + Grønlandske kroner + GLK + + + Gambiske dalasi + GMD + + + Gambiske pund + GMP + 
+ + Guineanske franc + GF + + + Guineanske franc (1960-1972) + GNI + + + Guineanske syli + GNS + + + Guadeloupe-franc + GPF + + + Ekvatorialguineanske ekwele guineana + GQE + + + Ekvatorialguineanske franco + GQF + + + Ekvatorialguineanske peseta guineana + GQP + + + Greske drakmer + GRD + + + Greske nye drakmer + GRN + + + Guatemalanske quetzal + Q + + + Fransk Guyana-franc guiana + GUF + + + Portugisiske guinea escudo + GWE + + + Portugisiske Guinea Mil Reis + GWM + + + Guinea-Bissau-pesos + GWP + + + Guyanske dollar + G$ + + + Hongkong-dollar + HK$ + + + Hoduras Lempira + L + + + Kroatiske dinarer + HRD + + + Kroatiske kuna + HRK + + + Haitiske gourde + HTG + + + Ungarske forinter + Ft + + + Nordirske pund + IBP + + + Indonesiske nica-gylden + IDG + + + Indonesiske Java-rupier + IDJ + + + Indonesiske nye rupier + IDN + + + Indonesiske rupier + Rp + + + Irske pund + IR£ + + + Israelske shekler + ILL + + + Israelske pund + ILP + + + Israelske nye shekler + ILS + + + Manske pund sterling + IMP + + + Indiske rupier + =0#Rs.|1#Re.|1<Rs. 
+ + + Irakske dinarer + ID + + + Iranske rialer + RI + + + Islandske kronar + ISK + + + Italienske lire + + + + Jersey pund sterling + JEP + + + Jamaikanske dollar + J$ + + + Jamaikanske pund + JMP + + + Jordanske dinarer + JD + + + Japanske yen + ¥ + + + Kenyanske shilling + K Sh + + + Kirgisiske som + som + + + Kambodsjanske gamle riel + KHO + + + Kambodsjanske riel + CR + + + Kiribatiske dollar + KID + + + Komoriske franc + CF + + + Nordkoreanske won (1947-1959) + KPP + + + Nordkoreanske won + KPW + + + Sørkoreanske hwan + KRH + + + Sørkoreanske gamle won + KRO + + + Sørkoreanske won + KRW + + + Kuwaitiske dinarer + KD + + + Caymanske dollar + KYD + + + Kasakhstanske rubler + KZR + + + Kasakhstanske tenge + T + + + Laotiske kip + LAK + + + Libanesiske pund + LL + + + Liechtensteinske franc + LIF + + + Srilankiske rupier + SL Re + + + Ceylonske rupier + LNR + + + Liberiske dollar + LRD + + + Lesothiske loti + M + + + Litauiske lita + LTL + + + Litauiske talonas + LTT + + + Luxemburgske franc + LUF + + + Latviske lats + LVL + + + Latviske rubler + LVR + + + Libyske British Military Authority-lira + LYB + + + Libyske dinarer + LD + + + Libyske pund + LYP + + + Marokkanske dirham + MAD + + + Marokkanske franc + MAF + + + Monegaskiske franc nouveau + MCF + + + Monegaskiske franc germinal + MCG + + + Moldovske leu cupon + MDC + + + Moldovske leu + MDL + + + Moldovske ruble cupon + MDR + + + Madagassiske ariary + MGA + + + Madagassiske franc + MGF + + + Marshalløyene-dollar + MHD + + + Makedonske denarer + MDen + + + Makedonske denarer (1992-1993) + MKN + + + Maliske franc + MLF + + + Myanmarske kyat + MMK + + + Myanmarske dollar (FEC) + MMX + + + Mongolske tugrik + Tug + + + Makaoske pataca + MOP + + + Martinique-franc + MQF + + + Mauritanske ouguiya + UM + + + Maltesiske lira + Lm + + + Maltesiske pund + MTP + + + Mauritiske rupier + MUR + + + Maldiviske rupier + MVP + + + Maldiviske rufiyaa + MVR + + + Malawisle kwacha + MK + + + Malawiske pund + MWP + + + 
Meksikanske pesos + MEX$ + + + Meksikanske sølvpesos (1861-1992) + MXP + + + Meksikanske Unidad de Inversion (UDI) + MXV + + + Malaysiske ringgit + RM + + + Mosambikiske escudo + MZE + + + Mosambikiske metical + Mt + + + Namibiske dollar + N$ + + + Kaledonske franc germinal + NCF + + + Nigerianske naira + NGN + + + Nigerianske pund + NGP + + + Ny-hebridene CFP-franc + NHF + + + Nicaraguanske cordoba + NIC + + + Nicaraguanske gullcordoba + NIG + + + Nicaraguanske cordoba oro + NIO + + + Nederlandske gylden + NLG + + + Norske kroner + kr + + + Nepalesiske rupier + Nrs + + + Nyzealandske dollar + $NZ + + + Nyzealandske pund + NZP + + + Omanske rialer + RO + + + Omanske rial saidi + OMS + + + Panamanske balboa + PAB + + + Transdniestriansk rubler (kupon) + PDK + + + Transdniestrianske nye rubler + PDN + + + Transdniestrianske rubler + PDR + + + Peruvianske inti + PEI + + + Peruvianske sol nuevo + PEN + + + Peruvianske sol + PES + + + Papuanske kina + PGK + + + Filippinske pesos + PHP + + + Pakistanske rupier + Pra + + + Polske zloty + Zl + + + Polske US dollar (FEC) + PLX + + + Polske zloty (1950-1995) + PLZ + + + Palestinske pund + PSP + + + Portugisiske conto + PTC + + + Portugisiske escudo + PTE + + + Paraguayanske guarani + PYG + + + Qatarske riyaler + QR + + + Reunionske franc + REF + + + Rumenske leu + leu + + + Rumenske nye leu + RON + + + Russiske rubler + RUB + + + Russiske rubler (1991-1998) + RUR + + + Rwandiske franc + RWF + + + Saudiarabiske riyaler + SRl + + + Saudiarabiske riyaler (1936-1952) + SAS + + + Salomonske dollar + SI$ + + + Seychelliske rupier + SR + + + Sudanesiske dinarer + SDD + + + Sudanesiske pund + SDP + + + Svenske kroner + SKr + + + Singaporske dollar + S$ + + + Sankthelenske pund + SHP + + + Slovenske tolar bons + SIB + + + Slovenske tolar + SIT + + + Slovakiske koruna + Sk + + + Sierraleonske leone + SLL + + + Sanmarinske lira + SML + + + Somaliske shilling + So. Sh. 
+ + + Somalilandske shilling + SQS + + + Surinamske gylden + Sf + + + Skotske pund + SSP + + + Sao Tome og Principe-dobra + Db + + + Sao Tome og Principe-escudo + STE + + + Sovjetiske nye rubler + SUN + + + Sovjetiske rubler + SUR + + + Salvadoranske colon + SVC + + + Syriske pund + LS + + + Swazilandske lilangeni + E + + + Turks- og Caicosøyene-crown + TCC + + + Tsjadiske franc (CFA) + TDF + + + Thailandske baht + THB + + + Tadsjikiske rubler + TJR + + + Tadsjikiske somoni + TJS + + + Turkmenske manat + TMM + + + Tunisiske dinarer + TND + + + Tonganske paʻanga + T$ + + + Tonganske pund sterling + TOS + + + Timoresiske escudo + TPE + + + Timoresiske pataca + TPP + + + Tyrkiske lira + TL + + + Trinidadiske dollar + TT$ + + + Trinidadiske gamle dollar + TTO + + + Tuvalske dollar + TVD + + + Taiwanske nye dollar + NT$ + + + Tanzanianske shilling + T Sh + + + Ukrainsle hryvnia + UAH + + + Ukrainske karbovanetz + UAK + + + Ugandiske shilling (1966-1987) + UGS + + + Ugandiske shilling + U Sh + + + Amerikanske dollar + US$ + + + Amerikanske dollar (neste dag) + USN + + + Amerikanske dollar (samme dag) + USS + + + Uruguayanske peso fuerte + UYF + + + Uruguayanske pesos (1975-1993) + UYP + + + Uruguayanske peso uruguayo + Ur$ + + + Usbekiske kupong-som + UZC + + + Usbekiske sum + UZS + + + Vatikanstatens lira + VAL + + + Nordvietnamesiske piastre dong viet + VDD + + + Nordvietnamesiske nye dong + VDN + + + Nordvietnamesiske viet minh piastre dong viet + VDP + + + Venezuelanske bolivar + Be + + + De britiske jomfruøyene-dollar + VGD + + + Vietnamesiske dong + VND + + + Vietnamesiske nye dong + VNN + + + Vietnamesiske republikk-dong + VNR + + + Vietnamesiske nasjonale dong + VNS + + + Vanuatisk vatu + VT + + + Vestsamoisk pund + WSP + + + Vestsamoisk tala + WST + + + Asian Dinar Unit of Account + XAD + + + CFA Franc BEAC + XAF + + + Asian Monetary Unit + XAM + + + Gull + XAU + + + European Composite Unit + XBA + + + European Monetary Unit + XBB + + + European Unit of Account 
(XBC) + XBC + + + European Unit of Account (XBD) + XBD + + + Østkaribiske dollar + EC$ + + + CFA Nouveau Franc + XCF + + + Special Drawing Rights + XDR + + + CFA Franc BCEAEC + XEF + + + European Currency Unit + XEU + + + French Gold Franc + XFO + + + French UIC-Franc + XFU + + + Islamske dinarer + XID + + + French Metropolitan Nouveau Franc + XMF + + + Franske antiller-franc (CFA) + XNF + + + CFA Franc BCEAO + XOF + + + CFP Franc + CFPF + + + COMECON Transferable Ruble + XTR + + + Jemenittiske dinarer + YDD + + + Jemenittiske imadi-riyaler + YEI + + + Jemenittiske rialer + YRl + + + Jugoslaviske dinarer (hard) + YUD + + + Jugoslaviske føderasjonen-dinarer + YUF + + + Jugoslaviske 1994-dinarer + YUG + + + Jugoslaviske noviy-dinarer + YUM + + + Jugoslaviske konvertible dinarer + YUN + + + Jugoslaviske oktoberdinarer + YUO + + + Jugoslaviske reforerte dinarer + YUR + + + Sørafrikanske rand (økonomisk) + ZAL + + + Sørafrikanske pund + ZAP + + + Sørafrikanske rand + R + + + Zambiske kwacha + ZMK + + + Zambiske pund + ZMP + + + Zairiske nye zaire + ZRN + + + Zairiske zaire + ZRZ + + + Zimbabwiske dollar + Z$ + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/nb_NO.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/nb_NO.xml new file mode 100644 index 0000000..a3e9444 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/nb_NO.xml @@ -0,0 +1,40 @@ + + + + + + + + + + + + + + #,##0.###;-#,##0.### + + + + + + + #E0 + + + + + + + #,##0% + + + + + + + #,##0.00 ¤;-#,##0.00 ¤ + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/nl.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/nl.xml new file mode 100644 index 0000000..840972d --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/nl.xml @@ -0,0 +1,2650 @@ + + + + + + + + + + + Afar + Abchazisch + Avestisch + Afrikaans + Akan + Amhaars + Aragonees + Arabisch + Assamees + 
Avarisch + Aymara + Azerbeidzjaans + Basjkiers + Wit-Russisch + Bulgaars + Bihari + Bislama + Bambara + Bengalees + Tibetaans + Bretons + Bosnisch + Blin + Catalaans + Chechen + Chamorro + Cherokee + Corsicaans + Cree + Tsjechisch + Kerkslavisch + Tsjoevasjisch + Welsh + Deens + Duits + Divehi + Dzongkha + Ewe + Grieks + Engels + Esperanto + Spaans + Estlands + Baskisch + Perzisch + Fulah + Fins + Fijisch + Faeröers + Frans + Fries + Iers + Schots Gaelic + Geez + Galicisch + Guarani + Gujarati + Manx + Hausa + Hawaïaans + Hebreeuws + Hindi + Hiri Motu + Kroatisch + Haïtiaans + Hongaars + Armeens + Herero + Interlingua + Indonesisch + Interlingue + Igbo + Sichuan Yi + Inupiaq + Ido + IJslands + Italiaans + Inuktitut + Japans + Javaans + Georgisch + Kongo + Kikuyu + Kuanyama + Kazachs + Kalaallisut + Khmer + Kannada + Koreaans + Konkani + Kanuri + Kashmiri + Koerdisch + Komi + Cornish + Kirgizisch + Latijn + Luxemburgs + Ganda + Limburgs + Lingala + Lao + Litouws + Luba-Katanga + Letlands + Malagasisch + Marshallees + Maori + Macedonisch + Malayalam + Mongools + Moldavisch + Marathi + Maleis + Maltees + Birmees + Nauru + Noors - Bokmål + Ndebele, noord- + Nepalees + Ndonga + Nederlands + Noors - Nynorsk + Noors + Ndebele, zuid- + Navajo + Nyanja + Langue d’Oc (na 1500) + Ojibwa + Oromo + Oriya + Ossetisch + Punjabi + Pali + Pools + Pashto + Portugees + Quechua + Retoromaans + Rundi + Roemeens + Russisch + Kinyarwanda + Sanskrit + Sardinisch + Sindhi + Noord-Samisch + Sango + Servokroatisch + Singalees + Sidamo + Slowaaks + Sloveens + Samoaans + Shona + Somalisch + Albanees + Servisch + Swati + Sotho, zuid + Sundanees + Zweeds + Swahili + Syriac + Tamil + Teloegoe + Tadzjik + Thai + Tigrinya + Tigre + Turkmeens + Tagalog + Tswana + Tonga (Tonga-eilanden) + Turks + Tsonga + Tataars + Twi + Tahitisch + Uighur + Oekraïens + Urdu + Oezbeeks + Venda + Vietnamees + Volapük + Wallonisch + Wolof + Xhosa + Jiddisch + Joruba + Zhuang + Chinees + Zulu + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Andorra + Verenigde Arabische Emiraten + Afghanistan + Antigua en Barbuda + Anguilla + Albanië + Armenië + Nederlandse Antillen + Angola + Antarctica + Argentinië + Amerikaans Samoa + Oostenrijk + Australië + Aruba + Azerbeidzjan + Bosnië Herzegovina + Barbados + Bangladesh + België + Burkina Faso + Bulgarije + Bahrein + Burundi + Benin + Bermuda + Brunei Darussalam + Bolivia + Brazilië + Bahama’s + Bhutan + Bouveteiland + Botswana + Wit-Rusland + Belize + Canada + Cocoseilanden + Congo, Democratische Republiek + Centraal-Afrikaanse Republiek + Congo + Zwitserland + Ivoorkust + Cookeilanden + Chili + Kameroen + China + Colombia + Costa Rica + Cuba + Kaapverdië + Christmaseiland + Cyprus + Tsjechië + Duitsland + Djibouti + Denemarken + Dominica + Dominicaanse Republiek + Algerije + Ecuador + Estland + Egypte + West-Sahara + Eritrea + Spanje + Ethiopië + Finland + Fiji + Falklandeilanden + Micronesia, Federale Staten van + Faeröer + Frankrijk + en + Gabon + Verenigd Koninkrijk + Grenada + Georgië + Frans-Guyana + Ghana + Gibraltar + Groenland + Gambia + Guinea + Guadeloupe + Equatoriaal-Guinea + Griekenland + Zuid-Georgië en Zuidelijke Sandwicheilanden + Guatemala + Guam + Guinee-Bissau + Guyana + Hongkong S.A.R. van China + Heardeiland en McDonaldeiland + Honduras + Kroatië + Haïti + Hongarije + Indonesië + Ierland + Israël + India + Brits Territorium in de Indische Oceaan + Irak + Iran + IJsland + Italië + Jamaica + Jordanië + Japan + Kenia + Kirgizstan + Cambodja + Kiribati + Comoren + Saint Kitts en Nevis + Noord-Korea + Zuid-Korea + Koeweit + Caymaneilanden + Kazachstan + Laos + Libanon + Saint Lucia + Liechtenstein + Sri Lanka + Liberia + Lesotho + Litouwen + Luxemburg + Letland + Libië + Marokko + Monaco + Republiek Moldavië + Madagaskar + Marshalleilanden + Macedonië, Republiek + Mali + Myanmar + Mongolië + Macao S.A.R. 
van China + Noordelijke Marianeneilanden + Martinique + Mauritanië + Montserrat + Malta + Mauritius + Maldiven + Malawi + Mexico + Maleisië + Mozambique + Namibië + Nieuw-Caledonië + Niger + Norfolkeiland + Nigeria + Nicaragua + Nederland + Noorwegen + Nepal + Nauru + Niue + Nieuw-Zeeland + Oman + Panama + Peru + Frans-Polynesië + Papoea-Nieuw-Guinea + Filipijnen + Pakistan + Polen + Saint Pierre en Miquelon + Pitcairn + Puerto Rico + Palestijns Gebied + Portugal + Palau + Paraguay + Qatar + Réunion + Roemenië + Russische Federatie + Rwanda + Saoedi-Arabië + Salomonseilanden + Seychellen + Soedan + Zweden + Singapore + Saint Helena + Slovenië + Svalbard en Jan Mayen + Slowakije + Sierra Leone + San Marino + Senegal + Somalië + Servië + Suriname + Sao Tomé en Principe + El Salvador + Syrië + Swaziland + Turks- en Caicoseilanden + Tsjaad + Franse Gebieden in de zuidelijke Indische Oceaan + Togo + Thailand + Tadzjikistan + Tokelau + Oost-Timor + Turkmenistan + Tunesië + Tonga + Turkije + Trinidad en Tobago + Tuvalu + Taiwan + Tanzania + Oekraïne + Oeganda + Amerikaanse ondergeschikte afgelegen eilanden + Verenigde Staten + Uruguay + Oezbekistan + Vaticaanstad + Saint Vincent en de Grenadines + Venezuela + Britse Maagdeneilanden + Amerikaanse Maagdeneilanden + Vietnam + Vanuatu + Wallis en Futuna + Samoa + Jemen + Mayotte + Joegoslavië + Zuid-Afrika + Zambia + Zimbabwe + + + Gewijzigd + + + Kalender + Volgorde + Munteenheid + + + Boeddhistische kalender + Chinese kalender + Gregoriaanse kalender + Joodse kalender + Islamitische kalender + Islamitische kalender (cyclisch) + Japanse kalender + Directe volgorde + Telefoonboekvolgorde + Pinyinvolgorde + Streekvolgorde + Traditioneelvolgorde + + + + [a-záéíóúäëïöüij] + + + + + + + + jan + feb + mrt + apr + mei + jun + jul + aug + sep + okt + nov + dec + + + J + F + M + A + M + J + J + A + S + O + N + D + + + januari + februari + maart + april + mei + juni + juli + augustus + september + oktober + november + december + + + + 
+ + + zo + ma + di + wo + do + vr + za + + + Z + M + D + W + D + V + Z + + + zondag + maandag + dinsdag + woensdag + donderdag + vrijdag + zaterdag + + + + + + + + + + v. Chr. + n. Chr. + + + + + + + EEEE d MMMM yyyy + + + + + d MMMM yyyy + + + + + d-MMM-yyyy + + + + + d-M-yy + + + + + + + + H:mm:ss' uur' z + + + + + H:mm:ss z + + + + + H:mm:ss + + + + + H:mm + + + + + + + {1} {0} + + + + + + + + + Tisjrie + Chesjwan + Kislev + Tevet + Sjevat + Adar + Adar B + Nisan + Ijar + Sivan + Tammoez + Av + Elloel + + + Tisjrie + Chesjwan + Kislev + Tevet + Sjevat + Adar + Adar B + Nisan + Ijar + Sivan + Tammoez + Av + Elloel + + + + + + + + + Moeharram + Safar + Rabiʻa al awal + Rabiʻa al thani + Joemadʻal awal + Joemadʻal thani + Rajab + Sjaʻaban + Ramadan + Sjawal + Doe al kaʻaba + Doe al hizja + + + Moeharram + Safar + Rabiʻa al awal + Rabiʻa al thani + Joemadʻal awal + Joemadʻal thani + Rajab + Sjaʻaban + Ramadan + Sjawal + Doe al kaʻaba + Doe al hizja + + + + + + Saʻna Hizjria + + + + + + + + Moeharram + Safar + Rabiʻa al awal + Rabiʻa al thani + Joemadʻal awal + Joemadʻal thani + Rajab + Sjaʻaban + Ramadan + Sjawal + Doe al kaʻaba + Doe al hizja + + + Moeharram + Safar + Rabiʻa al awal + Rabiʻa al thani + Joemadʻal awal + Joemadʻal thani + Rajab + Sjaʻaban + Ramadan + Sjawal + Doe al kaʻaba + Doe al hizja + + + + + + Saʻna Hizjria + + + + + + + + Pacific-standaardtijd + Pacific-zomertijd + + + PST + PDT + + Los Angeles + + + + Pacific-standaardtijd + Pacific-zomertijd + + + PST + PDT + + Los Angeles + + + + Mountain-standaardtijd + Mountain-zomertijd + + + MST + MDT + + Denver + + + + Mountain-standaardtijd + Mountain-zomertijd + + + MST + MDT + + Denver + + + + Mountain-standaardtijd + Mountain-standaardtijd + + + MST + MST + + Phoenix + + + + Mountain-standaardtijd + Mountain-standaardtijd + + + MST + MST + + Phoenix + + + + Central-standaardtijd + Central-zomertijd + + + CST + CDT + + Chicago + + + + Central-standaardtijd + Central-zomertijd + + + CST + CDT + + 
Chicago + + + + Eastern-standaardtijd + Eastern-zomertijd + + + EST + EDT + + New York + + + + Eastern-standaardtijd + Eastern-zomertijd + + + EST + EDT + + New York + + + + Eastern-standaardtijd + Eastern-standaardtijd + + + EST + EST + + Indianapolis + + + + Eastern-standaardtijd + Eastern-standaardtijd + + + EST + EST + + Indianapolis + + + + Hawaï-standaardtijd + Hawaï-standaardtijd + + + HST + HST + + Honolulu + + + + Hawaï-standaardtijd + Hawaï-standaardtijd + + + HST + HST + + Honolulu + + + + Alaska-standaardtijd + Alaska-zomertijd + + + AST + ADT + + Anchorage + + + + Alaska-standaardtijd + Alaska-zomertijd + + + AST + ADT + + Anchorage + + + + Atlantic-standaardtijd + Atlantic-zomertijd + + + AST + ADT + + Halifax + + + + Newfoundland-standaardtijd + Newfoundland-zomertijd + + + CNT + CDT + + St. Johns + + + + Newfoundland-standaardtijd + Newfoundland-zomertijd + + + CNT + CDT + + St. Johns + + + + Midden-Europese standaardtijd + Midden-Europese zomertijd + + + CET + CEST + + Paris + + + + Midden-Europese standaardtijd + Midden-Europese zomertijd + + + CET + CEST + + Parijs + + + + Greenwich Mean Time + Greenwich Mean Time + + + GMT + GMT + + Londen + + + + Greenwich Mean Time + Greenwich Mean Time + + + GMT + GMT + + Casablanca + + + + Israëlische standaardtijd + Israëlische zomertijd + + + IST + IDT + + Jeruzalem + + + + Japanse standaardtijd + Japanse standaardtijd + + + JST + JST + + Tokyo + + + + Japanse standaardtijd + Japanse standaardtijd + + + JST + JST + + Tokyo + + + + Oost-Europese standaardtijd + Oost-Europese zomertijd + + + EET + EEST + + Boekarest + + + + Chinese standaardtijd + Chinese standaardtijd + + + CTT + CDT + + Shanghai + + + + Chinese standaardtijd + Chinese standaardtijd + + + CTT + CDT + + Shanghai + + + + + + , + . 
+ ; + % + 0 + # + + + - + E + + + + + + + Andorrese diner + ADD + + + Andorrese peseta + ADP + + + Verenigde Arabische Emiraten-dirham + AED + + + Afghani (1927-2002) + AFA + + + Afghani + Af + + + Affars en Issas-franc + AIF + + + Albanese lek (1946-1961) + ALK + + + Albanese lek + lek + + + Albanese lek valute + ALV + + + Albanese dollarwisselcertificaten + ALX + + + Armeense dram + dram + + + Nederlands-Antilliaanse gulden + NA f. + + + Angolese kwanza + AOA + + + Angolese kwanza (1977-1990) + AOK + + + Angolese nieuwe kwanza (1990-2000) + AON + + + Angolese kwanza reajustado (1995-1999) + AOR + + + Angolese escudo + AOS + + + Argentijnse austral + ARA + + + Argentijnse peso moneda nacional + ARM + + + Argentijnse peso (1983-1985) + ARP + + + Argentijnse peso + Arg$ + + + Oostenrijkse schilling + ATS + + + Australische dollar + $A + + + Australisch pond + AUP + + + Arubaanse gulden + AWG + + + Azerbeidzjaanse manat + AZM + + + Bosnische dinar + BAD + + + Bosnische convertibele mark + KM + + + Bosnische nieuwe dinar + BAN + + + Barbadaanse dollar + BDS$ + + + Bengalese taka + Tk + + + Belgische frank (convertibel) + BEC + + + Belgische frank + BF + + + Belgische frank (financieel) + BEL + + + Bulgaarse harde lev + lev + + + Bulgaarse socialistische lev + BGM + + + Bulgaarse nieuwe lev + BGN + + + Bulgaarse lev (1879-1952) + BGO + + + Bulgaarse levwisselcertificaten + BGX + + + Bahreinse dinar + BD + + + Burundese franc + Fbu + + + Bermuda-dollar + Ber$ + + + Bermuda-pond + BMP + + + Bruneise dollar + BND + + + Boliviano + Bs + + + Boliviano (1863-1962) + BOL + + + Boliviaanse peso + BOP + + + Boliviaanse mvdol + BOV + + + Braziliaanse cruzeiro novo (1967-1986) + BRB + + + Braziliaanse cruzado + BRC + + + Braziliaanse cruzeiro (1990-1993) + BRE + + + Braziliaanse real + R$ + + + Braziliaanse cruzado novo + BRN + + + Braziliaanse cruzeiro + BRR + + + Braziliaanse cruzeiro (1942-1967) + BRZ + + + Bahamaanse dollar + BSD + + + Bahamaans pond + BSP + + + Bhutaanse 
ngultrum + Nu + + + Bhutaanse rupee + BTR + + + Birmese kyat + BUK + + + Birmese rupee + BUR + + + Botswaanse pula + BWP + + + Wit-Russische nieuwe roebel (1994-1999) + BYB + + + Wit-Russische roebel (1992-1994) + BYL + + + Wit-Russische roebel + Rbl + + + Belizaanse dollar + BZ$ + + + Brits-Hondurese dollar + BZH + + + Canadese dollar + Can$ + + + Congolese franc congolais + CDF + + + Congolese franc + CDG + + + Congolese zaïre + CDL + + + Centraal-Afrikaanse CFA-franc + CFF + + + Zwitserse franc + SwF + + + Cookeilandse dollar + CKD + + + Chileense condor + CLC + + + Chileense escudo + CLE + + + Chileense unidades de fomento + CLF + + + Chileense peso + Ch$ + + + Kameroense CFA-franc + CMF + + + Chinese jen min piao yuan + CNP + + + Chinese Amerikaanse-dollarwisselcertificaten + CNX + + + Chinese yuan renminbi + Y + + + Colombiaanse papieren peso + COB + + + Congolese CFA-franc + COF + + + Colombiaanse peso + Col$ + + + Costaricaanse colón + C + + + Tsjechoslowaakse koruna + CSC + + + Tsjechoslowaakse harde koruna + CSK + + + Cubaanse peso + CUP + + + Cubaanse wisselcertificaten + CUX + + + Kaapverdische escudo + CVEsc + + + Curaçao-gulden + CWG + + + Cyprisch pond + £C + + + Tsjechische koruna + CZK + + + Oost-Duitse ostmark + DDM + + + Duitse mark + DEM + + + Duitse sperrmark + DES + + + Djiboutiaanse franc + DF + + + Deense kroon + DKr + + + Dominicaanse peso + RD$ + + + Algerijnse dinar + DA + + + Algerijnse nieuwe franc + DZF + + + Algerijnse franc germinal + DZG + + + Ecuadoraanse sucre + ECS + + + Ecuadoraanse unidad de valor constante (UVC) + ECV + + + Estlandse kroon + EEK + + + Egyptisch pond + EGP + + + Eritrese nakfa + ERN + + + Spaanse peseta + ESP + + + Ethiopische birr + Br + + + Ethiopische dollar + ETD + + + Euro + + + + Finse markka + FIM + + + Finse markka (1860-1962) + FIN + + + Fijische dollar + F$ + + + Fijisch pond + FJP + + + Falklandeilands pond + FKP + + + Faeröerse kroon + FOK + + + Franse franc + FRF + + + Franse franc germinal/franc 
poincare + FRG + + + Gabonese CFA-franc + GAF + + + Brits pond sterling + £ + + + Georgische kupon larit + GEK + + + Georgische lari + lari + + + Ghanese cedi + GHC + + + Ghanese oude cedi + GHO + + + Ghanees pond + GHP + + + Ghanese hergewaardeerde cedi + GHR + + + Gibraltarees pond + GIP + + + Groenlandse kroon + GLK + + + Gambiaanse dalasi + GMD + + + Gambiaans pond + GMP + + + Guinese franc + GF + + + Guinese franc (1960-1972) + GNI + + + Guinese syli + GNS + + + Guadeloupse franc + GPF + + + Equatoriaal-Guinese ekwele guineana + GQE + + + Equatoriaal-Guinese franco + GQF + + + Equatoriaal-Guinese peseta puineana + GQP + + + Griekse drachme + GRD + + + Griekse nieuwe drachme + GRN + + + Guatemalteekse quetzal + Q + + + Frans-Guyaanse franc guiana + GUF + + + Portuguees-Guinese escudo + GWE + + + Portuguees-Guinese mil reis + GWM + + + Guinee-Bissause peso + GWP + + + Guyaanse dollar + G$ + + + Hongkongse dollar + HK$ + + + Hodurese lempira + L + + + Kroatische dinar + HRD + + + Kroatische kuna + HRK + + + Haïtiaanse gourde + HTG + + + Hongaarse forint + Ft + + + Noord-Iers pond + IBP + + + Indonesische nica-gulden + IDG + + + Indonesische Java-rupiah + IDJ + + + Indonesische nieuwe rupiah + IDN + + + Indonesische rupiah + Rp + + + Iers pond + IR£ + + + Israëlische shekel + ILL + + + Israëlisch pond + ILP + + + Israëlische nieuwe shekel + ILS + + + Isle of Man pond sterling + IMP + + + Indiase rupee + =0#Rs.|1#Re.|1<Rs. 
+ + + Iraakse dinar + ID + + + Iraanse rial + RI + + + IJslandse kroon + ISK + + + Italiaanse lire + + + + Jersey pond sterling + JEP + + + Jamaicaanse dollar + J$ + + + Jamaicaans pond + JMP + + + Jordaanse dinar + JD + + + Japanse yen + ¥ + + + Kenyaanse shilling + K Sh + + + Kirgizische som + som + + + Cambodjaanse oude riel + KHO + + + Cambodjaanse riel + CR + + + Kiribatische dollar + KID + + + Comorese franc + CF + + + Noord-Koreaanse Volksrepubliek-won + KPP + + + Noord-Koreaanse won + KPW + + + Zuid-Koreaanse hwan + KRH + + + Zuid-Koreaanse oude won + KRO + + + Zuid-Koreaanse won + KRW + + + Koeweitse dinar + KD + + + Caymaneilandse dollar + KYD + + + Kazachstaanse roebel + KZR + + + Kazachstaanse tenge + T + + + Laotiaanse kip + LAK + + + Libanees pond + LL + + + Liechtensteinse frank + LIF + + + Srilankaanse rupee + SL Re + + + Ceylon-rupee + LNR + + + Liberiaanse dollar + LRD + + + Lesothaanse loti + M + + + Litouwse litas + LTL + + + Litouwse talonas + LTT + + + Luxemburgse frank + LUF + + + Letse lats + LVL + + + Letse roebel + LVR + + + Libische Britse Militaire Autoriteit-lire + LYB + + + Libische dinar + LD + + + Libisch pond + LYP + + + Marokkaanse dirham + MAD + + + Marokkaanse franc + MAF + + + Monegaskische nieuwe franc + MCF + + + Monegaskische franc germinal + MCG + + + Monegaskische leu cupon + MDC + + + Moldavische leu + MDL + + + Moldavische roebel-cupon + MDR + + + Malagassische ariary + MGA + + + Malagassische franc + MGF + + + Marshalleilandse dollar + MHD + + + Macedonische denar + MDen + + + Macedonische denar (1992-1993) + MKN + + + Malinese franc + MLF + + + Myanmarese kyat + MMK + + + Myanmarese dollarwisselcertificaten + MMX + + + Mongoolse tugrik + Tug + + + Macause pataca + MOP + + + Martinikaanse franc + MQF + + + Mauritaanse ouguiya + UM + + + Maltese lire + Lm + + + Maltees pond + MTP + + + Mauritiaanse rupee + MUR + + + Maldivische rupee + MVP + + + Maldivische rufiyaa + MVR + + + Malawische kwacha + MK + + + Malawisch pond + 
MWP + + + Mexicaanse peso + MEX$ + + + Mexicaanse zilveren peso (1861-1992) + MXP + + + Mexicaanse unidad de inversion (UDI) + MXV + + + Maleisische ringgit + RM + + + Mozambikaanse escudo + MZE + + + Mozambikaanse metical + Mt + + + Namibische dollar + N$ + + + Nieuw-Caledonische franc germinal + NCF + + + Nigeriaanse naira + NGN + + + Nigeriaans pond + NGP + + + Nieuw-Hebridiaanse CFP-franc + NHF + + + Nicaraguaanse córdoba + NIC + + + Nicaraguaanse gouden córdoba + NIG + + + Nicaraguaanse córdoba oro + NIO + + + Nederlandse gulden + fl + + + Noorse kroon + NKr + + + Nepalese rupee + Nrs + + + Nieuw-Zeelandse dollar + $NZ + + + Nieuw-Zeelands pond + NZP + + + Omaanse rial + RO + + + Omaanse rial saidi + OMS + + + Panamese balboa + PAB + + + Trans-Djnestrische roebel-kupon + PDK + + + Trans-Djnestrische nieuwe roebel + PDN + + + Trans-Djnestrische roebel + PDR + + + Peruaanse inti + PEI + + + Peruaanse nieuwe sol + PEN + + + Peruaanse sol + PES + + + Papuaanse kina + PGK + + + Filipijnse peso + PHP + + + Pakistaanse rupee + Pra + + + Poolse zloty + Zl + + + Poolse Amerikaanse-dollarwisselcertificaten + PLX + + + Poolse zloty (1950-1995) + PLZ + + + Palestijns pond + PSP + + + Portugese conto + PTC + + + Portugese escudo + PTE + + + Paraguayaanse guarani + PYG + + + Qatarese rial + QR + + + Réunionse franc + REF + + + Roemeense leu + leu + + + Roemeense nieuwe leu + RON + + + Russische roebel + RUB + + + Russische roebel (1991-1998) + RUR + + + Rwandese franc + RWF + + + Saoedische rial + SRl + + + Saoedische souvereine rial + SAS + + + Salomonseilandse dollar + SI$ + + + Seychelse rupee + SR + + + Soedanese dinar + SDD + + + Soedanees pond + SDP + + + Zweedse kroon + SKr + + + Singaporese dollar + S$ + + + Sint-Heleense pond + SHP + + + Sloveense tolar bons + SIB + + + Sloveense tolar + SIT + + + Slowaakse koruna + Sk + + + Sierraleoonse leone + SLL + + + Sanmarinese lire + SML + + + Somalische shilling + So. Sh. 
+ + + Somalilandse shilling + SQS + + + Surinaamse gulden + Sf + + + Schotse pond + SSP + + + Santomese dobra + Db + + + Santomese escudo + STE + + + Nieuwe sovjet-roebel + SUN + + + Sovjet-roebel + SUR + + + Salvadoraanse colón + SVC + + + Syrisch pond + LS + + + Swazische lilangeni + E + + + Turks en Caicos-kroon + TCC + + + Tsjaadse CFA-franc + TDF + + + Thaise baht + THB + + + Tadzjikistaanse roebel + TJR + + + Tadzjikistaanse somoni + TJS + + + Turkmeense manat + TMM + + + Tunesische dinar + TND + + + Tongaanse paʻanga + T$ + + + Tongaans pond sterling + TOS + + + Timorese escudo + TPE + + + Timorese pataca + TPP + + + Turkse lire + TL + + + Trinidad en Tobago-dollar + TT$ + + + Trinidad en Tobago-oude dollar + TTO + + + Tuvaluaanse dollar + TVD + + + Nieuwe Taiwanese dollar + NT$ + + + Tanzaniaanse shilling + T Sh + + + Oekraïense hryvnia + UAH + + + Oekraïense karbovanetz + UAK + + + Oegandese shilling (1966-1987) + UGS + + + Oegandese shilling + U Sh + + + Amerikaanse dollar + US$ + + + Amerikaanse dollar (volgende dag) + USN + + + Amerikaanse dollar (zelfde dag) + USS + + + Uruguayaanse peso fuerte + UYF + + + Uruguayaanse peso (1975-1993) + UYP + + + Uruguayaanse peso uruguayo + Ur$ + + + Oezbekistaanse coupon-som + UZC + + + Oezbekistaanse sum + UZS + + + Vaticaanse lire + VAL + + + Noord-Vietnamese piastre dong viet + VDD + + + Noord-Vietnamese nieuwe dong + VDN + + + Noord-Vietnamese viet minh piastre dong viet + VDP + + + Venezolaanse bolivar + Be + + + Britse Maagdeneilandse dollar + VGD + + + Vietnamese dong + VND + + + Vietnamese nieuwe dong + VNN + + + Vietnamese Republiek-dong + VNR + + + Vietnamese nationale dong + VNS + + + Vanuatuaanse vatu + VT + + + West-Samoaans pond + WSP + + + West-Samoaanse tala + WST + + + Aziatische dinar-rekeneenheid + XAD + + + CFA-franc BEAC + XAF + + + Aziatische monetaire eenheid + XAM + + + Goud + XAU + + + Europese samengestelde eenheid + XBA + + + Europese monetaire eenheid + XBB + + + Europese rekeneenheid 
(XBC) + XBC + + + Europese rekeneenheid (XBD) + XBD + + + Oost-Caribische dollar + EC$ + + + CFA nieuwe franc + XCF + + + Special Drawing Rights + XDR + + + CFA-franc BCEAEC + XEF + + + European Currency Unit + XEU + + + Franse gouden franc + XFO + + + Franse UIC-franc + XFU + + + Islamitische dinar + XID + + + Franse metropolische nieuwe franc + XMF + + + Franse antillen CFA-franc + XNF + + + CFA-franc BCEAO + XOF + + + CFP-franc + CFPF + + + COMECON transferable roebel + XTR + + + Jemenitische dinar + YDD + + + Jemenitische imadi rial + YEI + + + Jemenitische rial + YRl + + + Joegoslavische harde dinar + YUD + + + Joegoslavische federale dinar + YUF + + + Joegoslavische 1994-dinar + YUG + + + Joegoslavische noviy-dinar + YUM + + + Joegoslavische convertibele dinar + YUN + + + Joegoslavische oktober-dinar + YUO + + + Joegoslavische hervormde dinar + YUR + + + Zuid-Afrikaanse rand (financieel) + ZAL + + + Zuid-Afrikaans pond + ZAP + + + Zuid-Afrikaanse rand + R + + + Zambiaanse kwacha + ZMK + + + Zambiaans pond + ZMP + + + Zaïrese nieuwe zaïre + ZRN + + + Zaïrese zaïre + ZRZ + + + Zimbabwaanse dollar + Z$ + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/nl_BE.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/nl_BE.xml new file mode 100644 index 0000000..4e11f65 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/nl_BE.xml @@ -0,0 +1,103 @@ + + + + + + + + + + + + + + + + + + + + + EEEE d MMMM yyyy + + + + + d MMMM yyyy + + + + + d-MMM-yy + + + + + d/MM/yy + + + + + + + + HH.mm' u. 
'z + + + + + HH:mm:ss z + + + + + HH:mm:ss + + + + + HH:mm + + + + + + + {1} {0} + + + + + + + + + + + #,##0.###;-#,##0.### + + + + + + + #E0 + + + + + + + #,##0% + + + + + + + #,##0.00 ¤;-#,##0.00 ¤ + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/nl_NL.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/nl_NL.xml new file mode 100644 index 0000000..343c8ae --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/nl_NL.xml @@ -0,0 +1,40 @@ + + + + + + + + + + + + + + #,##0.###;-#,##0.### + + + + + + + #E0 + + + + + + + #,##0% + + + + + + + ¤ #,##0.00;¤ #,##0.00- + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/nn.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/nn.xml new file mode 100644 index 0000000..9cf2cf8 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/nn.xml @@ -0,0 +1,276 @@ + + + + + + + + + + + norsk bokmål + norsk nynorsk + norsk + + + De forente arabiske emiratene + Antigua og Barbuda + De nederlandske antiller + Antarktis + Amerikansk Samoa + Østerrike + Aserbajdsjan + Bosnia og Hercegovina + Belgia + Brunei Darussalam + Brasil + Bouvetøya + Hviterussland + Kokosøyene (Keelingøyene) + Kongo, Den demokratiske republikken + Den sentralafrikanske republikk + Kongo + Sveits + Elfenbenskysten + Cookøyene + Kamerun + Kina + Kapp Verde + Christmasøya + Kypros + Tsjekkia + Tyskland + Danmark + Den dominikanske republikk + Algerie + Estland + Vest-Sahara + Spania + Etiopia + Falklandsøyene (Malvinas) + Mikronesiaføderasjonen + Færøyene + Frankrike + nb + Storbritannia + Fransk Guyana + Grønland + Ekvatorial-Guinea + Hellas + Sør-Georgia og Sør-Sandwich-øyene + Hong Kong S.A.R. (Kina) + Heard- og McDonaldsøyene + Kroatia + Ungarn + Irland + Britiske områder i det indiske hav + Irak + Island + Italia + Kirgisistan + Kambodsja + Komorene + St. 
Christopher og Nevis + Nord-Korea + Sør-Korea + Caymanøyene + Kasakhstan + Laos, Den folkedemokratiske republikken + Libanon + St. Lucia + Litauen + Marokko + Madagaskar + Marshalløyene + Makedonia, Republikken + Macao S.A.R. (Kina) + Nord-Marianene + Maldivene + Mosambik + Ny-Caledonia + Norfolkøyene + Nederland + Noreg + Fransk Polynesia + Papua Ny-Guinea + Filippinene + Polen + St. Pierre og Miquelon + Palestinsk territorium + Reunion + Den russiske føderasjon + Salomonøyene + Seychellene + Sverige + Svalbard og Jan Mayen + Surinam + Sao Tome og Principe + Turks- og Caicosøyene + Tchad + Franske sørområder + Tadsjikistan + Øst-Timor + Tyrkia + Trinidad og Tobago + Ukraina + USAs mindre øyer + USA + Usbekistan + Vatikanstaten + St. Vincent og Grenadinene + Jomfruøyene (britisk) + Jomfruøyene (USA) + Wallis og Futuna + Jugoslavia + Sør-Afrika + + + + [a-zæåøéóôàüǎ] + + + + + + + + jan + feb + mar + apr + mai + jun + jul + aug + sep + okt + nov + des + + + januar + februar + mars + april + mai + juni + juli + august + september + oktober + november + desember + + + + + + + su + + ty + on + to + fr + la + + + sundag + måndag + tysdag + onsdag + torsdag + fredag + laurdag + + + + + + + + + + f.Kr. + e.Kr. + + + + + + + EEEE d. MMMM yyyy + + + + + d. MMMM yyyy + + + + + d. MMM. yyyy + + + + + dd.MM.yy + + + + + + + + 'kl. 
'HH.mm.ss z + + + + + HH.mm.ss z + + + + + HH.mm.ss + + + + + HH.mm + + + + + + + {1} {0} + + + + + + + + + , +   + ; + % + 0 + # + + + - + E + + + + + + + NOK + kr + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/nn_NO.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/nn_NO.xml new file mode 100644 index 0000000..bdd2ffc --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/nn_NO.xml @@ -0,0 +1,40 @@ + + + + + + + + + + + + + + #,##0.###;-#,##0.### + + + + + + + #E0 + + + + + + + #,##0% + + + + + + + #,##0.00 ¤;-#,##0.00 ¤ + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/no.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/no.xml new file mode 100644 index 0000000..8565644 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/no.xml @@ -0,0 +1,2229 @@ + + + + + + + + + + + afar + abkhasisk + avestisk + afrikaans + akan + amharisk + aragonsk + arabisk + assamisk + avarisk + aymara + aserbajdsjansk + basjkirsk + hviterussisk + bulgarsk + bihari + bislama + bambara + bengali + tibetansk + bretonsk + bosnisk + blin + katalansk + tsjetsjensk + chamorro + cherokee + korsikansk + cree + tsjekkisk + kirkeslavisk + tsjuvansk + walisisk + dansk + tysk + divehi + dzongkha + ewe + gresk + engelsk + esperanto + spansk + estisk + baskisk + persisk + fulani + finsk + fijiansk + færøysk + fransk + frisisk + irsk + skotsk gælisk + ges + galicisk + guarani + gujarati + manx + hawaiisk + hebraisk + hindi + hiri motu + kroatisk + haitisk + ungarsk + armensk + herero + interlingua + indonesisk + interlingue + ibo + sichuan-yi + unupiak + ido + islandsk + italiensk + inuktitut + japansk + javanesisk + georgisk + kikongo + kikuyu + kuanyama + kasakhisk + kalaallisut + khmer + kannada + koreansk + konkani + kanuri + kasjmiri + kurdisk + komi + kornisk + kirgisisk + latin + luxemburgsk + ganda + limburgisk + lingala + laotisk + 
litauisk + luba-katanga + latvisk + madagassisk + marshallesisk + maori + makedonsk + malayalam + mongolsk + moldavisk + marathi + malayisk + maltesisk + burmesisk + nauru + norsk bokmål + ndebele (nord) + nepalsk + ndonga + nederlandsk + norsk nynorsk + norsk + ndebele, sør + navajo + nyanja + oksitansk (etter 1500) + ojibwa + oromo + oriya + ossetisk + panjabi + pali + polsk + pashto + portugisisk + quechua + retoromansk + rundi + rumensk + russisk + kinjarwanda + sanskrit + sardinsk + sindhi + nordsamisk + sango + serbokroatisk + singalesisk + sidamo + slovakisk + slovensk + samoansk + shona + somalisk + albansk + serbisk + swati + sotho (sørlig) + sundanesisk + svensk + swahili + syrisk + tamil + telugu + tatsjikisk + thai + tigrinja + tigré + turkmensk + tagalog + tswana + tonga (Tonga-øyene) + tyrkisk + tsonga + tatarisk + twi + tahitisk + uigurisk + ukrainsk + urdu + usbekisk + venda + vietnamesisk + volapyk + vallonsk + wolof + xhosa + jiddisk + joruba + zhuang + kinesisk + zulu + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Andorra + De forente arabiske emiratene + Afghanistan + Antigua og Barbuda + Anguilla + Albania + Armenia + De nederlandske antiller + Angola + Antarktis + Argentina + Amerikansk Samoa + Østerrike + Australia + Aruba + Aserbajdsjan + Bosnia og Hercegovina + Barbados + Bangladesh + Belgia + Burkina Faso + Bulgaria + Bahrain + Burundi + Benin + Bermuda + Brunei Darussalam + Bolivia + Brasil + Bahamas + Bhutan + Bouvetøya + Botswana + Hviterussland + Belize + Canada + Kokosøyene (Keelingøyene) + Kongo, Den demokratiske republikken + Den sentralafrikanske republikk + Kongo + Sveits + Elfenbenskysten + Cookøyene + Chile + Kamerun + Kina + Colombia + Costa Rica + Cuba + Kapp Verde + Christmasøya + Kypros + Tsjekkia + Tyskland + Djibouti + Danmark + Dominica + Den dominikanske republikk + Algerie + Ecuador + Estland + Egypt + Vest-Sahara + Eritrea + Spania + Etiopia + 
Finland + Fiji + Falklandsøyene (Malvinas) + Mikronesiaføderasjonen + Færøyene + Frankrike + en + Gabon + Storbritannia + Grenada + Georgia + Fransk Guyana + Ghana + Gibraltar + Grønland + Gambia + Guinea + Guadeloupe + Ekvatorial-Guinea + Hellas + Sør-Georgia og Sør-Sandwich-øyene + Guatemala + Guam + Guinea-Bissau + Guyana + Hong Kong S.A.R. (Kina) + Heard- og McDonaldsøyene + Honduras + Kroatia + Haiti + Ungarn + Indonesia + Irland + Israel + India + Britiske områder i det indiske hav + Irak + Iran + Island + Italia + Jamaica + Jordan + Japan + Kenya + Kirgisistan + Kambodsja + Kiribati + Komorene + St. Christopher og Nevis + Nord-Korea + Sør-Korea + Kuwait + Caymanøyene + Kasakhstan + Laos, Den folkedemokratiske republikken + Libanon + St. Lucia + Liechtenstein + Sri Lanka + Liberia + Lesotho + Litauen + Luxembourg + Latvia + Libya + Marokko + Monaco + Moldova + Madagaskar + Marshalløyene + Makedonia, Republikken + Mali + Myanmar + Mongolia + Macao S.A.R. (Kina) + Nord-Marianene + Martinique + Mauritania + Montserrat + Malta + Mauritius + Maldivene + Malawi + Mexico + Malaysia + Mosambik + Namibia + Ny-Caledonia + Niger + Norfolkøyene + Nigeria + Nicaragua + Nederland + Norge + Nepal + Nauru + Niue + New Zealand + Oman + Panama + Peru + Fransk Polynesia + Papua Ny-Guinea + Filippinene + Pakistan + Polen + St. 
Pierre og Miquelon + Pitcairn + Puerto Rico + Palestinsk territorium + Portugal + Palau + Paraguay + Qatar + Reunion + Romania + Den russiske føderasjon + Rwanda + Saudi Arabia + Salomonøyene + Seychellene + Sudan + Sverige + Singapore + Saint Helena + Slovenia + Svalbard og Jan Mayen + Slovakia + Sierra Leone + San Marino + Senegal + Somalia + Serbia + Surinam + Sao Tome og Principe + El Salvador + Syria + Swaziland + Turks- og Caicosøyene + Tchad + Franske sørområder + Togo + Thailand + Tadsjikistan + Tokelau + Øst-Timor + Turkmenistan + Tunisia + Tonga + Tyrkia + Trinidad og Tobago + Tuvalu + Taiwan + Tanzania + Ukraina + Uganda + USAs mindre øyer + USA + Uruguay + Usbekistan + Vatikanstaten + St. Vincent og Grenadinene + Venezuela + Jomfruøyene (britisk) + Jomfruøyene (USA) + Vietnam + Vanuatu + Wallis og Futuna + Samoa + Yemen + Mayotte + Jugoslavia + Sør-Afrika + Zambia + Zimbabwe + + + Revidert + + + Kalendar + Kollasjon + Valuta + + + Buddhistisk kalender + Kinesisk kalender + Gregoriansk kalender + Hebraisk kalender + Islamsk kalender + Islamsk sivil kalender + Japansk kalender + Direkte rekkefølge + Telefonkatalogrekkefølge + Pinyin-rekkefølge + Strekrekkefølge + Tradisjonell rekkefølge + + + + [a-zæåøéóôàüǎ] + + + + + + + + jan + feb + mar + apr + mai + jun + jul + aug + sep + okt + nov + des + + + J + F + M + A + M + J + J + A + S + O + N + D + + + januar + februar + mars + april + mai + juni + juli + august + september + oktober + november + desember + + + + + + + + ma + ti + on + to + fr + + + + S + M + T + O + T + F + L + + + søndag + mandag + tirsdag + onsdag + torsdag + fredag + lørdag + + + + + + + + + + f.Kr. + e.Kr. + + + + + + + EEEE d. MMMM yyyy + + + + + d. MMMM yyyy + + + + + d. MMM. yyyy + + + + + dd.MM.yy + + + + + + + + 'kl. 
'HH.mm.ss z + + + + + HH.mm.ss z + + + + + HH.mm.ss + + + + + HH.mm + + + + + + + {1} {0} + + + + + + + + + Eastern European Standard Time + Eastern European Daylight Time + + + EET + EEST + + Bucuresti + + + + + + , +   + ; + % + 0 + # + + + - + E + + + + + + + Andorranske dinarer + ADD + + + Andorranske pesetas + ADP + + + UAE dirham + AED + + + Afghani (1927-2002) + AFA + + + Afghani + Af + + + Affar og Issa franc + AIF + + + Albanske lek (1946-1961) + ALK + + + Albanske lek + lek + + + Albanske lek valute + ALV + + + Albanske dollar (FEC) + ALX + + + Armenske dram + dram + + + Nederlandske antillegylden + NA f. + + + Angolanske kwanza + AOA + + + Angolanske kwanza (1977-1990) + AOK + + + Angolanske ny kwanza (1990-2000) + AON + + + Angolan Kwanza Reajustado (1995-1999) + AOR + + + Angolanske escudo + AOS + + + Argentinske australer + ARA + + + Argentinske Peso Moneda Nacional + ARM + + + Argentinske pesos (1983-1985) + ARP + + + Argentinske pesos + Arg$ + + + Østerrikske shilling + ATS + + + Australske dollar + $A + + + Australske pund + AUP + + + Arubiske gylden + AWG + + + Aserbajdsjanske Manat + AZM + + + Bosnia-Hercegovina dinarer + BAD + + + Bosnia-Hercegovina mark (konvertible) + KM + + + Bosnia-Hercegovina nye dinarer + BAN + + + Barbadisk dollar + BDS$ + + + Bangladeshiske taka + Tk + + + Belgiske franc (konvertible) + BEC + + + Belgiske franc + BF + + + Belgiske franc (økonomiske) + BEL + + + Bulgarske lev (hard) + lev + + + Bulgarske sosialist-lev + BGM + + + Bulgarske lev + BGN + + + Bulgarske lev (1879-1952) + BGO + + + Bulgarske lev (FEC) + BGX + + + Bahrainske dinarer + BD + + + Burundiske franc + Fbu + + + Bermudiske dollar + Ber$ + + + Bermudiske pund + BMP + + + Bruneiske dollar + BND + + + Boliviano + Bs + + + Boliviano (1863-1962) + BOL + + + Boliviansk pesos + BOP + + + Boliviansk mvdol + BOV + + + Brasiliansk cruzeiro novo (1967-1986) + BRB + + + Brasilianske cruzado + BRC + + + Brasilianske cruzeiro (1990-1993) + BRE + + + Brasilianske 
realer + R$ + + + Brasilianske cruzado novo + BRN + + + Brasilianske cruzeiro + BRR + + + Brasilianske cruzeiro (1942-1967) + BRZ + + + Bahamske dollar + BSD + + + Bahamske pund + BSP + + + Bhutanske ngultrum + Nu + + + Bhutanske rupier + BTR + + + Burmesiske kyat + BUK + + + Burmesiske rupier + BUR + + + Botswanske pula + BWP + + + Hviterussiske nye rubler (1994-1999) + BYB + + + Hviterussiske rubler (1992-1994) + BYL + + + Hviterussiske rubler + Rbl + + + Beliziske dollar + BZ$ + + + Britisk Honduras-dollar + BZH + + + Kanadiske dollar + Can$ + + + Kongolesiske franc (congolais) + CDF + + + Kongolesiske republikk-franc + CDG + + + Congolesiske zaire + CDL + + + Sentralafrikanske franc (CFA) + CFF + + + Sveitsiske franc + SwF + + + Cookøyene dollar + CKD + + + Chilenske condor + CLC + + + Chilenske escudo + CLE + + + Chilenske Unidades de Fomento + CLF + + + Chilenske pesos + Ch$ + + + Kamerunske franc (CFA) + CMF + + + Kinesiske Jen Min Piao Yuan + CNP + + + Kinesiske US dollar (FEC) + CNX + + + Kinesiske Yuan Renminbi + Y + + + Colombianske papir-pesos + COB + + + Kongolesiske franc (CFA) + COF + + + Colombianske pesos + Col$ + + + Costaricanske colon + C + + + Tsjekkoslovakiske koruna + CSC + + + Tsjekkoslovakiske koruna (hard) + CSK + + + Kubanske pesos + CUP + + + Kubanske Foreign Exchange Certificates + CUX + + + Kappverdiske escudo + CVEsc + + + Curacao-gylden + CWG + + + Kypriotiske pund + £C + + + Tsjekkiske koruna + CZK + + + Østtyske ostmark + DDM + + + Tyske mark + DEM + + + Tyske sperrmark + DES + + + Djiboutiske franc + DF + + + Danske kroner + DKr + + + Dominikanske pesos + RD$ + + + Algeriske dinarer + DA + + + Algeriske nye franc + DZF + + + Algeriske franc germinal + DZG + + + Ecuadorianske sucre + ECS + + + Ecuadorianske Unidad de Valor Constante (UVC) + ECV + + + Estiske kroon + EEK + + + Egyptiske pund + EGP + + + Eritreiske nakfa + ERN + + + Spanske peseta + ESP + + + Etiopiske birr + Br + + + Etiopiske dollar + ETD + + + Euro + + + + Finske 
mark + FIM + + + Finske mark (1860-1962) + FIN + + + Fijianske dollar + F$ + + + Fijianske pund + FJP + + + Falklandsøyene-pund + FKP + + + Færøyske kronur + FOK + + + Franske franc + FRF + + + Franske franc (Germinal/Franc Poincare) + FRG + + + Gabonske franc (CFA) + GAF + + + Britiske pund sterling + £ + + + Georgiske kupon larit + GEK + + + Georgiske lari + lari + + + Ghanesiske cedi + GHC + + + Ghanesiske gamle cedi + GHO + + + Ghanesiske pund + GHP + + + Ghanesiske revaluerte cedi + GHR + + + Gibraltarske pund + GIP + + + Grønlandske kroner + GLK + + + Gambiske dalasi + GMD + + + Gambiske pund + GMP + + + Guineanske franc + GF + + + Guineanske franc (1960-1972) + GNI + + + Guineanske syli + GNS + + + Guadeloupe-franc + GPF + + + Ekvatorialguineanske ekwele guineana + GQE + + + Ekvatorialguineanske franco + GQF + + + Ekvatorialguineanske peseta guineana + GQP + + + Greske drakmer + GRD + + + Greske nye drakmer + GRN + + + Guatemalanske quetzal + Q + + + Fransk Guyana-franc guiana + GUF + + + Portugisiske guinea escudo + GWE + + + Portugisiske Guinea Mil Reis + GWM + + + Guinea-Bissau-pesos + GWP + + + Guyanske dollar + G$ + + + Hongkong-dollar + HK$ + + + Hoduras Lempira + L + + + Kroatiske dinarer + HRD + + + Kroatiske kuna + HRK + + + Haitiske gourde + HTG + + + Ungarske forinter + Ft + + + Nordirske pund + IBP + + + Indonesiske nica-gylden + IDG + + + Indonesiske Java-rupier + IDJ + + + Indonesiske nye rupier + IDN + + + Indonesiske rupier + Rp + + + Irske pund + IR£ + + + Israelske shekler + ILL + + + Israelske pund + ILP + + + Israelske nye shekler + ILS + + + Manske pund sterling + IMP + + + Indiske rupier + =0#Rs.|1#Re.|1<Rs. 
+ + + Irakske dinarer + ID + + + Iranske rialer + RI + + + Islandske kronar + ISK + + + Italienske lire + + + + Jersey pund sterling + JEP + + + Jamaikanske dollar + J$ + + + Jamaikanske pund + JMP + + + Jordanske dinarer + JD + + + Japanske yen + ¥ + + + Kenyanske shilling + K Sh + + + Kirgisiske som + som + + + Kambodsjanske gamle riel + KHO + + + Kambodsjanske riel + CR + + + Kiribatiske dollar + KID + + + Komoriske franc + CF + + + Nordkoreanske won (1947-1959) + KPP + + + Nordkoreanske won + KPW + + + Sørkoreanske hwan + KRH + + + Sørkoreanske gamle won + KRO + + + Sørkoreanske won + KRW + + + Kuwaitiske dinarer + KD + + + Caymanske dollar + KYD + + + Kasakhstanske rubler + KZR + + + Kasakhstanske tenge + T + + + Laotiske kip + LAK + + + Libanesiske pund + LL + + + Liechtensteinske franc + LIF + + + Srilankiske rupier + SL Re + + + Ceylonske rupier + LNR + + + Liberiske dollar + LRD + + + Lesothiske loti + M + + + Litauiske lita + LTL + + + Litauiske talonas + LTT + + + Luxemburgske franc + LUF + + + Latviske lats + LVL + + + Latviske rubler + LVR + + + Libyske British Military Authority-lira + LYB + + + Libyske dinarer + LD + + + Libyske pund + LYP + + + Marokkanske dirham + MAD + + + Marokkanske franc + MAF + + + Monegaskiske franc nouveau + MCF + + + Monegaskiske franc germinal + MCG + + + Moldovske leu cupon + MDC + + + Moldovske leu + MDL + + + Moldovske ruble cupon + MDR + + + Madagassiske ariary + MGA + + + Madagassiske franc + MGF + + + Marshalløyene-dollar + MHD + + + Makedonske denarer + MDen + + + Makedonske denarer (1992-1993) + MKN + + + Maliske franc + MLF + + + Myanmarske kyat + MMK + + + Myanmarske dollar (FEC) + MMX + + + Mongolske tugrik + Tug + + + Makaoske pataca + MOP + + + Martinique-franc + MQF + + + Mauritanske ouguiya + UM + + + Maltesiske lira + Lm + + + Maltesiske pund + MTP + + + Mauritiske rupier + MUR + + + Maldiviske rupier + MVP + + + Maldiviske rufiyaa + MVR + + + Malawisle kwacha + MK + + + Malawiske pund + MWP + + + 
Meksikanske pesos + MEX$ + + + Meksikanske sølvpesos (1861-1992) + MXP + + + Meksikanske Unidad de Inversion (UDI) + MXV + + + Malaysiske ringgit + RM + + + Mosambikiske escudo + MZE + + + Mosambikiske metical + Mt + + + Namibiske dollar + N$ + + + Kaledonske franc germinal + NCF + + + Nigerianske naira + NGN + + + Nigerianske pund + NGP + + + Ny-hebridene CFP-franc + NHF + + + Nicaraguanske cordoba + NIC + + + Nicaraguanske gullcordoba + NIG + + + Nicaraguanske cordoba oro + NIO + + + Nederlandske gylden + NLG + + + Norske kroner + kr + + + Nepalesiske rupier + Nrs + + + Nyzealandske dollar + $NZ + + + Nyzealandske pund + NZP + + + Omanske rialer + RO + + + Omanske rial saidi + OMS + + + Panamanske balboa + PAB + + + Transdniestriansk rubler (kupon) + PDK + + + Transdniestrianske nye rubler + PDN + + + Transdniestrianske rubler + PDR + + + Peruvianske inti + PEI + + + Peruvianske sol nuevo + PEN + + + Peruvianske sol + PES + + + Papuanske kina + PGK + + + Filippinske pesos + PHP + + + Pakistanske rupier + Pra + + + Polske zloty + Zl + + + Polske US dollar (FEC) + PLX + + + Polske zloty (1950-1995) + PLZ + + + Palestinske pund + PSP + + + Portugisiske conto + PTC + + + Portugisiske escudo + PTE + + + Paraguayanske guarani + PYG + + + Qatarske riyaler + QR + + + Reunionske franc + REF + + + Rumenske leu + leu + + + Rumenske nye leu + RON + + + Russiske rubler + RUB + + + Russiske rubler (1991-1998) + RUR + + + Rwandiske franc + RWF + + + Saudiarabiske riyaler + SRl + + + Saudiarabiske riyaler (1936-1952) + SAS + + + Salomonske dollar + SI$ + + + Seychelliske rupier + SR + + + Sudanesiske dinarer + SDD + + + Sudanesiske pund + SDP + + + Svenske kroner + SKr + + + Singaporske dollar + S$ + + + Sankthelenske pund + SHP + + + Slovenske tolar bons + SIB + + + Slovenske tolar + SIT + + + Slovakiske koruna + Sk + + + Sierraleonske leone + SLL + + + Sanmarinske lira + SML + + + Somaliske shilling + So. Sh. 
+ + + Somalilandske shilling + SQS + + + Surinamske gylden + Sf + + + Skotske pund + SSP + + + Sao Tome og Principe-dobra + Db + + + Sao Tome og Principe-escudo + STE + + + Sovjetiske nye rubler + SUN + + + Sovjetiske rubler + SUR + + + Salvadoranske colon + SVC + + + Syriske pund + LS + + + Swazilandske lilangeni + E + + + Turks- og Caicosøyene-crown + TCC + + + Tsjadiske franc (CFA) + TDF + + + Thailandske baht + THB + + + Tadsjikiske rubler + TJR + + + Tadsjikiske somoni + TJS + + + Turkmenske manat + TMM + + + Tunisiske dinarer + TND + + + Tonganske paʻanga + T$ + + + Tonganske pund sterling + TOS + + + Timoresiske escudo + TPE + + + Timoresiske pataca + TPP + + + Tyrkiske lira + TL + + + Trinidadiske dollar + TT$ + + + Trinidadiske gamle dollar + TTO + + + Tuvalske dollar + TVD + + + Taiwanske nye dollar + NT$ + + + Tanzanianske shilling + T Sh + + + Ukrainsle hryvnia + UAH + + + Ukrainske karbovanetz + UAK + + + Ugandiske shilling (1966-1987) + UGS + + + Ugandiske shilling + U Sh + + + Amerikanske dollar + US$ + + + Amerikanske dollar (neste dag) + USN + + + Amerikanske dollar (samme dag) + USS + + + Uruguayanske peso fuerte + UYF + + + Uruguayanske pesos (1975-1993) + UYP + + + Uruguayanske peso uruguayo + Ur$ + + + Usbekiske kupong-som + UZC + + + Usbekiske sum + UZS + + + Vatikanstatens lira + VAL + + + Nordvietnamesiske piastre dong viet + VDD + + + Nordvietnamesiske nye dong + VDN + + + Nordvietnamesiske viet minh piastre dong viet + VDP + + + Venezuelanske bolivar + Be + + + De britiske jomfruøyene-dollar + VGD + + + Vietnamesiske dong + VND + + + Vietnamesiske nye dong + VNN + + + Vietnamesiske republikk-dong + VNR + + + Vietnamesiske nasjonale dong + VNS + + + Vanuatisk vatu + VT + + + Vestsamoisk pund + WSP + + + Vestsamoisk tala + WST + + + Asian Dinar Unit of Account + XAD + + + CFA Franc BEAC + XAF + + + Asian Monetary Unit + XAM + + + Gull + XAU + + + European Composite Unit + XBA + + + European Monetary Unit + XBB + + + European Unit of Account 
(XBC) + XBC + + + European Unit of Account (XBD) + XBD + + + Østkaribiske dollar + EC$ + + + CFA Nouveau Franc + XCF + + + Special Drawing Rights + XDR + + + CFA Franc BCEAEC + XEF + + + European Currency Unit + XEU + + + French Gold Franc + XFO + + + French UIC-Franc + XFU + + + Islamske dinarer + XID + + + French Metropolitan Nouveau Franc + XMF + + + Franske antiller-franc (CFA) + XNF + + + CFA Franc BCEAO + XOF + + + CFP Franc + CFPF + + + COMECON Transferable Ruble + XTR + + + Jemenittiske dinarer + YDD + + + Jemenittiske imadi-riyaler + YEI + + + Jemenittiske rialer + YRl + + + Jugoslaviske dinarer (hard) + YUD + + + Jugoslaviske føderasjonen-dinarer + YUF + + + Jugoslaviske 1994-dinarer + YUG + + + Jugoslaviske noviy-dinarer + YUM + + + Jugoslaviske konvertible dinarer + YUN + + + Jugoslaviske oktoberdinarer + YUO + + + Jugoslaviske reforerte dinarer + YUR + + + Sørafrikanske rand (økonomisk) + ZAL + + + Sørafrikanske pund + ZAP + + + Sørafrikanske rand + R + + + Zambiske kwacha + ZMK + + + Zambiske pund + ZMP + + + Zairiske nye zaire + ZRN + + + Zairiske zaire + ZRZ + + + Zimbabwiske dollar + Z$ + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/no_NO.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/no_NO.xml new file mode 100644 index 0000000..d2f24d6 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/no_NO.xml @@ -0,0 +1,40 @@ + + + + + + + + + + + + + + #,##0.###;-#,##0.### + + + + + + + #E0 + + + + + + + #,##0% + + + + + + + #,##0.00 ¤;-#,##0.00 ¤ + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/om.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/om.xml new file mode 100644 index 0000000..2a5a1cb --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/om.xml @@ -0,0 +1,99 @@ + + + + + + + + + + + Oromoo + + + Itoophiyaa + Keeniyaa + + + + [a-z] + + + + + + + + Ama + Gur + Bit + Elb + Cam + Wax + Ado 
+ Hag + Ful + Onk + Sad + Mud + + + Amajjii + Guraandhala + Bitooteessa + Elba + Caamsa + Waxabajjii + Adooleessa + Hagayya + Fuulbana + Onkololeessa + Sadaasa + Muddee + + + + + + + Dil + Wix + Qib + Rob + Kam + Jim + San + + + Dilbata + Wiixata + Qibxata + Roobii + Kamiisa + Jimaata + Sanbata + + + + + + + + + + KD + KB + + + + + + + + + KES + Ksh + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/om_ET.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/om_ET.xml new file mode 100644 index 0000000..d1e9aca --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/om_ET.xml @@ -0,0 +1,107 @@ + + + + + + + + + + + + + WD + WB + + + + + EEEE, MMMM d, yyyy + + + + + dd MMMM yyyy + + + + + dd-MMM-yy + + + + + dd/MM/yy + + + + + + + + h:mm:ss a + + + + + h:mm:ss a + + + + + h:mm:ss a + + + + + h:mm a + + + + + + + {1} {0} + + + + + + + + + + + #,##0.###;-#,##0.### + + + + + + + #E0 + + + + + + + #,##0% + + + + + + + ¤#,##0.00;-¤#,##0.00 + + + + + + ETB + $ + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/om_KE.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/om_KE.xml new file mode 100644 index 0000000..b68889a --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/om_KE.xml @@ -0,0 +1,101 @@ + + + + + + + + + + + + + WD + WB + + + + + EEEE, MMMM d, yyyy + + + + + dd MMMM yyyy + + + + + dd-MMM-yy + + + + + dd/MM/yy + + + + + + + + h:mm:ss a + + + + + h:mm:ss a + + + + + h:mm:ss a + + + + + h:mm a + + + + + + + {1} {0} + + + + + + + + + + + #,##0.###;-#,##0.### + + + + + + + #E0 + + + + + + + #,##0% + + + + + + + ¤#,##0.00;-¤#,##0.00 + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/pa.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/pa.xml new file mode 100644 index 0000000..c4bcd5d --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/pa.xml @@ 
-0,0 +1,132 @@ + + + + + + + + + + + ਪੰਜਾਬੀ + + + ਭਾਰਤ + + + + [[:Guru:]‌‍] + + + + + + + + ਜਨਵਰੀ + ਫ਼ਰਵਰੀ + ਮਾਰਚ + ਅਪ੍ਰੈਲ + ਮਈ + ਜੂਨ + ਜੁਲਾਈ + ਅਗਸਤ + ਸਤੰਬਰ + ਅਕਤੂਬਰ + ਨਵੰਬਰ + ਦਸੰਬਰ + + + ਜਨਵਰੀ + ਫ਼ਰਵਰੀ + ਮਾਰਚ + ਅਪ੍ਰੈਲ + ਮਈ + ਜੂਨ + ਜੁਲਾਈ + ਅਗਸਤ + ਸਤੰਬਰ + ਅਕਤੂਬਰ + ਨਵੰਬਰ + ਦਸੰਬਰ + + + + + + + ਐਤ. + ਸੋਮ. + ਮੰਗਲ. + ਬੁਧ. + ਵੀਰ. + ਸ਼ੁਕਰ. + ਸ਼ਨੀ. + + + ਐਤਵਾਰ + ਸੋਮਵਾਰ + ਮੰਗਲਵਾਰ + ਬੁਧਵਾਰ + ਵੀਰਵਾਰ + ਸ਼ੁੱਕਰਵਾਰ + ਸ਼ਨੀਚਰਵਾਰ + + + + ਸਵੇਰੇ + ਸ਼ਾਮ + + + + + + . + , + ; + % + + # + + + - + E + + + + + + + + ##,##,##0.###;-##,##,##0.### + + + + + + + #E0 + + + + + + + ##,##,##0% + + + + + + + ¤ ##,##,##0.00;-¤ ##,##,##0.00 + + + + + + ਰੁਪਿਯ + ਰੁ. + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/pa_IN.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/pa_IN.xml new file mode 100644 index 0000000..37fc24f --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/pa_IN.xml @@ -0,0 +1,99 @@ + + + + + + + + + + + + + + + + + EEEE d MMMM yyyy + + + + + d MMMM yyyy + + + + + dd-MM-yyyy + + + + + d-M-yy + + + + + + + + hh:mm:ss a z + + + + + hh:mm:ss a z + + + + + hh:mm:ss a + + + + + hh:mm a + + + + + + + {1} {0} + + + + + + + + + + + ##,##,##0.###;-##,##,##0.### + + + + + + + #E0 + + + + + + + ##,##,##0% + + + + + + + ¤##,##,##0.00;-¤##,##,##0.00 + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/pl.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/pl.xml new file mode 100644 index 0000000..dacfb42 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/pl.xml @@ -0,0 +1,492 @@ + + + + + + + + + + + arabski + bułgarski + czeski + duński + niemiecki + grecki + angielski + hiszpański + estoński + fiński + francuski + hebrajski + chorwacki + węgierski + włoski + japoński + koreański + litewski + łotewski + holenderski + norweski + polski + portugalski + rumuński + rosyjski + słowacki + słoweński + szwedzki + turecki + chiński + + + Andora + 
Zjednoczone Emiraty Arabskie + Afganistan + Antigua i Barbuda + Anguilla + Albania + Armenia + Antyle Holenderskie + Angola + Antarktyka + Argentyna + Samoa Amerykańskie + Austria + Australia + Aruba + Azerbejdżan + Bośnia i Hercegowina + Barbados + Bangladesz + Belgia + Burkina Faso + Bułgaria + Bahrajn + Burundi + Benin + Bermudy + Brunei Darussalam + Boliwia + Brazylia + Bahamy + Bhutan + Wyspa Bouveta + Botswana + Białoruś + Belize + Kanada + Wyspy Kokosowe (Keelinga) + Kongo, Republika Demokratyczna + Republika Środkowej Afryki + Kongo + Szwajcaria + Wybrzeże Kości Słoniowej + Wyspy Cooka + Chile + Kamerun + Chiny + Kolumbia + Kostaryka + Kuba + Wyspy Zielonego Przylądka + Wyspa Bożego Narodzenia + Cypr + Republika Czeska + Niemcy + Dżibuti + Dania + Dominika + Republika Dominikańska + Algieria + Ekwador + Estonia + Egipt + Sahara Zachodnia + Erytrea + Hiszpania + Etiopia + Finlandia + Fidżi + Falklandy (Malwiny) + Mikronezja, Stany Sfederowane + Wyspy Owcze + Francja + en + Gabon + Wielka Brytania + Grenada + Gruzja + Gujana Francuska + Ghana + Gibraltar + Grenlandia + Gambia + Gwinea + Gwadelupa + Gwinea Równikowa + Grecja + Wyspy Georgia Południowa i Sandwich Południowy + Gwatemala + Guam + Gwinea Bissau + Gujana + Hongkong, Specjalny Region Administracyjny Chin + Wyspy Heard i McDonald + Honduras + Chorwacja + Haiti + Węgry + Indonezja + Irlandia + Izrael + Indie + Terytorium Brytyjskie Oceanu Indyjskiego + Irak + Iran + Islandia + Włochy + Jamajka + Jordania + Japonia + Kenia + Kirgistan + Kambodża + Kiribati + Komory + Saint Kitts i Nevis + Korea Północna + Korea Południowa + Kuwejt + Kajmany + Kazachstan + Laos (Demokratyczna Republika Ludowa) + Liban + Saint Lucia + Liechtenstein + Sri Lanka + Liberia + Lesotho + Litwa + Luksemburg + Łotwa + Libijska + Maroko + Monako + Mołdawia, Republika + Madagaskar + Wyspy Marshalla + Macedonia, Republika + Mali + Birma + Mongolia + Makau, Specjalny Region Administracyjny Chin + Wspólnota Marianów Północnych + 
Martynika + Mauretania + Montserrat + Malta + Mauritius + Malediwy + Malawi + Meksyk + Malezja + Mozambik + Namibia + Nowa Kaledonia + Niger + Wyspa Norfolk + Nigeria + Nikaragua + Holandia + Norwegia + Nepal + Nauru + Niue + Nowa Zelandia + Oman + Panama + Peru + Polinezja Francuska + Papua Nowa Gwinea + Filipiny + Pakistan + Polska + St. Pierre i Miquelon + Pitcairn + Puerto Rico + Terytoria Palestyńskie + Portugalia + Palau + Paragwaj + Katar + Reunion + Rumunia + Federacja Rosyjska + Rwanda + Arabia Saudyjska + Wyspy Salomona + Seszele + Sudan + Szwecja + Singapur + Wyspa Świętej Heleny + Słowenia + Svalbard i Wyspy Jan Mayen + Słowacja + Sierra Leone + San Marino + Senegal + Somalia + Serbia + Surinam + Wyspy Świętego Tomasza i Książęca + Salwador + Syria + Suazi + Turks i Caicos + Czad + Francuskie Terytoria Południowe + Togo + Tajlandia + Tadżykistan + Tokelau + Timor Wschodni + Turkmenia + Tunezja + Tonga + Turcja + Trinidad i Tobago + Tuvalu + Tajwan + Tanzania + Ukraina + Uganda + United States Minor Outlying Islands + Stany Zjednoczone + Urugwaj + Uzbekistan + Stolica Apostolska (Państwo Watykańskie) + Saint Vincent and the Grenadines + Wenezuela + Brytyjskie Wyspy Dziewicze + Wyspy Dziewicze, Stanów Zjednoczonych + Wietnam + Vanuatu + Wallis i Futuna + Samoa + Jemen + Majotta + Jugosławia + Afryka Południowa + Zambia + Zimbabwe + + + + [a-z ó ą ę ć ń ś ź ł ż] + + + + + + + + st + lut + mrz + kw + maj + cz + lip + sier + wrz + paź + lis + gr + + + s + l + m + k + m + c + l + s + w + p + l + g + + + stycznia + lutego + marca + kwietnia + maja + czerwca + lipca + sierpnia + września + października + listopada + grudnia + + + + + st + lut + mrz + kw + maj + cz + lip + sier + wrz + paź + lis + gr + + + s + l + m + k + m + c + l + s + w + p + l + g + + + Styczeń + Luty + Marzec + Kwiecień + Maj + Czerwiec + Lipiec + Sierpień + Wrzesień + Październik + Listopad + Grudzień + + + + + + + N + Pn + Wt + Śr + Cz + Pt + So + + + niedziela + poniedziałek + wtorek + 
środa + czwartek + piątek + sobota + + + + + + + + + + p.n.e. + n.e. + + + + + + + EEEE, d MMMM yyyy + + + + + d MMMM yyyy + + + + + yyyy-MM-dd + + + + + yy-MM-dd + + + + + + + + HH:mm:ss z + + + + + HH:mm:ss z + + + + + HH:mm:ss + + + + + HH:mm + + + + + + + {1} {0} + + + + + + + + + , +   + ; + % + 0 + # + + + - + E + + + + + + + PLN + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/pl_PL.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/pl_PL.xml new file mode 100644 index 0000000..0de6ec8 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/pl_PL.xml @@ -0,0 +1,40 @@ + + + + + + + + + + + + + + #,##0.###;-#,##0.### + + + + + + + #E0 + + + + + + + #,##0% + + + + + + + #,##0.00 ¤;-#,##0.00 ¤ + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/ps.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/ps.xml new file mode 100644 index 0000000..fc88820 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/ps.xml @@ -0,0 +1,259 @@ + + + + + + + + + + + عربي + بلوڅي + الماني + یوناني + انګلیسي + حبشي + فارسي + فینلنډي + فرانسوي + عبري + هندي + ارمني + هند و اروپایي + ایټالوي + جاپانی + کردي + لاتیني + ملغاسي + مقدوني + مغولي + ملایا + پولنډي + پښتو + پورتګالي + روسي + سنسکریټ + سویډنی + تاجک + ترکمني + تاتار + ازبکي + چیني + + + افغانستان + البانیه + انګولا + انتارکتیکا + اتریش + بنګله‌دیش + بلغاریه + کاناډا + سویس + چین + کولمبیا + کیوبا + المان + ډنمارک + الجزایر + مصر + هسپانیه + حبشه + فنلینډ + فرانسه + برتانیه + ګانا + ګیانا + یونان + ګواتیمالا + هانکانګ + هانډوراس + مجارستان + اندونیزیا + هند + عراق + آیسلینډ + ایټالیه + جمیکا + جاپان + کمبودیا + کویټ + لاوس + لبنان + لایبریا + لیبیا + مراکش + مغولستان + مالیزیا + نایجیریا + نکاراګوا + هالېنډ + ناروې + نیپال + نیوزیلنډ + پاکستان + پولنډ + فلسطین + پورتګال + روسیه + روندا + سعودی عربستان + سویډن + سالوېډور + سوریه + تاجکستان + تنزانیا + یوروګوای + یمن + + + + + + + 
[ء-ؤئ-غفقل-وي-ْٰٔټپځڅ-چډړږژښکګڼی-ۍې ‌‍‏‎] + + + + + + + + جنو + فبر + مار + اپر + مـی + جون + جول + اګس + سپت + اکت + نوم + دسم + + + جنوري + فبروري + مارچ + اپریل + می + جون + جولای + اګست + سپتمبر + اکتوبر + نومبر + دسمبر + + + + + + + ی. + د. + س. + چ. + پ. + ج. + ش. + + + یکشنبه + دوشنبه + سه‌شنبه + چهارشنبه + پنجشنبه + جمعه + شنبه + + + + غ.م. + غ.و. + + + ق.م. + م. + + + + + + + EEEE د yyyy د MMMM d + + + + + د yyyy د MMMM d + + + + + d MMMM yyyy + + + + + yyyy/M/d + + + + + + + + H:mm:ss (z) + + + + + H:mm:ss (z) + + + + + H:mm:ss + + + + + H:mm + + + + + + + {1} {0} + + + + + + + + + د افغانستان په وخت + د افغانستان په وخت + + + AFT + AFT + + کابل + + + + + + + افغانۍ + افغانۍ + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/ps_AF.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/ps_AF.xml new file mode 100644 index 0000000..5c756cd --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/ps_AF.xml @@ -0,0 +1,67 @@ + + + + + + + + + + + + + + + + + + + + + + + + + ٫ + ٬ + ; + ٪ + ۰ + # + + + + ×۱۰^ + + + + + + + + #,##0.###;-#,##0.### + + + + + + + #E0 + + + + + + + #,##0% + + + + + + + #,##0 ¤;-#,##0 ¤ + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/pt.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/pt.xml new file mode 100644 index 0000000..2a58f85 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/pt.xml @@ -0,0 +1,2789 @@ + + + + + + + + + + + afar + abkhazian + achinese + acoli + adangme + adyghe + avéstico + africâner + afro-asiático (outros) + Afrihili + Akan + acadiano + aleúte + idiomas algonquianos + amárico + aragonês + inglês, arcaico (aprox. 
450-1100) + idiomas apache + árabe + aramaico + araucano + arapaho + artificiais (outros) + arauaqui + assamês + asturiano + idiomas atabascanos + idiomas australianos + avaric + Awadhi + aimara + azerbaijano + bashkir + banda + bamileke Languages + balúchi + bambara + balinês + basa + bálticos (outros) + bielo-russo + beja + bemba + berbere + búlgaro + biari + bhojpuri + bislamá + bikol + bini + siksika + bambara + bengali + banto + tibetano + bretão + braj + bósnio + bataque + Buriat + Buginese + Blin + catalão + caddo + indígenas centro-americanos (outros) + caribe + caucasianos (outros) + chechene + cebuano + célticos (outros) + chamorro + chibcha + chagatai + chuukese + mari + chinook jargon + choctaw + chipewyan + cheroqui + cheiene + chamic languages + córsico + copta + crioulos e pídgin, inglês (outros) + crioulos e pídgin, francês (outros) + crioulos e pídgin, português (outros) + cree + crimean turkish; crimean tatar + crioulos e pídgins (outros) + tcheco + kashubian + eslavo eclesiástico + cuxitas (outros) + chuvash + galês + dinamarquês + dacota + dargwa + dayak + alemão + delaware + slave + dogrib + dinka + dogri + dravídicos (outros) + Lower Sorbian + duala + holandês, medieval (aprox. 
1050-1350) + divehi + diúla + dzonga + eve + efique + egípcio (arcaico) + ekajuk + grego + elamite + inglês + inglês, medieval (1100-1500) + esperanto + espanhol + estoniano + basco + ewondo + persa + fangue + fanti + fula + finlandês + ugro-finês (outros) + fijiano + feroês + fom + francês + francês, medieval (aprox.1400-1600) + francês, arcaico (842-aprox.1400) + friulano + frisão + irlandês + ga + gayo + gbaia + gaélico escocês + germânicos (outros) + geez + gilbertês + galego + alemão, medieval alto (aprox.1050-1500) + guarani + alemão, arcaico alto (aprox.750-1050) + gondi + gorontalo + gótico + Gerbo + grego, arcaico (até 1453) + guzerate + manx + gwichʻin + hauçá + haida + havaiano + hebraico + hindi + hiligaynon + himachali + hitita + hmong + hiri motu + croata + upper sorbian + haitiano + húngaro + hupa + armênio + herero + interlíngua + Iban + indonésio + interlingue + ibo + sichuan yi + Inupiaq + ilocano + índicos (outros) + indo-europeus (outros) + inguche + ido + iraniano + idiomas iroqueses + islandês + italiano + inuktitut + japonês + lojban + judaico-persa + judaico-arábico + georgiano + kara-Kalpak + kabyle + kachin + kamba + karen + kawi + kabardian + congolês + khasi + khoisan (other) + khotanese + quicuio + Kuanyama + cazaque + groenlandês + cmer + quimbundo + canarês + coreano + concani + kosraean + kpelle + canúri + karachay-Balkar + kru + kurukh + kashmiri + curdo + kumyk + kutenai + komi + córnico + quirguiz + latim + ladino + lahnda + lamba + luxemburguês + lezghian + luganda + limburgish + lingala + laosiano + mongo + lozi + lituano + luba-catanga + luba-Lulua + luiseno + lunda + lushai + letão + madurês + magahi + maithili + makasar + mandinga + austronésio + massai + mocsa + mandar + mende + malgaxe + irlandês, medieval (900-1200) + marshallês + maori + miquemaque + minangkabau + idiomas diversos + macedônio + mon-khmer (other) + malaiala + mongol + manchu + manipuri + manobo languages + moldávio + mohawk + mossi + marata + malaio + 
maltês + idiomas múltiplos + idiomas munda + creek + marwari + birmanês + maia + erzya + nauruano + náuatle + indígenas norte-americanos (outros) + napolitano + bokmål norueguês + ndebele, north + alto alemão; baixo saxão + nepali + newari + dongo + nias + niger - kordofanian (other) + niueano + holandês + nynorsk norueguês + norueguês + nogai + norse, old + ndebele, south + soto, setentrional + idiomas núbios + navajo + nianja; chicheua; cheua + nyamwezi + nyankole + nyoro + nzima + occitânico (após 1500); provençal + ojibwa + oromo + oriya + ossetic + osage + turco, otomano (1500-1928) + idiomas otomanos + panjabi + papuanos (outros) + pangasinã + pálavi + pampanga + papiamento + palauano + persa arcaico (aprox. 600-400 a.C.) + filipinos (outros) + fenício + páli + polonês + pohnpeian + idiomas prácrito + provençal, arcaico (até 1500) + pashto (pushto) + português + quíchua + rajastani + rapanui + rarotongano + rhaeto-romance + rundi + romeno + romances (outros) + romani + russo + kinyarwanda + sânscrito + sandawe + iacuto + indígenas sul-americanos (outros) + salishan languages + aramaico samaritano + sasak + santali + sardo + escocês + sindi + northern sami + selkup + semíticos (outros) + sango + irlandês, arcaico (até 900) + linguages de sinais + servo-croata + shan + cingalês + sidamo + idiomas sioux + sino-tibetanos (outros) + eslovaco + eslovênio + somali + sogdien + songai + albanês + sérvio + serere + swati + nilo-saarianos (outros) + soto, do sul + sundanês + sukuma + sosso + sumério + sueco + suaíli + siríaco + tâmil + tai (outros) + telugu + timne + tereno + tétum + tadjique + tailandês + tigrínia + tigré + turcomano + toquelauano + tlinguite + tamaxeque + tswana + tonga (ilhas tonga) + toganês (Nyasa) + tok pisin + turco + tsonga + tsimshian + tatar + tumbuka + idiomas tupi + altaicos (outros) + tuvaluano + twi + taitiano + tuvinian + udmurt + uighur + ugarítico + ucraniano + umbundu + indeterminado + urdu + usbeque + venda + vietnamita + volapuque + 
votic + walloon + wakashan languages + walamo + waray + washo + sorbian languages + uolofe + kalmyk + xosa + iao + yapese + iídiche + ioruba + idiomas iúpique + zhuang + zapoteca + zenaga + chinês + zande + zulu + zunhi + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Andorra + Emirados Árabes Unidos + Afeganistão + Antígua e Barbuda + Anguilla + Albânia + Armênia + Antilhas Holandesas + Angola + Antártida + Argentina + Samoa Americana + Áustria + Austrália + Aruba + Azerbaijão + Bósnia-Herzegóvina + Barbados + Bangladesh + Bélgica + Burquina Faso + Bulgária + Bareine + Burundi + Benin + Bermudas + Brunei + Bolívia + Brasil + Bahamas + Butão + Ilha Bouvet + Botsuana + Belarus + Belize + Canadá + Ilhas Cocos (Keeling) + Congo, República Democrática do + República Centro-Africana + Congo + Suíça + Costa do Marfim + Ilhas Cook + Chile + República dos Camarões + China + Colômbia + Costa Rica + Cuba + Cabo Verde + Ilhas Natal + Chipre + República Tcheca + Alemanha + Djibuti + Dinamarca + Dominica + República Dominicana + Argélia + Equador + Estônia + Egito + Saara Ocidental + Eritréia + Espanha + Etiópia + Finlândia + Fiji + Ilhas Malvinas + Micronésia, Estados Federados da + Ilhas Faroe + França + en + Gabão + Reino Unido + Granada + Geórgia + Guiana Francesa + Gana + Gibraltar + Groênlandia + Gâmbia + Guiné + Guadalupe + Guiné Equatorial + Grécia + Geórgia do Sul e Ilhas Sandwich do Sul + Guatemala + Guam + Guiné Bissau + Guiana + Hong Kong, Região Admin. 
Especial da China + Ilha Heard e Ilhas McDonald + Honduras + Croácia + Haiti + Hungria + Indonésia + Irlanda + Israel + Índia + Território Britânico do Oceano Índico + Iraque + Irã + Islândia + Itália + Jamaica + Jordânia + Japão + Quênia + Quirguistão + Camboja + Quiribati + Comores + São Cristovão e Nevis + Coréia, Norte + Coréia, Sul + Kuwait + Ilhas Caiman + Casaquistão + República Democrática Popular de Lao + Líbano + Santa Lúcia + Liechtenstein + Sri Lanka + Libéria + Lesoto + Lituânia + Luxemburgo + Letônia + Líbia + Marrocos + Mônaco + Moldova, República de + Madagascar + Ilhas Marshall + Macedônia, República da + Mali + Mianmá + Mongólia + Macau, Região Admin. Especial da China + Ilhas Marianas do Norte + Martinica + Mauritânia + Montserrat + Malta + Maurício + Maldivas + Malawi + México + Malásia + Moçambique + Namíbia + Nova Caledônia + Níger + Ilha Norfolk + Nigéria + Nicarágua + Países Baixos + Noruega + Nepal + Nauru + Niue + Nova Zelândia + Omã + Panamá + Peru + Polinésia Francesa + Papua-Nova Guiné + Filipinas + Paquistão + Polônia + Saint Pierre e Miquelon + Pitcairn + Porto Rico + Território da Palestina + Portugal + Palau + Paraguai + Catar + Reunião + Romênia + Rússia + Ruanda + Arábia Saudita + Ilhas Salomão + Seychelles + Sudão + Suécia + Cingapura + Santa Helena + Eslovênia + Svalbard e Jan Mayen + Eslováquia + Serra Leoa + San Marino + Senegal + Somália + Sérvia + Suriname + São Tomé e Príncipe + El Salvador + Síria + Suazilândia + Ilhas Turks e Caicos + Chade + Territórios Franceses do Sul + Togo + Tailândia + Tadjiquistão + Tokelau + Timor Leste + Turcomenistão + Tunísia + Tonga + Turquia + Trinidad e Tobago + Tuvalu + Taiwan + Tanzânia + Ucrânia + Uganda + Ilhas Menores Distantes dos Estados Unidos + Estados Unidos + Uruguai + Uzbequistão + Vaticano + São Vicente e Granadinas + Venezuela + Ilhas Virgens Britânicas + Ilhas Virgens dos EUA + Vietnã + Vanuatu + Wallis e Futuna + Samoa + Iêmen + Mayotte + Iugoslávia + África do Sul + Zâmbia + 
Zimbábwe + + + Revisado + + + Calendário + Intercalação + Moeda + + + Calendário Budista + Calendário Chinês + Calendário Gregoriano + Calendário Hebraico + Calendário Islâmico + Calendário Civil Islâmico + Calendário Japonês + Ordem Direta + Ordem de Lista Telefônica + Ordem Pin-yin + Ordem dos Traços + Ordem Tradicional + + + + [a-zãõçáéíóúàâêôüò] + + + + + + + + jan + fev + mar + abr + mai + jun + jul + ago + set + out + nov + dez + + + J + F + M + A + M + J + J + A + S + O + N + D + + + janeiro + fevereiro + março + abril + maio + junho + julho + agosto + setembro + outubro + novembro + dezembro + + + + + + + dom + seg + ter + qua + qui + sex + sáb + + + D + S + T + Q + Q + S + S + + + domingo + segunda-feira + terça-feira + quarta-feira + quinta-feira + sexta-feira + sábado + + + + + + a.C. + d.C. + + + + + + + EEEE, d' de 'MMMM' de 'yyyy + + + + + d' de 'MMMM' de 'yyyy + + + + + d/MMM/yyyy + + + + + dd-MM-yyyy + + + + + + + + HH'H'mm'm'ss's' z + + + + + HH:mm:ss z + + + + + HH:mm:ss + + + + + HH:mm + + + + + + + {1} {0} + + + + + + + + + Horário Padrão do Pacífico + Horário de Verão do Pacífico + + + PST + PDT + + Los Angeles + + + + Horário Padrão do Pacífico + Horário de Verão do Pacífico + + + PST + PDT + + Los Angeles + + + + Horário Padrão Montanha + Horário de Verão Montanha + + + MST + MDT + + Denver + + + + Horário Padrão Montanha + Horário de Verão Montanha + + + MST + MDT + + Denver + + + + Horário Padrão Montanha + Horário Padrão Montanha + + + MST + MST + + Phoenix + + + + Horário Padrão Montanha + Horário Padrão Montanha + + + MST + MST + + Phoenix + + + + Horário Padrão Central + Horário de Verão Central + + + CST + CDT + + Chicago + + + + Horário Padrão Central + Horário de Verão Central + + + CST + CDT + + Chicago + + + + Horário Padrão Oriental + Horário de Verão Oriental + + + EST + EDT + + Nova Iorque + + + + Horário Padrão Oriental + Horário de Verão Oriental + + + EST + EDT + + Nova Iorque + + + + Horário Padrão Oriental + Horário Padrão 
Oriental + + + EST + EST + + Indianapolis + + + + Horário Padrão Oriental + Horário Padrão Oriental + + + EST + EST + + Indianápolis + + + + Horário Padrão do Havaí + Horário Padrão do Havaí + + + HST + HST + + Honolulu + + + + Horário Padrão do Havaí + Horário Padrão do Havaí + + + HST + HST + + Honolulu + + + + Horário Padrão do Alasca + Horário de Verão do Alasca + + + AST + ADT + + Anchorage + + + + Horário Padrão do Alasca + Horário de Verão do Alasca + + + AST + ADT + + Anchorage + + + + Horário Padrão Atlântico + Horário de Verão Atlântico + + + AST + ADT + + Halifax + + + + Horário Padrão de Terra Nova + Horário de Verão de Terra Nova + + + CNT + CDT + + St. Johns + + + + Horário Padrão de Terra Nova + Horário de Verão de Terra Nova + + + CNT + CDT + + St. Johns + + + + Horário Padrão Europa Central + Horário de Verão Europa Central + + + CET + CEST + + Paris + + + + Horário Padrão Europa Central + Horário de Verão Europa Central + + + CET + CEST + + Paris + + + + Horário do Meridiano de Greenwich + Horário do Meridiano de Greenwich + + + GMT + GMT + + Londres + + + + Horário do Meridiano de Greenwich + Horário do Meridiano de Greenwich + + + GMT + GMT + + Casablanca + + + + Horário Padrão de Israel + Horário de Verão de Israel + + + IST + IDT + + Jerusalém + + + + Horário Padrão do Japão + Horário Padrão do Japão + + + JST + JST + + Tóquio + + + + Horário Padrão do Japão + Horário Padrão do Japão + + + JST + JST + + Tóquio + + + + Horário Padrão da Europa Oriental + Horário de Verão da Europa Oriental + + + EET + EEST + + Bucareste + + + + Horário Padrão da China + Horário Padrão da China + + + CTT + CDT + + Xangai + + + + Horário Padrão da China + Horário Padrão da China + + + CTT + CDT + + Xangai + + + + + + , + . 
+ ; + % + 0 + # + + + - + E + + + + + + + Diner de Andorra + ADD + + + Peseta de Andorra + ADP + + + Dirém dos Emirados Árabes Unidos + AED + + + Afegane (1927-2002) + AFA + + + Afegane + AFA + + + Franco de Affars e Issas + AIF + + + Lek Albanês + ALL + + + Lek Valute Albanês + ALV + + + Certificados de câmbio albaneses em dólares + ALX + + + Dram Arménio + AMD + + + Guilder das Antilhas Holandesas + ANG + + + Cuanza angolano + AOA + + + Cuanza angolano (1977-1990) + AOK + + + Cuanza novo angolano (1990-2000) + AON + + + Cuanza angolano reajustado (1995-1999) + AOR + + + Escudo angolano + AOS + + + Austral argentino + ARA + + + Peso moneda nacional argentino + ARM + + + Peso argentino (1983-1985) + ARP + + + Peso argentino + ARS + + + Xelim austríaco + ATS + + + Dólar australiano + AUD + + + Libra australiana + AUP + + + Guilder de Aruba + AWG + + + Manat azerbaijano + AZM + + + Dinar da Bósnia-Herzegóvina + BAD + + + Marco bósnio-herzegóvino conversível + BAM + + + Dinar novo da Bósnia-Herzegóvina + BAN + + + Dólar de Barbados + BBD + + + Taka de Bangladesh + BDT + + + Franco belga (conversível) + BEC + + + Franco belga + BEF + + + Franco belga (financeiro) + BEL + + + Lev forte búlgaro + BGL + + + Lev socialista búlgaro + BGM + + + Lev novo búlgaro + BGN + + + Lev búlgaro (1879-1952) + BGO + + + Certificados de câmbio búlgaros em leva + BGX + + + Dinar bareinita + BHD + + + Franco do Burundi + BIF + + + Dólar das Bermudas + BMD + + + Libra das Bermudas + BMP + + + Dólar do Brunei + BND + + + Boliviano + BOB + + + Boliviano (1863-1962) + BOL + + + Peso boliviano + BOP + + + Mvdol boliviano + BOV + + + Cruzeiro novo brasileiro(1967-1986) + BRB + + + Cruzado brasileiro + BRC + + + Cruzeiro brasileiro (1990-1993) + BRE + + + Real brasileiro + R$ + + + Cruzado novo brasileiro + BRN + + + Cruzeiro brasileiro + BRR + + + Cruzeiro brasileiro (1942-1967) + BRZ + + + Dólar das Bahamas + BSD + + + Libra das Bahamas + BSP + + + Ngultrum do Butão + BTN + + + Rupia do Butão + 
BTR + + + Kyat birmanês + BUK + + + Rupia birmanesa + BUR + + + Pula botsuanesa + BWP + + + Rublo novo bielo-russo (1994-1999) + BYB + + + Rublo bielo-russo (1992-1994) + BYL + + + Rublo bielo-russo + BYR + + + Dólar do Belize + BZD + + + Dólar de Honduras Britânica + BZH + + + Dólar canadense + CAD + + + Franco congolês + CDF + + + Franco da República do Congo + CDG + + + Zaire congolês + CDL + + + Franco da República Centro-Africana CFA + CFF + + + Franco suíço + CHF + + + Dólar das Ilhas Cook + CKD + + + Condor chileno + CLC + + + Escudo chileno + CLE + + + Unidades de Fomento chilenas + CLF + + + Peso chileno + CLP + + + Franco dos Camarões CFA + CMF + + + Jen Min Piao Yuan chinês + CNP + + + Certificados de câmbio chineses em dólares dos EUA + CNX + + + Yuan Renminbi chinês + CNY + + + Peso de Papel colombiano + COB + + + Franco do Congo CFA + COF + + + Peso colombiano + COP + + + Colon da Costa Rica + CRC + + + Coroa checoslovaca + CSC + + + Coroa Forte checoslovaca + CSK + + + Peso cubano + CUP + + + Certificados de câmbio cubanos + CUX + + + Escudo cabo-verdiano + CVE + + + Guilder de Curaçau + CWG + + + Libra de Chipre + CYP + + + Coroa da República Checa + CZK + + + Ostmark da Alemanha Oriental + DDM + + + Marco alemão + DEM + + + Sperrmark alemão + DES + + + Franco do Djibuti + DJF + + + Coroa dinamarquesa + DKK + + + Peso dominicano + DOP + + + Dinar argelino + DZD + + + Franco Novo argelino + DZF + + + Franco Germinal argelino + DZG + + + Sucre equatoriano + ECS + + + Unidad de Valor Constante (UVC) do Equador + ECV + + + Coroa estoniana + EEK + + + Libra egípcia + EGP + + + Nakfa da Eritréia + ERN + + + Peseta espanhola + ESP + + + Birr etíope + ETB + + + Dólar etíope + ETD + + + Euro + + + + Marca finlandesa + FIM + + + Marca finlandesa (1860-1962) + FIN + + + Dólar de Fiji + FJD + + + Libra de Fiji + FJP + + + Libra das Malvinas + FKP + + + Coroa das Ilhas Feroé + FOK + + + Franco francês + FRF + + + Franco Germinal francês/Franco Poincaré + FRG + + 
+ Franco do Gabão CFA + GAF + + + Libra esterlina britânica + £ + + + Cupom Lari georgiano + GEK + + + Lari georgiano + GEL + + + Cedi de Gana + GHC + + + Cedi Antigo de Gana + GHO + + + Libra de Gana + GHP + + + Cedi reajustado de Gana + GHR + + + Libra de Gibraltar + GIP + + + Coroa de Groenlândia + GLK + + + Dalasi de Gâmbia + GMD + + + Libra de Gâmbia + GMP + + + Franco de Guiné + GNF + + + Franco de Guiné (1960-1972) + GNI + + + Syli de Guiné + GNS + + + Franco de Guadalupe + GPF + + + Ekwele de Guiné Equatorial + GQE + + + Franco de Guiné Equatorial + GQF + + + Peseta Guineana de Guiné Equatorial + GQP + + + Dracma grego + GRD + + + Dracma Novo grego + GRN + + + Quetçal da Guatemala + GTQ + + + Franco da Guiana Francesa + GUF + + + Escudo da Guiné Portuguesa + GWE + + + Mil-réis da Guiné Portuguesa + GWM + + + Peso da Guiné-Bissau + GWP + + + Dólar da Guiana + GYD + + + Dólar de Hong Kong + HKD + + + Lempira de Honduras + HNL + + + Dinar croata + HRD + + + Kuna croata + HRK + + + Gurde do Haiti + HTG + + + Forinte húngaro + HUF + + + Libra da Irlanda do Norte + IBP + + + Guilder Nica indonésio + IDG + + + Rupia Java indonésia + IDJ + + + Rupia Nova indonésia + IDN + + + Rupia indonésia + IDR + + + Libra irlandesa + IEP + + + Sheqel israelita + ILL + + + Libra israelita + ILP + + + Sheqel Novo israelita + ILS + + + Libra esterlina da Ilha de Man + IMP + + + Rupia indiana + =0#Rs.|1#Re.|1<Rs. 
+ + + Dinar iraquiano + IQD + + + Rial iraniano + IRR + + + Coroa islandesa + ISK + + + Lira italiana + + + + Libra esterlina de Jersey + JEP + + + Dólar jamaicano + JMD + + + Libra jamaicana + JMP + + + Dinar jordaniano + JOD + + + Iene japonês + ¥ + + + Xelim queniano + KES + + + Som de Quirguistão + KGS + + + Riel Antigo do Camboja + KHO + + + Riel cambojano + KHR + + + Dólar do Quiribati + KID + + + Franco de Comores + KMF + + + Won da República Popular da Coréia do Norte + KPP + + + Won norte-coreano + KPW + + + Hwan sul-coreano + KRH + + + Won Antigo sul-coreano + KRO + + + Won sul-coreano + KRW + + + Dinar coveitiano + KWD + + + Dólar das Ilhas Caimão + KYD + + + Rublo do Cazaquistão + KZR + + + Tenge do Cazaquistão + KZT + + + Kip de Laos + LAK + + + Libra libanesa + LBP + + + Franco de Liechtenstein + LIF + + + Rupia de Sri Lanka + LKR + + + Rupia do Ceilão + LNR + + + Dólar liberiano + LRD + + + Loti de Lesoto + LSL + + + Lita lituano + LTL + + + Talonas lituano + LTT + + + Franco luxemburguês + LUF + + + Lats letão + LVL + + + Rublo letão + LVR + + + Lira líbia da Autoridade Militar Britânica + LYB + + + Dinar líbio + LYD + + + Libra líbia + LYP + + + Dirém marroquino + MAD + + + Franco marroquino + MAF + + + Franco Novo de Mônaco + MCF + + + Franco Germinal de Mônaco + MCG + + + Cupom leu moldávio + MDC + + + Leu de Moldávia + MDL + + + Cupom rublo molávio + MDR + + + Ariary de Madagascar + MGA + + + Franco de Madagascar + MGF + + + Dólar das Ilhas Marshall + MHD + + + Dinar macedônio + MKD + + + Dinar macedônio (1992-1993) + MKN + + + Franco de Mali + MLF + + + Kyat de Mianmar + MMK + + + Certificados de câmbio birmaneses em dólares + MMX + + + Tugrik mongol + MNT + + + Pataca macaense + MOP + + + Franco da Martinica + MQF + + + Ouguiya da Mauritânia + MRO + + + Lira maltesa + MTL + + + Libra maltesa + MTP + + + Rupia de Maurício + MUR + + + Rupia das Ilhas Maldivas + MVP + + + Rupias das Ilhas Maldivas + MVR + + + Cuacha do Maláui + MWK + + + Libra do 
Maláui + MWP + + + Peso mexicano + MXN + + + Peso Plata mexicano (1861-1992) + MXP + + + Unidad de Inversion (UDI) mexicana + MXV + + + Ringgit malaio + MYR + + + Escudo de Moçambique + MZE + + + Metical de Moçambique + MZM + + + Dólar da Namíbia + NAD + + + Franco Germinal da Nova Caledônia + NCF + + + Naira nigeriana + NGN + + + Libra nigeriana + NGP + + + Franco CFP das Novas Hébridas + NHF + + + Córdoba nicaraguano + NIC + + + Córdoba Ouro nicaraguano + NIG + + + Córdoba Ouro nicaraguano + NIO + + + Guilder holandês + NLG + + + Coroa norueguesa + NOK + + + Rupia nepalesa + NPR + + + Dólar da Nova Zelândia + NZD + + + Libra da Nova Zelândia + NZP + + + Rial de Omã + OMR + + + Rial Saidi de Omã + OMS + + + Balboa panamenho + PAB + + + Cupom rublo de Transdniestria + PDK + + + Rublo Novo de Transdniestria + PDN + + + Rublo de Transdniestria + PDR + + + Inti peruano + PEI + + + Sol Novo peruano + PEN + + + Sol peruano + PES + + + Kina da Papua-Nova Guiné + PGK + + + Peso filipino + PHP + + + Rupia paquistanesa + PKR + + + Zloti polonês + PLN + + + Certificados de câmbio poloneses em dólares + PLX + + + Zloti polonês (1950-1995) + PLZ + + + Libra palestina + PSP + + + Conto português + PTC + + + Escudo português + Esc. 
+ + + Guarani paraguaio + PYG + + + Rial catariano + QAR + + + Franco de Reunião + REF + + + Leu romeno + ROL + + + Leu Novo romeno + RON + + + Rublo russo + RUB + + + Rublo russo (1991-1998) + RUR + + + Franco ruandês + RWF + + + Rial saudita + SAR + + + Rial Soberano saudita + SAS + + + Dólar das Ilhas Salomão + SBD + + + Rupia das Seychelles + SCR + + + Dinar sudanês + SDD + + + Libra sudanesa + SDP + + + Coroa sueca + SEK + + + Dólar de Cingapura + SGD + + + Libra de Santa Helena + SHP + + + Tolar Bons esloveno + SIB + + + Tolar Bons esloveno + SIT + + + Coroa eslovaca + SKK + + + Leone de Serra Leoa + SLL + + + Lira de San Marino + SML + + + Xelim somali + SOS + + + Xelim de Somalilândia + SQS + + + Guilder do Suriname + SRG + + + Libra escocesa + SSP + + + Dobra de São Tomé e Príncipe + STD + + + Escudo de São Tomé e Príncipe + STE + + + Rublo Novo soviético + SUN + + + Rublo soviético + SUR + + + Colom salvadorenho + SVC + + + Libra síria + SYP + + + Lilangeni da Suazilândia + SZL + + + Coroa de Turcas e Caicos + TCC + + + Franco CFA de Chade + TDF + + + Baht tailandês + THB + + + Rublo do Tadjiquistão + TJR + + + Somoni tadjique + TJS + + + Manat do Turcomenistão + TMM + + + Dinar tunisiano + TND + + + Paʻanga de Tonga + TOP + + + Libra esterlina de Tonga + TOS + + + Escudo timorense + TPE + + + Pataca timorense + TPP + + + Lira turca + TRL + + + Dólar de Trinidad e Tobago + TTD + + + Dólar Antigo de Trinidad e Tobago + TTO + + + Dólar de Tuvalu + TVD + + + Dólar Novo de Taiwan + TWD + + + Xelim de Tanzânia + TZS + + + Hryvnia ucraniano + UAH + + + Karbovanetz ucraniano + UAK + + + Xelim ugandense (1966-1987) + UGS + + + Xelim ugandense + UGX + + + Dólar norte-americano + $ + + + Dólar norte-americano (Dia seguinte) + USN + + + Dólar norte-americano (Mesmo dia) + USS + + + Peso Fuerte uruguaio + UYF + + + Peso uruguaio (1975-1993) + UYP + + + Peso uruguaio + UYU + + + Coupon Som do Usbequistão + UZC + + + Sum do Usbequistão + UZS + + + Lira da Cidade do 
Vaticano + VAL + + + Piastre Dong Viet do Vietnã do Norte + VDD + + + Dong Novo do Vietnã do Norte + VDN + + + Viet Minh Piastre Dong Viet do Vietnã do Norte + VDP + + + Bolívar venezuelano + VEB + + + Dólar das Ilhas Virgens Britânicas + VGD + + + Dong vietnamita + đ + + + Dong Novo vietnamita + VNN + + + Dong da República do Vietnã + VNR + + + Dong Nacional vietnamita + VNS + + + Vatu de Vanuatu + VUV + + + Libra de Samoa Ocidental + WSP + + + Tala de Samoa Ocidental + WST + + + Unidade de Conta asiática em dinares + XAD + + + Franco CFA BEAC + XAF + + + Unidade Monetária Asiática + XAM + + + Ouro + XAU + + + Unidade Composta Européia + XBA + + + Unidade Monetária Européia + XBB + + + Unidade de Conta Européia (XBC) + XBC + + + Unidade de Conta Européia (XBD) + XBD + + + Dólar do Caribe Oriental + XCD + + + Franco Novo CFA + XCF + + + Direitos Especiais de Giro + XDR + + + Franco CFA BCEAEC + XEF + + + Unidade Monetária Européia + XEU + + + Franco-ouro francês + XFO + + + Franco UIC francês + XFU + + + Dinar islâmico + XID + + + Franco Novo Metropolitano francês + XMF + + + Franco CFA das Antilhas Francesas + XNF + + + Franco CFA BCEAO + XOF + + + Franco CFP + CFPF + + + Rublo transferível do COMECON + XTR + + + Dinar iemenita + YDD + + + Rial Imadi iemenita + YEI + + + Rial iemenita + YRl + + + Dinar forte iugoslavo + YUD + + + Dinar da Federação Iugoslava + YUF + + + Dinar iugoslavo de 1994 + YUG + + + Super Dinar iugoslavo + YUM + + + Dinar conversível iugoslavo + YUN + + + Dinar de outubro iugoslavo + YUO + + + Dinar reformado iugoslavo + YUR + + + Rand sul-africano (financeiro) + ZAL + + + Libra sul-africana + ZAP + + + Rand sul-africano + ZAR + + + Cuacha zambiano + ZMK + + + Libra zambiana + ZMP + + + Zaire Novo zairense + ZRN + + + Zaire zairense + ZRZ + + + Dólar do Zimbábwe + ZWD + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/pt_BR.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/pt_BR.xml new file 
mode 100644 index 0000000..4a11fdd --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/pt_BR.xml @@ -0,0 +1,69 @@ + + + + + + + + + + + + + + + + + EEEE, d' de 'MMMM' de 'yyyy + + + + + d' de 'MMMM' de 'yyyy + + + + + dd/MM/yyyy + + + + + dd/MM/yy + + + + + + + + HH'h'mm'min'ss's' z + + + + + H'h'm'min's's' z + + + + + HH:mm:ss + + + + + HH:mm + + + + + + + {1} {0} + + + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/pt_PT.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/pt_PT.xml new file mode 100644 index 0000000..2ba0753 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/pt_PT.xml @@ -0,0 +1,186 @@ + + + + + + + + + + + + árabe + checo + estónio + letão + polaco + esloveno + + + Emiratos Árabes Unidos + Antígua e Barbuda + Arménia + Antárctica + Azerbeijão + Bósnia-Herzegovina + Benim + Bielorrússia + Camarões + Ilha do Natal + República Checa + Estónia + Egipto + Sahara Ocidental + Eritreia + Ilhas Falkland + Gronelândia + Ilhas South Georgia e South Sandwich + Guiné-Bissau + Hong Kong - Região Administrativa Especial da China + Quénia + Quirguizistão + Camboja + Saint Kitts e Nevis + Coreia do Norte + Coreia do Sul + Ilhas Caimão + Cazaquistão + Lao, República Popular Democrática + Letónia + Mónaco + Moldávia, República da + Madagáscar + Macedónia, República da + Macau - Região Administrativa Especial da China + Ilhas Mariana do Norte + Maurícias + Nova Caledónia + Papua Nova Guiné + Polónia + Território Palestiniano + Reunion + Roménia + Seicheles + Singapura + Eslovénia + São Marino + Ilhas Turcas e Caicos + Tchade + Territórios Franceses a Sul + Tajiquistão + Turquemenistão + Formosa, Província Chinesa + Ilhas Minor Outlying (E.U.A) + Uzbaquistão + Santa Sé (Estado da Cidade do Vaticano) + Saint Vincent e Grenadines + Ilhas Virgin Britânicas + Ilhas Virgin E.U.A. 
+ Vietname + Iémen + Jugoslávia + + + + + + + + + + + + + + EEEE, d' de 'MMMM' de 'yyyy + + + + + d' de 'MMMM' de 'yyyy + + + + + yyyy/MM/dd + + + + + yy/MM/dd + + + + + + + + HH'H'mm'm'ss's' z + + + + + HH:mm:ss z + + + + + HH:mm:ss + + + + + HH:mm + + + + + + + {1} {0} + + + + + + + + + + + #,##0.###;-#,##0.### + + + + + + + #E0 + + + + + + + #,##0% + + + + + + + #,##0.00 ¤;-#,##0.00 ¤ + + + + + + Escudo português + Esc. + #,##0.00 ¤;-#,##0.00 ¤ + #,##0.00 ¤;-#,##0.00 ¤ + , + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/ro.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/ro.xml new file mode 100644 index 0000000..57fd882 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/ro.xml @@ -0,0 +1,435 @@ + + + + + + + + + + + Arabă + Bulgară + Cehă + Daneză + Germană + Greacă + Engleză + Spaniolă + Estoniană + Finlandeză + Franceză + Ebraică + Croată + Maghiară + Italiană + Japoneză + Coreeană + Lituaniană + Letonă + Olandeză + Norvegiană + Poloneză + Portugheză + Română + Rusă + Slovacă + Slovenă + Suedeză + Turcă + Chineză + + + Andorra + Emiratele Arabe Unite + Afganistan + Antigua şi Barbuda + Anguilla + Albania + Armenia + Antilele Olandeze + Angola + Antarctica + Argentina + Samoa Americană + Austria + Australia + Aruba + Azerbaidjan + Bosnia şi Herzegovina + Barbados + Bangladesh + Belgia + Burkina Faso + Bulgaria + Bahrain + Burundi + Benin + Bermuda + Brunei + Bolivia + Brazilia + Bahamas + Bhutan + Insula Bouvet + Botswana + Bielorusia + Belize + Canada + Insulele Cocos (Keeling) + Congo, Republica Democratică + Republica Central Africană + Congo + Eleveţia + Coasta de Fildeş + Insulele Cook + Chile + Camerun + China + Columbia + Costa Rica + Cuba + Capul Verde + Insula Christmas + Cipru + Republica Cehă + Germania + Djibouti + Danemarca + Dominica + Republica Dominicană + Algeria + Ecuador + Estonia + Egipt + Sahara de Vest + Eritrea + Spania + Etiopia + Finlanda + Fiji + 
Insulele Falkland + Micronezia, Statele Federate + Insulele Feroe + Franţa + en + Gabon + Regatul Unit + Grenada + Georgia + Guyana Franceză + Ghana + Gibraltar + Groenlanda + Gambia + Guineea + Guadeloupe + Guineea Ecuatorială + Grecia + Insulele South Georgia şi South Sandwich + Guatemala + Guam + Guineea-Bissau + Guyana + R.A.S. Hong Kong a Chinei + Insula Heard şi Insulele McDonald + Honduras + Croaţia + Haiti + Ungaria + Indonezia + Irlanda + Israel + India + Teritoriile Britanice din Oceanul Indian + Iraq + Iran + Islanda + Italia + Jamaica + Iordania + Japonia + Kenya + Kirghizia + Cambodgia + Kiribati + Comoros + Saint Kitts şi Nevis + Coreea de Nord + Coreea de Sud + Kuweit + Insulele Cayman + Kazahstan + Lao, Republica Democratică Populară + Liban + Saint Lucia + Liechtenstein + Sri Lanka + Liberia + Lesotho + Lituania + Luxemburg + Letonia + Libia, Jamahiriya Arabă + Maroc + Monaco + Moldova, Republica + Madagascar + Insulele Marshall + Macedonia + Mali + Myanmar + Mongolia + R.A.S. 
Macao a Chinei + Insulele Northern Mariana + Martinica + Mauritania + Montserrat + Malta + Mauritius + Maldive + Malawi + Mexic + Malaezia + Mozambic + Namibia + Noua Caledonie + Niger + Insulele Norfolk + Nigeria + Nicaragua + Olanda + Norvegia + Nepal + Nauru + Niue + Noua Zeelandă + Oman + Panama + Peru + Polinezia Franceză + Papua Noua Guinee + Filipine + Pakistan + Polonia + Saint Pierre şi Miquelon + Pitcairn + Porto Rico + Teritoriul Palestinian + Portugalia + Palau + Paraguay + Qatar + Reunion + România + Federaţia Rusă + Rwanda + Arabia Saudită + Insulele Solomon + Seychelles + Sudan + Suedia + Singapore + Saint Helena + Slovenia + Svalbard şi Jan Mayen + Slovacia + Sierra Leone + San Marino + Senegal + Somalia + Serbia + Surinam + Sao Tome şi Principe + El Salvador + Siria + Swaziland + Insulele Turks şi Caicos + Ciad + Teritoriile Franceze de Sud + Togo + Tailanda + Tadjikistan + Tokelau + Timorul de Est + Turkmenistan + Tunisia + Tonga + Turcia + Trinidad şi Tobago + Tuvalu + Taiwan, Provincia Chineză + Tanzania + Ucraina + Uganda + United States Minor Outlying Islands + Statele Unite + Uruguay + Uzbekistan + Sfântul Scaun (Statul Vatican) + Saint Vincent şi Grenadines + Venezuela + Insulele Virgine Britanice + Insulele Virgine S.U.A. + Vietnam + Vanuatu + Wallis şi Futuna + Samoa + Yemen + Mayotte + Iugoslavia + Africa de Sud + Zambia + Zimbabwe + + + + [a-z â î ă ş ţ] + + + GanjkHmsSEDFwWxhKzAeugXZ + + + + + + Ian + Feb + Mar + Apr + Mai + Iun + Iul + Aug + Sep + Oct + Nov + Dec + + + ianuarie + februarie + martie + aprilie + mai + iunie + iulie + august + septembrie + octombrie + noiembrie + decembrie + + + + + + + D + L + Ma + Mi + J + V + S + + + duminică + luni + marţi + miercuri + joi + vineri + sîmbătă + + + + + + + + + + d.C. + î.d.C. 
+ + + + + + + d MMMM yyyy + + + + + d MMMM yyyy + + + + + dd.MM.yyyy + + + + + dd.MM.yyyy + + + + + + + + HH:mm:ss z + + + + + HH:mm:ss z + + + + + HH:mm:ss + + + + + HH:mm + + + + + + + {1} {0} + + + + + + + + + , + . + ; + % + 0 + # + + + - + E + + + + + + + ROL + lei + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/ro_RO.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/ro_RO.xml new file mode 100644 index 0000000..69b498d --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/ro_RO.xml @@ -0,0 +1,40 @@ + + + + + + + + + + + + + + #,##0.###;-#,##0.### + + + + + + + #E0 + + + + + + + #,##0% + + + + + + + #,##0.00 ¤;-#,##0.00 ¤ + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/root.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/root.xml new file mode 100644 index 0000000..70329be --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/root.xml @@ -0,0 +1,1665 @@ + + + + + + + + + + + aa + ab + ace + ach + ada + ady + ae + af + afa + afh + ak + akk + ale + alg + am + an + ang + apa + ar + arc + arn + arp + art + arw + as + ast + ath + aus + av + awa + ay + az + ba + bad + bai + bal + bam + ban + bas + bat + be + bej + bem + ber + bg + bh + bho + bi + bik + bin + bla + bm + bn + bnt + bo + br + bra + bs + btk + bua + bug + byn + ca + cad + cai + car + cau + ce + ceb + cel + ch + chb + chg + chk + chm + chn + cho + chp + chr + chy + cmc + co + cop + cpe + cpf + cpp + cr + crh + crp + cs + csb + cu + cus + cv + cy + da + dak + dar + day + de + del + den + dgr + din + doi + dra + dsb + dua + dum + dv + dyu + dz + ee + efi + egy + eka + el + elx + en + enm + eo + es + et + eu + ewo + fa + fan + fat + ff + fi + fiu + fj + fo + fon + fr + frm + fro + fur + fy + ga + gaa + gay + gba + gd + gem + gez + gil + gl + gmh + gn + goh + gon + gor + got + grb + grc + gu + gv + gwi + ha + hai + haw + he + hi + hil + him + hit + 
hmn + ho + hr + hsb + ht + hu + hup + hy + hz + ia + iba + id + ie + ig + ii + ijo + ik + ilo + inc + ine + inh + io + ira + iro + is + it + iu + ja + jbo + jpr + jrb + jv + ka + kaa + kab + kac + kam + kar + kaw + kbd + kg + kha + khi + kho + ki + kj + kk + kl + km + kmb + kn + ko + kok + kos + kpe + kr + krc + kro + kru + ks + ku + kum + kut + kv + kw + ky + la + lad + lah + lam + lb + lez + lg + li + ln + lo + lol + loz + lt + lu + lua + lui + lun + luo + lus + lv + mad + mag + mai + mak + man + map + mas + mdf + mdr + men + mg + mga + mh + mi + mic + min + mis + mk + mkh + ml + mn + mnc + mni + mno + mo + moh + mos + mr + ms + mt + mul + mun + mus + mwr + my + myn + myv + na + nah + nai + nap + nb + nd + nds + ne + new + ng + nia + nic + niu + nl + nn + no + nog + non + nr + nso + nub + nv + ny + nym + nyn + nyo + nzi + oc + oj + om + or + os + osa + ota + oto + pa + paa + pag + pal + pam + pap + pau + peo + phi + phn + pi + pl + pon + pra + pro + ps + pt + qu + raj + rap + rar + rm + rn + ro + roa + rom + root + ru + rw + sa + sad + sah + sai + sal + sam + sas + sat + sc + sco + sd + se + sel + sem + sg + sga + sgn + sh + shn + si + sid + sio + sit + sk + sl + sla + sm + sma + smi + smj + smn + sms + sn + snk + so + sog + son + sq + sr + srr + ss + ssa + st + su + suk + sus + sux + sv + sw + syr + ta + tai + te + tem + ter + tet + tg + th + ti + tig + tiv + tk + tkl + tl + tli + tmh + tn + to + tog + tpi + tr + ts + tsi + tt + tum + tup + tut + tvl + tw + ty + tyv + udm + ug + uga + uk + umb + und + ur + uz + vai + ve + vi + vo + vot + wa + wak + wal + war + was + wen + wo + xal + xh + yao + yap + yi + yo + ypk + za + zap + zen + zh + znd + zu + zun + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + AD + AE + AF + AG + AI + AL + AM + AN + AO + AQ + AR + AS + AT + AU + AW + AZ + BA + BB + BD + BE + BF + BG + BH + BI + BJ + BM + BN + BO + BR + BS + BT + BV + BW + BY + BZ + CA + CC + CD + CF 
+ CG + CH + CI + CK + CL + CM + CN + CO + CR + CU + CV + CX + CY + CZ + DE + DJ + DK + DM + DO + DZ + EC + EE + EG + EH + ER + ES + ET + FI + FJ + FK + FM + FO + FR + GA + GB + GD + GE + GF + GH + GI + GL + GM + GN + GP + GQ + GR + GS + GT + GU + GW + GY + HK + HM + HN + HR + HT + HU + ID + IE + IL + IN + IO + IQ + IR + IS + IT + JM + JO + JP + KE + KG + KH + KI + KM + KN + KP + KR + KW + KY + KZ + LA + LB + LC + LI + LK + LR + LS + LT + LU + LV + LY + MA + MC + MD + MG + MH + MK + ML + MM + MN + MO + MP + MQ + MR + MS + MT + MU + MV + MW + MX + MY + MZ + NA + NC + NE + NF + NG + NI + NL + NO + NP + NR + NU + NZ + OM + PA + PE + PF + PG + PH + PK + PL + PM + PN + PR + PS + PT + PW + PY + QA + RE + RO + RU + RW + SA + SB + SC + SD + SE + SG + SH + SI + SJ + SK + SL + SM + SN + SO + SP + SR + ST + SV + SY + SZ + TC + TD + TF + TG + TH + TJ + TK + TL + TM + TN + TO + TR + TT + TV + TW + TZ + UA + UG + UM + US + UY + UZ + VA + VC + VE + VG + VI + VN + VU + WF + WS + YE + YT + YU + ZA + ZM + ZW + + + POSIX + REVISED + + + CALENDAR + COLLATION + CURRENCY + + + BUDDHIST + CHINESE + GREGORIAN + HEBREW + ISLAMIC + ISLAMIC-CIVIL + JAPANESE + DIRECT + PHONEBOOK + PINYIN + STROKE + TRADITIONAL + + + + + + + [] + + + + + 297 + 210 + + + + GyMdkHmsSEDFwWahKzYeugAZ + + + + + BE + + + + + + + EEEE, MMMM d, yyyy G + + + + + MMMM d, yyyy G + + + + + MMM d, yyyy G + + + + + M/d/yyyy + + + + + + + + h:mm:ss a z + + + + + h:mm:ss a z + + + + + h:mm:ss a + + + + + h:mm a + + + + + + + {1} {0} + + + + + + + + + + EEEE y'x'G-Ml-d + + + + + y'x'G-Ml-d + + + + + y'x'G-Ml-d + + + + + y'x'G-Ml-d + + + + + + + + h:mm:ss a z + + + + + h:mm:ss a z + + + + + h:mm:ss a + + + + + h:mm a + + + + + + + {1} {0} + + + + + + + + + + + 1 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10 + 11 + 12 + + + 1 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10 + 11 + 12 + + + 1 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10 + 11 + 12 + + + + + + 1 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10 + 11 + 12 + + + 1 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10 + 11 + 
12 + + + 1 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10 + 11 + 12 + + + + + + + + + 1 + 2 + 3 + 4 + 5 + 6 + 7 + + + 1 + 2 + 3 + 4 + 5 + 6 + 7 + + + 1 + 2 + 3 + 4 + 5 + 6 + 7 + + + + + + 1 + 2 + 3 + 4 + 5 + 6 + 7 + + + 1 + 2 + 3 + 4 + 5 + 6 + 7 + + + 1 + 2 + 3 + 4 + 5 + 6 + 7 + + + + + + + + AM + PM + + + BCE + CE + + + + + + + EEEE, yyyy MMMM dd + + + + + yyyy MMMM d + + + + + yyyy MMM d + + + + + yy/MM/dd + + + + + + + + HH:mm:ss z + + + + + HH:mm:ss z + + + + + HH:mm:ss + + + + + HH:mm + + + + + + + {1} {0} + + + + + + + + + + + Tishri + Heshvan + Kislev + Tevet + Shevat + Adar I + Adar + Nisan + Iyar + Sivan + Tamuz + Av + Elul + + + Tishri + Heshvan + Kislev + Tevet + Shevat + Adar I + Adar + Nisan + Iyar + Sivan + Tamuz + Av + Elul + + + + + + AM + + + + + + + + + + Muharram + Safar + Rabiʻ I + Rabiʻ II + Jumada I + Jumada II + Rajab + Shaʻban + Ramadan + Shawwal + Dhuʻl-Qiʻdah + Dhuʻl-Hijjah + + + Muharram + Safar + Rabiʻ I + Rabiʻ II + Jumada I + Jumada II + Rajab + Shaʻban + Ramadan + Shawwal + Dhuʻl-Qiʻdah + Dhuʻl-Hijjah + + + + + + AH + + + + + + + + + + Muharram + Safar + Rabiʻ I + Rabiʻ II + Jumada I + Jumada II + Rajab + Shaʻban + Ramadan + Shawwal + Dhuʻl-Qiʻdah + Dhuʻl-Hijjah + + + Muharram + Safar + Rabiʻ I + Rabiʻ II + Jumada I + Jumada II + Rajab + Shaʻban + Ramadan + Shawwal + Dhuʻl-Qiʻdah + Dhuʻl-Hijjah + + + + + + AH + + + + + + + Taika + Hakuchi + Hakuhō + Shuchō + Taihō + Keiun + Wadō + Reiki + Yōrō + Jinki + Tempyō + Tempyō-kampō + Tempyō-shōhō + Tempyō-hōji + Temphō-jingo + Jingo-keiun + Hōki + Ten-ō + Enryaku + Daidō + Kōnin + Tenchō + Shōwa + Kajō + Ninju + Saiko + Tennan + Jōgan + Genkei + Ninna + Kampyō + Shōtai + Engi + Enchō + Shōhei + Tengyō + Tenryaku + Tentoku + Ōwa + Kōhō + Anna + Tenroku + Ten-en + Jōgen + Tengen + Eikan + Kanna + Ei-en + Eiso + Shōryaku + Chōtoku + Chōhō + Kankō + Chōwa + Kannin + Jian + Manju + Chōgen + Chōryaku + Chōkyū + Kantoku + Eishō + Tengi + Kōhei + Jiryaku + Enkyū + Shōho + Shōryaku + Eiho + Ōtoku + Kanji + 
Kaho + Eichō + Shōtoku + Kōwa + Chōji + Kashō + Tennin + Ten-ei + Eikyū + Gen-ei + Hoan + Tenji + Daiji + Tenshō + Chōshō + Hoen + Eiji + Kōji + Tenyō + Kyūan + Ninpei + Kyūju + Hogen + Heiji + Eiryaku + Ōho + Chōkan + Eiman + Nin-an + Kaō + Shōan + Angen + Jishō + Yōwa + Juei + Genryuku + Bunji + Kenkyū + Shōji + Kennin + Genkyū + Ken-ei + Shōgen + Kenryaku + Kenpō + Shōkyū + Jōō + Gennin + Karoku + Antei + Kanki + Jōei + Tempuku + Bunryaku + Katei + Ryakunin + En-ō + Ninji + Kangen + Hōji + Kenchō + Kōgen + Shōka + Shōgen + Bun-ō + Kōchō + Bun-ei + Kenji + Kōan + Shōō + Einin + Shōan + Kengen + Kagen + Tokuji + Enkei + Ōchō + Shōwa + Bunpō + Genō + Genkyō + Shōchū + Kareki + Gentoku + Genkō + Kemmu + Engen + Kōkoku + Shōhei + Kentoku + Bunchũ + Tenju + Kōryaku + Kōwa + Genchũ + Meitoku + Kakei + Kōō + Meitoku + Ōei + Shōchō + Eikyō + Kakitsu + Bun-an + Hōtoku + Kyōtoku + Kōshō + Chōroku + Kanshō + Bunshō + Ōnin + Bunmei + Chōkyō + Entoku + Meiō + Bunki + Eishō + Taiei + Kyōroku + Tenmon + Kōji + Eiroku + Genki + Tenshō + Bunroku + Keichō + Genwa + Kan-ei + Shōho + Keian + Shōō + Meiryaku + Manji + Kanbun + Enpō + Tenwa + Jōkyō + Genroku + Hōei + Shōtoku + Kyōhō + Genbun + Kanpō + Enkyō + Kan-en + Hōryaku + Meiwa + An-ei + Tenmei + Kansei + Kyōwa + Bunka + Bunsei + Tenpō + Kōka + Kaei + Ansei + Man-en + Bunkyū + Genji + Keiō + Meiji + Taishō + Shōwa + Heisei + + + + + + + EEEE, MMMM d, y G + + + + + MMMM d, y G + + + + + MMM d, y G + + + + + M/d/yy + + + + + + + + h:mm:ss a z + + + + + h:mm:ss a z + + + + + h:mm:ss a + + + + + h:mm a + + + + + + + {1} {0} + + + + + + + + + + + + + + + + + + + + + + . + , + ; + % + 0 + # + + + - + E + + + + + + + + #,##0.###;-#,##0.### + + + + + + + #E0 + + + + + + + #,##0% + + + + + + + ¤ #,##0.00;-¤ #,##0.00 + + + + + + EUR + + + + GBP + £ + + + INR + =0#Rs.|1#Re.|1<Rs. 
+ + + ITL + + + + JPY + ¥ + + + USD + $ + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/ru.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/ru.xml new file mode 100644 index 0000000..ddbecf8 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/ru.xml @@ -0,0 +1,591 @@ + + + + + + + + + + + Афар + Абхазский + Африкаанс + Амхарский + Арабский + Ассамский + Аямара + Азербайджанский + Башкирский + Белорусский + Болгарский + Бихарский + Бислама + Бенгальский + Тибетский + Бретонский + Каталанский + Корсиканский + Чешский + Валлийский + Датский + Немецкий + Бутанский + Греческий + Английский + Эсперанто + Испанский + Эстонский + Баскский + Персидский + Финский + Фиджи + Фарерский + Французский + Фризский + Ирландский + Гаэльский + Галицийский + Гуарани + Гуярати + Хоса + Иврит + Хинди + Хорватский + Венгерский + Армянский + Смешанный язык + Индонезийский + Смешанный язык + Инапиак + Исландский + Итальянский + Инактитут + Японский + Яванский + Грузинский + Казахский + Гренландский + Камбоджийский + Канада + Корейский + Кашмирский + Курдиш + Киргизский + Латинский + Лингала + Лаосский + Литовский + Латвийский + Малагасийский + Маори + Македонский + Малаялам + Монгольский + Молдавский + Маратийский + Малайский + Мальтийский + Бирманский + Науру + Непальский + Голландский + Норвежский + Окитан + Оромо (Афан) + Ория + Панджабский + Польский + Пашто (Пушто) + Португальский + Кечуа + Раето-романский + Кирундийский + Румынский + Русский + Кинярванда + Санскрит + Синди + Санго + Сербско-хорватский + Сингальский + Словацкий + Словенский + Самоа + Шона + Сомали + Албанский + Сербский + Сисвати + Сесото + Санданизский + Шведский + Суахили + Тамильский + Телугу + Таджикский + Тайский + Тигриниа + Туркменский + Тагалог + Сетсвана + Тонга + Турецкий + Тсонга + Татарский + Тви + Уйгурский + Украинский + Урду + Узбекский + Вьетнамский + Волапак + Волоф + Хоза + Идиш + Йоруба + Зуанг + Китайский + Зулусский + 
+ + Андорра + Объединенные Арабские Эмираты + Афганистан + Антигуа и Барбуда + Ангуилла + Албания + Армения + Голландские Антильские Острова + Ангола + Антарктида + Аргентина + Американское Самоа + Австрия + Австралия + Аруба + Азербайджан + Босния + Барбадос + Бангладеш + Бельгия + Буркина Фасо + Болгария + Бахрейн + Бурунди + Бенин + Бермудские Острова + Бруней Даруссалам + Боливия + Бразилия + Багамские острова + Бутан + Остров Буве + Ботсвана + Беларусь + Белиз + Канада + Кокосовые Острова (Киилинг) + Конго, Демократическая Республика + Центрально-Африканская Республика + Конго + Швейцария + Кот д’Ивуар + Острова Кука + Чили + Камерун + Китай + Колумбия + Коста-Рика + Куба + Острова Зеленого Мыса + Остров Рождества + Кипр + Чешская Республика + Германия + Джибути + Дания + Остров Доминика + Доминиканская Республика + Алжир + Эквадор + Эстония + Египет + Западная Сахара + Эритрея + Испания + Эфиопия + Финляндия + Фиджи + Фольклендские Острова + Федеративное Государство Микронезия + Фарерские острова + Франция + Габон + Великобритания + Гренада + Грузия + Французская Гвиана + Гана + Гибралтар + Гренландия + Гамбия + Гвинея + Гваделупа + Экваториальная Гвинея + Греция + Южная Джорджия и Южные Сандвичевы Острова + Гватемала + Гуам + Гвинея-Биссау + Гайана + Гонконг (Область с Особым Административным Управлением, Китай) + Острова Херд и Мак-Дональд + Гондурас + Хорватия + Гаити + Венгрия + Индонезия + Ирландия + Израиль + Индия + Британские Территории в Индийском Океане + Ирак + Иран + Исландия + Италия + Ямайка + Иордания + Япония + Кения + Кыргызстан + Камбоджа + Кирибати + Коморские Острова + Сент-Киттс и Невис + Северная Корея + Южная Корея + Кувейт + Каймановы Острова + Казахстан + Лаос + Ливан + Сент-Люсия + Лихтенштейн + Шри-Ланка + Либерия + Лесото + Литва + Люксембург + Латвия + Ливия + Марокко + Монако + Молдова + Мадагаскар + Маршалловы Острова + Македония + Мали + Майанмар + Монголия + Макао (Область с Особым Административным Управлением, Китай) + 
Северные Марианские Острова + Мартиник + Мавритания + Монсеррат + Мальта + Маврикий + Мальдивы + Малави + Мексика + Малайзия + Мозамбик + Намибия + Новая Каледония + Нигер + Остров Норфолк + Нигерия + Никарагуа + Нидерланды + Норвегия + Непал + Науру + Ниуе + Новая Зеландия + Оман + Панама + Перу + Французская Полинезия + Папуа-Новая Гвинея + Филиппины + Пакистан + Польша + Сен-Пьер и Микелон + Остров Питкэрн + Пуэрто-Рико + Палестинская автономия + Португалия + Палау + Парагвай + Катар + Реюньон + Румыния + Россия + Руанда + Саудовская Аравия + Соломоновы Острова + Сейшельские Острова + Судан + Швеция + Сингапур + Остров Святой Елены + Словения + Острова Свалбард и Жан Майен + Словакия + Сьерра-Леоне + Сан-Марино + Сенегал + Сомали + Сербия + Суринам + Сан-Томе и Принсипи + Сальвадор + Сирийская Арабская Республика + Свазиленд + Острова Туркс и Кайкос + Чад + Французские Южные Территории + Того + Таиланд + Таджикистан + Токелау + Восточный Тимор + Туркменистан + Тунис + Тонга + Турция + Тринидад и Тобаго + Тувалу + Тайвань, Китайская Провинция + Танзания + Украина + Уганда + Внешние малые острова (США) + Соединенные Штаты + Уругвай + Узбекистан + Государство-город Ватикан + Сент-Винсент и Гренадины + Венесуэла + Британские Виргинские Острова + Американские Виргинские Острова + Вьетнам + Вануату + Эллис и Футуна + Самоа + Йемен + Майотта + Югославия + Южная Африка + Замбия + Зимбабве + + + + [а-я ё і ѣ ѳ ѵ] + + + GanjkHmsSEDFwWxhKzAeugXZ + + + + + + янв + фев + мар + апр + май + июн + июл + авг + сен + окт + ноя + дек + + + Я + Ф + М + А + М + И + И + А + С + О + Н + Д + + + января + февраля + марта + апреля + мая + июня + июля + августа + сентября + октября + ноября + декабря + + + + + янв + фев + мар + апр + май + июн + июл + авг + сен + окт + ноя + дек + + + Я + Ф + М + А + М + И + И + А + С + О + Н + Д + + + Январь + Февраль + Март + Апрель + Май + Июнь + Июль + Август + Сентябрь + Октябрь + Ноябрь + Декабрь + + + + + + + Вс + Пн + Вт + Ср + Чт + Пт + Сб + + + 
воскресенье + понедельник + вторник + среда + четверг + пятница + суббота + + + + + + + + + + до н.э. + н.э. + + + + + + + d MMMM yyyy 'г.' + + + + + d MMMM yyyy 'г.' + + + + + dd.MM.yyyy + + + + + dd.MM.yy + + + + + + + + H:mm:ss z + + + + + H:mm:ss z + + + + + H:mm:ss + + + + + H:mm + + + + + + + {1} {0} + + + + + + + + + + RUR + р. + + + UAH + грн. + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/ru_RU.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/ru_RU.xml new file mode 100644 index 0000000..54d4e7e --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/ru_RU.xml @@ -0,0 +1,54 @@ + + + + + + + + + + + + , +   + ; + % + 0 + # + + + - + E + + + + + + + + #,##0.###;-#,##0.### + + + + + + + #E0 + + + + + + + #,##0% + + + + + + + #,##0.00¤;-#,##0.00¤ + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/ru_UA.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/ru_UA.xml new file mode 100644 index 0000000..4cf9aee --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/ru_UA.xml @@ -0,0 +1,113 @@ + + + + + + + + + + + + + + + + + EEEE, d MMMM yyyy 'г.' + + + + + d MMMM yyyy + + + + + d MMM yyyy + + + + + dd.MM.yy + + + + + + + + HH:mm:ss z + + + + + HH:mm:ss z + + + + + HH:mm:ss + + + + + HH:mm + + + + + + + {1} {0} + + + + + + + + + , +   + ; + % + 0 + # + + + - + E + + + + + + + + #,##0.###;-#,##0.### + + + + + + + #E0 + + + + + + + #,##0% + + + + + + + #,##0.00 ¤;-#,##0.00 ¤ + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/sa.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/sa.xml new file mode 100644 index 0000000..f1a0feb --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/sa.xml @@ -0,0 +1,70 @@ + + + + + + + + + + + संस्कृत + + + भारतम् + + + + [[[:Deva:][॑-॔]]-[क़-य़]‌‍] + + + + . 
+ , + ; + % + + # + + + - + E + + + + + + + + ##,##,##0.###;-##,##,##0.### + + + + + + + #E0 + + + + + + + ##,##,##0% + + + + + + + ¤ ##,##,##0.00;-¤ ##,##,##0.00 + + + + + + INR + रु + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/sa_IN.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/sa_IN.xml new file mode 100644 index 0000000..20dc4ce --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/sa_IN.xml @@ -0,0 +1,99 @@ + + + + + + + + + + + + + + + + + EEEE d MMMM yyyy + + + + + d MMMM yyyy + + + + + dd-MM-yyyy + + + + + d-MM-yy + + + + + + + + hh:mm:ss a z + + + + + hh:mm:ss a z + + + + + hh:mm:ss a + + + + + hh:mm a + + + + + + + {1} {0} + + + + + + + + + + + ##,##,##0.###;-##,##,##0.### + + + + + + + #E0 + + + + + + + ##,##,##0% + + + + + + + ¤##,##,##0.00;-¤##,##,##0.00 + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/sh.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/sh.xml new file mode 100644 index 0000000..7226c55 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/sh.xml @@ -0,0 +1,475 @@ + + + + + + + + + + + Afrikanerski + Arapski + Beloruski + Bugarski + Bretonski + Katalonski + Korzikanski + Češki + Danski + Nemački + Grčki + Engleski + Španski + Estonski + Baskijski + Persijski + Finski + Francuski + Irski + Hebrejski + Hrvatski + Mađarski + Armenski + Indonezijski + Islandski + Italijanski + Japanski + Gruzijski + Kambodžanski + Korejski + Kurdski + Kirgiski + Latinski + Litvanski + Letonski + Makedonski + Mongolski + Moldavski + Burmanski + Holandski + Norveški + Poljski + Portugalski + Reto-Romanski + Rumunski + Ruski + Srpsko-Hrvatski + Slovački + Slovenački + Albanski + Srpski + Švedski + Svahili + Turski + Ukrajnski + Vijetnamski + Jidiš + Kineski + + + Andora + Ujedinjeni Arapski Emirati + Avganistan + Antigua and Barbuda + Anguilla + Albanija + Armenija + Holandski Antili + Angola + 
Antarctica + Argentina + American Samoa + Austrija + Australija + Aruba + Azerbejdžan + Bosna i Hercegovina + Barbados + Bangladeš + Belgija + Burkina Faso + Bugarska + Bahrein + Burundi + Benin + Bermuda + Brunej + Bolivija + Brazil + Bahami + Butan + Bouvet Island + Bocvana + Belorusija + Belise + Kanada + Cocos (Keeling) Islands + Democratic Republic of the Congo + Centralno Afrička Republika + Kongo + Švajcarska + Obala Slonovače + Cook Islands + Čile + Kamerun + Kina + Kolumbija + Kostarika + Kuba + Cape Verde + Christmas Island + Kipar + Češka + Nemačka + Džibuti + Danska + Dominika + Dominikanska Republika + Alžir + Ekvador + Estonija + Egipat + Zapadna Sahara + Eritreja + Španija + Etiopija + Finska + Fidži + Falkland Islands + Mikronezija + Faroe Islands + Francuska + en + Gabon + Velika Britanija + Grenada + Gruzija + Francuska Gvajana + Gana + Gibraltar + Greenland + Gambija + Gvineja + Gvadelupe + Ekvatorijalna Gvineja + Grčka + South Georgia and the South Sandwich Islands + Gvatemala + Guam + Gvineja-Bisao + Gvajana + Hong Kong S.A.R., China + Heard Island and McDonald Islands + Honduras + Hrvatska + Haiti + Mađarska + Indonezija + Irska + Izrael + Indija + British Indian Ocean Territory + Irak + Iran + Island + Italija + Jamajka + Jordan + Japan + Kenija + Kirgistan + Kambodža + Kiribati + Comoros + Saint Kitts and Nevis + Severna Koreja + Južna Koreja + Kuvajt + Cayman Islands + Kazahstan + Laos + Liban + Saint Lucia + Lihenštajn + Šrilanka + Liberija + Lesoto + Litvanija + Luksemburg + Letonija + Libija + Maroko + Monako + Moldavija + Madagaskar + Marshall Islands + Makedonija + Mali + Mijnamar + Mongolija + Macao S.A.R., China + Northern Mariana Islands + Martinik + Mauritanija + Montserrat + Malta + Mauricius + Maldives + Malawi + Meksiko + Malezija + Mozambik + Namibija + Nova Kaledonija + Niger + Norfolk Island + Nigerija + Nikaragva + Holandija + Norveška + Nepal + Nauru + Niue + Novi Zeland + Oman + Panama + Peru + Francuska Polinezija + Papua 
Nova Gvineja + Filipini + Pakistan + Poljska + Saint Pierre and Miquelon + Pitcairn + Porto Riko + Palestinian Territory + Portugal + Palau + Paragvaj + Katar + Réunion + Rumunija + Rusija + Ruanda + Saudijska Arabija + Solomon Islands + Sejšeli + Sudan + Švedska + Singapur + Saint Helena + Slovenija + Svalbard and Jan Mayen + Slovačka + Sijera Leone + San Marino + Senegal + Somalija + Srbija + Surinam + Sao Tome and Principe + Salvador + Sirija + Svazilend + Turks and Caicos Islands + Čad + Francuske Južne Teritorije + Togo + Tajland + Tadžikistan + Tokelau + Timor-Leste + Turkmenistan + Tunis + Tonga + Turska + Trinidad i Tobago + Tuvalu + Tajvan + Tanzanija + Ukrajina + Uganda + United States Minor Outlying Islands + Sjedinjene Američke Države + Urugvaj + Uzbekistan + Vatikan + Saint Vincent and the Grenadines + Venecuela + Britanska Devičanska Ostrva + S.A.D. Devičanska Ostrva + Vijetnam + Vanuatu + Wallis and Futuna + Samoa + Jemen + Mayotte + Jugoslavija + Južna Afrika + Zambija + Zimbabve + + + + [a-p r-v z đ ć č ž š {lj} {nj} {dž}] + + + GanjkHmsSEDFwWxhKzAeugXZ + + + + + + jan + feb + mar + apr + maj + jun + jul + avg + sep + okt + nov + dec + + + januar + februar + mart + april + maj + juni + juli + avgust + septembar + oktobar + novembar + decembar + + + + + + + ned + pon + uto + sre + čet + pet + sub + + + nedelja + ponedeljak + utorak + sreda + četvrtak + petak + subota + + + + + + + + + + p. n. e. + n. e. + + + + + + + EEEE, dd. MMMM yyyy. + + + + + EEEE, d.MM.yyyy. + + + + + dd.MM.yyyy. + + + + + d.M.yy. + + + + + + + + HH.mm.ss z + + + + + HH.mm.ss z + + + + + HH.mm.ss + + + + + HH.mm + + + + + + + {1} {0} + + + + + + + + + Centralno Evropsko Vreme + Centralno Evropsko Letnje Vreme + + + CET + CET + + + + + + + , + . 
+ ; + % + 0 + # + + + - + E + + + + + + + YUN + Din + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/sh_YU.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/sh_YU.xml new file mode 100644 index 0000000..b522bb6 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/sh_YU.xml @@ -0,0 +1,10 @@ + + + + + + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/sid.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/sid.xml new file mode 100644 index 0000000..083036a --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/sid.xml @@ -0,0 +1,153 @@ + + + + + + + + + + + Sidaamu Afo + + + Itiyoophiya + + + + [a-z] + + + + + + + + Jan + Feb + Mar + Apr + May + Jun + Jul + Aug + Sep + Oct + Nov + Dec + + + January + February + March + April + May + June + July + August + September + October + November + December + + + + + + + Sam + San + Mak + Row + Ham + Arb + Qid + + + Sambata + Sanyo + Maakisanyo + Roowe + Hamuse + Arbe + Qidaame + + + + + + + + soodo + hawwaro + + + YIA + YIG + + + + + + + EEEE, MMMM dd, yyyy + + + + + dd MMMM yyyy + + + + + dd-MMM-yyyy + + + + + dd/MM/yy + + + + + + + + h:mm:ss a + + + + + h:mm:ss a + + + + + h:mm:ss a + + + + + h:mm a + + + + + + + {1} {0} + + + + + + + + + + ETB + $ + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/sid_ET.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/sid_ET.xml new file mode 100644 index 0000000..d598986 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/sid_ET.xml @@ -0,0 +1,40 @@ + + + + + + + + + + + + + + #,##0.###;-#,##0.### + + + + + + + #E0 + + + + + + + #,##0% + + + + + + + ¤#,##0.00;-¤#,##0.00 + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/sk.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/sk.xml new file mode 100644 index 
0000000..1287db9 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/sk.xml @@ -0,0 +1,435 @@ + + + + + + + + + + + arabský + bulharský + český + dánsky + nemecký + grécky + anglický + španielsky + estónsky + fínsky + francúzsky + hebrejský + chorvátsky + maďarský + taliansky + japonský + kórejský + litovský + lotyšský + holandský + nórsky + poľský + portugalský + rumunský + ruský + slovenský + slovinský + švédsky + turecký + čínsky + + + Andorra + Spojené arabské emiráty + Afganistan + Antigua a Barbados + Anguilla + Albánsko + Arménsko + Holandské Antily + Angola + Antarctica + Argentína + Americká Samoa + Rakúsko + Austrália + Aruba + Azerbajdžan + Bosna a Hercegovina + Barbados + Bangladéš + Belgicko + Burkina Faso + Bulharsko + Bahrajn + Burundi + Benin + Bermudy + Brunej + Bolívia + Brazília + Bahamy + Bután + Bouvetov ostrov + Botswana + Bielorusko + Belize + Kanada + Kokosové (Keelingove) ostrovy + Konžská demokratická republika + Stredoafrická republika + Kongo + Švajčiarsko + Pobrežie Slonoviny + Cookove ostrovy + Čile + Kamerun + Čína + Kolumbia + Kostarika + Kuba + Kapverdy + Vianočný ostrov + Cyprus + Česká republika + Nemecko + Džibuti + Dánsko + Dominika + Dominikánska republika + Alžírsko + Ekvádor + Estónsko + Egypt + Západná Sahara + Eritrea + Španielsko + Etiópia + Fínsko + Fidži + Falklandské ostrovy + Mikronézia, Federatívne štáty + Faerské ostrovy + Francúzsko + en + Gabon + Spojené kráľovstvo + Grenada + Gruzínsko + Francúzska Guayana + Ghana + Gibraltár + Grónsko + Gambia + Guinea + Guadeloupe + Rovníková Guinea + Grécko + Južná Georgia a Južné Sandwichove ostrovy + Guatemala + Guam + Guinea-Bissau + Guayana + Hong Kong S.A.R. 
Číny + Heardove ostrovy a McDonaldove ostrovy + Honduras + Chorvátsko + Haiti + Maďarsko + Indonézia + Írsko + Izrael + India + Britské územie v Indickom oceáne + Irak + Irán + Island + Taliansko + Jamajka + Jordánsko + Japonsko + Keňa + Kirgizsko + Kambodža + Kiribati + Komory + Saint Kitts a Nevis + Kórea, Severná + Kórea, Južná + Kuvajt + Kajmanské ostrovy + Kazachstan + Laoská ľudovodemokratická republika + Libanon + Svätá Lucia + Lichtenštajnsko + Srí Lanka + Libéria + Lesotho + Litva + Luxembursko + Lotyšsko + Lýbijská arabská džamahírija + Maroko + Monako + Moldavsko, republika + Madagaskar + Marshallove ostrovy + Macedónsko, republika + Mali + Mjanmarsko + Mongolsko + Makao S.A.R. Číny + Severné Mariány + Martinik + Mauritánia + Montserrat + Malta + Maurícius + Maldivy + Malawi + Mexiko + Malajzia + Mozambik + Namíbia + Nová Kaledónia + Niger + Norfolkov ostrov + Nigéria + Nikaragua + Holandsko + Nórsko + Nepál + Nauru + Niue + Nový Zéland + Omán + Panama + Peru + Francúzska Polynézia + Papua Nová Guinea + Filipíny + Pakistan + Poľsko + Saint Pierre a Miquelon + Pitcairnove ostrovy + Portoriko + Palestínske územie + Portugalsko + Palau + Paraguaj + Katar + Reunion + Rumunsko + Ruská federácia + Rwanda + Saudská Arábia + Šalamúnove ostrovy + Seychelské ostrovy + Sudán + Švédsko + Singapur + Svätá Helena + Slovinsko + Špicbergy a Jan Mayen + Slovenská republika + Sierra Leone + San Maríno + Senegal + Somálsko + Serbia + Surinam + Svätý Tomáš a Princove ostrovy + Salvador + Sýrska arabská republika + Svazijsko + Turks a Caicos + Čad + Francúzske južné územia + Togo + Thajsko + Tadžikistan + Tokelau + Východný Timor + Turkménsko + Tunisko + Tonga + Turecko + Trinidad a Tobago + Tuvalu + Tajwan + Tanzánia + Ukrajina + Uganda + Menšie odľahlé ostrovy USA + Spojené štáty + Uruguaj + Uzbekistan + Svätá stolica (Vatikánsky mestský štát) + Svätý Vincent a Grenadíny + Venezuela + Britské panenské ostrovy + Panenské ostrovy - USA + Vietnam + Vanuatu + Wallis a Futuna + 
Samoa + Jemen + Mayotte + Juhoslávia + Južná Afrika + Zambia + Zimbabwe + + + + [a-z ý á é í ó ú ä ô ĺ ŕ č ď ľ ň š ť ž] + + + GanjkHmsSEDFwWxhKzAeugXZ + + + + + + jan + feb + mar + apr + máj + jún + júl + aug + sep + okt + nov + dec + + + január + február + marec + apríl + máj + jún + júl + august + september + október + november + december + + + + + + + Ne + Po + Ut + St + Št + Pi + So + + + Nedeľa + Pondelok + Utorok + Streda + Štvrtok + Piatok + Sobota + + + + + + + + + + pred n.l. + n.l. + + + + + + + EEEE, d. MMMM yyyy + + + + + d. MMMM yyyy + + + + + d.M.yyyy + + + + + d.M.yyyy + + + + + + + + H:mm:ss z + + + + + H:mm:ss z + + + + + H:mm:ss + + + + + H:mm + + + + + + + {1} {0} + + + + + + + + + , +   + ; + % + 0 + # + + + - + E + + + + + + + SKK + Sk + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/sk_SK.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/sk_SK.xml new file mode 100644 index 0000000..7c0740b --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/sk_SK.xml @@ -0,0 +1,40 @@ + + + + + + + + + + + + + + #,##0.###;-#,##0.### + + + + + + + #E0 + + + + + + + #,##0% + + + + + + + #,##0.00 ¤;-#,##0.00 ¤ + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/sl.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/sl.xml new file mode 100644 index 0000000..918756f --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/sl.xml @@ -0,0 +1,435 @@ + + + + + + + + + + + Arabščina + Bolgarščina + Češčina + Danščina + Nemščina + Grščina + Angleščina + Španščina + Estonščina + Finščina + Francoščina + Hebrejščina + Hrvaščina + Madžarščina + Italijanščina + Japonščina + Korejščina + Litovščina + Letonščina + Nizozemščina + Norveščina + Poljščina + Portugalščina + Romunščina + Ruščina + Slovaščina + Slovenščina + Švedščina + Turščina + Kitajščina + + + Andora + Združeni arabski emirati + Afganistan + Antigva in Barbuda + 
Angvila + Albanija + Armenija + Nizozemski Antili + Angola + Antarktika + Argentina + Ameriška Samoa + Avstrija + Avstralija + Aruba + Azerbajdžan + Bosna in Hercegovina + Barbados + Bangladeš + Belgija + Burkina Faso + Bolgarija + Bahrajn + Burundi + Benin + Bermuda + Brunej + Bolivija + Brazilija + Bahami + Butan + Otok Bouvet + Bocvana + Belorusija + Belize + Kanada + Kokosovi otoki + Demokratična republika Kongo + Centralnoafriška republika + Kongo + Švica + Slonokoščena obala + Cookovi otoki + Čile + Kamerun + Kitajska + Kolumbija + Kostarika + Kuba + Kapverdski otoki + Božični otok + Ciper + Češka + Nemčija + Džibuti + Danska + Dominika + Dominikanska republika + Alžirija + Ekvador + Estonija + Egipt + Zahodna Sahara + Eritreja + Španija + Etiopija + Finska + Fidži + Falklandski (Malvinski) otoki + Mikronezija + Fererski otoki + Francija + en + Gabon + Velika Britanija + Grenada + Gruzija + Francoska Gvajana + Gana + Gibraltar + Grenlandija + Gambija + Gvineja + Guadeloupe + Ekvatorialna Gvineja + Grčija + Južna Georgija in Južni Sandwich Islands + Gvatemala + Guam + Gvineja Bissau + Gvajana + Kitajska republika Hong Kong + Heardov otok in McDonaldovi otoki + Honduras + Hrvaška + Haiti + Madžarska + Indonezija + Irska + Izrael + Indija + Britanska Indija + Irak + Iran + Islandija + Italija + Jamajka + Jordan + Japonska + Kenija + Kirgizistan + Kambodža + Kiribati + Komori + Saint Kitts in Nevis + Severna Koreja + Južna Koreja + Kuvajt + Kajmanski otoki + Kazahstan + Ljudska demokratična republika Laos + Libanon + Saint Lucia + Liechtenstein + Šrilanka + Liberija + Lesoto + Litva + Luxemburg + Latvija + Libija + Maroko + Monako + Republika Moldova + Madagaskar + Marshallovi otoki + Republika Makedonija + Mali + Myanmar + Mongolija + Kitajska republika Macao + Severni Marianski otoki + Martinik + Mavretanija + Montserrat + Malta + Mauritius + Maldivi + Malavi + Mehika + Malezija + Mozambik + Namibija + Nova Kaledonija + Nigerija + Otok Norfolk + Nigerija + 
Nikaragva + Nizozemska + Norveška + Nepal + Nauru + Niue + Nova Zelandija + Oman + Panama + Peru + Francoska Polinezija + Papua Nova Gvineja + Filipini + Pakistan + Poljska + Saint Pierre in Miquelon + Pitcairn + Portoriko + Palestinsko ozemlje + Portugalska + Palau + Paragvaj + Katar + Reunion + Romunija + Ruska federacija + Ruanda + Saudova Arabija + Salomonovo otočje + Sejšeli + Sudan + Švedska + Singapur + Sveta Helena + Slovenija + Svalbard in Jan Mayen + Slovaška + Sierra Leone + San Marino + Senegal + Somalija + Serbia + Surinam + Sao Tome in Principe + Salvador + Sirija + Svazi + Otočji Turks in Caicos + Čad + Francoski zahodni teritorij + Togo + Tajska + Tadžikistan + Tokelau + Vzhodni Timor + Turkmenistan + Tunizija + Tonga + Turčija + Trinidad in Tobago + Tuvalu + Tajvan + Tanzanija + Ukrajina + Uganda + Ameriški manjši oddaljeni otoki + Združene države Amerike + Urugvaj + Uzbekistan + Vatikan + Saint Vincent in Grenadine + Venezuela + Britanski Deviški otoki + Ameriški Deviški otoki + Vietnam + Vanuatu + Wallis in Futuna + Samoa + Jemen + Mayotte + Jugoslavija + Južna Afrika + Zambija + Zimbabve + + + + [a-p r-v z č š ž] + + + GanjkHmsSEDFwWxhKzAeugXZ + + + + + + jan + feb + mar + apr + maj + jun + jul + avg + sep + okt + nov + dec + + + januar + februar + marec + april + maj + junij + julij + avgust + september + oktober + november + december + + + + + + + ned + pon + tor + sre + čet + pet + sob + + + nedelja + ponedeljek + torek + sreda + četrtek + petek + sobota + + + + + + + + + + pr.n.š. + po Kr. + + + + + + + EEEE, dd. MMMM yyyy + + + + + dd. MMMM yyyy + + + + + yyyy.M.d + + + + + yy.M.d + + + + + + + + H:mm:ss z + + + + + H:mm:ss z + + + + + H:mm:ss + + + + + H:mm + + + + + + + {1} {0} + + + + + + + + + , + . 
+ ; + % + 0 + # + + + - + E + + + + + + + SIT + SIT + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/sl_SI.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/sl_SI.xml new file mode 100644 index 0000000..5d83684 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/sl_SI.xml @@ -0,0 +1,10 @@ + + + + + + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/so.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/so.xml new file mode 100644 index 0000000..90548f5 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/so.xml @@ -0,0 +1,252 @@ + + + + + + + + + + + Soomaali + + + Imaaraadka Carabta ee Midoobay + Afgaanistaan + Armeeniya + Angoola + Osteeriya + Awstraaliya + Boosniya Heersigoviina + Baarbadoos + Bangaala-Deesh + Beljiyam + Baxrayn + Beniin + Braasiil + Kanada + Swiiserlaand + Jili + Kameruun + Shiinaha + Kuuba + Jarmal + Jabuuti + Danmaark + Masar + Isbeyn + Itoobiya + Fiinlaand + Faransiis + Giriinaada + Gini + Giriigga + Korweeshiya + Hangeri + Indoneesiya + Ayrlaanda + Israa'iil + Hindiya + Ciraaq + Iiraan + Iislaand + Talyaani + Jameyka + Urdun + Jabbaan + Kiiniya + Kamboodiya + Kuuriyada Waqooyi + Kuuriyada Koonfureed + Kuwayt + Kasaakhistaan + Lubnaan + Siirilaanka + Laybeeriya + Losooto + Luksemboorg + Laatfiya + Liibiya + Marooko + Moonako + Makadooniya + Maali + Muritaaniya + Maalda + Maaldiqeen + Malaawi + Meksiko + Musambiig + Namiibiya + Nayjeeriya + Nikaraaguwa + Noorweey + Neyuusilaand + Cumaan + Filibiin + Bakistaan + Booland + Bortuqaal + Qadar + Rumaaniya + Ruush + Sacuudi Carabiya + Sudaan + Iswidhan + Siraaliyoon + Soomaaliya + Suuriya + Jaad + Toogo + Taylaand + Tuniisiya + Turki + Tansaaniya + Ugaanda + Qaramada Midoobey ee Maraykanka + Faatikaan + Fenisuweela + Fiyetnaam + Yaman + Koonfur Afrika + Saambiya + Simbaabwe + + + + [a-z] + + + + + + + + Kob + Lab + Sad + Afr + Sha + Lix + Tod + 
Sid + Sag + Tob + KIT + LIT + + + Bisha Koobaad + Bisha Labaad + Bisha Saddexaad + Bisha Afraad + Bisha Shanaad + Bisha Lixaad + Bisha Todobaad + Bisha Sideedaad + Bisha Sagaalaad + Bisha Tobnaad + Bisha Kow iyo Tobnaad + Bisha Laba iyo Tobnaad + + + + + + + Axa + Isn + Sal + Arb + Kha + Jim + Sab + + + Axad + Isniin + Salaaso + Arbaco + Khamiis + Jimco + Sabti + + + + + + + + sn + gn + + + Ciise ka hor + Ciise ka dib + + + + + + + EEEE, MMMM dd, yyyy + + + + + dd MMMM yyyy + + + + + dd-MMM-yyyy + + + + + dd/MM/yy + + + + + + + + h:mm:ss a + + + + + h:mm:ss a + + + + + h:mm:ss a + + + + + h:mm a + + + + + + + {1} {0} + + + + + + + + + + KES + Ksh + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/so_DJ.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/so_DJ.xml new file mode 100644 index 0000000..7c035e8 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/so_DJ.xml @@ -0,0 +1,46 @@ + + + + + + + + + + + + + + #,##0.###;-#,##0.### + + + + + + + #E0 + + + + + + + #,##0% + + + + + + + ¤#,##0.00;-¤#,##0.00 + + + + + + DJF + $ + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/so_ET.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/so_ET.xml new file mode 100644 index 0000000..0213721 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/so_ET.xml @@ -0,0 +1,46 @@ + + + + + + + + + + + + + + #,##0.###;-#,##0.### + + + + + + + #E0 + + + + + + + #,##0% + + + + + + + ¤#,##0.00;-¤#,##0.00 + + + + + + ETB + $ + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/so_KE.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/so_KE.xml new file mode 100644 index 0000000..53efa6f --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/so_KE.xml @@ -0,0 +1,40 @@ + + + + + + + + + + + + + + #,##0.###;-#,##0.### + + + + + + + #E0 + + + + + + + #,##0% + + + + + + + 
¤#,##0.00;-¤#,##0.00 + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/so_SO.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/so_SO.xml new file mode 100644 index 0000000..75170f8 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/so_SO.xml @@ -0,0 +1,46 @@ + + + + + + + + + + + + + + #,##0.###;-#,##0.### + + + + + + + #E0 + + + + + + + #,##0% + + + + + + + ¤#,##0.00;-¤#,##0.00 + + + + + + SOS + $ + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/sq.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/sq.xml new file mode 100644 index 0000000..c2d03ac --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/sq.xml @@ -0,0 +1,319 @@ + + + + + + + + + + + shqipe + + + Andorrë + Emiratet Arabe te Bashkuara + Afganistan + Antigua e Barbuda + Shqipëria + Armeni + Angolë + Argjentinë + Austri + Australi + Azerbajxhan + Bosnja dhe Hercegovina + Belgjikë + Bullgari + Bahrein + Brunej + Bolivi + Butan + Botsvana + Bjellorusi + Kanada + Republika Qendrore e Afrikës + Kongo + Zvicër + Bregu i Fildishtë + Kili + Kamerun + Kinë + Kolumbi + Kosta Rika + Kubë + Kap Verde + Qipro + Republika e Çekisë + Gjermani + Xhibuti + Danimarkë + Dominikë + Republika Dominikanë + Algjeri + Ekuator + Estoni + Egjipt + Saharaja Perëndimore + Eritre + Spanjë + Etiopi + Finlandë + Fixhi + Mikronezi + Francë + Gjabon + Gjeorgji + Ganë + Gambi + Guine + Guineja Ekuatoriale + Greqi + Guatemalë + Guine Bisau + Guajana + Kroaci + Hungari + Indonezi + Irlandë + Izrael + Indi + Irak + Islandë + Itali + Xhamajkë + Jordani + Japoni + Kenia + Kirgistan + Kamboxhi + Qiribati + Komore + Saint Kitts e Nevis + Koreja e Veriut + Koreja e Jugut + Kuvajt + Kazakistan + Liban + Lihtënshtajn + Liberi + Lesoto + Lituani + Luksemburg + Letoni + Libi + Maroko + Monako + Moldavi + Madagaskar + Ishujt Marshall + Maqedoni + Mongoli + Mauritani + Maltë + Maldivit + Malavi 
+ Meksikë + Malajzi + Mozambik + Namibi + Nigeri + Nikaragua + Vendet e Ulëta + Norvegji + Zelanda e Re + Papua Guineja e Re + Filipine + Poloni + Portugali + Paraguaj + Katar + Rumani + Rusi + Ruanda + Arabia Saudite + Ishujt Solomon + Sishel + Suedi + Singapor + Slloveni + Sllovaki + Siera Leone + Somali + Serbië + Sao Tome e Prinsipe + Siri + Svazilandë + Çad + Togo + Tajlandë + Taxhikistan + Tunisi + Turqi + Trinidad e Tobago + Tajvan + Tanzani + Ukrainë + Shtetet e Bashkuara të Amerikës + Uruguaj + Vatikan + Saint Vincent e Grenadinet + Venezuelë + Jemen + Afrika e Jugut + Zambi + Zimbabve + + + + [a-zçë{dh}{gj}{ll}{nj}{rr}{sh}{th}{xh}{zh}] + + + GanjkHmsSEDFwWxhKzAeugXZ + + + + + + Jan + Shk + Mar + Pri + Maj + Qer + Kor + Gsh + Sht + Tet + Nën + Dhj + + + janar + shkurt + mars + prill + maj + qershor + korrik + gusht + shtator + tetor + nëntor + dhjetor + + + + + + + Die + Hën + Mar + Mër + Enj + Pre + Sht + + + e diel + e hënë + e martë + e mërkurë + e enjte + e premte + e shtunë + + + + + + + + PD + MD + + + p.e.r. + n.e.r. + + + + + + + EEEE, dd MMMM yyyy + + + + + dd MMMM yyyy + + + + + yyyy-MM-dd + + + + + yy-MM-dd + + + + + + + + h.mm.ss.a z + + + + + h.mm.ss.a z + + + + + h:mm:ss.a + + + + + h.mm.a + + + + + + + {1} {0} + + + + + + + + + , + . 
+ ; + % + 0 + # + + + - + E + + + + + + + ALL + Lek + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/sq_AL.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/sq_AL.xml new file mode 100644 index 0000000..dbd18c3 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/sq_AL.xml @@ -0,0 +1,40 @@ + + + + + + + + + + + + + + #,##0.###;-#,##0.### + + + + + + + #E0 + + + + + + + #,##0% + + + + + + + ¤#,##0.00;-¤#,##0.00 + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/sr.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/sr.xml new file mode 100644 index 0000000..96859b7 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/sr.xml @@ -0,0 +1,479 @@ + + + + + + + + + + + Африканерски + Арапски + Белоруски + Бугарски + Бретонски + Каталонски + Корзикански + Чешки + Дански + Немачки + Грчки + Енглески + Есперанто + Шпански + Естонски + Баскијски + Персијски + Фински + Француски + Ирски + Хебрејски + Хрватски + Мађарски + Арменски + Индонезијски + Исландски + Италијански + Јапански + Грузијски + Камбоџански + Корејски + Курдски + Киргиски + Латински + Литвански + Летонски + Македонски + Монголски + Молдавски + Бурмански + Холандски + Норвешки + Пољски + Португалски + Рето-Романски + Румунски + Руски + Санскрит + Српско-Хрватски + Словачки + Словеначки + Албански + Српски + Шведски + Свахили + Турски + Украјински + Вијетнамски + Јидиш + Кинески + + + Андора + Уједињени Арапски Емирати + Авганистан + Албанија + Арменија + Холандски Антили + Ангола + Аргентина + Аустрија + Аустралија + Аруба + Азербејџан + Босна и Херцеговина + Барбадос + Бангладеш + Белгија + Буркина Фасо + Бугарска + Бахреин + Бурунди + Бенин + Бермуда + Брунеј + Боливија + Браѕил + Бахами + Бутан + Боцвана + Белорусија + Белисе + Канада + Централно Афричка Република + Конго + Швајцарска + Обала Слоноваче + Чиле + Камерун + Кина + Колумбија + Костарика + Куба 
+ Кипар + Чешка + Немачка + Џибути + Данска + Доминика + Доминиканска Република + Алжир + Еквадор + Естонија + Египат + Западна Сахара + Еритреја + Шпанија + Етиопија + Финска + Фиџи + Микронезија + Француска + Габон + Велика Британија + Грузија + Француска Гвајана + Гана + Гамбија + Гвинеја + Гваделупе + Екваторијална Гвинеја + Грчка + Гватемала + Гвинеја-Бисао + Гвајана + Хондурас + Хрватска + Хаити + Мађарска + Индонезија + Ирска + Израел + Индија + Ирак + Иран + Исланд + Италија + Јамајка + Јордан + Јапан + Кенија + Киргизстан + Камбоџа + Северна Кореја + Јужна Кореја + Кувајт + Казахстан + Лаос + Либан + Лихенштајн + Шри Ланка + Либерија + Лесото + Литванија + Луксембург + Летонија + Либија + Мароко + Монако + Молдавија + Мадагаскар + Македонија + Мали + Мијнамар + Монголија + Мартиник + Мауританија + Малта + Маурицијус + Мексико + Малезија + Мозамбик + Намибија + Нова Каледонија + Нигер + Нигерија + Никарагва + Холандија + Норвешка + Непал + Нови Зеланд + Оман + Панама + Перу + Француска Полинезија + Папуа Нова Гвинеја + Филипини + Пакистан + Пољска + Порто Рико + Португал + Парагвај + Катар + Румунија + Русија + Руанда + Саудијска Арабија + Сејшели + Судан + Шведска + Сингапур + Словенија + Словачка + Сијера Леоне + Сенегал + Сомалија + Србија + Суринам + Салвадор + Сирија + Свазиленд + Чад + Француске Јужне Територије + Того + Тајланд + Таџикистан + Туркменистан + Тунис + Турска + Тринидад и Тобаго + Тајван + Танзанија + Украјина + Уганда + Сједињене Америчке Државе + Уругвај + Узбекистан + Ватикан + Венецуела + Британска Девичанска Острва + С.А.Д. 
Девичанска Острва + Вијетнам + Јемен + Југославија + Јужна Африка + Замбија + Зимбабве + + + + [а-и к-ш ђ ј љ њ ћ џ] + + + GanjkHmsSEDFwWxhKzAeugXZ + + + + + + јан + феб + мар + апр + мај + јун + јул + абг + сеп + окт + ноб + дец + + + ј + ф + м + а + м + ј + ј + а + с + о + н + д + + + јануара + фебруара + марта + априла + маја + јуна + јула + августа + септембра + октобра + новембра + децембра + + + + + јан + феб + мар + апр + мај + јун + јул + абг + сеп + окт + ноб + дец + + + ј + ф + м + а + м + ј + ј + а + с + о + н + д + + + јануар + фебруар + март + април + мај + јун + јул + август + септембар + октобар + новембар + децембар + + + + + + + нед + пон + уто + сре + чет + пет + суб + + + недеља + понедељак + уторак + среда + четвртак + петак + субота + + + + + + + + + + п. н. е. + н. е + + + + + + + EEEE, dd.MMMM.yyyy. + + + + + dd.MM.yyyy. + + + + + dd.MM.yyyy. + + + + + d.M.yy. + + + + + + + + HH.mm.ss z + + + + + HH.mm.ss z + + + + + HH.mm.ss + + + + + HH.mm + + + + + + + {1} {0} + + + + + + + + + Централно Европско Време + Централно Европско Време + + + CET + CET + + + + + + + , + . 
+ ; + % + 0 + # + + + - + E + + + + + + + YUN + Дин + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/sr_YU.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/sr_YU.xml new file mode 100644 index 0000000..c3cdecf --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/sr_YU.xml @@ -0,0 +1,10 @@ + + + + + + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/sv.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/sv.xml new file mode 100644 index 0000000..aa9caea --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/sv.xml @@ -0,0 +1,2750 @@ + + + + + + + + + + + afar + abkhaziska + achinese + acholi + adangme + adygeiska + avestiska + afrikaans + afro-asiatiskt (andra) + afrihili + akan + akkadiska + aleutiska + Algonkinspråk + amhariska + aragonesiska + fornengelska (ca. 450-1100) + Apache-språk + arabiska + arameiska + araukanska + arapaho + artificiellt (annat) + arawakiska + assami + asturiska + Athapaskiska språk + Australiska språk + avariska + awadhi + aymara + azerbadzjanska + basjkiriska + banda + Bamilekiska språk + baluchi + bambara + balinesiska + basa + baltiskt (annat) + vitryska + beyja + bemba + berber + bulgariska + bihari + bhojpuri + bislama + bikol + bini + siksika + bambara + bengali + bantuspråk + tibetanska + bretonska + braj + bosniska + batak + buriat + buginesiska + blin + katalanska + caddo + centralamerikanskt indianskt (annat) + karibiska + kaukasiskt (annat) + tjetjenska + cebuano + keltiskt (annat) + chamorro + chibcha + chagatai + chuukesiska + mari + chinook + choctaw + chipewyan + cherokesiska + cheyenne + Chami-språk + korsiska + koptiska + kreolska och pidgin, engelsk-baserat (annat) + kreolska och pidgin, fransk-baserat (annat) + kreolska och pidgin, portugisisk-baserat (annat) + cree + krimturkiska; krimtatar + kreolska och pidgin (annat) + tjeckiska + kasjubiska + kyrkoslaviska + 
kushitiska (annat) + tjuvasjiska + walesiska + danska + dakota + dargwa + dayak + tyska + delaware + slave + dogrib + dinka + dogri + dravidiskt (annat) + lågsorbiska + duala + medelnederländska (ca. 1050-1350) + divehi + dyula + dzongkha + ewe + efik + fornegyptiska + ekajuk + grekiska + elamitiska + engelska + medelengelska (1100-1500) + esperanto + spanska + estniska + baskiska + ewondo + farsi + fang + fanti + fulani + finska + finskugriskt (annat) + fidjianska + färöiska + franska + medelfranska (ca.1400-1600) + fornfranska (842- ca.1400) + friuilian + frisiska + irländsk gaeliska + ga + gayo + gbaya + skotsk gaeliska + germanskt (annat) + geez + gilbertesiska; kiribati + galiciska + medelhögtyska (ca.1050-1500) + guaraní + fornhögtyska (ca.750-1050) + gondi + gorontalo + gotiska + grebo + forngrekiska (till 1453) + gujarati + manx gaeliska + gwichʻin + haussa + haida + hawaiiska + hebreiska + hindi + hiligaynon + himachali + hettitiska + hmong + hiri motu + kroatiska + högsorbiska + haitiska + ungerska + hupa + armeniska + herero + interlingua + iban + indonesiska + interlingue + ibo + sichuan yi + inupiaq + iloko + indiskt (annat) + indo-europeiskt (annat) + ingusjiska + ido + iranska + irokesiska + isländska + italienska + inuktitut + japanska + lojban + judisk farsi + judisk arabiska + javanska + georgiska + karakalpakiska + kabyliska + kachin + kamba + karen + kawi + kabardinska + kikongo + khasi + khoisan (annat) + khotanesiska + kikuyu + kuanyama + kazakiska + grönländska + kambodjanska; khmer + kinbundu + kanaresiska; kannada + koreanska + konkani + kosreanska + kpelle + kanuri + karachay-balkar + kru + kurukh + kashmiri + kurdiska + kumyk + kutenai + kome + korniska + kirgisiska + latin + ladino + lahnda + lamba + luxemburgiska + lezghien + luganda + limburgiska + lingala + laotiska + lolo; mongo + lozi + litauiska + luba-katanga + luba-lulua + luiseño + lunda + lushai + lettiska + madurese + magahi + maithili + makasar + mande + austronesiska + 
massajiska + moksja + mandar + mende + malagassiska + medeliriska (900-1200) + marshalliska + maori + mic-mac + minangkabau + Blandade språk + makedonska + mon-khmer (annat) + malayalam + mongoliska + manchu + manipuri + manobo-sråk + moldaviska + mohawk + mossi + marathi + malajiska + maltesiska + Flera språk + Munda-språk + muskogee + marwari + burmanska + maya + erjya + nauru + nahuatl; aztekiska + nordamerikanskt indianspråk (annat) + napolitanska + norskt bokmål + nord­ndebele + lågtyska; lågsaxiska + nepali + newari + ndonga + nias + kordofanspråk (annat) + niuean + nederländska + ny­norsk + norska + nogai + fornnordiska + syd­ndebele + sotho, nord + Nubiska språk + navaho + nyanja + nyamwezi + nyankole + nyoro + nzima + provensalska (efter 1500 + odjibwa; chippewa + oromo + oriya + ossetiska + osage + ottomanturkiska (1500-1928) + Oto-mangue-språk + panjabi + papuaspråk (annat) + pangasinan + pahlavi + pampanga + papiamento + palau + fornpersiska (ca.600-400 b.c.) + filippinskt språk (annat) + kananeiska; feniciska + pali + polska + ponape + Prakritspråk + fornprovensalska (till 1500) + pashto; afghanska + portugisiska + quechua + rajasthani + rapanui + rarotongan + räto­romanska + rundi + rumänska + romanskt (annat) + romani + ryska + rwanda; kinjarwanda + sanskrit + sandawe + jakutiska + nordamerikanskt indianskt (annat) + salish-språk + samaritanska + sasak + santali + sardiska + skotska + sindhi + nord­samiska + selkup + semitiskt (annat) + sango + forniriska (till 900) + teckenspråk + serbokroatiska + shan + singalesiska + sidamo + siouxspråk + Sinotibetanska språk + slovakiska + slovenska + slaviskt (annat) + samoanska + sydsamiska + samiskt språk (annat) + lulesamiska + enaresamiska + skoltsamiska + shona; manshona + soninke + somali + sogdiska + songhai + albanska + serbiska + serer + swati + nilosahariskt (annat) + syd­sotho + sundanesiska + sukuma + susu + sumeriska + svenska + swahili + syriska + tamil + tai (annat) + telugu + temne + tereno + 
tetum + tadzjikiska + thailändska + tigrinja + tigré + tivi + turkmeniska + tokelau + tagalog + tlingit + tamashek + tswana + tonga + tonga-Nyasa + tok pisin + turkiska + tsonga + tsimshian + tatariska + tumbuka + Tupi-språk + altaiskt (annat) + tuvaluan + twi + tahitiska + tuviniska + udmurtiska + uiguriska + ugaritiska + ukrainska + umbundu + obestämt + urdu + uzbekiska + venda + vietnamesiska + volapük + votiska + walloon + wakash + walamo + waray + washo + Sorbiska språk + wolof + kalmuckiska + xhosa + jiddisch + yoruba + Yupiska språk + zhuang + zapotek + zenaga + kinesiska + zandé + zulu + zuñi + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Andorra + Förenade Arabemiraten + Afganistan + Antigua och Barbuda + Anguilla + Albanien + Armenien + Nederländska Antillerna + Angola + Antarktis + Argentina + Amerikanska Samoa + Österrike + Australien + Aruba + Azerbajdzjan + Bosnien och Hercegovina + Barbados + Bangladesh + Belgien + Burkina Faso + Bulgarien + Bahrain + Burundi + Benin + Bermuda + Brunei + Bolivia + Brasilien + Bahamas + Bhutan + Bouvetön + Botswana + Vitryssland + Belize + Kanada + Kokosöarna (Keelingöarna) + Demokratiska republiken Kongo + Centralafrikanska republiken + Kongo + Schweiz + Elfenbenskusten + Cooköarna + Chile + Kamerun + Kina + Colombia + Costa Rica + Kuba + Kap Verde + Julön + Cypern + Tjeckien + Tyskland + Djibouti + Danmark + Dominica + Dominikanska republiken + Algeriet + Ecuador + Estland + Egypten + Västra Sahara + Eritrea + Spanien + Etiopien + Finland + Fiji + Falklandsöarna + Mikronesien + Färöarna + Frankrike + en + Gabon + Storbritannien + Grenada + Georgien + Franska Guyana + Ghana + Gibraltar + Grönland + Gambia + Guinea + Guadelope + Ekvatorialguinea + Grekland + Sydgeorgien och Södra Sandwichöarna + Guatemala + Guam + Guinea-Bissau + Guyana + Hongkong (S.A.R. 
Kina) + Heard- och McDonaldöarna + Honduras + Kroatien + Haiti + Ungern + Indonesien + Irland + Israel + Indien + Brittiska Indiska oceanöarna + Irak + Iran + Island + Italien + Jamaica + Jordanien + Japan + Kenya + Kirgisistan + Kambodja + Kiribati + Komorerna + S:t Christopher och Nevis + Nordkorea + Sydkorea + Kuwait + Kajmanöarna + Kazachstan + Laos + Libanon + S:t Lucia + Liechtenstein + Sri Lanka + Liberia + Lesotho + Litauen + Luxemburg + Lettland + Libyen + Marocko + Monaco + Moldavien + Madagaskar + Marshallöarna + Makedonien + Mali + Myanmar + Mongoliet + Macao (S.A.R. Kina) + Nordmarianerna + Martinique + Mauretanien + Montserrat + Malta + Mauritius + Maldiverna + Malawi + Mexiko + Malaysia + Moçambique + Namibia + Nya Kaledonien + Niger + Norfolkön + Nigeria + Nicaragua + Nederländerna + Norge + Nepal + Nauru + Niueön + Nya Zeeland + Oman + Panama + Peru + Franska Polynesien + Papua Nya Guinea + Filippinerna + Pakistan + Polen + S:t Pierre och Miquelon + Pitcairn + Puerto Rico + Palestinska territoriet + Portugal + Palau + Paraguay + Qatar + Réunion + Rumänien + Ryssland + Rwanda + Saudi-Arabien + Salomonöarna + Seychellerna + Sudan + Sverige + Singapore + S:t Helena + Slovenien + Svalbard och Jan Mayen + Slovakien + Sierra Leone + San Marino + Senegal + Somalia + Serbien + Surinam + São Tomé och Príncipe + El Salvador + Syrien + Swaziland + Turks- och Caicosöarna + Tchad + Franska Sydterritorierna + Togo + Thailand + Tadzjikistan + Tokelauöarna + Östtimor + Turkmenistan + Tunisien + Tonga + Turkiet + Trinidad och Tobago + Tuvalu + Taiwan + Tanzania + Ukraina + Uganda + Små, avlägset belägna öar som tillhör Förenta staterna + Amerikas Förenta Stater + Uruguay + Uzbekistan + Vatikanstaten + S:t Vincent och Grenadinerna + Venezuela + Brittiska Jungfruöarna + Amerikanska Jungfruöarna + Vietnam + Vanuatu + Wallis och Futunaöarna + Samoa + Jemen + Mayotte + Jugoslavien + Sydafrika + Zambia + Zimbabwe + + + Reviderad + + + Kalendar + Sortera + Valuta + + + 
Buddistisk kalender + Kinesisk kalender + Gregoriansk kalender + Hebreisk kalender + Islamisk kalender + Islamisk civil kalender + Japansk kalender + Direkt ordning + Telefonboksordning + Pinyinordning + Raderingsordning + Traditionell ordning + + + + [a-zäöåáéëü] + + + + + + + + jan + feb + mar + apr + maj + jun + jul + aug + sep + okt + nov + dec + + + J + F + M + A + M + J + J + A + S + O + N + D + + + januari + februari + mars + april + maj + juni + juli + augusti + september + oktober + november + december + + + + + + + + + ti + on + to + fr + + + + S + M + T + O + T + F + L + + + söndag + måndag + tisdag + onsdag + torsdag + fredag + lördag + + + + + + + + fm + em + + + f.Kr. + e.Kr. + + + + + + + 'den 'd MMMM yyyy + + + + + 'den 'd MMM yyyy + + + + + yyyy-MM-dd + + + + + yyyy-MM-dd + + + + + + + + 'kl. 'HH.mm.ss z + + + + + HH.mm.ss z + + + + + HH.mm.ss + + + + + HH.mm + + + + + + + {1} {0} + + + + + + + + + Pacific, normaltid + Pacific, sommartid + + + PST + PDT + + Los Angeles + + + + Pacific, normaltid + Pacific, sommartid + + + PST + PDT + + Los Angeles + + + + Mountain, normaltid + Mountain, sommartid + + + MST + MDT + + Denver + + + + Mountain, normaltid + Mountain, sommartid + + + MST + MDT + + Denver + + + + Mountain, normaltid + Mountain, sommartid + + + MST + MST + + Phoenix + + + + Mountain, normaltid + Mountain, sommartid + + + MST + MST + + Phoenix + + + + Central, normaltid + Central, sommartid + + + CST + CDT + + Chicago + + + + Central, normaltid + Central, sommartid + + + CST + CDT + + Chicago + + + + Eastern, normaltid + Eastern, sommartid + + + EST + EDT + + New York + + + + Eastern, normaltid + Eastern, sommartid + + + EST + EDT + + New York + + + + Eastern, normaltid + Eastern, normaltid + + + EST + EST + + Indianapolis + + + + Eastern, normaltid + Eastern, normaltid + + + EST + EST + + Indianapolis + + + + Hawaii, normaltid + Hawaii, normaltid + + + HST + HST + + Honolulu + + + + Hawaii, normaltid + Hawaii, normaltid + + + HST + HST + + 
Honolulu + + + + Alaska, normaltid + Alaska, sommartid + + + AST + ADT + + Anchorage + + + + Alaska, normaltid + Alaska, sommartid + + + AST + ADT + + Anchorage + + + + Atlantic, normaltid + Atlantic, sommartid + + + AST + ADT + + Halifax + + + + Newfoundland, normaltid + Newfoundland, sommartid + + + CNT + CDT + + St. Johns + + + + Newfoundland, normaltid + Newfoundland, sommartid + + + CNT + CDT + + St. Johns + + + + Centraleuropa, normaltid + Centraleuropa, sommartid + + + CET + CEST + + Paris + + + + Centraleuropa, normaltid + Centraleuropa, sommartid + + + CET + CEST + + Paris + + + + Greenwichtid + Greenwichtid + + + GMT + GMT + + London + + + + Greenwichtid + Greenwichtid + + + GMT + GMT + + Casablanca + + + + Israel, normaltid + Israel, sommartid + + + IST + IDT + + Jerusalem + + + + Japan, normaltid + Japan, normaltid + + + JST + JST + + Tokyo + + + + Japan, normaltid + Japan, normaltid + + + JST + JST + + Tokyo + + + + Östeuropa, normaltid + Östeuropa, sommartid + + + EET + EEST + + Bukarest + + + + Kina, normaltid + Kina, normaltid + + + CTT + CDT + + Shanghai + + + + Kina, normaltid + Kina, normaltid + + + CTT + CDT + + Shanghai + + + + + + , +   + ; + % + 0 + # + + + - + E + + + + + + + Andorransk diner + ADD + + + Andorransk peseta + ADP + + + Förenade arabemiratens dirham + AED + + + Affars and Issas franc + AIF + + + Albansk lek (1946-1961) + ALK + + + Albansk lek + lek + + + Albansk lek – Valute) + ALV + + + Albansk dollar – Foreign Exchange Certificates) + ALX + + + Armenisk dram + dram + + + Nederländsk antillisk gulden + NA f. 
+ + + Angolansk kwanza + AOA + + + Angolansk kwanza (1977-1990) + AOK + + + Angolansk ny kwanza (1990-2000) + AON + + + Angolansk kwanza – Reajustado (1995-1999) + AOR + + + Angolansk escudo + AOS + + + Argentinsk austral + ARA + + + Argentinsk peso – Moneda nacional + ARM + + + Argentinsk peso (1983-1985) + ARP + + + Argentinsk peso + Arg$ + + + Österrikisk schilling + ATS + + + Australisk dollar + $A + + + Australiskt pund + AUP + + + Aruba-florin + AWG + + + Azerbajdzansk manat + AZM + + + Bosnisk-Hercegovinsk dinar + BAD + + + Bosnisk-Hercegovinsk konvertibel mark + KM + + + Bosnisk-Hercegovinsk ny dinar + BAN + + + Barbadisk dollar + BDS$ + + + Bangladeshisk taka + Tk + + + Belgisk franc (konvertibel) + BEC + + + Belgisk franc + BF + + + Belgisk franc (finansiell) + BEL + + + Bulgarisk hård lev + lev + + + Bulgarisk socialistisk lev + BGM + + + Bulgarisk ny lev + BGN + + + Bulgarisk lev (1879-1952) + BGO + + + Bulgarisk lev – Foreign Exchange Certificates + BGX + + + Bahrainsk dinar + BD + + + Burundisk franc + Fbu + + + Bermuda-dollar + Ber$ + + + Bermuda-pund + BMP + + + Bruneisk dollar + BND + + + Boliviansk peso + BOP + + + Boliviansk mvdol + BOV + + + Brasiliansk cruzeiro novo (1967-1986) + BRB + + + Brasiliansk cruzado + BRC + + + Brasiliansk cruzeiro (1990-1993) + BRE + + + Brasiliansk real + R$ + + + Brasiliansk cruzado novo + BRN + + + Brasiliansk cruzeiro + BRR + + + Brasiliansk cruzeiro (1942-1967) + BRZ + + + Bahamansk dollar + BSD + + + Bahamanskt pund + BSP + + + Bhutanesisk ngultrum + Nu + + + Bhutanesisk rupie + BTR + + + Burmesisk kyat + BUK + + + Burmesisk rupee + BUR + + + Botswansk pula + BWP + + + Vitrysk ny rubel (1994-1999) + BYB + + + Vitrysk rubel (1992-1994) + BYL + + + Vitrysk rubel + Rbl + + + Belizisk dollar + BZ$ + + + Brittiska Honduras-dollar + BZH + + + Kanadensisk dollar + Can$ + + + Kongolesisk franc congolais + CDF + + + Kongolesisk franc + CDG + + + Kongolesisk zaire + CDL + + + Centralafrikanska CFA-franc + CFF + + + 
Schweizisk franc + SwF + + + Cooköisk dollar + CKD + + + Chilensk condor + CLC + + + Chilensk escudo + CLE + + + Chilensk unidades de fomento + CLF + + + Chilensk peso + Ch$ + + + Kamerunsk CFA-franc + CMF + + + Kinesisk jen min piao yuan + CNP + + + Kinesiska US Dollar Foreign Exchange Certificates + CNX + + + Kinesisk yuan renminbi + Y + + + Colombiansk papperspeso + COB + + + Kongolesisk CFA-franc + COF + + + Colombiansk peso + Col$ + + + Costarikansk colon + C + + + Tjeckisk koruna + CSC + + + Tjeckisk hård koruna + CSK + + + Kubansk peso + CUP + + + Kubansk Foreign Exchange Certificates + CUX + + + Kapverdisk escudo + CVEsc + + + Curacaoisk gulden + CWG + + + Cypriotiskt pund + £C + + + Tjeckisk koruna + CZK + + + Östtysk mark + DDM + + + Tysk mark + DEM + + + Tysk sperrmark + DES + + + Djiboutisk franc + DF + + + Dansk krona + DKr + + + Dominikansk peso + RD$ + + + Algerisk dinar + DA + + + Algerisk ny franc + DZF + + + Algerisk franc germinal + DZG + + + Ecuadoriansk sucre + ECS + + + Ecuadoriansk Unidad de Valor Constante (UVC) + ECV + + + Estnisk krona + EEK + + + Egyptisk pund + EGP + + + Eritreansk nakfa + ERN + + + Spansk peseta + ESP + + + Etiopisk birr + Br + + + Etiopisk dollar + ETD + + + Euro + + + + Finska mark + FIM + + + Finska mark (1860-1962) + FIN + + + Fijiansk dollar + F$ + + + Fijianskt pund + FJP + + + Falklandsöarnas pund + FKP + + + Färöisk kronar + FOK + + + Fransk franc + FRF + + + Fransk Franc Germinal/Franc Poincare + FRG + + + Gabonesisk CFA-franc + GAF + + + Britiskt pound sterling + £ + + + Georgisk kupon larit + GEK + + + Georgisk lari + lari + + + Ghanansk cedi + GHC + + + Ghanansk gammal cedi + GHO + + + Ghananskt pund + GHP + + + Ghanansk omvärderad cedi + GHR + + + Gibraltiskt pund + GIP + + + Grönländsk krona + GLK + + + Gambisk dalasi + GMD + + + Gambiskt pund + GMP + + + Guineansk franc + GF + + + Guineansk franc (1960-1972) + GNI + + + Guineansk syli + GNS + + + Guadeloupisk franc + GPF + + + Ekvatorialguineansk ekwele 
guineana + GQE + + + Ekvatorialguineansk franco + GQF + + + Ekvatorialguineansk peseta guineana + GQP + + + Grekisk drachma + GRD + + + Grekisk ny drachma + GRN + + + Guatemalansk quetzal + Q + + + Franska Guyanas Franc Guiana + GUF + + + Portugisisk guineas escudo + GWE + + + Portugisiska guineas mil reis + GWM + + + Guinea-Bissauisk peso + GWP + + + Guyanansk dollar + G$ + + + Hongkong-dollar + HK$ + + + Hoduransk lempira + L + + + Kroatisk dinar + HRD + + + Kroatisk kuna + HRK + + + Haitisk gourde + HTG + + + Ungersk forint + Ft + + + Nordirländskt pund + IBP + + + Indonesisk nica gulden + IDG + + + Indonesisk java rupiah + IDJ + + + Indonesisk ny rupiah + IDN + + + Indonesisk rupiah + Rp + + + Irländskt pund + IR£ + + + Israelisk shekel + ILL + + + Israeliskt pund + ILP + + + Israelisk ny shekel + ILS + + + Isle of Man pund sterling + IMP + + + Indisk rupie + =0#Rs.|1#Re.|1<Rs. + + + Irakisk dinar + ID + + + Iransk rial + RI + + + Isländsk krona + ISK + + + Italiensk lira + + + + Jersey pound sterling + JEP + + + Jamaicansk dollar + J$ + + + Jamaicanskt pund + JMP + + + Jordansk dinar + JD + + + Japansk yen + ¥ + + + Kenyansk shilling + K Sh + + + Kirgizistansk som + som + + + Kambodjansk gammal riel + KHO + + + Kambodjansk riel + CR + + + Kiribatisk dollar + KID + + + Komorisk franc + CF + + + Nordkoreansk won + KPP + + + Nordkoreansk won + KPW + + + Sydkoreansk hwan + KRH + + + Sydkoreansk gammal won + KRO + + + Sydkoreansk won + KRW + + + Kuwaitisk dinar + KD + + + Caymanöisk dollar + KYD + + + Kazakisk rubel + KZR + + + Kazakisk tenge + T + + + Laotisk kip + LAK + + + Libanesiskt pund + LL + + + Liechtensteinsk franc + LIF + + + Srilankesisk rupie + SL Re + + + Ceylonesisk rupie + LNR + + + Liberisk dollar + LRD + + + Lesothisk loti + M + + + Lettisk lita + LTL + + + Lettisk talonas + LTT + + + Luxemburgsk franc + LUF + + + Lettisk lats + LVL + + + Lettisk rubel + LVR + + + Libyska brittiska militärmyndighetens lira + LYB + + + Libysk dinar + LD + + + 
Libyskt pund + LYP + + + Marockansk dirham + MAD + + + Marockansk franc + MAF + + + Monegaskisk franc nouveau + MCF + + + Monegaskisk franc germinal + MCG + + + Moldavisk leu-kupong + MDC + + + Moldavisk leu + MDL + + + Moldavisk rubelkupong + MDR + + + Madagaskisk ariary + MGA + + + Madagaskisk franc + MGF + + + Marshallöisk dollar + MHD + + + Makedonisk denar + MDen + + + Makedonisk denar (1992-1993) + MKN + + + Malisk franc + MLF + + + Myanmarisk kyat + MMK + + + Myanmarisk dollar – Foreign Exchange Certificates + MMX + + + Mongolisk tugrik + Tug + + + Macaoisk pataca + MOP + + + Martiniqueisk franc + MQF + + + Mauretansk ouguiya + UM + + + Maltesisk lira + Lm + + + Maltesiskt pund + MTP + + + Mauritisk rupie + MUR + + + Maldivisk rupie + MVP + + + Maldivisk rufiyaa + MVR + + + Malawisk kwacha + MK + + + Malawiskt pund + MWP + + + Mexikansk peso + MEX$ + + + Mexikansk silverpeso (1861-1992) + MXP + + + Mexikansk Unidad de Inversion (UDI) + MXV + + + Malaysisk ringgit + RM + + + Moçambikisk escudo + MZE + + + Moçambikisk metical + Mt + + + Namibisk dollar + N$ + + + Nya Kaledonisk franc germinal + NCF + + + Nigeriansk naira + NGN + + + Nigerianskt pund + NGP + + + Nya Hebridiska CFP-franc + NHF + + + Nicaraguansk córdoba + NIC + + + Nicaraguansk guldcordoba + NIG + + + Nicaraguansk córdoba oro + NIO + + + Nederländsk gulden + NLG + + + Norsk krona + NKr + + + Nepalesisk rupie + Nrs + + + Nyzeeländsk dollar + $NZ + + + Nyzeeländsk pund + NZP + + + Omansk rial + RO + + + Omansk rial saidi + OMS + + + Panamansk balboa + PAB + + + Transdniestrisk rubekupong + PDK + + + Transdniestrisk ny rubel + PDN + + + Transdniestrisk rubel + PDR + + + Peruansk inti + PEI + + + Peruansk sol nuevo + PEN + + + Peruansk sol + PES + + + Papuansk kina + PGK + + + Filippinsk peso + PHP + + + Pakistansk rupie + Pra + + + Polsk zloty + Zl + + + Polsk US-dollar Foreign Exchange Certificates + PLX + + + Polsk zloty (1950-1995) + PLZ + + + Palestinskt pund + PSP + + + Portugisisk conto + PTC 
+ + + Portugisisk escudo + PTE + + + Paraguaysk guarani + PYG + + + Qatarisk rial + QR + + + Reunion-franc + REF + + + Rumänsk leu + leu + + + Rumänsk ny leu + RON + + + Rysk rubel + RUB + + + Rysk rubel (1991-1998) + RUR + + + Rwandisk franc + RWF + + + Saudisk riyal + SRl + + + Saudisk sovereign riyal + SAS + + + Salomonöisk dollar + SI$ + + + Seychellisk rupie + SR + + + Sudanesisk dinar + SDD + + + Sudanesiskt pund + SDP + + + Svensk krona + kr + + + Singaporiansk dollar + S$ + + + Saint Helena-pund + SHP + + + Slovensk tolar bons + SIB + + + Slovensk tolar + SIT + + + Slovakisk koruna + Sk + + + Sierraleonsk leone + SLL + + + Sanmarinsk lira + SML + + + Somalisk shilling + So. Sh. + + + Somaliländsk shilling + SQS + + + Surinamesisk gulden + Sf + + + Skottskt pund + SSP + + + São Tomé och Príncipe-dobra + Db + + + São Tomé och Príncipe-escudo + STE + + + Sovjetisk ny rubel + SUN + + + Sovjetisk rubel + SUR + + + Salvadoransk colon + SVC + + + Syriskt pund + LS + + + Swaziländsk lilangeni + E + + + Turks and Caicos-crown + TCC + + + Tchadisk CFA-franc + TDF + + + Thailändsk baht + THB + + + Tadzjikisk rubel + TJR + + + Tadzjikisk somoni + TJS + + + Turkmensk manat + TMM + + + Tunisisk dinar + TND + + + Tongansk paʻanga + T$ + + + Tongansk pound sterling + TOS + + + Timoriansk escudo + TPE + + + Timoriansk pataca + TPP + + + Turkisk lira + TL + + + Trinidadisk dollar + TT$ + + + Trinidadisk gammal dollar + TTO + + + Tuvaluansk dollar + TVD + + + Taiwanesisk ny dollar + NT$ + + + Tanzanisk shilling + T Sh + + + Ukrainsk hryvnia + UAH + + + Ukrainsk karbovanetz + UAK + + + Ugandisk shilling (1966-1987) + UGS + + + Ugandisk shilling + U Sh + + + US-dollar + US$ + + + US-dollar (nästa dag) + USN + + + US-dollar (samma dag) + USS + + + Uruguayansk peso fuerte + UYF + + + Uruguayansk peso (1975-1993) + UYP + + + Uruguayansk peso uruguayo + Ur$ + + + Uzbekisk coupon som + UZC + + + Uzbekisk sum + UZS + + + Heliga Stolen-lira + VAL + + + Nordvietnamesisk piastre dong 
viet + VDD + + + Nordvietnamesisk ny dong + VDN + + + Nordvietnamesisk viet minh piastre dong viet + VDP + + + Venezuelansk bolivar + Be + + + Jungfruöisk dollar + VGD + + + Vietnamesisk dong + VND + + + Vietnamesisk ny dong + VNN + + + Vietnamesisk dong + VNR + + + Vietnamesisk nationell dong + VNS + + + Vanuatisk vatu + VT + + + Västsamoanskt pund + WSP + + + Västsamoansk tala + WST + + + CFA Franc BEAC + XAF + + + Östkaribisk dollar + EC$ + + + CFA Nouveau Franc + XCF + + + CFA Franc BCEAEC + XEF + + + Fransk guldfranc + XFO + + + French UIC-Franc + XFU + + + Islamisk dinar + XID + + + French Metropolitan Nouveau Franc + XMF + + + Fransk Antillisk CFA-franc + XNF + + + CFA Franc BCEAO + XOF + + + CFP-franc + CFPF + + + Jemenitisk dinar + YDD + + + Jemenitisk imadi riyal + YEI + + + Jemenitisk rial + YRl + + + Jugoslavisk hård dinar + YUD + + + Jugoslavisk Federation-dinar + YUF + + + Jugoslavisk 1994 dinar + YUG + + + Jugoslavisk noviy dinar + YUM + + + Jugoslavisk konvertibel dinar + YUN + + + Jugoslavisk oktober dinar + YUO + + + Jugoslavisk reformed dinar + YUR + + + Sydafrikansk rand (finansiell) + ZAL + + + Sydafrikansk rand + ZAP + + + Sydafrikansk rand + R + + + Zambisk kwacha + ZMK + + + Zambiskt pund + ZMP + + + Zairisk ny zaire + ZRN + + + Zairisk zaire + ZRZ + + + Zimbabwisk dollar + Z$ + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/sv_FI.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/sv_FI.xml new file mode 100644 index 0000000..e7fb9a9 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/sv_FI.xml @@ -0,0 +1,40 @@ + + + + + + + + + + + + + + #,##0.###;-#,##0.### + + + + + + + #E0 + + + + + + + #,##0% + + + + + + + #,##0.00 ¤;-#,##0.00 ¤ + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/sv_SE.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/sv_SE.xml new file mode 100644 index 0000000..c6546ae --- /dev/null +++ 
b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/sv_SE.xml @@ -0,0 +1,40 @@ + + + + + + + + + + + + + + #,##0.###;-#,##0.### + + + + + + + #E0 + + + + + + + #,##0% + + + + + + + #,##0.00 ¤;-#,##0.00 ¤ + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/sw.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/sw.xml new file mode 100644 index 0000000..7e1d07d --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/sw.xml @@ -0,0 +1,192 @@ + + + + + + + + + + + Kiswahili + + + Muugano wa Falme za Nchi za Kiarabu + Antigua na Barbuda + Ajentina + Bosnia na Herzegowina + Ubelgiji + Brazili + Visiwa vya Bahama + Kanada + Jamhuri ya Afrika ya Kati + Kongo + Uswisi + Pwani ya Pembe + Kamerun + Uchina + Kolombia + Rasi Verde + Jamhuri ya Czech + Udachi + Jibuti + Udenmarki + Dominika + Jamhuri ya Dominikan + Ekvado + Misri + Uhispania + Uhabeshi + Ufaransa + Uingereza + Guinea ya Ikweta + Kroatia + Hungaria + Uyahudi + Uhindi + Iraki + Uajemi + Barafu + Uitaliani + Jamaika + Ujapani + Kenya + Kampuchea + Visiwa vya Komoro + Saint Kitts na Nevis + Korea ya Kaskazini + Korea ya Kusini + Luksemburg + Moroko + Monako + Visiwa vya Marshall + Meksiko + Malasya + Msumbiji + Nikaragua + Uholanzi + Unorwe + Nepali + Papua Guinea Mpya + Filipino + Ureno + Paragwai + Urusi + Arabuni Saudi + Visiwa vya Solomon + Visiwa vya Shelisheli + Uswidi + Somali + Sao Tome na Principe + Chadi + Timor ya Mashariki + Uturuki + Trinidad na Tobago + Tanzania + Muungano wa Nchi za Amerika + Urugwai + Vatikano + Saint Vincent na Grenadines + Yemeni + Afrika ya Kusini + + + + [a-z] + + + + + + + + Jan + Feb + Mar + Apr + Mei + Jun + Jul + Ago + Sep + Okt + Nov + Des + + + Januari + Februari + Machi + Aprili + Mei + Juni + Julai + Agosti + Septemba + Oktoba + Novemba + Desemba + + + + + + + Jpi + Jtt + Jnn + Jtn + Alh + Iju + Jmo + + + Jumapili + Jumatatu + Jumanne + Jumatano + Alhamisi + Ijumaa + Jumamosi + + + + + + + 
+ + + KK + BK + + + + + + + + Saa za Africa Mashariki + Saa za Africa Mashariki + + + EAT + EAT + + Nairobi + + + + + + + KES + KSh + + + TZS + TSh + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/sw_KE.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/sw_KE.xml new file mode 100644 index 0000000..ba14c8c --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/sw_KE.xml @@ -0,0 +1,40 @@ + + + + + + + + + + + + + + #,##0.###;-#,##0.### + + + + + + + #E0 + + + + + + + #,##0% + + + + + + + ¤#,##0.00;-¤#,##0.00 + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/sw_TZ.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/sw_TZ.xml new file mode 100644 index 0000000..2df6c4d --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/sw_TZ.xml @@ -0,0 +1,40 @@ + + + + + + + + + + + + + + #,##0.###;-#,##0.### + + + + + + + #E0 + + + + + + + #,##0% + + + + + + + #,##0.00 ¤;-#,##0.00 ¤ + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/syr.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/syr.xml new file mode 100644 index 0000000..23d1735 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/syr.xml @@ -0,0 +1,52 @@ + + + + + + + + + + + ܣܘܪܝܝܐ + + + ܣܘܪܝܝܐ + + + + [[:Syrc:]‌‍‏‎] + + + + + + + + ܏ܟܢ ܏ܒ + ܫܒܛ + ܐܕܪ + ܢܝܣܢ + ܐܝܪ + ܚܙܝܪܢ + ܬܡܘܙ + ܐܒ + ܐܝܠܘܠ + ܏ܬܫ ܏ܐ + ܏ܬܫ ܏ܒ + ܏ܟܢ ܏ܐ + + + + + + + + + + SYP + ل.س.‏ + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/syr_SY.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/syr_SY.xml new file mode 100644 index 0000000..ae19447 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/syr_SY.xml @@ -0,0 +1,103 @@ + + + + + + + + + + + + + + + + + + + + + dd MMMM, yyyy + + + + + dd MMMM, yyyy + + + + + dd/MM/yyyy + + + + + dd/MM/yyyy + + + + + + 
+ + h:mm:ss a + + + + + h:mm:ss a + + + + + h:mm:ss + + + + + h:mm + + + + + + + {1} {0} + + + + + + + + + + + #,##0.###;#,##0.###- + + + + + + + #E0 + + + + + + + #,##0% + + + + + + + ¤ #,##0.00;¤ #,##0.00- + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/ta.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/ta.xml new file mode 100644 index 0000000..e56e0e9 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/ta.xml @@ -0,0 +1,440 @@ + + + + + + + + + + + அபார் + அப்காஸின் + ஆப்ரிகன்ஸ் + அம்ஹாரிக் + அரபு + அஸ்ஸாமி + அயமரா + அசர்பாய்ஜானி + பாஷ்கிர்0 + பைலோருஷ்ன் + பல்கேரியன் + பிஹாரி + பிஸ்லாமா + வங்காளம் + திபெத்து + பிரிடன் + காடலான் + கார்சியன் + செக் + வெல்ஷ் + டானிஷ் + ஜெர்மன் + புடானி + கிரேக்கம் + ஆங்கிலம் + எஸ்பரேன்டோ + ஸ்பேனிஷ் + எஸ்டோனியன் + பஸ்க் + பர்ஸியன் + பின்னிஷ் + பிஜி + பைரோஸி + பிரெஞ்சு + பிரிஷியன் + ஐரிஷ் + ஸ்காட்ஸ் காலெக் + கெலிஸியன் + குரானி + குஜராத்தி + ஹொஸா + ஹுப்ரு + இந்தி + கரோஷியன் + ஹங்கேரியன் + ஆர்மேனியன் + இன்டர்லிங்குவா + இந்தோனேஷியன் + இன்டர்லிங்குவா + இனுபெக் + ஐஸ்லென்டிக் + இத்தாலியன் + இனுகிடட் + ஜப்பானீஸ் + ஜாவானீஸ் + கன்னடம் + கசாக் + கிரின்லென்டிக் + கம்போடியன் + கன்னடா + கொரியன் + கொங்கனி + காஷ்மிரி + குர்திஷ் + கிர்கிஷ் + லாதின் + லிங்காலா + லோத்தியன் + லுத்தேனியன் + லேட்வியன் (லேட்டிஷ்) + மலகெஸி + மோரி + மெக்கடோனியன் + மலையாளம் + மங்கோலியன் + மோல்டேவியன் + மராத்தி + மலாய் + மால்டிஸ் + பர்மிஸ் + நாரூ + நேப்பாலி + டச்சு + நார்வேகியன் + ஆகிடியன் + ஒரோம (அபன்) + ஒரியா + பஞ்சாபி + போலிஷ் + பேஷ்டோ (புஷ்டோ) + போர்த்துகீஸ் + கியுசா + ரைட்டோ-ரோமென்ஸ் + கிருந்தி + ரோமேனியன் + தமிழ் + ரஷியன் + கின்யர்வென்டா + சமஸ்கிருதம் + சிந்தி + சென்க்ரோ + செர்போ-க்ரோஷியன் + சிங்களம் + ஸ்லோவெக் + ஸ்லோவினேயின் + ஸெமோன் + ஷோனா + சோமாலி + அல்பெனியன் + சர்பியன் + ஷிஸ்வாதி + ஷெஸ்ஸோதோ + சுடானீஸ் + ஷீவிடிஸ் + சுவாஹிலி + தமிழ் + தெலுங்கு + தாஜிக் + தாய் + டிக்ரின்யா + டர்க்மென் + டாகாலோக் + ஸெட்ஸ்வானா + டோங்கா + டர்கிஷ் + ஸோங்கா + டாடர் + த்திவி + யுகுர் + உக்ரேனியன் + உருது + 
உஸ்பெக் + வியட்நாமிஸ் + ஒலபுக் + ஒலோப் + ஹோஷா + ஈத்திஷ + யோருப்பா + ஜுவாங் + சீனம் + ஜூலூ + + + அன்டோரா + ஐக்கிய அரபு கூட்டாட்சி + ஆப்கானிஸ்தான் + ஆன்டிகுவா பார்புடா + அல்பேனியா + ஆர்மீனியா + அங்கோலா + அர்ஜெண்டினா + ஆஸ்திரியா + ஆஸ்திரேலியா + அஜர்பைஜான் + போஸ்னியா ஹெர்ஸிகோவினா + பார்படோஸ் + பங்களாதேஷ் + பெல்ஜியம் + பர்கினோ பாஸோ + பல்கேரியா + பஹ்ரைன் + புருண்டி + பெனின் + புரூனேய் + பொலிவியா + பிரேஸில் + பஹாமாஸ் + பூடான் + போட்ஸ்வானா + பெலாரூஸ் + பெலிஸ் + கனடா + மத்திய ஆப்ரிக்கக் குடியரசு + காங்கோ + ஸ்விட்சர்லாந்து + ஐவரி கோஸ்ட் + சிலி + கேமரூன் + சீன + கொலம்பியா + கோஸ்டாரிகா + கியூபா + கேப் வெர்டே + சைப்ரஸ் + செக் குடியரசு + ஜெர்மன் + ஜிபௌடி + டென்மார்க் + டொமினிகா + டொமினிகன் குடியரசு + அல்ஜீரியா + ஈக்வடார் + எஸ்டோனியா + எகிப்து + ஸ்பெயின் + எதியோப்பியா + பின்லாந்து + பிஜி + பிரான்ஸ் + காபோன் + பிரிடிஷ் கூட்டரசு + கிரனெடா + ஜார்ஜியா + கானா + காம்பியா + கினி + ஈக்குவிடோரியல் கினி + கிரீஸ் + குவாத்தாமாலா + கினி-பிஸ்ஸாவ் + கயானா + ஹாண்டுராஸ் + குரோசியா + ஹெய்தி + ஹங்கேரி + இந்தோனேஷியா + அயர்லாந்து + இஸ்ரேல் + இந்தியா + இராக் + ஈரான் + ஐஸ்லாந்து + இத்தாலி + ஜமாய்க்கா + ஜொர்டான் + ஜப்பான் + கென்யா + கிர்கிஸ்தான் + கம்போடியா + கிரிபடி + கோமரோஸ் + வட கொரியா + தென் கொரியா + குவைத்து + கஜகஸ்தான் + லாவோஸ் + லெபனான் + லிச்டெண்ஸ்டீன் + இலங்கை + லைபீரியா + லெசோதோ + லிதுவேனியா + லக்ஸ்சம்பர்க் + லாட்வியா + லிப்யா + மொரோக்கோ + மொனாக்கோ + மால்டோவா + மசெடோணியா + மாலீ + மியான்மார் + மங்கோலியா + மால்டா + மாலத்தீவு + மலாவீ + மெக்சிகோ + மலேஷியா + னாமீபியா + நிகாராகுவா + நெதர்லாந்து + நார்வே + நேபாளம் + நௌரு + நியூசிலாந்து + ஓமான் + பணாமா + பெரு + பாப்புவா-நியூகினி + பிலிப்பைன்ஸ் + பாகிஸ்தான் + போலந்து + போர்ச்சுக்கல் + பாரகுவே + காடார் + ருமேனியா + ரஷ்யா + சவூதி அரேபியா + சாலமன் தீவுகள் + ஸ்வீடன் + சிங்கப்பூர் + ஸ்லோவேனியா + ஸ்லோவாகியா + சான்மெரினோ + சூரினாம் + எல் சால்வடார் + சிரியா + சாட் + தாய்லாந்து + தாஜிகிஸ்தான் + துர்க்மெனிஸ்தான் + துனிசியா + தொங்கா + துருக்கி + திரினிடாட் தொபாகோ + துவாலூ + தைவான் + உக்ரைன் + ஐக்கிய அமெரிக்கா குடியரசு + உருகுவே + உஸ்பெகிஸ்தான் + வாடிகன் + 
வெனஜுவேலா + வியட்நாம் + வனுவாட்டு + சமோவா + யேமன் + தென் ஆப்ரிக்கா + ஜிம்பாப்வே + + + + [[:Taml:]‌‍] + + + + + + + + ஜன. + பிப். + மார். + ஏப். + மே + ஜூன் + ஜூலை + ஆக. + செப். + அக். + நவ. + டிச. + + + ஜனவரி + பிப்ரவரி + மார்ச் + ஏப்ரல் + மே + ஜூன் + ஜூலை + ஆகஸ்ட் + செப்டம்பர் + அக்டோபர் + நவம்பர் + டிசம்பர் + + + + + + + ஞா + தி + செ + பு + வி + வெ + + + + ஞாயிறு + திங்கள் + செவ்வாய் + புதன் + வியாழன் + வெள்ளி + சனி + + + + காலை + மாலை + + + கிமு + கிபி + + + + + + + + இந்திய நேரப்படி + இந்திய நேரப்படி + + + IST + IST + + + + + + + + + #,##,##0.###;-#,##,##0.### + + + + + + + #E0 + + + + + + + #,##,##0% + + + + + + + ¤ #,##,##0.00;-¤ #,##,##0.00 + + + + + + INR + ரூ + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/ta_IN.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/ta_IN.xml new file mode 100644 index 0000000..77f867a --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/ta_IN.xml @@ -0,0 +1,99 @@ + + + + + + + + + + + + + + + + + EEEE d MMMM yyyy + + + + + d MMMM yyyy + + + + + dd-MM-yyyy + + + + + d-M-yy + + + + + + + + h:mm:ss a z + + + + + h:mm:ss a z + + + + + h:mm:ss a + + + + + h:mm a + + + + + + + {1} {0} + + + + + + + + + + + ##,##,##0.###;-##,##,##0.### + + + + + + + #E0 + + + + + + + ##,##,##0% + + + + + + + ¤ ##,##,##0.00;-¤ ##,##,##0.00 + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/te.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/te.xml new file mode 100644 index 0000000..4ab6829 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/te.xml @@ -0,0 +1,104 @@ + + + + + + + + + + + తెలుగు + + + భారత దేళ౦ + + + + [[:Telu:]‌‍] + + + + + + + + జనవరి + ఫిబ్రవరి + మార్చి + ఏప్రిల్ + మే + జూన్ + జూలై + ఆగస్టు + సెప్టెంబర్ + అక్టోబర్ + నవంబర్ + డిసెంబర్ + + + జనవరి + ఫిబ్రవరి + మార్చి + ఏప్రిల్ + మే + జూన్ + జూలై + ఆగస్టు + సెప్టెంబర్ + అక్టోబర్ + నవంబర్ + డిసెంబర్ + + + + + + + ఆది + సోమ + మంగళ 
+ బుధ + గురు + శుక్ర + శని + + + ఆదివారం + సోమవారం + మంగళవారం + బుధవారం + గురువారం + శుక్రవారం + శనివారం + + + + పూర్వాహ్న + అపరాహ్న + + + + + + . + , + ; + % + + # + + + - + E + + + + + + + INR + రూ. + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/te_IN.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/te_IN.xml new file mode 100644 index 0000000..6687a7b --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/te_IN.xml @@ -0,0 +1,99 @@ + + + + + + + + + + + + + + + + + EEEE d MMMM yyyy + + + + + d MMMM yyyy + + + + + dd-MM-yyyy + + + + + dd-MM-yy + + + + + + + + h:mm:ss a z + + + + + h:mm:ss a z + + + + + h:mm:ss a + + + + + h:mm a + + + + + + + {1} {0} + + + + + + + + + + + ##,##,##0.###;-##,##,##0.### + + + + + + + #E0 + + + + + + + ##,##,##0% + + + + + + + ¤ ##,##,##0.00;-¤ ##,##,##0.00 + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/th.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/th.xml new file mode 100644 index 0000000..eba5d9d --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/th.xml @@ -0,0 +1,545 @@ + + + + + + + + + + + อาฟา + แอบกาเซีย + แอฟริกัน + อัมฮาริค + อาระบิค + อัสสัมมิส + ไอมารา + อาเซอร์ไบจานี + บาสช์กีร์ + บายโลรัสเซีย + บัลแกเรีย + บิฮารี + บิสลามา + เบนการี + ทิเบต + บรีทัน + แคตาแลน + คอร์ซิกา + เช็ค + เวลส์ + เดนมาร์ก + เยอรมัน + ภูฐานี + กรีก + อังกฤษ + เอสเปอรันโต + สเปน + เอสโตเนีย + แบสก์ + เปอร์เซีย + ฟิน + ฟิจิ + ฟาโรส + ฝรั่งเศส + ฟรีสแลนด์ + ไอริช + สก็อตส์เกลิค + กะลีเชีย + กัวรานี + กูจาราติ + โฮซา + ยิว + ฮีนดิ + โครเอเทีย + ฮังการี + อาร์มีเนีย + อินเตอร์ลิงกวา + อินโดนีเชีย + อินเตอร์ลิงค์ + ไอนูเปียก + ไอซ์แลนด์ดิค + อิตาลี + ไอนุกติตัท + ญี่ปุ่น + ชวา + จอร์เจียน + คาซัค + กรีนแลนด์ดิค + เขมร + กานาดา + เกาหลี + คัชมีรี + เคิด + เคอร์กิซ + ละติน + ลิงกาลา + ลาว + ลิธัวเนีย + แลตเวีย (เลททิสช์) + มาลากาซี + เมารี + แมซีโดเนีย + แมละยาลัม + มองโกล + โมดาเวีย + 
มาราที + มลายู + มอลตา + พม่า + นอรู + เนปาล + ฮอลันดา + นอร์เวย์ + ออกซิทัน + โอโรโม (อาฟาน) + โอริยา + ปัญจาป + โปแลนด์ + พาสช์โต (พุสช์โต) + โปรตุเกส + คิวชัว + เรโต-โรแมนซ์ + คิรันดี + โรมัน + รัสเซีย + คินยาวันดา + สันสกฤต + ซินดิ + สันโค + เซอร์โบ-โครเอเทียน + สิงหล + สโลวัค + สโลเวเนีย + ซามัว + โซนา + โซมาลี + แอลเบเนีย + เซอร์เบีย + ซีสวาติ + เซโสโท + ซันดานีส + สวีเดน + ซวาฮิรี + ทมิฬ + ทิลูกู + ทาจิค + ไทย + ทิกรินยา + เติร์กเมน + ตากาล็อก + เซตสวานา + ทองก้า + ตุรกี + ซองกา + ตาด + ทวี + อุยกัว + ยูเครน + อิรดู + อุสเบค + เวียดนาม + โวลาพุก + วูลอฟ + โซสา + ยีดิช + โยรูบา + จวง + จีน + ซูลู + + + อันดอร์รา + สหรัฐอาหรับเอมิเรตส์ + อัฟกานิสถาน + อันกิล่า + แอลเบเนีย + อาร์มีเนีย + เนเธอร์แลนด์แอนทิลล์ + อันโกลา + อาร์เจนติน่า + ออสเตรีย + ออสเตรเลีย + อารูบา + อาเซอร์ไบจัน + บอสเนีย และ เฮิร์ซโกวิเนีย + บาร์บาดอส + บังคลาเทศ + เบลเยี่ยม + เบอร์กินาฟาโซ + บัลแกเรีย + บาห์เรน + บูรันดิ + เบนิน + เบอร์มิวด้า + บรูไน + โบลิเวีย + บราซิล + บาฮามาส + ภูฐาน + บอตสวานา + เบลลารัส + เบลิซ + แคนาดา + สาธารณรัฐแอฟริกากลาง + คองโก + สวิสเซอร์แลนด์ + ฝั่งทะเลไอวอริ + ชิลี + คาเมรูน + จีน + โคลัมเบีย + คอสตาริก้า + คิวบา + เคพเวอร์ด + ไซปรัส + สาธารณรัฐเช็ค + เยอรมนี + ดิโบติ + เดนมาร์ก + โดมินิก้า + สาธารณรัฐโดมินิกัน + แอลจีเรีย + เอกวาดอร์ + เอสโตเนีย + อียิปต์ + ซาฮาร่าตะวันตก + อิริทรี + สเปน + เอธิโอเปีย + ฟินแลนด์ + ฟิจิ + ไมโครนิเซีย + ฝรั่งเศส + กาบอน + สหราชอาณาจักร + จอร์เจีย + เฟร็นชกิวน่า + กาน่า + แกมเบีย + กิวนี + กัวเดอลูป + เอควาโทเรียลกินี + กรีซ + กัวเตมาลา + กิวนี-บิสโซ + กูยาน่า + ฮ่องกง + ฮอนดูรัส + โครเอเชีย + ไฮตี + ฮังการี + อินโดนีเซีย + ไอร์แลนด์ + อิสราเอล + อินเดีย + อิรัก + อิหร่าน + ไอซแลนด์ + อิตาลี + จาไมก้า + จอร์แดน + ญี่ปุ่น + เคนย่า + เคอร์กิสถาน + กัมพูชา + คิรีบาติ + โคโมรอส + เกาหลีเหนือ + เกาหลีใต้ + คูเวต + คาซัคสถาน + ลาว + เลบานอน + ไลเทนสไตน์ + ศรีลังกา + ลิเบอร์เลีย + เลโซโท + ลิเทอร์เนีย + ลักซ์เซมเบอร์ก + ลาตเวีย + ลิเบีย + โมรอคโค + โมนาโค + โมลโดวา + มาดากาสก้า + แมซีโดเนีย + มาลี + สหภาพพม่า + มองโกเลีย + มาเก๊า + 
มาร์ตินิก + มอริทาเนีย + มอนต์เซอราต + มัลต้า + มอริเตียส + แม็กซิโก + มาเลเซีย + โมแซมบิค + นามิเบีย + นิวคาลิโดเนีย + ไนเจอร์ + ไนจีเรีย + นิคารากัว + เนเธอร์แลนด์ + นอร์เวย์ + เนปาล + นียู + นิวซีแลนด์ + โอมาน + ปานามา + เปรู + เฟร็นชโพลินีเซีย + ปาปัวนิวกีนี + ฟิลิปปินส์ + ปากีสถาน + โปแลนด์ + เปอร์โตริโก + โปตุกัล + ปารากวัย + กาตาร์ + รูเมเนีย + รัสเซีย + ราวัลดา + ซาอุดิอาระเบีย + เซย์แชลล์ + ซูดาน + สวีเดน + สิงคโปร์ + สโลวิเนีย + สโลวาเกีย + เซียร์ร่าลีออน + ซินีกัล + โซมาเลีย + เซอร์เบีย + ซูรินามิ + เอลซาวาดอร์ + ซีเรีย + สวาซิแลนด์ + ชาด + อาณาเขตทางใต้ของฝรั่งเศส + โตโก + ประเทศไทย + ทาจิกิสถาน + โทกิโล + ติมอร์ตะวันออก + เติร์กเมนิสถาน + ตูนิเซีย + ทองก้า + ตุรกี + ทรินิแดด และโทบาโก + ไต้หวัน + ทานซาเนีย + ยูเครน + อูกานดา + สหรัฐอเมริกา + อุรูกวัย + อุซเบกิสถาน + วาติกัน + เวเนซูเอล่า + บริทิชเวอร์จินไอส์แลนด์ + ยูเอสเวอร์จินไอส์แลนด์ + เวียดนาม + วานัวตู + เยเมน + มายอต + ยูโกสลาเวีย + แอฟริกาใต้ + แซมเบีย + ซิมบาบเว + + + + [:Thai:] + + + GanjkHmsSEDFwWxhKzAeugXZ + + + + + พ.ศ. + + + + + + + EEEE'ที่ 'd MMMM G yyyy + + + + + d MMMM yyyy + + + + + d MMM yyyy + + + + + d/M/yyyy + + + + + + + + H' นาฬิกา 'm' นาที 'ss' วินาที' + + + + + H' นาฬิกา 'm' นาที' + + + + + H:mm:ss + + + + + H:mm + + + + + + + {1}, {0} + + + + + + + + + ม.ค. + ก.พ. + มี.ค. + เม.ย. + พ.ค. + มิ.ย. + ก.ค. + ส.ค. + ก.ย. + ต.ค. + พ.ย. + ธ.ค. + + + มกราคม + กุมภาพันธ์ + มีนาคม + เมษายน + พฤษภาคม + มิถุนายน + กรกฎาคม + สิงหาคม + กันยายน + ตุลาคม + พฤศจิกายน + ธันวาคม + + + + + + + อา. + จ. + อ. + พ. + พฤ. + ศ. + ส. + + + วันอาทิตย์ + วันจันทร์ + วันอังคาร + วันพุธ + วันพฤหัสบดี + วันศุกร์ + วันเสาร์ + + + + ก่อนเที่ยง + หลังเที่ยง + + + ปีก่อนคริสต์กาลที่ + ค.ศ. 
+ + + + + + + EEEE'ที่ 'd MMMM G yyyy + + + + + d MMMM yyyy + + + + + d MMM yyyy + + + + + d/M/yyyy + + + + + + + + H' นาฬิกา 'm' นาที 'ss' วินาที' + + + + + H' นาฬิกา 'm' นาที' + + + + + H:mm:ss + + + + + H:mm + + + + + + + {1}, {0} + + + + + + + + + + บาท + ฿ + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/th_TH.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/th_TH.xml new file mode 100644 index 0000000..4693044 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/th_TH.xml @@ -0,0 +1,40 @@ + + + + + + + + + + + + + + #,##0.###;-#,##0.### + + + + + + + #E0 + + + + + + + #,##0% + + + + + + + ¤#,##0.00;¤-#,##0.00 + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/ti.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/ti.xml new file mode 100644 index 0000000..b92e786 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/ti.xml @@ -0,0 +1,362 @@ + + + + + + + + + + + አፋርኛ + አብሐዚኛ + አፍሪቃንስኛ + አማርኛ + ዐርቢኛ + አሳሜዛዊ + አያማርኛ + አዜርባይጃንኛ + ባስኪርኛ + ቤላራሻኛ + ቡልጋሪኛ + ቢሃሪ + ቢስላምኛ + በንጋሊኛ + ትበትንኛ + ብሬቶንኛ + ብሊን + ካታላንኛ + ኮርሲካኛ + ቼክኛ + ወልሽ + ዴኒሽ + ጀርመን + ድዞንግኻኛ + ግሪክኛ + እንግሊዝኛ + ኤስፐራንቶ + ስፓኒሽ + ኤስቶኒአን + ባስክኛ + ፐርሲያኛ + ፊኒሽ + ፊጂኛ + ፋሮኛ + ፈረንሳይኛ + ፍሪስኛ + አይሪሽ + እስኮትስ ጌልክኛ + ግዕዝኛ + ጋለጋኛ + ጓራኒኛ + ጉጃርቲኛ + ሃውሳኛ + ዕብራስጥ + ሐንድኛ + ክሮሽያንኛ + ሀንጋሪኛ + አርመናዊ + ኢንቴርሊንጓ + እንዶኒሲኛ + እንተርሊንግወ + እኑፒያቅኛ + አይስላንድኛ + ጣሊያንኛ + እኑክቲቱትኛ + ጃፓንኛ + ጃቫንኛ + ጊዮርጊያን + ካዛክኛ + ካላሊሱትኛ + ክመርኛ + ካናዳኛ + ኮሪያኛ + ካሽሚርኛ + ኩርድሽኛ + ኪርጊዝኛ + ላቲንኛ + ሊንጋላኛ + ላውስኛ + ሊቱአኒያን + ላትቪያን + ማላጋስኛ + ማዮሪኛ + ማከዶኒኛ + ማላያላምኛ + ሞንጎላዊኛ + ሞልዳቫዊና + ማራዚኛ + ማላይኛ + ማልቲስኛ + ቡርማኛ + ናኡሩ + ኔፓሊኛ + ደች + ኖርዌጂያን + ኦኪታንኛ + ኦሮምኛ + ኦሪያኛ + ፓንጃቢኛ + ፖሊሽ + ፑሽቶኛ + ፖርቱጋሊኛ + ኵቿኛ + ሮማንስ + ሩንዲኛ + ሮማኒያን + ራሽኛ + ኪንያርዋንድኛ + ሳንስክሪትኛ + ሲንድሂኛ + ሳንጎኛ + ስንሃልኛ + ሲዳምኛ + ስሎቫክኛ + ስሎቪኛ + ሳሞአኛ + ሾናኛ + ሱማልኛ + ልቤኒኛ + ሰርቢኛ + ስዋቲኛ + ሶዞኛ + ሱዳንኛ + ስዊድንኛ + ስዋሂሊኛ + ታሚልኛ + ተሉጉኛ + ታጂኪኛ + ታይኛ + ትግርኛ + ትግረ + ቱርክመንኛ + ታጋሎገኛ + ጽዋናዊኛ + ቶንጋ + ቱርክኛ 
+ ጾንጋኛ + ታታርኛ + ትዊኛ + ኡዊግሁርኛ + ዩክረኒኛ + ኡርዱኛ + ኡዝበክኛ + ቪትናምኛ + ቮላፑክኛ + ዎሎፍኛ + ዞሳኛ + ይዲሻዊኛ + ዮሩባዊኛ + ዡዋንግኛ + ቻይንኛ + ዙሉኛ + + + አንዶራ + የተባበሩት አረብ ኤምሬትስ + አልባኒያ + አርሜኒያ + ኔዘርላንድስ አንቲልስ + አርጀንቲና + ኦስትሪያ + አውስትሬሊያ + አዘርባጃን + ቦስኒያ እና ሄርዞጎቪኒያ + ባርቤዶስ + ቤልጄም + ቡልጌሪያ + ባህሬን + ቤርሙዳ + ቦሊቪያ + ብራዚል + ቡህታን + ቤላሩስ + ቤሊዘ + ኮንጎ + የመካከለኛው አፍሪካ ሪፐብሊክ + ስዊዘርላንድ + ቺሊ + ካሜሩን + ቻይና + ኮሎምቢያ + ኬፕ ቬርዴ + ሳይፕረስ + ቼክ ሪፑብሊክ + ጀርመን + ዴንማርክ + ዶሚኒካ + ዶሚኒክ ሪፑብሊክ + አልጄሪያ + ኢኳዶር + ኤስቶኒያ + ግብጽ + ምዕራባዊ ሳህራ + ኤርትራ + ስፔን + ኢትዮጵያ + ፊንላንድ + ፊጂ + ሚክሮኔዢያ + እንግሊዝ + ጆርጂያ + የፈረንሳይ ጉዊአና + ጋምቢያ + ጊኒ + ኢኳቶሪያል ጊኒ + ግሪክ + ቢሳዎ + ጉያና + ሆንግ ኮንግ + ክሮኤሽያ + ሀይቲ + ሀንጋሪ + ኢንዶኔዢያ + አየርላንድ + እስራኤል + ህንድ + ኢራቅ + አይስላንድ + ጣሊያን + ጃማይካ + ጆርዳን + ጃፓን + ካምቦዲያ + ኮሞሮስ + ደቡብ ኮሪያ + ሰሜን ኮሪያ + ክዌት + ሊባኖስ + ሊቱዌኒያ + ላትቪያ + ሊቢያ + ሞሮኮ + ሞልዶቫ + ማከዶኒያ + ሞንጎሊያ + ማካዎ + ሞሪቴኒያ + ማልታ + ማሩሸስ + ሜክሲኮ + ማሌዢያ + ናሚቢያ + ኒው ካሌዶኒያ + ናይጄሪያ + ኔዘርላንድ + ኖርዌ + ኔፓል + ኒው ዚላንድ + ፔሩ + የፈረንሳይ ፖሊኔዢያ + ፓፑዋ ኒው ጊኒ + ፖላንድ + ፖርታ ሪኮ + ሮሜኒያ + ራሺያ + ሳውድአረቢያ + ሱዳን + ስዊድን + ሲንጋፖር + ስሎቬኒያ + ስሎቫኪያ + ሴኔጋል + ሱማሌ + ሰርቢያ + ሲሪያ + ቻድ + የፈረንሳይ ደቡባዊ ግዛቶች + ታይላንድ + ታጃኪስታን + ምስራቅ ቲሞር + ቱኒዚያ + ቱርክ + ትሪኒዳድ እና ቶባጎ + ታንዛኒያ + ዩጋንዳ + አሜሪካ + ዩዝበኪስታን + ቬንዙዌላ + የእንግሊዝ ድንግል ደሴቶች + የአሜሪካ ቨርጂን ደሴቶች + የመን + ዩጎዝላቪያ + ደቡብ አፍሪካ + ዛምቢያ + + + + [:Ethi:] + + + + + + + + ጃንዩ + ፌብሩ + ማርች + ኤፕረ + ሜይ + ጁን + ጁላይ + ኦገስ + ሴፕቴ + ኦክተ + ኖቬም + ዲሴም + + + ጃንዩወሪ + ፌብሩወሪ + ማርች + ኤፕረል + ሜይ + ጁን + ጁላይ + ኦገስት + ሴፕቴምበር + ኦክተውበር + ኖቬምበር + ዲሴምበር + + + + + + + ሰንበ + ሰኑይ + ሠሉስ + ረቡዕ + ኃሙስ + ዓርቢ + ቀዳም + + + ሰንበት + ሰኑይ + ሠሉስ + ረቡዕ + ኃሙስ + ዓርቢ + ቀዳም + + + + + + + + ንጉሆ ሰዓተ + ድሕር ሰዓት + + + ዓ/ዓ + ዓ/ም + + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/ti_ER.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/ti_ER.xml new file mode 100644 index 0000000..130185e --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/ti_ER.xml @@ -0,0 +1,159 @@ + + + + + + + + + + + + + + + + ጥሪ + ለካቲ + መጋቢ + ሚያዝ + ግንቦ + ሰነ + ሓምለ + ነሓሰ + መስከ + ጥቅም + ሕዳር + ታሕሳ + + + ጥሪ + ለካቲት + መጋቢት + 
ሚያዝያ + ግንቦት + ሰነ + ሓምለ + ነሓሰ + መስከረም + ጥቅምቲ + ሕዳር + ታሕሳስ + + + + + + + ሰንበ + ሰኑይ + ሰሉስ + ረቡዕ + ሓሙስ + ዓርቢ + ቀዳም + + + ሰንበት + ሰኑይ + ሰሉስ + ረቡዕ + ሓሙስ + ዓርቢ + ቀዳም + + + + + + + + EEEE፡ dd MMMM መዓልቲ yyyy G + + + + + dd MMMM yyyy + + + + + dd-MMM-yy + + + + + dd/MM/yy + + + + + + + + h:mm:ss a + + + + + h:mm:ss a + + + + + h:mm:ss a + + + + + h:mm a + + + + + + + {1} {0} + + + + + + + + + + + #,##0.###;-#,##0.### + + + + + + + #E0 + + + + + + + #,##0% + + + + + + + ¤#,##0.00;-¤#,##0.00 + + + + + + ERN + $ + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/ti_ET.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/ti_ET.xml new file mode 100644 index 0000000..ca3a665 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/ti_ET.xml @@ -0,0 +1,105 @@ + + + + + + + + + + + + + + + + + EEEE፣ dd MMMM መዓልቲ yyyy G + + + + + dd MMMM yyyy + + + + + dd-MMM-yy + + + + + dd/MM/yy + + + + + + + + h:mm:ss a + + + + + h:mm:ss a + + + + + h:mm:ss a + + + + + h:mm a + + + + + + + {1} {0} + + + + + + + + + + + #,##0.###;-#,##0.### + + + + + + + #E0 + + + + + + + #,##0% + + + + + + + ¤#,##0.00;-¤#,##0.00 + + + + + + ETB + $ + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/tig.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/tig.xml new file mode 100644 index 0000000..7c10288 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/tig.xml @@ -0,0 +1,371 @@ + + + + + + + + + + + am + አፋርኛ + አብሐዚኛ + አፍሪቃንስኛ + አምሐረኛ + ዐርቢኛ + አሳሜዛዊ + አያማርኛ + አዜርባይጃንኛ + ባስኪርኛ + ቤላራሻኛ + ቡልጋሪኛ + ቢሃሪ + ቢስላምኛ + በንጋሊኛ + ትበትንኛ + ብሬቶንኛ + ብሊን + ካታላንኛ + ኮርሲካኛ + ቼክኛ + ወልሽ + ዴኒሽ + ጀርመን + ድዞንግኻኛ + ግሪክኛ + እንግሊዝኛ + ኤስፐራንቶ + ስፓኒሽ + ኤስቶኒአን + ባስክኛ + ፐርሲያኛ + ፊኒሽ + ፊጂኛ + ፋሮኛ + ፈረንሳይኛ + ፍሪስኛ + አይሪሽ + እስኮትስ ጌልክኛ + ግዕዝኛ + ጋለጋኛ + ጓራኒኛ + ጉጃርቲኛ + ሃውሳኛ + ዕብራስጥ + ሐንድኛ + ክሮሽያንኛ + ሀንጋሪኛ + አርመናዊ + ኢንቴርሊንጓ + እንዶኒሲኛ + እንተርሊንግወ + እኑፒያቅኛ + አይስላንድኛ + ጣሊያንኛ + እኑክቲቱትኛ + ጃፓንኛ + ጃቫንኛ + ጊዮርጊያን + ካዛክኛ + ካላሊሱትኛ 
+ ክመርኛ + ካናዳኛ + ኮሪያኛ + ካሽሚርኛ + ኩርድሽኛ + ኪርጊዝኛ + ላቲንኛ + ሊንጋላኛ + ላውስኛ + ሊቱአኒያን + ላትቪያን + ማላጋስኛ + ማዮሪኛ + ማከዶኒኛ + ማላያላምኛ + ሞንጎላዊኛ + ሞልዳቫዊና + ማራዚኛ + ማላይኛ + ማልቲስኛ + ቡርማኛ + ናኡሩ + ኔፓሊኛ + ደች + ኖርዌጂያን + ኦኪታንኛ + ኦሮምኛ + ኦሪያኛ + ፓንጃቢኛ + ፖሊሽ + ፑሽቶኛ + ፖርቱጋሊኛ + ኵቿኛ + ሮማንስ + ሩንዲኛ + ሮማኒያን + ራሽኛ + ኪንያርዋንድኛ + ሳንስክሪትኛ + ሲንድሂኛ + ሳንጎኛ + ስንሃልኛ + ሲዳምኛ + ስሎቫክኛ + ስሎቪኛ + ሳሞአኛ + ሾናኛ + ሱማልኛ + ልቤኒኛ + ሰርቢኛ + ስዋቲኛ + ሶዞኛ + ሱዳንኛ + ስዊድንኛ + ስዋሂሊኛ + ታሚልኛ + ተሉጉኛ + ታጂኪኛ + ታይኛ + ትግርኛ + ትግረ + ቱርክመንኛ + ታጋሎገኛ + ጽዋናዊኛ + ቶንጋ + ቱርክኛ + ጾንጋኛ + ታታርኛ + ትዊኛ + ኡዊግሁርኛ + ዩክረኒኛ + ኡርዱኛ + ኡዝበክኛ + ቪትናምኛ + ቮላፑክኛ + ዎሎፍኛ + ዞሳኛ + ይዲሻዊኛ + ዮሩባዊኛ + ዡዋንግኛ + ቻይንኛ + ዙሉኛ + + + አንዶራ + የተባበሩት አረብ ኤምሬትስ + አልባኒያ + አርሜኒያ + ኔዘርላንድስ አንቲልስ + አርጀንቲና + ኦስትሪያ + አውስትሬሊያ + አዘርባጃን + ቦስኒያ እና ሄርዞጎቪኒያ + ባርቤዶስ + ቤልጄም + ቡልጌሪያ + ባህሬን + ቤርሙዳ + ቦሊቪያ + ብራዚል + ቡህታን + ቤላሩስ + ቤሊዘ + ኮንጎ + የመካከለኛው አፍሪካ ሪፐብሊክ + ስዊዘርላንድ + ቺሊ + ካሜሩን + ቻይና + ኮሎምቢያ + ኬፕ ቬርዴ + ሳይፕረስ + ቼክ ሪፑብሊክ + ጀርመን + ዴንማርክ + ዶሚኒካ + ዶሚኒክ ሪፑብሊክ + አልጄሪያ + ኢኳዶር + ኤስቶኒያ + ግብጽ + ምዕራባዊ ሳህራ + ኤርትራ + ስፔን + ኢትዮጵያ + ፊንላንድ + ፊጂ + ሚክሮኔዢያ + እንግሊዝ + ጆርጂያ + የፈረንሳይ ጉዊአና + ጋምቢያ + ጊኒ + ኢኳቶሪያል ጊኒ + ግሪክ + ቢሳዎ + ጉያና + ሆንግ ኮንግ + ክሮኤሽያ + ሀይቲ + ሀንጋሪ + ኢንዶኔዢያ + አየርላንድ + እስራኤል + ህንድ + ኢራቅ + አይስላንድ + ጣሊያን + ጃማይካ + ጆርዳን + ጃፓን + ካምቦዲያ + ኮሞሮስ + ደቡብ ኮሪያ + ሰሜን ኮሪያ + ክዌት + ሊባኖስ + ሊቱዌኒያ + ላትቪያ + ሊቢያ + ሞሮኮ + ሞልዶቫ + ማከዶኒያ + ሞንጎሊያ + ማካዎ + ሞሪቴኒያ + ማልታ + ማሩሸስ + ሜክሲኮ + ማሌዢያ + ናሚቢያ + ኒው ካሌዶኒያ + ናይጄሪያ + ኔዘርላንድ + ኖርዌ + ኔፓል + ኒው ዚላንድ + ፔሩ + የፈረንሳይ ፖሊኔዢያ + ፓፑዋ ኒው ጊኒ + ፖላንድ + ፖርታ ሪኮ + ሮሜኒያ + ራሺያ + ሳውድአረቢያ + ሱዳን + ስዊድን + ሲንጋፖር + ስሎቬኒያ + ስሎቫኪያ + ሴኔጋል + ሱማሌ + ሰርቢያ + ሲሪያ + ቻድ + የፈረንሳይ ደቡባዊ ግዛቶች + ታይላንድ + ታጃኪስታን + ምስራቅ ቲሞር + ቱኒዚያ + ቱርክ + ትሪኒዳድ እና ቶባጎ + ታንዛኒያ + ዩጋንዳ + አሜሪካ + ዩዝበኪስታን + ቬንዙዌላ + የእንግሊዝ ድንግል ደሴቶች + የአሜሪካ ቨርጂን ደሴቶች + የመን + ዩጎዝላቪያ + ደቡብ አፍሪካ + ዛምቢያ + + + + [:Ethi:] + + + + + + + + ጃንዩ + ፌብሩ + ማርች + ኤፕረ + ሜይ + ጁን + ጁላይ + ኦገስ + ሴፕቴ + ኦክተ + ኖቬም + ዲሴም + + + ጃንዩወሪ + ፌብሩወሪ + ማርች + ኤፕረል + ሜይ + ጁን + ጁላይ + ኦገስት + ሴፕቴምበር + ኦክተውበር + ኖቬምበር + ዲሴምበር + + + + + + + ሰ/ዓ + ሰኖ + ታላሸ + ኣረር + ከሚሽ + ጅምዓ + ሰ/ን + + + ሰንበት ዓባይ + ሰኖ + ታላሸኖ + ኣረርባዓ + ከሚሽ + ጅምዓት + ሰንበት 
ንኢሽ + + + + + + + + ቀደም ሰርምዕል + ሓቆ ስርምዕል + + + ዓ/ዓ + ዓ/ም + + + + + + + + + ERN + $ + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/tig_ER.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/tig_ER.xml new file mode 100644 index 0000000..9e9c564 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/tig_ER.xml @@ -0,0 +1,99 @@ + + + + + + + + + + + + + + + + + EEEE፡ dd MMMM ዮም yyyy G + + + + + dd MMMM yyyy + + + + + dd-MMM-yyyy + + + + + dd/MM/yy + + + + + + + + h:mm:ss a + + + + + h:mm:ss a + + + + + h:mm:ss a + + + + + h:mm a + + + + + + + {1} {0} + + + + + + + + + + + #,##0.###;-#,##0.### + + + + + + + #E0 + + + + + + + #,##0% + + + + + + + ¤#,##0.00;-¤#,##0.00 + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/tr.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/tr.xml new file mode 100644 index 0000000..f9b59c1 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/tr.xml @@ -0,0 +1,547 @@ + + + + + + + + + + + Afar + Abazca + Afrikaan Dili + Amharik + Arapça + Aymara + Azerice + Başkırt Dili + Beyaz Rusça + Bulgarca + Bihari + Bislama + Bengal Dili + Tibetçe + Breton Dili + Katalan Dili + Korsika Dili + Çekçe + Gal Dili + Danca + Almanca + Bhutan Dili + Yunanca + İngilizce + Esperanto + İspanyolca + Estonya Dili + Bask Dili + Farsça + Fince + Fiji Dili + Faroe Dili + Fransızca + Frizye Dili + İrlanda Dili + İskoç Gal Dili + Galiçya Dili + Guarani + Gujarati + Hausa + İbranice + Hint Dili + Hırvatça + Macarca + Ermenice + Interlingua + Endonezya Dili + Interlingue + Inupiak + İzlandaca + İtalyanca + Inuktitut + Japonca + Java Dili + Gürcüce + Kazak Dili + Grönland Dili + Kamboçya Dili + Kannada + Korece + Keşmirce + Kürtçe + Kırgızca + Latince + Lingala + Laos Dili + Litvanya Dili + Letonya Dili + Malaga Dili + Maori + Makedonca + Malayalam + Moğol Dili + Moldavya Dili + Marathi + Malay + Malta Dili + Birmanya 
Dili + Nauru + Nepal Dili + Hollanda Dili + Norveççe + Occitan + Oromo (Afan) + Oriya + Pencap Dili + Polonya Dili + Peştun Dili + Portekizce + Quechua + Rhaeto-Roman Dili + Kirundi + Romence + Rusça + Kinyarwanda + Sanskritçe + Sindhi + Sangho + Sırp-Hırvat Dili + Sinhal Dili + Slovakça + Slovence + Samoa Dili + Shona + Somali Dili + Arnavutça + Sırpça + Siswati + Sesotho + Sudan Dili + İsveççe + Swahili + Tamil + Telugu + Tacik Dili + Tay Dili + Tigrinya + Türkmence + Tagalog + Setswana + Tonga + Türkçe + Tsonga + Tatarca + Twi + Uygurca + Ukraynaca + Urduca + Özbekçe + Vietnam Dili + Volapuk + Wolof + Xhosa + Yiddiş + Yoruba + Zhuang + Çince + Zulu + + + Andora + Birleşik Arap Emirlikleri + Afganistan + Antigua ve Barbuda + Anguilla + Arnavutluk + Ermenistan + Hollanda Antilleri + Angola + Antarktika + Arjantin + Amerikan Samoası + Avusturya + Avustralya + Aruba + Azerbaycan + Bosna Hersek + Barbados + Bangladeş + Belçika + Burkina Faso + Bulgaristan + Bahreyn + Burundi + Benin + Bermuda + Brunei + Bolivya + Brezilya + Bahamalar + Bhutan + Bouvet Adası + Botswana + Belarus + Belize + Kanada + Cocos (Keeling) Adaları + Kongo Demokratik Cumhuriyeti + Orta Afrika Cumhuriyeti + Kongo + İsviçre + Fildişi Sahilleri + Cook Adaları + Şili + Kamerun + Çin + Kolombiya + Kosta Rika + Küba + Cape Verde + Christmas Adası + Kıbrıs + Çek Cumhuriyeti + Almanya + Cibuti + Danimarka + Dominik + Dominik Cumhuriyeti + Cezayir + Ekvador + Estonya + Mısır + Batı Sahara + Eritre + İspanya + Etiyopya + Finlandiya + Fiji + Falkland Adaları (Malvinalar) + Mikronezya Federal Eyaletleri + Faroe Adaları + Fransa + en + Gabon + Birleşik Krallık + Granada + Gürcistan + Fransız Ginesi + Gana + Cebelitarık + Grönland + Gambia + Gine + Guadeloupe + Ekvator Ginesi + Yunanistan + Güney Georgia ve Güney Sandwich Adaları + Guatemala + Guam + Gine-Bissau + Guyana + Hong Kong SAR - Çin + Heard Adası ve McDonald Adaları + Honduras + Hırvatistan + Haiti + Macaristan + Endonezya + İrlanda + İsrail + 
Hindistan + Hint Okyanusu İngiliz Bölgesi + Irak + İran + İzlanda + İtalya + Jamaika + Ürdün + Japonya + Kenya + Kırgızistan + Kamboçya + Kiribati + Komorlar + Saint Kittler ve Neviler + Kore, Kuzey + Kore, Güney + Kuveyt + Cayman Adaları + Kazakistan + Lao Demokratik Halk Cumhuriyeti + Lübnan + Saint Lucia + Liechtenstein + Sri Lanka + Liberya + Lesotho + Litvanya + Lüksemburg + Letonya + Libya + Fas + Monako + Moldovya Cumhuriyeti + Madagaskar + Marshall Adaları + Makedonya Cumhuriyeti + Mali + Myanmar + Moğolistan + Macao S.A.R. - Çin + Kuzey Mariana Adaları + Martinik + Moritanya + Montserrat + Malta + Mauritius + Maldivler + Malavi + Meksika + Malezya + Mozambik + Namibya + Yeni Kaledonya + Nijer + Norfolk Adası + Nijerya + Nikaragua + Hollanda + Norveç + Nepal + Nauru Adası + Niue Adaları + Yeni Zelanda + Umman + Panama + Peru + Fransız Polinezyası + Papua Yeni Gine + Filipinler + Pakistan + Polonya + Saint Pierre ve Miquelon + Pitcairn + Porto Riko + Filistin Bölgesi + Portekiz + Palau + Paraguay + Katar + Reunion + Romanya + Rusya Federasyonu + Ruanda + Suudi Arabistan + Solomon Adaları + Seyşeller + Sudan + İsveç + Singapur + Saint Helena + Slovenya + Svalbard ve Jan Mayen + Slovakya + Sierra Leone + San Marino + Senegal + Somali + Serbia + Surinam + Sao Tome ve Principe + El Salvador + Suriye + Swaziland + Turks ve Caicos Adaları + Çad + Fransız Güney Bölgeleri + Togo + Tayland + Tacikistan + Tokelau + Doğu Timor + Türkmenistan + Tunus + Tonga + Türkiye + Trinidad ve Tobago + Tuvalu + Tayvan, Çin Bölgesi + Tanzanya + Ukrayna + Uganda + Amerika Birleşik Devletleri Küçük Dış Adaları + Amerika Birleşik Devletleri + Uruguay + Özbekistan + Kutsal Devlet (Vatikan Şehir Devleti) + Saint Vincent ve Grenadinler + Venezuela + İngiliz Virgin Adaları + ABD Virgin Adaları + Vietnam + Vanuatu + Wallis ve Futuna + Samoa + Yemen + Mayotte + Yugoslavya + Güney Afrika + Zambiya + Zimbabwe + + + + [a-zâûöüıçşğ] + + + GanjkHmsSEDFwWxhKzAeugXZ + + + + + + Oca + Şub + Mar + 
Nis + May + Haz + Tem + Ağu + Eyl + Eki + Kas + Ara + + + Ocak + Şubat + Mart + Nisan + Mayıs + Haziran + Temmuz + Ağustos + Eylül + Ekim + Kasım + Aralık + + + + + + + Paz + Pzt + Sal + Çar + Per + Cum + Cmt + + + Pazar + Pazartesi + Salı + Çarşamba + Perşembe + Cuma + Cumartesi + + + + + + + + + + + MS + + + + + + + dd MMMM yyyy EEEE + + + + + dd MMMM yyyy EEEE + + + + + dd.MMM.yyyy + + + + + dd.MM.yyyy + + + + + + + + HH:mm:ss z + + + + + HH:mm:ss z + + + + + HH:mm:ss + + + + + HH:mm + + + + + + + {1} {0} + + + + + + + + + , + . + ; + % + 0 + # + + + - + E + + + + + + + ITL + ITL + + + TRL + TL + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/tr_TR.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/tr_TR.xml new file mode 100644 index 0000000..14c83b8 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/tr_TR.xml @@ -0,0 +1,40 @@ + + + + + + + + + + + + + + #,##0.###;-#,##0.### + + + + + + + #E0 + + + + + + + #,##0% + + + + + + + #,##0.00 ¤;-#,##0.00 ¤ + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/tt.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/tt.xml new file mode 100644 index 0000000..c488d36 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/tt.xml @@ -0,0 +1,42 @@ + + + + + + + + + + + Татар + + + Россия + + + + [а-яёіѣѳѵә] + + + + , +   + ; + % + 0 + # + + + - + E + + + + + + + RUR + р. 
+ + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/tt_RU.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/tt_RU.xml new file mode 100644 index 0000000..c911b3f --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/tt_RU.xml @@ -0,0 +1,103 @@ + + + + + + + + + + + + + + + + + + + + + d MMMM yyyy + + + + + d MMMM yyyy + + + + + dd.MM.yyyy + + + + + dd.MM.yyyy + + + + + + + + h:mm:ss a + + + + + H:mm:ss + + + + + H:mm:ss + + + + + H:mm:ss + + + + + + + {1} {0} + + + + + + + + + + + #,##0.###;-#,##0.### + + + + + + + #E0 + + + + + + + #,##0% + + + + + + + #,##0.00¤;-#,##0.00¤ + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/uk.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/uk.xml new file mode 100644 index 0000000..4ba921d --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/uk.xml @@ -0,0 +1,599 @@ + + + + + + + + + + + Афарська + Абхазька + Африканс + Амхарік + Арабська + Ассамська + Аумара + Азербайджанська + Башкирська + Білоруська + Болгарська + Біхарійська + Бісламійська + Бенгальська + Тібетська + Бретонська + Каталонська + Корсиканська + Чеська + Валлійська + Датська + Німецька + Бхутані + Грецька + Англійська + Есперанто + Іспанська + Естонська + Басква + Перська + Фінська + Фіджі + Фарерська + Французька + Фризька + Ірландська + Гаельська + Галісійська + Гуарані + Гуяраті + Хауса + Іврит + Хінді + Хорватська + Угорська + Вірменська + Інтерлінгва + Індонезійська + Інтерлінгва + Інупіак + Ісландська + Італійська + Японська + Яванська + Грузинська + Казахська + Гринландік + Кампучійська + Дравідійська + Корейська + Кашмірська + Курдська + Киргизька + Латинська + Лінгала + Лаоська + Литовська + Латвійська + Малагасійська + Маорі + Македонська + Малайялам + Монгольська + Молдавська + Маратхі + Малайська + Мальтійська + Бурмісійська + Науру + Непальська + Голландська + Норвезька + Окитан + Оромо + Орія + 
Панджабі + Польська + Пашто + Португальська + Кечуа + Ретороманська + Кірундійська + Румунська + Російська + Кінаруанда + Санскрит + Сіндтхі + Сангро + Сербсько-хорватська + Сингальська + Словацька + Словенська + Самоанська + Шона + Сомалі + Албанська + Сербська + Сісваті + Сесотхо + Суданська + Шведська + Суахілі + Тамільська + Телугу + Таджицька + Тайська + Тигріні + Туркменська + Тагальська + Сетсванська + Тонга + Турецька + Тсонго + Татарська + Тві + Уйгурська + Українська + Урду + Узбецька + Вʼєтнамська + Волапак + Волоф + Кхоса + Ідиш + Йоруба + Зуанг + Китайська + Зулуська + + + Андорра + Сполучені Арабські Емірати + Афганістан + Антигуа і Барбуда + Ангілья + Албанія + Вірменія + Нідерландські Антіли + Ангола + Антарктика + Аргентина + Американські Самоа + Австрія + Австралія + Аруба + Азербайджан + Боснія і Герцеговина + Барбадос + Бангладеш + Бельгія + Буркіна-Фасо + Болгарія + Бахрейн + Бурунді + Бенін + Бермуди + Бруней + Болівія + Бразилія + Багами + Бутан + Буве, острів + Ботсвана + Білорусь + Беліз + Канада + Кокосові острови + Конго + Центрально-Африканська Республіка + Конго + Швейцарія + Кот-д’Івуар + Кука, острови + Чилі + Камерун + Китай + Колумбія + Коста-Рика + Куба + Зеленого Мису, острови + Різдвяні Острови + Кіпр + Чехія + Німеччина + Джибуті + Данія + Домінік + Домініканська Республіка + Алжир + Еквадор + Естонія + Єгипет + Західна Сахара + Ерітрея + Іспанія + Ефіопія + Фінляндія + Фіджі + Фолклендські Острови (Мальвіни) + Мікронезія + Фаро, острови + Франція + Габон + Великобританія + Гренада + Грузія + Французька Гвіана + Гана + Гібралтар + Гренландія + Гамбія + Гвінея + Гваделупа + Екваторіальна Гвінея + Греція + Південна Джоржія та Острови Південний Сандвіч + Гватемала + Гуам + Гвінея-Біссау + Гуана + Гонконг + Острови Херда і Макдональдса + Гондурас + Хорватія + Гаїті + Угорщина + Індонезія + Ірландія + Ізраїль + Індія + Британські території Індійського океану + Ірак + Іран + Ісландія + Італія + Ямайка + Йорданія + Японія + Кенія + 
Киргизстан + Камбоджа + Кірибаті + Коморос + Св. Кіттс і Невіс + Корея, Демократична Республіка + Корея, Республіка + Кувейт + Кайманові острови + Казахстан + Лаоська Народно-Демократична Республіка + Ліван + Санта Лючія + Ліхтенштейн + Шрі-Ланка + Ліберія + Лесото + Литва + Люксембург + Латвія + Лівійська Арабська Джамахірія + Марокко + Монако + Молдова + Мадагаскар + Маршалові Острови + Македонія + Малі + Мʼянмар + Монголія + Макао + Північна Маріана, острови + Мартиніка + Мавританія + Монсеррат + Мальта + Маврикій + Мальдіви + Малави + Мексика + Малайзія + Мозамбік + Намібія + Нова Каледонія + Нігерія + Норфолькські Острови + Нігерія + Нікарагуа + Нідерланди + Норвегія + Непал + Науру + Нія + Нова Зеландія + Оман + Панама + Перу + Французька Полінезія + Папуа Нова Гвінея + Філіппіни + Пакистан + Польща + Св. Пʼєр і Мікулон + Піткаїрн + Пуерто-Ріко + Палестина + Португалія + Палау + Парагвай + Катар + Реюньйон + Румунія + Росія + Руанда + Саудівська Аравія + Соломонові Острови + Сейшели + Судан + Швеція + Сінгапур + Св. Єлена + Словенія + Свалбард і Ян Майєн, острови + Словакія + Сьєрра-Леоне + Сан-Маріно + Сенегал + Сомалі + Сурінам + Сао Том і Прінсіп + Сальвадор + Сирійська Арабська Республіка + Свазіленд + Турок та Какіос, острови + Чад + Французькі Південні Території + Того + Тайланд + Таджикистан + Токелау + Східний Тимор + Туркменистан + Туніс + Тонга + Туреччина + Тринідад і Табаго + Тувалу + Тайвань + Танзанія, Обʼєднана Республіка + Україна + Уганда + Віддалені Острови США + США + Уругвай + Узбекистан + Ватикан + Св. 
Вінсент і Гренадини + Венесуела + Віргінські острови (Британія) + Віргінські острови (США) + Вʼєтнам + Вануату + Валліс і Футуна, острови + Самоа + Йємен + Майот + Югославія + ПАР + Замбія + Зімбабве + + + + [а-щюьяєіїґ] + + + GanjkHmsSEDFwWxhKzAeugXZ + + + + + + Січ + Лют + Бер + Кві + Тра + Чер + Лип + Сер + Вер + Жов + Лис + Гру + + + С + Л + Б + К + Т + Ч + Л + С + В + Ж + Л + Г + + + січня + лютого + березня + квітня + травня + червня + липня + серпня + вересня + жовтня + листопада + грудня + + + + + Січ + Лют + Бер + Кві + Тра + Чер + Лип + Сер + Вер + Жов + Лис + Гру + + + С + Л + Б + К + Т + Ч + Л + С + В + Ж + Л + Г + + + Січень + Лютий + Березень + Квітень + Травень + Червень + Липень + Серпень + Вересень + Жовтень + Листопад + Грудень + + + + + + + Нд + Пн + Вт + Ср + Чт + Пт + Сб + + + Неділя + Понеділок + Вівторок + Середа + Четвер + Пʼятниця + Субота + + + + + + + + + + до н.е. + н.е. + + + + + + + EEEE, d MMMM yyyy 'р.' + + + + + d MMMM yyyy + + + + + d MMM yyyy + + + + + dd.MM.yy + + + + + + + + HH:mm:ss z + + + + + HH:mm:ss z + + + + + HH:mm:ss + + + + + HH:mm + + + + + + + {1} {0} + + + + + + + + + , +   + ; + % + 0 + # + + + - + E + + + + + + + UAH + грн. 
+ + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/uk_UA.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/uk_UA.xml new file mode 100644 index 0000000..d0cc8a8 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/uk_UA.xml @@ -0,0 +1,40 @@ + + + + + + + + + + + + + + #,##0.###;-#,##0.### + + + + + + + #E0 + + + + + + + #,##0% + + + + + + + #,##0.00 ¤;-#,##0.00 ¤ + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/ur.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/ur.xml new file mode 100644 index 0000000..63cd85a --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/ur.xml @@ -0,0 +1,28 @@ + + + + + + + + + + + اردو + + + پاکستان + + + + [[:Arab:]‌‍‏‎] + + + + + PKR + Rs + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/ur_PK.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/ur_PK.xml new file mode 100644 index 0000000..3a8a99b --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/ur_PK.xml @@ -0,0 +1,10 @@ + + + + + + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/uz.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/uz.xml new file mode 100644 index 0000000..f987a8d --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/uz.xml @@ -0,0 +1,42 @@ + + + + + + + + + + + Ўзбек + + + Ўзбекистон + + + + [а-яёіѣѳѵў] + + + + , +   + ; + % + 0 + # + + + - + E + + + + + + + UZS + сўм + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/uz_AF.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/uz_AF.xml new file mode 100644 index 0000000..d796f3e --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/uz_AF.xml @@ -0,0 +1,209 @@ + + + + + + + + + + + + دری + پشتو + اۉزبېک + + + افغانستان + + + + 
[ء-ؤئ-غفقل-ويً-ْٰٔټپځڅ-چډړږژښکګگڼۇۉی-ۍې] + + + + + + + + جنو + فبر + مار + اپر + مـی + جون + جول + اگس + سپت + اکت + نوم + دسم + + + جنوری + فبروری + مارچ + اپریل + می + جون + جولای + اگست + سپتمبر + اکتوبر + نومبر + دسمبر + + + + + + + ی. + د. + س. + چ. + پ. + ج. + ش. + + + یکشنبه + دوشنبه + سه‌شنبه + چهارشنبه + پنجشنبه + جمعه + شنبه + + + + + + + + + + ق.م. + م. + + + + + + + yyyy نچی ییل d نچی MMMM EEEE کونی + + + + + d نچی MMMM yyyy + + + + + d MMMM yyyy + + + + + yyyy/M/d + + + + + + + + H:mm:ss (z) + + + + + H:mm:ss (z) + + + + + H:mm:ss + + + + + H:mm + + + + + + + {1} {0} + + + + + + + + + افغانستان وقتی + افغانستان وقتی + + + AFT + AFT + + کابل + + + + + + ٫ + ٬ + ; + ٪ + ۰ + # + + + + ×۱۰^ + + + + + + + + #,##0.###;-#,##0.### + + + + + + + #E0 + + + + + + + #,##0% + + + + + + + #,##0 ¤;-#,##0 ¤ + + + + + + افغانی + افغانی + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/uz_UZ.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/uz_UZ.xml new file mode 100644 index 0000000..0308a19 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/uz_UZ.xml @@ -0,0 +1,10 @@ + + + + + + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/vi.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/vi.xml new file mode 100644 index 0000000..9be53d7 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/vi.xml @@ -0,0 +1,440 @@ + + + + + + + + + + + Tiếng A-rập + Tiếng Ai-déc-bai-gian + Tiếng Bê-la-rút + Tiếng Bun-ga-ri + Tiếng Tây Tạng + Tiếng Ca-ta-lăng + Tiếng Séc + Tiếng Đan Mạch + Tiếng Đức + Tiếng Hy Lạp + Tiếng Anh + Tiếng Quốc Tế Ngữ + Tiếng Tây Ban Nha + Tiếng E-xtô-ni-a + Tiếng Ba Tư + Tiếng Phần Lan + Tiếng Pháp + Tiếng Ai-len + Tiếng Hê-brơ + Tiếng Hin-đi + Tiếng Crô-a-ti-a + Tiếng Hung-ga-ri + Tiếng Ác-mê-ni + Tiếng Khoa Học Quốc Tế + Tiếng In-đô-nê-xia + Tiếng Ai-xơ-len + Tiếng Ý + Tiếng Nhật + Tiếng Gia-va + Tiếng 
Campuchia + Tiếng Kan-na-đa + Tiếng Hàn Quốc + Tiếng La-tinh + Tiếng Lào + Tiếng Lít-va + Tiếng Lát-vi-a + Tiếng Ma-xê-đô-ni-a + Tiếng Mông Cổ + Tiếng Ma-lay-xi-a + Tiếng Nê-pan + Tiếng Hà Lan + Tiếng Na Uy + Tiếng Ba Lan + Tiếng Bồ Đào Nha + Tiếng Ru-ma-ni + Tiếng Nga + Tiếng Phạn + Tiếng Xlô-vác + Tiếng Xlô-ven + Tiếng Xô-ma-li + Tiếng An-ba-ni + Tiếng Séc-bi + Tiếng Thụy Điển + Tiếng Thái + Tiếng Thổ Nhĩ Kỳ + Tiếng U-crai-na + Tiếng U-dơ-bếch + Tiếng Việt + Tiếng Y-đit + Tiếng Trung Quốc + + + Các Tiểu Vương quốc A-rập Thống nhất + Áp-ga-ni-xtan + An-ti-gu-a và Ba-bu-đa + An-ba-ni + Ác-mê-ni-a + Ăng-gô-la + Ác-hen-ti-na + Áo + Úc + Ai-déc-bai-gian + Bô-xni-a Héc-xê-gô-vi-na + Bác-ba-đốt + Băng-la-đét + Bỉ + Buốc-ki-na Pha-xô + Bun-ga-ri + Ba-ren + Bu-run-đi + Bê-nanh + Bru-nây + Bô-li-vi-a + Bra-xin + Ba-ha-ma + Bốt-xoa-na + Bê-la-rút + Bê-li-xê + Ca-na-đa + Cộng hòa Trung Phi + Công-gô + Thụy Sĩ + Bờ Biển Ngà + Chi-lê + Ca-mơ-run + Trung Quốc + Cô-lôm-bi-a + Cốt-xta Ri-ca + Cu Ba + Cáp-ve + Síp + Cộng hòa Séc + Đức + Gi-bu-ti + Đan Mạch + An-giê-ri + Ê-cu-a-đo + E-xtô-ni-a + Ai Cập + Tây Sahara + Ê-ri-tơ-rê-a + Tây Ban Nha + Ê-ti-ô-pi-a + Phần Lan + Phi-gi + Mi-crô-nê-xi-a + Pháp + Ga-bông + Vương quốc Anh + Grê-na-đa + Gru-di-a + Gha-na + Găm-bi-a + Ghi-nê + Ghi-nê Xích-đạo + Hy Lạp + Goa-tê-ma-la + Ghi-nê Bít-xao + Guy-a-na + Hôn-đu-rát + Crô-a-ti-a + Ha-i-ti + Hung-ga-ri + Nam Dương + Ai-len + I-xra-en + Ấn Độ + I-rắc + I-ran + Ai-xơ-len + Ý + Ha-mai-ca + Gióc-đa-ni + Nhật Bản + Kê-ni-a + Cư-rơ-gư-xtan + Campuchia + Ki-ri-ba-ti + Cô-mô + Xan-kít và Nê-vi + Bắc Triều Tiên + Hàn Quốc + Cô-oét + Ka-dắc-xtan + Lào + Li-băng + Xan Lu-xi + Lich-ten-xtên + Xri Lan-ca + Li-bê-ri-a + Lê-xô-thô + Li-tu-a-ni-a + Lúc-xăm-bua + Lát-vi-a + Li-bi + Ma-rốc + Mô-na-cô + Môn-đô-va + Ma-đa-gát-xca + Quần đảo Mác-san + Ma-xê-đô-ni-a + Ma-li + Mi-an-ma + Mông Cổ + Mô-ri-ta-ni + Man-ta + Mô-ri-xơ + Man-đi-vơ + Ma-la-uy + Mê-hi-cô + Ma-lay-xi-a + Mô-dăm-bích + Nam-mi-bi-a + Ni-giê 
+ Ni-giê-ri-a + Ni-ca-ra-goa + Hà Lan + Na Uy + Nê-pan + Niu Di-lân + Ô-man + Pa-na-ma + Pê-ru + Pa-pu-a Niu Ghi-nê + Phi-lip-pin + Pa-ki-xtan + Ba Lan + Bồ Đào Nha + Pa-ra-goay + Ca-ta + Ru-ma-ni + Nga + Ru-an-đa + A-rập Xê-út + Quần đảo Xô-lô-mông + Xây-sen + Xu-đăng + Thụy Điển + Xin-ga-po + Xlô-ven-ni-a + Xlô-va-ki-a + Xi-ê-ra Lê-ôn + Xan Ma-ri-nô + Xê-nê-gan + Xô-ma-li + Séc-bia + Xu-ri-nam + Xao Tô-mê và Prin-xi-pê + En-san-va-đo + Xi-ri + Xoa-di-len + Sát + Tô-gô + Thái Lan + Tát-gi-ki-xtan + Tuốc-mê-ni-xtan + Tuy-ni-di + Tông-ga + Thổ Nhĩ Kỳ + Tri-ni-đát và Tô-ba-gô + Tu-va-lu + Đài Loan + Tan-da-ni-a + U-crai-na + U-gan-đa + Hoa Kỳ + U-ru-goay + U-dơ-bê-ki-xtan + Va-ti-căng + Xan Vin-xen và Grê-na-din + Vê-nê-zu-ê-la + Việt Nam + Va-nu-a-tu + Xa-moa + Y-ê-men + Nam Tư + Nam Phi + Dăm-bi-a + Dim-ba-bu-ê + + + + [a-zẠ-ỹđơà-ãè-êìíò-õùúýăĩũư] + + + + + + + + thg 1 + thg 2 + thg 3 + thg 4 + thg 5 + thg 6 + thg 7 + thg 8 + thg 9 + thg 10 + thg 11 + thg 12 + + + tháng một + tháng hai + tháng ba + tháng tư + tháng năm + tháng sáu + tháng bảy + tháng tám + tháng chín + tháng mười + tháng mười một + tháng mười hai + + + + + + + CN + Th 2 + Th 3 + Th 4 + Th 5 + Th 6 + Th 7 + + + Chủ nhật + Thứ hai + Thứ ba + Thứ tư + Thứ năm + Thứ sáu + Thứ bảy + + + + + + + + SA + CH + + + tr. CN + sau CN + + + + + + + EEEE, 'ngày' dd MMMM 'năm' yyyy + + + + + 'Ngày' dd 'tháng' M 'năm' yyyy + + + + + dd-MM-yyyy + + + + + dd/MM/yyyy + + + + + + + + HH:mm:ss z + + + + + HH:mm:ss z + + + + + HH:mm:ss + + + + + HH:mm + + + + + + + {0} {1} + + + + + + + + + , + . 
+ ; + % + 0 + # + + + - + E + + + + + + + + #,##0.###;-#,##0.### + + + + + + + #E0 + + + + + + + #,##0% + + + + + + + #,##0.00 ¤;-#,##0.00 ¤ + + + + + + đồng + đ + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/vi_VN.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/vi_VN.xml new file mode 100644 index 0000000..83bbe5f --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/vi_VN.xml @@ -0,0 +1,10 @@ + + + + + + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/wal.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/wal.xml new file mode 100644 index 0000000..e638133 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/wal.xml @@ -0,0 +1,197 @@ + + + + + + + + + + + ወላይታቱ + + + አንዶራ + የተባበሩት አረብ ኤምሬትስ + አልባኒያ + አርሜኒያ + ኔዘርላንድስ አንቲልስ + አርጀንቲና + ኦስትሪያ + አውስትሬሊያ + አዘርባጃን + ቦስኒያ እና ሄርዞጎቪኒያ + ባርቤዶስ + ቤልጄም + ቡልጌሪያ + ባህሬን + ቤርሙዳ + ቦሊቪያ + ብራዚል + ቡህታን + ቤላሩስ + ቤሊዘ + ኮንጎ + የመካከለኛው አፍሪካ ሪፐብሊክ + ስዊዘርላንድ + ቺሊ + ካሜሩን + ቻይና + ኮሎምቢያ + ኬፕ ቬርዴ + ሳይፕረስ + ቼክ ሪፑብሊክ + ጀርመን + ዴንማርክ + ዶሚኒካ + ዶሚኒክ ሪፑብሊክ + አልጄሪያ + ኢኳዶር + ኤስቶኒያ + ግብጽ + ምዕራባዊ ሳህራ + ኤርትራ + ስፔን + ኢትዮጵያ + ፊንላንድ + ፊጂ + ሚክሮኔዢያ + እንግሊዝ + ጆርጂያ + የፈረንሳይ ጉዊአና + ጋምቢያ + ጊኒ + ኢኳቶሪያል ጊኒ + ግሪክ + ቢሳዎ + ጉያና + ሆንግ ኮንግ + ክሮኤሽያ + ሀይቲ + ሀንጋሪ + ኢንዶኔዢያ + አየርላንድ + እስራኤል + ህንድ + ኢራቅ + አይስላንድ + ጣሊያን + ጃማይካ + ጆርዳን + ጃፓን + ካምቦዲያ + ኮሞሮስ + ደቡብ ኮሪያ + ሰሜን ኮሪያ + ክዌት + ሊባኖስ + ሊቱዌኒያ + ላትቪያ + ሊቢያ + ሞሮኮ + ሞልዶቫ + ማከዶኒያ + ሞንጎሊያ + ማካዎ + ሞሪቴኒያ + ማልታ + ማሩሸስ + ሜክሲኮ + ማሌዢያ + ናሚቢያ + ኒው ካሌዶኒያ + ናይጄሪያ + ኔዘርላንድ + ኖርዌ + ኔፓል + ኒው ዚላንድ + ፔሩ + የፈረንሳይ ፖሊኔዢያ + ፓፑዋ ኒው ጊኒ + ፖላንድ + ፖርታ ሪኮ + ሮሜኒያ + ራሺያ + ሳውድአረቢያ + ሱዳን + ስዊድን + ሲንጋፖር + ስሎቬኒያ + ስሎቫኪያ + ሴኔጋል + ሱማሌ + ሰርቢያ + ሲሪያ + ቻድ + የፈረንሳይ ደቡባዊ ግዛቶች + ታይላንድ + ታጃኪስታን + ምስራቅ ቲሞር + ቱኒዚያ + ቱርክ + ትሪኒዳድ እና ቶባጎ + ታንዛኒያ + ዩጋንዳ + አሜሪካ + ዩዝበኪስታን + ቬንዙዌላ + የእንግሊዝ ድንግል ደሴቶች + የአሜሪካ ቨርጂን ደሴቶች + የመን + ዩጎዝላቪያ + ደቡብ አፍሪካ + ዛምቢያ + + + + [:Ethi:] + + + + + + + + ወጋ + ሳይኖ + ማቆሳ + አሩዋ + ሃሙሳ + አርባ + ቄራ + + 
+ ወጋ + ሳይኖ + ማቆሳኛ + አሩዋ + ሃሙሳ + አርባ + ቄራ + + + + + + + + ማለዶ + ቃማ + + + አዳ ዎዴ + ግሮተታ ላይታ + + + + + + + + + ETB + $ + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/wal_ET.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/wal_ET.xml new file mode 100644 index 0000000..ff6be12 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/wal_ET.xml @@ -0,0 +1,99 @@ + + + + + + + + + + + + + + + + + EEEE፥ dd MMMM ጋላሳ yyyy G + + + + + dd MMMM yyyy + + + + + dd-MMM-yyyy + + + + + dd/MM/yy + + + + + + + + h:mm:ss a + + + + + h:mm:ss a + + + + + h:mm:ss a + + + + + h:mm a + + + + + + + {1} {0} + + + + + + + + + + + #ወ##0.###;-#ወ##0.### + + + + + + + #E0 + + + + + + + #ወ##0% + + + + + + + ¤#ወ##0.00;-¤#ወ##0.00 + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/zh.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/zh.xml new file mode 100644 index 0000000..df95d72 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/zh.xml @@ -0,0 +1,2674 @@ + + + + + + + + + + + 阿法文 + 阿布哈西亚文 + 亚齐文 + 阿乔利文 + 阿当梅文 + 阿迪何文 + 阿维斯塔文 + 南非荷兰文 + 其他亚非语系 + 阿弗里希利文 + 阿肯文 + 阿卡德文 + 阿留申群岛之土语 + 其他阿尔贡语系 + 阿姆哈拉文 + 中古英语 + 阿帕切文 + 阿拉伯文 + 阿拉米文 + 阿劳坎文 + 阿拉帕霍文 + 其他人工语言 + 阿拉瓦克文 + 阿萨姆文 + 阿斯图里亚思特语 + 其他阿撒巴斯卡语系 + 澳大利亚语系 + 阿瓦尔文 + 阿瓦乔文 + 艾马拉文 + 阿塞拜疆文 + 巴什客尔文 + 班达文 + 巴米累克文 + 俾路支文 + 班巴拉文 + 巴里文 + 巴萨文 + 波罗的海地区之语言 + 白俄罗斯文 + 别札文 + 别姆巴文 + 北非回教土族之语言 + 保加利亚文 + 比哈尔文 + 博杰普尔文 + 比斯拉马文 + 毕库尔文 + 比尼文 + 司克司卡文 + 班巴拉文 + 孟加拉文 + 班图文 + 西藏文 + 布里多尼文 + 布拉杰文 + 波斯尼亚文 + 巴塔克文 + 布里亚特文 + 布吉文 + 布林文 + 加泰罗尼亚文 + 卡多文 + 其他中美印第安语系 + 巴勒比文 + 其他高加索语系 + 车臣文 + 宿务文 + 其他凯尔特语系 + 查莫罗文 + 契布卡文 + 查加文 + 楚吾克文 + 马里文 + 契努克文 + 乔克托文 + 佩瓦扬文 + 彻罗基文 + 夏延文 + 查米克文 + 科西嘉文 + 科普特文 + 不纯粹之英国方言 + 不纯粹之法国方言 + 不纯粹之葡国方言 + 克里族文 + 克里米亚土耳其文;克里米亚塔塔文 + 克里奥尔语和皮钦文 + 捷克文 + 卡舒文 + 宗教斯拉夫文 + 其他库施特语系 + 楚瓦什文 + 威尔士文 + 丹麦文 + 达科他文 + 达尔格瓦文 + 达雅克文 + 德文 + 特拉瓦印第安人文 + 司雷夫文 + 多格来文 + 丁卡文 + 多格来文 + 其他德拉维语系 + 下塞尔维亚文 + 都阿拉文 + 中古荷兰文 + 迪维希文 + 迪尤拉文 + 不丹文 + 幽文 + 希腊文 + 艾拉米特文 + 英文 + 
中古英文 + 世界文 + 西班牙文 + 爱沙尼亚文 + 巴斯克文 + 旺杜文 + 波斯文 + 芳格文 + 芳蒂文 + 夫拉文 + 芬兰文 + 芬匈文(其他) + 斐济文 + 法罗文 + 丰文 + 法文 + 中古法文 + 古法文 + 弗留利文 + 弗里斯兰文 + 爱尔兰文 + 加文 + 迦约文 + 葛巴亚文 + 苏格兰- 盖尔文 + 吉兹文 + 吉尔伯特斯文 + 加利西亚文 + 中古高地德文 + 瓜拉尼文 + 古代高地德文 + 岗德文 + 科洛涅达罗文 + 哥达文 + 格列博文 + 古希腊文 + 古加拉提文 + 马恩岛文 + 吉维克琴文 + 豪撒文 + 海达文 + 夏威夷文 + 希伯来文 + 印地文 + 希利盖农文 + 赫马查利文 + 西台文 + 赫蒙文 + 新里木托文 + 克罗地亚文 + 上索布文 + 匈牙利文 + 胡帕文 + 亚美尼亚文 + 赫雷罗文 + 拉丁国际文 + 伊班文 + 印度尼西亚文 + 拉丁国际文 + 伊格博文 + 四川话 + 伊乔文 + 依奴皮维克文 + 伊洛干诺文 + 印度文(其他) + 其他印欧语系 + 印古什文 + 爱德莪文(人工语言) + 伊朗文 + 伊洛郭伊费文 + 冰岛文 + 意大利文 + 爱斯基摩文 + 日文 + 洛吉般(人工语言) + 犹太波斯语系 + 犹太阿拉伯语系 + 爪哇文 + 格鲁吉亚文 + 卡拉卡尔帕克文 + 卡比尔文 + 卡琴文 + 卡姆巴文 + 喀伦文 + 卡威文 + 卡巴尔达文 + 刚果文 + 卡西文 + 其他科伊桑文 + 和田文 + 吉库尤文 + 关琊玛文 + 哈萨克文 + 格陵兰文 + 柬埔寨文 + 金邦杜文 + 埃纳德文 + 韩文 + 刚卡尼文 + 柯司瑞恩文 + 克佩列文 + 卡努里文 + 卡拉卡尔帕克文 + 克鲁文 + 库鲁克文 + 克什米尔文 + 库尔德文 + 库梅克文 + 库特内文 + 科米文 + 凯尔特文 + 吉尔吉斯文 + 拉丁文 + 拉迪诺文 + 拉亨达文 + 兰巴文 + 卢森堡文 + 莱兹依昂文 + 卢干达文 + 淋布尔吉文 + 林加拉文 + 老挝文 + 蒙古文 + 洛兹文 + 立陶宛文 + 鲁巴加丹加文 + 鲁巴鲁瓦文 + 路易塞诺文 + 隆达文 + 卢奥文 + 卢晒文 + 拉脫維亞文 + 马都拉文 + 马加伊文 + 迈蒂利文 + 望加锡文 + 曼丁哥文 + 马来亚玻里尼西亚语系 + 萨伊语 + 莫克沙文 + 曼达尔 + 门迪文 + 马尔加什文 + 中古爱尔兰文 + 马绍尔文 + 毛利文 + 米克马克文 + 米南卡保文 + 各种不同语言 + 马其顿文 + 其他蒙吉蔑文 + 马来亚拉姆文 + 蒙古文 + 满文 + 曼尼普里文 + 马诺博污文 + 摩尔多瓦文 + 摩霍克文 + 莫西文 + 马拉地文 + 马来文 + 马耳他文 + 多种语言 + 蒙达文 + 摩斯科格文 + 马尔尼里文 + 缅甸文 + 玛雅文 + 俄日亚文 + 瑙鲁文 + 纳瓦特尔文 + 其他北美印第安语系 + 拿波里文 + 挪威博克马尔文 + 北恩德贝勒文 + 德国北部的德文;低地萨克逊文 + 尼泊尔文 + 尼瓦尔文 + 恩东加文 + 尼尔司文 + 其他尼日尔刚果语系 + 纽埃文 + 荷兰文 + 挪威尼诺斯克文 + 挪威文 + 诺盖文 + 古诺尔斯文 + 南部恩德贝勒文 + 北索托文 + 努比亚文 + 纳瓦霍文 + 尼昂加文;切瓦文;切瓦文 + 尼亚姆韦齐文 + 尼昂科勒文 + 尼约罗语族 + 尼兹玛文 + 奥西坦文 + 奥季布瓦文 + 阿曼文 + 欧里亚文 + 奥塞提文 + 奥萨哲文 + 奥托曼土耳其文 + 奥托米语系 + 旁遮普文 + 其他巴布亚文 + 邦阿西南文 + 帕拉维文 + 邦板牙文 + 帕皮亚内托文 + 帕劳文 + 古老波斯语 + 其他菲律宾语系 + 腓利基文 + 帕利文 + 波兰文 + 波那贝文 + 印度古代及中世纪之中部及北部方言 + 普罗文斯文 + 普什图文 + 葡萄牙文 + 盖丘亚文 + 拉贾斯坦文 + 拉帕努文 + 拉罗汤加文 + 里托罗曼斯文 + 基隆迪文 + 罗马尼亚文 + 其他拉丁语系 + 吉普赛文 + 俄文 + 卢旺达文 + 梵文 + 散达维文 + 雅库特文 + 其他南美印第安文 + 萨利什文 + 萨玛利亚文 + 塞塞卡文 + 桑嗒利文 + 萨丁文 + 苏格兰文 + 苏丹文 + 北萨迷文 + 塞尔库普文 + 其他闪族语系 + 桑戈文 + 古爱尔兰文 + 手语 + 塞波尼斯-克罗地亚文 + 掸文 + 僧伽罗文 + 悉达摩文 + 苏语诸语言 + 其他汉藏语系 + 斯洛伐克文 + 斯洛文尼亚文 + 其他斯拉夫语系 + 萨摩亚文 + 南萨迷文 + 其他萨迷文 + 卢乐萨迷文 + 依纳日萨迷文 + 司寇特萨迷文 + 塞内加尔文 + 索尼基文 + 索马里文 + 索格迪亚文 
+ 桑海文 + 阿尔巴尼亚文 + 塞尔维亚文 + 谢列尔文 + 辛辛那提文 + 其他尼罗萨哈兰文 + 塞索托文 + 苏丹文 + 苏库马文 + 苏苏文 + 苏马文 + 瑞典文 + 斯瓦希里文 + 叙利亚文 + 泰米尔文 + 其他泰文 + 泰卢固文 + 体姆呐文 + 特喏诺文 + 特图们文 + 塔吉克文 + 泰文 + 提格里尼亚文 + 提格雷文 + 蒂夫文 + 土库曼文 + 陀克娄文 + 塔加路族文 + 特林吉特文 + 塔玛厍克文 + 突尼斯文 + 汤加文 + 汤加文(尼亚萨地区) + 托克皮辛文 + 土耳其文 + 特松加文 + 蒂姆西亚文 + 鞑靼文 + 通布卡文 + 图匹文 + 其他阿尔泰语系 + 图瓦卢文 + 台湾文 + 塔西提文 + 图瓦文 + 乌德穆尔特文 + 维吾尔文 + 乌加里特文 + 乌克兰文 + 姆崩杜文 + 未定语种 + 乌尔都文 + 乌兹别克文 + 瓦伊文 + 文达文 + 越南文 + 沃拉普克文 + 沃提克文 + 華隆文 + 瓦喀山文 + 瓦拉莫文 + 佤瑞文 + 瓦绍文 + 索布诸语言 + 沃尔夫文 + 卡啦迷克文 + 班图文 + 瑶族文 + 雅浦文 + 依地文 + 约鲁巴文 + 喻皮克文 + 藏文 + 萨波蒂克文 + 泽纳加文 + 中文 + 赞德文 + 祖鲁文 + 祖尼语 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 安道尔 + 阿拉伯联合酋长国 + 阿富汗 + 安提瓜和巴布达 + 安圭拉 + 阿尔巴尼亚 + 亚美尼亚 + 荷属安的列斯群岛 + 安哥拉 + 南极洲 + 阿根廷 + 美属萨摩亚 + 奥地利 + 澳大利亚 + 阿鲁巴 + 阿塞拜疆 + 波斯尼亚和黑山共和国 + 巴巴多斯 + 孟加拉国 + 比利时 + 布基纳法索 + 保加利亚 + 巴林 + 布隆迪 + 贝宁 + 百慕大 + 文莱 + 玻利维亚 + 巴西 + 巴哈马 + 不丹 + 布维特岛 + 博茨瓦纳 + 白俄罗斯 + 伯利兹 + 加拿大 + 科科斯群岛 + 刚果民主共和国 + 中非共和国 + 刚果 + 瑞士 + 象牙海岸 + 库克群岛 + 智利 + 喀麦隆 + 中国 + 哥伦比亚 + 哥斯达黎加 + 古巴 + 佛得角 + 圣诞岛 + 塞浦路斯 + 捷克共和国 + 德国 + 吉布提 + 丹麦 + 多米尼加岘 + 多米尼加共和国 + 阿尔及利亚 + 厄瓜多尔 + 爱沙尼亚 + 埃及 + 西撒哈拉 + 厄立特里亚 + 西班牙 + 埃塞俄比亚 + 芬兰 + 斐济 + 福克兰群岛 + 密克罗尼西亚联邦 + 法罗群岛 + 法国 + 加蓬 + 英国 + 格林纳达 + 格鲁吉亚 + 法属圭亚那 + 加纳 + 直布罗陀 + 格陵兰 + 冈比亚 + 几内亚 + 瓜德罗普岛 + 赤道几内亚 + 希腊 + 南佐治亚和南三明治群岛 + 危地马拉 + 关岛 + 几内亚比绍 + 圭亚那 + 中国香港特别行政区 + 赫德与麦克唐纳群岛 + 洪都拉斯 + 克罗地亚 + 海地 + 匈牙利 + 印度尼西亚 + 爱尔兰 + 以色列 + 印度 + 英属印度洋领地 + 伊拉克 + 伊朗 + 冰岛 + 意大利 + 牙买加 + 约旦 + 日本 + 肯尼亚 + 吉尔吉克斯坦 + 柬埔寨 + 基里巴斯 + 科摩罗 + 圣基茨和尼维斯 + 北朝鲜 + 韩国 + 科威特 + 开曼群岛 + 哈萨克斯坦 + 老挝人民民主共和国 + 黎巴嫩 + 圣卢西亚 + 列支敦士登 + 斯里兰卡 + 利比里亚 + 莱索托 + 立陶宛 + 卢森堡 + 拉脱维亚 + 利比亚 + 摩洛哥 + 摩纳哥 + 摩尔多瓦共和国 + 马达加斯加 + 马绍尔群岛 + 马其顿王国 + 马里 + 缅甸 + 蒙古 + 中国澳门特别行政区 + 北马里亚纳群岛 + 马提尼克岛 + 毛里塔尼亚 + 蒙特塞拉群岛 + 马耳他 + 毛里求斯 + 马尔代夫 + 马拉维 + 墨西哥 + 马来西亚 + 莫桑比克 + 纳米比亚 + 新喀里多尼亚 + 尼日尔 + 诺福克岛 + 尼日利亚 + 尼加拉瓜 + 荷兰 + 挪威 + 尼泊尔 + 瑙鲁 + 纽埃 + 新西兰 + 阿曼 + 巴拿马 + 秘鲁 + 法属波利尼西亚 + 巴布亚新几内亚 + 菲律宾 + 巴基斯坦 + 波兰 + 圣皮埃尔和密克隆 + 皮特凯恩 + 波多黎各 + 巴勒斯坦领土 + 葡萄牙 + 帕劳 + 巴拉圭 + 卡塔尔 + 留尼汪 + 罗马尼亚 + 俄罗斯联邦 + 卢旺达 + 沙特阿拉伯 + 所罗门群岛 + 塞舌尔 + 苏丹 + 瑞典 + 新加坡 + 圣赫勒拿 + 斯洛文尼亚 + 斯瓦尔巴特和扬马延 + 斯洛伐克 
+ 塞拉利昂 + 圣马力诺 + 塞内加尔 + 索马里 + 塞尔维亚 + 苏里南 + 圣多美和普林西比 + 萨尔瓦多 + 叙利亚 + 斯威士兰 + 特克斯和凯科斯群岛 + 乍得 + 法属南半球领地 + 多哥 + 泰国 + 塔吉克斯坦 + 托克劳 + 东帝汶 + 土库曼斯坦 + 突尼斯 + 汤加 + 土耳其 + 特立尼达和多巴哥 + 图瓦卢 + 台湾 + 坦桑尼亚 + 乌克兰 + 乌干达 + 美国边远小岛 + 美国 + 乌拉圭 + 乌兹别克斯坦 + 梵蒂冈 + 圣文森特和格林纳丁斯 + 委内瑞拉 + 英属维京群岛 + 美属维京群岛 + 越南 + 瓦努阿图 + 瓦利斯和富图纳 + 萨摩亚 + 也门 + 马约特 + 南斯拉夫 + 南非 + 赞比亚 + 津巴布韦 + + + 已修订 + + + 日历 + 对照 + 货币 + + + 佛教日历 + 农历 + 公历 + 希伯来日历 + 伊斯兰日历 + 伊斯兰希吉来历 + 日本日历 + 顺序 + 电话簿顺序 + 拼音顺序 + 笔划顺序 + 传统历法 + + + + [一-丁七丈-不专-且世丙-业东-丝丢两-严丧个中串临丸-主丽-举乃久么-义之-乌乎-乐乔乖乘-乙九也-乡书买-乱乾了予-争事-二于-亏云-互五-井亚-些亡交亦亨享-京亮亲人亿-仁仅仇今-介仍-从仔他付-仙代-以仪们仰仲件任份仿企伊伍伏休众伙-会伟-传伤伦伯-估伴伸似但位-住佑体何余佛-作你佩佳使例供依侠侦-侨侬侯侵便促俊俗保信俩修俱倍倒候-倚借倦值倾假偏做停健偶-偷储催傲傻像儒允元-充先-光克免兔入全八-兮兰-共关-典养-兽再冒写军-农冠冬冰冲冷准凌凝凡凤凭凯-凰出-击函刀分-切刊刑列-创初判利到制-刷刺-刻剂前剑剧剩-剪副割力劝-务劣动-劫励-劳势勇勉勒勤勾-勿包-匆化-北匙区-医十千升-午半华-协卒单-南博占-卡卫印-危即卷厅-历厉压-厌厚原去县参又-反发叔取-变口-另叫-叭可-台史-右叶-叹吃-各合-吊同-后吐-向吓吗君吝吟否-吧含吵吸-吹吻吾呀呆呈告员呜呢呦周味呵呼-命和咖咦-咧咪咬咱哀-品哇-哉响-哎哟哥-哦哩-哪哭哲唉唐唤唬售-唯唱唷商啊啡啥-啦啪喂善喇喊喔喜-喝喵喷喻嗨嗯嘉嘛嘴嘻嘿器四回因团园困围固国-图圆圈土在地场圾址均坐-坑块坚-坜坡坤坦坪垂-垃型垒埋城域培-基堂堆堕堡堪塑塔塞填境增墨壁士壮声处备夏夕-外多夜夥大天-夫央失头夹-夺奇-奉奋奏契奔套女奶她好如妇-妈妖妙妥妨妮妹妻姆姊-始姐-姑姓-委姿威娃娘娟婆婚媒嫁嫌子孔-孕字-孙孝孟季-孤学孩它宇-安宋-完宏宗-定宜-实审-室宪害家容宽-宿寂寄密富寒寝-察寡寸-对寻-导寿封射将尊小少尔尖尘尚尝尤就尺尼-尾局-层居屋屏展属屠山岁-岂岚-岛岳岸峰崇川-州巡工-巨巫差己-已巴巷币-布帅师希帐帝带席-帮常帽幅幕干-年幸幻-幽广庆床序库-底店庙府废度-座庭康-庸廉廖延-廷建开弃-弄弊式引弘弟-张弥-弦弯弱弹归-当形彩彬-彭彰-影彷役彻-彼往-征径-待很律-後徐徒得循微徵德心必-忆忌-忍志-忙忠忧快念忽态怎怒怕-怖思怡急性-怨怪总恋恐恢恨-恩恭息-恰恶恼悄悉悔悟-悠患您悲情惑惜惠惧-惨惯想惹愁愈-愉意愚感愧慈慎慕慢慧慰憾懂懒戏-戒或战截戴房-扁扇手才打托扣执扩扫-扯批找-技抄把抑抓投抗-折抢护-报披-抬抱抵抹抽担-拆拉拍拒拔拖拘招-拜拟拥-拦拨-择括拳拷拼拾-拿持指按挑挖挡挤-挥振挺捉捐捕损捡-换捷授-掉掌排探接控-措描-提插握援搜搞搬-搭摄摆摊摔摘摩摸撒撞播操-擎擦支收改攻放-政故效敌敏救教敝敢-散敦敬数敲整文斋斗料斜斥断斯-新方於-施旁旅旋族旗无既日-早旭时旺昆昌明-昏易星-映春昨昭是显晃晋晓晚晨普-景晴晶智暂暑暖-暗暮暴曰曲更曹曼曾-最月-有朋服朗望朝期木未-札术朱朵杀杂-权杉李材-村杜束条来杨杯-杰松-板析林果-枝枢枪-枫架柏-某染-柔查柯柳-柴标栋栏树校样-根格桃框案桌桑档桥梁梅梦梯-械检棋棒棚森椅植椰楚楼概榜模樱欠-欣欧欲欺款歉歌止-武歪死殊-残段毅母每毒比-毕毛毫氏民氛水永求汉汗汝江-污汤汪汽沈-沉沙沟沧河油治沿泉-泊法泛泡-泣泥注泰泳泽洋洗洛洞津洪洲活洽-派流浅测济浑浓浩-浪浮浴海消-涉涛涨涯液涵淑淡深混添清渐渡港渴游湖湾源溜溪滋滑满滥滴漂漏演漠漫潘潜潮澎激灌火灭灯-灰灵灿炉炎炮炸-点烂烈烤烦-烧热焦然煌煞照煮熊熟燃燕爆爬爱爵-爸爽片-版牌牙牛牡-牢牧物牲牵特-牺犯状犹狂狐狗狠独狮狱狼猛-猜献玄率玉王玛玩玫环-现玲玻珊珍珠班球理琪琳-琴瑜瑞瑰璃瓜瓦瓶甘甚甜生用田-申电男画畅界留略番疏疑疗疯疲疼疾病痕痛痴登白-百的皆-皇皮盈益监-盒盖盘盛盟目直相盼盾省眉看真-眠眼睛睡督瞧矛矣知短石码-砂砍研破础硕硬碍-碎碗碟碧碰磁磨示礼社祖祝-神祥票祸禁禅福秀-私秋科-秒秘租秤秦秩积-称移稀程稍稣稳稿究-穷穹-空穿突窗窝立站竞-章童端竹笑笔笛符笨第等筋答策筹签简算管箭箱篇篮籍米
类粉粒粗精糊糕-糖糟系素索紧紫累繁红约-级纪纯纲-纳纵纷-纸纽练-组细-终绍经结绕绘-给络统继绩-绪续维-绵综缓编缘缠缩缴缸缺罐罗罚罢罪置署羊美羞群羯羽翁翅翔翘翠翻-翼耀-老考者而-耍耐耗耳耶聊职联聚聪肉肚股肤-肥肩肯育胁胆背胎胖胞胡胶胸能脆脑脸腐腰腹腾-腿臂臣自臭至-致舍舒舞-舟航般舰船良色艺艾节芒芬-芭花芳苍苏苗若-苦英茂茫茶草荒荣药荷莉莎莫莱-莲获菜菩菲萍萤-营萧-萨落著葛蒋蒙蓉蓝蔡薄薪藉藏藤虎虑虫虹虽-虾蚁蛇蛋蛙蛮蜂蜜蝶融蟹蠢血行街衡衣补表袋被袭裁-裂装裕裤西要覆见-观规视览-觉角解言誉誓警计-订认讨-让训-记讲许论设-访证评识诉词译试诗诚话-诞询该-详语误说请-诸诺-读课谁调谅谈谊-谋谓谜谢谨谱谷豆象豪貌贝-负贡-败货-贪购贯贱贴-贵费-贺贼资赋-赌赏-赐赔赖赚-赛赞赠赢赤走赵起趁超越-趋趣足跃跌跑距跟路跳踏踢踩身躲车轨-轩转轮-轰轻载较辅-辆辈-辉辑输辛辞辨-辩辱边达迁迅过-迈迎运-近返还-这进-迟迪-迫述迷追退-送逃逆选-逊透-逐递途通-逛逝速-造逢逸逻-逼遇遍道遗遭遵避-邀邓那邦邪邮邱邻郎郑部郭都配酒酷-酸醉醒采释里-量金针钓钟钢钦钱钻铁铃铭银销-锁锅锋错锦键锺镇镜长门闪闭-问间闷闹闻阁阐阔队阮防-阶阻阿-陀附-陆陈降限院除险-陪陵-陷隆随-隐隔障难雄-集雨雪雯雳零-雷雾需震霖露霸-霹青靖静非靠面革鞋韩音页-顶项-须顽-顿预领-颇频颗-题额风飘-飙飞-食餐饭-饮饰-饱饼馆首香馨马驱驶驻驾验骑骗骚骤骨高鬼魂魅魔鱼鲁鲜鸟鸣鸭鸿鹅鹤鹰鹿麦麻黎黑默鼓鼠鼻齐齿龄龙龟] + + + GanjkHmsSEDFwWxhKzAeugXZ + + + + + + 一月 + 二月 + 三月 + 四月 + 五月 + 六月 + 七月 + 八月 + 九月 + 十月 + 十一月 + 十二月 + + + 1月 + 2月 + 3月 + 4月 + 5月 + 6月 + 7月 + 8月 + 9月 + 10月 + 11月 + 12月 + + + 一月 + 二月 + 三月 + 四月 + 五月 + 六月 + 七月 + 八月 + 九月 + 十月 + 十一月 + 十二月 + + + + + + + + + + + + + + + + + + + + + + + + + 星期日 + 星期一 + 星期二 + 星期三 + 星期四 + 星期五 + 星期六 + + + + 上午 + 下午 + + + 公元前 + 公元 + + + + + + + + 太平洋标准时间 + 太平洋夏令时间 + + + PST + PDT + + 洛杉矶 + + + + 太平洋标准时间 + 太平洋夏令时间 + + + PST + PDT + + 洛杉矶 + + + + 山区标准时间 + 山区夏令时间 + + + MST + MDT + + 丹佛 + + + + 山区标准时间 + 山区夏令时间 + + + MST + MDT + + 丹佛 + + + + 山区标准时间 + 山区标准时间 + + + MST + MST + + 凤凰城 + + + + 山区标准时间 + 山区标准时间 + + + MST + MST + + 凤凰城 + + + + 中央标准时间 + 中央夏令时间 + + + CST + CDT + + 芝加哥 + + + + 中央标准时间 + 中央夏令时间 + + + CST + CDT + + 芝加哥 + + + + 东部标准时间 + 东部夏令时间 + + + EST + EDT + + 纽约 + + + + 东部标准时间 + 东部夏令时间 + + + EST + EDT + + 纽约 + + + + 东部标准时间 + 东部标准时间 + + + EST + EST + + 印地安纳波利斯 + + + + 东部标准时间 + 东部标准时间 + + + EST + EST + + 印地安纳波利斯 + + + + 夏威夷标准时间 + 夏威夷标准时间 + + + HST + HST + + 檀香山 + + + + 夏威夷标准时间 + 夏威夷标准时间 + + + HST + HST + + 檀香山 + + + + 阿拉斯加标准时间 + 阿拉斯加夏令时间 + + + AST + ADT + + 安克雷奇 + + + + 阿拉斯加标准时间 + 阿拉斯加夏令时间 + + + AST + ADT + + 安克雷奇 + + + + 大西洋标准时间 + 大西洋夏令时间 + + + AST + ADT + + 哈利法克斯 + + + + 纽芬兰标准时间 + 纽芬兰夏令时间 + + + CNT + CDT + + 圣约翰 + + + + 纽芬兰标准时间 + 纽芬兰夏令时间 + + + CNT + CDT + + 圣约翰 + + + + 中欧标准时间 + 中欧夏令时间 + + + CET + CEST + + 
巴黎 + + + + 中欧标准时间 + 中欧夏令时间 + + + CET + CEST + + 巴黎 + + + + 格林威治标准时间 + 格林威治标准时间 + + + GMT + GMT + + 伦敦 + + + + 格林威治标准时间 + 格林威治标准时间 + + + GMT + GMT + + 卡萨布兰卡 + + + + 以色列标准时间 + 以色列夏令时间 + + + IST + IDT + + 耶路撒冷 + + + + 日本标准时间 + 日本标准时间 + + + JST + JST + + 东京 + + + + 日本标准时间 + 日本标准时间 + + + JST + JST + + 东京 + + + + 东欧标准时间 + 东欧夏令时间 + + + EET + EEST + + 布加勒斯特 + + + + 中国标准时间 + 中国标准时间 + + + CTT + CDT + + 上海 + + + + 中国标准时间 + 中国标准时间 + + + CTT + CDT + + 上海 + + + + + + + 安道尔第纳尔元 + ADD + + + 安道尔比塞塔元 + ADP + + + 阿联酋迪拉姆 + AED + + + 阿富汗尼 (1927-2002) + AFA + + + 阿富汗尼 + AFN + + + 阿发和伊萨法郎 + AIF + + + 阿尔巴尼亚列克 (1946-1961) + ALK + + + 阿尔巴尼亚列克 + ALL + + + 阿尔巴尼亚列克币 + ALV + + + 阿尔巴尼亚元外汇券 + ALX + + + 亚美尼亚德拉姆 + AMD + + + 荷兰安替兰盾 + ANG + + + 安戈拉宽扎 + AOA + + + 安戈拉宽扎 (1977-1990) + AOK + + + 安戈拉新宽扎 (1990-2000) + AON + + + 安戈拉宽扎 Reajustado (1995-1999) + AOR + + + 安哥拉埃斯库多 + AOS + + + 阿根廷奥斯特 + ARA + + + 阿根廷比索标准局 + ARM + + + 阿根廷比索 (1983-1985) + ARP + + + 阿根廷比索 + ARS + + + 奥地利西令 + ATS + + + 澳大利亚元 + AUD + + + 澳大利亚磅 + AUP + + + 阿鲁巴基尔德元 + AWG + + + 波士尼亚-赫塞哥维纳第纳尔元 + BAD + + + 波士尼亚-赫塞哥维纳兑换券 + BAM + + + 波士尼亚-赫塞哥维纳新第纳尔元 + BAN + + + 巴巴多斯元 + BBD + + + 孟加拉达卡 + BDT + + + 比利时法郎兑换券 + BEC + + + 比利时法郎 + BEF + + + 比利时法郎(金融) + BEL + + + 保加利亚硬列克 + BGL + + + 保加利亚社会主义列克 + BGM + + + 保加利亚新列克 + BGN + + + 保加利亚列克 (1879-1952) + BGO + + + 保加利亚列克外汇券 + BGX + + + 巴林第纳尔元 + BHD + + + 布隆迪法郎 + BIF + + + 百慕大元 + BMD + + + 百慕大磅 + BMP + + + 汶莱元 + BND + + + 玻利维亚 + BOB + + + 玻利维亚 (1863-1962) + BOL + + + 玻利维亚比索 + BOP + + + 巴西克鲁赛罗 (1967-1986) + BRB + + + 巴西克鲁塞罗 + BRC + + + 巴西克鲁塞罗 (1990-1993) + BRE + + + 巴西里尔 + BRL + + + 巴西克鲁塞罗 Cruzado Novo + BRN + + + 巴西克鲁塞罗 + BRR + + + 巴西克鲁塞罗 (1942-1967) + BRZ + + + 巴哈马元 + BSD + + + 巴哈马磅 + BSP + + + 不丹努扎姆 + BTN + + + 不丹卢比 + BTR + + + 缅元 + BUK + + + 缅甸卢比 + BUR + + + 波渣那扑拉 + BWP + + + 白俄罗斯新卢布 (1994-1999) + BYB + + + 白俄罗斯卢布 (1992-1994) + BYL + + + 白俄罗斯卢布 + BYR + + + 伯利兹元 + BZD + + + 属洪都拉斯元 + BZH + + + 加拿大元 + CAD + + + 刚果法郎 + CDF + + + 刚果共和国法郎 + CDG + + + 刚果扎伊尔 + CDL + + + 中非共和国 CFA 法郎 + CFF + + + 瑞士法郎 + CHF + + + 
库克群岛元 + CKD + + + 智利肯杜 + CLC + + + 智利埃斯库多 + CLE + + + 智利 Unidades de Fomento + CLF + + + 智利 比索 + CLP + + + 喀麦隆 CFA 法郎 + CMF + + + 中国人民票元 + CNP + + + 中国美元外汇券 + CNX + + + 人民币 + + + + 哥伦比亚纸比索 + COB + + + 刚果 CFA 法郎 + COF + + + 哥伦比亚比索 + COP + + + 哥斯达黎加科隆 + CRC + + + 捷克克郎 + CSC + + + 捷克硬克郎 + CSK + + + 古巴比索 + CUP + + + 古巴外汇券 + CUX + + + 佛得角埃斯库多 + CVE + + + 库拉盾 + CWG + + + 塞浦路斯磅 + CYP + + + 捷克克郎 + CZK + + + 东德奥斯特马克 + DDM + + + 德国马克 + DEM + + + 德国司萡马克 + DES + + + 吉布提法郎 + DJF + + + 丹麦克朗 + DKK + + + 多米尼加比索 + DOP + + + 阿尔及利亚第纳尔元 + DZD + + + 阿尔及利亚新法郎 + DZF + + + 阿尔及利亚法郎比斯查 + DZG + + + 厄瓜多尔苏克雷 + ECS + + + 爱沙尼亚克朗 + EEK + + + 埃及磅 + EGP + + + 厄立特里亚纳福卡 + ERN + + + 西班牙马赛塔 + ESP + + + 埃塞俄比亚比尔 + ETB + + + 埃塞俄比亚元 + ETD + + + 欧元 + + + + 芬兰玛卡 + FIM + + + 芬兰玛卡 (1860-1962) + FIN + + + 斐济元 + FJD + + + 斐济磅 + FJP + + + 福克兰群岛磅 + FKP + + + 法罗群岛克朗 + FOK + + + 法国法郎 + FRF + + + 法国法郎比斯查/法郎庞加莱 + FRG + + + 英磅 + £ + + + 乔治亚库蓬拉瑞特 + GEK + + + 乔治亚库蓬拉瑞 + GEL + + + 加纳塞第 + GHC + + + 加纳旧塞第 + GHO + + + 加纳磅 + GHP + + + 加纳重评估塞第 + GHR + + + 直布罗陀磅 + GIP + + + 格陵兰克朗 + GLK + + + 冈比亚达拉西 + GMD + + + 冈比亚磅 + GMP + + + 几内亚法郎 + GNF + + + 几内亚法郎 (1960-1972) + GNI + + + 几内亚Syli + GNS + + + 瓜德罗普岛法郎 + GPF + + + 赤道几内亚爱克威乐 + GQE + + + 赤道几内亚法郎 + GQF + + + 赤道几内亚匹塞塔 + GQP + + + 希腊德拉克马 + GRD + + + 希腊新德拉克马 + GRN + + + 危地马拉 + GTQ + + + 法国属圭亚那法郎 + GUF + + + 葡萄牙几内亚埃斯库多 + GWE + + + 葡萄牙几内亚迷洱瑞 + GWM + + + 几内亚比索 + GWP + + + 圭亚那元 + GYD + + + 港元 + HK$ + + + 洪都拉斯勒皮拉 + HNL + + + 克罗地亚第纳尔元 + HRD + + + 克罗地亚库娜元 + HRK + + + 海地古德 + HTG + + + 匈牙利缶瑞特 + HUF + + + 北爱尔兰磅 + IBP + + + 印度尼西亚尼卡盾 + IDG + + + 印度尼西亚爪哇盾 + IDJ + + + 印度尼西亚新盾 + IDN + + + 印度尼西亚盾 + IDR + + + 爱尔兰磅 + IEP + + + 以色列谢客尔 + ILL + + + 以色列磅 + ILP + + + 以色列新谢客尔 + ILS + + + 曼岛磅 + IMP + + + 印度卢比 + =0#Rs.|1#Re.|1<Rs. 
+ + + 伊拉克第纳尔元 + IQD + + + 伊朗里亚 尔 + IRR + + + 冰岛克朗 + ISK + + + 意大利里拉 + ITL + + + 泽西磅 + JEP + + + 牙买加元 + JMD + + + 牙买加磅 + JMP + + + 约旦第纳尔元 + JOD + + + 日元 + JP¥ + + + 肯尼亚先令 + KES + + + 吉尔吉斯坦萨姆 + KGS + + + 柬埔寨旧里尔 + KHO + + + 柬埔寨里尔 + KHR + + + 基里巴斯元 + KID + + + 科摩罗法郎 + KMF + + + 北朝鲜人民币 + KPP + + + 北朝鲜币 + KPW + + + 韩国元 + KRH + + + 韩国旧币 + KRO + + + 韩国币 + + + + 科威特第纳尔元 + KWD + + + 开曼岛元 + KYD + + + 哈萨克卢布 + KZR + + + 哈萨克腾额 + KZT + + + 老挝基普 + LAK + + + 黎巴嫩磅 + LBP + + + 列支敦士登法郎 + LIF + + + 斯里兰卡卢比 + LKR + + + 锡兰卢比 + LNR + + + 利比亚元 + LRD + + + 莱索托 + LSL + + + 立陶宛利塔 + LTL + + + 立陶宛塔咯呐司 + LTT + + + 卢森堡法郎 + LUF + + + 拉脱维亚拉特 + LVL + + + 拉脱维亚卢布 + LVR + + + 利比亚英国军队军方里拉 + LYB + + + 利比亚第纳尔元 + LYD + + + 利比亚磅 + LYP + + + 摩洛哥迪拉姆 + MAD + + + 摩洛哥法郎 + MAF + + + 摩洛哥新法郎 + MCF + + + 摩洛哥革命时期货币 + MCG + + + 南特市列伊币 + MDC + + + 南特市列伊 + MDL + + + 南特市卢布 + MDR + + + 马达加斯加阿日瑞 + MGA + + + 马达加斯加法郎 + MGF + + + 马绍尔群岛元 + MHD + + + 马其顿戴纳 + MKD + + + 马其顿戴纳 (1992-1993) + MKN + + + 马里法郎 + MLF + + + 缅甸开亚特 + MMK + + + 缅甸元外汇券 + MMX + + + 蒙古图格里克 + MNT + + + 澳门元 + P + + + 马蒂尼法郎 + MQF + + + 里塔尼亚乌吉亚 + MRO + + + 马尔他里拉 + MTL + + + 马尔他磅 + MTP + + + 毛里求斯卢比 + MUR + + + 马尔代夫群岛卢比 + MVP + + + 马尔代夫群岛芦菲亚 + MVR + + + 马拉维夸恰 + MWK + + + 马拉维磅 + MWP + + + 墨西哥比索 + MXN + + + 墨西哥银比索 (1861-1992) + MXP + + + 马来西亚币 + MYR + + + 莫桑比克埃斯库多 + MZE + + + 莫桑比克币 + MZM + + + 纳米比亚元 + NAD + + + 新卡里多尼亚新法郎 + NCF + + + 尼日利亚奈拉. 
+ NGN + + + 尼日利亚磅 + NGP + + + 新赫布里底 CFP 法郎 + NHF + + + 尼加拉瓜科多巴 + NIC + + + 尼加拉瓜金科多巴 + NIG + + + 尼加拉瓜金哥多华 + NIO + + + 荷兰盾 + NLG + + + 挪威克朗 + NOK + + + 尼泊尔卢比 + NPR + + + 新西兰元 + NZD + + + 新西兰磅 + NZP + + + 阿曼里尔 + OMR + + + 阿曼里尔塞迪 + OMS + + + 巴拿马巴波亚 + PAB + + + 车城卢布券 + PDK + + + 车城新卢布 + PDN + + + 车城卢布 + PDR + + + 秘鲁因蒂 + PEI + + + 秘鲁索额奴艾挝 + PEN + + + 秘鲁索额 + PES + + + 巴布亚新几内亚基那 + PGK + + + 菲律宾比索 + PHP + + + 巴基斯坦卢比 + PKR + + + 波兰兹罗提 + PLN + + + 波兰美元 外汇券 + PLX + + + 波兰兹罗提 (1950-1995) + PLZ + + + 巴勒斯坦磅 + PSP + + + 葡萄牙倥涂 + PTC + + + 葡萄牙铃木 + PTE + + + 巴拉圭币 + PYG + + + 卡塔尔里亚尔 + QAR + + + 留尼汪联合会法郎 + REF + + + 罗马尼亚镭 + ROL + + + 罗马尼亚新镭 + RON + + + 俄国卢布 + RUB + + + 俄国卢布 (1991-1998) + RUR + + + 卢旺达法郎 + RWF + + + 沙特里亚尔 + SRl + + + 沙特特权里亚尔 + SAS + + + 所罗门群岛元 + SBD + + + 塞舌尔卢比 + SCR + + + 苏丹第纳尔元 + SDD + + + 苏丹磅 + SDP + + + 瑞士克朗 + SEK + + + 新加坡元 + S$ + + + 圣赫勒拿磅 + SHP + + + 斯洛文尼亚淘拉磅 + SIB + + + 斯洛文尼亚淘拉 + SIT + + + 斯洛伐克科路那 + SKK + + + 赛拉里昂币 + SLL + + + 圣马利诺里拉 + SML + + + 索马里先令 + SOS + + + 索马里大陆先令 + SQS + + + 苏里南基尔 + SRG + + + 苏格兰磅 + SSP + + + 圣多美普林西比都比拉 + STD + + + 圣多美普林西比铃木 + STE + + + 苏联新卢布 + SUN + + + 苏联卢布 + SUR + + + 萨尔瓦多科洛涅 + SVC + + + 叙利亚磅 + SYP + + + 斯威士兰币 + SZL + + + 特克斯和凯科斯群岛克朗 + TCC + + + 泰铢 + THB + + + 塔吉克斯坦卢布 + TJR + + + 塔吉克斯坦索莫尼 + TJS + + + 土库曼斯坦币 + TMM + + + 突尼斯第纳尔元 + TND + + + 汤加币 + TOP + + + 汤加磅 + TOS + + + 帝汶埃斯库多 + TPE + + + 帝汶帕塔卡 + TPP + + + 土耳其里拉 + TRL + + + 特立尼达和多巴哥元 + TTD + + + 特立尼达和多巴哥旧元 + TTO + + + 图瓦卢元 + TVD + + + 新台币 + NT$ + + + 坦桑尼亚先令 + TZS + + + 乌克兰赫里纳 + UAH + + + 乌克兰币 + UAK + + + 乌干达先令 (1966-1987) + UGS + + + 乌干达先令 + UGX + + + 美元 + US$ + + + 美元 (下一天) + USN + + + 美元 (同一天) + USS + + + 乌拉圭比索伏尔特 + UYF + + + 乌拉圭比索 (1975-1993) + UYP + + + 乌拉圭比索 + UYU + + + 乌兹别克斯坦货币券 + UZC + + + 乌兹别克斯坦币 + UZS + + + 梵谛冈里拉 + VAL + + + 北越皮艾斯特盾 + VDD + + + 北越皮艾斯特新盾 + VDN + + + 北越皮艾斯特明盾 + VDP + + + 内瑞拉博利瓦 + VEB + + + 英国维京群岛币 + VGD + + + 越南盾 + VND + + + 越南明盾 + VNN + + + 越南共和国盾 + VNR + + + 越南国家盾 + VNS + + + 瓦努阿图 + VUV + + + 西萨摩亚磅 + WSP + + + 西萨摩亚塔拉 + WST + + + 亚洲第纳尔元帐户单位 + XAD + + + 亚洲货币单位 
+ XAM + + + 黄金 + XAU + + + 欧洲复合单位 + XBA + + + 欧洲金融单位 + XBB + + + 东加勒比元 + XCD + + + 特别提款权 + XDR + + + 欧洲货币单位 + XEU + + + 法国金法郎 + XFO + + + 伊斯兰第纳尔元 + XID + + + 法国城市偌佛法郎 + XMF + + + 法国安的列斯 CFA 法郎 + XNF + + + 也门第纳尔元 + YDD + + + 也门阿马迪里尔 + YEI + + + 也门里尔 + YER + + + 南斯拉夫硬第纳尔元 + YUD + + + 南斯拉夫联邦第纳尔元 + YUF + + + 南斯拉夫 1994 第纳尔元 + YUG + + + 南斯拉夫偌威第纳尔元 + YUM + + + 南斯拉夫兑换第纳尔元 + YUN + + + 南斯拉夫十月第纳尔元 + YUO + + + 南斯拉夫改革第纳尔元 + YUR + + + 南非兰特 (金融) + ZAL + + + 南非磅 + ZAP + + + 南非兰特 + ZAR + + + 赞比亚马拉维 + ZMK + + + 赞比亚磅 + ZMP + + + 新扎伊尔元 + ZRN + + + 扎伊尔元 + ZRZ + + + 津巴布韦元 + ZWD + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/zh_CN.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/zh_CN.xml new file mode 100644 index 0000000..0e39fc2 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/zh_CN.xml @@ -0,0 +1,99 @@ + + + + + + + + + + + + + + + + + yyyy'年'M'月'd'日'EEEE + + + + + yyyy'年'M'月'd'日' + + + + + yyyy-M-d + + + + + yy-M-d + + + + + + + + ahh'时'mm'分'ss'秒' z + + + + + ahh'时'mm'分'ss'秒' + + + + + ahh:mm:ss + + + + + ah:mm + + + + + + + {1} {0} + + + + + + + + + + + #,##0.###;-#,##0.### + + + + + + + #E0 + + + + + + + #,##0% + + + + + + + ¤#,##0.00;-¤#,##0.00 + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/zh_HK.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/zh_HK.xml new file mode 100644 index 0000000..f85b3a5 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/zh_HK.xml @@ -0,0 +1,772 @@ + + + + + + + + + + + + 阿布哈西亞文 + 亞齊文 + 阿僑利文 + 阿當莫文 + 阿迪各文 + 阿緯斯陀文 + 南非荷蘭文 + 非閃族及非亞語言 + 阿弗里希利文 + 阿坎文 + 阿卡德文 + 阿留申文 + 阿爾岡昆文 + 阿拉貢文 + 古英文 (ca.450-1100) + 阿帕奇語言 + 阿拉米文 + 阿勞坎文 + 阿拉帕霍文 + 其他人工語言 + 阿拉瓦克文 + 阿薩姆文 + 阿斯圖里亞文 + 阿薩巴斯卡文 + 澳洲英文 + 阿法文 + 艾馬拉文 + 亞塞拜然文 + 巴什客爾文 + 班達文 + 巴米累克文 + 俾路支文 + 巴姆巴拉文 + 巴厘文 + 巴薩文 + 波羅的文(其他) + 白俄羅斯文 + 貝扎文 + 別姆巴文 + 柏柏爾文 + 保加利亞文 + 比哈爾文 + 博傑普爾文 + 比斯拉馬文 + 比科爾文 + 比尼文 + 錫克錫卡文 + 班圖文 + 藏文 + 布拉杰文 + 波士尼亞文 + 巴塔克文 + 布里阿特文 + 布吉斯文 + 加泰羅尼亞文 
+ 卡多文 + 中美印第安文(其他) + 巴勒比文 + 高加索文(其他) + 車臣文 + 宿務族文 + 克爾特文(其他) + 查莫洛文 + 奇布查文 + 查加文 + 處奇斯文 + 馬里文 + 契奴克文 + 喬克托文 + 奇佩瓦揚文 + 柴羅基文 + 沙伊安文 + 查米克文 + 科普特文 + 歐洲腔調和洋涇濱,源自英文的(其他) + 歐洲腔調和洋涇濱,源自法文的(其他) + 歐洲腔調和洋涇濱,源自葡萄牙文的(其他) + 克裡文 + 克里米亞半島的土耳其文;克里米亞半島的塔塔爾文 + 克里奧爾文和皮欽文 + 卡舒布文 + 庫施特語系(其他) + 楚瓦甚文 + 威爾士文 + 丹麥文 + 達科他文 + 達爾格瓦文 + 迪雅克文 + 德拉瓦 + 斯拉夫 + 多格里布文 + 丁卡文 + 多格來文 + 德拉威文(其他) + 下索布文 + 杜亞拉文 + 荷蘭,中古 (ca. 1050-1350) + 迪維西文 + 迪尤拉文 + 埃緯文 + 埃菲克文 + 古埃及文) + 艾卡朱克文 + 希臘文 + 埃蘭文 + 英文,中世紀 (1100-1500) + 世界語 + 愛沙尼亞文 + 依汪都文 + 芳族文 + 芳蒂文 + 富拉文 + 芬蘭文 + 芬蘭-烏戈爾族文(其他) + 斐濟文 + 法羅文 + 豐文 + 弗留利文 + 弗里斯蘭文 + 愛爾蘭文 + 加族文 + 加約文 + 葛巴亞文 + 蘇格蘭 - 蓋爾文 + 德國的(其他) + 吉茲文 + 吉爾伯特群島文 + 加里西亞文 + 德文, 中古全盛時期 (ca.1050-1500) + 德文,上古全盛時期 (ca.750-1050) + 岡德文 + 科隆達羅文 + 哥特文 + 哥博語 + 古希臘文 (至 1453) + 吉亞拉塔文 + 曼島文 + 圭契文 + 海達文 + 希伯來文 + 北印度文 + 希利蓋農文 + 赫馬查利文 + 赫梯文 + 孟文 + 西里莫圖土文 + 克羅埃西亞文 + 上索布文 + 海地人 + 胡帕文 + 亞美尼亞文 + 赫雷羅文 + 拉丁國際文 + 伊班文 + 印尼文 + 拉丁國際文 + 伊布文 + 四川話 + 伊喬文 + 依奴皮維克文 + 伊洛闊文 + 印度語系(其他) + 印歐語系(其他) + 印古什文 + 伊朗文 + 伊芳朗文 + 易洛魁文 + 冰島文 + 義大利文 + 因紐特文 + 邏輯文 + 猶太教-波斯文 + 猶太教-阿拉伯文 + 喬治亞文 + 卡拉卡爾帕克文 + 卡比爾文 + 卡琴文 + 卡姆巴文 + 克倫文 + 卡威文 + 卡巴爾達文 + 剛果文 + 卡西文 + 其他科伊桑文 + 和闐[與闐]文 + 吉庫尤人 + 廣亞馬文 + 哈薩克文 + 格陵蘭文 + 高棉文 + 金邦杜文 + 坎那達文 + 韓文 + 貢根文 + 科斯雷恩文 + 克佩列文 + 卡努裡文 + 卡拉柴-包爾卡爾文 + 克魯文 + 庫魯科文 + 克什米爾文 + 庫爾德文 + 庫密克文 + 庫特奈文 + 康瓦耳文 + 吉爾吉斯文 + 拉迪諾文 + 拉亨達文 + 蘭巴文 + 盧森堡文 + 立陶宛文 + 干達文 + 林堡文 + 寮國文 + 蒙古文 + 洛齊文 + 魯巴加丹加文 + 魯巴魯魯亞文 + 路易塞諾文 + 盧恩達文 + 盧奧文 + 盧晒文 + 馬都拉文 + 馬加伊文 + 邁蒂利文 + 望加錫文 + 曼丁哥文 + 南島文 + 馬賽文 + 莫克沙文 + 曼達文 + 門德文 + 馬爾加什文 + 愛爾蘭文,中古 (900-1200) + 馬紹爾文 + 米克馬克文 + 米南卡堡文 + 其他語言 + 馬其頓文 + 其他高棉語系 + 馬來亞拉姆文 + 滿族文 + 曼尼普裡文 + 馬諾波文 + 摩爾多瓦文 + 莫霍克文 + 莫西文 + 馬拉地文 + 馬來文 + 馬爾他文 + 多種語言 + 蒙達文 + 克里克文 + 馬爾尼裡文 + 緬甸文 + 馬雅文 + 厄爾茲亞文 + 諾魯文 + 納瓦特文 + 其他北美印地安文 + 拿波里文 + 挪威波克默爾文 + 北地畢列文 + 德國北部的德文; 薩克遜文 + 尼泊爾文 + 尼瓦爾文 + 恩東加文 + 尼亞斯文 + 其他尼日剛果語系 + 紐埃文 + 荷蘭文 + 新挪威文 + 諾蓋文 + 古諾爾斯文 + 南地畢列文 + 北索托文 + 努比亞文 + 納瓦約文 + 尼揚賈文 + 尼揚韋齊文 + 尼揚科萊文 + 尼奧囉文 + 尼茲馬文 + 奧西坦文 + 奧杰布瓦文 + 歐里亞文 + 奧塞提文 + 歐塞奇文 + 鄂圖曼土耳其文 (1500-1928) + 奧托米文 + 其他巴布亞諸語言 + 潘加辛文 + 巴列維文 + 潘帕嘉文 + 帕皮阿門托文 + 帛琉文 + 古波斯文 (ca.600-400 B.C.) 
+ 其他菲律賓文 + 腓尼基文 + 巴利文 + 波蘭文 + 波那貝文 + 印度古代及中世紀之中部及北部方言 + 普羅文斯文 (to 1500) + 普什圖文 + 蓋丘亞文 + 拉賈斯坦諸文 + 復活島文 + 拉羅通加文 + 里托羅曼斯文 + 羅馬尼亞文 + 其他羅曼文 + 吉普賽文 + 盧安達文 + 桑達韋文 + 雅庫特文 + 其他南美印第安文 + 薩利甚文 + 薩瑪利亞阿拉姆文 + 撒撒克文 + 散塔利文 + 撒丁文 + 蘇丹文 + 北方薩米文 + 瑟爾卡普文 + 其他閃族語言 + 古愛爾蘭文(至 900) + 手語 + 塞爾維亞克羅埃西亞文 + 撣文 + 僧伽羅文 + 希達摩文 + 大蘇文 + 其他漢藏文 + 斯洛維尼亞文 + 其他斯拉夫文 + 薩摩亞文 + 南薩米文 + 其他薩米文 + 魯勒薩米文 + 伊納裡薩米文 + 斯科特薩米文 + 塞內加爾文 + 索尼基文 + 索馬利文 + 索格底亞納文 + 桑海文 + 阿爾巴尼亞文 + 塞爾維亞文 + 塞雷爾文 + 非洲撒哈拉沙漠邊緣地帶文 + 蘇丹文 + 蘇庫馬文 + 蘇蘇文 + 蘇美文 + 史瓦希里文 + 古敘利亞文 + 坦米爾文 + 其他泰文 + 泰盧固文 + 提姆文 + 泰雷諾文 + 泰頓文 + 提格利尼亞文 + 蒂格雷文 + 提夫文 + 土庫曼文 + 托克勞文 + 特林基特文 + 塔馬奇克文 + 突尼西亞文 + 東加文 + 湯加文(尼亞薩文) + 托比辛文 + 欽西安文 + 韃靼文 + 圖姆布卡文 + 圖皮文 + 其他阿爾泰諸文 + 吐瓦魯文 + 繁體中文 + 大溪地文 + 土凡文 + 沃蒂艾克文 + 維吾爾文 + 烏加列文 + 烏克蘭文 + 姆本杜文 + 未確定的 + 烏爾都文 + 烏茲別克文 + 越南文 + 溫達文 + 沃提克文 + 瓦隆文 + 夸基武特文 + 瓦拉莫文 + 瓦瑞文 + 瓦紹文 + 文德文 + 沃爾夫文 + 卡爾梅克文 + 班圖文 + 瑤文 + 雅浦文 + 意第緒文 + 約魯巴文 + 愛斯基摩文 + 壯文 + 薩波特克文 + 澤納加文 + 贊德文 + 祖魯文 + 祖尼文 + + + 安道爾 + 阿拉伯聯合大公國 + 安地卡及巴布達 + 安圭拉島 + 阿爾巴尼亞 + 亞美尼亞 + 荷屬安地列斯 + 南極洲 + 美屬薩摩亞群島 + 奧地利 + 澳洲 + 阿路巴 + 亞塞拜然 + 波士尼亞與赫塞格維納 + 巴貝多 + 孟加拉 + 比利時 + 布基納法索 + 保加利亞 + 浦隆地 + 貝南 + 百慕達 + 汶萊 + 玻利維亞 + 巴哈馬 + 布威島 + 波札那 + 白俄羅斯 + 貝里斯 + 可可斯群島 + 剛果民主共和國 + 中非共和國 + 剛果 + 科特迪瓦 + 庫克群島 + 喀麥隆 + 中華人民共和國 + 哥倫比亞 + 哥斯大黎加 + 維德角 + 聖誕島 + 賽普勒斯 + 捷克共和國 + 德國 + 吉布地 + 丹麥 + 多明尼加 + 多明尼加共和國 + 阿爾及利亞 + 厄瓜多爾 + 愛沙尼亞 + 厄利垂亞 + 衣索比亞 + 芬蘭 + 斐濟 + 福克蘭群島 + 密克羅尼西亞群島 + 法羅群島 + 法國 + 加彭 + 英國 + 格瑞納達 + 喬治亞共和國 + 法屬圭亞那 + 迦納 + 直布羅陀 + 格陵蘭 + 甘比亞 + 幾內亞 + 哥德普洛 + 赤道幾內亞 + 希臘 + 南喬治亞與南三明治群島 + 瓜地馬拉 + 關島 + 幾內亞比索 + 蓋亞納 + 中華人民共和國香港特別行政區 + 赫德與麥克當諾群島 + 宏都拉斯 + 克羅埃西亞 + 印尼 + 愛爾蘭 + 英屬印度洋領土 + 冰島 + 義大利 + 牙買加 + 約旦 + 肯亞 + 吉爾吉斯 + 高棉 + 吉里巴斯 + 科摩羅群島 + 聖克里斯多福及尼維斯 + 北韓 + 南韓 + 開曼群島 + 哈薩克 + 寮國 + 聖露西亞 + 列支敦斯登 + 斯里蘭卡 + 賴比瑞亞 + 賴索扥 + 盧森堡 + 拉脫維亞 + 利比亞 + 摩納哥 + 摩爾多瓦 + 馬達加斯加 + 馬紹爾群島 + 馬其頓 + 馬利 + 緬甸 + 澳門特別行政區 + 北馬里安納 + 馬丁尼克島 + 茅利塔尼亞 + 蒙特色拉特島 + 馬爾他 + 模里西斯 + 馬爾地夫 + 馬拉威 + 馬來西亞 + 莫三比克 + 納米比亞 + 新喀里多尼亞群島 + 尼日 + 諾福克島 + 奈及利亞 + 荷蘭 + 尼泊爾 + 諾魯 + 紐威島 + 紐西蘭 + 阿曼王國 + 巴拿馬 + 秘魯 + 法屬玻里尼西亞 + 巴布亞紐幾內亞 + 菲律賓 + 波蘭 + 聖彼德與密啟崙 + 皮特康 + 玻多黎克 + 巴勒斯坦 + 帛琉 + 卡達 + 留尼旺 + 羅馬尼亞 + 俄羅斯 + 盧安達 + 沙烏地阿拉伯 + 索羅門群島 + 塞席爾 + 蘇丹 + 聖赫勒拿島 + 斯洛維尼亞 
+ 冷岸及央麥恩群島 + 獅子山 + 聖馬利諾 + 塞內加爾 + 索馬利亞 + 塞爾維亞 + 蘇利南 + 聖多美及普林西比 + 薩爾瓦多 + 敘利亞 + 史瓦濟蘭 + 土克斯及開科斯群島 + 查德 + 法國南屬地 + 多哥共和國 + 泰國 + 塔吉克 + 托克勞群島 + 東帝文 + 土庫曼 + 突尼西亞 + 東加 + 千里達及托巴哥 + 吐瓦魯 + 臺灣 + 坦尚尼亞 + 烏克蘭 + 烏干達 + 美屬邊疆群島 + 美國 + 烏拉圭 + 烏茲別克 + 梵帝岡 + 聖文森及格瑞那丁 + 委內瑞拉 + 英屬維爾京群島 + 美屬維爾京群島 + 萬那杜 + 瓦利斯和福杜納群島 + 薩摩亞群島 + 葉門 + 馬約特 + 尚比亞 + 辛巴威 + + + 已修訂 + + + 佛教曆法 + 農曆 + 公曆 + 希伯來曆法 + 伊斯蘭曆法 + 伊斯蘭城市曆法 + 日本曆法 + 直接順序 + 電話簿順序 + 拼音順序 + 筆劃顺序 + 傳統曆法 + + + + [一-丁七丈-不且世丙丟並中串丸-丹主乃久么之乎-乏乖乘-乙九也乾亂了予事-二于云-互五-井些亞亡交亦亨享-京亮人什-仁仇今-介仍仔他付-仙代-以仰仲件任份企伊伍休伙伯-估伴伸似但佈位-住佔-何余佛-作你佩佳使來例供依侯侵便係-促俊俗保俠-信修俱個倍們-倒候-倚借倫值假偉偏做停健側-偷傑備傢傲-傳傷傻傾僅像僑價儀億儒儘優允元-充兇-光克免兒兔入內-兩八-兮共兵-典兼冊再冒冠冬冰冷准凌凝凡凰-凱出函刀分-切刊列初判-別利-刪到制-刷刺-刻則前剛剩-剪副割創劃劇劉劍力功-加助-劫勁勇勉勒動務勝-勞勢勤勵勸勿包化-北區-十千升-午半卒協南博卡印-危即卷卻厚原厭厲去參又及-友反叔取-受口-另叫-叭可-台史-右司吃-各合-吊同-后吐-向君吝吟否-吧含吳吵吸-吹吾呀呂呆告呢周味呵呼-命和咖咦-咧咪咬咱哀-品哇-哉哎員哥-哦哩-哪哭哲唉唐唬售-唯唱唷-唸商啊問啟啡啥-啦啪喂善喇喊喔喜-喝喬單喵嗎嗚嗨嗯嘆嘉嘗嘛嘴嘻嘿器噴嚇嚴囉四回因困固圈國圍園-圓圖團土在地圾址均坐坡坤坦坪垃型城域執培-基堂堅-堆堪報場塊塔塗塞填塵境增墨墮壁壓壘壞壢士壯壽夏夕-外多夜夠夢夥大天-夫央失夾奇-奉奏契奔套奧奪奮女奶她好如妙妥妨妮妳妹妻姆姊-始姐-姑姓-委姿威娃娘婆婚婦媒媽嫌子孔字-存孝孟季-孤孩孫學它宇-安宋-完宏宗-定宜客-室宮害家容宿寂寄密富寒寞-察寢實-寧審寫-寬寶封射將-專尊-尋對-小少尖尚尤就尺尼尾局-屁居-屆屋屏展屠層屬山岸峰島崇嵐嶺川-州巡工-巨巫差己-已巴巷市-布希帝帥師席帳帶常帽幅幕幣幫干-年幸-幹幻-幾床序底店府度-座庫庭康-庸廉廖廠廢-廣廳延-廷建弄式引弘弟弦弱張強彈彌彎形彥彩彬-彭彰-影役彼往-征待很律-後徐-徒得從復微徵德徹心必忌-忍志-忙忠快念忽怎怒怕-怖思怡急性-怨怪恆恐恢恥恨-恩恭息-恰悅悉悔悟-悠您悲悶情惑惜惠-惡惱想惹愁愈-愉意愚-愛感慈態慕慘慢-慣慧慮慰慶慾憂憐-憑憲憶憾懂應懶-懷懼戀成-戒或截戰戲戴戶房-扁扇手才打托扣扭扯批找-技抄把抓投抗-折披-抬抱抵抹抽拆拉拋拍拒拔拖招-拜括拳拼拾-拿持指按挑挖振挺捐捕捨捲捷掃授-掉掌排掛採-探接控-推措描-提插揚-換握揮援損搖搞搬-搭搶摘摩摸撐撞撥播撿擁擇擊-擋操-擎擔據擠擦擬擴擺擾攝支收改攻放-政故效敏救敗教敝敢-散敦敬整-敵數文斗料斯-新斷方於-施旁旅旋族旗既日-旦早旭昇昌明-昏易星-映春昨昭是時晚晨普-景晴晶智暑暖-暗暫暴曉曰曲更書曼曾-最會月-有朋服朗望朝期木未-本朱朵李材-村杜束杯東松-板析林果-枝架柏-某染-柔查柳校核-根格桃案桌桑梁梅條梯-械棄棋棒棚森椅植椰楊楓楚業極概榜榮構槍樂樓標樞模樣樹橋機橫檔檢欄權次欣欲欺欽-款歉歌歐歡-武歲歷-歸死殊殘段殺殼毀毅母每毒比毛毫氏民氣水永求汝江-污汪決汽沈-沉沒沖沙河油治沿況泉法泡-波泥注泰泳洋洗洛洞洩-洪洲活洽-派流浩-浪浮海消-涉涯液涵涼淑淚淡淨深混淺清減渡測港游湖湯源準溝溪-溫滄-滅滋滑滴滾-滿漂漏演漠漢漫漲漸潔潛潮澤澳激濃濟濤濫灌灣火灰災炎炮炸為烈烏烤無焦然煙煞照煩熊熟熱燃燈燒營爆爐爛爬-爭爵-父爸爺爽-爾牆-版牌牙牛牠牧物牲特牽犧犯狀狂狐狗狠狼猛-猜猶獄-獅獎獨獲獸獻玄率玉王玩玫玲珍珠班現球理琪琴瑜瑞瑪瑰環瓜瓦瓶甘甚甜生產用田-申男界留畢略番-畫異當疏疑疼病痕痛痴瘋療癡登-百的皆-皇皮盃益盛-盜盟盡監-盤目直相盼盾省眉看真-眠眼眾睛睡督瞧瞭矛矣知短石砂砍研砲破硬碎碗碟碧碩碰確碼磁磨礎礙示社祖祝-神祥票禁禍福禪禮秀-私秋科-秒秘租秤秦移稅程稍種稱稿穌-積穩究穹-空穿突窗窩窮立站竟-章童端競竹笑笛符笨第筆等筋答策算管箭箱節範篇築簡簽籃籌-籍米粉粗精糊糕糟系紀約-紅納純紙-紛素索紫累-細紹終組結絕絡給統-絲經綜綠維綱-網緊緒線緣編-緩練縣縮縱總-績繁織繞繪繳繼續缸缺罪置罰署罵罷羅羊美羞群義羽翁習
翔翹翻-翼耀-老考者而-耍耐耗耳耶聊聖聚聞聯-聰聲職聽肉肚股肥肩肯育背胎胖胞胡胸能脆脫腦腰腳腿膽臉臥臨自臭至-致臺與-舊舍舒舞-舟航般船艦良色艾芬花芳若-苦英茫茶草荒荷莉-莊莎莫菜菩華菲萊萬落葉著葛蒙蒼蓋蓮蔡蔣蕭薄薦薩-薪藉藍藏藝藤-藥蘇蘭虎處虛號虧蛋蛙蜂蜜蝶融螢蟲蟹蠍蠻血行術街衛衝衡衣表袋被裁-裂裕補-裝裡製複褲西要覆見規視親覺覽觀角解觸言訂計訊討訓託-記訪設許訴註-証評詞詢試詩話-詳誇誌-認誓誕語誠誤說誰課誼調談請諒論諸諾謀謂講謝證識譜警譯-議護譽讀變讓讚谷豆豈豐象豪豬貌貓貝-貞負-貢貨貪-責貴買費-貼賀資賓賜賞賢-賤賦質賭賴賺購-賽贈贊贏赤走起超越趕趙趣趨足跌跑距跟跡路跳踏踢蹟蹤躍身躲車軌-軍軒軟較載輔-輕輛輝輩-輪輯輸轉轟辛辦辨辭辯辱-農迅迎近迪-迫述迴迷追退-送逃逆透-逐途這-逛逝速-造逢-連週-進逸逼遇遊-運遍-過道-違遙遜遠適遭遲遷-選遺避-邁還邊邏那邦邪邱郎部郭郵都鄉鄭鄰配酒酷-酸醉醒醜醫采釋-量金針釣鈴銀銘銳銷鋒鋼錄錢錦錯鍋鍵鍾鎖鎮鏡鐘鐵鑑長門閃閉開閒-間閣閱闆闊關闡防阻阿-陀附降限院-除陪陰陳陵陷-陸陽隆隊階隔際-障隨險隱隻雄-集雖雙雜雞離-難雨雪雲零-雷電需震霧露霸-霹靂靈青靖靜非靠面革鞋韓音韻響頁-頂項-順須預-頑頓頗-領頭頻顆題-額顏願類顧顯風飄飛食飯飲飽-飾餅養餐餘館首香馬駐駕駛騎騙騷驅驗驚骨體高髮鬆鬥鬧鬱鬼魂魅魔魚魯鮮鳥鳳-鳴鴻鵝鷹鹿麗麥麵麻-麼黃黎黑默點黨鼓鼠鼻齊-齋齒齡龍龜] + + + + + + + + 1月 + 2月 + 3月 + 4月 + 5月 + 6月 + 7月 + 8月 + 9月 + 10月 + 11月 + 12月 + + + + + + + + + + + + + + + + + + + + + yyyy'年'MM'月'dd'日' EEEE + + + + + yyyy'年'MM'月'dd'日' + + + + + yyyy'年'M'月'd'日' + + + + + yy'年'M'月'd'日' + + + + + + + + ahh'時'mm'分'ss'秒' z + + + + + ahh'時'mm'分'ss'秒' + + + + + ahh:mm:ss + + + + + ah:mm + + + + + + + {1} {0} + + + + + + + + + + + #,##0.###;-#,##0.### + + + + + + + #E0 + + + + + + + #,##0% + + + + + + + ¤#,##0.00;(¤#,##0.00) + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/zh_Hans.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/zh_Hans.xml new file mode 100644 index 0000000..c84a93e --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/zh_Hans.xml @@ -0,0 +1,10 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 安道爾 + 阿拉伯聯合大公國 + 安地卡及巴布達 + 安圭拉島 + 阿爾巴尼亞 + 亞美尼亞 + 荷屬安地列斯 + 南極洲 + 美屬薩摩亞群島 + 奧地利 + 澳洲 + 阿路巴 + 亞塞拜然 + 波士尼亞與赫塞格維納 + 巴貝多 + 孟加拉 + 比利時 + 布基納法索 + 保加利亞 + 浦隆地 + 貝南 + 百慕達 + 汶萊 + 玻利維亞 + 巴哈馬 + 布威島 + 波札那 + 白俄羅斯 + 貝里斯 + 可可斯群島 + 剛果民主共和國 + 中非共和國 + 剛果 + 科特迪瓦 + 庫克群島 + 喀麥隆 + 中華人民共和國 + 哥倫比亞 + 哥斯大黎加 + 維德角 + 聖誕島 + 賽普勒斯 + 捷克共和國 + 德國 + 吉布地 + 丹麥 + 多明尼加 + 多明尼加共和國 + 阿爾及利亞 + 厄瓜多爾 + 愛沙尼亞 + 厄利垂亞 + 衣索比亞 + 芬蘭 + 斐濟 + 福克蘭群島 + 密克羅尼西亞群島 + 法羅群島 + 法國 + 加彭 + 英國 + 格瑞納達 + 喬治亞共和國 + 法屬圭亞那 + 迦納 + 直布羅陀 + 格陵蘭 + 甘比亞 + 幾內亞 + 哥德普洛 
+ 赤道幾內亞 + 希臘 + 南喬治亞與南三明治群島 + 瓜地馬拉 + 關島 + 幾內亞比索 + 蓋亞納 + 中華人民共和國香港特別行政區 + 赫德與麥克當諾群島 + 宏都拉斯 + 克羅埃西亞 + 印尼 + 愛爾蘭 + 英屬印度洋領土 + 冰島 + 義大利 + 牙買加 + 約旦 + 肯亞 + 吉爾吉斯 + 高棉 + 吉里巴斯 + 科摩羅群島 + 聖克里斯多福及尼維斯 + 北韓 + 南韓 + 開曼群島 + 哈薩克 + 寮國 + 聖露西亞 + 列支敦斯登 + 斯里蘭卡 + 賴比瑞亞 + 賴索扥 + 盧森堡 + 拉脫維亞 + 利比亞 + 摩納哥 + 摩爾多瓦 + 馬達加斯加 + 馬紹爾群島 + 馬其頓 + 馬利 + 緬甸 + 澳門特別行政區 + 北馬里安納 + 馬丁尼克島 + 茅利塔尼亞 + 蒙特色拉特島 + 馬爾他 + 模里西斯 + 馬爾地夫 + 馬拉威 + 馬來西亞 + 莫三比克 + 納米比亞 + 新喀里多尼亞群島 + 尼日 + 諾福克島 + 奈及利亞 + 荷蘭 + 尼泊爾 + 諾魯 + 紐威島 + 紐西蘭 + 阿曼王國 + 巴拿馬 + 秘魯 + 法屬玻里尼西亞 + 巴布亞紐幾內亞 + 菲律賓 + 波蘭 + 聖彼德與密啟崙 + 皮特康 + 玻多黎克 + 巴勒斯坦 + 帛琉 + 卡達 + 留尼旺 + 羅馬尼亞 + 俄羅斯 + 盧安達 + 沙烏地阿拉伯 + 索羅門群島 + 塞席爾 + 蘇丹 + 聖赫勒拿島 + 斯洛維尼亞 + 冷岸及央麥恩群島 + 獅子山 + 聖馬利諾 + 塞內加爾 + 索馬利亞 + 塞爾維亞 + 蘇利南 + 聖多美及普林西比 + 薩爾瓦多 + 敘利亞 + 史瓦濟蘭 + 土克斯及開科斯群島 + 查德 + 法國南屬地 + 多哥共和國 + 泰國 + 塔吉克 + 托克勞群島 + 東帝文 + 土庫曼 + 突尼西亞 + 東加 + 千里達及托巴哥 + 吐瓦魯 + 臺灣 + 坦尚尼亞 + 烏克蘭 + 烏干達 + 美屬邊疆群島 + 美國 + 烏拉圭 + 烏茲別克 + 梵帝岡 + 聖文森及格瑞那丁 + 委內瑞拉 + 英屬維爾京群島 + 美屬維爾京群島 + 萬那杜 + 瓦利斯和福杜納群島 + 薩摩亞群島 + 葉門 + 馬約特 + 尚比亞 + 辛巴威 + + + 已修訂 + + + 曆法 + 校對 + 貨幣 + + + 佛教曆法 + 農曆 + 公曆 + 希伯來曆法 + 伊斯蘭曆法 + 伊斯蘭城市曆法 + 日本曆法 + 直接順序 + 電話簿順序 + 拼音順序 + 筆劃顺序 + 傳統曆法 + + + + 
[一-丁七丈-不且世丙丟並中串丸-丹主乃久么之乎-乏乖乘-乙九也乾亂了予事-二于云-互五-井些亞亡交亦亨享-京亮人什-仁仇今-介仍仔他付-仙代-以仰仲件任份企伊伍休伙伯-估伴伸似但佈位-住佔-何余佛-作你佩佳使來例供依侯侵便係-促俊俗保俠-信修俱個倍們-倒候-倚借倫值假偉偏做停健側-偷傑備傢傲-傳傷傻傾僅像僑價儀億儒儘優允元-充兇-光克免兒兔入內-兩八-兮共兵-典兼冊再冒冠冬冰冷准凌凝凡凰-凱出函刀分-切刊列初判-別利-刪到制-刷刺-刻則前剛剩-剪副割創劃劇劉劍力功-加助-劫勁勇勉勒動務勝-勞勢勤勵勸勿包化-北區-十千升-午半卒協南博卡印-危即卷卻厚原厭厲去參又及-友反叔取-受口-另叫-叭可-台史-右司吃-各合-吊同-后吐-向君吝吟否-吧含吳吵吸-吹吾呀呂呆告呢周味呵呼-命和咖咦-咧咪咬咱哀-品哇-哉哎員哥-哦哩-哪哭哲唉唐唬售-唯唱唷-唸商啊問啟啡啥-啦啪喂善喇喊喔喜-喝喬單喵嗎嗚嗨嗯嘆嘉嘗嘛嘴嘻嘿器噴嚇嚴囉四回因困固圈國圍園-圓圖團土在地圾址均坐坡坤坦坪垃型城域執培-基堂堅-堆堪報場塊塔塗塞填塵境增墨墮壁壓壘壞壢士壯壽夏夕-外多夜夠夢夥大天-夫央失夾奇-奉奏契奔套奧奪奮女奶她好如妙妥妨妮妳妹妻姆姊-始姐-姑姓-委姿威娃娘婆婚婦媒媽嫌子孔字-存孝孟季-孤孩孫學它宇-安宋-完宏宗-定宜客-室宮害家容宿寂寄密富寒寞-察寢實-寧審寫-寬寶封射將-專尊-尋對-小少尖尚尤就尺尼尾局-屁居-屆屋屏展屠層屬山岸峰島崇嵐嶺川-州巡工-巨巫差己-已巴巷市-布希帝帥師席帳帶常帽幅幕幣幫干-年幸-幹幻-幾床序底店府度-座庫庭康-庸廉廖廠廢-廣廳延-廷建弄式引弘弟弦弱張強彈彌彎形彥彩彬-彭彰-影役彼往-征待很律-後徐-徒得從復微徵德徹心必忌-忍志-忙忠快念忽怎怒怕-怖思怡急性-怨怪恆恐恢恥恨-恩恭息-恰悅悉悔悟-悠您悲悶情惑惜惠-惡惱想惹愁愈-愉意愚-愛感慈態慕慘慢-慣慧慮慰慶慾憂憐-憑憲憶憾懂應懶-懷懼戀成-戒或截戰戲戴戶房-扁扇手才打托扣扭扯批找-技抄把抓投抗-折披-抬抱抵抹抽拆拉拋拍拒拔拖招-拜括拳拼拾-拿持指按挑挖振挺捐捕捨捲捷掃授-掉掌排掛採-探接控-推措描-提插揚-換握揮援損搖搞搬-搭搶摘摩摸撐撞撥播撿擁擇擊-擋操-擎擔據擠擦擬擴擺擾攝支收改攻放-政故效敏救敗教敝敢-散敦敬整-敵數文斗料斯-新斷方於-施旁旅旋族旗既日-旦早旭昇昌明-昏易星-映春昨昭是時晚晨普-景晴晶智暑暖-暗暫暴曉曰曲更書曼曾-最會月-有朋服朗望朝期木未-本朱朵李材-村杜束杯東松-板析林果-枝架柏-某染-柔查柳校核-根格桃案桌桑梁梅條梯-械棄棋棒棚森椅植椰楊楓楚業極概榜榮構槍樂樓標樞模樣樹橋機橫檔檢欄權次欣欲欺欽-款歉歌歐歡-武歲歷-歸死殊殘段殺殼毀毅母每毒比毛毫氏民氣水永求汝江-污汪決汽沈-沉沒沖沙河油治沿況泉法泡-波泥注泰泳洋洗洛洞洩-洪洲活洽-派流浩-浪浮海消-涉涯液涵涼淑淚淡淨深混淺清減渡測港游湖湯源準溝溪-溫滄-滅滋滑滴滾-滿漂漏演漠漢漫漲漸潔潛潮澤澳激濃濟濤濫灌灣火灰災炎炮炸為烈烏烤無焦然煙煞照煩熊熟熱燃燈燒營爆爐爛爬-爭爵-父爸爺爽-爾牆-版牌牙牛牠牧物牲特牽犧犯狀狂狐狗狠狼猛-猜猶獄-獅獎獨獲獸獻玄率玉王玩玫玲珍珠班現球理琪琴瑜瑞瑪瑰環瓜瓦瓶甘甚甜生產用田-申男界留畢略番-畫異當疏疑疼病痕痛痴瘋療癡登-百的皆-皇皮盃益盛-盜盟盡監-盤目直相盼盾省眉看真-眠眼眾睛睡督瞧瞭矛矣知短石砂砍研砲破硬碎碗碟碧碩碰確碼磁磨礎礙示社祖祝-神祥票禁禍福禪禮秀-私秋科-秒秘租秤秦移稅程稍種稱稿穌-積穩究穹-空穿突窗窩窮立站竟-章童端競竹笑笛符笨第筆等筋答策算管箭箱節範篇築簡簽籃籌-籍米粉粗精糊糕糟系紀約-紅納純紙-紛素索紫累-細紹終組結絕絡給統-絲經綜綠維綱-網緊緒線緣編-緩練縣縮縱總-績繁織繞繪繳繼續缸缺罪置罰署罵罷羅羊美羞群義羽翁習翔翹翻-翼耀-老考者而-耍耐耗耳耶聊聖聚聞聯-聰聲職聽肉肚股肥肩肯育背胎胖胞胡胸能脆脫腦腰腳腿膽臉臥臨自臭至-致臺與-舊舍舒舞-舟航般船艦良色艾芬花芳若-苦英茫茶草荒荷莉-莊莎莫菜菩華菲萊萬落葉著葛蒙蒼蓋蓮蔡蔣蕭薄薦薩-薪藉藍藏藝藤-藥蘇蘭虎處虛號虧蛋蛙蜂蜜蝶融螢蟲蟹蠍蠻血行術街衛衝衡衣表袋被裁-裂裕補-裝裡製複褲西要覆見規視親覺覽觀角解觸言訂計訊討訓託-記訪設許訴註-証評詞詢試詩話-詳誇誌-認誓誕語誠誤說誰課誼調談請諒論諸諾謀謂講謝證識譜警譯-議護譽讀變讓讚谷豆豈豐象豪豬貌貓貝-貞負-貢貨貪-責貴買費-貼賀資賓賜賞賢-賤賦質賭賴賺購-賽贈贊贏赤走起超越趕趙趣趨足跌跑距跟跡路跳踏踢蹟蹤躍身躲車軌-軍軒軟較載輔-輕輛輝輩-輪輯輸轉轟辛辦辨辭辯辱-農迅迎近迪-迫述迴迷追退-送逃逆透-逐途這-逛逝速-造逢-連週-進逸逼遇遊-運遍-過道-違遙遜遠適遭遲遷-選遺避-邁還邊邏那邦邪邱郎部郭郵都鄉鄭鄰配酒酷-酸醉醒醜醫采釋-量金針釣鈴銀銘銳銷鋒鋼錄錢錦錯
鍋鍵鍾鎖鎮鏡鐘鐵鑑長門閃閉開閒-間閣閱闆闊關闡防阻阿-陀附降限院-除陪陰陳陵陷-陸陽隆隊階隔際-障隨險隱隻雄-集雖雙雜雞離-難雨雪雲零-雷電需震霧露霸-霹靂靈青靖靜非靠面革鞋韓音韻響頁-頂項-順須預-頑頓頗-領頭頻顆題-額顏願類顧顯風飄飛食飯飲飽-飾餅養餐餘館首香馬駐駕駛騎騙騷驅驗驚骨體高髮鬆鬥鬧鬱鬼魂魅魔魚魯鮮鳥鳳-鳴鴻鵝鷹鹿麗麥麵麻-麼黃黎黑默點黨鼓鼠鼻齊-齋齒齡龍龜] + + + + + + + 民國前 + 民國 + + + + + + + yyyy'年'M'月'd'日'EEEE + + + + + yyyy'年'M'月'd'日' + + + + + yyyy/M/d + + + + + yyyy/M/d + + + + + + + + ahh'時'mm'分'ss'秒' z + + + + + ahh'時'mm'分'ss'秒' + + + + + a h:mm:ss + + + + + a h:mm + + + + + + + {1} {0} + + + + + + + + + 太平洋標準時間 + 太平洋日光節約時間 + + + PST + PDT + + 洛杉磯 + + + + 太平洋標準時間 + 太平洋日光節約時間 + + + PST + PDT + + 洛杉磯 + + + + 山區標準時間 + 山區日光節約時間 + + + MST + MDT + + 丹佛 + + + + 山區標準時間 + 山區日光節約時間 + + + MST + MDT + + 丹佛 + + + + 山區標準時間 + 山區標準時間 + + + MST + MST + + 鳳凰城 + + + + 山區標準時間 + 山區標準時間 + + + MST + MST + + 鳳凰城 + + + + 中部標準時間 + 中部日光節約時間 + + + CST + CDT + + 芝加哥 + + + + 中部標準時間 + 中部日光節約時間 + + + CST + CDT + + 芝加哥 + + + + 東部標準時間 + 東部日光節約時間 + + + EST + EDT + + 紐約 + + + + 東部標準時間 + 東部日光節約時間 + + + EST + EDT + + 紐約 + + + + 東部標準時間 + 東部標準時間 + + + EST + EST + + 印第安那波里斯 + + + + 東部標準時間 + 東部標準時間 + + + EST + EST + + 印第安那波里斯 + + + + 夏威夷標準時間 + 夏威夷標準時間 + + + HST + HST + + 檀香山 + + + + 夏威夷標準時間 + 夏威夷標準時間 + + + HST + HST + + 檀香山 + + + + 阿拉斯加標準時間 + 阿拉斯加日光節約時間 + + + AST + ADT + + 安克里治 + + + + 阿拉斯加標準時間 + 阿拉斯加日光節約時間 + + + AST + ADT + + 安克里治 + + + + 大西洋標準時間 + 大西洋日光節約時間 + + + AST + ADT + + 哈里法克斯 + + + + 紐芬蘭標準時間 + 紐芬蘭日光節約時間 + + + CNT + CDT + + 聖約翰 + + + + 紐芬蘭標準時間 + 紐芬蘭日光節約時間 + + + CNT + CDT + + 聖約翰 + + + + 中歐標準時間 + 中歐日光節約時間 + + + CET + CEST + + 巴黎 + + + + 中歐標準時間 + 中歐日光節約時間 + + + CET + CEST + + 巴黎 + + + + 格林威治標準時間 + 格林威治標準時間 + + + GMT + GMT + + 倫敦 + + + + 格林威治標準時間 + 格林威治標準時間 + + + GMT + GMT + + 卡薩布蘭卡 + + + + 以色列標準時間 + 以色列日光節約時間 + + + IST + IDT + + 耶路撒冷 + + + + 日本標準時間 + 日本標準時間 + + + JST + JST + + 東京 + + + + 日本標準時間 + 日本標準時間 + + + JST + JST + + 東京 + + + + 東歐標準時間 + 東歐日光節約時間 + + + EET + EEST + + 布加勒斯特 + + + + 中國標準時間 + 中國標準時間 + + + CTT + CDT + + 上海 + + + + 中國標準時間 + 中國標準時間 + + + CTT + CDT + + 上海 + + + + + + + + #,##0.###;-#,##0.### + + + + + + + #E0 + + + + + + + #,##0% + + + + + + + 
¤#,##0.00;-¤#,##0.00 + + + + + + 安道爾第納爾 + ADD + + + 安道爾陪士特 + ADP + + + 阿拉伯聯合大公國迪爾汗 + AED + + + 阿富汗尼 (1927-2002) + AFA + + + 阿富汗尼 + AFN + + + 阿法爾和伊薩法郎 + AIF + + + 阿爾巴尼亞列克 (1946-1961) + ALK + + + 阿爾巴尼亞列克 + ALL + + + 阿爾巴尼亞列克幣 + ALV + + + 阿爾巴尼亞元外匯券 + ALX + + + 亞美尼亞德拉姆 + AMD + + + 荷蘭 安梯蘭 盾 + ANG + + + 安哥拉寬扎 + AOA + + + 安哥拉寬扎(1977-1990) + AOK + + + 安哥拉新寬扎 (1990-2000) + AON + + + 安哥拉新寬扎 Reajustado (1995-1999) + AOR + + + 安哥拉埃斯庫多 + AOS + + + 阿根廷奧斯特納爾 + ARA + + + 阿根廷披索 Moneda Nacional + ARM + + + 阿根廷披索(1983-1985) + ARP + + + 阿根廷披索 + ARS + + + 奧地利先令 + ATS + + + 澳幣 + AUD + + + 澳大利亞鎊 + AUP + + + 阿魯巴盾 + AWG + + + 阿塞拜彊馬特納 + AZM + + + 波士尼亞-黑塞哥維那第納爾 + BAD + + + 波士尼亞-黑塞哥維那可轉換馬克 + BAM + + + 波士尼亞-黑塞哥維那新第納爾 + BAN + + + 巴貝多元 + BBD + + + 孟加拉塔卡 + BDT + + + 比利時法郎 (可轉換) + BEC + + + 比利時法郎 + BEF + + + 比利時法郎 (金融) + BEL + + + 保加利亞硬列弗 + BGL + + + 保加利亞 社會主義列弗 + BGM + + + 保加利亞新列弗 + BGN + + + 保加利亞列弗 (1879-1952) + BGO + + + 保加利亞列弗外匯券 + BGX + + + 巴林第納爾 + BHD + + + 蒲隆地法郎 + BIF + + + 百慕達幣 + BMD + + + 百慕達鎊 + BMP + + + 汶萊元 + BND + + + 玻利維亞貨幣單位 + BOB + + + 玻利維亞舊貨幣單位 (1863-1962) + BOL + + + 玻利維亞披索 + BOP + + + 玻利維亞 幕多 + BOV + + + 巴西克魯薩多 農瓦 (1967-1986) + BRB + + + 巴西克魯賽羅 + BRC + + + 巴西克魯賽羅 (1990-1993) + BRE + + + 巴西里拉 + BRL + + + 巴西 克如爾達 農瓦 + BRN + + + 巴西克魯賽羅 + BRR + + + 巴西克魯賽羅 (1942-1967) + BRZ + + + 巴哈馬元 + BSD + + + 巴哈馬鎊 + BSP + + + 不丹努扎姆 + BTN + + + 不丹盧布 + BTR + + + 緬甸元 + BUK + + + 緬甸盧布 + BUR + + + 波札那 - 普拉 + BWP + + + 白俄羅斯新盧布 (1994-1999) + BYB + + + 白俄羅斯盧布 (1992-1994) + BYL + + + 白俄羅斯盧布 + BYR + + + 伯利茲元 + BZD + + + 英國的洪都拉斯元r + BZH + + + 加幣 + CAD + + + 剛果法郎 + CDF + + + 剛果共和國法郎 + CDG + + + 剛果扎伊爾 + CDL + + + 中非共和國西非法郎 + CFF + + + 瑞士法郎 + CHF + + + 庫克群島元 + CKD + + + 智利 康導 + CLC + + + 智利埃斯庫多 + CLE + + + 卡林油達佛曼跎 + CLF + + + 智利披索 + CLP + + + 卡麥隆西非法郎 + CMF + + + 中國人民幣元 + CNP + + + 中國美元外匯券 + CNX + + + 人民幣 + CNY + + + 哥倫比亞披索鈔 + COB + + + 剛果西非法郎 + COF + + + 哥倫比亞披索 + COP + + + 哥斯大黎加科郎 + CRC + + + 捷克克朗 + CSC + + + 捷克斯洛伐克硬克朗 + CSK + + + 古巴披索 + CUP + + + 古巴人外匯券 + CUX + + + 維德角埃斯庫多 + CVE + + + 庫拉克 盾 + CWG + + + 賽浦路斯鎊 + 
CYP + + + 捷克克朗 + CZK + + + 東德東德馬克 + DDM + + + 德國馬克 + DEM + + + 德國 蘇馬克Sperrmark + DES + + + 吉布地法郎 + DJF + + + 丹麥克羅納 + DKK + + + 多明尼加披索 + DOP + + + 阿爾及利亞第納爾 + DZD + + + 阿爾及利亞新法郎 + DZF + + + 阿爾及利亞法郎 Germinal + DZG + + + 厄瓜多蘇克雷 + ECS + + + 厄瓜多爾由里達瓦康斯坦 (UVC) + ECV + + + 愛沙尼亞克朗 + EEK + + + 埃及鎊 + EGP + + + 厄立特里亞納克法 + ERN + + + 西班牙陪士特 + ESP + + + 衣索比亞比爾 + ETB + + + 埃賽俄比亞元 + ETD + + + 歐元 + EUR + + + 芬蘭馬克 + FIM + + + 芬蘭馬克 (1860-1962) + FIN + + + 斐濟元 + FJD + + + 斐濟鎊 + FJP + + + 福克蘭群島鎊 + FKP + + + 法羅島克朗 + FOK + + + 法國法郎 + FRF + + + 法國法郎 捷米那/龐加萊法郎 + FRG + + + 加蓬西非法郎 + GAF + + + 英鎊 + GBP + + + 喬治 庫旁 拉里 + GEK + + + 喬治拉里 + GEL + + + 迦納仙蔕 + GHC + + + 迦納舊仙蔕 + GHO + + + 迦納鎊 + GHP + + + 迦納重新估价後的仙蔕 + GHR + + + 直布羅陀鎊 + GIP + + + 格陵蘭克羅鈉 + GLK + + + 甘比亞達拉西 + GMD + + + 岡比亞鎊 + GMP + + + 幾內亞法郎 + GNF + + + 幾內亞法郎 (1960-1972) + GNI + + + 幾內亞西里 + GNS + + + 瓜德羅普島法郎 + GPF + + + 赤道幾內亞埃奎勒 + GQE + + + 赤道幾內亞佛朗哥 + GQF + + + 赤道幾內亞比塞塔 + GQP + + + 希臘德拉克馬 + GRD + + + 希臘新德拉克馬 + GRN + + + 瓜地馬拉格查爾 + GTQ + + + 法屬圭亞那法郎圭亞那 + GUF + + + 葡屬幾內亞埃斯庫多 + GWE + + + 葡屬幾內亞米爾里斯 + GWM + + + 幾內亞披索披索 + GWP + + + 圭亞那元 + GYD + + + 港元 + HK$ + + + 洪都拉斯倫皮拉 + HNL + + + 克羅地亞第納爾 + HRD + + + 克羅地亞庫納 + HRK + + + 海地古德 + HTG + + + 匈牙利 - 福林 + HUF + + + 北愛爾蘭鎊 + IBP + + + 印度尼西亞尼可盾 + IDG + + + 印度尼西亞爪哇盧布 + IDJ + + + 印度尼西亞新盧布 + IDN + + + 印尼 - 盧布 + IDR + + + 愛爾蘭鎊 + IEP + + + 以色列謝客爾 + ILL + + + 以色列鎊 + ILP + + + 以色列新謝克爾 + ILS + + + 曼城島英鎊 + IMP + + + 印度盧布 + =0#Rs.|1#Re.|1<Rs. 
+ + + 伊拉克第納爾 + IQD + + + 伊朗里亞爾 + IRR + + + 冰島克朗 + ISK + + + 義大利里拉 + ITL + + + 澤西鎊 + JEP + + + 牙買加元 + JMD + + + 牙買加鎊 + JMP + + + 約旦第納爾 + JOD + + + 日圓 + JP¥ + + + 肯尼亞先令 + KES + + + 吉爾吉斯索馬 + KGS + + + 柬埔寨舊瑞爾 + KHO + + + 柬埔寨瑞爾 + KHR + + + 基里巴斯元 + KID + + + 科摩羅法郎 + KMF + + + 北朝鮮人民幣 + KPP + + + 北朝鮮幣 + KPW + + + 韓國 哈瓦 + KRH + + + 南韓舊幣 + KRO + + + 韓國圜 + KRW + + + 科威特第納爾 + KWD + + + 開曼群島美元 + KYD + + + 卡扎克斯坦盧布 + KZR + + + 卡扎克斯坦坦吉 + KZT + + + 老撾 開普 + LAK + + + 黎巴嫩鎊 + LBP + + + 列支敦斯登法郎 + LIF + + + 斯里蘭卡盧布 + LKR + + + 錫蘭盧布 + LNR + + + 賴比瑞亞元 + LRD + + + 賴索托羅蒂 + LSL + + + 立陶宛里塔 + LTL + + + 立陶宛特羅 + LTT + + + 盧森堡法郎 + LUF + + + 拉脫維亞拉特銀幣 + LVL + + + 拉脫維亞盧布 + LVR + + + 利比亞英國的軍事當局里拉 + LYB + + + 利比亞第納爾 + LYD + + + 利比亞鎊 + LYP + + + 摩洛哥迪拉姆 + MAD + + + 摩洛哥法郎 + MAF + + + 摩納哥新法郎 + MCF + + + 摩納哥法郎 傑米那 + MCG + + + 摩杜雲列伊庫旁 + MDC + + + 摩杜雲列伊 + MDL + + + 摩杜雲盧布庫旁 + MDR + + + 馬達加斯加艾瑞爾 + MGA + + + 馬達加斯加法郎 + MGF + + + 馬紹爾群島美元 + MHD + + + 馬其頓第納爾 + MKD + + + 馬其頓第納爾(1992-1993) + MKN + + + 馬里法郎 + MLF + + + 緬甸元 + MMK + + + 緬甸美元外匯券 + MMX + + + 蒙古圖格里克 + MNT + + + 澳門元 + MOP + + + 馬提尼克島法郎 + MQF + + + 茅利塔尼亞烏吉亞 + MRO + + + 馬爾他里拉 + MTL + + + 馬爾他鎊 + MTP + + + 模里西斯盧布 + MUR + + + 馬爾地夫盧布 + MVP + + + 馬爾地夫海島盧非亞 + MVR + + + 馬拉維克瓦查 + MWK + + + 馬拉維鎊 + MWP + + + 墨西哥 - 披索 + MXN + + + 墨西哥銀披索 (1861-1992) + MXP + + + 墨西哥法律反轉(UDI) + MXV + + + 馬來西亞 - 林吉特 + MYR + + + 莫桑比克埃斯庫多 + MZE + + + 莫三比克梅蒂卡爾 + MZM + + + 納米比亞元 + NAD + + + 赫布里底群島 CFP 法郎 + NCF + + + 奈及利亞奈拉 + NGN + + + 奈及利亞鎊 + NGP + + + 新赫布里底群島 CFP 法郎 + NHF + + + 尼加拉瓜科多巴 + NIC + + + 尼加拉瓜金金哥多華 + NIG + + + 尼加拉瓜 金哥多華 + NIO + + + 荷蘭盾 + NLG + + + 挪威克羅納 + NOK + + + 尼泊爾盧布 + NPR + + + 紐西蘭幣 + $NZ + + + 紐西蘭鎊 + NZP + + + 阿曼里奧 + OMR + + + 阿曼里亞爾仙蔕i + OMS + + + 巴拿馬巴波亞 + PAB + + + 車城盧布 Kupon + PDK + + + 車城新盧布 + PDN + + + 車城盧布 + PDR + + + 祕魯因蒂 + PEI + + + 秘魯新太陽幣 + PEN + + + 秘魯太陽幣 + PES + + + 巴布亞紐幾內亞基那 + PGK + + + 菲律賓披索 + PHP + + + 巴基斯坦盧布 + PKR + + + 波蘭茲羅提 + PLN + + + 波蘭美元外匯券 + PLX + + + 波蘭茲羅提 (1950-1995) + PLZ + + + 巴勒斯坦鎊 + PSP + + + 葡萄牙 康拖 + PTC + + + 葡萄牙埃斯庫多 + PTE + + + 巴拉圭瓜拉尼 + PYG + + + 
卡達爾里亞爾 + QAR + + + 留尼汪島法郎 + REF + + + 羅馬尼亞列伊 + ROL + + + 羅馬尼亞新列伊 + RON + + + 俄羅斯盧布 + RUB + + + 俄羅斯盧布 (1991-1998) + RUR + + + 盧安達法郎 + RWF + + + 沙烏地里雅 + SRl + + + 沙烏地宗主里雅 + SAS + + + 索羅門群島元 + SBD + + + 塞舌爾群島盧布 + SCR + + + 蘇丹第納爾 + SDD + + + 蘇丹鎊 + SDP + + + 瑞典克羅納 + SEK + + + 新加坡幣 + SGD + + + 聖赫勒拿 鎊 + SHP + + + 斯洛文尼亞 Tolar Bons + SIB + + + 斯洛維尼亞托勒 + SIT + + + 斯洛伐克克朗 + SKK + + + 獅子山利昂 + SLL + + + 聖馬利諾里拉 + SML + + + 索馬利亞先令 + SOS + + + 索馬里蘭先令 + SQS + + + 蘇里南盾 + SRG + + + 蘇格蘭鎊 + SSP + + + 聖多美島和普林西比島多布拉 + STD + + + 聖多美島和普林西比島埃斯庫多 + STE + + + 蘇聯新盧布 + SUN + + + 蘇聯盧布 + SUR + + + 愛爾 薩爾瓦多科郎 + SVC + + + 敘利亞鎊 + SYP + + + 斯威士蘭 里郎 + SZL + + + 土耳其人和凱科斯冠 + TCC + + + 乍得 西非 法郎 + TDF + + + 泰銖 + THB + + + 塔吉克斯坦盧布 + TJR + + + 塔吉克斯坦 索莫尼 + TJS + + + 土庫曼馬納特 + TMM + + + 突尼西亞第納爾 + TND + + + 東加潘加 + TOP + + + 湯加英鎊 + TOS + + + 帝汶 埃斯庫多 + TPE + + + 帝汶元 + TPP + + + 土耳其里拉 + TRL + + + 千里達及托巴哥r + TTD + + + 特立尼達和多巴哥舊元r + TTO + + + 吐瓦魯美元 + TVD + + + 新臺幣 + NT$ + + + 坦桑尼亞 先令 + TZS + + + 烏克蘭格里夫那 + UAH + + + 烏克蘭 卡本瓦那茲 + UAK + + + 烏干達先令 (1966-1987) + UGS + + + 烏干達先令 + UGX + + + 美元 + US$ + + + 美元 (第二天) + USN + + + 美元 (同一天) + USS + + + 烏拉圭披索福厄特 + UYF + + + 烏拉圭披索 (1975-1993) + UYP + + + 烏拉圭披索 + UYU + + + 烏茲別克斯坦 庫邦 索馬 + UZC + + + 烏茲別克斯坦 薩木 + UZS + + + 梵蒂岡城里拉 + VAL + + + 北越南 皮阿斯特越南盾 + VDD + + + 北越南新盾 + VDN + + + 北越南 名 皮阿斯特越南盾 + VDP + + + 委內瑞拉博利瓦 + VEB + + + 英屬維爾斯群島元 + VGD + + + 越南盾 + VND + + + 越南新盾 + VNN + + + 越南共和國 盾 + VNR + + + 越南國家盾 + VNS + + + 萬那杜萬杜 + VUV + + + 西薩摩亞鎊 + WSP + + + 西薩摩亞塔拉 + WST + + + 亞洲第納爾會計單位 + XAD + + + 西非 法郎 BEAC + XAF + + + 亞洲貨幣單位 + XAM + + + 黃金 + XAU + + + 歐洲綜合單位 + XBA + + + 歐洲貨幣單位 + XBB + + + 歐洲會計單位(XBC) + XBC + + + 歐洲會計單位(XBD) + XBD + + + 格瑞那達元 + XCD + + + 西非 新 法郎 + XCF + + + 特殊提款權 + XDR + + + 西非 法郎 BCEAEC + XEF + + + 歐洲貨幣單位 + XEU + + + 法國金法郎 + XFO + + + 法國 UIC 法郎 + XFU + + + 伊斯蘭第納爾 + XID + + + 法國大城市新 法郎 + XMF + + + 法國安的列斯群島 西非 法郎 + XNF + + + 西非 法郎 BCEAO + XOF + + + CFP 法郎 + XPF + + + COMECON 可轉移盧布 + XTR + + + 葉門第納爾 + YDD + + + 也門阿馬迪里亞爾 + YEI + + + 也門里亞爾 + YER + + + 南斯拉夫第納爾硬幣 + YUD + + + 
南斯拉夫聯邦第納爾 + YUF + + + 南斯拉夫人1994 第納爾 + YUG + + + 南斯拉夫挪威亞第納爾 + YUM + + + 南斯拉夫 可轉換第納爾 + YUN + + + 南斯拉夫十月 第納爾 + YUO + + + 南斯拉夫改制後的第納爾 + YUR + + + 南非 - 蘭特 (金融) + ZAL + + + 南非鎊 + ZAP + + + 南非蘭特 + ZAR + + + 尚比亞克瓦查 + ZMK + + + 贊比亞鎊 + ZMP + + + 薩伊扎新伊爾 + ZRN + + + 扎伊爾扎伊爾 + ZRZ + + + 辛巴威元 + ZWD + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/zh_Hant_HK.xml b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/zh_Hant_HK.xml new file mode 100644 index 0000000..d7035a4 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/data/zh_Hant_HK.xml @@ -0,0 +1,773 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 安道爾 + 阿拉伯聯合大公國 + 安地卡及巴布達 + 安圭拉島 + 阿爾巴尼亞 + 亞美尼亞 + 荷屬安地列斯 + 南極洲 + 美屬薩摩亞群島 + 奧地利 + 澳洲 + 阿路巴 + 亞塞拜然 + 波士尼亞與赫塞格維納 + 巴貝多 + 孟加拉 + 比利時 + 布基納法索 + 保加利亞 + 浦隆地 + 貝南 + 百慕達 + 汶萊 + 玻利維亞 + 巴哈馬 + 布威島 + 波札那 + 白俄羅斯 + 貝里斯 + 可可斯群島 + 剛果民主共和國 + 中非共和國 + 剛果 + 科特迪瓦 + 庫克群島 + 喀麥隆 + 中華人民共和國 + 哥倫比亞 + 哥斯大黎加 + 維德角 + 聖誕島 + 賽普勒斯 + 捷克共和國 + 德國 + 吉布地 + 丹麥 + 多明尼加 + 多明尼加共和國 + 阿爾及利亞 + 厄瓜多爾 + 愛沙尼亞 + 厄利垂亞 + 衣索比亞 + 芬蘭 + 斐濟 + 福克蘭群島 + 密克羅尼西亞群島 + 法羅群島 + 法國 + 加彭 + 英國 + 格瑞納達 + 喬治亞共和國 + 法屬圭亞那 + 迦納 + 直布羅陀 + 格陵蘭 + 甘比亞 + 幾內亞 + 哥德普洛 + 赤道幾內亞 + 希臘 + 南喬治亞與南三明治群島 + 瓜地馬拉 + 關島 + 幾內亞比索 + 蓋亞納 + 中華人民共和國香港特別行政區 + 赫德與麥克當諾群島 + 宏都拉斯 + 克羅埃西亞 + 印尼 + 愛爾蘭 + 英屬印度洋領土 + 冰島 + 義大利 + 牙買加 + 約旦 + 肯亞 + 吉爾吉斯 + 高棉 + 吉里巴斯 + 科摩羅群島 + 聖克里斯多福及尼維斯 + 北韓 + 南韓 + 開曼群島 + 哈薩克 + 寮國 + 聖露西亞 + 列支敦斯登 + 斯里蘭卡 + 賴比瑞亞 + 賴索扥 + 盧森堡 + 拉脫維亞 + 利比亞 + 摩納哥 + 摩爾多瓦 + 馬達加斯加 + 馬紹爾群島 + 馬其頓 + 馬利 + 緬甸 + 澳門特別行政區 + 北馬里安納 + 馬丁尼克島 + 茅利塔尼亞 + 蒙特色拉特島 + 馬爾他 + 模里西斯 + 馬爾地夫 + 馬拉威 + 馬來西亞 + 莫三比克 + 納米比亞 + 新喀里多尼亞群島 + 尼日 + 諾福克島 + 奈及利亞 + 荷蘭 + 尼泊爾 + 諾魯 + 紐威島 + 紐西蘭 + 阿曼王國 + 巴拿馬 + 秘魯 + 法屬玻里尼西亞 + 巴布亞紐幾內亞 + 菲律賓 + 波蘭 + 聖彼德與密啟崙 + 皮特康 + 玻多黎克 + 巴勒斯坦 + 帛琉 + 卡達 + 留尼旺 + 羅馬尼亞 + 俄羅斯 + 盧安達 + 沙烏地阿拉伯 + 索羅門群島 + 塞席爾 + 蘇丹 + 聖赫勒拿島 + 斯洛維尼亞 + 冷岸及央麥恩群島 + 獅子山 + 聖馬利諾 + 塞內加爾 + 索馬利亞 + 塞爾維亞 + 蘇利南 + 聖多美及普林西比 + 薩爾瓦多 + 敘利亞 + 史瓦濟蘭 + 土克斯及開科斯群島 + 查德 + 法國南屬地 + 多哥共和國 + 泰國 + 塔吉克 + 托克勞群島 + 東帝文 + 土庫曼 + 突尼西亞 + 東加 + 
千里達及托巴哥 + 吐瓦魯 + 臺灣 + 坦尚尼亞 + 烏克蘭 + 烏干達 + 美屬邊疆群島 + 美國 + 烏拉圭 + 烏茲別克 + 梵帝岡 + 聖文森及格瑞那丁 + 委內瑞拉 + 英屬維爾京群島 + 美屬維爾京群島 + 萬那杜 + 瓦利斯和福杜納群島 + 薩摩亞群島 + 葉門 + 馬約特 + 尚比亞 + 辛巴威 + + + 已修訂 + + + 曆法 + 校對 + 貨幣 + + + 佛教曆法 + 農曆 + 公曆 + 希伯來曆法 + 伊斯蘭曆法 + 伊斯蘭城市曆法 + 日本曆法 + 直接順序 + 電話簿順序 + 拼音順序 + 筆劃顺序 + 傳統曆法 + + + + [一-丁七丈-不且世丙丟並中串丸-丹主乃久么之乎-乏乖乘-乙九也乾亂了予事-二于云-互五-井些亞亡交亦亨享-京亮人什-仁仇今-介仍仔他付-仙代-以仰仲件任份企伊伍休伙伯-估伴伸似但佈位-住佔-何余佛-作你佩佳使來例供依侯侵便係-促俊俗保俠-信修俱個倍們-倒候-倚借倫值假偉偏做停健側-偷傑備傢傲-傳傷傻傾僅像僑價儀億儒儘優允元-充兇-光克免兒兔入內-兩八-兮共兵-典兼冊再冒冠冬冰冷准凌凝凡凰-凱出函刀分-切刊列初判-別利-刪到制-刷刺-刻則前剛剩-剪副割創劃劇劉劍力功-加助-劫勁勇勉勒動務勝-勞勢勤勵勸勿包化-北區-十千升-午半卒協南博卡印-危即卷卻厚原厭厲去參又及-友反叔取-受口-另叫-叭可-台史-右司吃-各合-吊同-后吐-向君吝吟否-吧含吳吵吸-吹吾呀呂呆告呢周味呵呼-命和咖咦-咧咪咬咱哀-品哇-哉哎員哥-哦哩-哪哭哲唉唐唬售-唯唱唷-唸商啊問啟啡啥-啦啪喂善喇喊喔喜-喝喬單喵嗎嗚嗨嗯嘆嘉嘗嘛嘴嘻嘿器噴嚇嚴囉四回因困固圈國圍園-圓圖團土在地圾址均坐坡坤坦坪垃型城域執培-基堂堅-堆堪報場塊塔塗塞填塵境增墨墮壁壓壘壞壢士壯壽夏夕-外多夜夠夢夥大天-夫央失夾奇-奉奏契奔套奧奪奮女奶她好如妙妥妨妮妳妹妻姆姊-始姐-姑姓-委姿威娃娘婆婚婦媒媽嫌子孔字-存孝孟季-孤孩孫學它宇-安宋-完宏宗-定宜客-室宮害家容宿寂寄密富寒寞-察寢實-寧審寫-寬寶封射將-專尊-尋對-小少尖尚尤就尺尼尾局-屁居-屆屋屏展屠層屬山岸峰島崇嵐嶺川-州巡工-巨巫差己-已巴巷市-布希帝帥師席帳帶常帽幅幕幣幫干-年幸-幹幻-幾床序底店府度-座庫庭康-庸廉廖廠廢-廣廳延-廷建弄式引弘弟弦弱張強彈彌彎形彥彩彬-彭彰-影役彼往-征待很律-後徐-徒得從復微徵德徹心必忌-忍志-忙忠快念忽怎怒怕-怖思怡急性-怨怪恆恐恢恥恨-恩恭息-恰悅悉悔悟-悠您悲悶情惑惜惠-惡惱想惹愁愈-愉意愚-愛感慈態慕慘慢-慣慧慮慰慶慾憂憐-憑憲憶憾懂應懶-懷懼戀成-戒或截戰戲戴戶房-扁扇手才打托扣扭扯批找-技抄把抓投抗-折披-抬抱抵抹抽拆拉拋拍拒拔拖招-拜括拳拼拾-拿持指按挑挖振挺捐捕捨捲捷掃授-掉掌排掛採-探接控-推措描-提插揚-換握揮援損搖搞搬-搭搶摘摩摸撐撞撥播撿擁擇擊-擋操-擎擔據擠擦擬擴擺擾攝支收改攻放-政故效敏救敗教敝敢-散敦敬整-敵數文斗料斯-新斷方於-施旁旅旋族旗既日-旦早旭昇昌明-昏易星-映春昨昭是時晚晨普-景晴晶智暑暖-暗暫暴曉曰曲更書曼曾-最會月-有朋服朗望朝期木未-本朱朵李材-村杜束杯東松-板析林果-枝架柏-某染-柔查柳校核-根格桃案桌桑梁梅條梯-械棄棋棒棚森椅植椰楊楓楚業極概榜榮構槍樂樓標樞模樣樹橋機橫檔檢欄權次欣欲欺欽-款歉歌歐歡-武歲歷-歸死殊殘段殺殼毀毅母每毒比毛毫氏民氣水永求汝江-污汪決汽沈-沉沒沖沙河油治沿況泉法泡-波泥注泰泳洋洗洛洞洩-洪洲活洽-派流浩-浪浮海消-涉涯液涵涼淑淚淡淨深混淺清減渡測港游湖湯源準溝溪-溫滄-滅滋滑滴滾-滿漂漏演漠漢漫漲漸潔潛潮澤澳激濃濟濤濫灌灣火灰災炎炮炸為烈烏烤無焦然煙煞照煩熊熟熱燃燈燒營爆爐爛爬-爭爵-父爸爺爽-爾牆-版牌牙牛牠牧物牲特牽犧犯狀狂狐狗狠狼猛-猜猶獄-獅獎獨獲獸獻玄率玉王玩玫玲珍珠班現球理琪琴瑜瑞瑪瑰環瓜瓦瓶甘甚甜生產用田-申男界留畢略番-畫異當疏疑疼病痕痛痴瘋療癡登-百的皆-皇皮盃益盛-盜盟盡監-盤目直相盼盾省眉看真-眠眼眾睛睡督瞧瞭矛矣知短石砂砍研砲破硬碎碗碟碧碩碰確碼磁磨礎礙示社祖祝-神祥票禁禍福禪禮秀-私秋科-秒秘租秤秦移稅程稍種稱稿穌-積穩究穹-空穿突窗窩窮立站竟-章童端競竹笑笛符笨第筆等筋答策算管箭箱節範篇築簡簽籃籌-籍米粉粗精糊糕糟系紀約-紅納純紙-紛素索紫累-細紹終組結絕絡給統-絲經綜綠維綱-網緊緒線緣編-緩練縣縮縱總-績繁織繞繪繳繼續缸缺罪置罰署罵罷羅羊美羞群義羽翁習翔翹翻-翼耀-老考者而-耍耐耗耳耶聊聖聚聞聯-聰聲職聽肉肚股肥肩肯育背胎胖胞胡胸能脆脫腦腰腳腿膽臉臥臨自臭至-致臺與-舊舍舒舞-舟航般船艦良色艾芬花芳若-苦英茫茶草荒荷莉-莊莎莫菜菩華菲萊萬落葉著葛蒙蒼蓋蓮蔡蔣蕭薄薦薩-薪藉藍藏藝藤-藥蘇蘭虎處虛號虧蛋蛙蜂蜜蝶融螢蟲蟹蠍蠻血行術街衛
衝衡衣表袋被裁-裂裕補-裝裡製複褲西要覆見規視親覺覽觀角解觸言訂計訊討訓託-記訪設許訴註-証評詞詢試詩話-詳誇誌-認誓誕語誠誤說誰課誼調談請諒論諸諾謀謂講謝證識譜警譯-議護譽讀變讓讚谷豆豈豐象豪豬貌貓貝-貞負-貢貨貪-責貴買費-貼賀資賓賜賞賢-賤賦質賭賴賺購-賽贈贊贏赤走起超越趕趙趣趨足跌跑距跟跡路跳踏踢蹟蹤躍身躲車軌-軍軒軟較載輔-輕輛輝輩-輪輯輸轉轟辛辦辨辭辯辱-農迅迎近迪-迫述迴迷追退-送逃逆透-逐途這-逛逝速-造逢-連週-進逸逼遇遊-運遍-過道-違遙遜遠適遭遲遷-選遺避-邁還邊邏那邦邪邱郎部郭郵都鄉鄭鄰配酒酷-酸醉醒醜醫采釋-量金針釣鈴銀銘銳銷鋒鋼錄錢錦錯鍋鍵鍾鎖鎮鏡鐘鐵鑑長門閃閉開閒-間閣閱闆闊關闡防阻阿-陀附降限院-除陪陰陳陵陷-陸陽隆隊階隔際-障隨險隱隻雄-集雖雙雜雞離-難雨雪雲零-雷電需震霧露霸-霹靂靈青靖靜非靠面革鞋韓音韻響頁-頂項-順須預-頑頓頗-領頭頻顆題-額顏願類顧顯風飄飛食飯飲飽-飾餅養餐餘館首香馬駐駕駛騎騙騷驅驗驚骨體高髮鬆鬥鬧鬱鬼魂魅魔魚魯鮮鳥鳳-鳴鴻鵝鷹鹿麗麥麵麻-麼黃黎黑默點黨鼓鼠鼻齊-齋齒齡龍龜] + + + + + + + 民國前 + 民國 + + + + + + + yyyy'年'M'月'd'日'EEEE + + + + + yyyy'年'M'月'd'日' + + + + + yyyy/M/d + + + + + yyyy/M/d + + + + + + + + ahh'時'mm'分'ss'秒' z + + + + + ahh'時'mm'分'ss'秒' + + + + + a h:mm:ss + + + + + a h:mm + + + + + + + {1} {0} + + + + + + + + + 太平洋標準時間 + 太平洋日光節約時間 + + + PST + PDT + + 洛杉磯 + + + + 太平洋標準時間 + 太平洋日光節約時間 + + + PST + PDT + + 洛杉磯 + + + + 山區標準時間 + 山區日光節約時間 + + + MST + MDT + + 丹佛 + + + + 山區標準時間 + 山區日光節約時間 + + + MST + MDT + + 丹佛 + + + + 山區標準時間 + 山區標準時間 + + + MST + MST + + 鳳凰城 + + + + 山區標準時間 + 山區標準時間 + + + MST + MST + + 鳳凰城 + + + + 中部標準時間 + 中部日光節約時間 + + + CST + CDT + + 芝加哥 + + + + 中部標準時間 + 中部日光節約時間 + + + CST + CDT + + 芝加哥 + + + + 東部標準時間 + 東部日光節約時間 + + + EST + EDT + + 紐約 + + + + 東部標準時間 + 東部日光節約時間 + + + EST + EDT + + 紐約 + + + + 東部標準時間 + 東部標準時間 + + + EST + EST + + 印第安那波里斯 + + + + 東部標準時間 + 東部標準時間 + + + EST + EST + + 印第安那波里斯 + + + + 夏威夷標準時間 + 夏威夷標準時間 + + + HST + HST + + 檀香山 + + + + 夏威夷標準時間 + 夏威夷標準時間 + + + HST + HST + + 檀香山 + + + + 阿拉斯加標準時間 + 阿拉斯加日光節約時間 + + + AST + ADT + + 安克里治 + + + + 阿拉斯加標準時間 + 阿拉斯加日光節約時間 + + + AST + ADT + + 安克里治 + + + + 大西洋標準時間 + 大西洋日光節約時間 + + + AST + ADT + + 哈里法克斯 + + + + 紐芬蘭標準時間 + 紐芬蘭日光節約時間 + + + CNT + CDT + + 聖約翰 + + + + 紐芬蘭標準時間 + 紐芬蘭日光節約時間 + + + CNT + CDT + + 聖約翰 + + + + 中歐標準時間 + 中歐日光節約時間 + + + CET + CEST + + 巴黎 + + + + 中歐標準時間 + 中歐日光節約時間 + + + CET + CEST + + 巴黎 + + + + 格林威治標準時間 + 格林威治標準時間 + + + GMT + GMT + + 倫敦 + + + + 格林威治標準時間 + 格林威治標準時間 + + + GMT + GMT + + 卡薩布蘭卡 + + + + 以色列標準時間 + 以色列日光節約時間 + + + IST + IDT + + 耶路撒冷 + + + + 日本標準時間 + 日本標準時間 + + + JST + JST + + 東京 
+ + + + 日本標準時間 + 日本標準時間 + + + JST + JST + + 東京 + + + + 東歐標準時間 + 東歐日光節約時間 + + + EET + EEST + + 布加勒斯特 + + + + 中國標準時間 + 中國標準時間 + + + CTT + CDT + + 上海 + + + + 中國標準時間 + 中國標準時間 + + + CTT + CDT + + 上海 + + + + + + + + #,##0.###;-#,##0.### + + + + + + + #E0 + + + + + + + #,##0% + + + + + + + ¤#,##0.00;-¤#,##0.00 + + + + + + 安道爾第納爾 + ADD + + + 安道爾陪士特 + ADP + + + 阿拉伯聯合大公國迪爾汗 + AED + + + 阿富汗尼 (1927-2002) + AFA + + + 阿富汗尼 + AFN + + + 阿法爾和伊薩法郎 + AIF + + + 阿爾巴尼亞列克 (1946-1961) + ALK + + + 阿爾巴尼亞列克 + ALL + + + 阿爾巴尼亞列克幣 + ALV + + + 阿爾巴尼亞元外匯券 + ALX + + + 亞美尼亞德拉姆 + AMD + + + 荷蘭 安梯蘭 盾 + ANG + + + 安哥拉寬扎 + AOA + + + 安哥拉寬扎(1977-1990) + AOK + + + 安哥拉新寬扎 (1990-2000) + AON + + + 安哥拉新寬扎 Reajustado (1995-1999) + AOR + + + 安哥拉埃斯庫多 + AOS + + + 阿根廷奧斯特納爾 + ARA + + + 阿根廷披索 Moneda Nacional + ARM + + + 阿根廷披索(1983-1985) + ARP + + + 阿根廷披索 + ARS + + + 奧地利先令 + ATS + + + 澳幣 + AUD + + + 澳大利亞鎊 + AUP + + + 阿魯巴盾 + AWG + + + 阿塞拜彊馬特納 + AZM + + + 波士尼亞-黑塞哥維那第納爾 + BAD + + + 波士尼亞-黑塞哥維那可轉換馬克 + BAM + + + 波士尼亞-黑塞哥維那新第納爾 + BAN + + + 巴貝多元 + BBD + + + 孟加拉塔卡 + BDT + + + 比利時法郎 (可轉換) + BEC + + + 比利時法郎 + BEF + + + 比利時法郎 (金融) + BEL + + + 保加利亞硬列弗 + BGL + + + 保加利亞 社會主義列弗 + BGM + + + 保加利亞新列弗 + BGN + + + 保加利亞列弗 (1879-1952) + BGO + + + 保加利亞列弗外匯券 + BGX + + + 巴林第納爾 + BHD + + + 蒲隆地法郎 + BIF + + + 百慕達幣 + BMD + + + 百慕達鎊 + BMP + + + 汶萊元 + BND + + + 玻利維亞貨幣單位 + BOB + + + 玻利維亞舊貨幣單位 (1863-1962) + BOL + + + 玻利維亞披索 + BOP + + + 玻利維亞 幕多 + BOV + + + 巴西克魯薩多 農瓦 (1967-1986) + BRB + + + 巴西克魯賽羅 + BRC + + + 巴西克魯賽羅 (1990-1993) + BRE + + + 巴西里拉 + BRL + + + 巴西 克如爾達 農瓦 + BRN + + + 巴西克魯賽羅 + BRR + + + 巴西克魯賽羅 (1942-1967) + BRZ + + + 巴哈馬元 + BSD + + + 巴哈馬鎊 + BSP + + + 不丹努扎姆 + BTN + + + 不丹盧布 + BTR + + + 緬甸元 + BUK + + + 緬甸盧布 + BUR + + + 波札那 - 普拉 + BWP + + + 白俄羅斯新盧布 (1994-1999) + BYB + + + 白俄羅斯盧布 (1992-1994) + BYL + + + 白俄羅斯盧布 + BYR + + + 伯利茲元 + BZD + + + 英國的洪都拉斯元r + BZH + + + 加幣 + CAD + + + 剛果法郎 + CDF + + + 剛果共和國法郎 + CDG + + + 剛果扎伊爾 + CDL + + + 中非共和國西非法郎 + CFF + + + 瑞士法郎 + CHF + + + 庫克群島元 + CKD + + + 智利 康導 + CLC + + + 智利埃斯庫多 + CLE + + + 卡林油達佛曼跎 + CLF + + + 智利披索 + 
CLP + + + 卡麥隆西非法郎 + CMF + + + 中國人民幣元 + CNP + + + 中國美元外匯券 + CNX + + + 人民幣 + CNY + + + 哥倫比亞披索鈔 + COB + + + 剛果西非法郎 + COF + + + 哥倫比亞披索 + COP + + + 哥斯大黎加科郎 + CRC + + + 捷克克朗 + CSC + + + 捷克斯洛伐克硬克朗 + CSK + + + 古巴披索 + CUP + + + 古巴人外匯券 + CUX + + + 維德角埃斯庫多 + CVE + + + 庫拉克 盾 + CWG + + + 賽浦路斯鎊 + CYP + + + 捷克克朗 + CZK + + + 東德東德馬克 + DDM + + + 德國馬克 + DEM + + + 德國 蘇馬克Sperrmark + DES + + + 吉布地法郎 + DJF + + + 丹麥克羅納 + DKK + + + 多明尼加披索 + DOP + + + 阿爾及利亞第納爾 + DZD + + + 阿爾及利亞新法郎 + DZF + + + 阿爾及利亞法郎 Germinal + DZG + + + 厄瓜多蘇克雷 + ECS + + + 厄瓜多爾由里達瓦康斯坦 (UVC) + ECV + + + 愛沙尼亞克朗 + EEK + + + 埃及鎊 + EGP + + + 厄立特里亞納克法 + ERN + + + 西班牙陪士特 + ESP + + + 衣索比亞比爾 + ETB + + + 埃賽俄比亞元 + ETD + + + 歐元 + EUR + + + 芬蘭馬克 + FIM + + + 芬蘭馬克 (1860-1962) + FIN + + + 斐濟元 + FJD + + + 斐濟鎊 + FJP + + + 福克蘭群島鎊 + FKP + + + 法羅島克朗 + FOK + + + 法國法郎 + FRF + + + 法國法郎 捷米那/龐加萊法郎 + FRG + + + 加蓬西非法郎 + GAF + + + 英鎊 + GBP + + + 喬治 庫旁 拉里 + GEK + + + 喬治拉里 + GEL + + + 迦納仙蔕 + GHC + + + 迦納舊仙蔕 + GHO + + + 迦納鎊 + GHP + + + 迦納重新估价後的仙蔕 + GHR + + + 直布羅陀鎊 + GIP + + + 格陵蘭克羅鈉 + GLK + + + 甘比亞達拉西 + GMD + + + 岡比亞鎊 + GMP + + + 幾內亞法郎 + GNF + + + 幾內亞法郎 (1960-1972) + GNI + + + 幾內亞西里 + GNS + + + 瓜德羅普島法郎 + GPF + + + 赤道幾內亞埃奎勒 + GQE + + + 赤道幾內亞佛朗哥 + GQF + + + 赤道幾內亞比塞塔 + GQP + + + 希臘德拉克馬 + GRD + + + 希臘新德拉克馬 + GRN + + + 瓜地馬拉格查爾 + GTQ + + + 法屬圭亞那法郎圭亞那 + GUF + + + 葡屬幾內亞埃斯庫多 + GWE + + + 葡屬幾內亞米爾里斯 + GWM + + + 幾內亞披索披索 + GWP + + + 圭亞那元 + GYD + + + 港元 + HK$ + + + 洪都拉斯倫皮拉 + HNL + + + 克羅地亞第納爾 + HRD + + + 克羅地亞庫納 + HRK + + + 海地古德 + HTG + + + 匈牙利 - 福林 + HUF + + + 北愛爾蘭鎊 + IBP + + + 印度尼西亞尼可盾 + IDG + + + 印度尼西亞爪哇盧布 + IDJ + + + 印度尼西亞新盧布 + IDN + + + 印尼 - 盧布 + IDR + + + 愛爾蘭鎊 + IEP + + + 以色列謝客爾 + ILL + + + 以色列鎊 + ILP + + + 以色列新謝克爾 + ILS + + + 曼城島英鎊 + IMP + + + 印度盧布 + =0#Rs.|1#Re.|1<Rs. 
+ + + 伊拉克第納爾 + IQD + + + 伊朗里亞爾 + IRR + + + 冰島克朗 + ISK + + + 義大利里拉 + ITL + + + 澤西鎊 + JEP + + + 牙買加元 + JMD + + + 牙買加鎊 + JMP + + + 約旦第納爾 + JOD + + + 日圓 + JP¥ + + + 肯尼亞先令 + KES + + + 吉爾吉斯索馬 + KGS + + + 柬埔寨舊瑞爾 + KHO + + + 柬埔寨瑞爾 + KHR + + + 基里巴斯元 + KID + + + 科摩羅法郎 + KMF + + + 北朝鮮人民幣 + KPP + + + 北朝鮮幣 + KPW + + + 韓國 哈瓦 + KRH + + + 南韓舊幣 + KRO + + + 韓國圜 + KRW + + + 科威特第納爾 + KWD + + + 開曼群島美元 + KYD + + + 卡扎克斯坦盧布 + KZR + + + 卡扎克斯坦坦吉 + KZT + + + 老撾 開普 + LAK + + + 黎巴嫩鎊 + LBP + + + 列支敦斯登法郎 + LIF + + + 斯里蘭卡盧布 + LKR + + + 錫蘭盧布 + LNR + + + 賴比瑞亞元 + LRD + + + 賴索托羅蒂 + LSL + + + 立陶宛里塔 + LTL + + + 立陶宛特羅 + LTT + + + 盧森堡法郎 + LUF + + + 拉脫維亞拉特銀幣 + LVL + + + 拉脫維亞盧布 + LVR + + + 利比亞英國的軍事當局里拉 + LYB + + + 利比亞第納爾 + LYD + + + 利比亞鎊 + LYP + + + 摩洛哥迪拉姆 + MAD + + + 摩洛哥法郎 + MAF + + + 摩納哥新法郎 + MCF + + + 摩納哥法郎 傑米那 + MCG + + + 摩杜雲列伊庫旁 + MDC + + + 摩杜雲列伊 + MDL + + + 摩杜雲盧布庫旁 + MDR + + + 馬達加斯加艾瑞爾 + MGA + + + 馬達加斯加法郎 + MGF + + + 馬紹爾群島美元 + MHD + + + 馬其頓第納爾 + MKD + + + 馬其頓第納爾(1992-1993) + MKN + + + 馬里法郎 + MLF + + + 緬甸元 + MMK + + + 緬甸美元外匯券 + MMX + + + 蒙古圖格里克 + MNT + + + 澳門元 + MOP + + + 馬提尼克島法郎 + MQF + + + 茅利塔尼亞烏吉亞 + MRO + + + 馬爾他里拉 + MTL + + + 馬爾他鎊 + MTP + + + 模里西斯盧布 + MUR + + + 馬爾地夫盧布 + MVP + + + 馬爾地夫海島盧非亞 + MVR + + + 馬拉維克瓦查 + MWK + + + 馬拉維鎊 + MWP + + + 墨西哥 - 披索 + MXN + + + 墨西哥銀披索 (1861-1992) + MXP + + + 墨西哥法律反轉(UDI) + MXV + + + 馬來西亞 - 林吉特 + MYR + + + 莫桑比克埃斯庫多 + MZE + + + 莫三比克梅蒂卡爾 + MZM + + + 納米比亞元 + NAD + + + 赫布里底群島 CFP 法郎 + NCF + + + 奈及利亞奈拉 + NGN + + + 奈及利亞鎊 + NGP + + + 新赫布里底群島 CFP 法郎 + NHF + + + 尼加拉瓜科多巴 + NIC + + + 尼加拉瓜金金哥多華 + NIG + + + 尼加拉瓜 金哥多華 + NIO + + + 荷蘭盾 + NLG + + + 挪威克羅納 + NOK + + + 尼泊爾盧布 + NPR + + + 紐西蘭幣 + $NZ + + + 紐西蘭鎊 + NZP + + + 阿曼里奧 + OMR + + + 阿曼里亞爾仙蔕i + OMS + + + 巴拿馬巴波亞 + PAB + + + 車城盧布 Kupon + PDK + + + 車城新盧布 + PDN + + + 車城盧布 + PDR + + + 祕魯因蒂 + PEI + + + 秘魯新太陽幣 + PEN + + + 秘魯太陽幣 + PES + + + 巴布亞紐幾內亞基那 + PGK + + + 菲律賓披索 + PHP + + + 巴基斯坦盧布 + PKR + + + 波蘭茲羅提 + PLN + + + 波蘭美元外匯券 + PLX + + + 波蘭茲羅提 (1950-1995) + PLZ + + + 巴勒斯坦鎊 + PSP + + + 葡萄牙 康拖 + PTC + + + 葡萄牙埃斯庫多 + PTE + + + 巴拉圭瓜拉尼 + PYG + + + 
卡達爾里亞爾 + QAR + + + 留尼汪島法郎 + REF + + + 羅馬尼亞列伊 + ROL + + + 羅馬尼亞新列伊 + RON + + + 俄羅斯盧布 + RUB + + + 俄羅斯盧布 (1991-1998) + RUR + + + 盧安達法郎 + RWF + + + 沙烏地里雅 + SRl + + + 沙烏地宗主里雅 + SAS + + + 索羅門群島元 + SBD + + + 塞舌爾群島盧布 + SCR + + + 蘇丹第納爾 + SDD + + + 蘇丹鎊 + SDP + + + 瑞典克羅納 + SEK + + + 新加坡幣 + SGD + + + 聖赫勒拿 鎊 + SHP + + + 斯洛文尼亞 Tolar Bons + SIB + + + 斯洛維尼亞托勒 + SIT + + + 斯洛伐克克朗 + SKK + + + 獅子山利昂 + SLL + + + 聖馬利諾里拉 + SML + + + 索馬利亞先令 + SOS + + + 索馬里蘭先令 + SQS + + + 蘇里南盾 + SRG + + + 蘇格蘭鎊 + SSP + + + 聖多美島和普林西比島多布拉 + STD + + + 聖多美島和普林西比島埃斯庫多 + STE + + + 蘇聯新盧布 + SUN + + + 蘇聯盧布 + SUR + + + 愛爾 薩爾瓦多科郎 + SVC + + + 敘利亞鎊 + SYP + + + 斯威士蘭 里郎 + SZL + + + 土耳其人和凱科斯冠 + TCC + + + 乍得 西非 法郎 + TDF + + + 泰銖 + THB + + + 塔吉克斯坦盧布 + TJR + + + 塔吉克斯坦 索莫尼 + TJS + + + 土庫曼馬納特 + TMM + + + 突尼西亞第納爾 + TND + + + 東加潘加 + TOP + + + 湯加英鎊 + TOS + + + 帝汶 埃斯庫多 + TPE + + + 帝汶元 + TPP + + + 土耳其里拉 + TRL + + + 千里達及托巴哥r + TTD + + + 特立尼達和多巴哥舊元r + TTO + + + 吐瓦魯美元 + TVD + + + 新臺幣 + NT$ + + + 坦桑尼亞 先令 + TZS + + + 烏克蘭格里夫那 + UAH + + + 烏克蘭 卡本瓦那茲 + UAK + + + 烏干達先令 (1966-1987) + UGS + + + 烏干達先令 + UGX + + + 美元 + US$ + + + 美元 (第二天) + USN + + + 美元 (同一天) + USS + + + 烏拉圭披索福厄特 + UYF + + + 烏拉圭披索 (1975-1993) + UYP + + + 烏拉圭披索 + UYU + + + 烏茲別克斯坦 庫邦 索馬 + UZC + + + 烏茲別克斯坦 薩木 + UZS + + + 梵蒂岡城里拉 + VAL + + + 北越南 皮阿斯特越南盾 + VDD + + + 北越南新盾 + VDN + + + 北越南 名 皮阿斯特越南盾 + VDP + + + 委內瑞拉博利瓦 + VEB + + + 英屬維爾斯群島元 + VGD + + + 越南盾 + VND + + + 越南新盾 + VNN + + + 越南共和國 盾 + VNR + + + 越南國家盾 + VNS + + + 萬那杜萬杜 + VUV + + + 西薩摩亞鎊 + WSP + + + 西薩摩亞塔拉 + WST + + + 亞洲第納爾會計單位 + XAD + + + 西非 法郎 BEAC + XAF + + + 亞洲貨幣單位 + XAM + + + 黃金 + XAU + + + 歐洲綜合單位 + XBA + + + 歐洲貨幣單位 + XBB + + + 歐洲會計單位(XBC) + XBC + + + 歐洲會計單位(XBD) + XBD + + + 格瑞那達元 + XCD + + + 西非 新 法郎 + XCF + + + 特殊提款權 + XDR + + + 西非 法郎 BCEAEC + XEF + + + 歐洲貨幣單位 + XEU + + + 法國金法郎 + XFO + + + 法國 UIC 法郎 + XFU + + + 伊斯蘭第納爾 + XID + + + 法國大城市新 法郎 + XMF + + + 法國安的列斯群島 西非 法郎 + XNF + + + 西非 法郎 BCEAO + XOF + + + CFP 法郎 + XPF + + + COMECON 可轉移盧布 + XTR + + + 葉門第納爾 + YDD + + + 也門阿馬迪里亞爾 + YEI + + + 也門里亞爾 + YER + + + 南斯拉夫第納爾硬幣 + YUD + + + 
南斯拉夫聯邦第納爾 + YUF + + + 南斯拉夫人1994 第納爾 + YUG + + + 南斯拉夫挪威亞第納爾 + YUM + + + 南斯拉夫 可轉換第納爾 + YUN + + + 南斯拉夫十月 第納爾 + YUO + + + 南斯拉夫改制後的第納爾 + YUR + + + 南非 - 蘭特 (金融) + ZAL + + + 南非鎊 + ZAP + + + 南非蘭特 + ZAR + + + 尚比亞克瓦查 + ZMK + + + 贊比亞鎊 + ZMP + + + 薩伊扎新伊爾 + ZRN + + + 扎伊爾扎伊爾 + ZRZ + + + 辛巴威元 + ZWD + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/fallbackcollator.py b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/fallbackcollator.py new file mode 100644 index 0000000..fc76a58 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/fallbackcollator.py @@ -0,0 +1,32 @@ +############################################################################## +# +# Copyright (c) 2004 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. 
+# +############################################################################## +"""Fallback collator +""" + +from unicodedata import normalize + +class FallbackCollator: + + def __init__(self, locale): + pass + + def key(self, s): + s = normalize('NFKC', s) + return s.lower(), s + + def cmp(self, s1, s2): + k1, k2 = self.key(s1), self.key(s2) + if k1 == k2: + return 0 + return -1 if k1 < k2 else 1 diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/fallbackcollator.txt b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/fallbackcollator.txt new file mode 100644 index 0000000..367e4e6 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/fallbackcollator.txt @@ -0,0 +1,63 @@ +Fallback Collator +================= + +The zope.i18n.interfaces.locales.ICollator interface defines an API +for collating text. Why is this important? Simply sorting unicode +strings doesn't provide an ordering that users in a given locale will +fine useful. Various languages have text sorting conventions that +don't agree with the ordering of unicode code points. (This is even +true for English. :) + +Text collation is a fairly involved process. Systems that need this, +will likely use something like ICU +(http://www-306.ibm.com/software/globalization/icu, +http://pyicu.osafoundation.org/). We don't want to introduce a +dependency on ICU and this time, so we are providing a fallback +collator that: + +- Provides an implementation of the ICollator interface that can be + used for development, and + +- Provides a small amount of value, at least for English speakers. :) + +Application code should obtain a collator by adapting a locale to +ICollator. Here we just call the collator factory with None. The +fallback collator doesn't actually use the locale, although +application code should certainly *not* count on this. 
+ + >>> import zope.i18n.locales.fallbackcollator + >>> collator = zope.i18n.locales.fallbackcollator.FallbackCollator(None) + +Now, we can pass the collator's key method to sort functions to sort +strings in a slightly friendly way: + + >>> sorted([u"Sam", u"sally", u"Abe", u"alice", u"Terry", u"tim"], + ... key=collator.key) + [u'Abe', u'alice', u'sally', u'Sam', u'Terry', u'tim'] + + +The collator has a very simple algorithm. It normalizes strings and +then returns a tuple with the result of lower-casing the normalized +string and the normalized string. We can see this by calling the key +method, which converts unicode strings to collation keys: + + >>> collator.key(u"Sam") + (u'sam', u'Sam') + + >>> collator.key(u"\xc6\xf8a\u030a") + (u'\xe6\xf8\xe5', u'\xc6\xf8\xe5') + +There is also a cmp function for comparing strings: + + >>> collator.cmp(u"Terry", u"sally") + 1 + + + >>> collator.cmp(u"sally", u"Terry") + -1 + + >>> collator.cmp(u"terry", u"Terry") + 1 + + >>> collator.cmp(u"terry", u"terry") + 0 diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/inheritance.py b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/inheritance.py new file mode 100644 index 0000000..912d8c9 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/inheritance.py @@ -0,0 +1,243 @@ +############################################################################## +# +# Copyright (c) 2004 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. 
+# +############################################################################## +"""Locale Inheritance Support + +This module provides support for locale inheritance. + +Note: In many respects this is similar to Zope 2's acquisition model, since +locale inheritance is not inheritance in the programming sense. +""" +__docformat__ = 'restructuredtext' + +from zope.deprecation import deprecate + +from zope.interface import implementer +from zope.i18n.interfaces.locales import \ + ILocaleInheritance, IAttributeInheritance, IDictionaryInheritance + +class NoParentException(AttributeError): + pass + +@implementer(ILocaleInheritance) +class Inheritance(object): + """A simple base version of locale inheritance. + + This object contains some shared code amongst the various + 'ILocaleInheritance' implementations. + """ + + + # See zope.i18n.interfaces.locales.ILocaleInheritance + __parent__ = None + + # See zope.i18n.interfaces.locales.ILocaleInheritance + __name__ = None + + def getInheritedSelf(self): + """See zope.i18n.interfaces.locales.ILocaleInheritance""" + if self.__parent__ is None: + raise NoParentException('No parent was specified.') + parent = self.__parent__.getInheritedSelf() + if isinstance(parent, dict): + return parent[self.__name__] + return getattr(parent, self.__name__) + + +@implementer(IAttributeInheritance) +class AttributeInheritance(Inheritance): + r"""Implementation of locale inheritance for attributes. + + Example:: + + >>> from zope.i18n.locales.tests.test_docstrings import \ + ... LocaleInheritanceStub + + >>> root = LocaleInheritanceStub() + >>> root.data = 'value' + >>> root.attr = 'bar value' + >>> root.data2 = AttributeInheritance() + >>> root.data2.attr = 'value2' + + >>> locale = LocaleInheritanceStub(root) + >>> locale.attr = 'foo value' + >>> locale.data2 = AttributeInheritance() + + Here is an attribute lookup directly from the locale:: + + >>> locale.data + 'value' + >>> locale.attr + 'foo value' + + ... 
however, we can also have any amount of nesting:: + + >>> locale.data2.attr + 'value2' + + Once we have looked up a particular attribute, it should be cached, + i.e. exist in the dictionary of this inheritance object:: + + >>> 'attr' in locale.data2.__dict__ + True + >>> locale.data2.__dict__['attr'] + 'value2' + + Make sure that None can be assigned as value as well:: + + >>> locale.data2.attr = None + >>> locale.data2.attr is None + True + """ + + + def __setattr__(self, name, value): + """See zope.i18n.interfaces.locales.ILocaleInheritance""" + # If we have a value that can also inherit data from other locales, we + # set its parent and name, so that we know how to get to it. + if (ILocaleInheritance.providedBy(value) and + not name.startswith('__')): + value.__parent__ = self + value.__name__ = name + super(AttributeInheritance, self).__setattr__(name, value) + + + def __getattr__(self, name): + """See zope.i18n.interfaces.locales.ILocaleInheritance""" + try: + selfUp = self.getInheritedSelf() + except NoParentException: + # There was simply no parent anymore, so let's raise an error + # for good + raise AttributeError("'%s' object (or any of its parents) has no " + "attribute '%s'" % (self.__class__.__name__, + name)) + else: + value = getattr(selfUp, name) + # Since a locale hierarchy never changes after startup, we can + # cache the value locally, saving the time to ever look it up + # again. + # Note that we cannot use the normal setattr function, since + # __setattr__ of this class tries to assign a parent and name, + # which we do not want to override. + super(AttributeInheritance, self).__setattr__(name, value) + return value + + + +@implementer(IDictionaryInheritance) +class InheritingDictionary(Inheritance, dict): + """Implementation of a dictionary that can also inherit values. + + Example:: + + >>> from zope.i18n.locales.tests.test_docstrings import \\ + ... 
LocaleInheritanceStub + + >>> root = LocaleInheritanceStub() + >>> root.data = InheritingDictionary({1: 'one', 2: 'two', 3: 'three'}) + >>> root.data2 = AttributeInheritance() + >>> root.data2.dict = InheritingDictionary({1: 'i', 2: 'ii', 3: 'iii'}) + + >>> locale = LocaleInheritanceStub(root) + >>> locale.data = InheritingDictionary({1: 'eins'}) + >>> locale.data2 = AttributeInheritance() + >>> locale.data2.dict = InheritingDictionary({1: 'I'}) + + Here is a dictionary lookup directly from the locale:: + + >>> locale.data[1] + 'eins' + >>> locale.data[2] + 'two' + + ... however, we can also have any amount of nesting:: + + >>> locale.data2.dict[1] + 'I' + >>> locale.data2.dict[2] + 'ii' + + We also have to overwrite `get`, `keys` and `items` since we want + to make sure that all upper locales are consulted before returning the + default or to construct the list of elements, respectively:: + + >>> locale.data2.dict.get(2) + 'ii' + >>> locale.data2.dict.get(4) is None + True + >>> sorted(locale.data.keys()) + [1, 2, 3] + >>> sorted(locale.data.items()) + [(1, 'eins'), (2, 'two'), (3, 'three')] + + We also override `values`:: + + >>> sorted(locale.data.values()) + ['eins', 'three', 'two'] + + Historically, `value` was a synonym of this method; it is still + available, but is deprecated:: + + >>> import warnings + >>> with warnings.catch_warnings(record=True) as w: + ... 
sorted(locale.data.value()) + ['eins', 'three', 'two'] + >>> print(w[0].message) + `value` is a deprecated synonym for `values` + """ + + + def __setitem__(self, name, value): + """See zope.i18n.interfaces.locales.ILocaleInheritance""" + if ILocaleInheritance.providedBy(value): + value.__parent__ = self + value.__name__ = name + super(InheritingDictionary, self).__setitem__(name, value) + + def __getitem__(self, name): + """See zope.i18n.interfaces.locales.ILocaleInheritance""" + if name not in self: + try: + selfUp = self.getInheritedSelf() + except NoParentException: + pass + else: + return selfUp.__getitem__(name) + return super(InheritingDictionary, self).__getitem__(name) + + def get(self, name, default=None): + """See zope.i18n.interfaces.locales.ILocaleInheritance""" + try: + return self[name] + except KeyError: + return default + + def _make_reified_inherited_dict(self): + try: + d = dict(self.getInheritedSelf()) + except NoParentException: + d = {} + d.update(self) + return d + + def items(self): + return self._make_reified_inherited_dict().items() + + def keys(self): + return list(self._make_reified_inherited_dict().keys()) + + def values(self): + return list(self._make_reified_inherited_dict().values()) + + value = deprecate("`value` is a deprecated synonym for `values`")(values) diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/provider.py b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/provider.py new file mode 100644 index 0000000..3864b7a --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/provider.py @@ -0,0 +1,80 @@ +############################################################################## +# +# Copyright (c) 2004 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Locale Provider + +The Locale Provider looks up locales and loads them from the XML data, if +necessary. +""" +import os +from zope.interface import implementer +from zope.i18n.interfaces.locales import ILocaleProvider + +class LoadLocaleError(Exception): + """This error is raised if a locale cannot be loaded.""" + + +@implementer(ILocaleProvider) +class LocaleProvider(object): + """A locale provider that gets its data from the XML data.""" + + + def __init__(self, locale_dir): + self._locales = {} + self._locale_dir = locale_dir + + def _compute_filename(self, language, country, variant): + # Creating the filename + if language is None and country is None and variant is None: + filename = 'root.xml' + else: + filename = language + if country is not None: + filename += '_' + country + if variant is not None: + if '_' not in filename: + filename += '_' + filename += '_' + variant + filename += '.xml' + return filename + + def loadLocale(self, language=None, country=None, variant=None): + """See zope.i18n.interfaces.locales.ILocaleProvider""" + filename = self._compute_filename(language, country, variant) + # Making sure we have this locale + path = os.path.join(self._locale_dir, filename) + if not os.path.exists(path): + raise LoadLocaleError( + 'The desired locale is not available.\nPath: %s' % path) + + # Import here to avoid circular imports + from zope.i18n.locales.xmlfactory import LocaleFactory + + # Let's get it! 
+ locale = LocaleFactory(path)() + self._locales[(language, country, variant)] = locale + + def getLocale(self, language=None, country=None, variant=None): + """See zope.i18n.interfaces.locales.ILocaleProvider""" + # We want to be liberal in what we accept, but the standard is lower + # case language codes, upper case country codes, and upper case + # variants, so coerce case here. + if language: + language = language.lower() + if country: + country = country.upper() + if variant: + variant = variant.upper() + if (language, country, variant) not in self._locales: + self.loadLocale(language, country, variant) + return self._locales[(language, country, variant)] diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/tests/__init__.py b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/tests/__init__.py new file mode 100644 index 0000000..1e4c732 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/tests/__init__.py @@ -0,0 +1 @@ +# Test package for locales diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/tests/test_docstrings.py b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/tests/test_docstrings.py new file mode 100644 index 0000000..fa52078 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/tests/test_docstrings.py @@ -0,0 +1,42 @@ +############################################################################## +# +# Copyright (c) 2004 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. 
+# +############################################################################## +"""Tests for the ZCML Documentation Module +""" +import unittest +from doctest import DocTestSuite +from zope.i18n.locales.inheritance import AttributeInheritance +from zope.i18n.locales.inheritance import NoParentException + +from zope.i18n.testing import unicode_checker + +class LocaleInheritanceStub(AttributeInheritance): + + def __init__(self, nextLocale=None): + self.__nextLocale__ = nextLocale + + def getInheritedSelf(self): + if self.__nextLocale__ is None: + raise NoParentException('No parent was specified.') + return self.__nextLocale__ + + +def test_suite(): + return unittest.TestSuite(( + DocTestSuite('zope.i18n.locales', checker=unicode_checker), + DocTestSuite('zope.i18n.locales.inheritance', checker=unicode_checker), + DocTestSuite('zope.i18n.locales.xmlfactory', checker=unicode_checker), + )) + +if __name__ == '__main__': + unittest.main() diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/tests/test_fallbackcollator.py b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/tests/test_fallbackcollator.py new file mode 100644 index 0000000..7e10b50 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/tests/test_fallbackcollator.py @@ -0,0 +1,27 @@ +############################################################################## +# +# Copyright (c) 2004 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. 
+# +############################################################################## + +import unittest +import doctest + +from zope.i18n.testing import unicode_checker + +def test_suite(): + return unittest.TestSuite(( + doctest.DocFileSuite('../fallbackcollator.txt', checker=unicode_checker), + )) + +if __name__ == '__main__': + unittest.main(defaultTest='test_suite') + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/tests/test_locales.py b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/tests/test_locales.py new file mode 100644 index 0000000..2b100b8 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/tests/test_locales.py @@ -0,0 +1,162 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""This module tests the LocaleProvider and everything that goes with it. 
+""" +import os +import datetime +from unittest import TestCase + +from zope.i18n.interfaces.locales import ILocaleProvider +from zope.i18n.locales import locales +from zope.i18n.locales.provider import LocaleProvider, LoadLocaleError + +import zope.i18n +datadir = os.path.join(os.path.dirname(zope.i18n.__file__), 'locales', 'data') + +class AbstractTestILocaleProviderMixin(object): + """Test the functionality of an implmentation of the ILocaleProvider + interface.""" + + def _makeNewProvider(self): + raise NotImplementedError() + + def setUp(self): + self.locales = self._makeNewProvider() + + def testInterfaceConformity(self): + self.assertTrue(ILocaleProvider.providedBy(self.locales)) + + def test_getLocale(self): + locale = self.locales.getLocale(None, None, None) + self.assertEqual(locale.id.language, None) + self.assertEqual(locale.id.territory, None) + self.assertEqual(locale.id.variant, None) + + locale = self.locales.getLocale('en', None, None) + self.assertEqual(locale.id.language, 'en') + self.assertEqual(locale.id.territory, None) + self.assertEqual(locale.id.variant, None) + + locale = self.locales.getLocale('en', 'US', None) + self.assertEqual(locale.id.language, 'en') + self.assertEqual(locale.id.territory, 'US') + self.assertEqual(locale.id.variant, None) + + locale = self.locales.getLocale('en', 'US', 'POSIX') + self.assertEqual(locale.id.language, 'en') + self.assertEqual(locale.id.territory, 'US') + self.assertEqual(locale.id.variant, 'POSIX') + + +class TestLocaleProvider(AbstractTestILocaleProviderMixin, TestCase): + + def _makeNewProvider(self): + return LocaleProvider(datadir) + + def test_loadLocale(self): + self.locales.loadLocale(None, None, None) + self.assertEqual(list(self.locales._locales.keys()), + [(None, None, None)]) + + self.locales.loadLocale('en', None, None) + self.assertIn(('en', None, None), self.locales._locales.keys()) + + def test_loadLocaleFailure(self): + self.assertRaises(LoadLocaleError, self.locales.loadLocale, 'zzz') 
+ + def test_compute_filename_with_variant_no_country(self): + filename = self.locales._compute_filename('en', None, 'variant') + self.assertEqual('en__variant.xml', filename) + + +class TestLocaleAndProvider(TestCase): + + # Set the locale on the class so that test cases don't have + # to pay to construct a new one each time. + + locales.loadLocale(None, None, None) + locales.loadLocale('en', None, None) + locales.loadLocale('en', 'US', None) + locales.loadLocale('en', 'US', 'POSIX') + locale = locales.getLocale('en', 'US', 'POSIX') + + def test_getTimeFormatter(self): + formatter = self.locale.dates.getFormatter('time', 'medium') + self.assertEqual(formatter.getPattern(), 'h:mm:ss a') + self.assertEqual(formatter.format(datetime.time(12, 30, 10)), + '12:30:10 PM') + self.assertEqual(formatter.parse('12:30:10 PM'), + datetime.time(12, 30, 10)) + + def test_getDateFormatter(self): + formatter = self.locale.dates.getFormatter('date', 'medium') + self.assertEqual(formatter.getPattern(), 'MMM d, yyyy') + self.assertEqual(formatter.format(datetime.date(2003, 1, 2)), + 'Jan 2, 2003') + self.assertEqual(formatter.parse('Jan 2, 2003'), + datetime.date(2003, 1, 2)) + + def test_getDateTimeFormatter(self): + formatter = self.locale.dates.getFormatter('dateTime', 'medium') + self.assertEqual(formatter.getPattern(), 'MMM d, yyyy h:mm:ss a') + self.assertEqual( + formatter.format(datetime.datetime(2003, 1, 2, 12, 30)), + 'Jan 2, 2003 12:30:00 PM') + self.assertEqual(formatter.parse('Jan 2, 2003 12:30:00 PM'), + datetime.datetime(2003, 1, 2, 12, 30)) + + def test_getNumberFormatter(self): + formatter = self.locale.numbers.getFormatter('decimal') + self.assertEqual(formatter.getPattern(), '###0.###;-###0.###') + self.assertEqual(formatter.format(1234.5678), '1234.568') + self.assertEqual(formatter.format(-1234.5678), '-1234.568') + self.assertEqual(formatter.parse('1234.567'), 1234.567) + self.assertEqual(formatter.parse('-1234.567'), -1234.567) + + +class 
TestGlobalLocaleProvider(TestCase): + + def testLoading(self): + locales.loadLocale(None, None, None) + self.assertIn((None, None, None), locales._locales) + locales.loadLocale('en', None, None) + self.assertIn(('en', None, None), locales._locales) + locales.loadLocale('en', 'US', None) + self.assertIn(('en', 'US', None), locales._locales) + locales.loadLocale('en', 'US', 'POSIX') + self.assertIn(('en', 'US', 'POSIX'), locales._locales) + + def test_getLocale(self): + locale = locales.getLocale('en', 'GB') + self.assertEqual(locale.id.language, 'en') + self.assertEqual(locale.id.territory, 'GB') + self.assertEqual(locale.id.variant, None) + +class TestRootLocale(TestCase): + """There were some complaints that the root locale does not work + correctly, so make sure it does.""" + + locales.loadLocale(None, None, None) + locale = locales.getLocale(None, None, None) + + def test_dateFormatter(self): + formatter = self.locale.dates.getFormatter('date') + self.assertEqual( + formatter.format(datetime.date(2004, 10, 31), 'E'), '1') + self.assertEqual( + formatter.format(datetime.date(2004, 10, 31), 'EE'), '01') + self.assertEqual( + formatter.format(datetime.date(2004, 10, 31), 'EEE'), '1') + self.assertEqual( + formatter.format(datetime.date(2004, 10, 31), 'EEEE'), '1') diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/tests/test_xmlfactory.py b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/tests/test_xmlfactory.py new file mode 100644 index 0000000..792c96d --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/tests/test_xmlfactory.py @@ -0,0 +1,62 @@ +############################################################################## +# +# Copyright (c) 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTLAR PURPOSE. +# +############################################################################## +"""Testing all XML Locale functionality. +""" +import os +from unittest import TestCase, TestSuite + +from zope.i18n.locales.xmlfactory import LocaleFactory +import zope.i18n + +class LocaleXMLFileTestCase(TestCase): + """This test verifies that every locale XML file can be loaded.""" + + def __init__(self, path): + self.__path = path + TestCase.__init__(self) + + def runTest(self): + # Loading Locale object + LocaleFactory(self.__path)() + + # XXX: The tests below are commented out because it's not + # necessary for the xml files to have all format definitions. + + ## Making sure all number format patterns parse + #for category in (u'decimal', u'scientific', u'percent', u'currency'): + # for length in getattr(locale.numbers, category+'Formats').values(): + # for format in length.formats.values(): + # self.assert_(parseNumberPattern(format.pattern) is not None) + + ## Making sure all datetime patterns parse + #for calendar in locale.dates.calendars.values(): + # for category in ('date', 'time', 'dateTime'): + # for length in getattr(calendar, category+'Formats').values(): + # for format in length.formats.values(): + # self.assert_( + # parseDateTimePattern(format.pattern) is not None) + + + +def test_suite(): + suite = TestSuite() + locale_dir = os.path.join(os.path.dirname(zope.i18n.__file__), + 'locales', 'data') + for path in os.listdir(locale_dir): + if not path.endswith(".xml"): + continue + path = os.path.join(locale_dir, path) + case = LocaleXMLFileTestCase(path) + suite.addTest(case) + return suite diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/xmlfactory.py 
b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/xmlfactory.py new file mode 100644 index 0000000..5b2dbf5 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/locales/xmlfactory.py @@ -0,0 +1,1379 @@ +############################################################################## +# +# Copyright (c) 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""XML Locale-related objects and functions +""" +from datetime import date, time +from xml.dom.minidom import parse as parseXML +from zope.i18n.locales import Locale, LocaleDisplayNames, LocaleDates +from zope.i18n.locales import LocaleVersion, LocaleIdentity, LocaleTimeZone +from zope.i18n.locales import LocaleCalendar, LocaleCurrency, LocaleNumbers +from zope.i18n.locales import LocaleFormat, LocaleFormatLength, dayMapping +from zope.i18n.locales import LocaleOrientation, LocaleDayContext +from zope.i18n.locales import LocaleMonthContext, calendarAliases +from zope.i18n.locales.inheritance import InheritingDictionary + + +class LocaleFactory(object): + """This class creates a Locale object from an ICU XML file.""" + + def __init__(self, path): + """Initialize factory.""" + self._path = path + # Mainly for testing + if path: + self._data = parseXML(path).documentElement + + def _getText(self, nodelist): + rc = u'' + for node in nodelist: + if node.nodeType == node.TEXT_NODE: + rc = rc + node.data + return rc + + + def _extractVersion(self, identity_node): + """Extract the Locale's version info based on 
data from the DOM + tree. + + Example:: + + >>> factory = LocaleFactory(None) + >>> from xml.dom.minidom import parseString + >>> xml = u''' + ... + ... Some notes + ... + ... + ... + ... ''' + >>> dom = parseString(xml) + + >>> version = factory._extractVersion(dom.documentElement) + >>> version.number + u'1.0' + >>> version.generationDate + datetime.date(2003, 12, 19) + >>> version.notes + u'Some notes' + """ + number = generationDate = notes = None + # Retrieve the version number and notes of the locale + nodes = identity_node.getElementsByTagName('version') + if nodes: + number = nodes[0].getAttribute('number') + notes = self._getText(nodes[0].childNodes) + # Retrieve the generationDate of the locale + nodes = identity_node.getElementsByTagName('generation') + if nodes: + year, month, day = nodes[0].getAttribute('date').split('-') + generationDate = date(int(year), int(month), int(day)) + + return LocaleVersion(number, generationDate, notes) + + + def _extractIdentity(self): + """Extract the Locale's identity object based on info from the DOM + tree. + + Example:: + + >>> from xml.dom.minidom import parseString + >>> xml = u''' + ... + ... + ... + ... + ... + ... + ... + ... + ... 
''' + >>> factory = LocaleFactory(None) + >>> factory._data = parseString(xml).documentElement + + >>> id = factory._extractIdentity() + >>> id.language + u'en' + >>> id.script is None + True + >>> id.territory + u'US' + >>> id.variant + u'POSIX' + >>> id.version.number + u'1.0' + """ + id = LocaleIdentity() + identity = self._data.getElementsByTagName('identity')[0] + # Retrieve the language of the locale + nodes = identity.getElementsByTagName('language') + if nodes != []: + id.language = nodes[0].getAttribute('type') or None + # Retrieve the territory of the locale + nodes = identity.getElementsByTagName('territory') + if nodes != []: + id.territory = nodes[0].getAttribute('type') or None + # Retrieve the varriant of the locale + nodes = identity.getElementsByTagName('variant') + if nodes != []: + id.variant = nodes[0].getAttribute('type') or None + + id.version = self._extractVersion(identity) + return id + + + def _extractTypes(self, names_node): + """Extract all types from the names_node. + + Example:: + + >>> factory = LocaleFactory(None) + >>> from xml.dom.minidom import parseString + >>> xml = u''' + ... + ... + ... + ... BUDDHIST + ... CHINESE + ... GREGORIAN + ... STROKE + ... TRADITIONAL + ... + ... 
''' + >>> dom = parseString(xml) + + >>> types = factory._extractTypes(dom.documentElement) + >>> keys = types.keys() + >>> keys.sort() + >>> keys[:2] + [(u'Fallback', u'calendar'), (u'buddhist', u'calendar')] + >>> keys[2:4] + [(u'chinese', u'calendar'), (u'gregorian', u'calendar')] + >>> keys[4:] + [(u'stroke', u'collation'), (u'traditional', u'collation')] + >>> types[(u'chinese', u'calendar')] + u'CHINESE' + >>> types[(u'stroke', u'collation')] + u'STROKE' + """ + # 'types' node has not to exist + types_nodes = names_node.getElementsByTagName('types') + if types_nodes == []: + return + # Retrieve all types + types = InheritingDictionary() + for type_node in types_nodes[0].getElementsByTagName('type'): + type = type_node.getAttribute('type') + key = type_node.getAttribute('key') + types[(type, key)] = self._getText(type_node.childNodes) + return types + + + def _extractDisplayNames(self): + """Extract all display names from the DOM tree. + + Example:: + + >>> from xml.dom.minidom import parseString + >>> xml = u''' + ... + ... + ... + ... + ... aa + ... ab + ... + ... + ... + ... + ... + ... + ... AD + ... AE + ... + ... + ... + ... POSIX + ... + ... + ... CALENDAR + ... COLLATION + ... + ... + ... BUDDHIST + ... STROKE + ... + ... + ... 
''' + >>> factory = LocaleFactory(None) + >>> factory._data = parseString(xml).documentElement + + >>> names = factory._extractDisplayNames() + + >>> keys = names.languages.keys() + >>> keys.sort() + >>> keys + [u'Fallback', u'aa', u'ab'] + >>> names.languages[u"aa"] + u'aa' + + >>> keys = names.scripts.keys() + >>> keys.sort() + >>> keys + [u'Arab', u'Armn'] + >>> names.scripts[u"Arab"] + u'Arab' + + >>> keys = names.territories.keys() + >>> keys.sort() + >>> keys + [u'AD', u'AE'] + >>> names.territories[u"AD"] + u'AD' + + >>> keys = names.variants.keys() + >>> keys.sort() + >>> keys + [u'Fallback', u'POSIX'] + >>> names.variants[u"Fallback"] + u'' + + >>> keys = names.keys.keys() + >>> keys.sort() + >>> keys + [u'calendar', u'collation'] + >>> names.keys[u"calendar"] + u'CALENDAR' + + >>> names.types[(u"stroke", u"collation")] + u'STROKE' + """ + displayNames = LocaleDisplayNames() + # Neither the 'localeDisplayNames' or 'scripts' node has to exist + names_nodes = self._data.getElementsByTagName('localeDisplayNames') + if names_nodes == []: + return displayNames + + for group_tag, single_tag in (('languages', 'language'), + ('scripts', 'script'), + ('territories', 'territory'), + ('variants', 'variant'), + ('keys', 'key')): + group_nodes = names_nodes[0].getElementsByTagName(group_tag) + if group_nodes == []: + continue + # Retrieve all children + elements = InheritingDictionary() + for element in group_nodes[0].getElementsByTagName(single_tag): + type = element.getAttribute('type') + elements[type] = self._getText(element.childNodes) + setattr(displayNames, group_tag, elements) + + types = self._extractTypes(names_nodes[0]) + if types is not None: + displayNames.types = types + return displayNames + + + def _extractMonths(self, months_node, calendar): + """Extract all month entries from cal_node and store them in calendar. + + Example:: + + >>> class CalendarStub(object): + ... 
months = None + >>> calendar = CalendarStub() + >>> factory = LocaleFactory(None) + >>> from xml.dom.minidom import parseString + >>> xml = u''' + ... + ... + ... + ... + ... + ... Januar + ... Februar + ... Maerz + ... April + ... Mai + ... Juni + ... Juli + ... August + ... September + ... Oktober + ... November + ... Dezember + ... + ... + ... Jan + ... Feb + ... Mrz + ... Apr + ... Mai + ... Jun + ... Jul + ... Aug + ... Sep + ... Okt + ... Nov + ... Dez + ... + ... + ... ''' + >>> dom = parseString(xml) + >>> factory._extractMonths(dom.documentElement, calendar) + + The contexts and widths were introduced in CLDR 1.1, the way + of getting month names is like this:: + + >>> calendar.defaultMonthContext + u'format' + + >>> ctx = calendar.monthContexts[u"format"] + >>> ctx.defaultWidth + u'wide' + + >>> names = [ctx.months[u"wide"][type] for type in range(1,13)] + >>> names[:7] + [u'Januar', u'Februar', u'Maerz', u'April', u'Mai', u'Juni', u'Juli'] + >>> names[7:] + [u'August', u'September', u'Oktober', u'November', u'Dezember'] + + >>> abbrs = [ctx.months[u"abbreviated"][type] for type in range(1,13)] + >>> abbrs[:6] + [u'Jan', u'Feb', u'Mrz', u'Apr', u'Mai', u'Jun'] + >>> abbrs[6:] + [u'Jul', u'Aug', u'Sep', u'Okt', u'Nov', u'Dez'] + + The old, CLDR 1.0 way of getting month names and abbreviations:: + + >>> names = [calendar.months.get(type, (None, None))[0] + ... for type in range(1, 13)] + >>> names[:7] + [u'Januar', u'Februar', u'Maerz', u'April', u'Mai', u'Juni', u'Juli'] + >>> names[7:] + [u'August', u'September', u'Oktober', u'November', u'Dezember'] + + >>> abbrs = [calendar.months.get(type, (None, None))[1] + ... 
for type in range(1, 13)] + >>> abbrs[:6] + [u'Jan', u'Feb', u'Mrz', u'Apr', u'Mai', u'Jun'] + >>> abbrs[6:] + [u'Jul', u'Aug', u'Sep', u'Okt', u'Nov', u'Dez'] + + If there are no months, nothing happens: + + >>> calendar = CalendarStub() + >>> factory = LocaleFactory(None) + >>> xml = u'''''' + >>> dom = parseString(xml) + >>> factory._extractMonths(dom.documentElement, calendar) + >>> calendar.months + + """ + + defaultMonthContext_node = months_node.getElementsByTagName('default') + if defaultMonthContext_node: + calendar.defaultMonthContext = defaultMonthContext_node[0].getAttribute('type') + + monthContext_nodes = months_node.getElementsByTagName('monthContext') + if not monthContext_nodes: + return + + calendar.monthContexts = InheritingDictionary() + names_node = abbrs_node = None # BBB + + for node in monthContext_nodes: + context_type = node.getAttribute('type') + mctx = LocaleMonthContext(context_type) + calendar.monthContexts[context_type] = mctx + + defaultWidth_node = node.getElementsByTagName('default') + if defaultWidth_node: + mctx.defaultWidth = defaultWidth_node[0].getAttribute('type') + + widths = InheritingDictionary() + mctx.months = widths + for width_node in node.getElementsByTagName('monthWidth'): + width_type = width_node.getAttribute('type') + width = InheritingDictionary() + widths[width_type] = width + + for month_node in width_node.getElementsByTagName('month'): + mtype = int(month_node.getAttribute('type')) + width[mtype] = self._getText(month_node.childNodes) + + if context_type == 'format': + if width_type == 'abbreviated': + abbrs_node = width_node + elif width_type == 'wide': + names_node = width_node + + if not (names_node and abbrs_node): + return + + # Get all month names + names = {} + for name_node in names_node.getElementsByTagName('month'): + type = int(name_node.getAttribute('type')) + names[type] = self._getText(name_node.childNodes) + + # Get all month abbrs + abbrs = {} + for abbr_node in 
abbrs_node.getElementsByTagName('month'): + type = int(abbr_node.getAttribute('type')) + abbrs[type] = self._getText(abbr_node.childNodes) + + # Put the info together + calendar.months = InheritingDictionary() + for type in range(1, 13): + calendar.months[type] = (names.get(type, None), + abbrs.get(type, None)) + + + def _extractDays(self, days_node, calendar): + """Extract all day entries from cal_node and store them in + calendar. + + Example:: + + >>> class CalendarStub(object): + ... days = None + >>> calendar = CalendarStub() + >>> factory = LocaleFactory(None) + >>> from xml.dom.minidom import parseString + >>> xml = u''' + ... + ... + ... + ... + ... + ... Sonntag + ... Montag + ... Dienstag + ... Mittwoch + ... Donnerstag + ... Freitag + ... Samstag + ... + ... + ... So + ... Mo + ... Di + ... Mi + ... Do + ... Fr + ... Sa + ... + ... + ... ''' + >>> dom = parseString(xml) + >>> factory._extractDays(dom.documentElement, calendar) + + Day contexts and widths were introduced in CLDR 1.1, here's + how to use them:: + + >>> calendar.defaultDayContext + u'format' + + >>> ctx = calendar.dayContexts[u"format"] + >>> ctx.defaultWidth + u'wide' + + >>> names = [ctx.days[u"wide"][type] for type in range(1,8)] + >>> names[:4] + [u'Montag', u'Dienstag', u'Mittwoch', u'Donnerstag'] + >>> names[4:] + [u'Freitag', u'Samstag', u'Sonntag'] + + >>> abbrs = [ctx.days[u"abbreviated"][type] for type in range(1,8)] + >>> abbrs + [u'Mo', u'Di', u'Mi', u'Do', u'Fr', u'Sa', u'So'] + + And here's the old CLDR 1.0 way of getting day names and + abbreviations:: + + >>> names = [calendar.days.get(type, (None, None))[0] + ... for type in range(1, 8)] + >>> names[:4] + [u'Montag', u'Dienstag', u'Mittwoch', u'Donnerstag'] + >>> names[4:] + [u'Freitag', u'Samstag', u'Sonntag'] + + >>> abbrs = [calendar.days.get(type, (None, None))[1] + ... 
for type in range(1, 8)] + >>> abbrs + [u'Mo', u'Di', u'Mi', u'Do', u'Fr', u'Sa', u'So'] + + If there are no days, nothing happens: + + >>> calendar = CalendarStub() + >>> factory = LocaleFactory(None) + >>> xml = u'''''' + >>> dom = parseString(xml) + >>> factory._extractDays(dom.documentElement, calendar) + >>> calendar.days + + """ + + defaultDayContext_node = days_node.getElementsByTagName('default') + if defaultDayContext_node: + calendar.defaultDayContext = defaultDayContext_node[0].getAttribute('type') + + dayContext_nodes = days_node.getElementsByTagName('dayContext') + if not dayContext_nodes: + return + + calendar.dayContexts = InheritingDictionary() + names_node = abbrs_node = None # BBB + + for node in dayContext_nodes: + context_type = node.getAttribute('type') + dctx = LocaleDayContext(context_type) + calendar.dayContexts[context_type] = dctx + + defaultWidth_node = node.getElementsByTagName('default') + if defaultWidth_node: + dctx.defaultWidth = defaultWidth_node[0].getAttribute('type') + + widths = InheritingDictionary() + dctx.days = widths + for width_node in node.getElementsByTagName('dayWidth'): + width_type = width_node.getAttribute('type') + width = InheritingDictionary() + widths[width_type] = width + + for day_node in width_node.getElementsByTagName('day'): + dtype = dayMapping[day_node.getAttribute('type')] + width[dtype] = self._getText(day_node.childNodes) + + if context_type == 'format': + if width_type == 'abbreviated': + abbrs_node = width_node + elif width_type == 'wide': + names_node = width_node + + if not (names_node and abbrs_node): + return + + # Get all weekday names + names = {} + for name_node in names_node.getElementsByTagName('day'): + type = dayMapping[name_node.getAttribute('type')] + names[type] = self._getText(name_node.childNodes) + # Get all weekday abbreviations + abbrs = {} + for abbr_node in abbrs_node.getElementsByTagName('day'): + type = dayMapping[abbr_node.getAttribute('type')] + abbrs[type] = 
self._getText(abbr_node.childNodes) + + # Put the info together + calendar.days = InheritingDictionary() + for type in range(1, 13): + calendar.days[type] = (names.get(type, None), + abbrs.get(type, None)) + + + def _extractWeek(self, cal_node, calendar): + """Extract all week entries from cal_node and store them in + calendar. + + Example:: + + >>> class CalendarStub(object): + ... week = None + >>> calendar = CalendarStub() + >>> factory = LocaleFactory(None) + >>> from xml.dom.minidom import parseString + >>> xml = u''' + ... + ... + ... + ... + ... + ... + ... + ... ''' + >>> dom = parseString(xml) + >>> factory._extractWeek(dom.documentElement, calendar) + + >>> calendar.week['minDays'] + 1 + >>> calendar.week['firstDay'] + 7 + >>> calendar.week['weekendStart'] + (5, datetime.time(18, 0)) + >>> calendar.week['weekendEnd'] + (7, datetime.time(18, 0)) + """ + # See whether we have week entries + week_nodes = cal_node.getElementsByTagName('week') + if not week_nodes: + return + + calendar.week = InheritingDictionary() + + # Get the 'minDays' value if available + for node in week_nodes[0].getElementsByTagName('minDays'): + calendar.week['minDays'] = int(node.getAttribute('count')) + + # Get the 'firstDay' value if available + for node in week_nodes[0].getElementsByTagName('firstDay'): + calendar.week['firstDay'] = dayMapping[node.getAttribute('day')] + + # Get the 'weekendStart' value if available + for node in week_nodes[0].getElementsByTagName('weekendStart'): + day = dayMapping[node.getAttribute('day')] + time_args = map(int, node.getAttribute('time').split(':')) + calendar.week['weekendStart'] = (day, time(*time_args)) + + # Get the 'weekendEnd' value if available + for node in week_nodes[0].getElementsByTagName('weekendEnd'): + day = dayMapping[node.getAttribute('day')] + time_args = map(int, node.getAttribute('time').split(':')) + calendar.week['weekendEnd'] = (day, time(*time_args)) + + + def _extractEras(self, cal_node, calendar): + """Extract all era 
entries from cal_node and store them in + calendar. + + Example:: + + >>> class CalendarStub(object): + ... days = None + >>> calendar = CalendarStub() + >>> factory = LocaleFactory(None) + >>> from xml.dom.minidom import parseString + >>> xml = u''' + ... + ... + ... + ... BC + ... AD + ... + ... + ... Before Christ + ... + ... + ... ''' + >>> dom = parseString(xml) + >>> factory._extractEras(dom.documentElement, calendar) + + >>> names = [calendar.eras.get(type, (None, None))[0] + ... for type in range(2)] + >>> names + [u'Before Christ', None] + + >>> abbrs = [calendar.eras.get(type, (None, None))[1] + ... for type in range(2)] + >>> abbrs + [u'BC', u'AD'] + """ + # See whether we have era names and abbreviations + eras_nodes = cal_node.getElementsByTagName('eras') + if not eras_nodes: + return + names_nodes = eras_nodes[0].getElementsByTagName('eraName') + abbrs_nodes = eras_nodes[0].getElementsByTagName('eraAbbr') + + # Get all era names + names = {} + if names_nodes: + for name_node in names_nodes[0].getElementsByTagName('era'): + type = int(name_node.getAttribute('type')) + names[type] = self._getText(name_node.childNodes) + # Get all era abbreviations + abbrs = {} + if abbrs_nodes: + for abbr_node in abbrs_nodes[0].getElementsByTagName('era'): + type = int(abbr_node.getAttribute('type')) + abbrs[type] = self._getText(abbr_node.childNodes) + + calendar.eras = InheritingDictionary() + for type in abbrs.keys(): + calendar.eras[type] = (names.get(type, None), abbrs.get(type, None)) + + + def _extractFormats(self, formats_node, lengthNodeName, formatNodeName): + """Extract all format entries from formats_node and return a + tuple of the form (defaultFormatType, [LocaleFormatLength, ...]). + + Example:: + + >>> factory = LocaleFactory(None) + >>> from xml.dom.minidom import parseString + >>> xml = u''' + ... + ... + ... + ... + ... EEEE, MMMM d, yyyy + ... + ... + ... + ... + ... + ... Standard Date + ... MMM d, yyyy + ... + ... + ... MMM dd, yyyy + ... + ... 
+ ... ''' + >>> dom = parseString(xml) + + >>> default, lengths = factory._extractFormats( + ... dom.documentElement, 'dateFormatLength', 'dateFormat') + >>> default + u'medium' + >>> lengths[u"full"].formats[None].pattern + u'EEEE, MMMM d, yyyy' + >>> lengths[u"medium"].default + u'DateFormatsKey2' + >>> lengths[u"medium"].formats['DateFormatsKey3'].pattern + u'MMM dd, yyyy' + >>> lengths[u"medium"].formats['DateFormatsKey2'].displayName + u'Standard Date' + """ + formats_default = None + default_nodes = formats_node.getElementsByTagName('default') + if default_nodes: + formats_default = default_nodes[0].getAttribute('type') + + lengths = InheritingDictionary() + for length_node in formats_node.getElementsByTagName(lengthNodeName): + type = length_node.getAttribute('type') or None + length = LocaleFormatLength(type) + + default_nodes = length_node.getElementsByTagName('default') + if default_nodes: + length.default = default_nodes[0].getAttribute('type') + + if length_node.getElementsByTagName(formatNodeName): + length.formats = InheritingDictionary() + + for format_node in length_node.getElementsByTagName(formatNodeName): + format = LocaleFormat() + format.type = format_node.getAttribute('type') or None + pattern_node = format_node.getElementsByTagName('pattern')[0] + format.pattern = self._getText(pattern_node.childNodes) + name_nodes = format_node.getElementsByTagName('displayName') + if name_nodes: + format.displayName = self._getText(name_nodes[0].childNodes) + length.formats[format.type] = format + + lengths[length.type] = length + + return (formats_default, lengths) + + def _extractCalendars(self, dates_node): + """Extract all calendars and their specific information from the + Locale's DOM tree. + + Example:: + + >>> factory = LocaleFactory(None) + >>> from xml.dom.minidom import parseString + >>> xml = u''' + ... + ... + ... + ... + ... January + ... December + ... + ... + ... Jan + ... Dec + ... + ... + ... Sunday + ... Saturday + ... + ... + ... 
Sun + ... Sat + ... + ... + ... + ... + ... + ... AM + ... PM + ... + ... + ... BC + ... AD + ... + ... + ... + ... + ... + ... EEEE, MMMM d, yyyy + ... + ... + ... + ... + ... + ... + ... + ... h:mm:ss a + ... + ... + ... + ... + ... + ... + ... {0} {1} + ... + ... + ... + ... + ... + ... + ... BE + ... + ... + ... + ... ''' + >>> dom = parseString(xml) + + >>> calendars = factory._extractCalendars(dom.documentElement) + >>> keys = calendars.keys() + >>> keys.sort() + >>> keys + [u'buddhist', u'gregorian', 'thai-buddhist'] + + Note that "thai-buddhist" are added as an alias to "buddhist". + + >>> calendars['buddhist'] is calendars['thai-buddhist'] + True + + If there are no calendars, nothing happens: + + >>> xml = u'''''' + >>> dom = parseString(xml) + >>> factory._extractCalendars(dom.documentElement) + + """ + cals_nodes = dates_node.getElementsByTagName('calendars') + # no calendar node + if cals_nodes == []: + return None + + calendars = InheritingDictionary() + for cal_node in cals_nodes[0].getElementsByTagName('calendar'): + # get the calendar type + type = cal_node.getAttribute('type') + calendar = LocaleCalendar(type) + + # get month names and abbreviations + months_nodes = cal_node.getElementsByTagName('months') + if months_nodes: + self._extractMonths(months_nodes[0], calendar) + + # get weekday names and abbreviations + days_nodes = cal_node.getElementsByTagName('days') + if days_nodes: + self._extractDays(days_nodes[0], calendar) + + # get week information + self._extractWeek(cal_node, calendar) + + # get am/pm designation values + nodes = cal_node.getElementsByTagName('am') + if nodes: + calendar.am = self._getText(nodes[0].childNodes) + nodes = cal_node.getElementsByTagName('pm') + if nodes: + calendar.pm = self._getText(nodes[0].childNodes) + + # get era names and abbreviations + self._extractEras(cal_node, calendar) + + for formatsName, lengthName, formatName in ( + ('dateFormats', 'dateFormatLength', 'dateFormat'), + ('timeFormats', 
'timeFormatLength', 'timeFormat'), + ('dateTimeFormats', 'dateTimeFormatLength', 'dateTimeFormat')): + + formats_nodes = cal_node.getElementsByTagName(formatsName) + if formats_nodes: + default, formats = self._extractFormats( + formats_nodes[0], lengthName, formatName) + setattr(calendar, + 'default'+formatName[0].upper()+formatName[1:], + default) + setattr(calendar, formatsName, formats) + + calendars[calendar.type] = calendar + if calendar.type in calendarAliases: + for alias in calendarAliases[calendar.type]: + calendars[alias] = calendar + + return calendars + + + def _extractTimeZones(self, dates_node): + """Extract all timezone information for the locale from the DOM + tree. + + Example:: + + >>> factory = LocaleFactory(None) + >>> from xml.dom.minidom import parseString + >>> xml = u''' + ... + ... + ... + ... + ... Pacific Time + ... Pacific Standard Time + ... Pacific Daylight Time + ... + ... + ... PT + ... PST + ... PDT + ... + ... San Francisco + ... + ... + ... + ... British Time + ... British Standard Time + ... British Daylight Time + ... + ... York + ... + ... + ... 
''' + >>> dom = parseString(xml) + >>> zones = factory._extractTimeZones(dom.documentElement) + + >>> keys = zones.keys() + >>> keys.sort() + >>> keys + [u'America/Los_Angeles', u'Europe/London'] + >>> zones[u"Europe/London"].names[u"generic"] + (u'British Time', None) + >>> zones[u"Europe/London"].cities + [u'York'] + >>> zones[u"America/Los_Angeles"].names[u"generic"] + (u'Pacific Time', u'PT') + """ + tz_names = dates_node.getElementsByTagName('timeZoneNames') + if not tz_names: + return + + zones = InheritingDictionary() + for node in tz_names[0].getElementsByTagName('zone'): + type = node.getAttribute('type') + zone = LocaleTimeZone(type) + + # get the short and long name node + long = node.getElementsByTagName('long') + short = node.getElementsByTagName('short') + for type in (u"generic", u"standard", u"daylight"): + # get long name + long_desc = None + if long: + long_nodes = long[0].getElementsByTagName(type) + if long_nodes: + long_desc = self._getText(long_nodes[0].childNodes) + # get short name + short_desc = None + if short: + short_nodes = short[0].getElementsByTagName(type) + if short_nodes: + short_desc = self._getText(short_nodes[0].childNodes) + if long_desc is not None or short_desc is not None: + zone.names[type] = (long_desc, short_desc) + + for city in node.getElementsByTagName('exemplarCity'): + zone.cities.append(self._getText(city.childNodes)) + + zones[zone.type] = zone + + return zones + + + def _extractDates(self): + """Extract all date information from the DOM tree""" + dates_nodes = self._data.getElementsByTagName('dates') + if dates_nodes == []: + return + + dates = LocaleDates() + calendars = self._extractCalendars(dates_nodes[0]) + if calendars is not None: + dates.calendars = calendars + timezones = self._extractTimeZones(dates_nodes[0]) + if timezones is not None: + dates.timezones = timezones + return dates + + + def _extractSymbols(self, numbers_node): + """Extract all week entries from cal_node and store them in + calendar. 
+ + Example:: + + >>> factory = LocaleFactory(None) + >>> from xml.dom.minidom import parseString + >>> xml = u''' + ... + ... + ... . + ... , + ... ; + ... % + ... 0 + ... # + ... + + ... - + ... E + ... o/oo + ... oo + ... NaN + ... + ... ''' + >>> dom = parseString(xml) + >>> symbols = factory._extractSymbols(dom.documentElement) + + >>> symbols['list'] + u';' + >>> keys = symbols.keys() + >>> keys.sort() + >>> keys[:5] + [u'decimal', u'exponential', u'group', u'infinity', u'list'] + >>> keys[5:9] + [u'minusSign', u'nan', u'nativeZeroDigit', u'patternDigit'] + >>> keys[9:] + [u'perMille', u'percentSign', u'plusSign'] + """ + # See whether we have symbols entries + symbols_nodes = numbers_node.getElementsByTagName('symbols') + if not symbols_nodes: + return + + symbols = InheritingDictionary() + for name in (u"decimal", u"group", u"list", u"percentSign", + u"nativeZeroDigit", u"patternDigit", u"plusSign", + u"minusSign", u"exponential", u"perMille", + u"infinity", u"nan"): + nodes = symbols_nodes[0].getElementsByTagName(name) + if nodes: + symbols[name] = self._getText(nodes[0].childNodes) + + return symbols + + + def _extractNumberFormats(self, numbers_node, numbers): + """Extract all number formats from the numbers_node and save the data + in numbers. + + Example:: + + >>> class Numbers(object): + ... defaultDecimalFormat = None + ... decimalFormats = None + ... defaultScientificFormat = None + ... scientificFormats = None + ... defaultPercentFormat = None + ... percentFormats = None + ... defaultCurrencyFormat = None + ... currencyFormats = None + >>> numbers = Numbers() + >>> factory = LocaleFactory(None) + >>> from xml.dom.minidom import parseString + >>> xml = u''' + ... + ... + ... + ... + ... #,##0.### + ... + ... + ... + ... + ... + ... + ... + ... 0.000###E+00 + ... + ... + ... + ... + ... 0.00##E+00 + ... + ... + ... + ... + ... + ... + ... #,##0% + ... + ... + ... + ... + ... + ... + ... $ #,##0.00;($ #,##0.00) + ... + ... + ... + ... 
''' + >>> dom = parseString(xml) + >>> factory._extractNumberFormats(dom.documentElement, numbers) + + >>> numbers.decimalFormats[u"long"].formats[None].pattern + u'#,##0.###' + + >>> numbers.defaultScientificFormat + u'long' + >>> numbers.scientificFormats[u"long"].formats[None].pattern + u'0.000###E+00' + >>> numbers.scientificFormats[u"medium"].formats[None].pattern + u'0.00##E+00' + + >>> numbers.percentFormats[u"long"].formats[None].pattern + u'#,##0%' + >>> numbers.percentFormats.get(u"medium", None) is None + True + + >>> numbers.currencyFormats[u"long"].formats[None].pattern + u'$ #,##0.00;($ #,##0.00)' + >>> numbers.currencyFormats.get(u"medium", None) is None + True + """ + + for category in ('decimal', 'scientific', 'percent', 'currency'): + formatsName = category+'Formats' + lengthName = category+'FormatLength' + formatName = category+'Format' + defaultName = 'default'+formatName[0].upper()+formatName[1:] + + formats_nodes = numbers_node.getElementsByTagName(formatsName) + if formats_nodes: + default, formats = self._extractFormats( + formats_nodes[0], lengthName, formatName) + setattr(numbers, defaultName, default) + setattr(numbers, formatsName, formats) + + + def _extractCurrencies(self, numbers_node): + """Extract all currency definitions and their information from the + Locale's DOM tree. + + Example:: + + >>> factory = LocaleFactory(None) + >>> from xml.dom.minidom import parseString + >>> xml = u''' + ... + ... + ... + ... Dollar + ... $ + ... + ... + ... Yen + ... Y + ... + ... + ... Rupee + ... 0<=Rf|1<=Ru|1<Rf + ... + ... + ... Escudo + ... $ + ... + ... + ... 
''' + >>> dom = parseString(xml) + >>> currencies = factory._extractCurrencies(dom.documentElement) + + >>> keys = currencies.keys() + >>> keys.sort() + >>> keys + [u'INR', u'JPY', u'PTE', u'USD'] + + >>> currencies['USD'].symbol + u'$' + >>> currencies['USD'].displayName + u'Dollar' + >>> currencies['USD'].symbolChoice + False + """ + currs_nodes = numbers_node.getElementsByTagName('currencies') + if not currs_nodes: + return + + currencies = InheritingDictionary() + for curr_node in currs_nodes[0].getElementsByTagName('currency'): + type = curr_node.getAttribute('type') + currency = LocaleCurrency(type) + + nodes = curr_node.getElementsByTagName('symbol') + if nodes: + currency.symbol = self._getText(nodes[0].childNodes) + currency.symbolChoice = \ + nodes[0].getAttribute('choice') == u"true" + + nodes = curr_node.getElementsByTagName('displayName') + if nodes: + currency.displayName = self._getText(nodes[0].childNodes) + + currencies[type] = currency + + return currencies + + + def _extractNumbers(self): + """Extract all number information from the DOM tree""" + numbers_nodes = self._data.getElementsByTagName('numbers') + if not numbers_nodes: + return + + numbers = LocaleNumbers() + symbols = self._extractSymbols(numbers_nodes[0]) + if symbols is not None: + numbers.symbols = symbols + self._extractNumberFormats(numbers_nodes[0], numbers) + currencies = self._extractCurrencies(numbers_nodes[0]) + if currencies is not None: + numbers.currencies = currencies + return numbers + + + def _extractDelimiters(self): + """Extract all delimiter entries from the DOM tree. + + Example:: + + >>> factory = LocaleFactory(None) + >>> from xml.dom.minidom import parseString + >>> xml = u''' + ... + ... + ... `` + ... '' + ... ` + ... ' + ... + ... + ... + ... + ... + ... + ... + ... + ... 
''' + >>> dom = parseString(xml) + >>> factory._data = parseString(xml).documentElement + >>> delimiters = factory._extractDelimiters() + + >>> delimiters[u"quotationStart"] + u'``' + >>> delimiters[u"quotationEnd"] + u"''" + >>> delimiters[u"alternateQuotationStart"] + u'`' + >>> delimiters[u"alternateQuotationEnd"] + u"'" + + Escape: "'" + + >>> factory().delimiters == delimiters + True + """ + # See whether we have symbols entries + delimiters_nodes = self._data.getElementsByTagName('delimiters') + if not delimiters_nodes: + return + + delimiters = InheritingDictionary() + for name in (u'quotationStart', u"quotationEnd", + u"alternateQuotationStart", u"alternateQuotationEnd"): + nodes = delimiters_nodes[0].getElementsByTagName(name) + if nodes: + delimiters[name] = self._getText(nodes[0].childNodes) + + return delimiters + + + def _extractOrientation(self): + """Extract orientation information. + + >>> factory = LocaleFactory(None) + >>> from xml.dom.minidom import parseString + >>> xml = u''' + ... + ... + ... + ... + ... 
''' + >>> dom = parseString(xml) + >>> factory._data = parseString(xml).documentElement + >>> orientation = factory._extractOrientation() + >>> orientation.lines + u'bottom-to-top' + >>> orientation.characters + u'right-to-left' + """ + orientation_nodes = self._data.getElementsByTagName('orientation') + if not orientation_nodes: + return + orientation = LocaleOrientation() + for name in (u"characters", u"lines"): + value = orientation_nodes[0].getAttribute(name) + if value: + setattr(orientation, name, value) + return orientation + + + def __call__(self): + """Create the Locale.""" + locale = Locale(self._extractIdentity()) + + names = self._extractDisplayNames() + if names is not None: + locale.displayNames = names + + dates = self._extractDates() + if dates is not None: + locale.dates = dates + + numbers = self._extractNumbers() + if numbers is not None: + locale.numbers = numbers + + delimiters = self._extractDelimiters() + if delimiters is not None: + locale.delimiters = delimiters + + orientation = self._extractOrientation() + if orientation is not None: + locale.orientation = orientation + + # Unmapped: + # + # - + # - + # - , + + return locale diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/meta.zcml b/thesisenv/lib/python3.6/site-packages/zope/i18n/meta.zcml new file mode 100644 index 0000000..a499f23 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/meta.zcml @@ -0,0 +1,16 @@ + + + + + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/negotiator.py b/thesisenv/lib/python3.6/site-packages/zope/i18n/negotiator.py new file mode 100644 index 0000000..3c5fa17 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/negotiator.py @@ -0,0 +1,60 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. 
+# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Language Negotiator +""" +from zope.interface import implementer +from zope.i18n.interfaces import INegotiator +from zope.i18n.interfaces import IUserPreferredLanguages + + +def normalize_lang(lang): + lang = lang.strip().lower() + lang = lang.replace('_', '-') + lang = lang.replace(' ', '') + return lang + + +def normalize_langs(langs): + # Make a mapping from normalized->original so we keep can match + # the normalized lang and return the original string. + n_langs = {} + for l in langs: + n_langs[normalize_lang(l)] = l + return n_langs + + +@implementer(INegotiator) +class Negotiator(object): + + def getLanguage(self, langs, env): + envadapter = IUserPreferredLanguages(env) + userlangs = envadapter.getPreferredLanguages() + # Prioritize on the user preferred languages. Return the + # first user preferred language that the object has available. + langs = normalize_langs(langs) + for lang in userlangs: + if lang in langs: + return langs.get(lang) + # If the user asked for a specific variation, but we don't + # have it available we may serve the most generic one, + # according to the spec (eg: user asks for ('en-us', + # 'de'), but we don't have 'en-us', then 'en' is preferred + # to 'de'). 
+ parts = lang.split('-') + if len(parts) > 1 and parts[0] in langs: + return langs.get(parts[0]) + return None + + +negotiator = Negotiator() diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/simpletranslationdomain.py b/thesisenv/lib/python3.6/site-packages/zope/i18n/simpletranslationdomain.py new file mode 100644 index 0000000..b093db8 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/simpletranslationdomain.py @@ -0,0 +1,67 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""This is a simple implementation of the ITranslationDomain interface. +""" +from zope.interface import implementer +from zope.component import getUtility +from zope.i18n.interfaces import ITranslationDomain, INegotiator +from zope.i18n import interpolate + + +text_type = str if bytes is not str else unicode + + +@implementer(ITranslationDomain) +class SimpleTranslationDomain(object): + """This is the simplest implementation of the ITranslationDomain I + could come up with. + + The constructor takes one optional argument 'messages', which will be + used to do the translation. The 'messages' attribute has to have the + following structure: + + {('language', 'msg_id'): 'message', ...} + + Note: This Translation Domain does not use message catalogs. 
+ """ + + # See zope.i18n.interfaces.ITranslationDomain + domain = None + + def __init__(self, domain, messages=None): + """Initializes the object. No arguments are needed.""" + self.domain = ( + domain.decode("utf-8") if isinstance(domain, bytes) else domain) + self.messages = messages if messages is not None else {} + assert self.messages is not None + + def translate(self, msgid, mapping=None, context=None, + target_language=None, default=None, msgid_plural=None, + default_plural=None, number=None): + '''See interface ITranslationDomain''' + # Find out what the target language should be + if target_language is None and context is not None: + langs = [m[0] for m in self.messages.keys()] + # Let's negotiate the language to translate to. :) + negotiator = getUtility(INegotiator) + target_language = negotiator.getLanguage(langs, context) + + # Find a translation; if nothing is found, use the default + # value + if default is None: + default = text_type(msgid) + text = self.messages.get((target_language, msgid)) + if text is None: + text = default + return interpolate(text, mapping) diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/testing.py b/thesisenv/lib/python3.6/site-packages/zope/i18n/testing.py new file mode 100644 index 0000000..035827c --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/testing.py @@ -0,0 +1,58 @@ +############################################################################## +# +# Copyright (c) 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. 
+# +############################################################################## +""" +Unit test logic for setting up and tearing down basic infrastructure. + +This relies on :mod:`zope.publisher` being available. +""" + +import re + +from zope.testing import renormalizing + +rules = [] +if bytes is not str: + rules = [ + (re.compile("u('.*?')"), r"\1"), + (re.compile('u(".*?")'), r"\1"), + ] +unicode_checker = renormalizing.RENormalizing(rules) + + +def setUp(test=None): + import zope.component + from zope.publisher.browser import BrowserLanguages + from zope.publisher.http import HTTPCharsets + zope.component.provideAdapter(HTTPCharsets) + zope.component.provideAdapter(BrowserLanguages) + + +class PlacelessSetup(object): + + def setUp(self): + """ + Install the language and charset negotiators. + + >>> PlacelessSetup().setUp() + >>> from zope.publisher.browser import TestRequest + >>> from zope.i18n.interfaces import IUserPreferredCharsets + >>> from zope.i18n.interfaces import IUserPreferredLanguages + >>> from zope.component import getAdapter + >>> getAdapter(TestRequest(), IUserPreferredCharsets) + + >>> getAdapter(TestRequest(), IUserPreferredLanguages) + + + """ + setUp() diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/testmessagecatalog.py b/thesisenv/lib/python3.6/site-packages/zope/i18n/testmessagecatalog.py new file mode 100644 index 0000000..8420c58 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/testmessagecatalog.py @@ -0,0 +1,55 @@ +############################################################################## +# +# Copyright (c) 2004 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Test message catalog +""" + +from zope import interface +import zope.i18n.interfaces +from zope.i18n.translationdomain import TranslationDomain + +@interface.implementer(zope.i18n.interfaces.IGlobalMessageCatalog) +class TestMessageCatalog(object): + + language = 'test' + + def __init__(self, domain): + self.domain = domain + + def queryMessage(self, msgid, default=None): + default = getattr(msgid, 'default', default) + if default != None and default != msgid: + msg = u"%s (%s)" % (msgid, default) + else: + msg = msgid + + return u"[[%s][%s]]" % (self.domain, msg) + + getMessage = queryMessage + + def getIdentifier(self): + return 'test' + + def reload(self): + pass + +@interface.implementer(zope.i18n.interfaces.ITranslationDomain) +def TestMessageFallbackDomain(domain_id=u""): + domain = TranslationDomain(domain_id) + domain.addCatalog(TestMessageCatalog(domain_id)) + return domain + +interface.directlyProvides( + TestMessageFallbackDomain, + zope.i18n.interfaces.IFallbackTranslationDomainFactory, + ) diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/testmessagecatalog.rst b/thesisenv/lib/python3.6/site-packages/zope/i18n/testmessagecatalog.rst new file mode 100644 index 0000000..8cf4148 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/testmessagecatalog.rst @@ -0,0 +1,70 @@ +====================== + Test Message Catalog +====================== + +The test message catalog "translates" test by simply outputing (in +unicode) the domain and message id in square-bracket markers: + + >>> import zope.i18n.testmessagecatalog + >>> cat = zope.i18n.testmessagecatalog.TestMessageCatalog('foo.bar') + + >>> 
cat.language, cat.domain + ('test', 'foo.bar') + + >>> print(cat.queryMessage('eek')) + [[foo.bar][eek]] + + >>> print(cat.getMessage('eek')) + [[foo.bar][eek]] + + >>> isinstance(cat.getMessage('eek'), str if bytes is not str else unicode) + True + + >>> cat.getIdentifier() + 'test' + + >>> cat.reload() + +If a message id has a default, it will be included in the output: + + >>> id = zope.i18nmessageid.MessageFactory('foo.bar')('eek', default='Eek') + + >>> print(cat.queryMessage(id)) + [[foo.bar][eek (Eek)]] + + >>> print(cat.getMessage(id)) + [[foo.bar][eek (Eek)]] + +If a message doesn't have a default, but a default is passed in to +queryMessage, the default will be used used: + + >>> print(cat.queryMessage('eek', default='Eek')) + [[foo.bar][eek (Eek)]] + + >>> print(cat.getMessage(id, default='Waaa')) + [[foo.bar][eek (Eek)]] + +Fallback domains +================ + +The testmessagecatalog module also provide a fallback domain factory +that has the test catalog as it's only catalog: + + >>> factory = zope.i18n.testmessagecatalog.TestMessageFallbackDomain + >>> import zope.i18n.interfaces + >>> zope.i18n.interfaces.IFallbackTranslationDomainFactory.providedBy( + ... factory) + True + + >>> domain = factory('foo.bar') + >>> print(domain.translate('eek')) + eek + + >>> print(domain.translate('eek', target_language='test')) + [[foo.bar][eek]] + +Note that if a default is padded in, it will be included in test +output: + + >>> print(domain.translate('eek', target_language='test', default='Eek')) + [[foo.bar][eek (Eek)]] diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/tests/__init__.py b/thesisenv/lib/python3.6/site-packages/zope/i18n/tests/__init__.py new file mode 100644 index 0000000..b711d36 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/tests/__init__.py @@ -0,0 +1,2 @@ +# +# This file is necessary to make this directory a package. 
diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/tests/configure.txt b/thesisenv/lib/python3.6/site-packages/zope/i18n/tests/configure.txt new file mode 100644 index 0000000..b6c7a9c --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/tests/configure.txt @@ -0,0 +1,18 @@ +Package configuration +===================== + +The ``zope.i18n`` package provides a ZCML file that configures a utility and +some security: + + >>> from zope.configuration.xmlconfig import XMLConfig + >>> import zope.i18n + + >>> XMLConfig('configure.zcml', zope.i18n.locales)() + + >>> len(list(zope.component.getGlobalSiteManager().registeredUtilities())) + 13 + + >>> XMLConfig('configure.zcml', zope.i18n)() + + >>> len(list(zope.component.getGlobalSiteManager().registeredUtilities())) + 15 diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/tests/de-default.mo b/thesisenv/lib/python3.6/site-packages/zope/i18n/tests/de-default.mo new file mode 100644 index 0000000..f49b8da Binary files /dev/null and b/thesisenv/lib/python3.6/site-packages/zope/i18n/tests/de-default.mo differ diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/tests/de-default.po b/thesisenv/lib/python3.6/site-packages/zope/i18n/tests/de-default.po new file mode 100644 index 0000000..7d3b2f6 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/tests/de-default.po @@ -0,0 +1,20 @@ +msgid "" +msgstr "" +"Project-Id-Version: Zope 3\n" +"PO-Revision-Date: 2002/06/13\n" +"Last-Translator: Zope 3 contributors\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=ISO-8859-1\n" +"Content-Transfer-Encoding: 8bit\n" +"Plural-Forms: nplurals=2; plural=n != 1;\n" + +msgid "short_greeting" +msgstr "Hallo!" + +msgid "greeting" +msgstr "Hallo $name, wie geht es Dir?" + +msgid "There is one file." +msgid_plural "There are %d files." +msgstr[0] "Es gibt eine Datei." +msgstr[1] "Es gibt %d Dateien." 
diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/tests/en-alt.mo b/thesisenv/lib/python3.6/site-packages/zope/i18n/tests/en-alt.mo new file mode 100644 index 0000000..2e7a16a Binary files /dev/null and b/thesisenv/lib/python3.6/site-packages/zope/i18n/tests/en-alt.mo differ diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/tests/en-alt.po b/thesisenv/lib/python3.6/site-packages/zope/i18n/tests/en-alt.po new file mode 100644 index 0000000..eb439fb --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/tests/en-alt.po @@ -0,0 +1,19 @@ +msgid "" +msgstr "" +"Project-Id-Version: Zope 3\n" +"PO-Revision-Date: 2002/06/13\n" +"Last-Translator: Zope 3 contributors\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=ISO-8859-1\n" +"Content-Transfer-Encoding: 8bit\n" + +msgid "short_greeting" +msgstr "Hey!" + +msgid "special" +msgstr "Wow" + +msgid "apple" +msgid_plural "apple" +msgstr[0] "orange" +msgstr[1] "oranges" diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/tests/en-default.mo b/thesisenv/lib/python3.6/site-packages/zope/i18n/tests/en-default.mo new file mode 100644 index 0000000..0dc0588 Binary files /dev/null and b/thesisenv/lib/python3.6/site-packages/zope/i18n/tests/en-default.mo differ diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/tests/en-default.po b/thesisenv/lib/python3.6/site-packages/zope/i18n/tests/en-default.po new file mode 100644 index 0000000..2432498 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/tests/en-default.po @@ -0,0 +1,45 @@ +msgid "" +msgstr "" +"Project-Id-Version: Zope 3\n" +"PO-Revision-Date: 2002/06/13\n" +"Last-Translator: Zope 3 contributors\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=ISO-8859-1\n" +"Content-Transfer-Encoding: 8bit\n" +"Plural-Forms: nplurals=2; plural=n != 1;\n" + +msgid "short_greeting" +msgstr "Hello!" + +msgid "greeting" +msgstr "Hello $name, how are you?" + +msgid "There is one file." 
+msgid_plural "There are %d files." +msgstr[0] "There is one file." +msgstr[1] "There are %d files." + +msgid "The item is rated 1/5 star." +msgid_plural "The item is rated %s/5 stars." +msgstr[0] "The item is rated 1/5 star." +msgstr[1] "The item is rated %s/5 stars." + +msgid "There is %d chance." +msgid_plural "There are %f chances." +msgstr[0] "There is %d chance." +msgstr[1] "There are %f chances." + +msgid "There is %d ${type}." +msgid_plural "There are %d ${type}." +msgstr[0] "There is %d ${type}." +msgstr[1] "There are %d ${type}." + +msgid "apple" +msgid_plural "apples" +msgstr[0] "apple" +msgstr[1] "apples" + +msgid "banana" +msgid_plural "bananas" +msgstr[0] "banana" +msgstr[1] "bananas" diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/tests/locale/de/LC_MESSAGES/zope-i18n.mo b/thesisenv/lib/python3.6/site-packages/zope/i18n/tests/locale/de/LC_MESSAGES/zope-i18n.mo new file mode 100644 index 0000000..271abdd Binary files /dev/null and b/thesisenv/lib/python3.6/site-packages/zope/i18n/tests/locale/de/LC_MESSAGES/zope-i18n.mo differ diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/tests/locale/de/LC_MESSAGES/zope-i18n.po b/thesisenv/lib/python3.6/site-packages/zope/i18n/tests/locale/de/LC_MESSAGES/zope-i18n.po new file mode 100644 index 0000000..4d3bcd7 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/tests/locale/de/LC_MESSAGES/zope-i18n.po @@ -0,0 +1,11 @@ +msgid "" +msgstr "" +"Project-Id-Version: Zope 3\n" +"PO-Revision-Date: 2002/06/13\n" +"Last-Translator: Zope 3 contributors\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=ISO-8859-1\n" +"Content-Transfer-Encoding: 8bit\n" + +msgid "Message" +msgstr "Message translated" diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/tests/locale/en/LC_MESSAGES/__init__.py b/thesisenv/lib/python3.6/site-packages/zope/i18n/tests/locale/en/LC_MESSAGES/__init__.py new file mode 100644 index 0000000..b711d36 --- /dev/null +++ 
b/thesisenv/lib/python3.6/site-packages/zope/i18n/tests/locale/en/LC_MESSAGES/__init__.py @@ -0,0 +1,2 @@ +# +# This file is necessary to make this directory a package. diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/tests/locale/en/LC_MESSAGES/zope-i18n.mo b/thesisenv/lib/python3.6/site-packages/zope/i18n/tests/locale/en/LC_MESSAGES/zope-i18n.mo new file mode 100644 index 0000000..3e157a5 Binary files /dev/null and b/thesisenv/lib/python3.6/site-packages/zope/i18n/tests/locale/en/LC_MESSAGES/zope-i18n.mo differ diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/tests/locale/en/LC_MESSAGES/zope-i18n.po b/thesisenv/lib/python3.6/site-packages/zope/i18n/tests/locale/en/LC_MESSAGES/zope-i18n.po new file mode 100644 index 0000000..9e1077c --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/tests/locale/en/LC_MESSAGES/zope-i18n.po @@ -0,0 +1,14 @@ +msgid "" +msgstr "" +"Project-Id-Version: Zope 3\n" +"PO-Revision-Date: 2002/06/13\n" +"Last-Translator: Zope 3 contributors\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=ISO-8859-1\n" +"Content-Transfer-Encoding: 8bit\n" + +msgid "New Domain" +msgstr "New Domain translated" + +msgid "New Language" +msgstr "New Language translated" diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/tests/locale2/__init__.py b/thesisenv/lib/python3.6/site-packages/zope/i18n/tests/locale2/__init__.py new file mode 100644 index 0000000..b711d36 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/tests/locale2/__init__.py @@ -0,0 +1,2 @@ +# +# This file is necessary to make this directory a package. 
diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/tests/locale2/en/LC_MESSAGES/zope-i18n.mo b/thesisenv/lib/python3.6/site-packages/zope/i18n/tests/locale2/en/LC_MESSAGES/zope-i18n.mo new file mode 100644 index 0000000..d58a7d7 Binary files /dev/null and b/thesisenv/lib/python3.6/site-packages/zope/i18n/tests/locale2/en/LC_MESSAGES/zope-i18n.mo differ diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/tests/locale2/en/LC_MESSAGES/zope-i18n.po b/thesisenv/lib/python3.6/site-packages/zope/i18n/tests/locale2/en/LC_MESSAGES/zope-i18n.po new file mode 100644 index 0000000..17815a1 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/tests/locale2/en/LC_MESSAGES/zope-i18n.po @@ -0,0 +1,14 @@ +msgid "" +msgstr "" +"Project-Id-Version: Zope 3\n" +"PO-Revision-Date: 2008/04/26\n" +"Last-Translator: Zope 3 contributors\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=ISO-8859-1\n" +"Content-Transfer-Encoding: 8bit\n" + +msgid "Additional message" +msgstr "Additional message translated" + +msgid "New Language" +msgstr "New Language translated differently" diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/tests/locale3/en/LC_MESSAGES/__init__.py b/thesisenv/lib/python3.6/site-packages/zope/i18n/tests/locale3/en/LC_MESSAGES/__init__.py new file mode 100644 index 0000000..b711d36 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/tests/locale3/en/LC_MESSAGES/__init__.py @@ -0,0 +1,2 @@ +# +# This file is necessary to make this directory a package. 
diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/tests/locale3/en/LC_MESSAGES/zope-i18n.in b/thesisenv/lib/python3.6/site-packages/zope/i18n/tests/locale3/en/LC_MESSAGES/zope-i18n.in new file mode 100644 index 0000000..56ec799 Binary files /dev/null and b/thesisenv/lib/python3.6/site-packages/zope/i18n/tests/locale3/en/LC_MESSAGES/zope-i18n.in differ diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/tests/locale3/en/LC_MESSAGES/zope-i18n.mo b/thesisenv/lib/python3.6/site-packages/zope/i18n/tests/locale3/en/LC_MESSAGES/zope-i18n.mo new file mode 100644 index 0000000..8bdb36f Binary files /dev/null and b/thesisenv/lib/python3.6/site-packages/zope/i18n/tests/locale3/en/LC_MESSAGES/zope-i18n.mo differ diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/tests/locale3/en/LC_MESSAGES/zope-i18n.po b/thesisenv/lib/python3.6/site-packages/zope/i18n/tests/locale3/en/LC_MESSAGES/zope-i18n.po new file mode 100644 index 0000000..45c2671 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/tests/locale3/en/LC_MESSAGES/zope-i18n.po @@ -0,0 +1,11 @@ +msgid "" +msgstr "" +"Project-Id-Version: Zope 3\n" +"PO-Revision-Date: 2008/04/26\n" +"Last-Translator: Zope 3 contributors\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=ISO-8859-1\n" +"Content-Transfer-Encoding: 8bit\n" + +msgid "I'm a newer file" +msgstr "I'm a newer file translated" diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/tests/locale3/en/LC_MESSAGES/zope-i18n2.po b/thesisenv/lib/python3.6/site-packages/zope/i18n/tests/locale3/en/LC_MESSAGES/zope-i18n2.po new file mode 100644 index 0000000..ae7a53f --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/tests/locale3/en/LC_MESSAGES/zope-i18n2.po @@ -0,0 +1,11 @@ +msgid "" +msgstr "" +"Project-Id-Version: Zope 3\n" +"PO-Revision-Date: 2008/04/26\n" +"Last-Translator: Zope 3 contributors\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=ISO-8859-1\n" 
+"Content-Transfer-Encoding: 8bit\n" + +msgid "I'm a new file" +msgstr "I'm a new file translated" diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/tests/pl-default.mo b/thesisenv/lib/python3.6/site-packages/zope/i18n/tests/pl-default.mo new file mode 100644 index 0000000..37b73fe Binary files /dev/null and b/thesisenv/lib/python3.6/site-packages/zope/i18n/tests/pl-default.mo differ diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/tests/pl-default.po b/thesisenv/lib/python3.6/site-packages/zope/i18n/tests/pl-default.po new file mode 100644 index 0000000..3951a9c --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/tests/pl-default.po @@ -0,0 +1,22 @@ +msgid "" +msgstr "" +"Project-Id-Version: Zope 3\n" +"PO-Revision-Date: 2018-09-04 11:05+0100\n" +"Last-Translator: Zope 3 contributors\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=utf-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Plural-Forms: nplurals=3; plural=n==1 ? 0 : n%10>=2 && n%10<=4 && (n%100<10 || n%100>=20) ? 1 : 2;\n" + + +msgid "short_greeting" +msgstr "Cześć !" + +msgid "greeting" +msgstr "Cześć $name, jak się masz?" + +msgid "There is one file." +msgid_plural "There are %d files." +msgstr[0] "Istnieje %d plik." +msgstr[1] "Istnieją %d pliki." +msgstr[2] "Istnieją %d plików." diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/tests/test.py b/thesisenv/lib/python3.6/site-packages/zope/i18n/tests/test.py new file mode 100644 index 0000000..4fb4f4c --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/tests/test.py @@ -0,0 +1,40 @@ +############################################################################## +# +# Copyright (c) 2004 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Misc tests +""" +import unittest + +import doctest +from zope.component.testing import setUp, tearDown +from zope.i18n.testing import unicode_checker + + +def test_suite(): + options = doctest.NORMALIZE_WHITESPACE | doctest.ELLIPSIS + def suite(name): + return doctest.DocTestSuite( + name, + setUp=setUp, tearDown=tearDown, + optionflags=options, + checker=unicode_checker) + + return unittest.TestSuite([ + suite('zope.i18n'), + suite("zope.i18n.config"), + suite("zope.i18n.testing"), + ]) + + +if __name__ == '__main__': + unittest.main(defaultTest='test_suite') diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/tests/test_compile.py b/thesisenv/lib/python3.6/site-packages/zope/i18n/tests/test_compile.py new file mode 100644 index 0000000..d82c3e7 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/tests/test_compile.py @@ -0,0 +1,72 @@ +############################################################################## +# +# Copyright (c) 2017 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. 
+# +############################################################################## + +import unittest + +from zope.testing.loggingsupport import InstalledHandler + +from zope.i18n import compile + + +@unittest.skipUnless(compile.HAS_PYTHON_GETTEXT, + "Need python-gettext") +class TestCompile(unittest.TestCase): + + def setUp(self): + self.handler = InstalledHandler('zope.i18n') + self.addCleanup(self.handler.uninstall) + + def test_non_existant_path(self): + self.assertIsNone(compile.compile_mo_file('no_such_domain', '')) + + def test_po_exists_but_invalid(self): + import tempfile + import shutil + import os.path + + td = tempfile.mkdtemp(suffix=".zopei18n_test_compile") + self.addCleanup(shutil.rmtree, td) + + with open(os.path.join(td, "foo.po"), 'w') as f: + f.write("this should not compile") + + compile.compile_mo_file('foo', td) + + self.assertIn("Syntax error while compiling", + str(self.handler)) + + def test_po_exists_cannot_write_mo(self): + import tempfile + import shutil + import os + import os.path + + td = tempfile.mkdtemp(suffix=".zopei18n_test_compile") + self.addCleanup(shutil.rmtree, td) + + mofile = os.path.join(td, 'foo.mo') + with open(mofile, 'w') as f: + f.write("Touching") + + # Put it in the past, make it not writable + os.utime(mofile, (1000, 1000)) + os.chmod(mofile, 0) + + with open(os.path.join(td, "foo.po"), 'w') as f: + f.write("# A comment") + + compile.compile_mo_file('foo', td) + + self.assertIn("Error while compiling", + str(self.handler)) diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/tests/test_formats.py b/thesisenv/lib/python3.6/site-packages/zope/i18n/tests/test_formats.py new file mode 100644 index 0000000..99c4bf7 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/tests/test_formats.py @@ -0,0 +1,1334 @@ +############################################################################## +# +# Copyright (c) 2002, 2003 Zope Foundation and Contributors. +# All Rights Reserved. 
+# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""This module tests the Formats and everything that goes with it. +""" +import decimal +import datetime +import pickle +from unittest import TestCase + +import pytz + +from zope.i18n.interfaces import IDateTimeFormat +from zope.i18n.format import DateTimeFormat +from zope.i18n.format import parseDateTimePattern, buildDateTimeParseInfo +from zope.i18n.format import DateTimePatternParseError, DateTimeParseError + +from zope.i18n.interfaces import INumberFormat +from zope.i18n.format import NumberFormat, NumberParseError +from zope.i18n.format import parseNumberPattern +from zope.i18n.format import NumberPatternParseError + + +class LocaleStub(object): + pass + +class LocaleCalendarStub(object): + + type = u"gregorian" + + months = { + 1: ('Januar', 'Jan'), + 2: ('Februar', 'Feb'), + 3: ('Maerz', 'Mrz'), + 4: ('April', 'Apr'), + 5: ('Mai', 'Mai'), + 6: ('Juni', 'Jun'), + 7: ('Juli', 'Jul'), + 8: ('August', 'Aug'), + 9: ('September', 'Sep'), + 10: ('Oktober', 'Okt'), + 11: ('November', 'Nov'), + 12: ('Dezember', 'Dez') + } + + days = { + 1: ('Montag', 'Mo'), + 2: ('Dienstag', 'Di'), + 3: ('Mittwoch', 'Mi'), + 4: ('Donnerstag', 'Do'), + 5: ('Freitag', 'Fr'), + 6: ('Samstag', 'Sa'), + 7: ('Sonntag', 'So') + } + + am = 'vorm.' + pm = 'nachm.' + + eras = {1: (None, 'v. Chr.'), 2: (None, 'n. 
Chr.')} + + week = {'firstDay': 1, 'minDays': 1} + + def getMonthNames(self): + return [self.months.get(type, (None, None))[0] for type in range(1, 13)] + + def getMonthTypeFromName(self, name): + for item in self.months.items(): + if item[1][0] == name: + return item[0] + + def getMonthAbbreviations(self): + return [self.months.get(type, (None, None))[1] for type in range(1, 13)] + + def getMonthTypeFromAbbreviation(self, abbr): + for item in self.months.items(): + if item[1][1] == abbr: + return item[0] + + def getDayNames(self): + return [self.days.get(type, (None, None))[0] for type in range(1, 8)] + + def getDayTypeFromName(self, name): + raise NotImplementedError() + + def getDayAbbreviations(self): + return [self.days.get(type, (None, None))[1] for type in range(1, 8)] + + def getDayTypeFromAbbreviation(self, abbr): + raise NotImplementedError() + + +class _TestCase(TestCase): + # Avoid deprecation warnings in Python 3 by making the preferred + # method name available for Python 2. 
+ assertRaisesRegex = getattr(TestCase, 'assertRaisesRegex', TestCase.assertRaisesRegexp) + + +class TestDateTimePatternParser(_TestCase): + """Extensive tests for the ICU-based-syntax datetime pattern parser.""" + + + def testParseSimpleTimePattern(self): + self.assertEqual(parseDateTimePattern('HH'), + [('H', 2)]) + self.assertEqual(parseDateTimePattern('HH:mm'), + [('H', 2), ':', ('m', 2)]) + self.assertEqual(parseDateTimePattern('HH:mm:ss'), + [('H', 2), ':', ('m', 2), ':', ('s', 2)]) + self.assertEqual(parseDateTimePattern('mm:ss'), + [('m', 2), ':', ('s', 2)]) + self.assertEqual(parseDateTimePattern('H:m:s'), + [('H', 1), ':', ('m', 1), ':', ('s', 1)]) + self.assertEqual(parseDateTimePattern('HHH:mmmm:sssss'), + [('H', 3), ':', ('m', 4), ':', ('s', 5)]) + + def testParseGermanTimePattern(self): + # German full + self.assertEqual(parseDateTimePattern("H:mm' Uhr 'z"), + [('H', 1), ':', ('m', 2), ' Uhr ', ('z', 1)]) + # German long + self.assertEqual(parseDateTimePattern("HH:mm:ss z"), + [('H', 2), ':', ('m', 2), ':', ('s', 2), ' ', + ('z', 1)]) + # German medium + self.assertEqual(parseDateTimePattern("HH:mm:ss"), + [('H', 2), ':', ('m', 2), ':', ('s', 2)]) + # German short + self.assertEqual(parseDateTimePattern("HH:mm"), + [('H', 2), ':', ('m', 2)]) + + def testParseRealDate(self): + # German full + self.assertEqual(parseDateTimePattern("EEEE, d. MMMM yyyy"), + [('E', 4), ', ', ('d', 1), '. ', ('M', 4), + ' ', ('y', 4)]) + # German long + self.assertEqual(parseDateTimePattern("d. MMMM yyyy"), + [('d', 1), '. ', ('M', 4), ' ', ('y', 4)]) + # German medium + self.assertEqual(parseDateTimePattern("dd.MM.yyyy"), + [('d', 2), '.', ('M', 2), '.', ('y', 4)]) + # German short + self.assertEqual(parseDateTimePattern("dd.MM.yy"), + [('d', 2), '.', ('M', 2), '.', ('y', 2)]) + + def testParseRealDateTime(self): + # German full + self.assertEqual( + parseDateTimePattern("EEEE, d. MMMM yyyy H:mm' Uhr 'z"), + [('E', 4), ', ', ('d', 1), '. 
', ('M', 4), ' ', ('y', 4), + ' ', ('H', 1), ':', ('m', 2), ' Uhr ', ('z', 1)]) + # German long + self.assertEqual( + parseDateTimePattern("d. MMMM yyyy HH:mm:ss z"), + [('d', 1), '. ', ('M', 4), ' ', ('y', 4), + ' ', ('H', 2), ':', ('m', 2), ':', ('s', 2), ' ', ('z', 1)]) + # German medium + self.assertEqual( + parseDateTimePattern("dd.MM.yyyy HH:mm:ss"), + [('d', 2), '.', ('M', 2), '.', ('y', 4), + ' ', ('H', 2), ':', ('m', 2), ':', ('s', 2)]) + # German short + self.assertEqual( + parseDateTimePattern("dd.MM.yy HH:mm"), + [('d', 2), '.', ('M', 2), '.', ('y', 2), + ' ', ('H', 2), ':', ('m', 2)]) + + def testParseQuotesInPattern(self): + self.assertEqual(parseDateTimePattern("HH''mm"), + [('H', 2), "'", ('m', 2)]) + self.assertEqual(parseDateTimePattern("HH'HHmm'mm"), + [('H', 2), 'HHmm', ('m', 2)]) + self.assertEqual(parseDateTimePattern("HH':'''':'mm"), + [('H', 2), ":':", ('m', 2)]) + self.assertEqual(parseDateTimePattern("HH':' ':'mm"), + [('H', 2), ": :", ('m', 2)]) + + def testParseDateTimePatternError(self): + # Quote not closed + with self.assertRaisesRegex( + DateTimePatternParseError, + 'The quote starting at character 2 is not closed.'): + parseDateTimePattern("HH' Uhr") + + # Test correct length of characters in datetime fields + # XXX: This should actually fail, but it doesn't. Why not? + parseDateTimePattern("HHHHH") + + def testParseDateTimePatternRepeatDateTimeChars(self): + result = parseDateTimePattern('aG') + self.assertEqual( + result, + [('a', 1), ('G', 1)] + ) + + +class TestBuildDateTimeParseInfo(_TestCase): + """This class tests the functionality of the buildDateTimeParseInfo() + method with the German locale. 
+ """ + + def info(self, entry): + info = buildDateTimeParseInfo(LocaleCalendarStub(), [entry]) + return info[entry] + + def testGenericNumbers(self): + for char in 'dDFkKhHmsSwW': + for length in range(1, 6): + self.assertEqual(self.info((char, length)), + '([0-9]{%i,1000})' %length) + def testYear(self): + self.assertEqual(self.info(('y', 2)), '([0-9]{2})') + self.assertEqual(self.info(('y', 4)), '([0-9]{4})') + self.assertRaises(DateTimePatternParseError, self.info, ('y', 1)) + self.assertRaises(DateTimePatternParseError, self.info, ('y', 3)) + self.assertRaises(DateTimePatternParseError, self.info, ('y', 5)) + + def testAMPMMarker(self): + names = ['vorm.', 'nachm.'] + for length in range(1, 6): + self.assertEqual(self.info(('a', length)), '('+'|'.join(names)+')') + + def testEra(self): + self.assertEqual(self.info(('G', 1)), '(v. Chr.|n. Chr.)') + + def testTimeZone(self): + self.assertEqual(self.info(('z', 1)), r'([\+-][0-9]{3,4})') + self.assertEqual(self.info(('z', 2)), r'([\+-][0-9]{2}:[0-9]{2})') + self.assertEqual(self.info(('z', 3)), r'([a-zA-Z]{3})') + self.assertEqual(self.info(('z', 4)), r'([a-zA-Z /\.]*)') + self.assertEqual(self.info(('z', 5)), r'([a-zA-Z /\.]*)') + + def testMonthNumber(self): + self.assertEqual(self.info(('M', 1)), '([0-9]{1,2})') + self.assertEqual(self.info(('M', 2)), '([0-9]{2})') + + def testMonthNames(self): + names = [u"Januar", u"Februar", u"Maerz", u"April", + u"Mai", u"Juni", u"Juli", u"August", u"September", u"Oktober", + u"November", u"Dezember"] + self.assertEqual(self.info(('M', 4)), '('+'|'.join(names)+')') + + def testMonthAbbr(self): + names = ['Jan', 'Feb', 'Mrz', 'Apr', 'Mai', 'Jun', 'Jul', 'Aug', + 'Sep', 'Okt', 'Nov', 'Dez'] + self.assertEqual(self.info(('M', 3)), '('+'|'.join(names)+')') + + def testWeekdayNumber(self): + self.assertEqual(self.info(('E', 1)), '([0-9])') + self.assertEqual(self.info(('E', 2)), '([0-9]{2})') + + def testWeekdayNames(self): + names = ['Montag', 'Dienstag', 'Mittwoch', 
'Donnerstag', + 'Freitag', 'Samstag', 'Sonntag'] + self.assertEqual(self.info(('E', 4)), '('+'|'.join(names)+')') + self.assertEqual(self.info(('E', 5)), '('+'|'.join(names)+')') + self.assertEqual(self.info(('E', 10)), '('+'|'.join(names)+')') + + def testWeekdayAbbr(self): + names = ['Mo', 'Di', 'Mi', 'Do', 'Fr', 'Sa', 'So'] + self.assertEqual(self.info(('E', 3)), '('+'|'.join(names)+')') + + +class TestDateTimeFormat(_TestCase): + """Test the functionality of an implmentation of the ILocaleProvider + interface.""" + + format = DateTimeFormat(calendar=LocaleCalendarStub()) + + def testInterfaceConformity(self): + self.assertTrue(IDateTimeFormat.providedBy(self.format)) + + def testParseSimpleDateTime(self): + # German short + self.assertEqual( + self.format.parse('02.01.03 21:48', 'dd.MM.yy HH:mm'), + datetime.datetime(2003, 1, 2, 21, 48)) + + def testParseRealDateTime(self): + # German medium + self.assertEqual( + self.format.parse('02.01.2003 21:48:01', 'dd.MM.yyyy HH:mm:ss'), + datetime.datetime(2003, 1, 2, 21, 48, 1)) + + # German long + # TODO: The parser does not support timezones yet. + self.assertEqual( + self.format.parse( + '2. Januar 2003 21:48:01 +100', + 'd. MMMM yyyy HH:mm:ss z'), + pytz.timezone('Europe/Berlin').localize( + datetime.datetime(2003, 1, 2, 21, 48, 1))) + + # German full + # TODO: The parser does not support timezones yet. + self.assertEqual( + self.format.parse( + 'Donnerstag, 2. Januar 2003 21:48 Uhr +100', + "EEEE, d. 
MMMM yyyy H:mm' Uhr 'z"), + pytz.timezone('Europe/Berlin').localize( + datetime.datetime(2003, 1, 2, 21, 48))) + + def testParseAMPMDateTime(self): + self.assertEqual( + self.format.parse('02.01.03 09:48 nachm.', 'dd.MM.yy hh:mm a'), + datetime.datetime(2003, 1, 2, 21, 48)) + + def testParseTimeZone(self): + dt = self.format.parse('09:48 -600', 'HH:mm z') + self.assertEqual(pickle.loads(pickle.dumps(dt)), dt) + self.assertEqual(dt.tzinfo.utcoffset(dt), datetime.timedelta(hours=-6)) + self.assertEqual(dt.tzinfo.zone, None) + self.assertEqual(dt.tzinfo.tzname(dt), None) + + dt = self.format.parse('09:48 -06:00', 'HH:mm zz') + self.assertEqual(pickle.loads(pickle.dumps(dt)), dt) + self.assertEqual(dt.tzinfo.utcoffset(dt), datetime.timedelta(hours=-6)) + self.assertEqual(dt.tzinfo.zone, None) + self.assertEqual(dt.tzinfo.tzname(dt), None) + + def testParseTimeZoneNames(self): + # Note that EST is a deprecated timezone name since it is a US + # interpretation (other countries also use the EST timezone + # abbreviation) + dt = self.format.parse('01.01.2003 09:48 EST', 'dd.MM.yyyy HH:mm zzz') + self.assertEqual(pickle.loads(pickle.dumps(dt)), dt) + self.assertEqual(dt.tzinfo.utcoffset(dt), datetime.timedelta(hours=-5)) + self.assertEqual(dt.tzinfo.zone, 'EST') + self.assertEqual(dt.tzinfo.tzname(dt), 'EST') + + dt = self.format.parse('01.01.2003 09:48 US/Eastern', + 'dd.MM.yyyy HH:mm zzzz') + self.assertEqual(pickle.loads(pickle.dumps(dt)), dt) + self.assertEqual(dt.tzinfo.utcoffset(dt), datetime.timedelta(hours=-5)) + self.assertEqual(dt.tzinfo.zone, 'US/Eastern') + self.assertEqual(dt.tzinfo.tzname(dt), 'EST') + + dt = self.format.parse('01.01.2003 09:48 Canada/Eastern', + 'dd.MM.yyyy HH:mm zzzz') + self.assertEqual(dt.tzinfo.utcoffset(dt), datetime.timedelta(hours=-5)) + self.assertEqual(dt.tzinfo.zone, 'Canada/Eastern') + self.assertEqual(dt.tzinfo.tzname(dt), 'EST') + + # Note that historical and future (as far as known) + # timezones are handled happily using the 
pytz timezone database + # US DST transition points are changing in 2007 + dt = self.format.parse('01.04.2006 09:48 US/Eastern', + 'dd.MM.yyyy HH:mm zzzz') + self.assertEqual(dt.tzinfo.zone, 'US/Eastern') + self.assertEqual(dt.tzinfo.tzname(dt), 'EST') + self.assertEqual(dt.tzinfo.utcoffset(dt), datetime.timedelta(hours=-5)) + dt = self.format.parse('01.04.2007 09:48 US/Eastern', + 'dd.MM.yyyy HH:mm zzzz') + self.assertEqual(dt.tzinfo.zone, 'US/Eastern') + self.assertEqual(dt.tzinfo.tzname(dt), 'EDT') + self.assertEqual(dt.tzinfo.utcoffset(dt), datetime.timedelta(hours=-4)) + + def testDateTimeParseError(self): + with self.assertRaises(DateTimeParseError): + self.format.parse('02.01.03 21:48', 'dd.MM.yyyy HH:mm') + with self.assertRaises(DateTimeParseError): + self.format.parse('02.01.2003', 'dd.MM.yy') + with self.assertRaises(DateTimeParseError): + self.format.parse('ff02.01.03', 'dd.MM.yy') + + def testParse12PM(self): + self.assertEqual( + self.format.parse('01.01.03 12:00 nachm.', 'dd.MM.yy hh:mm a'), + datetime.datetime(2003, 1, 1, 12, 00, 00, 00)) + + def testParseUnusualFormats(self): + self.assertEqual( + self.format.parse('001. Januar 03 0012:00', + 'ddd. MMMMM yy HHHH:mm'), + datetime.datetime(2003, 1, 1, 12, 00, 00, 00)) + self.assertEqual( + self.format.parse('0001. Jan 2003 0012:00 vorm.', + 'dddd. 
MMM yyyy hhhh:mm a'), + datetime.datetime(2003, 1, 1, 00, 00, 00, 00)) + + def testParseNotObject(self): + self.assertEqual( + ('2017', '01', '01'), + self.format.parse('2017-01-01', 'yyyy-MM-dd', asObject=False)) + + def testParseTwoDigitYearIs20thCentury(self): + self.assertEqual( + datetime.date(1952, 1, 1), + self.format.parse('52-01-01', 'yy-MM-dd')) + + # 30 is the cut off + self.assertEqual( + datetime.date(1931, 1, 1), + self.format.parse('31-01-01', 'yy-MM-dd')) + + self.assertEqual( + datetime.date(2030, 1, 1), + self.format.parse('30-01-01', 'yy-MM-dd')) + + def testParseAMPMMissing(self): + with self.assertRaisesRegex( + DateTimeParseError, + 'Cannot handle 12-hour format without am/pm marker.'): + self.format.parse('02.01.03 09:48', 'dd.MM.yy hh:mm') + + def testParseBadTimezone(self): + # Produces an object without pytz info + self.assertEqual( + datetime.time(21, 48, 1), + self.format.parse( + '21:48:01 Bad/Timezone', + 'HH:mm:ss zzzz')) + + def testParsePyTzTimezone(self): + tzinfo = pytz.timezone("US/Central") + self.assertEqual( + datetime.time(21, 48, 1, tzinfo=tzinfo), + self.format.parse( + '21:48:01 US/Central', + 'HH:mm:ss zzzz')) + + def testFormatSimpleDateTime(self): + # German short + self.assertEqual( + self.format.format(datetime.datetime(2003, 1, 2, 21, 48), + 'dd.MM.yy HH:mm'), + '02.01.03 21:48') + + def testFormatRealDateTime(self): + tz = pytz.timezone('Europe/Berlin') + dt = tz.localize(datetime.datetime(2003, 1, 2, 21, 48, 1)) + # German medium + self.assertEqual( + self.format.format(dt, 'dd.MM.yyyy HH:mm:ss'), + '02.01.2003 21:48:01') + + # German long + self.assertEqual( + self.format.format(dt, 'd. MMMM yyyy HH:mm:ss z'), + '2. Januar 2003 21:48:01 +100') + + # German full + self.assertEqual( + self.format.format( + dt, "EEEE, d. MMMM yyyy H:mm' Uhr 'z"), + 'Donnerstag, 2. 
Januar 2003 21:48 Uhr +100') + + def testFormatAMPMDateTime(self): + self.assertEqual( + self.format.format( + datetime.datetime(2003, 1, 2, 21, 48), + 'dd.MM.yy hh:mm a'), + '02.01.03 09:48 nachm.') + + def testFormatAllWeekdays(self): + for day in range(1, 8): + self.assertEqual( + self.format.format( + datetime.datetime(2003, 1, day+5, 21, 48), + "EEEE, d. MMMM yyyy H:mm' Uhr 'z"), + '%s, %i. Januar 2003 21:48 Uhr +000' % ( + self.format.calendar.days[day][0], day+5)) + + def testFormatTimeZone(self): + self.assertEqual( + self.format.format( + datetime.datetime(2003, 1, 2, 12, 00), 'z'), + '+000') + self.assertEqual( + self.format.format( + datetime.datetime(2003, 1, 2, 12, 00), 'zz'), + '+00:00') + self.assertEqual( + self.format.format( + datetime.datetime(2003, 1, 2, 12, 00), 'zzz'), + 'UTC') + self.assertEqual( + self.format.format( + datetime.datetime(2003, 1, 2, 12, 00), 'zzzz'), + 'UTC') + tz = pytz.timezone('US/Eastern') + self.assertEqual( + self.format.format( + tz.localize(datetime.datetime(2003, 1, 2, 12)), 'z'), + '-500') + self.assertEqual( + self.format.format( + tz.localize(datetime.datetime(2003, 1, 2, 12)), 'zz'), + '-05:00') + self.assertEqual( + self.format.format( + tz.localize(datetime.datetime(2003, 1, 2, 12)), 'zzz'), + 'EST') + self.assertEqual( + self.format.format( + tz.localize(datetime.datetime(2003, 1, 2, 12)), 'zzzz'), + 'US/Eastern') + + def testFormatWeekDay(self): + date = datetime.date(2003, 1, 2) + self.assertEqual(self.format.format(date, "E"), + '4') + self.assertEqual(self.format.format(date, "EE"), + '04') + self.assertEqual(self.format.format(date, "EEE"), + 'Do') + self.assertEqual(self.format.format(date, "EEEE"), + 'Donnerstag') + + # Create custom calendar, which has Sunday as the first day of the + # week. I am assigning a totally new dict here, since dicts are + # mutable and the value would be changed for the class and all its + # instances. 
+ calendar = LocaleCalendarStub() + calendar.week = {'firstDay': 7, 'minDays': 1} + format = DateTimeFormat(calendar=calendar) + + self.assertEqual(format.format(date, "E"), + '5') + self.assertEqual(format.format(date, "EE"), + '05') + + def testFormatDayOfWeekInMonth(self): + date = datetime.date(2003, 1, 2) + self.assertEqual(self.format.format(date, "F"), + '1') + self.assertEqual(self.format.format(date, "FF"), + '01') + self.assertEqual( + self.format.format(datetime.date(2003, 1, 9), "F"), + '2') + self.assertEqual( + self.format.format(datetime.date(2003, 1, 16), "F"), + '3') + self.assertEqual( + self.format.format(datetime.date(2003, 1, 23), "F"), + '4') + + def testFormatWeekInMonth(self): + self.assertEqual( + self.format.format(datetime.date(2003, 1, 3), "W"), + '1') + self.assertEqual( + self.format.format(datetime.date(2003, 1, 3), "WW"), + '01') + self.assertEqual( + self.format.format(datetime.date(2003, 1, 8), "W"), + '2') + self.assertEqual( + self.format.format(datetime.date(2003, 1, 19), "W"), + '3') + self.assertEqual( + self.format.format(datetime.date(2003, 1, 20), "W"), + '4') + self.assertEqual( + self.format.format(datetime.date(2003, 1, 31), "W"), + '5') + + def testFormatHourInDayOneTo24(self): + self.assertEqual( + self.format.format(datetime.time(5, 0), "k"), + '5') + self.assertEqual( + self.format.format(datetime.time(5, 0), "kk"), + '05') + self.assertEqual( + self.format.format(datetime.time(0, 0), "k"), + '24') + self.assertEqual( + self.format.format(datetime.time(1, 0), "k"), + '1') + + def testFormatHourInDayZeroToEleven(self): + self.assertEqual( + self.format.format(datetime.time(5, 0), "K"), + '5') + self.assertEqual( + self.format.format(datetime.time(5, 0), "KK"), + '05') + self.assertEqual( + self.format.format(datetime.time(0, 0), "K"), + '0') + self.assertEqual( + self.format.format(datetime.time(12, 0), "K"), + '0') + self.assertEqual( + self.format.format(datetime.time(11, 0), "K"), + '11') + self.assertEqual( + 
self.format.format(datetime.time(23, 0), "K"), + '11') + + def testFormatSimpleHourRepresentation(self): + self.assertEqual( + self.format.format(datetime.datetime(2003, 1, 2, 23, 00), + 'dd.MM.yy h:mm:ss a'), + '02.01.03 11:00:00 nachm.') + self.assertEqual( + self.format.format(datetime.datetime(2003, 1, 2, 2, 00), + 'dd.MM.yy h:mm:ss a'), + '02.01.03 2:00:00 vorm.') + self.assertEqual( + self.format.format(datetime.time(0, 15), 'h:mm a'), + '12:15 vorm.') + self.assertEqual( + self.format.format(datetime.time(1, 15), 'h:mm a'), + '1:15 vorm.') + self.assertEqual( + self.format.format(datetime.time(12, 15), 'h:mm a'), + '12:15 nachm.') + self.assertEqual( + self.format.format(datetime.time(13, 15), 'h:mm a'), + '1:15 nachm.') + + def testFormatDayInYear(self): + self.assertEqual( + self.format.format(datetime.date(2003, 1, 3), 'D'), + u"3") + self.assertEqual( + self.format.format(datetime.date(2003, 1, 3), 'DD'), + u"03") + self.assertEqual( + self.format.format(datetime.date(2003, 1, 3), 'DDD'), + u"003") + self.assertEqual( + self.format.format(datetime.date(2003, 12, 31), 'D'), + u"365") + self.assertEqual( + self.format.format(datetime.date(2003, 12, 31), 'DD'), + u"365") + self.assertEqual( + self.format.format(datetime.date(2003, 12, 31), 'DDD'), + u"365") + self.assertEqual( + self.format.format(datetime.date(2004, 12, 31), 'DDD'), + u"366") + + def testFormatDayOfWeekInMOnth(self): + self.assertEqual( + self.format.format(datetime.date(2003, 1, 3), 'F'), + u"1") + self.assertEqual( + self.format.format(datetime.date(2003, 1, 10), 'F'), + u"2") + self.assertEqual( + self.format.format(datetime.date(2003, 1, 17), 'F'), + u"3") + self.assertEqual( + self.format.format(datetime.date(2003, 1, 24), 'F'), + u"4") + self.assertEqual( + self.format.format(datetime.date(2003, 1, 31), 'F'), + u"5") + self.assertEqual( + self.format.format(datetime.date(2003, 1, 6), 'F'), + u"1") + + def testFormatUnusualFormats(self): + self.assertEqual( + 
self.format.format(datetime.date(2003, 1, 3), 'DDD-yyyy'), + u"003-2003") + self.assertEqual( + self.format.format(datetime.date(2003, 1, 10), + "F. EEEE 'im' MMMM, yyyy"), + u"2. Freitag im Januar, 2003") + + + def testFormatGregorianEra(self): + self.assertEqual( + self.format.format(datetime.date(2017, 12, 17), 'G'), + u'n. Chr.' + ) + + def testFormateMonthLengthOne(self): + self.assertEqual( + self.format.format(datetime.date(2017, 12, 17), 'M'), + u'12' + ) + + +class TestNumberPatternParser(_TestCase): + """Extensive tests for the ICU-based-syntax number pattern parser.""" + + def testParseSimpleIntegerPattern(self): + self.assertEqual( + parseNumberPattern('###0'), + ((None, '', None, '###0', '', '', None, '', None, 0), + (None, '', None, '###0', '', '', None, '', None, 0))) + + def testParseScientificIntegerPattern(self): + self.assertEqual( + parseNumberPattern('###0E#0'), + ((None, '', None, '###0', '', '#0', None, '', None, 0), + (None, '', None, '###0', '', '#0', None, '', None, 0))) + self.assertEqual( + parseNumberPattern('###0E+#0'), + ((None, '', None, '###0', '', '+#0', None, '', None, 0), + (None, '', None, '###0', '', '+#0', None, '', None, 0))) + + def testParsePosNegAlternativeIntegerPattern(self): + self.assertEqual( + parseNumberPattern('###0;#0'), + ((None, '', None, '###0', '', '', None, '', None, 0), + (None, '', None, '#0', '', '', None, '', None, 0))) + + def testParsePrefixedIntegerPattern(self): + self.assertEqual( + parseNumberPattern('+###0'), + ((None, '+', None, '###0', '', '', None, '', None, 0), + (None, '+', None, '###0', '', '', None, '', None, 0))) + + def testParsePosNegIntegerPattern(self): + self.assertEqual( + parseNumberPattern('+###0;-###0'), + ((None, '+', None, '###0', '', '', None, '', None, 0), + (None, '-', None, '###0', '', '', None, '', None, 0))) + + def testParseScientificPosNegIntegerPattern(self): + self.assertEqual( + parseNumberPattern('+###0E0;-###0E#0'), + ((None, '+', None, '###0', '', '0', None, '', 
None, 0), + (None, '-', None, '###0', '', '#0', None, '', None, 0))) + + def testParseThousandSeparatorIntegerPattern(self): + self.assertEqual( + parseNumberPattern('#,##0'), + ((None, '', None, '###0', '', '', None, '', None, 1), + (None, '', None, '###0', '', '', None, '', None, 1))) + + def testParseSimpleDecimalPattern(self): + self.assertEqual( + parseNumberPattern('###0.00#'), + ((None, '', None, '###0', '00#', '', None, '', None, 0), + (None, '', None, '###0', '00#', '', None, '', None, 0))) + + def testParseScientificDecimalPattern(self): + self.assertEqual( + parseNumberPattern('###0.00#E#0'), + ((None, '', None, '###0', '00#', '#0', None, '', None, 0), + (None, '', None, '###0', '00#', '#0', None, '', None, 0))) + + def testParsePosNegAlternativeFractionPattern(self): + self.assertEqual( + parseNumberPattern('###0.00#;#0.0#'), + ((None, '', None, '###0', '00#', '', None, '', None, 0), + (None, '', None, '#0', '0#', '', None, '', None, 0))) + + def testParsePosNegFractionPattern(self): + self.assertEqual( + parseNumberPattern('+###0.0##;-###0.0##'), + ((None, '+', None, '###0', '0##', '', None, '', None, 0), + (None, '-', None, '###0', '0##', '', None, '', None, 0))) + + def testParseScientificPosNegFractionPattern(self): + self.assertEqual( + parseNumberPattern('+###0.0##E#0;-###0.0##E0'), + ((None, '+', None, '###0', '0##', '#0', None, '', None, 0), + (None, '-', None, '###0', '0##', '0', None, '', None, 0))) + + def testParseThousandSeparatorFractionPattern(self): + self.assertEqual( + parseNumberPattern('#,##0.0#'), + ((None, '', None, '###0', '0#', '', None, '', None, 1), + (None, '', None, '###0', '0#', '', None, '', None, 1))) + + def testParsePadding1WithoutPrefixPattern(self): + self.assertEqual( + parseNumberPattern('* ###0'), + ((' ', '', None, '###0', '', '', None, '', None, 0), + (' ', '', None, '###0', '', '', None, '', None, 0))) + self.assertEqual( + parseNumberPattern('* ###0.0##'), + ((' ', '', None, '###0', '0##', '', None, '', None, 
0), + (' ', '', None, '###0', '0##', '', None, '', None, 0))) + self.assertEqual( + parseNumberPattern('* ###0.0##;*_###0.0##'), + ((' ', '', None, '###0', '0##', '', None, '', None, 0), + ('_', '', None, '###0', '0##', '', None, '', None, 0))) + + def testParsePadding1WithPrefixPattern(self): + self.assertEqual( + parseNumberPattern('* +###0'), + ((' ', '+', None, '###0', '', '', None, '', None, 0), + (' ', '+', None, '###0', '', '', None, '', None, 0))) + self.assertEqual( + parseNumberPattern('* +###0.0##'), + ((' ', '+', None, '###0', '0##', '', None, '', None, 0), + (' ', '+', None, '###0', '0##', '', None, '', None, 0))) + self.assertEqual( + parseNumberPattern('* +###0.0##;*_-###0.0##'), + ((' ', '+', None, '###0', '0##', '', None, '', None, 0), + ('_', '-', None, '###0', '0##', '', None, '', None, 0))) + + def testParsePadding1Padding2WithPrefixPattern(self): + self.assertEqual( + parseNumberPattern('* +* ###0'), + ((' ', '+', ' ', '###0', '', '', None, '', None, 0), + (' ', '+', ' ', '###0', '', '', None, '', None, 0))) + self.assertEqual( + parseNumberPattern('* +* ###0.0##'), + ((' ', '+', ' ', '###0', '0##', '', None, '', None, 0), + (' ', '+', ' ', '###0', '0##', '', None, '', None, 0))) + self.assertEqual( + parseNumberPattern('* +* ###0.0##;*_-*_###0.0##'), + ((' ', '+', ' ', '###0', '0##', '', None, '', None, 0), + ('_', '-', '_', '###0', '0##', '', None, '', None, 0))) + + def testParsePadding3WithoutSufffixPattern(self): + self.assertEqual( + parseNumberPattern('###0* '), + ((None, '', None, '###0', '', '', ' ', '', None, 0), + (None, '', None, '###0', '', '', ' ', '', None, 0))) + self.assertEqual( + parseNumberPattern('###0.0##* '), + ((None, '', None, '###0', '0##', '', ' ', '', None, 0), + (None, '', None, '###0', '0##', '', ' ', '', None, 0))) + self.assertEqual( + parseNumberPattern('###0.0##* ;###0.0##*_'), + ((None, '', None, '###0', '0##', '', ' ', '', None, 0), + (None, '', None, '###0', '0##', '', '_', '', None, 0))) + + def 
testParsePadding3InScientificPattern(self): + self.assertEqual( + parseNumberPattern('###0E#0* '), + ((None, '', None, '###0', '', '#0', ' ', '', None, 0), + (None, '', None, '###0', '', '#0', ' ', '', None, 0))) + self.assertEqual( + parseNumberPattern('###0.0##E0* '), + ((None, '', None, '###0', '0##', '0', ' ', '', None, 0), + (None, '', None, '###0', '0##', '0', ' ', '', None, 0))) + self.assertEqual( + parseNumberPattern('###0.0##E#0* ;###0.0##E0*_'), + ((None, '', None, '###0', '0##', '#0', ' ', '', None, 0), + (None, '', None, '###0', '0##', '0', '_', '', None, 0))) + + def testParsePadding3WithSufffixPattern(self): + self.assertEqual( + parseNumberPattern('###0* /'), + ((None, '', None, '###0', '', '', ' ', '/', None, 0), + (None, '', None, '###0', '', '', ' ', '/', None, 0))) + self.assertEqual( + parseNumberPattern('###0.0#* /'), + ((None, '', None, '###0', '0#', '', ' ', '/', None, 0), + (None, '', None, '###0', '0#', '', ' ', '/', None, 0))) + self.assertEqual( + parseNumberPattern('###0.0#* /;###0.0#*_/'), + ((None, '', None, '###0', '0#', '', ' ', '/', None, 0), + (None, '', None, '###0', '0#', '', '_', '/', None, 0))) + + def testParsePadding3And4WithSuffixPattern(self): + self.assertEqual( + parseNumberPattern('###0* /* '), + ((None, '', None, '###0', '', '', ' ', '/', ' ', 0), + (None, '', None, '###0', '', '', ' ', '/', ' ', 0))) + self.assertEqual( + parseNumberPattern('###0* /* ;###0*_/*_'), + ((None, '', None, '###0', '', '', ' ', '/', ' ', 0), + (None, '', None, '###0', '', '', '_', '/', '_', 0))) + + def testParseMultipleCharacterPrefix(self): + self.assertEqual( + parseNumberPattern('DM###0'), + ((None, 'DM', None, '###0', '', '', None, '', None, 0), + (None, 'DM', None, '###0', '', '', None, '', None, 0))) + self.assertEqual( + parseNumberPattern('DM* ###0'), + ((None, 'DM', ' ', '###0', '', '', None, '', None, 0), + (None, 'DM', ' ', '###0', '', '', None, '', None, 0))) + + def testParseStringEscapedPrefix(self): + self.assertEqual( + 
parseNumberPattern("'DEM'###0"), + ((None, 'DEM', None, '###0', '', '', None, '', None, 0), + (None, 'DEM', None, '###0', '', '', None, '', None, 0))) + self.assertEqual( + parseNumberPattern("D'EM'###0"), + ((None, 'DEM', None, '###0', '', '', None, '', None, 0), + (None, 'DEM', None, '###0', '', '', None, '', None, 0))) + self.assertEqual( + parseNumberPattern("D'E'M###0"), + ((None, 'DEM', None, '###0', '', '', None, '', None, 0), + (None, 'DEM', None, '###0', '', '', None, '', None, 0))) + + def testParseStringEscapedSuffix(self): + self.assertEqual( + parseNumberPattern("###0'DEM'"), + ((None, '', None, '###0', '', '', None, 'DEM', None, 0), + (None, '', None, '###0', '', '', None, 'DEM', None, 0))) + self.assertEqual( + parseNumberPattern("###0D'EM'"), + ((None, '', None, '###0', '', '', None, 'DEM', None, 0), + (None, '', None, '###0', '', '', None, 'DEM', None, 0))) + self.assertEqual( + parseNumberPattern("###0D'E'M"), + ((None, '', None, '###0', '', '', None, 'DEM', None, 0), + (None, '', None, '###0', '', '', None, 'DEM', None, 0))) + + def testParseInvalidBegin(self): + with self.assertRaisesRegex(NumberPatternParseError, + "Wrong syntax at beginning"): + parseNumberPattern(".") + + def testParseFractionQuate(self): + pattern, neg_pattern = parseNumberPattern("0.'") + self.assertEqual( + (None, '', None, '0', '', '', None, '', None, 0), + pattern) + self.assertEqual( + (None, '', None, '0', '', '', None, '', None, 0), + neg_pattern) + + def testParseExponentialQuote(self): + pattern, neg_pattern = parseNumberPattern("0E'") + self.assertEqual( + (None, '', None, '0', '', '', None, '', None, 0), + pattern) + self.assertEqual( + (None, '', None, '0', '', '', None, '', None, 0), + neg_pattern) + + def testParseExponentialNumber(self): + pattern, neg_pattern = parseNumberPattern("0E1") + self.assertEqual( + (None, '', None, '0', '', '', None, '1', None, 0), + pattern) + self.assertEqual( + (None, '', None, '0', '', '', None, '1', None, 0), + neg_pattern) + 
+class TestNumberFormat(_TestCase): + """Test the functionality of an implmentation of the NumberFormat.""" + + format = NumberFormat(symbols={ + 'decimal': '.', 'group': ',', 'list': ';', 'percentSign': '%', + 'nativeZeroDigit': '0', 'patternDigit': '#', 'plusSign': '+', + 'minusSign': '-', 'exponential': 'E', 'perMille': 'o/oo', + 'infinity': 'oo', 'nan': 'N/A'}) + + def testInterfaceConformity(self): + self.assertTrue(INumberFormat.providedBy(self.format)) + + def testParseSimpleInteger(self): + self.assertEqual(self.format.parse('23341', '###0'), + 23341) + self.assertEqual(self.format.parse('041', '#000'), + 41) + + def testParseScientificInteger(self): + self.assertEqual(self.format.parse('2.3341E4', '0.0###E0'), + 23341) + self.assertEqual(self.format.parse('4.100E01', '0.000##E00'), + 41) + self.assertEqual(self.format.parse('1E0', '0E0'), + 1) + self.assertEqual(self.format.parse('0E0', '0E0'), + 0) + # This is a special case I found not working, but is used frequently + # in the new LDML Locale files. 
+ self.assertEqual(self.format.parse('2.3341E+04', '0.000###E+00'), + 23341) + + def testParsePosNegAlternativeInteger(self): + self.assertEqual(self.format.parse('23341', '#000;#00'), + 23341) + self.assertEqual(self.format.parse('041', '#000;#00'), + 41) + self.assertEqual(self.format.parse('41', '#000;#00'), + -41) + self.assertEqual(self.format.parse('01', '#000;#00'), + -1) + + def testParsePrefixedInteger(self): + self.assertEqual(self.format.parse('+23341', '+###0'), + 23341) + self.assertEqual(self.format.parse('+041', '+#000'), + 41) + + def testParsePosNegInteger(self): + self.assertEqual(self.format.parse('+23341', '+###0;-###0'), + 23341) + self.assertEqual(self.format.parse('+041', '+#000;-#000'), + 41) + self.assertEqual(self.format.parse('-23341', '+###0;-###0'), + -23341) + self.assertEqual(self.format.parse('-041', '+#000;-#000'), + -41) + + def testParseThousandSeparatorInteger(self): + self.assertEqual(self.format.parse('+23,341', '+#,##0;-#,##0'), + 23341) + self.assertEqual(self.format.parse('-23,341', '+#,##0;-#,##0'), + -23341) + self.assertEqual(self.format.parse('+0,041', '+#0,000;-#0,000'), + 41) + self.assertEqual(self.format.parse('-0,041', '+#0,000;-#0,000'), + -41) + + def testParseDecimal(self): + self.assertEqual(self.format.parse('23341.02', '###0.0#'), + 23341.02) + self.assertEqual(self.format.parse('23341.1', '###0.0#'), + 23341.1) + self.assertEqual(self.format.parse('23341.020', '###0.000#'), + 23341.02) + + def testParseDecimalWithOptionalDecimalDigits(self): + self.assertEqual(self.format.parse('23341.02', '###0.##'), + 23341.02) + self.assertEqual(self.format.parse('23341', '###0.#'), + 23341.0) + self.assertEqual(self.format.parse('23341.', '###0.#'), + 23341.0) + + def testParseScientificDecimal(self): + self.assertEqual(self.format.parse('2.334102E04', '0.00####E00'), + 23341.02) + self.assertEqual(self.format.parse('2.3341020E004', '0.0000000E000'), + 23341.02) + self.assertEqual(self.format.parse('0.0E0', '0.0#E0'), + 
0.0) + + def testParseScientificDecimalSmallerOne(self): + self.assertEqual(self.format.parse('2.357E-02', '0.00####E00'), + 0.02357) + self.assertEqual(self.format.parse('2.0000E-02', '0.0000E00'), + 0.02) + + def testParsePadding1WithoutPrefix(self): + self.assertEqual(self.format.parse(' 41', '* ##0;*_##0'), + 41) + self.assertEqual(self.format.parse('_41', '* ##0;*_##0'), + -41) + + def testParsePadding1WithPrefix(self): + self.assertEqual(self.format.parse(' +41', '* +##0;*_-##0'), + 41) + self.assertEqual(self.format.parse('_-41', '* +##0;*_-##0'), + -41) + + def testParsePadding1Padding2WithPrefix(self): + self.assertEqual(self.format.parse(' + 41', '* +* ###0;*_-*_###0'), + +41) + self.assertEqual(self.format.parse('__-_41', '* +* ###0;*_-*_###0'), + -41) + + def testParsePadding1Scientific(self): + self.assertEqual(self.format.parse(' 4.102E1', + '* 0.0####E0;*_0.0####E0'), + 41.02) + self.assertEqual(self.format.parse('__4.102E1', + '* 0.0####E0;*_0.0####E0'), + -41.02) + self.assertEqual(self.format.parse(' +4.102E1', + '* +0.0###E0;*_-0.0###E0'), + 41.02) + self.assertEqual(self.format.parse('_-4.102E1', + '* +0.0###E0;*_-0.0###E0'), + -41.02) + + def testParsePadding3WithoutSufffix(self): + self.assertEqual(self.format.parse('41.02 ', '#0.0###* ;#0.0###*_'), + 41.02) + self.assertEqual(self.format.parse('41.02__', '#0.0###* ;#0.0###*_'), + -41.02) + + def testParsePadding3WithSufffix(self): + self.assertEqual( + self.format.parse('[41.02 ]', '[#0.0###* ];(#0.0###*_)'), + 41.02) + self.assertEqual( + self.format.parse('(41.02__)', '[#0.0###* ];(#0.0###*_)'), + -41.02) + + def testParsePadding3Scientific(self): + self.assertEqual(self.format.parse('4.102E1 ', + '0.0##E0##* ;0.0##E0##*_'), + 41.02) + self.assertEqual(self.format.parse('4.102E1__', + '0.0##E0##* ;0.0##E0##*_'), + -41.02) + self.assertEqual(self.format.parse('(4.102E1 )', + '(0.0##E0##* );0.0E0'), + 41.02) + self.assertEqual(self.format.parse('[4.102E1__]', + '0.0E0;[0.0##E0##*_]'), + 
-41.02) + + def testParsePadding3Padding4WithSuffix(self): + self.assertEqual(self.format.parse('(41.02 ) ', '(#0.0###* )* '), + 41.02) + self.assertEqual(self.format.parse('(4.102E1 ) ', '(0.0##E0##* )* '), + 41.02) + + def testParseDecimalWithGermanDecimalSeparator(self): + format = NumberFormat(symbols={'decimal': ',', 'group': '.'}) + self.assertEqual(format.parse('1.234,567', '#,##0.000'), 1234.567) + + def testParseWithAlternativeExponentialSymbol(self): + format = NumberFormat( + symbols={'decimal': '.', 'group': ',', 'exponential': 'X'}) + self.assertEqual(format.parse('1.2X11', '#.#E0'), 1.2e11) + + def testParseFailWithInvalidCharacters(self): + with self.assertRaises(NumberParseError): + self.format.parse('123xx', '###0.0#') + with self.assertRaises(NumberParseError): + self.format.parse('xx123', '###0.0#') + with self.assertRaises(NumberParseError): + self.format.parse('1xx23', '###0.0#') + + def testParseFailWithInvalidGroupCharacterPosition(self): + with self.assertRaises(NumberParseError): + self.format.parse('123,00', '###0.0#') + with self.assertRaises(NumberParseError): + self.format.parse(',123', '###0.0#') + with self.assertRaises(NumberParseError): + self.format.parse('1,23.00', '###0.0#') + + def testChangeOutputType(self): + format = NumberFormat() + format.type = decimal.Decimal + self.assertEqual(format.parse('23341', '###0'), + decimal.Decimal('23341')) + self.assertEqual(format.parse('233.41', '###0.00'), + decimal.Decimal('233.41')) + + def testFormatSimpleInteger(self): + self.assertEqual(self.format.format(23341, '###0'), + '23341') + self.assertEqual(self.format.format(41, '#000'), + '041') + + def testFormatScientificInteger(self): + self.assertEqual(self.format.format(23341, '0.000#E0'), + '2.3341E4') + self.assertEqual(self.format.format(23341, '0.000#E00'), + '2.3341E04') + self.assertEqual(self.format.format(1, '0.##E0'), + '1E0') + self.assertEqual(self.format.format(1, '0.00E00'), + '1.00E00') + # This is a special case I found 
not working, but is used frequently + # in the new LDML Locale files. + self.assertEqual(self.format.format(23341, '0.000###E+00'), + '2.3341E+04') + + def testFormatScientificZero(self): + self.assertEqual(self.format.format(0, '0.00E00'), + '0.00E00') + self.assertEqual(self.format.format(0, '0E0'), + '0E0') + + def testFormatPosNegAlternativeInteger(self): + self.assertEqual(self.format.format(23341, '#000;#00'), + '23341') + self.assertEqual(self.format.format(41, '#000;#00'), + '041') + self.assertEqual(self.format.format(-23341, '#000;#00'), + '23341') + self.assertEqual(self.format.format(-41, '#000;#00'), + '41') + self.assertEqual(self.format.format(-1, '#000;#00'), + '01') + + def testFormatPrefixedInteger(self): + self.assertEqual(self.format.format(23341, '+###0'), + '+23341') + self.assertEqual(self.format.format(41, '+#000'), + '+041') + self.assertEqual(self.format.format(-23341, '+###0'), + '+23341') + self.assertEqual(self.format.format(-41, '+#000'), + '+041') + + def testFormatPosNegInteger(self): + self.assertEqual(self.format.format(23341, '+###0;-###0'), + '+23341') + self.assertEqual(self.format.format(41, '+#000;-#000'), + '+041') + self.assertEqual(self.format.format(-23341, '+###0;-###0'), + '-23341') + self.assertEqual(self.format.format(-41, '+#000;-#000'), + '-041') + + def testFormatPosNegScientificInteger(self): + self.assertEqual(self.format.format(23341, '+0.00###E00;-0.00###E00'), + '+2.3341E04') + self.assertEqual(self.format.format(23341, '-0.00###E00;-0.00###E00'), + '-2.3341E04') + + def testFormatThousandSeparatorInteger(self): + self.assertEqual(self.format.format(23341, '+#,##0;-#,##0'), + '+23,341') + self.assertEqual(self.format.format(-23341, '+#,##0;-#,##0'), + '-23,341') + self.assertEqual(self.format.format(41, '+#0,000;-#0,000'), + '+0,041') + self.assertEqual(self.format.format(-41, '+#0,000;-#0,000'), + '-0,041') + + def testFormatDecimal(self): + self.assertEqual(self.format.format(23341.02357, '###0.0#'), + 
'23341.02') + self.assertEqual(self.format.format(23341.02357, '###0.000#'), + '23341.0236') + self.assertEqual(self.format.format(23341.02, '###0.000#'), + '23341.020') + + def testRounding(self): + self.assertEqual(self.format.format(0.5, '#'), '1') + self.assertEqual(self.format.format(0.49, '#'), '0') + self.assertEqual(self.format.format(0.45, '0.0'), '0.5') + self.assertEqual(self.format.format(150, '0E0'), '2E2') + self.assertEqual(self.format.format(149, '0E0'), '1E2') + self.assertEqual(self.format.format(1.9999, '0.000'), '2.000') + self.assertEqual(self.format.format(1.9999, '0.0000'), '1.9999') + + + def testFormatScientificDecimal(self): + self.assertEqual(self.format.format(23341.02357, '0.00####E00'), + '2.334102E04') + self.assertEqual(self.format.format(23341.02, '0.0000000E000'), + '2.3341020E004') + + def testFormatScientificDecimalSmallerOne(self): + self.assertEqual(self.format.format(0.02357, '0.00####E00'), + '2.357E-02') + self.assertEqual(self.format.format(0.02, '0.0000E00'), + '2.0000E-02') + + def testFormatPadding1WithoutPrefix(self): + self.assertEqual(self.format.format(41, '* ##0;*_##0'), + ' 41') + self.assertEqual(self.format.format(-41, '* ##0;*_##0'), + '_41') + + def testFormatPadding1WithPrefix(self): + self.assertEqual(self.format.format(41, '* +##0;*_-##0'), + ' +41') + self.assertEqual(self.format.format(-41, '* +##0;*_-##0'), + '_-41') + + def testFormatPadding1Scientific(self): + self.assertEqual(self.format.format(41.02, '* 0.0####E0;*_0.0####E0'), + ' 4.102E1') + self.assertEqual(self.format.format(-41.02, '* 0.0####E0;*_0.0####E0'), + '__4.102E1') + self.assertEqual(self.format.format(41.02, '* +0.0###E0;*_-0.0###E0'), + ' +4.102E1') + self.assertEqual(self.format.format(-41.02, '* +0.0###E0;*_-0.0###E0'), + '_-4.102E1') + + def testFormatPadding1Padding2WithPrefix(self): + self.assertEqual(self.format.format(41, '* +* ###0;*_-*_###0'), + ' + 41') + self.assertEqual(self.format.format(-41, '* +* ###0;*_-*_###0'), + 
'__-_41') + + def testFormatPadding3WithoutSufffix(self): + self.assertEqual(self.format.format(41.02, '#0.0###* ;#0.0###*_'), + '41.02 ') + self.assertEqual(self.format.format(-41.02, '#0.0###* ;#0.0###*_'), + '41.02__') + + def testFormatPadding3WithSufffix(self): + self.assertEqual(self.format.format(41.02, '[#0.0###* ];(#0.0###*_)'), + '[41.02 ]') + self.assertEqual(self.format.format(-41.02, '[#0.0###* ];(#0.0###*_)'), + '(41.02__)') + + def testFormatPadding3Scientific(self): + self.assertEqual(self.format.format(41.02, '0.0##E0##* ;0.0##E0##*_'), + '4.102E1 ') + self.assertEqual(self.format.format(-41.02, '0.0##E0##* ;0.0##E0##*_'), + '4.102E1__') + self.assertEqual(self.format.format(41.02, '(0.0##E0##* );0.0E0'), + '(4.102E1 )') + self.assertEqual(self.format.format(-41.02, '0.0E0;[0.0##E0##*_]'), + '[4.102E1__]') + + def testFormatPadding3Padding4WithSuffix(self): + self.assertEqual(self.format.format(41.02, '(#0.0###* )* '), + '(41.02 ) ') + self.assertEqual(self.format.format(41.02, '(0.0##E0##* )* '), + '(4.102E1 ) ') + + def testFormatSmallNumbers(self): + self.assertEqual(self.format.format( + -1e-7, '(#0.00#####);(-#0.00#####)'), '(-0.0000001)') + self.assertEqual(self.format.format(1e-9, '(#0.00###)'), '(0.00)') + self.assertEqual(self.format.format(1e-9, '(#0.00###)'), '(0.00)') + + def testFormatHighPrecisionNumbers(self): + self.assertEqual( + self.format.format( + 1+1e-7, '(#0.00#####);(-#0.00#####)'), + '(1.0000001)') + self.assertEqual( + self.format.format( + 1+1e-7, '(#0.00###)'), + '(1.00000)') + self.assertEqual( + self.format.format( + 1+1e-9, '(#0.00#######);(-#0.00#######)'), + '(1.000000001)') + self.assertEqual( + self.format.format( + 1+1e-9, '(#0.00###)'), + '(1.00000)') + self.assertEqual( + self.format.format( + 1+1e-12, '(#0.00##########);(-#0.00##########)'), + '(1.000000000001)') + self.assertEqual( + self.format.format( + 1+1e-12, '(#0.00###)'), + '(1.00000)') + + def testNoRounding(self): + # Witout Rounding + 
self.assertEqual( + self.format.format( + decimal.Decimal('0.99999'), '0.###', rounding=False), + '0.99999') diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/tests/test_gettextmessagecatalog.py b/thesisenv/lib/python3.6/site-packages/zope/i18n/tests/test_gettextmessagecatalog.py new file mode 100644 index 0000000..07a0932 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/tests/test_gettextmessagecatalog.py @@ -0,0 +1,32 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Test a gettext implementation of a Message Catalog. 
+""" +import os +from zope.i18n.gettextmessagecatalog import GettextMessageCatalog +from zope.i18n.tests import test_imessagecatalog + + +class GettextMessageCatalogTest(test_imessagecatalog.TestIMessageCatalog): + + def _getMessageCatalog(self): + from zope.i18n import tests + path = os.path.dirname(tests.__file__) + self._path = os.path.join(path, 'en-default.mo') + catalog = GettextMessageCatalog('en', 'default', self._path) + return catalog + + + def _getUniqueIndentifier(self): + return self._path diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/tests/test_imessagecatalog.py b/thesisenv/lib/python3.6/site-packages/zope/i18n/tests/test_imessagecatalog.py new file mode 100644 index 0000000..803d4f0 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/tests/test_imessagecatalog.py @@ -0,0 +1,66 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""This is an 'abstract' test for the IMessageCatalog interface. 
+""" +import unittest +from zope.interface.verify import verifyObject +from zope.i18n.interfaces import IMessageCatalog +from zope.schema import getValidationErrors + + +class TestIMessageCatalog(unittest.TestCase): + + + # This should be overridden by every class that inherits this test + def _getMessageCatalog(self): + raise NotImplementedError() + + def _getUniqueIndentifier(self): + raise NotImplementedError() + + + def setUp(self): + self._catalog = self._getMessageCatalog() + + def testInterface(self): + verifyObject(IMessageCatalog, self._catalog) + errors = getValidationErrors(IMessageCatalog, self._catalog) + self.assertFalse(errors) + + def testGetMessage(self): + catalog = self._catalog + self.assertEqual(catalog.getMessage('short_greeting'), 'Hello!') + self.assertRaises(KeyError, catalog.getMessage, 'foo') + + def testQueryMessage(self): + catalog = self._catalog + self.assertEqual(catalog.queryMessage('short_greeting'), 'Hello!') + self.assertEqual(catalog.queryMessage('foo'), None) + self.assertEqual(catalog.queryMessage('foo', 'bar'), 'bar') + + def testGetLanguage(self): + catalog = self._catalog + self.assertEqual(catalog.language, 'en') + + def testGetDomain(self): + catalog = self._catalog + self.assertEqual(catalog.domain, 'default') + + def testGetIdentifier(self): + catalog = self._catalog + self.assertEqual(catalog.getIdentifier(), self._getUniqueIndentifier()) + + +def test_suite(): + return unittest.TestSuite() # Deliberately empty diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/tests/test_itranslationdomain.py b/thesisenv/lib/python3.6/site-packages/zope/i18n/tests/test_itranslationdomain.py new file mode 100644 index 0000000..79c385f --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/tests/test_itranslationdomain.py @@ -0,0 +1,111 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. 
+# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""This is an 'abstract' test for the ITranslationDomain interface. +""" +import unittest +from zope.interface.verify import verifyObject +from zope.interface import implementer + +import zope.component +from zope.component.testing import PlacelessSetup + +from zope.schema import getValidationErrors + +from zope.i18n.negotiator import negotiator +from zope.i18n.interfaces import INegotiator, IUserPreferredLanguages +from zope.i18n.interfaces import ITranslationDomain + +text_type = str if bytes is not str else unicode + +@implementer(IUserPreferredLanguages) +class Environment(object): + + + def __init__(self, langs=()): + self.langs = langs + + def getPreferredLanguages(self): + return self.langs + +class TestITranslationDomain(PlacelessSetup): + + # This should be overwritten by every class that inherits this test + def _getTranslationDomain(self): + raise NotImplementedError() + + def setUp(self): + super(TestITranslationDomain, self).setUp() + self._domain = self._getTranslationDomain() + + # Setup the negotiator utility + zope.component.provideUtility(negotiator, INegotiator) + + def testInterface(self): + verifyObject(ITranslationDomain, self._domain) + errors = getValidationErrors(ITranslationDomain, self._domain) + self.assertFalse(errors) + + def testSimpleTranslate(self): + translate = self._domain.translate + eq = self.assertEqual + # Test that a given message id is properly translated in a supported + # language + eq(translate('short_greeting', target_language='de'), 'Hallo!') 
+ # Same test, but use the context argument + context = Environment(('de', 'en')) + eq(translate('short_greeting', context=context), 'Hallo!') + + def testDynamicTranslate(self): + translate = self._domain.translate + eq = self.assertEqual + # Testing both translation and interpolation + eq(translate('greeting', mapping={'name': 'Stephan'}, + target_language='de'), + 'Hallo Stephan, wie geht es Dir?') + # Testing default value interpolation + eq(translate('greeting', mapping={'name': 'Philipp'}, + target_language='fr', + default="Hello $name, how are you?"), + 'Hello Philipp, how are you?') + + def testNoTranslation(self): + translate = self._domain.translate + eq = self.assertEqual + # Verify that an unknown message id will end up not being translated + eq(translate('glorp_smurf_hmpf', target_language='en'), + 'glorp_smurf_hmpf') + # Test default value behaviour + eq(translate('glorp_smurf_hmpf', target_language='en', + default='Glorp Smurf Hmpf'), + 'Glorp Smurf Hmpf') + + def testUnicodeDefaultValue(self): + translate = self._domain.translate + translated = translate('no way', target_language='en') + self.assertEqual(translated, "no way") + self.assertIsInstance(translated, text_type) + + def testNoTargetLanguage(self): + translate = self._domain.translate + eq = self.assertEqual + # Test that default is returned when no language can be negotiated + context = Environment(('xx', )) + eq(translate('short_greeting', context=context, default=42), 42) + + # Test that default is returned when there's no destination language + eq(translate('short_greeting', default=42), 42) + + +def test_suite(): + return unittest.TestSuite() # Deliberately empty diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/tests/test_negotiator.py b/thesisenv/lib/python3.6/site-packages/zope/i18n/tests/test_negotiator.py new file mode 100644 index 0000000..f8a0336 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/tests/test_negotiator.py @@ -0,0 +1,61 @@ 
+############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Language Negotiator tests. +""" +import unittest + +from zope.i18n.negotiator import Negotiator +from zope.i18n.interfaces import IUserPreferredLanguages +from zope.component.testing import PlacelessSetup +from zope.interface import implementer + +@implementer(IUserPreferredLanguages) +class Env(object): + + def __init__(self, langs=()): + self.langs = langs + + def getPreferredLanguages(self): + return self.langs + + +class NegotiatorTest(PlacelessSetup, unittest.TestCase): + + def setUp(self): + super(NegotiatorTest, self).setUp() + self.negotiator = Negotiator() + + def test_findLanguages(self): + + _cases = ( + (('en','de'), ('en','de','fr'), 'en'), + (('en'), ('it','de','fr'), None), + (('pt-br','de'), ('pt_BR','de','fr'), 'pt_BR'), + (('pt-br','en'), ('pt', 'en', 'fr'), 'pt'), + (('pt-br','en-us', 'de'), ('de', 'en', 'fr'), 'en'), + ) + + for user_pref_langs, obj_langs, expected in _cases: + env = Env(user_pref_langs) + self.assertEqual(self.negotiator.getLanguage(obj_langs, env), + expected) + + +def test_suite(): + return unittest.TestSuite(( + unittest.makeSuite(NegotiatorTest), + )) + +if __name__ == '__main__': + unittest.main(defaultTest='test_suite') diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/tests/test_plurals.py b/thesisenv/lib/python3.6/site-packages/zope/i18n/tests/test_plurals.py new 
file mode 100644 index 0000000..d4f57c7 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/tests/test_plurals.py @@ -0,0 +1,292 @@ +# -*- coding: utf-8 -*- +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Test a gettext implementation of a Message Catalog. +""" +import os +import unittest + +import zope.component +from zope.i18n import tests, translate +from zope.i18n.translationdomain import TranslationDomain +from zope.i18n.gettextmessagecatalog import GettextMessageCatalog +from zope.i18nmessageid import MessageFactory +from zope.i18n.interfaces import ITranslationDomain + + +class TestPlurals(unittest.TestCase): + + def _getMessageCatalog(self, locale, variant="default"): + path = os.path.dirname(tests.__file__) + self._path = os.path.join(path, '%s-%s.mo' % (locale, variant)) + catalog = GettextMessageCatalog(locale, variant, self._path) + return catalog + + def _getTranslationDomain(self, locale, variant="default"): + path = os.path.dirname(tests.__file__) + self._path = os.path.join(path, '%s-%s.mo' % (locale, variant)) + catalog = GettextMessageCatalog(locale, variant, self._path) + domain = TranslationDomain('default') + domain.addCatalog(catalog) + return domain + + def test_missing_queryPluralMessage(self): + catalog = self._getMessageCatalog('en') + self.assertEqual(catalog.language, 'en') + + self.assertEqual( + 
catalog.queryPluralMessage( + 'One apple', '%d apples', 0, + dft1='One fruit', dft2='%d fruits'), + '0 fruits') + + self.assertEqual( + catalog.queryPluralMessage( + 'One apple.', '%d apples.', 1, + dft1='One fruit', dft2='%d fruits'), + 'One fruit') + + self.assertEqual( + catalog.queryPluralMessage( + 'One apple.', '%d apples.', 2, + dft1='One fruit', dft2='%d fruits'), + '2 fruits') + + def test_missing_getPluralMessage(self): + catalog = self._getMessageCatalog('en') + self.assertEqual(catalog.language, 'en') + + with self.assertRaises(KeyError): + catalog.getPluralMessage('One apple', '%d fruits', 0) + + with self.assertRaises(KeyError): + catalog.getPluralMessage('One apple', '%d fruits', 1) + + with self.assertRaises(KeyError): + catalog.getPluralMessage('One apple', '%d fruits', 2) + + def test_GermanPlurals(self): + """Germanic languages such as english and german share the plural + rule. We test the german here. + """ + catalog = self._getMessageCatalog('de') + self.assertEqual(catalog.language, 'de') + + self.assertEqual(catalog.getPluralMessage( + 'There is one file.', 'There are %d files.', 1), + 'Es gibt eine Datei.') + self.assertEqual(catalog.getPluralMessage( + 'There is one file.', 'There are %d files.', 3), + 'Es gibt 3 Dateien.') + self.assertEqual(catalog.getPluralMessage( + 'There is one file.', 'There are %d files.', 0), + 'Es gibt 0 Dateien.') + + # Unknown id + self.assertRaises(KeyError, catalog.getPluralMessage, + 'There are %d files.', 'bar', 6) + + # Query without default values + self.assertEqual(catalog.queryPluralMessage( + 'There is one file.', 'There are %d files.', 1), + 'Es gibt eine Datei.') + self.assertEqual(catalog.queryPluralMessage( + 'There is one file.', 'There are %d files.', 3), + 'Es gibt 3 Dateien.') + + # Query with default values + self.assertEqual(catalog.queryPluralMessage( + 'There are %d files.', 'There is one file.', 1, + 'Es gibt 1 Datei.', 'Es gibt %d Dateien !', ), + 'Es gibt 1 Datei.') + 
self.assertEqual(catalog.queryPluralMessage( + 'There are %d files.', 'There is one file.', 3, + 'Es gibt 1 Datei.', 'Es gibt %d Dateien !', ), + 'Es gibt 3 Dateien !') + + def test_PolishPlurals(self): + """Polish has a complex rule for plurals. It makes for a good + test subject. + """ + catalog = self._getMessageCatalog('pl') + self.assertEqual(catalog.language, 'pl') + + self.assertEqual(catalog.getPluralMessage( + 'There is one file.', 'There are %d files.', 0), + u"Istnieją 0 plików.") + self.assertEqual(catalog.getPluralMessage( + 'There is one file.', 'There are %d files.', 1), + u"Istnieje 1 plik.") + self.assertEqual(catalog.getPluralMessage( + 'There is one file.', 'There are %d files.', 3), + u"Istnieją 3 pliki.") + self.assertEqual(catalog.getPluralMessage( + 'There is one file.', 'There are %d files.', 17), + u"Istnieją 17 plików.") + self.assertEqual(catalog.getPluralMessage( + 'There is one file.', 'There are %d files.', 23), + u"Istnieją 23 pliki.") + self.assertEqual(catalog.getPluralMessage( + 'There is one file.', 'There are %d files.', 28), + u"Istnieją 28 plików.") + + def test_floater(self): + """Test with the number being a float. + We can use %f or %s to make sure it works. + """ + catalog = self._getMessageCatalog('en') + self.assertEqual(catalog.language, 'en') + + # It's cast to integer because of the %d in the translation string. + self.assertEqual(catalog.getPluralMessage( + 'There is one file.', 'There are %d files.', 1.0), + 'There is one file.') + self.assertEqual(catalog.getPluralMessage( + 'There is one file.', 'There are %d files.', 3.5), + 'There are 3 files.') + + # It's cast to a string because of the %s in the translation string. + self.assertEqual(catalog.getPluralMessage( + 'The item is rated 1/5 star.', + 'The item is rated %s/5 stars.', 3.5), + 'The item is rated 3.5/5 stars.') + + # It's cast either to an int or a float because of the %s in + # the translation string. 
+ self.assertEqual(catalog.getPluralMessage( + 'There is %d chance.', + 'There are %f chances.', 1.5), + 'There are 1.500000 chances.') + self.assertEqual(catalog.getPluralMessage( + 'There is %d chance.', + 'There are %f chances.', 3.5), + 'There are 3.500000 chances.') + + def test_translate_without_defaults(self): + domain = self._getTranslationDomain('en') + zope.component.provideUtility(domain, ITranslationDomain, 'default') + self.assertEqual( + translate('One apple', domain='default', + msgid_plural='%d apples', number=0), + '0 apples') + self.assertEqual( + translate('One apple', domain='default', + msgid_plural='%d apples', number=1), + 'One apple') + self.assertEqual( + translate('One apple', domain='default', + msgid_plural='%d apples', number=2), + '2 apples') + + def test_translate_with_defaults(self): + domain = self._getTranslationDomain('en') + zope.component.provideUtility(domain, ITranslationDomain, 'default') + self.assertEqual( + translate('One apple', domain='default', + msgid_plural='%d apples', number=0, + default='One fruit', default_plural='%d fruits'), + '0 fruits') + self.assertEqual( + translate('One apple', domain='default', + msgid_plural='%d apples', number=1, + default='One fruit', default_plural='%d fruits'), + 'One fruit') + self.assertEqual( + translate('One apple', domain='default', + msgid_plural='%d apples', number=2, + default='One fruit', default_plural='%d fruits'), + '2 fruits') + + def test_translate_message_without_defaults(self): + domain = self._getTranslationDomain('en') + factory = MessageFactory('default') + zope.component.provideUtility(domain, ITranslationDomain, 'default') + self.assertEqual( + translate(factory('One apple', msgid_plural='%d apples', + number=0)), + '0 apples') + self.assertEqual( + translate(factory('One apple', msgid_plural='%d apples', + number=1)), + 'One apple') + self.assertEqual( + translate(factory('One apple', msgid_plural='%d apples', + number=2)), + '2 apples') + + def 
test_translate_message_with_defaults(self): + domain = self._getTranslationDomain('en') + factory = MessageFactory('default') + zope.component.provideUtility(domain, ITranslationDomain, 'default') + self.assertEqual( + translate(factory('One apple', msgid_plural='%d apples', number=0, + default='One fruit', + default_plural='%d fruits')), + '0 fruits') + self.assertEqual( + translate(factory('One apple', msgid_plural='%d apples', number=1, + default='One fruit', + default_plural='%d fruits')), + 'One fruit') + self.assertEqual( + translate(factory('One apple', msgid_plural='%d apples', number=2, + default='One fruit', + default_plural='%d fruits')), + '2 fruits') + + def test_translate_recursive(self): + domain = self._getTranslationDomain('en') + factory = MessageFactory('default') + + # Singular + banana = factory('banana', msgid_plural='bananas', number=1) + phrase = factory('There is %d ${type}.', + msgid_plural='There are %d ${type}.', + number=1, mapping={'type': banana}) + self.assertEqual( + domain.translate(phrase, target_language="en"), + 'There is 1 banana.') + + # Plural + apple = factory('apple', msgid_plural='apples', number=10) + phrase = factory('There is %d ${type}.', + msgid_plural='There are %d ${type}.', + number=10, mapping={'type': apple}) + self.assertEqual( + domain.translate(phrase, target_language="en"), + 'There are 10 apples.') + + # Straight translation with translatable mapping + apple = factory('apple', msgid_plural='apples', number=75) + self.assertEqual( + domain.translate(msgid='There is %d ${type}.', + msgid_plural='There are %d ${type}.', + mapping={'type': apple}, + target_language="en", number=75), + 'There are 75 apples.') + + # Add another catalog, to test the domain's catalogs iteration + # We add this catalog in first position, to resolve the translations + # there first. 
+ alt_en = self._getMessageCatalog('en', variant="alt") + domain._data[alt_en.getIdentifier()] = alt_en + domain._catalogs[alt_en.language].insert(0, alt_en.getIdentifier()) + + apple = factory('apple', msgid_plural='apples', number=42) + self.assertEqual( + domain.translate(msgid='There is %d ${type}.', + msgid_plural='There are %d ${type}.', + mapping={'type': apple}, + target_language="de", number=42), + 'There are 42 oranges.') diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/tests/test_simpletranslationdomain.py b/thesisenv/lib/python3.6/site-packages/zope/i18n/tests/test_simpletranslationdomain.py new file mode 100644 index 0000000..7c87c40 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/tests/test_simpletranslationdomain.py @@ -0,0 +1,48 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""This module tests the regular persistent Translation Domain. 
+""" +import unittest +from zope.i18n.simpletranslationdomain import SimpleTranslationDomain +from zope.i18n.tests.test_itranslationdomain import TestITranslationDomain + + +data = { + ('en', 'short_greeting'): 'Hello!', + ('de', 'short_greeting'): 'Hallo!', + ('en', 'greeting'): 'Hello $name, how are you?', + ('de', 'greeting'): 'Hallo $name, wie geht es Dir?'} + + +class TestSimpleTranslationDomain(unittest.TestCase, TestITranslationDomain): + + def setUp(self): + TestITranslationDomain.setUp(self) + + def tearDown(self): + TestITranslationDomain.tearDown(self) + + def _getTranslationDomain(self): + domain = SimpleTranslationDomain('default', data) + return domain + + +def test_suite(): + suite = unittest.TestSuite() + suite.addTest(unittest.makeSuite(TestSimpleTranslationDomain)) + return suite + + +if __name__ == '__main__': + unittest.TextTestRunner().run(test_suite()) diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/tests/test_testmessagecatalog.py b/thesisenv/lib/python3.6/site-packages/zope/i18n/tests/test_testmessagecatalog.py new file mode 100644 index 0000000..ce5adc6 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/tests/test_testmessagecatalog.py @@ -0,0 +1,24 @@ +############################################################################## +# +# Copyright (c) 2004 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. 
+# +############################################################################## + +import unittest +import doctest + +def test_suite(): + return unittest.TestSuite(( + doctest.DocFileSuite('../testmessagecatalog.rst') + )) + +if __name__ == '__main__': + unittest.main(defaultTest='test_suite') diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/tests/test_translationdomain.py b/thesisenv/lib/python3.6/site-packages/zope/i18n/tests/test_translationdomain.py new file mode 100644 index 0000000..f8efbd9 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/tests/test_translationdomain.py @@ -0,0 +1,199 @@ +############################################################################## +# +# Copyright (c) 2001-2008 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""This module tests the regular persistent Translation Domain. 
+""" +import unittest +import os +from zope.i18n.translationdomain import TranslationDomain +from zope.i18n.gettextmessagecatalog import GettextMessageCatalog +from zope.i18n.tests.test_itranslationdomain import \ + TestITranslationDomain, Environment +from zope.i18nmessageid import MessageFactory +from zope.i18n.interfaces import ITranslationDomain + +import zope.component + +testdir = os.path.dirname(__file__) + +en_file = os.path.join(testdir, 'en-default.mo') +de_file = os.path.join(testdir, 'de-default.mo') + + +class TestGlobalTranslationDomain(TestITranslationDomain, unittest.TestCase): + + def _getTranslationDomain(self): + domain = TranslationDomain('default') + en_catalog = GettextMessageCatalog('en', 'default', + en_file) + de_catalog = GettextMessageCatalog('de', 'default', + de_file) + domain.addCatalog(en_catalog) + domain.addCatalog(de_catalog) + return domain + + def testNoTargetLanguage(self): + # Having a fallback would interfere with this test + self._domain.setLanguageFallbacks([]) + TestITranslationDomain.testNoTargetLanguage(self) + + def testSimpleNoTranslate(self): + translate = self._domain.translate + eq = self.assertEqual + # Unset fallback translation languages + self._domain.setLanguageFallbacks([]) + + # Test that a translation in an unsupported language returns the + # default, if there is no fallback language + eq(translate('short_greeting', target_language='es'), 'short_greeting') + eq(translate('short_greeting', target_language='es', + default='short_greeting'), 'short_greeting') + + # Same test, but use the context argument instead of target_language + context = Environment() + eq(translate('short_greeting', context=context), 'short_greeting') + eq(translate('short_greeting', context=context, + default='short_greeting'), 'short_greeting') + + def testEmptyStringTranslate(self): + translate = self._domain.translate + self.assertEqual(translate(u"", target_language='en'), u"") + self.assertEqual(translate(u"", 
target_language='foo'), u"") + + def testStringTranslate(self): + self.assertEqual( + self._domain.translate(u"short_greeting", target_language='en'), + u"Hello!") + + def testMessageIDTranslate(self): + factory = MessageFactory('default') + translate = self._domain.translate + msgid = factory(u"short_greeting", 'default') + self.assertEqual(translate(msgid, target_language='en'), u"Hello!") + # MessageID attributes override arguments + msgid = factory('43-not-there', 'this ${that} the other', + mapping={'that': 'THAT'}) + self.assertEqual( + translate(msgid, target_language='en', default="default", + mapping={"that": "that"}), "this THAT the other") + + def testMessageIDRecursiveTranslate(self): + factory = MessageFactory('default') + translate = self._domain.translate + msgid_sub1 = factory(u"44-not-there", '${blue}', + mapping={'blue': 'BLUE'}) + msgid_sub2 = factory(u"45-not-there", '${yellow}', + mapping={'yellow': 'YELLOW'}) + mapping = {'color1': msgid_sub1, + 'color2': msgid_sub2} + msgid = factory(u"46-not-there", 'Color: ${color1}/${color2}', + mapping=mapping) + self.assertEqual( + translate(msgid, target_language='en', default="default"), + "Color: BLUE/YELLOW") + # The recursive translation must not change the mappings + self.assertEqual(msgid.mapping, {'color1': msgid_sub1, + 'color2': msgid_sub2}) + # A circular reference should not lead to crashes + msgid1 = factory(u"47-not-there", 'Message 1 and $msg2', + mapping={}) + msgid2 = factory(u"48-not-there", 'Message 2 and $msg1', + mapping={}) + msgid1.mapping['msg2'] = msgid2 + msgid2.mapping['msg1'] = msgid1 + self.assertRaises(ValueError, + translate, msgid1, None, None, 'en', "default") + # Recursive translations also work if the original message id wasn't a + # message id but a Unicode with a directly passed mapping + self.assertEqual( + "Color: BLUE/YELLOW", + translate(u"Color: ${color1}/${color2}", mapping=mapping, + target_language='en')) + + # If we have mapping with a message id from a 
different + # domain, make sure we use that domain, not ours. If the + # message domain is not registered yet, we should return a + # default translation. + alt_factory = MessageFactory('alt') + msgid_sub = alt_factory(u"special", default=u"oohhh") + mapping = {'message': msgid_sub} + msgid = factory(u"46-not-there", 'Message: ${message}', + mapping=mapping) + # test we get a default with no domain registered + self.assertEqual( + translate(msgid, target_language='en', default="default"), + "Message: oohhh") + # provide the domain + domain = TranslationDomain('alt') + path = testdir + en_catalog = GettextMessageCatalog('en', 'alt', + os.path.join(path, 'en-alt.mo')) + domain.addCatalog(en_catalog) + # test that we get the right translation + zope.component.provideUtility(domain, ITranslationDomain, 'alt') + self.assertEqual( + translate(msgid, target_language='en', default="default"), + "Message: Wow") + + def testMessageIDTranslateForDifferentDomain(self): + domain = TranslationDomain('alt') + path = testdir + en_catalog = GettextMessageCatalog('en', 'alt', + os.path.join(path, 'en-alt.mo')) + domain.addCatalog(en_catalog) + + zope.component.provideUtility(domain, ITranslationDomain, 'alt') + + factory = MessageFactory('alt') + msgid = factory(u"special", 'default') + self.assertEqual( + self._domain.translate(msgid, target_language='en'), u"Wow") + + def testSimpleFallbackTranslation(self): + translate = self._domain.translate + eq = self.assertEqual + # Test that a translation in an unsupported language returns a + # translation in the fallback language (by default, English) + eq(translate('short_greeting', target_language='es'), + u"Hello!") + # Same test, but use the context argument instead of target_language + context = Environment() + eq(translate('short_greeting', context=context), + u"Hello!") + + def testInterpolationWithoutTranslation(self): + translate = self._domain.translate + self.assertEqual( + translate('42-not-there', target_language="en", + 
default="this ${that} the other", + mapping={"that": "THAT"}), + "this THAT the other") + + def test_getCatalogInfos(self): + cats = self._domain.getCatalogsInfo() + self.assertEqual( + cats, + {'en': [en_file], + 'de': [de_file]}) + + def test_releoadCatalogs(self): + # It uses the keys we pass + # so this does nothing + self._domain.reloadCatalogs(()) + + # The catalogNames, somewhat confusingly, are + # the paths to the files. + self._domain.reloadCatalogs((en_file, de_file)) + + with self.assertRaises(KeyError): + self._domain.reloadCatalogs(('dne',)) diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/tests/test_zcml.py b/thesisenv/lib/python3.6/site-packages/zope/i18n/tests/test_zcml.py new file mode 100644 index 0000000..52ee437 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/tests/test_zcml.py @@ -0,0 +1,175 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Test the gts ZCML namespace directives. 
+""" +import doctest +import os +import shutil +import stat +import unittest + +from zope.component import getUtility +from zope.component import queryUtility +from zope.component.testing import PlacelessSetup +from zope.configuration import xmlconfig + +import zope.i18n.tests +from zope.i18n.interfaces import ITranslationDomain +from zope.i18n import config + +text_type = str if bytes is not str else unicode + +template = """\ + + %s +""" + +class DirectivesTest(PlacelessSetup, unittest.TestCase): + + # This test suite needs the [zcml] and [compile] extra dependencies + + def setUp(self): + super(DirectivesTest, self).setUp() + self.context = xmlconfig.file('meta.zcml', zope.i18n) + self.allowed = config.ALLOWED_LANGUAGES + config.ALLOWED_LANGUAGES = None + + def tearDown(self): + super(DirectivesTest, self).tearDown() + config.ALLOWED_LANGUAGES = self.allowed + + def testRegisterTranslations(self): + self.assertTrue(queryUtility(ITranslationDomain) is None) + xmlconfig.string( + template % ''' + + + + ''', self.context) + path = os.path.join(os.path.dirname(zope.i18n.tests.__file__), + 'locale', 'en', 'LC_MESSAGES', 'zope-i18n.mo') + util = getUtility(ITranslationDomain, 'zope-i18n') + self.assertEqual(util._catalogs.get('test'), ['test']) + self.assertEqual(util._catalogs.get('en'), [text_type(path)]) + + def testAllowedTranslations(self): + self.assertTrue(queryUtility(ITranslationDomain) is None) + config.ALLOWED_LANGUAGES = ('de', 'fr') + xmlconfig.string( + template % ''' + + + + ''', self.context) + path = os.path.join(os.path.dirname(zope.i18n.tests.__file__), + 'locale', 'de', 'LC_MESSAGES', 'zope-i18n.mo') + util = getUtility(ITranslationDomain, 'zope-i18n') + self.assertEqual(util._catalogs, + {'test': ['test'], 'de': [text_type(path)]}) + + def testRegisterDistributedTranslations(self): + self.assertTrue(queryUtility(ITranslationDomain, 'zope-i18n') is None) + xmlconfig.string( + template % ''' + + + + ''', self.context) + xmlconfig.string( + template 
% ''' + + + + ''', self.context) + path1 = os.path.join(os.path.dirname(zope.i18n.tests.__file__), + 'locale', 'en', 'LC_MESSAGES', 'zope-i18n.mo') + path2 = os.path.join(os.path.dirname(zope.i18n.tests.__file__), + 'locale2', 'en', 'LC_MESSAGES', 'zope-i18n.mo') + util = getUtility(ITranslationDomain, 'zope-i18n') + self.assertEqual(util._catalogs.get('test'), ['test', 'test']) + self.assertEqual(util._catalogs.get('en'), + [text_type(path1), text_type(path2)]) + + msg = util.translate(u"Additional message", target_language='en') + self.assertEqual(msg, u"Additional message translated") + + msg = util.translate(u"New Domain", target_language='en') + self.assertEqual(msg, u"New Domain translated") + + msg = util.translate(u"New Language", target_language='en') + self.assertEqual(msg, u"New Language translated") + + def testRegisterAndCompileTranslations(self): + config.COMPILE_MO_FILES = True + self.assertTrue(queryUtility(ITranslationDomain) is None) + + # Copy an old and outdated file over, so we can test if the + # newer file check works + testpath = os.path.join(os.path.dirname(zope.i18n.tests.__file__)) + basepath = os.path.join(testpath, 'locale3', 'en', 'LC_MESSAGES') + in_ = os.path.join(basepath, 'zope-i18n.in') + path = os.path.join(basepath, 'zope-i18n.mo') + shutil.copy2(in_, path) + + # Make sure the older mo file always has an older time stamp + # than the po file + path_atime = os.stat(path)[stat.ST_ATIME] + path_mtime = os.stat(path)[stat.ST_MTIME] + os.utime(path, (path_atime, path_mtime - 6000)) + + xmlconfig.string( + template % ''' + + + + ''', self.context) + util = getUtility(ITranslationDomain, 'zope-i18n') + self.assertEqual(util._catalogs, + {'test': ['test'], 'en': [text_type(path)]}) + + msg = util.translate(u"I'm a newer file", target_language='en') + self.assertEqual(msg, u"I'm a newer file translated") + + util = getUtility(ITranslationDomain, 'zope-i18n2') + msg = util.translate(u"I'm a new file", target_language='en') + 
self.assertEqual(msg, u"I'm a new file translated") + + # Reset the mtime of the mo file + os.utime(path, (path_atime, path_mtime)) + + def testRegisterTranslationsForDomain(self): + self.assertTrue(queryUtility(ITranslationDomain, 'zope-i18n') is None) + self.assertTrue(queryUtility(ITranslationDomain, 'zope-i18n2') is None) + xmlconfig.string( + template % ''' + + + + ''', self.context) + path = os.path.join(os.path.dirname(zope.i18n.tests.__file__), + 'locale3', 'en', 'LC_MESSAGES', 'zope-i18n.mo') + util = getUtility(ITranslationDomain, 'zope-i18n') + self.assertEqual(util._catalogs, + {'test': ['test'], 'en': [text_type(path)]}) + + self.assertTrue(queryUtility(ITranslationDomain, 'zope-i18n2') is None) + + +def test_suite(): + return unittest.TestSuite(( + unittest.defaultTestLoader.loadTestsFromName(__name__), + doctest.DocFileSuite('configure.txt'), + )) diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/tests/testi18nawareobject.py b/thesisenv/lib/python3.6/site-packages/zope/i18n/tests/testi18nawareobject.py new file mode 100644 index 0000000..21a61f4 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/tests/testi18nawareobject.py @@ -0,0 +1,99 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""This is a test for the II18nAware interface. 
+""" +import unittest + +from zope.i18n.interfaces import II18nAware +from zope.interface import implementer + + +@implementer(II18nAware) +class I18nAwareContentObject(object): + + def __init__(self): + self.content = {} + self.defaultLanguage = 'en' + + def getContent(self, language): + return self.content[language] + + def queryContent(self, language, default=None): + return self.content.get(language, default) + + def setContent(self, content, language): + self.content[language] = content + + ############################################################ + # Implementation methods for interface + # II18nAware.py + + def getDefaultLanguage(self): + 'See II18nAware' + return self.defaultLanguage + + def setDefaultLanguage(self, language): + 'See II18nAware' + self.defaultLanguage = language + + def getAvailableLanguages(self): + 'See II18nAware' + return self.content.keys() + + # + ############################################################ + +class AbstractTestII18nAwareMixin(object): + + def setUp(self): + self.object = self._createObject() + self.object.setDefaultLanguage('fr') + + def _createObject(self): + # Should create an object that has lt, en and fr as available + # languages + raise NotImplementedError() + + def testGetDefaultLanguage(self): + self.assertEqual(self.object.getDefaultLanguage(), 'fr') + + def testSetDefaultLanguage(self): + self.object.setDefaultLanguage('lt') + self.assertEqual(self.object.getDefaultLanguage(), 'lt') + + def testGetAvailableLanguages(self): + self.assertEqual(sorted(self.object.getAvailableLanguages()), ['en', 'fr', 'lt']) + + +class TestI18nAwareObject(AbstractTestII18nAwareMixin, unittest.TestCase): + + def _createObject(self): + object = I18nAwareContentObject() + object.setContent('English', 'en') + object.setContent('Lithuanian', 'lt') + object.setContent('French', 'fr') + return object + + def testSetContent(self): + self.object.setContent('German', 'de') + self.assertEqual(self.object.content['de'], 'German') + + 
def testGetContent(self): + self.assertEqual(self.object.getContent('en'), 'English') + with self.assertRaises(KeyError): + self.object.getContent('es') + + def testQueryContent(self): + self.assertEqual(self.object.queryContent('en'), 'English') + self.assertEqual(self.object.queryContent('es', 'N/A'), 'N/A') diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/translationdomain.py b/thesisenv/lib/python3.6/site-packages/zope/i18n/translationdomain.py new file mode 100644 index 0000000..b9287f0 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/translationdomain.py @@ -0,0 +1,173 @@ +############################################################################## +# +# Copyright (c) 2001-2008 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Global Translation Service for providing I18n to file-based code. +""" + +import zope.component +import zope.interface + +from zope.i18nmessageid import Message +from zope.i18n import translate, interpolate +from zope.i18n.interfaces import ITranslationDomain, INegotiator + + +# The configuration should specify a list of fallback languages for the +# site. If a particular catalog for a negotiated language is not available, +# then the zcml specified order should be tried. If that fails, then as a +# last resort the languages in the following list are tried. If these fail +# too, then the msgid is returned. +# +# Note that these fallbacks are used only to find a catalog. 
If a particular +# message in a catalog is not translated, tough luck, you get the msgid. +LANGUAGE_FALLBACKS = ['en'] + +text_type = str if bytes is not str else unicode + + +@zope.interface.implementer(ITranslationDomain) +class TranslationDomain(object): + + def __init__(self, domain, fallbacks=None): + self.domain = ( + domain.decode("utf-8") if isinstance(domain, bytes) else domain) + # _catalogs maps (language, domain) to IMessageCatalog instances + self._catalogs = {} + # _data maps IMessageCatalog.getIdentifier() to IMessageCatalog + self._data = {} + # What languages to fallback to, if there is no catalog for the + # requested language (no fallback on individual messages) + self.setLanguageFallbacks(fallbacks) + + def _registerMessageCatalog(self, language, catalog_name): + key = language + mc = self._catalogs.setdefault(key, []) + mc.append(catalog_name) + + def addCatalog(self, catalog): + self._data[catalog.getIdentifier()] = catalog + self._registerMessageCatalog(catalog.language, + catalog.getIdentifier()) + + def setLanguageFallbacks(self, fallbacks=None): + if fallbacks is None: + fallbacks = LANGUAGE_FALLBACKS + self._fallbacks = fallbacks + + def translate(self, msgid, mapping=None, context=None, + target_language=None, default=None, + msgid_plural=None, default_plural=None, number=None): + """See zope.i18n.interfaces.ITranslationDomain""" + # if the msgid is empty, let's save a lot of calculations and return + # an empty string. 
+ if msgid == u'': + return u'' + + if target_language is None and context is not None: + langs = self._catalogs.keys() + # invoke local or global unnamed 'INegotiator' utilities + negotiator = zope.component.getUtility(INegotiator) + # try to determine target language from negotiator utility + target_language = negotiator.getLanguage(langs, context) + + return self._recursive_translate( + msgid, mapping, target_language, default, context, + msgid_plural, default_plural, number) + + def _recursive_translate(self, msgid, mapping, target_language, default, + context, msgid_plural, default_plural, number, + seen=None): + """Recursively translate msg.""" + # MessageID attributes override arguments + if isinstance(msgid, Message): + if msgid.domain != self.domain: + return translate( + msgid, msgid.domain, mapping, context, target_language, + default, msgid_plural, default_plural, number) + default = msgid.default + mapping = msgid.mapping + msgid_plural = msgid.msgid_plural + default_plural = msgid.default_plural + number = msgid.number + + # Recursively translate mappings, if they are translatable + if (mapping is not None + and Message in (type(m) for m in mapping.values())): + if seen is None: + seen = set() + seen.add((msgid, msgid_plural)) + mapping = mapping.copy() + for key, value in mapping.items(): + if isinstance(value, Message): + # TODO Why isn't there an IMessage interface? + # https://bugs.launchpad.net/zope3/+bug/220122 + if (value, value.msgid_plural) in seen: + raise ValueError( + "Circular reference in mappings detected: %s" % + value) + mapping[key] = self._recursive_translate( + value, mapping, target_language, default, context, + msgid_plural, default_plural, number, seen) + + if default is None: + default = text_type(msgid) + if msgid_plural is not None and default_plural is None: + default_plural = text_type(msgid_plural) + + # Get the translation. 
Use the specified fallbacks if this fails + catalog_names = self._catalogs.get(target_language) + if catalog_names is None: + for language in self._fallbacks: + catalog_names = self._catalogs.get(language) + if catalog_names is not None: + break + + text = default + if catalog_names: + if len(catalog_names) == 1: + # this is a slight optimization for the case when there is a + # single catalog. More importantly, it is extremely helpful + # when testing and the test language is used, because it + # allows the test language to get the default. + if msgid_plural is not None: + # This is a plural + text = self._data[catalog_names[0]].queryPluralMessage( + msgid, msgid_plural, number, default, default_plural) + else: + text = self._data[catalog_names[0]].queryMessage( + msgid, default) + else: + for name in catalog_names: + catalog = self._data[name] + if msgid_plural is not None: + # This is a plural + s = catalog.queryPluralMessage( + msgid, msgid_plural, number, + default, default_plural) + else: + s = catalog.queryMessage(msgid) + if s is not None: + text = s + break + + # Now we need to do the interpolation + if text and mapping: + text = interpolate(text, mapping) + return text + + def getCatalogsInfo(self): + return self._catalogs + + def reloadCatalogs(self, catalogNames): + for catalogName in catalogNames: + self._data[catalogName].reload() diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18n/zcml.py b/thesisenv/lib/python3.6/site-packages/zope/i18n/zcml.py new file mode 100644 index 0000000..4df9ccb --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18n/zcml.py @@ -0,0 +1,127 @@ + +# ############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""This module handles the 'i18n' namespace directives. +""" +__docformat__ = 'restructuredtext' + +import os +import logging +from glob import glob + +from zope.component import getSiteManager +from zope.component import queryUtility +from zope.component.interface import provideInterface +from zope.configuration.fields import Path +from zope.interface import Interface +from zope.schema import TextLine + +from zope.i18n import config +from zope.i18n.compile import compile_mo_file +from zope.i18n.gettextmessagecatalog import GettextMessageCatalog +from zope.i18n.testmessagecatalog import TestMessageCatalog +from zope.i18n.translationdomain import TranslationDomain +from zope.i18n.interfaces import ITranslationDomain + + +logger = logging.getLogger("zope.i18n") + + +class IRegisterTranslationsDirective(Interface): + """Register translations with the global site manager.""" + + directory = Path( + title=u"Directory", + description=u"Directory containing the translations", + required=True + ) + + domain = TextLine( + title=u"Domain", + description=(u"Translation domain to register. 
If not specified, " + u"all domains found in the directory are registered"), + required=False + ) + + +def allow_language(lang): + if config.ALLOWED_LANGUAGES is None: + return True + return lang in config.ALLOWED_LANGUAGES + + +def handler(catalogs, name): + """ special handler handling the merging of two message catalogs """ + gsm = getSiteManager() + # Try to get an existing domain and add the given catalogs to it + domain = queryUtility(ITranslationDomain, name) + if domain is None: + domain = TranslationDomain(name) + gsm.registerUtility(domain, ITranslationDomain, name=name) + for catalog in catalogs: + domain.addCatalog(catalog) + # make sure we have a TEST catalog for each domain: + domain.addCatalog(TestMessageCatalog(name)) + + +def registerTranslations(_context, directory, domain='*'): + path = os.path.normpath(directory) + domains = {} + + loaded = False + # Gettext has the domain-specific catalogs inside the language directory, + # which is exactly the opposite as we need it. So create a dictionary that + # reverses the nesting. 
+ for language in os.listdir(path): + if not allow_language(language): + continue + lc_messages_path = os.path.join(path, language, 'LC_MESSAGES') + if os.path.isdir(lc_messages_path): + # Preprocess files and update or compile the mo files + if config.COMPILE_MO_FILES: + for domain_path in glob(os.path.join(lc_messages_path, + '%s.po' % domain)): + domain_file = os.path.basename(domain_path) + name = domain_file[:-3] + compile_mo_file(name, lc_messages_path) + for domain_path in glob(os.path.join(lc_messages_path, + '%s.mo' % domain)): + loaded = True + domain_file = os.path.basename(domain_path) + name = domain_file[:-3] + if name not in domains: + domains[name] = {} + domains[name][language] = domain_path + if loaded: + logger.debug('register directory %s', directory) + + # Now create TranslationDomain objects and add them as utilities + for name, langs in domains.items(): + catalogs = [] + for lang, file in langs.items(): + catalogs.append(GettextMessageCatalog(lang, name, file)) + # register the necessary actions directly (as opposed to using + # `zope.component.zcml.utility`) since we need the actual utilities + # in place before the merging can be done... + _context.action( + discriminator=None, + callable=handler, + args=(catalogs, name)) + + # also register the interface for the translation utilities + provides = ITranslationDomain + _context.action( + discriminator=None, + callable=provideInterface, + args=(provides.__module__ + '.' + provides.getName(), provides)) diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18nmessageid/__init__.py b/thesisenv/lib/python3.6/site-packages/zope/i18nmessageid/__init__.py new file mode 100644 index 0000000..1de49ef --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18nmessageid/__init__.py @@ -0,0 +1,19 @@ +############################################################################## +# +# Copyright (c) 2004 Zope Foundation and Contributors. +# All Rights Reserved. 
+# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""I18n Messages +""" +from zope.i18nmessageid.message import Message, MessageFactory + +# import this as _ to create i18n messages in the zope domain +ZopeMessageFactory = MessageFactory('zope') diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18nmessageid/_zope_i18nmessageid_message.c b/thesisenv/lib/python3.6/site-packages/zope/i18nmessageid/_zope_i18nmessageid_message.c new file mode 100644 index 0000000..66cd93d --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18nmessageid/_zope_i18nmessageid_message.c @@ -0,0 +1,320 @@ +/*############################################################################ +# +# Copyright (c) 2004 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. 
+# +############################################################################*/ + +#include "Python.h" + + +#if PY_MAJOR_VERSION >= 3 + #define MOD_ERROR_VAL NULL +#else + #define MOD_ERROR_VAL +#endif + +typedef struct { + PyUnicodeObject base; + PyObject *domain; + PyObject *default_; + PyObject *mapping; + PyObject *value_plural; + PyObject *default_plural; + PyObject *number; +} Message; + +static PyTypeObject MessageType; + +static PyObject * +Message_new(PyTypeObject *type, PyObject *args, PyObject *kwds) +{ + static char *kwlist[] = {"value", "domain", "default", "mapping", + "msgid_plural", "default_plural", "number", NULL}; + PyObject *value, *domain=NULL, *default_=NULL, *mapping=NULL, *s; + PyObject *value_plural=NULL, *default_plural=NULL, *number=NULL; + Message *self; + + if (!PyArg_ParseTupleAndKeywords(args, kwds, "O|OOOOOO", kwlist, + &value, &domain, &default_, &mapping, + &value_plural, &default_plural, &number)) + return NULL; + + if (number != NULL && Py_None != number) { +#if PY_MAJOR_VERSION >= 3 + if (!(PyLong_Check(number) || PyFloat_Check(number))) { +#else + if (!(PyLong_Check(number) || PyInt_Check(number) || PyFloat_Check(number))) { +#endif + PyErr_SetString(PyExc_TypeError, + "`number` should be an integer or a float"); + return NULL; + } + } + + args = Py_BuildValue("(O)", value); + if (args == NULL) + return NULL; + + s = PyUnicode_Type.tp_new(type, args, NULL); + Py_DECREF(args); + if (s == NULL) + return NULL; + + if (!PyObject_TypeCheck(s, &MessageType)) { + PyErr_SetString(PyExc_TypeError, "unicode.__new__ didn't return a Message"); + Py_DECREF(s); + return NULL; + } + + self = (Message*)s; + + if (PyObject_TypeCheck(value, &MessageType)) { + /* value is a Message so we copy it and use it as base */ + self->domain = ((Message *)value)->domain; + self->default_ = ((Message *)value)->default_; + self->mapping = ((Message *)value)->mapping; + self->value_plural = ((Message *)value)->value_plural; + self->default_plural = 
((Message *)value)->default_plural; + self->number = ((Message *)value)->number; + } + else { + self->domain = NULL; + self->default_ = NULL; + self->mapping = NULL; + self->value_plural = NULL; + self->default_plural = NULL; + self->number = NULL; + } + + if (domain != NULL) + self->domain = domain; + + if (default_ != NULL) + self->default_ = default_; + + if (mapping != NULL) + self->mapping = mapping; + + if (value_plural != NULL) + self->value_plural = value_plural; + + if (default_plural != NULL) + self->default_plural = default_plural; + + if (number != NULL) { + self->number = number; + } + + Py_XINCREF(self->mapping); + Py_XINCREF(self->default_); + Py_XINCREF(self->domain); + Py_XINCREF(self->value_plural); + Py_XINCREF(self->default_plural); + Py_XINCREF(self->number); + + return (PyObject *)self; +} + +/* Code to access structure members by accessing attributes */ + +#include "structmember.h" + +static PyMemberDef Message_members[] = { + { "domain", T_OBJECT, offsetof(Message, domain), READONLY }, + { "default", T_OBJECT, offsetof(Message, default_), READONLY }, + { "mapping", T_OBJECT, offsetof(Message, mapping), READONLY }, + { "msgid_plural", T_OBJECT, offsetof(Message, value_plural), READONLY }, + { "default_plural", T_OBJECT, offsetof(Message, default_plural), READONLY }, + { "number", T_OBJECT, offsetof(Message, number), READONLY }, + {NULL} /* Sentinel */ +}; + +static int +Message_traverse(Message *self, visitproc visit, void *arg) +{ + Py_VISIT(self->domain); + Py_VISIT(self->default_); + Py_VISIT(self->mapping); + Py_VISIT(self->value_plural); + Py_VISIT(self->default_plural); + Py_VISIT(self->number); + return 0; +} + +static int +Message_clear(Message *self) +{ + Py_CLEAR(self->domain); + Py_CLEAR(self->default_); + Py_CLEAR(self->mapping); + Py_CLEAR(self->value_plural); + Py_CLEAR(self->default_plural); + Py_CLEAR(self->number); + return 0; +} + +static void +Message_dealloc(Message *self) +{ + PyObject_GC_UnTrack((PyObject *)self); + 
Message_clear(self); + PyUnicode_Type.tp_dealloc((PyObject*)self); +} + +static PyObject * +Message_reduce(Message *self) +{ + PyObject *value, *result; + value = PyObject_CallFunctionObjArgs((PyObject *)&PyUnicode_Type, self, NULL); + if (value == NULL) + return NULL; + result = Py_BuildValue("(O(OOOOOOO))", Py_TYPE(&(self->base)), + value, + self->domain ? self->domain : Py_None, + self->default_ ? self->default_ : Py_None, + self->mapping ? self->mapping : Py_None, + self->value_plural ? self->value_plural : Py_None, + self->default_plural ? self->default_plural : Py_None, + self->number ? self->number : Py_None); + Py_DECREF(value); + return result; +} + +static PyMethodDef Message_methods[] = { + {"__reduce__", (PyCFunction)Message_reduce, METH_NOARGS, + "Reduce messages to a serializable form."}, + {NULL} /* Sentinel */ +}; + + +static char MessageType__doc__[] = +"Message\n" +"\n" +"This is a string used as a message. It has a domain attribute that is\n" +"its source domain, and a default attribute that is its default text to\n" +"display when there is no translation. domain may be None meaning there is\n" +"no translation domain. default may also be None, in which case the\n" +"message id itself implicitly serves as the default text.\n"; + +static PyTypeObject +MessageType = { + PyVarObject_HEAD_INIT(NULL, 0) + /* tp_name */ "zope.i18nmessageid.message." 
+ "Message", + /* tp_basicsize */ sizeof(Message), + /* tp_itemsize */ 0, + /* tp_dealloc */ (destructor)&Message_dealloc, + /* tp_print */ (printfunc)0, + /* tp_getattr */ (getattrfunc)0, + /* tp_setattr */ (setattrfunc)0, + /* tp_compare */ 0, + /* tp_repr */ (reprfunc)0, + /* tp_as_number */ 0, + /* tp_as_sequence */ 0, + /* tp_as_mapping */ 0, + /* tp_hash */ (hashfunc)0, + /* tp_call */ (ternaryfunc)0, + /* tp_str */ (reprfunc)0, + /* tp_getattro */ (getattrofunc)0, + /* tp_setattro */ (setattrofunc)0, + /* tp_as_buffer */ 0, + /* tp_flags */ Py_TPFLAGS_DEFAULT + | Py_TPFLAGS_BASETYPE + | Py_TPFLAGS_HAVE_GC, + /* tp_doc */ MessageType__doc__, + /* tp_traverse */ (traverseproc)Message_traverse, + /* tp_clear */ (inquiry)Message_clear, + /* tp_richcompare */ (richcmpfunc)0, + /* tp_weaklistoffset */ (long)0, + /* tp_iter */ (getiterfunc)0, + /* tp_iternext */ (iternextfunc)0, + /* tp_methods */ Message_methods, + /* tp_members */ Message_members, + /* tp_getset */ 0, + /* tp_base */ 0, + /* tp_dict */ 0, /* internal use */ + /* tp_descr_get */ (descrgetfunc)0, + /* tp_descr_set */ (descrsetfunc)0, + /* tp_dictoffset */ 0, + /* tp_init */ (initproc)0, + /* tp_alloc */ (allocfunc)0, + /* tp_new */ (newfunc)Message_new, + /* tp_free */ 0, /* Low-level free-mem routine */ + /* tp_is_gc */ (inquiry)0, /* For PyObject_IS_GC */ +}; + +/* End of code for Message objects */ +/* -------------------------------------------------------- */ + + +/* List of methods defined in the module */ +static struct PyMethodDef _zope_i18nmessageid_message_methods[] = { + {NULL, (PyCFunction)NULL, 0, NULL}, /* sentinel */ +}; + +static char _zope_i18nmessageid_message_module_name[] = +"_zope_i18nmessageid_message"; + +static char _zope_i18nmessageid_message_module_documentation[] = +"I18n Messages"; + +#if PY_MAJOR_VERSION >= 3 + static struct PyModuleDef moduledef = { + PyModuleDef_HEAD_INIT, + _zope_i18nmessageid_message_module_name,/* m_name */ + 
_zope_i18nmessageid_message_module_documentation,/* m_doc */ + -1,/* m_size */ + _zope_i18nmessageid_message_methods,/* m_methods */ + NULL,/* m_reload */ + NULL,/* m_traverse */ + NULL,/* m_clear */ + NULL,/* m_free */ + }; +#endif + +#ifndef PyMODINIT_FUNC /* declarations for DLL import/export */ + #define PyMODINIT_FUNC void +#endif + +PyMODINIT_FUNC +#if PY_MAJOR_VERSION >= 3 + PyInit__zope_i18nmessageid_message(void) +#else + init_zope_i18nmessageid_message(void) +#endif +{ + PyObject *m; + /* Initialize types: */ + MessageType.tp_base = &PyUnicode_Type; + if (PyType_Ready(&MessageType) < 0) + return MOD_ERROR_VAL; + + /* Create the module and add the functions */ +#if PY_MAJOR_VERSION >= 3 + m = PyModule_Create(&moduledef); +#else + m = Py_InitModule3(_zope_i18nmessageid_message_module_name, + _zope_i18nmessageid_message_methods, + _zope_i18nmessageid_message_module_documentation); +#endif + + if (m == NULL) + return MOD_ERROR_VAL; + + /* Add types: */ + if (PyModule_AddObject(m, "Message", (PyObject *)&MessageType) < 0) + return MOD_ERROR_VAL; + +#if PY_MAJOR_VERSION >= 3 + return m; +#endif + +} diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18nmessageid/_zope_i18nmessageid_message.cpython-36m-darwin.so b/thesisenv/lib/python3.6/site-packages/zope/i18nmessageid/_zope_i18nmessageid_message.cpython-36m-darwin.so new file mode 100755 index 0000000..6e18f44 Binary files /dev/null and b/thesisenv/lib/python3.6/site-packages/zope/i18nmessageid/_zope_i18nmessageid_message.cpython-36m-darwin.so differ diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18nmessageid/message.py b/thesisenv/lib/python3.6/site-packages/zope/i18nmessageid/message.py new file mode 100644 index 0000000..70a5053 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18nmessageid/message.py @@ -0,0 +1,112 @@ +############################################################################## +# +# Copyright (c) 2004 Zope Foundation and Contributors. 
+# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""I18n Messages and factories. +""" +import six + +__docformat__ = "reStructuredText" +_marker = object() + + +class Message(six.text_type): + """Message (Python implementation) + + This is a string used as a message. It has a domain attribute that is + its source domain, and a default attribute that is its default text to + display when there is no translation. domain may be None meaning there is + no translation domain. default may also be None, in which case the + message id itself implicitly serves as the default text. 
+ """ + + __slots__ = ( + 'domain', 'default', 'mapping', '_readonly', + 'msgid_plural', 'default_plural', 'number') + + def __new__(cls, ustr, domain=_marker, default=_marker, mapping=_marker, + msgid_plural=_marker, default_plural=_marker, number=_marker): + self = six.text_type.__new__(cls, ustr) + if isinstance(ustr, self.__class__): + self.domain = ustr.domain + self.default = ustr.default + self.mapping = ustr.mapping + self.msgid_plural = ustr.msgid_plural + self.default_plural = ustr.default_plural + self.number = ustr.number + else: + self.domain = None + self.default = None + self.mapping = None + self.msgid_plural = None + self.default_plural = None + self.number = None + + if domain is not _marker: + self.domain = domain + if default is not _marker: + self.default = default + if mapping is not _marker: + self.mapping = mapping + if msgid_plural is not _marker: + self.msgid_plural = msgid_plural + if default_plural is not _marker: + self.default_plural = default_plural + if number is not _marker: + self.number = number + + if self.number is not None and not isinstance( + self.number, six.integer_types + (float,)): + raise TypeError('`number` should be an integer or a float') + + self._readonly = True + return self + + def __setattr__(self, key, value): + """Message is immutable + + It cannot be changed once the message id is created. + """ + if getattr(self, '_readonly', False): + raise TypeError('readonly attribute') + else: + return six.text_type.__setattr__(self, key, value) + + def __getstate__(self): + return ( + six.text_type(self), self.domain, self.default, self.mapping, + self.msgid_plural, self.default_plural, self.number) + + def __reduce__(self): + return self.__class__, self.__getstate__() + + +# Name the fallback Python implementation to make it easier to test. 
+pyMessage = Message + + +try: + from ._zope_i18nmessageid_message import Message +except ImportError: # pragma: no cover + pass + + +class MessageFactory(object): + """Factory for creating i18n messages.""" + + def __init__(self, domain): + self._domain = domain + + def __call__(self, ustr, default=None, mapping=None, + msgid_plural=None, default_plural=None, number=None): + return Message(ustr, self._domain, default, mapping, + msgid_plural, default_plural, number) diff --git a/thesisenv/lib/python3.6/site-packages/zope/i18nmessageid/tests.py b/thesisenv/lib/python3.6/site-packages/zope/i18nmessageid/tests.py new file mode 100644 index 0000000..89ed869 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/i18nmessageid/tests.py @@ -0,0 +1,286 @@ +############################################################################## +# +# Copyright (c) 2003 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Message ID tests. 
+""" +import sys +import unittest +from zope.i18nmessageid import message as messageid + + +class PyMessageTests(unittest.TestCase): + + _TEST_READONLY = True + + def _getTargetClass(self): + return messageid.pyMessage + + def _makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + def test_defaults(self): + message = self._makeOne('testing') + self.assertEqual(message, 'testing') + self.assertEqual(message.domain, None) + self.assertEqual(message.default, None) + self.assertEqual(message.mapping, None) + self.assertEqual(message.msgid_plural, None) + self.assertEqual(message.default_plural, None) + self.assertEqual(message.number, None) + if self._TEST_READONLY: + self.assertTrue(message._readonly) + + def test_values(self): + mapping = {'key': 'value'} + message = self._makeOne( + 'testing', 'domain', 'default', mapping, + msgid_plural='testings', default_plural="defaults", number=2) + self.assertEqual(message, 'testing') + self.assertEqual(message.domain, 'domain') + self.assertEqual(message.default, 'default') + self.assertEqual(message.mapping, mapping) + self.assertEqual(message.msgid_plural, 'testings') + self.assertEqual(message.default_plural, 'defaults') + self.assertEqual(message.number, 2) + if self._TEST_READONLY: + self.assertTrue(message._readonly) + + def test_values_without_defaults(self): + mapping = {'key': 'value'} + message = self._makeOne( + 'testing', 'domain', mapping=mapping, + msgid_plural='testings', number=2) + self.assertEqual(message, 'testing') + self.assertEqual(message.domain, 'domain') + self.assertEqual(message.default, None) + self.assertEqual(message.mapping, mapping) + self.assertEqual(message.msgid_plural, 'testings') + self.assertEqual(message.default_plural, None) + self.assertEqual(message.number, 2) + if self._TEST_READONLY: + self.assertTrue(message._readonly) + + def test_values_with_float_for_number(self): + mapping = {'key': 'value'} + message = self._makeOne( + 'testing', 'domain', 'default', 
mapping, + msgid_plural='testings', default_plural="defaults", number=2.2) + self.assertEqual(message, 'testing') + self.assertEqual(message.domain, 'domain') + self.assertEqual(message.default, 'default') + self.assertEqual(message.mapping, mapping) + self.assertEqual(message.msgid_plural, 'testings') + self.assertEqual(message.default_plural, 'defaults') + self.assertEqual(message.number, 2.2) + if self._TEST_READONLY: + self.assertTrue(message._readonly) + + def test_values_with_zero(self): + mapping = {'key': 'value'} + message = self._makeOne( + 'testing', 'domain', 'default', mapping, + msgid_plural='testings', default_plural="defaults", number=0) + self.assertEqual(message, 'testing') + self.assertEqual(message.domain, 'domain') + self.assertEqual(message.default, 'default') + self.assertEqual(message.mapping, mapping) + self.assertEqual(message.msgid_plural, 'testings') + self.assertEqual(message.default_plural, 'defaults') + self.assertEqual(message.number, 0) + if self._TEST_READONLY: + self.assertTrue(message._readonly) + + def test_copy(self): + mapping = {'key': 'value'} + source = self._makeOne( + 'testing', 'domain', 'default', mapping, + msgid_plural='testings', default_plural="defaults", number=0) + message = self._makeOne(source) + self.assertEqual(message, 'testing') + self.assertEqual(message.domain, 'domain') + self.assertEqual(message.default, 'default') + self.assertEqual(message.mapping, mapping) + self.assertEqual(message.msgid_plural, 'testings') + self.assertEqual(message.default_plural, 'defaults') + self.assertEqual(message.number, 0) + + # Besides just being equal, they maintain their identity + for attr in ( + 'domain', + 'default', + 'mapping', + 'msgid_plural', + 'default_plural', + 'number', + ): + self.assertIs(getattr(source, attr), + getattr(message, attr)) + + if self._TEST_READONLY: + self.assertTrue(message._readonly) + + def test_copy_with_overrides(self): + mapping = {'key': 'value'} + source = self._makeOne( + 'testing', 
'domain', default='other', mapping=mapping, + msgid_plural='workings', default_plural='others', number=3) + message = self._makeOne( + source, mapping=None, msgid_plural='override', number=0) + self.assertEqual(message, 'testing') + self.assertEqual(message.domain, 'domain') + self.assertEqual(message.default, 'other') + self.assertEqual(message.mapping, None) + self.assertEqual(message.msgid_plural, 'override') + self.assertEqual(message.default_plural, 'others') + self.assertEqual(message.number, 0) + if self._TEST_READONLY: + self.assertTrue(message._readonly) + + def test_copy_no_default(self): + # https://github.com/zopefoundation/zope.i18nmessageid/issues/14 + pref_msg = self._makeOne("${name} Preferences") + self.assertIsNone(pref_msg.default) + copy = self._makeOne(pref_msg, mapping={u'name': u'name'}) + self.assertIsNone(copy.default) + + def test_copy_no_overrides(self): + # https://github.com/zopefoundation/zope.i18nmessageid/issues/14 + pref_msg = self._makeOne("${name} Preferences") + + copy = self._makeOne(pref_msg) + for attr in ( + 'domain', + 'default', + 'mapping', + 'msgid_plural', + 'default_plural', + 'number', + ): + self.assertIsNone(getattr(pref_msg, attr)) + self.assertIsNone(getattr(copy, attr)) + + def test_domain_immutable(self): + message = self._makeOne('testing') + with self.assertRaises((TypeError, AttributeError)): + message.domain = 'domain' + + def test_default_immutable(self): + message = self._makeOne('testing') + with self.assertRaises((TypeError, AttributeError)): + message.default = 'default' + + def test_mapping_immutable(self): + mapping = {'key': 'value'} + message = self._makeOne('testing') + with self.assertRaises((TypeError, AttributeError)): + message.mapping = mapping + + def test_msgid_plural_immutable(self): + message = self._makeOne('testing') + with self.assertRaises((TypeError, AttributeError)): + message.msgid_plural = 'bar' + + def test_default_plural_immutable(self): + message = self._makeOne('testing') + with 
self.assertRaises((TypeError, AttributeError)): + message.default_plural = 'bar' + + def test_number_immutable(self): + message = self._makeOne('testing') + with self.assertRaises((TypeError, AttributeError)): + message.number = 23 + + def test_unknown_immutable(self): + message = self._makeOne('testing') + with self.assertRaises((TypeError, AttributeError)): + message.unknown = 'unknown' + + def test___reduce__(self): + mapping = {'key': 'value'} + source = self._makeOne('testing') + message = self._makeOne( + source, 'domain', 'default', mapping, + msgid_plural='testings', default_plural="defaults", number=2) + klass, state = message.__reduce__() + self.assertTrue(klass is self._getTargetClass()) + self.assertEqual( + state, + ('testing', 'domain', 'default', {'key': 'value'}, + 'testings', 'defaults', 2)) + + def test_non_unicode_default(self): + message = self._makeOne(u'str', default=123) + self.assertEqual(message.default, 123) + + def test_non_numeric_number(self): + with self.assertRaises((TypeError, AttributeError)): + self._makeOne(u'str', default=123, number="one") + + +@unittest.skipIf(messageid.Message is messageid.pyMessage, "Duplicate tests") +class MessageTests(PyMessageTests): + + _TEST_READONLY = False + + def _getTargetClass(self): + return messageid.Message + + +@unittest.skipIf('java' in sys.platform or hasattr(sys, 'pypy_version_info'), + "We don't expect the C implementation here") +class OptimizationTests(unittest.TestCase): + + def test_optimizations_available(self): + self.assertIsNot(messageid.Message, messageid.pyMessage) + + +class MessageFactoryTests(unittest.TestCase): + + def _getTargetClass(self): + return messageid.MessageFactory + + def _makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + def test___call___defaults(self): + factory = self._makeOne('domain') + message = factory('testing') + self.assertTrue(isinstance(message, messageid.Message)) + self.assertEqual(message, 'testing') + 
self.assertEqual(message.domain, 'domain') + self.assertEqual(message.default, None) + self.assertEqual(message.mapping, None) + self.assertEqual(message.msgid_plural, None) + self.assertEqual(message.default_plural, None) + self.assertEqual(message.number, None) + + def test___call___explicit(self): + mapping = {'key': 'value'} + factory = self._makeOne('domain') + message = factory( + 'testing', 'default', mapping, + msgid_plural='testings', default_plural="defaults", number=2) + self.assertTrue(isinstance(message, messageid.Message)) + self.assertEqual(message, 'testing') + self.assertEqual(message.domain, 'domain') + self.assertEqual(message.default, 'default') + self.assertEqual(message.mapping, mapping) + self.assertEqual(message.msgid_plural, 'testings') + self.assertEqual(message.default_plural, 'defaults') + self.assertEqual(message.number, 2) + + +def test_suite(): + return unittest.TestSuite(( + unittest.defaultTestLoader.loadTestsFromName(__name__), + )) diff --git a/thesisenv/lib/python3.6/site-packages/zope/interface/__init__.py b/thesisenv/lib/python3.6/site-packages/zope/interface/__init__.py new file mode 100644 index 0000000..605b706 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/interface/__init__.py @@ -0,0 +1,90 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. 
+# +############################################################################## +"""Interfaces + +This package implements the Python "scarecrow" proposal. + +The package exports two objects, `Interface` and `Attribute` directly. It also +exports several helper methods. Interface is used to create an interface with +a class statement, as in: + + class IMyInterface(Interface): + '''Interface documentation + ''' + + def meth(arg1, arg2): + '''Documentation for meth + ''' + + # Note that there is no self argument + +To find out what you can do with interfaces, see the interface +interface, `IInterface` in the `interfaces` module. + +The package has several public modules: + + o `declarations` provides utilities to declare interfaces on objects. It + also provides a wide range of helpful utilities that aid in managing + declared interfaces. Most of its public names are however imported here. + + o `document` has a utility for documenting an interface as structured text. + + o `exceptions` has the interface-defined exceptions + + o `interfaces` contains a list of all public interfaces for this package. + + o `verify` has utilities for verifying implementations of interfaces. + +See the module doc strings for more information. 
+""" +__docformat__ = 'restructuredtext' + +from zope.interface.interface import Interface +from zope.interface.interface import _wire + +# Need to actually get the interface elements to implement the right interfaces +_wire() +del _wire + +from zope.interface.declarations import Declaration +from zope.interface.declarations import alsoProvides +from zope.interface.declarations import classImplements +from zope.interface.declarations import classImplementsOnly +from zope.interface.declarations import classProvides +from zope.interface.declarations import directlyProvidedBy +from zope.interface.declarations import directlyProvides +from zope.interface.declarations import implementedBy +from zope.interface.declarations import implementer +from zope.interface.declarations import implementer_only +from zope.interface.declarations import implements +from zope.interface.declarations import implementsOnly +from zope.interface.declarations import moduleProvides +from zope.interface.declarations import named +from zope.interface.declarations import noLongerProvides +from zope.interface.declarations import providedBy +from zope.interface.declarations import provider +from zope.interface.exceptions import Invalid +from zope.interface.interface import Attribute +from zope.interface.interface import invariant +from zope.interface.interface import taggedValue + +# The following are to make spec pickles cleaner +from zope.interface.declarations import Provides + + +from zope.interface.interfaces import IInterfaceDeclaration + +moduleProvides(IInterfaceDeclaration) + +__all__ = ('Interface', 'Attribute') + tuple(IInterfaceDeclaration) diff --git a/thesisenv/lib/python3.6/site-packages/zope/interface/_compat.py b/thesisenv/lib/python3.6/site-packages/zope/interface/_compat.py new file mode 100644 index 0000000..fb61e13 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/interface/_compat.py @@ -0,0 +1,58 @@ 
+############################################################################## +# +# Copyright (c) 2006 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Basic components support +""" +import sys +import types + +if sys.version_info[0] < 3: + + def _normalize_name(name): + if isinstance(name, basestring): + return unicode(name) + raise TypeError("name must be a regular or unicode string") + + CLASS_TYPES = (type, types.ClassType) + STRING_TYPES = (basestring,) + + _BUILTINS = '__builtin__' + + PYTHON3 = False + PYTHON2 = True + +else: + + def _normalize_name(name): + if isinstance(name, bytes): + name = str(name, 'ascii') + if isinstance(name, str): + return name + raise TypeError("name must be a string or ASCII-only bytes") + + CLASS_TYPES = (type,) + STRING_TYPES = (str,) + + _BUILTINS = 'builtins' + + PYTHON3 = True + PYTHON2 = False + +def _skip_under_py3k(test_method): + import unittest + return unittest.skipIf(sys.version_info[0] >= 3, "Only on Python 2")(test_method) + + +def _skip_under_py2(test_method): + import unittest + return unittest.skipIf(sys.version_info[0] < 3, "Only on Python 3")(test_method) diff --git a/thesisenv/lib/python3.6/site-packages/zope/interface/_flatten.py b/thesisenv/lib/python3.6/site-packages/zope/interface/_flatten.py new file mode 100644 index 0000000..a80c2de --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/interface/_flatten.py @@ -0,0 +1,35 @@ +############################################################################## 
+# +# Copyright (c) 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Adapter-style interface registry + +See Adapter class. +""" +from zope.interface import Declaration + +def _flatten(implements, include_None=0): + + try: + r = implements.flattened() + except AttributeError: + if implements is None: + r=() + else: + r = Declaration(implements).flattened() + + if not include_None: + return r + + r = list(r) + r.append(None) + return r diff --git a/thesisenv/lib/python3.6/site-packages/zope/interface/_zope_interface_coptimizations.c b/thesisenv/lib/python3.6/site-packages/zope/interface/_zope_interface_coptimizations.c new file mode 100644 index 0000000..b1e955e --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/interface/_zope_interface_coptimizations.c @@ -0,0 +1,1726 @@ +/*########################################################################### + # + # Copyright (c) 2003 Zope Foundation and Contributors. + # All Rights Reserved. + # + # This software is subject to the provisions of the Zope Public License, + # Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. + # THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED + # WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED + # WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS + # FOR A PARTICULAR PURPOSE. 
+ # + ############################################################################*/ + +#include "Python.h" +#include "structmember.h" + +#define TYPE(O) ((PyTypeObject*)(O)) +#define OBJECT(O) ((PyObject*)(O)) +#define CLASSIC(O) ((PyClassObject*)(O)) +#ifndef PyVarObject_HEAD_INIT +#define PyVarObject_HEAD_INIT(a, b) PyObject_HEAD_INIT(a) b, +#endif +#ifndef Py_TYPE +#define Py_TYPE(o) ((o)->ob_type) +#endif + +#if PY_MAJOR_VERSION >= 3 +#define PY3K +#endif + +static PyObject *str__dict__, *str__implemented__, *strextends; +static PyObject *BuiltinImplementationSpecifications, *str__provides__; +static PyObject *str__class__, *str__providedBy__; +static PyObject *empty, *fallback, *str_implied, *str_cls, *str_implements; +static PyObject *str__conform__, *str_call_conform, *adapter_hooks; +static PyObject *str_uncached_lookup, *str_uncached_lookupAll; +static PyObject *str_uncached_subscriptions; +static PyObject *str_registry, *strro, *str_generation, *strchanged; + +static PyTypeObject *Implements; + +static int imported_declarations = 0; + +static int +import_declarations(void) +{ + PyObject *declarations, *i; + + declarations = PyImport_ImportModule("zope.interface.declarations"); + if (declarations == NULL) + return -1; + + BuiltinImplementationSpecifications = PyObject_GetAttrString( + declarations, "BuiltinImplementationSpecifications"); + if (BuiltinImplementationSpecifications == NULL) + return -1; + + empty = PyObject_GetAttrString(declarations, "_empty"); + if (empty == NULL) + return -1; + + fallback = PyObject_GetAttrString(declarations, "implementedByFallback"); + if (fallback == NULL) + return -1; + + + + i = PyObject_GetAttrString(declarations, "Implements"); + if (i == NULL) + return -1; + + if (! 
PyType_Check(i)) + { + PyErr_SetString(PyExc_TypeError, + "zope.interface.declarations.Implements is not a type"); + return -1; + } + + Implements = (PyTypeObject *)i; + + Py_DECREF(declarations); + + imported_declarations = 1; + return 0; +} + +static PyTypeObject SpecType; /* Forward */ + +static PyObject * +implementedByFallback(PyObject *cls) +{ + if (imported_declarations == 0 && import_declarations() < 0) + return NULL; + + return PyObject_CallFunctionObjArgs(fallback, cls, NULL); +} + +static PyObject * +implementedBy(PyObject *ignored, PyObject *cls) +{ + /* Fast retrieval of implements spec, if possible, to optimize + common case. Use fallback code if we get stuck. + */ + + PyObject *dict = NULL, *spec; + + if (PyType_Check(cls)) + { + dict = TYPE(cls)->tp_dict; + Py_XINCREF(dict); + } + + if (dict == NULL) + dict = PyObject_GetAttr(cls, str__dict__); + + if (dict == NULL) + { + /* Probably a security proxied class, use more expensive fallback code */ + PyErr_Clear(); + return implementedByFallback(cls); + } + + spec = PyObject_GetItem(dict, str__implemented__); + Py_DECREF(dict); + if (spec) + { + if (imported_declarations == 0 && import_declarations() < 0) + return NULL; + + if (PyObject_TypeCheck(spec, Implements)) + return spec; + + /* Old-style declaration, use more expensive fallback code */ + Py_DECREF(spec); + return implementedByFallback(cls); + } + + PyErr_Clear(); + + /* Maybe we have a builtin */ + if (imported_declarations == 0 && import_declarations() < 0) + return NULL; + + spec = PyDict_GetItem(BuiltinImplementationSpecifications, cls); + if (spec != NULL) + { + Py_INCREF(spec); + return spec; + } + + /* We're stuck, use fallback */ + return implementedByFallback(cls); +} + +static PyObject * +getObjectSpecification(PyObject *ignored, PyObject *ob) +{ + PyObject *cls, *result; + + result = PyObject_GetAttr(ob, str__provides__); + if (result != NULL && PyObject_TypeCheck(result, &SpecType)) + return result; + + PyErr_Clear(); + + /* We do a 
getattr here so as not to be defeated by proxies */ + cls = PyObject_GetAttr(ob, str__class__); + if (cls == NULL) + { + PyErr_Clear(); + if (imported_declarations == 0 && import_declarations() < 0) + return NULL; + Py_INCREF(empty); + return empty; + } + + result = implementedBy(NULL, cls); + Py_DECREF(cls); + + return result; +} + +static PyObject * +providedBy(PyObject *ignored, PyObject *ob) +{ + PyObject *result, *cls, *cp; + + result = PyObject_GetAttr(ob, str__providedBy__); + if (result == NULL) + { + PyErr_Clear(); + return getObjectSpecification(NULL, ob); + } + + + /* We want to make sure we have a spec. We can't do a type check + because we may have a proxy, so we'll just try to get the + only attribute. + */ + if (PyObject_TypeCheck(result, &SpecType) + || + PyObject_HasAttr(result, strextends) + ) + return result; + + /* + The object's class doesn't understand descriptors. + Sigh. We need to get an object descriptor, but we have to be + careful. We want to use the instance's __provides__,l if + there is one, but only if it didn't come from the class. + */ + Py_DECREF(result); + + cls = PyObject_GetAttr(ob, str__class__); + if (cls == NULL) + return NULL; + + result = PyObject_GetAttr(ob, str__provides__); + if (result == NULL) + { + /* No __provides__, so just fall back to implementedBy */ + PyErr_Clear(); + result = implementedBy(NULL, cls); + Py_DECREF(cls); + return result; + } + + cp = PyObject_GetAttr(cls, str__provides__); + if (cp == NULL) + { + /* The the class has no provides, assume we're done: */ + PyErr_Clear(); + Py_DECREF(cls); + return result; + } + + if (cp == result) + { + /* + Oops, we got the provides from the class. This means + the object doesn't have it's own. We should use implementedBy + */ + Py_DECREF(result); + result = implementedBy(NULL, cls); + } + + Py_DECREF(cls); + Py_DECREF(cp); + + return result; +} + +/* + Get an attribute from an inst dict. Return a borrowed reference. 
+ + This has a number of advantages: + + - It avoids layers of Python api + + - It doesn't waste time looking for descriptors + + - It fails wo raising an exception, although that shouldn't really + matter. + +*/ +static PyObject * +inst_attr(PyObject *self, PyObject *name) +{ + PyObject **dictp, *v; + + dictp = _PyObject_GetDictPtr(self); + if (dictp && *dictp && (v = PyDict_GetItem(*dictp, name))) + return v; + PyErr_SetObject(PyExc_AttributeError, name); + return NULL; +} + + +static PyObject * +Spec_extends(PyObject *self, PyObject *other) +{ + PyObject *implied; + + implied = inst_attr(self, str_implied); + if (implied == NULL) + return NULL; + +#ifdef Py_True + if (PyDict_GetItem(implied, other) != NULL) + { + Py_INCREF(Py_True); + return Py_True; + } + Py_INCREF(Py_False); + return Py_False; +#else + return PyInt_FromLong(PyDict_GetItem(implied, other) != NULL); +#endif +} + +static char Spec_extends__doc__[] = +"Test whether a specification is or extends another" +; + +static char Spec_providedBy__doc__[] = +"Test whether an interface is implemented by the specification" +; + +static PyObject * +Spec_call(PyObject *self, PyObject *args, PyObject *kw) +{ + PyObject *spec; + + if (! PyArg_ParseTuple(args, "O", &spec)) + return NULL; + return Spec_extends(self, spec); +} + +static PyObject * +Spec_providedBy(PyObject *self, PyObject *ob) +{ + PyObject *decl, *item; + + decl = providedBy(NULL, ob); + if (decl == NULL) + return NULL; + + if (PyObject_TypeCheck(decl, &SpecType)) + item = Spec_extends(decl, self); + else + /* decl is probably a security proxy. We have to go the long way + around. + */ + item = PyObject_CallFunctionObjArgs(decl, self, NULL); + + Py_DECREF(decl); + return item; +} + + +static char Spec_implementedBy__doc__[] = +"Test whether the specification is implemented by a class or factory.\n" +"Raise TypeError if argument is neither a class nor a callable." 
+; + +static PyObject * +Spec_implementedBy(PyObject *self, PyObject *cls) +{ + PyObject *decl, *item; + + decl = implementedBy(NULL, cls); + if (decl == NULL) + return NULL; + + if (PyObject_TypeCheck(decl, &SpecType)) + item = Spec_extends(decl, self); + else + item = PyObject_CallFunctionObjArgs(decl, self, NULL); + + Py_DECREF(decl); + return item; +} + +static struct PyMethodDef Spec_methods[] = { + {"providedBy", + (PyCFunction)Spec_providedBy, METH_O, + Spec_providedBy__doc__}, + {"implementedBy", + (PyCFunction)Spec_implementedBy, METH_O, + Spec_implementedBy__doc__}, + {"isOrExtends", (PyCFunction)Spec_extends, METH_O, + Spec_extends__doc__}, + + {NULL, NULL} /* sentinel */ +}; + +static PyTypeObject SpecType = { + PyVarObject_HEAD_INIT(NULL, 0) + /* tp_name */ "_interface_coptimizations." + "SpecificationBase", + /* tp_basicsize */ 0, + /* tp_itemsize */ 0, + /* tp_dealloc */ (destructor)0, + /* tp_print */ (printfunc)0, + /* tp_getattr */ (getattrfunc)0, + /* tp_setattr */ (setattrfunc)0, + /* tp_compare */ 0, + /* tp_repr */ (reprfunc)0, + /* tp_as_number */ 0, + /* tp_as_sequence */ 0, + /* tp_as_mapping */ 0, + /* tp_hash */ (hashfunc)0, + /* tp_call */ (ternaryfunc)Spec_call, + /* tp_str */ (reprfunc)0, + /* tp_getattro */ (getattrofunc)0, + /* tp_setattro */ (setattrofunc)0, + /* tp_as_buffer */ 0, + /* tp_flags */ Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, + "Base type for Specification objects", + /* tp_traverse */ (traverseproc)0, + /* tp_clear */ (inquiry)0, + /* tp_richcompare */ (richcmpfunc)0, + /* tp_weaklistoffset */ (long)0, + /* tp_iter */ (getiterfunc)0, + /* tp_iternext */ (iternextfunc)0, + /* tp_methods */ Spec_methods, +}; + +static PyObject * +OSD_descr_get(PyObject *self, PyObject *inst, PyObject *cls) +{ + PyObject *provides; + + if (inst == NULL) + return getObjectSpecification(NULL, cls); + + provides = PyObject_GetAttr(inst, str__provides__); + if (provides != NULL) + return provides; + PyErr_Clear(); + return 
implementedBy(NULL, cls); +} + +static PyTypeObject OSDType = { + PyVarObject_HEAD_INIT(NULL, 0) + /* tp_name */ "_interface_coptimizations." + "ObjectSpecificationDescriptor", + /* tp_basicsize */ 0, + /* tp_itemsize */ 0, + /* tp_dealloc */ (destructor)0, + /* tp_print */ (printfunc)0, + /* tp_getattr */ (getattrfunc)0, + /* tp_setattr */ (setattrfunc)0, + /* tp_compare */ 0, + /* tp_repr */ (reprfunc)0, + /* tp_as_number */ 0, + /* tp_as_sequence */ 0, + /* tp_as_mapping */ 0, + /* tp_hash */ (hashfunc)0, + /* tp_call */ (ternaryfunc)0, + /* tp_str */ (reprfunc)0, + /* tp_getattro */ (getattrofunc)0, + /* tp_setattro */ (setattrofunc)0, + /* tp_as_buffer */ 0, + /* tp_flags */ Py_TPFLAGS_DEFAULT + | Py_TPFLAGS_BASETYPE , + "Object Specification Descriptor", + /* tp_traverse */ (traverseproc)0, + /* tp_clear */ (inquiry)0, + /* tp_richcompare */ (richcmpfunc)0, + /* tp_weaklistoffset */ (long)0, + /* tp_iter */ (getiterfunc)0, + /* tp_iternext */ (iternextfunc)0, + /* tp_methods */ 0, + /* tp_members */ 0, + /* tp_getset */ 0, + /* tp_base */ 0, + /* tp_dict */ 0, /* internal use */ + /* tp_descr_get */ (descrgetfunc)OSD_descr_get, +}; + +static PyObject * +CPB_descr_get(PyObject *self, PyObject *inst, PyObject *cls) +{ + PyObject *mycls, *implements; + + mycls = inst_attr(self, str_cls); + if (mycls == NULL) + return NULL; + + if (cls == mycls) + { + if (inst == NULL) + { + Py_INCREF(self); + return OBJECT(self); + } + + implements = inst_attr(self, str_implements); + Py_XINCREF(implements); + return implements; + } + + PyErr_SetObject(PyExc_AttributeError, str__provides__); + return NULL; +} + +static PyTypeObject CPBType = { + PyVarObject_HEAD_INIT(NULL, 0) + /* tp_name */ "_interface_coptimizations." 
+ "ClassProvidesBase", + /* tp_basicsize */ 0, + /* tp_itemsize */ 0, + /* tp_dealloc */ (destructor)0, + /* tp_print */ (printfunc)0, + /* tp_getattr */ (getattrfunc)0, + /* tp_setattr */ (setattrfunc)0, + /* tp_compare */ 0, + /* tp_repr */ (reprfunc)0, + /* tp_as_number */ 0, + /* tp_as_sequence */ 0, + /* tp_as_mapping */ 0, + /* tp_hash */ (hashfunc)0, + /* tp_call */ (ternaryfunc)0, + /* tp_str */ (reprfunc)0, + /* tp_getattro */ (getattrofunc)0, + /* tp_setattro */ (setattrofunc)0, + /* tp_as_buffer */ 0, + /* tp_flags */ Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, + "C Base class for ClassProvides", + /* tp_traverse */ (traverseproc)0, + /* tp_clear */ (inquiry)0, + /* tp_richcompare */ (richcmpfunc)0, + /* tp_weaklistoffset */ (long)0, + /* tp_iter */ (getiterfunc)0, + /* tp_iternext */ (iternextfunc)0, + /* tp_methods */ 0, + /* tp_members */ 0, + /* tp_getset */ 0, + /* tp_base */ &SpecType, + /* tp_dict */ 0, /* internal use */ + /* tp_descr_get */ (descrgetfunc)CPB_descr_get, +}; + +/* ==================================================================== */ +/* ========== Begin: __call__ and __adapt__ =========================== */ + +/* + def __adapt__(self, obj): + """Adapt an object to the reciever + """ + if self.providedBy(obj): + return obj + + for hook in adapter_hooks: + adapter = hook(self, obj) + if adapter is not None: + return adapter + + +*/ +static PyObject * +__adapt__(PyObject *self, PyObject *obj) +{ + PyObject *decl, *args, *adapter; + int implements, i, l; + + decl = providedBy(NULL, obj); + if (decl == NULL) + return NULL; + + if (PyObject_TypeCheck(decl, &SpecType)) + { + PyObject *implied; + + implied = inst_attr(decl, str_implied); + if (implied == NULL) + { + Py_DECREF(decl); + return NULL; + } + + implements = PyDict_GetItem(implied, self) != NULL; + Py_DECREF(decl); + } + else + { + /* decl is probably a security proxy. We have to go the long way + around. 
+ */ + PyObject *r; + r = PyObject_CallFunctionObjArgs(decl, self, NULL); + Py_DECREF(decl); + if (r == NULL) + return NULL; + implements = PyObject_IsTrue(r); + Py_DECREF(r); + } + + if (implements) + { + Py_INCREF(obj); + return obj; + } + + l = PyList_GET_SIZE(adapter_hooks); + args = PyTuple_New(2); + if (args == NULL) + return NULL; + Py_INCREF(self); + PyTuple_SET_ITEM(args, 0, self); + Py_INCREF(obj); + PyTuple_SET_ITEM(args, 1, obj); + for (i = 0; i < l; i++) + { + adapter = PyObject_CallObject(PyList_GET_ITEM(adapter_hooks, i), args); + if (adapter == NULL || adapter != Py_None) + { + Py_DECREF(args); + return adapter; + } + Py_DECREF(adapter); + } + + Py_DECREF(args); + + Py_INCREF(Py_None); + return Py_None; +} + +static struct PyMethodDef ib_methods[] = { + {"__adapt__", (PyCFunction)__adapt__, METH_O, + "Adapt an object to the reciever"}, + {NULL, NULL} /* sentinel */ +}; + +/* + def __call__(self, obj, alternate=_marker): + conform = getattr(obj, '__conform__', None) + if conform is not None: + adapter = self._call_conform(conform) + if adapter is not None: + return adapter + + adapter = self.__adapt__(obj) + + if adapter is not None: + return adapter + elif alternate is not _marker: + return alternate + else: + raise TypeError("Could not adapt", obj, self) +*/ +static PyObject * +ib_call(PyObject *self, PyObject *args, PyObject *kwargs) +{ + PyObject *conform, *obj, *alternate=NULL, *adapter; + + static char *kwlist[] = {"obj", "alternate", NULL}; + + if (!PyArg_ParseTupleAndKeywords(args, kwargs, "O|O", kwlist, + &obj, &alternate)) + return NULL; + + conform = PyObject_GetAttr(obj, str__conform__); + if (conform != NULL) + { + adapter = PyObject_CallMethodObjArgs(self, str_call_conform, + conform, NULL); + Py_DECREF(conform); + if (adapter == NULL || adapter != Py_None) + return adapter; + Py_DECREF(adapter); + } + else + PyErr_Clear(); + + adapter = __adapt__(self, obj); + if (adapter == NULL || adapter != Py_None) + return adapter; + 
Py_DECREF(adapter); + + if (alternate != NULL) + { + Py_INCREF(alternate); + return alternate; + } + + adapter = Py_BuildValue("sOO", "Could not adapt", obj, self); + if (adapter != NULL) + { + PyErr_SetObject(PyExc_TypeError, adapter); + Py_DECREF(adapter); + } + return NULL; +} + +static PyTypeObject InterfaceBase = { + PyVarObject_HEAD_INIT(NULL, 0) + /* tp_name */ "_zope_interface_coptimizations." + "InterfaceBase", + /* tp_basicsize */ 0, + /* tp_itemsize */ 0, + /* tp_dealloc */ (destructor)0, + /* tp_print */ (printfunc)0, + /* tp_getattr */ (getattrfunc)0, + /* tp_setattr */ (setattrfunc)0, + /* tp_compare */ 0, + /* tp_repr */ (reprfunc)0, + /* tp_as_number */ 0, + /* tp_as_sequence */ 0, + /* tp_as_mapping */ 0, + /* tp_hash */ (hashfunc)0, + /* tp_call */ (ternaryfunc)ib_call, + /* tp_str */ (reprfunc)0, + /* tp_getattro */ (getattrofunc)0, + /* tp_setattro */ (setattrofunc)0, + /* tp_as_buffer */ 0, + /* tp_flags */ Py_TPFLAGS_DEFAULT + | Py_TPFLAGS_BASETYPE , + /* tp_doc */ "Interface base type providing __call__ and __adapt__", + /* tp_traverse */ (traverseproc)0, + /* tp_clear */ (inquiry)0, + /* tp_richcompare */ (richcmpfunc)0, + /* tp_weaklistoffset */ (long)0, + /* tp_iter */ (getiterfunc)0, + /* tp_iternext */ (iternextfunc)0, + /* tp_methods */ ib_methods, +}; + +/* =================== End: __call__ and __adapt__ ==================== */ +/* ==================================================================== */ + +/* ==================================================================== */ +/* ========================== Begin: Lookup Bases ===================== */ + +typedef struct { + PyObject_HEAD + PyObject *_cache; + PyObject *_mcache; + PyObject *_scache; +} lookup; + +typedef struct { + PyObject_HEAD + PyObject *_cache; + PyObject *_mcache; + PyObject *_scache; + PyObject *_verify_ro; + PyObject *_verify_generations; +} verify; + +static int +lookup_traverse(lookup *self, visitproc visit, void *arg) +{ + int vret; + + if (self->_cache) { + 
vret = visit(self->_cache, arg); + if (vret != 0) + return vret; + } + + if (self->_mcache) { + vret = visit(self->_mcache, arg); + if (vret != 0) + return vret; + } + + if (self->_scache) { + vret = visit(self->_scache, arg); + if (vret != 0) + return vret; + } + + return 0; +} + +static int +lookup_clear(lookup *self) +{ + Py_CLEAR(self->_cache); + Py_CLEAR(self->_mcache); + Py_CLEAR(self->_scache); + return 0; +} + +static void +lookup_dealloc(lookup *self) +{ + PyObject_GC_UnTrack((PyObject *)self); + lookup_clear(self); + Py_TYPE(self)->tp_free((PyObject*)self); +} + +/* + def changed(self, ignored=None): + self._cache.clear() + self._mcache.clear() + self._scache.clear() +*/ +static PyObject * +lookup_changed(lookup *self, PyObject *ignored) +{ + lookup_clear(self); + Py_INCREF(Py_None); + return Py_None; +} + +#define ASSURE_DICT(N) if (N == NULL) { N = PyDict_New(); \ + if (N == NULL) return NULL; \ + } + +/* + def _getcache(self, provided, name): + cache = self._cache.get(provided) + if cache is None: + cache = {} + self._cache[provided] = cache + if name: + c = cache.get(name) + if c is None: + c = {} + cache[name] = c + cache = c + return cache +*/ +static PyObject * +_subcache(PyObject *cache, PyObject *key) +{ + PyObject *subcache; + + subcache = PyDict_GetItem(cache, key); + if (subcache == NULL) + { + int status; + + subcache = PyDict_New(); + if (subcache == NULL) + return NULL; + status = PyDict_SetItem(cache, key, subcache); + Py_DECREF(subcache); + if (status < 0) + return NULL; + } + + return subcache; +} +static PyObject * +_getcache(lookup *self, PyObject *provided, PyObject *name) +{ + PyObject *cache; + + ASSURE_DICT(self->_cache); + cache = _subcache(self->_cache, provided); + if (cache == NULL) + return NULL; + + if (name != NULL && PyObject_IsTrue(name)) + cache = _subcache(cache, name); + + return cache; +} + + +/* + def lookup(self, required, provided, name=u'', default=None): + cache = self._getcache(provided, name) + if len(required) 
== 1: + result = cache.get(required[0], _not_in_mapping) + else: + result = cache.get(tuple(required), _not_in_mapping) + + if result is _not_in_mapping: + result = self._uncached_lookup(required, provided, name) + if len(required) == 1: + cache[required[0]] = result + else: + cache[tuple(required)] = result + + if result is None: + return default + + return result +*/ +static PyObject * +tuplefy(PyObject *v) +{ + if (! PyTuple_Check(v)) + { + v = PyObject_CallFunctionObjArgs(OBJECT(&PyTuple_Type), v, NULL); + if (v == NULL) + return NULL; + } + else + Py_INCREF(v); + + return v; +} +static PyObject * +_lookup(lookup *self, + PyObject *required, PyObject *provided, PyObject *name, + PyObject *default_) +{ + PyObject *result, *key, *cache; + +#ifdef PY3K + if ( name && !PyUnicode_Check(name) ) +#else + if ( name && !PyString_Check(name) && !PyUnicode_Check(name) ) +#endif + { + PyErr_SetString(PyExc_ValueError, + "name is not a string or unicode"); + return NULL; + } + cache = _getcache(self, provided, name); + if (cache == NULL) + return NULL; + + required = tuplefy(required); + if (required == NULL) + return NULL; + + if (PyTuple_GET_SIZE(required) == 1) + key = PyTuple_GET_ITEM(required, 0); + else + key = required; + + result = PyDict_GetItem(cache, key); + if (result == NULL) + { + int status; + + result = PyObject_CallMethodObjArgs(OBJECT(self), str_uncached_lookup, + required, provided, name, NULL); + if (result == NULL) + { + Py_DECREF(required); + return NULL; + } + status = PyDict_SetItem(cache, key, result); + Py_DECREF(required); + if (status < 0) + { + Py_DECREF(result); + return NULL; + } + } + else + { + Py_INCREF(result); + Py_DECREF(required); + } + + if (result == Py_None && default_ != NULL) + { + Py_DECREF(Py_None); + Py_INCREF(default_); + return default_; + } + + return result; +} +static PyObject * +lookup_lookup(lookup *self, PyObject *args, PyObject *kwds) +{ + static char *kwlist[] = {"required", "provided", "name", "default", NULL}; + 
PyObject *required, *provided, *name=NULL, *default_=NULL; + + if (! PyArg_ParseTupleAndKeywords(args, kwds, "OO|OO", kwlist, + &required, &provided, &name, &default_)) + return NULL; + + return _lookup(self, required, provided, name, default_); +} + + +/* + def lookup1(self, required, provided, name=u'', default=None): + cache = self._getcache(provided, name) + result = cache.get(required, _not_in_mapping) + if result is _not_in_mapping: + return self.lookup((required, ), provided, name, default) + + if result is None: + return default + + return result +*/ +static PyObject * +_lookup1(lookup *self, + PyObject *required, PyObject *provided, PyObject *name, + PyObject *default_) +{ + PyObject *result, *cache; + +#ifdef PY3K + if ( name && !PyUnicode_Check(name) ) +#else + if ( name && !PyString_Check(name) && !PyUnicode_Check(name) ) +#endif + { + PyErr_SetString(PyExc_ValueError, + "name is not a string or unicode"); + return NULL; + } + + cache = _getcache(self, provided, name); + if (cache == NULL) + return NULL; + + result = PyDict_GetItem(cache, required); + if (result == NULL) + { + PyObject *tup; + + tup = PyTuple_New(1); + if (tup == NULL) + return NULL; + Py_INCREF(required); + PyTuple_SET_ITEM(tup, 0, required); + result = _lookup(self, tup, provided, name, default_); + Py_DECREF(tup); + } + else + { + if (result == Py_None && default_ != NULL) + { + result = default_; + } + Py_INCREF(result); + } + + return result; +} +static PyObject * +lookup_lookup1(lookup *self, PyObject *args, PyObject *kwds) +{ + static char *kwlist[] = {"required", "provided", "name", "default", NULL}; + PyObject *required, *provided, *name=NULL, *default_=NULL; + + if (! 
PyArg_ParseTupleAndKeywords(args, kwds, "OO|OO", kwlist, + &required, &provided, &name, &default_)) + return NULL; + + return _lookup1(self, required, provided, name, default_); +} + +/* + def adapter_hook(self, provided, object, name=u'', default=None): + required = providedBy(object) + cache = self._getcache(provided, name) + factory = cache.get(required, _not_in_mapping) + if factory is _not_in_mapping: + factory = self.lookup((required, ), provided, name) + + if factory is not None: + result = factory(object) + if result is not None: + return result + + return default +*/ +static PyObject * +_adapter_hook(lookup *self, + PyObject *provided, PyObject *object, PyObject *name, + PyObject *default_) +{ + PyObject *required, *factory, *result; + +#ifdef PY3K + if ( name && !PyUnicode_Check(name) ) +#else + if ( name && !PyString_Check(name) && !PyUnicode_Check(name) ) +#endif + { + PyErr_SetString(PyExc_ValueError, + "name is not a string or unicode"); + return NULL; + } + + required = providedBy(NULL, object); + if (required == NULL) + return NULL; + + factory = _lookup1(self, required, provided, name, Py_None); + Py_DECREF(required); + if (factory == NULL) + return NULL; + + if (factory != Py_None) + { + result = PyObject_CallFunctionObjArgs(factory, object, NULL); + Py_DECREF(factory); + if (result == NULL || result != Py_None) + return result; + } + else + result = factory; /* None */ + + if (default_ == NULL || default_ == result) /* No default specified, */ + return result; /* Return None. result is owned None */ + + Py_DECREF(result); + Py_INCREF(default_); + + return default_; +} +static PyObject * +lookup_adapter_hook(lookup *self, PyObject *args, PyObject *kwds) +{ + static char *kwlist[] = {"provided", "object", "name", "default", NULL}; + PyObject *object, *provided, *name=NULL, *default_=NULL; + + if (! 
PyArg_ParseTupleAndKeywords(args, kwds, "OO|OO", kwlist, + &provided, &object, &name, &default_)) + return NULL; + + return _adapter_hook(self, provided, object, name, default_); +} + +static PyObject * +lookup_queryAdapter(lookup *self, PyObject *args, PyObject *kwds) +{ + static char *kwlist[] = {"object", "provided", "name", "default", NULL}; + PyObject *object, *provided, *name=NULL, *default_=NULL; + + if (! PyArg_ParseTupleAndKeywords(args, kwds, "OO|OO", kwlist, + &object, &provided, &name, &default_)) + return NULL; + + return _adapter_hook(self, provided, object, name, default_); +} + +/* + def lookupAll(self, required, provided): + cache = self._mcache.get(provided) + if cache is None: + cache = {} + self._mcache[provided] = cache + + required = tuple(required) + result = cache.get(required, _not_in_mapping) + if result is _not_in_mapping: + result = self._uncached_lookupAll(required, provided) + cache[required] = result + + return result +*/ +static PyObject * +_lookupAll(lookup *self, PyObject *required, PyObject *provided) +{ + PyObject *cache, *result; + + ASSURE_DICT(self->_mcache); + cache = _subcache(self->_mcache, provided); + if (cache == NULL) + return NULL; + + required = tuplefy(required); + if (required == NULL) + return NULL; + + result = PyDict_GetItem(cache, required); + if (result == NULL) + { + int status; + + result = PyObject_CallMethodObjArgs(OBJECT(self), str_uncached_lookupAll, + required, provided, NULL); + if (result == NULL) + { + Py_DECREF(required); + return NULL; + } + status = PyDict_SetItem(cache, required, result); + Py_DECREF(required); + if (status < 0) + { + Py_DECREF(result); + return NULL; + } + } + else + { + Py_INCREF(result); + Py_DECREF(required); + } + + return result; +} +static PyObject * +lookup_lookupAll(lookup *self, PyObject *args, PyObject *kwds) +{ + static char *kwlist[] = {"required", "provided", NULL}; + PyObject *required, *provided; + + if (! 
PyArg_ParseTupleAndKeywords(args, kwds, "OO", kwlist, + &required, &provided)) + return NULL; + + return _lookupAll(self, required, provided); +} + +/* + def subscriptions(self, required, provided): + cache = self._scache.get(provided) + if cache is None: + cache = {} + self._scache[provided] = cache + + required = tuple(required) + result = cache.get(required, _not_in_mapping) + if result is _not_in_mapping: + result = self._uncached_subscriptions(required, provided) + cache[required] = result + + return result +*/ +static PyObject * +_subscriptions(lookup *self, PyObject *required, PyObject *provided) +{ + PyObject *cache, *result; + + ASSURE_DICT(self->_scache); + cache = _subcache(self->_scache, provided); + if (cache == NULL) + return NULL; + + required = tuplefy(required); + if (required == NULL) + return NULL; + + result = PyDict_GetItem(cache, required); + if (result == NULL) + { + int status; + + result = PyObject_CallMethodObjArgs( + OBJECT(self), str_uncached_subscriptions, + required, provided, NULL); + if (result == NULL) + { + Py_DECREF(required); + return NULL; + } + status = PyDict_SetItem(cache, required, result); + Py_DECREF(required); + if (status < 0) + { + Py_DECREF(result); + return NULL; + } + } + else + { + Py_INCREF(result); + Py_DECREF(required); + } + + return result; +} +static PyObject * +lookup_subscriptions(lookup *self, PyObject *args, PyObject *kwds) +{ + static char *kwlist[] = {"required", "provided", NULL}; + PyObject *required, *provided; + + if (! 
PyArg_ParseTupleAndKeywords(args, kwds, "OO", kwlist, + &required, &provided)) + return NULL; + + return _subscriptions(self, required, provided); +} + +static struct PyMethodDef lookup_methods[] = { + {"changed", (PyCFunction)lookup_changed, METH_O, ""}, + {"lookup", (PyCFunction)lookup_lookup, METH_KEYWORDS | METH_VARARGS, ""}, + {"lookup1", (PyCFunction)lookup_lookup1, METH_KEYWORDS | METH_VARARGS, ""}, + {"queryAdapter", (PyCFunction)lookup_queryAdapter, METH_KEYWORDS | METH_VARARGS, ""}, + {"adapter_hook", (PyCFunction)lookup_adapter_hook, METH_KEYWORDS | METH_VARARGS, ""}, + {"lookupAll", (PyCFunction)lookup_lookupAll, METH_KEYWORDS | METH_VARARGS, ""}, + {"subscriptions", (PyCFunction)lookup_subscriptions, METH_KEYWORDS | METH_VARARGS, ""}, + {NULL, NULL} /* sentinel */ +}; + +static PyTypeObject LookupBase = { + PyVarObject_HEAD_INIT(NULL, 0) + /* tp_name */ "_zope_interface_coptimizations." + "LookupBase", + /* tp_basicsize */ sizeof(lookup), + /* tp_itemsize */ 0, + /* tp_dealloc */ (destructor)&lookup_dealloc, + /* tp_print */ (printfunc)0, + /* tp_getattr */ (getattrfunc)0, + /* tp_setattr */ (setattrfunc)0, + /* tp_compare */ 0, + /* tp_repr */ (reprfunc)0, + /* tp_as_number */ 0, + /* tp_as_sequence */ 0, + /* tp_as_mapping */ 0, + /* tp_hash */ (hashfunc)0, + /* tp_call */ (ternaryfunc)0, + /* tp_str */ (reprfunc)0, + /* tp_getattro */ (getattrofunc)0, + /* tp_setattro */ (setattrofunc)0, + /* tp_as_buffer */ 0, + /* tp_flags */ Py_TPFLAGS_DEFAULT + | Py_TPFLAGS_BASETYPE + | Py_TPFLAGS_HAVE_GC, + /* tp_doc */ "", + /* tp_traverse */ (traverseproc)lookup_traverse, + /* tp_clear */ (inquiry)lookup_clear, + /* tp_richcompare */ (richcmpfunc)0, + /* tp_weaklistoffset */ (long)0, + /* tp_iter */ (getiterfunc)0, + /* tp_iternext */ (iternextfunc)0, + /* tp_methods */ lookup_methods, +}; + +static int +verifying_traverse(verify *self, visitproc visit, void *arg) +{ + int vret; + + vret = lookup_traverse((lookup *)self, visit, arg); + if (vret != 0) + return 
vret; + + if (self->_verify_ro) { + vret = visit(self->_verify_ro, arg); + if (vret != 0) + return vret; + } + if (self->_verify_generations) { + vret = visit(self->_verify_generations, arg); + if (vret != 0) + return vret; + } + + return 0; +} + +static int +verifying_clear(verify *self) +{ + lookup_clear((lookup *)self); + Py_CLEAR(self->_verify_generations); + Py_CLEAR(self->_verify_ro); + return 0; +} + + +static void +verifying_dealloc(verify *self) +{ + PyObject_GC_UnTrack((PyObject *)self); + verifying_clear(self); + Py_TYPE(self)->tp_free((PyObject*)self); +} + +/* + def changed(self, originally_changed): + super(VerifyingBasePy, self).changed(originally_changed) + self._verify_ro = self._registry.ro[1:] + self._verify_generations = [r._generation for r in self._verify_ro] +*/ +static PyObject * +_generations_tuple(PyObject *ro) +{ + int i, l; + PyObject *generations; + + l = PyTuple_GET_SIZE(ro); + generations = PyTuple_New(l); + for (i=0; i < l; i++) + { + PyObject *generation; + + generation = PyObject_GetAttr(PyTuple_GET_ITEM(ro, i), str_generation); + if (generation == NULL) + { + Py_DECREF(generations); + return NULL; + } + PyTuple_SET_ITEM(generations, i, generation); + } + + return generations; +} +static PyObject * +verifying_changed(verify *self, PyObject *ignored) +{ + PyObject *t, *ro; + + verifying_clear(self); + + t = PyObject_GetAttr(OBJECT(self), str_registry); + if (t == NULL) + return NULL; + ro = PyObject_GetAttr(t, strro); + Py_DECREF(t); + if (ro == NULL) + return NULL; + + t = PyObject_CallFunctionObjArgs(OBJECT(&PyTuple_Type), ro, NULL); + Py_DECREF(ro); + if (t == NULL) + return NULL; + + ro = PyTuple_GetSlice(t, 1, PyTuple_GET_SIZE(t)); + Py_DECREF(t); + if (ro == NULL) + return NULL; + + self->_verify_generations = _generations_tuple(ro); + if (self->_verify_generations == NULL) + { + Py_DECREF(ro); + return NULL; + } + + self->_verify_ro = ro; + + Py_INCREF(Py_None); + return Py_None; +} + +/* + def _verify(self): + if 
([r._generation for r in self._verify_ro] + != self._verify_generations): + self.changed(None) +*/ +static int +_verify(verify *self) +{ + PyObject *changed_result; + + if (self->_verify_ro != NULL && self->_verify_generations != NULL) + { + PyObject *generations; + int changed; + + generations = _generations_tuple(self->_verify_ro); + if (generations == NULL) + return -1; + + changed = PyObject_RichCompareBool(self->_verify_generations, + generations, Py_NE); + Py_DECREF(generations); + if (changed == -1) + return -1; + + if (changed == 0) + return 0; + } + + changed_result = PyObject_CallMethodObjArgs(OBJECT(self), strchanged, + Py_None, NULL); + if (changed_result == NULL) + return -1; + + Py_DECREF(changed_result); + return 0; +} + +static PyObject * +verifying_lookup(verify *self, PyObject *args, PyObject *kwds) +{ + static char *kwlist[] = {"required", "provided", "name", "default", NULL}; + PyObject *required, *provided, *name=NULL, *default_=NULL; + + if (! PyArg_ParseTupleAndKeywords(args, kwds, "OO|OO", kwlist, + &required, &provided, &name, &default_)) + return NULL; + + if (_verify(self) < 0) + return NULL; + + return _lookup((lookup *)self, required, provided, name, default_); +} + +static PyObject * +verifying_lookup1(verify *self, PyObject *args, PyObject *kwds) +{ + static char *kwlist[] = {"required", "provided", "name", "default", NULL}; + PyObject *required, *provided, *name=NULL, *default_=NULL; + + if (! PyArg_ParseTupleAndKeywords(args, kwds, "OO|OO", kwlist, + &required, &provided, &name, &default_)) + return NULL; + + if (_verify(self) < 0) + return NULL; + + return _lookup1((lookup *)self, required, provided, name, default_); +} + +static PyObject * +verifying_adapter_hook(verify *self, PyObject *args, PyObject *kwds) +{ + static char *kwlist[] = {"provided", "object", "name", "default", NULL}; + PyObject *object, *provided, *name=NULL, *default_=NULL; + + if (! 
PyArg_ParseTupleAndKeywords(args, kwds, "OO|OO", kwlist, + &provided, &object, &name, &default_)) + return NULL; + + if (_verify(self) < 0) + return NULL; + + return _adapter_hook((lookup *)self, provided, object, name, default_); +} + +static PyObject * +verifying_queryAdapter(verify *self, PyObject *args, PyObject *kwds) +{ + static char *kwlist[] = {"object", "provided", "name", "default", NULL}; + PyObject *object, *provided, *name=NULL, *default_=NULL; + + if (! PyArg_ParseTupleAndKeywords(args, kwds, "OO|OO", kwlist, + &object, &provided, &name, &default_)) + return NULL; + + if (_verify(self) < 0) + return NULL; + + return _adapter_hook((lookup *)self, provided, object, name, default_); +} + +static PyObject * +verifying_lookupAll(verify *self, PyObject *args, PyObject *kwds) +{ + static char *kwlist[] = {"required", "provided", NULL}; + PyObject *required, *provided; + + if (! PyArg_ParseTupleAndKeywords(args, kwds, "OO", kwlist, + &required, &provided)) + return NULL; + + if (_verify(self) < 0) + return NULL; + + return _lookupAll((lookup *)self, required, provided); +} + +static PyObject * +verifying_subscriptions(verify *self, PyObject *args, PyObject *kwds) +{ + static char *kwlist[] = {"required", "provided", NULL}; + PyObject *required, *provided; + + if (! 
PyArg_ParseTupleAndKeywords(args, kwds, "OO", kwlist, + &required, &provided)) + return NULL; + + if (_verify(self) < 0) + return NULL; + + return _subscriptions((lookup *)self, required, provided); +} + +static struct PyMethodDef verifying_methods[] = { + {"changed", (PyCFunction)verifying_changed, METH_O, ""}, + {"lookup", (PyCFunction)verifying_lookup, METH_KEYWORDS | METH_VARARGS, ""}, + {"lookup1", (PyCFunction)verifying_lookup1, METH_KEYWORDS | METH_VARARGS, ""}, + {"queryAdapter", (PyCFunction)verifying_queryAdapter, METH_KEYWORDS | METH_VARARGS, ""}, + {"adapter_hook", (PyCFunction)verifying_adapter_hook, METH_KEYWORDS | METH_VARARGS, ""}, + {"lookupAll", (PyCFunction)verifying_lookupAll, METH_KEYWORDS | METH_VARARGS, ""}, + {"subscriptions", (PyCFunction)verifying_subscriptions, METH_KEYWORDS | METH_VARARGS, ""}, + {NULL, NULL} /* sentinel */ +}; + +static PyTypeObject VerifyingBase = { + PyVarObject_HEAD_INIT(NULL, 0) + /* tp_name */ "_zope_interface_coptimizations." + "VerifyingBase", + /* tp_basicsize */ sizeof(verify), + /* tp_itemsize */ 0, + /* tp_dealloc */ (destructor)&verifying_dealloc, + /* tp_print */ (printfunc)0, + /* tp_getattr */ (getattrfunc)0, + /* tp_setattr */ (setattrfunc)0, + /* tp_compare */ 0, + /* tp_repr */ (reprfunc)0, + /* tp_as_number */ 0, + /* tp_as_sequence */ 0, + /* tp_as_mapping */ 0, + /* tp_hash */ (hashfunc)0, + /* tp_call */ (ternaryfunc)0, + /* tp_str */ (reprfunc)0, + /* tp_getattro */ (getattrofunc)0, + /* tp_setattro */ (setattrofunc)0, + /* tp_as_buffer */ 0, + /* tp_flags */ Py_TPFLAGS_DEFAULT + | Py_TPFLAGS_BASETYPE + | Py_TPFLAGS_HAVE_GC, + /* tp_doc */ "", + /* tp_traverse */ (traverseproc)verifying_traverse, + /* tp_clear */ (inquiry)verifying_clear, + /* tp_richcompare */ (richcmpfunc)0, + /* tp_weaklistoffset */ (long)0, + /* tp_iter */ (getiterfunc)0, + /* tp_iternext */ (iternextfunc)0, + /* tp_methods */ verifying_methods, + /* tp_members */ 0, + /* tp_getset */ 0, + /* tp_base */ &LookupBase, +}; + +/* 
========================== End: Lookup Bases ======================= */ +/* ==================================================================== */ + + + +static struct PyMethodDef m_methods[] = { + {"implementedBy", (PyCFunction)implementedBy, METH_O, + "Interfaces implemented by a class or factory.\n" + "Raises TypeError if argument is neither a class nor a callable."}, + {"getObjectSpecification", (PyCFunction)getObjectSpecification, METH_O, + "Get an object's interfaces (internal api)"}, + {"providedBy", (PyCFunction)providedBy, METH_O, + "Get an object's interfaces"}, + + {NULL, (PyCFunction)NULL, 0, NULL} /* sentinel */ +}; + +#if PY_MAJOR_VERSION >= 3 +static char module_doc[] = "C optimizations for zope.interface\n\n"; + +static struct PyModuleDef _zic_module = { + PyModuleDef_HEAD_INIT, + "_zope_interface_coptimizations", + module_doc, + -1, + m_methods, + NULL, + NULL, + NULL, + NULL +}; +#endif + +static PyObject * +init(void) +{ + PyObject *m; + +#if PY_MAJOR_VERSION < 3 +#define DEFINE_STRING(S) \ + if(! (str ## S = PyString_FromString(# S))) return NULL +#else +#define DEFINE_STRING(S) \ + if(! 
(str ## S = PyUnicode_FromString(# S))) return NULL +#endif + + DEFINE_STRING(__dict__); + DEFINE_STRING(__implemented__); + DEFINE_STRING(__provides__); + DEFINE_STRING(__class__); + DEFINE_STRING(__providedBy__); + DEFINE_STRING(extends); + DEFINE_STRING(_implied); + DEFINE_STRING(_implements); + DEFINE_STRING(_cls); + DEFINE_STRING(__conform__); + DEFINE_STRING(_call_conform); + DEFINE_STRING(_uncached_lookup); + DEFINE_STRING(_uncached_lookupAll); + DEFINE_STRING(_uncached_subscriptions); + DEFINE_STRING(_registry); + DEFINE_STRING(_generation); + DEFINE_STRING(ro); + DEFINE_STRING(changed); +#undef DEFINE_STRING + adapter_hooks = PyList_New(0); + if (adapter_hooks == NULL) + return NULL; + + /* Initialize types: */ + SpecType.tp_new = PyBaseObject_Type.tp_new; + if (PyType_Ready(&SpecType) < 0) + return NULL; + OSDType.tp_new = PyBaseObject_Type.tp_new; + if (PyType_Ready(&OSDType) < 0) + return NULL; + CPBType.tp_new = PyBaseObject_Type.tp_new; + if (PyType_Ready(&CPBType) < 0) + return NULL; + + InterfaceBase.tp_new = PyBaseObject_Type.tp_new; + if (PyType_Ready(&InterfaceBase) < 0) + return NULL; + + LookupBase.tp_new = PyBaseObject_Type.tp_new; + if (PyType_Ready(&LookupBase) < 0) + return NULL; + + VerifyingBase.tp_new = PyBaseObject_Type.tp_new; + if (PyType_Ready(&VerifyingBase) < 0) + return NULL; + + #if PY_MAJOR_VERSION < 3 + /* Create the module and add the functions */ + m = Py_InitModule3("_zope_interface_coptimizations", m_methods, + "C optimizations for zope.interface\n\n"); + #else + m = PyModule_Create(&_zic_module); + #endif + if (m == NULL) + return NULL; + + /* Add types: */ + if (PyModule_AddObject(m, "SpecificationBase", OBJECT(&SpecType)) < 0) + return NULL; + if (PyModule_AddObject(m, "ObjectSpecificationDescriptor", + (PyObject *)&OSDType) < 0) + return NULL; + if (PyModule_AddObject(m, "ClassProvidesBase", OBJECT(&CPBType)) < 0) + return NULL; + if (PyModule_AddObject(m, "InterfaceBase", OBJECT(&InterfaceBase)) < 0) + return NULL; + 
if (PyModule_AddObject(m, "LookupBase", OBJECT(&LookupBase)) < 0) + return NULL; + if (PyModule_AddObject(m, "VerifyingBase", OBJECT(&VerifyingBase)) < 0) + return NULL; + if (PyModule_AddObject(m, "adapter_hooks", adapter_hooks) < 0) + return NULL; + return m; +} + +PyMODINIT_FUNC +#if PY_MAJOR_VERSION < 3 +init_zope_interface_coptimizations(void) +{ + init(); +} +#else +PyInit__zope_interface_coptimizations(void) +{ + return init(); +} +#endif diff --git a/thesisenv/lib/python3.6/site-packages/zope/interface/_zope_interface_coptimizations.cpython-36m-darwin.so b/thesisenv/lib/python3.6/site-packages/zope/interface/_zope_interface_coptimizations.cpython-36m-darwin.so new file mode 100755 index 0000000..a55fba5 Binary files /dev/null and b/thesisenv/lib/python3.6/site-packages/zope/interface/_zope_interface_coptimizations.cpython-36m-darwin.so differ diff --git a/thesisenv/lib/python3.6/site-packages/zope/interface/adapter.py b/thesisenv/lib/python3.6/site-packages/zope/interface/adapter.py new file mode 100644 index 0000000..aae3155 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/interface/adapter.py @@ -0,0 +1,712 @@ +############################################################################## +# +# Copyright (c) 2004 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. 
+# +############################################################################## +"""Adapter management +""" +import weakref + +from zope.interface import implementer +from zope.interface import providedBy +from zope.interface import Interface +from zope.interface import ro +from zope.interface.interfaces import IAdapterRegistry + +from zope.interface._compat import _normalize_name +from zope.interface._compat import STRING_TYPES + +_BLANK = u'' + +class BaseAdapterRegistry(object): + + # List of methods copied from lookup sub-objects: + _delegated = ('lookup', 'queryMultiAdapter', 'lookup1', 'queryAdapter', + 'adapter_hook', 'lookupAll', 'names', + 'subscriptions', 'subscribers') + + # All registries maintain a generation that can be used by verifying + # registries + _generation = 0 + + def __init__(self, bases=()): + + # The comments here could be improved. Possibly this bit needs + # explaining in a separate document, as the comments here can + # be quite confusing. /regebro + + # {order -> {required -> {provided -> {name -> value}}}} + # Here "order" is actually an index in a list, "required" and + # "provided" are interfaces, and "required" is really a nested + # key. So, for example: + # for order == 0 (that is, self._adapters[0]), we have: + # {provided -> {name -> value}} + # but for order == 2 (that is, self._adapters[2]), we have: + # {r1 -> {r2 -> {provided -> {name -> value}}}} + # + self._adapters = [] + + # {order -> {required -> {provided -> {name -> [value]}}}} + # where the remarks about adapters above apply + self._subscribers = [] + + # Set, with a reference count, keeping track of the interfaces + # for which we have provided components: + self._provided = {} + + # Create ``_v_lookup`` object to perform lookup. We make this a + # separate object to to make it easier to implement just the + # lookup functionality in C. This object keeps track of cache + # invalidation data in two kinds of registries. 
+ + # Invalidating registries have caches that are invalidated + # when they or their base registies change. An invalidating + # registry can only have invalidating registries as bases. + # See LookupBaseFallback below for the pertinent logic. + + # Verifying registies can't rely on getting invalidation messages, + # so have to check the generations of base registries to determine + # if their cache data are current. See VerifyingBasePy below + # for the pertinent object. + self._createLookup() + + # Setting the bases causes the registries described above + # to be initialized (self._setBases -> self.changed -> + # self._v_lookup.changed). + + self.__bases__ = bases + + def _setBases(self, bases): + self.__dict__['__bases__'] = bases + self.ro = ro.ro(self) + self.changed(self) + + __bases__ = property(lambda self: self.__dict__['__bases__'], + lambda self, bases: self._setBases(bases), + ) + + def _createLookup(self): + self._v_lookup = self.LookupClass(self) + for name in self._delegated: + self.__dict__[name] = getattr(self._v_lookup, name) + + def changed(self, originally_changed): + self._generation += 1 + self._v_lookup.changed(originally_changed) + + def register(self, required, provided, name, value): + if not isinstance(name, STRING_TYPES): + raise ValueError('name is not a string') + if value is None: + self.unregister(required, provided, name, value) + return + + required = tuple(map(_convert_None_to_Interface, required)) + name = _normalize_name(name) + order = len(required) + byorder = self._adapters + while len(byorder) <= order: + byorder.append({}) + components = byorder[order] + key = required + (provided,) + + for k in key: + d = components.get(k) + if d is None: + d = {} + components[k] = d + components = d + + if components.get(name) is value: + return + + components[name] = value + + n = self._provided.get(provided, 0) + 1 + self._provided[provided] = n + if n == 1: + self._v_lookup.add_extendor(provided) + + self.changed(self) + + def 
registered(self, required, provided, name=_BLANK): + required = tuple(map(_convert_None_to_Interface, required)) + name = _normalize_name(name) + order = len(required) + byorder = self._adapters + if len(byorder) <= order: + return None + + components = byorder[order] + key = required + (provided,) + + for k in key: + d = components.get(k) + if d is None: + return None + components = d + + return components.get(name) + + def unregister(self, required, provided, name, value=None): + required = tuple(map(_convert_None_to_Interface, required)) + order = len(required) + byorder = self._adapters + if order >= len(byorder): + return False + components = byorder[order] + key = required + (provided,) + + # Keep track of how we got to `components`: + lookups = [] + for k in key: + d = components.get(k) + if d is None: + return + lookups.append((components, k)) + components = d + + old = components.get(name) + if old is None: + return + if (value is not None) and (old is not value): + return + + del components[name] + if not components: + # Clean out empty containers, since we don't want our keys + # to reference global objects (interfaces) unnecessarily. + # This is often a problem when an interface is slated for + # removal; a hold-over entry in the registry can make it + # difficult to remove such interfaces. 
+ for comp, k in reversed(lookups): + d = comp[k] + if d: + break + else: + del comp[k] + while byorder and not byorder[-1]: + del byorder[-1] + n = self._provided[provided] - 1 + if n == 0: + del self._provided[provided] + self._v_lookup.remove_extendor(provided) + else: + self._provided[provided] = n + + self.changed(self) + + def subscribe(self, required, provided, value): + required = tuple(map(_convert_None_to_Interface, required)) + name = _BLANK + order = len(required) + byorder = self._subscribers + while len(byorder) <= order: + byorder.append({}) + components = byorder[order] + key = required + (provided,) + + for k in key: + d = components.get(k) + if d is None: + d = {} + components[k] = d + components = d + + components[name] = components.get(name, ()) + (value, ) + + if provided is not None: + n = self._provided.get(provided, 0) + 1 + self._provided[provided] = n + if n == 1: + self._v_lookup.add_extendor(provided) + + self.changed(self) + + def unsubscribe(self, required, provided, value=None): + required = tuple(map(_convert_None_to_Interface, required)) + order = len(required) + byorder = self._subscribers + if order >= len(byorder): + return + components = byorder[order] + key = required + (provided,) + + # Keep track of how we got to `components`: + lookups = [] + for k in key: + d = components.get(k) + if d is None: + return + lookups.append((components, k)) + components = d + + old = components.get(_BLANK) + if not old: + # this is belt-and-suspenders against the failure of cleanup below + return # pragma: no cover + + if value is None: + new = () + else: + new = tuple([v for v in old if v != value]) + + if new == old: + return + + if new: + components[_BLANK] = new + else: + # Instead of setting components[_BLANK] = new, we clean out + # empty containers, since we don't want our keys to + # reference global objects (interfaces) unnecessarily. 
This + # is often a problem when an interface is slated for + # removal; a hold-over entry in the registry can make it + # difficult to remove such interfaces. + del components[_BLANK] + for comp, k in reversed(lookups): + d = comp[k] + if d: + break + else: + del comp[k] + while byorder and not byorder[-1]: + del byorder[-1] + + if provided is not None: + n = self._provided[provided] + len(new) - len(old) + if n == 0: + del self._provided[provided] + self._v_lookup.remove_extendor(provided) + + self.changed(self) + + # XXX hack to fake out twisted's use of a private api. We need to get them + # to use the new registed method. + def get(self, _): # pragma: no cover + class XXXTwistedFakeOut: + selfImplied = {} + return XXXTwistedFakeOut + + +_not_in_mapping = object() +class LookupBaseFallback(object): + + def __init__(self): + self._cache = {} + self._mcache = {} + self._scache = {} + + def changed(self, ignored=None): + self._cache.clear() + self._mcache.clear() + self._scache.clear() + + def _getcache(self, provided, name): + cache = self._cache.get(provided) + if cache is None: + cache = {} + self._cache[provided] = cache + if name: + c = cache.get(name) + if c is None: + c = {} + cache[name] = c + cache = c + return cache + + def lookup(self, required, provided, name=_BLANK, default=None): + if not isinstance(name, STRING_TYPES): + raise ValueError('name is not a string') + cache = self._getcache(provided, name) + required = tuple(required) + if len(required) == 1: + result = cache.get(required[0], _not_in_mapping) + else: + result = cache.get(tuple(required), _not_in_mapping) + + if result is _not_in_mapping: + result = self._uncached_lookup(required, provided, name) + if len(required) == 1: + cache[required[0]] = result + else: + cache[tuple(required)] = result + + if result is None: + return default + + return result + + def lookup1(self, required, provided, name=_BLANK, default=None): + if not isinstance(name, STRING_TYPES): + raise ValueError('name is not 
a string') + cache = self._getcache(provided, name) + result = cache.get(required, _not_in_mapping) + if result is _not_in_mapping: + return self.lookup((required, ), provided, name, default) + + if result is None: + return default + + return result + + def queryAdapter(self, object, provided, name=_BLANK, default=None): + return self.adapter_hook(provided, object, name, default) + + def adapter_hook(self, provided, object, name=_BLANK, default=None): + if not isinstance(name, STRING_TYPES): + raise ValueError('name is not a string') + required = providedBy(object) + cache = self._getcache(provided, name) + factory = cache.get(required, _not_in_mapping) + if factory is _not_in_mapping: + factory = self.lookup((required, ), provided, name) + + if factory is not None: + result = factory(object) + if result is not None: + return result + + return default + + def lookupAll(self, required, provided): + cache = self._mcache.get(provided) + if cache is None: + cache = {} + self._mcache[provided] = cache + + required = tuple(required) + result = cache.get(required, _not_in_mapping) + if result is _not_in_mapping: + result = self._uncached_lookupAll(required, provided) + cache[required] = result + + return result + + + def subscriptions(self, required, provided): + cache = self._scache.get(provided) + if cache is None: + cache = {} + self._scache[provided] = cache + + required = tuple(required) + result = cache.get(required, _not_in_mapping) + if result is _not_in_mapping: + result = self._uncached_subscriptions(required, provided) + cache[required] = result + + return result + +LookupBasePy = LookupBaseFallback # BBB + +try: + from zope.interface._zope_interface_coptimizations import LookupBase +except ImportError: + LookupBase = LookupBaseFallback + + +class VerifyingBaseFallback(LookupBaseFallback): + # Mixin for lookups against registries which "chain" upwards, and + # whose lookups invalidate their own caches whenever a parent registry + # bumps its own '_generation' 
counter. E.g., used by + # zope.component.persistentregistry + + def changed(self, originally_changed): + LookupBaseFallback.changed(self, originally_changed) + self._verify_ro = self._registry.ro[1:] + self._verify_generations = [r._generation for r in self._verify_ro] + + def _verify(self): + if ([r._generation for r in self._verify_ro] + != self._verify_generations): + self.changed(None) + + def _getcache(self, provided, name): + self._verify() + return LookupBaseFallback._getcache(self, provided, name) + + def lookupAll(self, required, provided): + self._verify() + return LookupBaseFallback.lookupAll(self, required, provided) + + def subscriptions(self, required, provided): + self._verify() + return LookupBaseFallback.subscriptions(self, required, provided) + +VerifyingBasePy = VerifyingBaseFallback #BBB + +try: + from zope.interface._zope_interface_coptimizations import VerifyingBase +except ImportError: + VerifyingBase = VerifyingBaseFallback + + +class AdapterLookupBase(object): + + def __init__(self, registry): + self._registry = registry + self._required = {} + self.init_extendors() + super(AdapterLookupBase, self).__init__() + + def changed(self, ignored=None): + super(AdapterLookupBase, self).changed(None) + for r in self._required.keys(): + r = r() + if r is not None: + r.unsubscribe(self) + self._required.clear() + + + # Extendors + # --------- + + # When given an target interface for an adapter lookup, we need to consider + # adapters for interfaces that extend the target interface. This is + # what the extendors dictionary is about. It tells us all of the + # interfaces that extend an interface for which there are adapters + # registered. + + # We could separate this by order and name, thus reducing the + # number of provided interfaces to search at run time. The tradeoff, + # however, is that we have to store more information. 
For example, + # if the same interface is provided for multiple names and if the + # interface extends many interfaces, we'll have to keep track of + # a fair bit of information for each name. It's better to + # be space efficient here and be time efficient in the cache + # implementation. + + # TODO: add invalidation when a provided interface changes, in case + # the interface's __iro__ has changed. This is unlikely enough that + # we'll take our chances for now. + + def init_extendors(self): + self._extendors = {} + for p in self._registry._provided: + self.add_extendor(p) + + def add_extendor(self, provided): + _extendors = self._extendors + for i in provided.__iro__: + extendors = _extendors.get(i, ()) + _extendors[i] = ( + [e for e in extendors if provided.isOrExtends(e)] + + + [provided] + + + [e for e in extendors if not provided.isOrExtends(e)] + ) + + def remove_extendor(self, provided): + _extendors = self._extendors + for i in provided.__iro__: + _extendors[i] = [e for e in _extendors.get(i, ()) + if e != provided] + + + def _subscribe(self, *required): + _refs = self._required + for r in required: + ref = r.weakref() + if ref not in _refs: + r.subscribe(self) + _refs[ref] = 1 + + def _uncached_lookup(self, required, provided, name=_BLANK): + required = tuple(required) + result = None + order = len(required) + for registry in self._registry.ro: + byorder = registry._adapters + if order >= len(byorder): + continue + + extendors = registry._v_lookup._extendors.get(provided) + if not extendors: + continue + + components = byorder[order] + result = _lookup(components, required, extendors, name, 0, + order) + if result is not None: + break + + self._subscribe(*required) + + return result + + def queryMultiAdapter(self, objects, provided, name=_BLANK, default=None): + factory = self.lookup(map(providedBy, objects), provided, name) + if factory is None: + return default + + result = factory(*objects) + if result is None: + return default + + return result + + 
def _uncached_lookupAll(self, required, provided): + required = tuple(required) + order = len(required) + result = {} + for registry in reversed(self._registry.ro): + byorder = registry._adapters + if order >= len(byorder): + continue + extendors = registry._v_lookup._extendors.get(provided) + if not extendors: + continue + components = byorder[order] + _lookupAll(components, required, extendors, result, 0, order) + + self._subscribe(*required) + + return tuple(result.items()) + + def names(self, required, provided): + return [c[0] for c in self.lookupAll(required, provided)] + + def _uncached_subscriptions(self, required, provided): + required = tuple(required) + order = len(required) + result = [] + for registry in reversed(self._registry.ro): + byorder = registry._subscribers + if order >= len(byorder): + continue + + if provided is None: + extendors = (provided, ) + else: + extendors = registry._v_lookup._extendors.get(provided) + if extendors is None: + continue + + _subscriptions(byorder[order], required, extendors, _BLANK, + result, 0, order) + + self._subscribe(*required) + + return result + + def subscribers(self, objects, provided): + subscriptions = self.subscriptions(map(providedBy, objects), provided) + if provided is None: + result = () + for subscription in subscriptions: + subscription(*objects) + else: + result = [] + for subscription in subscriptions: + subscriber = subscription(*objects) + if subscriber is not None: + result.append(subscriber) + return result + +class AdapterLookup(AdapterLookupBase, LookupBase): + pass + +@implementer(IAdapterRegistry) +class AdapterRegistry(BaseAdapterRegistry): + + LookupClass = AdapterLookup + + def __init__(self, bases=()): + # AdapterRegisties are invalidating registries, so + # we need to keep track of out invalidating subregistries. 
+ self._v_subregistries = weakref.WeakKeyDictionary() + + super(AdapterRegistry, self).__init__(bases) + + def _addSubregistry(self, r): + self._v_subregistries[r] = 1 + + def _removeSubregistry(self, r): + if r in self._v_subregistries: + del self._v_subregistries[r] + + def _setBases(self, bases): + old = self.__dict__.get('__bases__', ()) + for r in old: + if r not in bases: + r._removeSubregistry(self) + for r in bases: + if r not in old: + r._addSubregistry(self) + + super(AdapterRegistry, self)._setBases(bases) + + def changed(self, originally_changed): + super(AdapterRegistry, self).changed(originally_changed) + + for sub in self._v_subregistries.keys(): + sub.changed(originally_changed) + + +class VerifyingAdapterLookup(AdapterLookupBase, VerifyingBase): + pass + +@implementer(IAdapterRegistry) +class VerifyingAdapterRegistry(BaseAdapterRegistry): + + LookupClass = VerifyingAdapterLookup + +def _convert_None_to_Interface(x): + if x is None: + return Interface + else: + return x + +def _lookup(components, specs, provided, name, i, l): + if i < l: + for spec in specs[i].__sro__: + comps = components.get(spec) + if comps: + r = _lookup(comps, specs, provided, name, i+1, l) + if r is not None: + return r + else: + for iface in provided: + comps = components.get(iface) + if comps: + r = comps.get(name) + if r is not None: + return r + + return None + +def _lookupAll(components, specs, provided, result, i, l): + if i < l: + for spec in reversed(specs[i].__sro__): + comps = components.get(spec) + if comps: + _lookupAll(comps, specs, provided, result, i+1, l) + else: + for iface in reversed(provided): + comps = components.get(iface) + if comps: + result.update(comps) + +def _subscriptions(components, specs, provided, name, result, i, l): + if i < l: + for spec in reversed(specs[i].__sro__): + comps = components.get(spec) + if comps: + _subscriptions(comps, specs, provided, name, result, i+1, l) + else: + for iface in reversed(provided): + comps = 
components.get(iface) + if comps: + comps = comps.get(name) + if comps: + result.extend(comps) diff --git a/thesisenv/lib/python3.6/site-packages/zope/interface/advice.py b/thesisenv/lib/python3.6/site-packages/zope/interface/advice.py new file mode 100644 index 0000000..e55930d --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/interface/advice.py @@ -0,0 +1,205 @@ +############################################################################## +# +# Copyright (c) 2003 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Class advice. + +This module was adapted from 'protocols.advice', part of the Python +Enterprise Application Kit (PEAK). Please notify the PEAK authors +(pje@telecommunity.com and tsarna@sarna.org) if bugs are found or +Zope-specific changes are required, so that the PEAK version of this module +can be kept in sync. + +PEAK is a Python application framework that interoperates with (but does +not require) Zope 3 and Twisted. It provides tools for manipulating UML +models, object-relational persistence, aspect-oriented programming, and more. +Visit the PEAK home page at http://peak.telecommunity.com for more information. +""" + +from types import FunctionType +try: + from types import ClassType +except ImportError: + __python3 = True +else: + __python3 = False + +import sys + +def getFrameInfo(frame): + """Return (kind,module,locals,globals) for a frame + + 'kind' is one of "exec", "module", "class", "function call", or "unknown". 
+ """ + + f_locals = frame.f_locals + f_globals = frame.f_globals + + sameNamespace = f_locals is f_globals + hasModule = '__module__' in f_locals + hasName = '__name__' in f_globals + + sameName = hasModule and hasName + sameName = sameName and f_globals['__name__']==f_locals['__module__'] + + module = hasName and sys.modules.get(f_globals['__name__']) or None + + namespaceIsModule = module and module.__dict__ is f_globals + + if not namespaceIsModule: + # some kind of funky exec + kind = "exec" + elif sameNamespace and not hasModule: + kind = "module" + elif sameName and not sameNamespace: + kind = "class" + elif not sameNamespace: + kind = "function call" + else: # pragma: no cover + # How can you have f_locals is f_globals, and have '__module__' set? + # This is probably module-level code, but with a '__module__' variable. + kind = "unknown" + return kind, module, f_locals, f_globals + + +def addClassAdvisor(callback, depth=2): + """Set up 'callback' to be passed the containing class upon creation + + This function is designed to be called by an "advising" function executed + in a class suite. The "advising" function supplies a callback that it + wishes to have executed when the containing class is created. The + callback will be given one argument: the newly created containing class. + The return value of the callback will be used in place of the class, so + the callback should return the input if it does not wish to replace the + class. + + The optional 'depth' argument to this function determines the number of + frames between this function and the targeted class suite. 'depth' + defaults to 2, since this skips this function's frame and one calling + function frame. If you use this function from a function called directly + in the class suite, the default will be correct, otherwise you will need + to determine the correct depth yourself. 
+ + This function works by installing a special class factory function in + place of the '__metaclass__' of the containing class. Therefore, only + callbacks *after* the last '__metaclass__' assignment in the containing + class will be executed. Be sure that classes using "advising" functions + declare any '__metaclass__' *first*, to ensure all callbacks are run.""" + # This entire approach is invalid under Py3K. Don't even try to fix + # the coverage for this block there. :( + if __python3: # pragma: no cover + raise TypeError('Class advice impossible in Python3') + + frame = sys._getframe(depth) + kind, module, caller_locals, caller_globals = getFrameInfo(frame) + + # This causes a problem when zope interfaces are used from doctest. + # In these cases, kind == "exec". + # + #if kind != "class": + # raise SyntaxError( + # "Advice must be in the body of a class statement" + # ) + + previousMetaclass = caller_locals.get('__metaclass__') + if __python3: # pragma: no cover + defaultMetaclass = caller_globals.get('__metaclass__', type) + else: + defaultMetaclass = caller_globals.get('__metaclass__', ClassType) + + + def advise(name, bases, cdict): + + if '__metaclass__' in cdict: + del cdict['__metaclass__'] + + if previousMetaclass is None: + if bases: + # find best metaclass or use global __metaclass__ if no bases + meta = determineMetaclass(bases) + else: + meta = defaultMetaclass + + elif isClassAdvisor(previousMetaclass): + # special case: we can't compute the "true" metaclass here, + # so we need to invoke the previous metaclass and let it + # figure it out for us (and apply its own advice in the process) + meta = previousMetaclass + + else: + meta = determineMetaclass(bases, previousMetaclass) + + newClass = meta(name,bases,cdict) + + # this lets the callback replace the class completely, if it wants to + return callback(newClass) + + # introspection data only, not used by inner function + advise.previousMetaclass = previousMetaclass + advise.callback = callback 
+ + # install the advisor + caller_locals['__metaclass__'] = advise + + +def isClassAdvisor(ob): + """True if 'ob' is a class advisor function""" + return isinstance(ob,FunctionType) and hasattr(ob,'previousMetaclass') + + +def determineMetaclass(bases, explicit_mc=None): + """Determine metaclass from 1+ bases and optional explicit __metaclass__""" + + meta = [getattr(b,'__class__',type(b)) for b in bases] + + if explicit_mc is not None: + # The explicit metaclass needs to be verified for compatibility + # as well, and allowed to resolve the incompatible bases, if any + meta.append(explicit_mc) + + if len(meta)==1: + # easy case + return meta[0] + + candidates = minimalBases(meta) # minimal set of metaclasses + + if not candidates: # pragma: no cover + # they're all "classic" classes + assert(not __python3) # This should not happen under Python 3 + return ClassType + + elif len(candidates)>1: + # We could auto-combine, but for now we won't... + raise TypeError("Incompatible metatypes",bases) + + # Just one, return it + return candidates[0] + + +def minimalBases(classes): + """Reduce a list of base classes to its ordered minimum equivalent""" + + if not __python3: # pragma: no cover + classes = [c for c in classes if c is not ClassType] + candidates = [] + + for m in classes: + for n in classes: + if issubclass(n,m) and m is not n: + break + else: + # m has no subclasses in 'classes' + if m in candidates: + candidates.remove(m) # ensure that we're later in the list + candidates.append(m) + + return candidates diff --git a/thesisenv/lib/python3.6/site-packages/zope/interface/common/__init__.py b/thesisenv/lib/python3.6/site-packages/zope/interface/common/__init__.py new file mode 100644 index 0000000..b711d36 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/interface/common/__init__.py @@ -0,0 +1,2 @@ +# +# This file is necessary to make this directory a package. 
diff --git a/thesisenv/lib/python3.6/site-packages/zope/interface/common/idatetime.py b/thesisenv/lib/python3.6/site-packages/zope/interface/common/idatetime.py new file mode 100644 index 0000000..82f0059 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/interface/common/idatetime.py @@ -0,0 +1,606 @@ +############################################################################## +# Copyright (c) 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +############################################################################## +"""Datetime interfaces. + +This module is called idatetime because if it were called datetime the import +of the real datetime would fail. +""" +from datetime import timedelta, date, datetime, time, tzinfo + +from zope.interface import Interface, Attribute +from zope.interface import classImplements + + +class ITimeDeltaClass(Interface): + """This is the timedelta class interface. + + This is symbolic; this module does **not** make + `datetime.timedelta` provide this interface. + """ + + min = Attribute("The most negative timedelta object") + + max = Attribute("The most positive timedelta object") + + resolution = Attribute( + "The smallest difference between non-equal timedelta objects") + + +class ITimeDelta(ITimeDeltaClass): + """Represent the difference between two datetime objects. + + Implemented by `datetime.timedelta`. 
+ + Supported operators: + + - add, subtract timedelta + - unary plus, minus, abs + - compare to timedelta + - multiply, divide by int/long + + In addition, `.datetime` supports subtraction of two `.datetime` objects + returning a `.timedelta`, and addition or subtraction of a `.datetime` + and a `.timedelta` giving a `.datetime`. + + Representation: (days, seconds, microseconds). + """ + + days = Attribute("Days between -999999999 and 999999999 inclusive") + + seconds = Attribute("Seconds between 0 and 86399 inclusive") + + microseconds = Attribute("Microseconds between 0 and 999999 inclusive") + + +class IDateClass(Interface): + """This is the date class interface. + + This is symbolic; this module does **not** make + `datetime.date` provide this interface. + """ + + min = Attribute("The earliest representable date") + + max = Attribute("The latest representable date") + + resolution = Attribute( + "The smallest difference between non-equal date objects") + + def today(): + """Return the current local time. + + This is equivalent to ``date.fromtimestamp(time.time())``""" + + def fromtimestamp(timestamp): + """Return the local date from a POSIX timestamp (like time.time()) + + This may raise `ValueError`, if the timestamp is out of the range of + values supported by the platform C ``localtime()`` function. It's common + for this to be restricted to years from 1970 through 2038. Note that + on non-POSIX systems that include leap seconds in their notion of a + timestamp, leap seconds are ignored by `fromtimestamp`. + """ + + def fromordinal(ordinal): + """Return the date corresponding to the proleptic Gregorian ordinal. + + January 1 of year 1 has ordinal 1. `ValueError` is raised unless + 1 <= ordinal <= date.max.toordinal(). + + For any date *d*, ``date.fromordinal(d.toordinal()) == d``. + """ + + +class IDate(IDateClass): + """Represents a date (year, month and day) in an idealized calendar. + + Implemented by `datetime.date`. 
+ + Operators: + + __repr__, __str__ + __cmp__, __hash__ + __add__, __radd__, __sub__ (add/radd only with timedelta arg) + """ + + year = Attribute("Between MINYEAR and MAXYEAR inclusive.") + + month = Attribute("Between 1 and 12 inclusive") + + day = Attribute( + "Between 1 and the number of days in the given month of the given year.") + + def replace(year, month, day): + """Return a date with the same value. + + Except for those members given new values by whichever keyword + arguments are specified. For example, if ``d == date(2002, 12, 31)``, then + ``d.replace(day=26) == date(2000, 12, 26)``. + """ + + def timetuple(): + """Return a 9-element tuple of the form returned by `time.localtime`. + + The hours, minutes and seconds are 0, and the DST flag is -1. + ``d.timetuple()`` is equivalent to + ``(d.year, d.month, d.day, 0, 0, 0, d.weekday(), d.toordinal() - + date(d.year, 1, 1).toordinal() + 1, -1)`` + """ + + def toordinal(): + """Return the proleptic Gregorian ordinal of the date + + January 1 of year 1 has ordinal 1. For any date object *d*, + ``date.fromordinal(d.toordinal()) == d``. + """ + + def weekday(): + """Return the day of the week as an integer. + + Monday is 0 and Sunday is 6. For example, + ``date(2002, 12, 4).weekday() == 2``, a Wednesday. + + .. seealso:: `isoweekday`. + """ + + def isoweekday(): + """Return the day of the week as an integer. + + Monday is 1 and Sunday is 7. For example, + date(2002, 12, 4).isoweekday() == 3, a Wednesday. + + .. seealso:: `weekday`, `isocalendar`. + """ + + def isocalendar(): + """Return a 3-tuple, (ISO year, ISO week number, ISO weekday). + + The ISO calendar is a widely used variant of the Gregorian calendar. + See http://www.phys.uu.nl/~vgent/calendar/isocalendar.htm for a good + explanation. + + The ISO year consists of 52 or 53 full weeks, and where a week starts + on a Monday and ends on a Sunday. The first week of an ISO year is the + first (Gregorian) calendar week of a year containing a Thursday. 
This + is called week number 1, and the ISO year of that Thursday is the same + as its Gregorian year. + + For example, 2004 begins on a Thursday, so the first week of ISO year + 2004 begins on Monday, 29 Dec 2003 and ends on Sunday, 4 Jan 2004, so + that ``date(2003, 12, 29).isocalendar() == (2004, 1, 1)`` and + ``date(2004, 1, 4).isocalendar() == (2004, 1, 7)``. + """ + + def isoformat(): + """Return a string representing the date in ISO 8601 format. + + This is 'YYYY-MM-DD'. + For example, ``date(2002, 12, 4).isoformat() == '2002-12-04'``. + """ + + def __str__(): + """For a date *d*, ``str(d)`` is equivalent to ``d.isoformat()``.""" + + def ctime(): + """Return a string representing the date. + + For example date(2002, 12, 4).ctime() == 'Wed Dec 4 00:00:00 2002'. + d.ctime() is equivalent to time.ctime(time.mktime(d.timetuple())) + on platforms where the native C ctime() function + (which `time.ctime` invokes, but which date.ctime() does not invoke) + conforms to the C standard. + """ + + def strftime(format): + """Return a string representing the date. + + Controlled by an explicit format string. Format codes referring to + hours, minutes or seconds will see 0 values. + """ + + +class IDateTimeClass(Interface): + """This is the datetime class interface. + + This is symbolic; this module does **not** make + `datetime.datetime` provide this interface. + """ + + min = Attribute("The earliest representable datetime") + + max = Attribute("The latest representable datetime") + + resolution = Attribute( + "The smallest possible difference between non-equal datetime objects") + + def today(): + """Return the current local datetime, with tzinfo None. + + This is equivalent to ``datetime.fromtimestamp(time.time())``. + + .. seealso:: `now`, `fromtimestamp`. + """ + + def now(tz=None): + """Return the current local date and time. 
+ + If optional argument *tz* is None or not specified, this is like `today`, + but, if possible, supplies more precision than can be gotten from going + through a `time.time` timestamp (for example, this may be possible on + platforms supplying the C ``gettimeofday()`` function). + + Else tz must be an instance of a class tzinfo subclass, and the current + date and time are converted to tz's time zone. In this case the result + is equivalent to tz.fromutc(datetime.utcnow().replace(tzinfo=tz)). + + .. seealso:: `today`, `utcnow`. + """ + + def utcnow(): + """Return the current UTC date and time, with tzinfo None. + + This is like `now`, but returns the current UTC date and time, as a + naive datetime object. + + .. seealso:: `now`. + """ + + def fromtimestamp(timestamp, tz=None): + """Return the local date and time corresponding to the POSIX timestamp. + + Same as is returned by time.time(). If optional argument tz is None or + not specified, the timestamp is converted to the platform's local date + and time, and the returned datetime object is naive. + + Else tz must be an instance of a class tzinfo subclass, and the + timestamp is converted to tz's time zone. In this case the result is + equivalent to + ``tz.fromutc(datetime.utcfromtimestamp(timestamp).replace(tzinfo=tz))``. + + fromtimestamp() may raise `ValueError`, if the timestamp is out of the + range of values supported by the platform C localtime() or gmtime() + functions. It's common for this to be restricted to years in 1970 + through 2038. Note that on non-POSIX systems that include leap seconds + in their notion of a timestamp, leap seconds are ignored by + fromtimestamp(), and then it's possible to have two timestamps + differing by a second that yield identical datetime objects. + + .. seealso:: `utcfromtimestamp`. + """ + + def utcfromtimestamp(timestamp): + """Return the UTC datetime from the POSIX timestamp with tzinfo None. 
+ + This may raise `ValueError`, if the timestamp is out of the range of + values supported by the platform C ``gmtime()`` function. It's common for + this to be restricted to years in 1970 through 2038. + + .. seealso:: `fromtimestamp`. + """ + + def fromordinal(ordinal): + """Return the datetime from the proleptic Gregorian ordinal. + + January 1 of year 1 has ordinal 1. `ValueError` is raised unless + 1 <= ordinal <= datetime.max.toordinal(). + The hour, minute, second and microsecond of the result are all 0, and + tzinfo is None. + """ + + def combine(date, time): + """Return a new datetime object. + + Its date members are equal to the given date object's, and whose time + and tzinfo members are equal to the given time object's. For any + datetime object *d*, ``d == datetime.combine(d.date(), d.timetz())``. + If date is a datetime object, its time and tzinfo members are ignored. + """ + + +class IDateTime(IDate, IDateTimeClass): + """Object contains all the information from a date object and a time object. + + Implemented by `datetime.datetime`. + """ + + year = Attribute("Year between MINYEAR and MAXYEAR inclusive") + + month = Attribute("Month between 1 and 12 inclusive") + + day = Attribute( + "Day between 1 and the number of days in the given month of the year") + + hour = Attribute("Hour in range(24)") + + minute = Attribute("Minute in range(60)") + + second = Attribute("Second in range(60)") + + microsecond = Attribute("Microsecond in range(1000000)") + + tzinfo = Attribute( + """The object passed as the tzinfo argument to the datetime constructor + or None if none was passed""") + + def date(): + """Return date object with same year, month and day.""" + + def time(): + """Return time object with same hour, minute, second, microsecond. + + tzinfo is None. + + .. seealso:: Method :meth:`timetz`. + """ + + def timetz(): + """Return time object with same hour, minute, second, microsecond, + and tzinfo. + + .. seealso:: Method :meth:`time`. 
+ """ + + def replace(year, month, day, hour, minute, second, microsecond, tzinfo): + """Return a datetime with the same members, except for those members + given new values by whichever keyword arguments are specified. + + Note that ``tzinfo=None`` can be specified to create a naive datetime from + an aware datetime with no conversion of date and time members. + """ + + def astimezone(tz): + """Return a datetime object with new tzinfo member tz, adjusting the + date and time members so the result is the same UTC time as self, but + in tz's local time. + + tz must be an instance of a tzinfo subclass, and its utcoffset() and + dst() methods must not return None. self must be aware (self.tzinfo + must not be None, and self.utcoffset() must not return None). + + If self.tzinfo is tz, self.astimezone(tz) is equal to self: no + adjustment of date or time members is performed. Else the result is + local time in time zone tz, representing the same UTC time as self: + + after astz = dt.astimezone(tz), astz - astz.utcoffset() + + will usually have the same date and time members as dt - dt.utcoffset(). + The discussion of class `datetime.tzinfo` explains the cases at Daylight Saving + Time transition boundaries where this cannot be achieved (an issue only + if tz models both standard and daylight time). + + If you merely want to attach a time zone object *tz* to a datetime *dt* + without adjustment of date and time members, use ``dt.replace(tzinfo=tz)``. + If you merely want to remove the time zone object from an aware + datetime dt without conversion of date and time members, use + ``dt.replace(tzinfo=None)``. + + Note that the default `tzinfo.fromutc` method can be overridden in a + tzinfo subclass to effect the result returned by `astimezone`. + """ + + def utcoffset(): + """Return the timezone offset in minutes east of UTC (negative west of + UTC).""" + + def dst(): + """Return 0 if DST is not in effect, or the DST offset (in minutes + eastward) if DST is in effect. 
+ """ + + def tzname(): + """Return the timezone name.""" + + def timetuple(): + """Return a 9-element tuple of the form returned by `time.localtime`.""" + + def utctimetuple(): + """Return UTC time tuple compatilble with `time.gmtime`.""" + + def toordinal(): + """Return the proleptic Gregorian ordinal of the date. + + The same as self.date().toordinal(). + """ + + def weekday(): + """Return the day of the week as an integer. + + Monday is 0 and Sunday is 6. The same as self.date().weekday(). + See also isoweekday(). + """ + + def isoweekday(): + """Return the day of the week as an integer. + + Monday is 1 and Sunday is 7. The same as self.date().isoweekday. + + .. seealso:: `weekday`, `isocalendar`. + """ + + def isocalendar(): + """Return a 3-tuple, (ISO year, ISO week number, ISO weekday). + + The same as self.date().isocalendar(). + """ + + def isoformat(sep='T'): + """Return a string representing the date and time in ISO 8601 format. + + YYYY-MM-DDTHH:MM:SS.mmmmmm or YYYY-MM-DDTHH:MM:SS if microsecond is 0 + + If `utcoffset` does not return None, a 6-character string is appended, + giving the UTC offset in (signed) hours and minutes: + + YYYY-MM-DDTHH:MM:SS.mmmmmm+HH:MM or YYYY-MM-DDTHH:MM:SS+HH:MM + if microsecond is 0. + + The optional argument sep (default 'T') is a one-character separator, + placed between the date and time portions of the result. + """ + + def __str__(): + """For a datetime instance *d*, ``str(d)`` is equivalent to ``d.isoformat(' ')``. + """ + + def ctime(): + """Return a string representing the date and time. + + ``datetime(2002, 12, 4, 20, 30, 40).ctime() == 'Wed Dec 4 20:30:40 2002'``. + ``d.ctime()`` is equivalent to ``time.ctime(time.mktime(d.timetuple()))`` on + platforms where the native C ``ctime()`` function (which `time.ctime` + invokes, but which `datetime.ctime` does not invoke) conforms to the + C standard. + """ + + def strftime(format): + """Return a string representing the date and time. 
+ + This is controlled by an explicit format string. + """ + + +class ITimeClass(Interface): + """This is the time class interface. + + This is symbolic; this module does **not** make + `datetime.time` provide this interface. + + """ + + min = Attribute("The earliest representable time") + + max = Attribute("The latest representable time") + + resolution = Attribute( + "The smallest possible difference between non-equal time objects") + + +class ITime(ITimeClass): + """Represent time with time zone. + + Implemented by `datetime.time`. + + Operators: + + __repr__, __str__ + __cmp__, __hash__ + """ + + hour = Attribute("Hour in range(24)") + + minute = Attribute("Minute in range(60)") + + second = Attribute("Second in range(60)") + + microsecond = Attribute("Microsecond in range(1000000)") + + tzinfo = Attribute( + """The object passed as the tzinfo argument to the time constructor + or None if none was passed.""") + + def replace(hour, minute, second, microsecond, tzinfo): + """Return a time with the same value. + + Except for those members given new values by whichever keyword + arguments are specified. Note that tzinfo=None can be specified + to create a naive time from an aware time, without conversion of the + time members. + """ + + def isoformat(): + """Return a string representing the time in ISO 8601 format. + + That is HH:MM:SS.mmmmmm or, if self.microsecond is 0, HH:MM:SS + If utcoffset() does not return None, a 6-character string is appended, + giving the UTC offset in (signed) hours and minutes: + HH:MM:SS.mmmmmm+HH:MM or, if self.microsecond is 0, HH:MM:SS+HH:MM + """ + + def __str__(): + """For a time t, str(t) is equivalent to t.isoformat().""" + + def strftime(format): + """Return a string representing the time. + + This is controlled by an explicit format string. + """ + + def utcoffset(): + """Return the timezone offset in minutes east of UTC (negative west of + UTC). 
+ + If tzinfo is None, returns None, else returns + self.tzinfo.utcoffset(None), and raises an exception if the latter + doesn't return None or a timedelta object representing a whole number + of minutes with magnitude less than one day. + """ + + def dst(): + """Return 0 if DST is not in effect, or the DST offset (in minutes + eastward) if DST is in effect. + + If tzinfo is None, returns None, else returns self.tzinfo.dst(None), + and raises an exception if the latter doesn't return None, or a + timedelta object representing a whole number of minutes with + magnitude less than one day. + """ + + def tzname(): + """Return the timezone name. + + If tzinfo is None, returns None, else returns self.tzinfo.tzname(None), + or raises an exception if the latter doesn't return None or a string + object. + """ + + +class ITZInfo(Interface): + """Time zone info class. + """ + + def utcoffset(dt): + """Return offset of local time from UTC, in minutes east of UTC. + + If local time is west of UTC, this should be negative. + Note that this is intended to be the total offset from UTC; + for example, if a tzinfo object represents both time zone and DST + adjustments, utcoffset() should return their sum. If the UTC offset + isn't known, return None. Else the value returned must be a timedelta + object specifying a whole number of minutes in the range -1439 to 1439 + inclusive (1440 = 24*60; the magnitude of the offset must be less + than one day). + """ + + def dst(dt): + """Return the daylight saving time (DST) adjustment, in minutes east + of UTC, or None if DST information isn't known. + """ + + def tzname(dt): + """Return the time zone name corresponding to the datetime object as + a string. 
+ """ + + def fromutc(dt): + """Return an equivalent datetime in self's local time.""" + + +classImplements(timedelta, ITimeDelta) +classImplements(date, IDate) +classImplements(datetime, IDateTime) +classImplements(time, ITime) +classImplements(tzinfo, ITZInfo) + +## directlyProvides(timedelta, ITimeDeltaClass) +## directlyProvides(date, IDateClass) +## directlyProvides(datetime, IDateTimeClass) +## directlyProvides(time, ITimeClass) diff --git a/thesisenv/lib/python3.6/site-packages/zope/interface/common/interfaces.py b/thesisenv/lib/python3.6/site-packages/zope/interface/common/interfaces.py new file mode 100644 index 0000000..4308e0a --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/interface/common/interfaces.py @@ -0,0 +1,212 @@ +############################################################################## +# +# Copyright (c) 2003 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. 
+# +############################################################################## +"""Interfaces for standard python exceptions +""" +from zope.interface import Interface +from zope.interface import classImplements + +class IException(Interface): + "Interface for `Exception`" +classImplements(Exception, IException) + + +class IStandardError(IException): + "Interface for `StandardError` (Python 2 only.)" +try: + classImplements(StandardError, IStandardError) +except NameError: #pragma NO COVER + pass # StandardError does not exist in Python 3 + + +class IWarning(IException): + "Interface for `Warning`" +classImplements(Warning, IWarning) + + +class ISyntaxError(IStandardError): + "Interface for `SyntaxError`" +classImplements(SyntaxError, ISyntaxError) + + +class ILookupError(IStandardError): + "Interface for `LookupError`" +classImplements(LookupError, ILookupError) + + +class IValueError(IStandardError): + "Interface for `ValueError`" +classImplements(ValueError, IValueError) + + +class IRuntimeError(IStandardError): + "Interface for `RuntimeError`" +classImplements(RuntimeError, IRuntimeError) + + +class IArithmeticError(IStandardError): + "Interface for `ArithmeticError`" +classImplements(ArithmeticError, IArithmeticError) + + +class IAssertionError(IStandardError): + "Interface for `AssertionError`" +classImplements(AssertionError, IAssertionError) + + +class IAttributeError(IStandardError): + "Interface for `AttributeError`" +classImplements(AttributeError, IAttributeError) + + +class IDeprecationWarning(IWarning): + "Interface for `DeprecationWarning`" +classImplements(DeprecationWarning, IDeprecationWarning) + + +class IEOFError(IStandardError): + "Interface for `EOFError`" +classImplements(EOFError, IEOFError) + + +class IEnvironmentError(IStandardError): + "Interface for `EnvironmentError`" +classImplements(EnvironmentError, IEnvironmentError) + + +class IFloatingPointError(IArithmeticError): + "Interface for `FloatingPointError`" 
+classImplements(FloatingPointError, IFloatingPointError) + + +class IIOError(IEnvironmentError): + "Interface for `IOError`" +classImplements(IOError, IIOError) + + +class IImportError(IStandardError): + "Interface for `ImportError`" +classImplements(ImportError, IImportError) + + +class IIndentationError(ISyntaxError): + "Interface for `IndentationError`" +classImplements(IndentationError, IIndentationError) + + +class IIndexError(ILookupError): + "Interface for `IndexError`" +classImplements(IndexError, IIndexError) + + +class IKeyError(ILookupError): + "Interface for `KeyError`" +classImplements(KeyError, IKeyError) + + +class IKeyboardInterrupt(IStandardError): + "Interface for `KeyboardInterrupt`" +classImplements(KeyboardInterrupt, IKeyboardInterrupt) + + +class IMemoryError(IStandardError): + "Interface for `MemoryError`" +classImplements(MemoryError, IMemoryError) + + +class INameError(IStandardError): + "Interface for `NameError`" +classImplements(NameError, INameError) + + +class INotImplementedError(IRuntimeError): + "Interface for `NotImplementedError`" +classImplements(NotImplementedError, INotImplementedError) + + +class IOSError(IEnvironmentError): + "Interface for `OSError`" +classImplements(OSError, IOSError) + + +class IOverflowError(IArithmeticError): + "Interface for `ArithmeticError`" +classImplements(OverflowError, IOverflowError) + + +class IOverflowWarning(IWarning): + """Deprecated, no standard class implements this. + + This was the interface for ``OverflowWarning`` prior to Python 2.5, + but that class was removed for all versions after that. 
+ """ + + +class IReferenceError(IStandardError): + "Interface for `ReferenceError`" +classImplements(ReferenceError, IReferenceError) + + +class IRuntimeWarning(IWarning): + "Interface for `RuntimeWarning`" +classImplements(RuntimeWarning, IRuntimeWarning) + + +class IStopIteration(IException): + "Interface for `StopIteration`" +classImplements(StopIteration, IStopIteration) + + +class ISyntaxWarning(IWarning): + "Interface for `SyntaxWarning`" +classImplements(SyntaxWarning, ISyntaxWarning) + + +class ISystemError(IStandardError): + "Interface for `SystemError`" +classImplements(SystemError, ISystemError) + + +class ISystemExit(IException): + "Interface for `SystemExit`" +classImplements(SystemExit, ISystemExit) + + +class ITabError(IIndentationError): + "Interface for `TabError`" +classImplements(TabError, ITabError) + + +class ITypeError(IStandardError): + "Interface for `TypeError`" +classImplements(TypeError, ITypeError) + + +class IUnboundLocalError(INameError): + "Interface for `UnboundLocalError`" +classImplements(UnboundLocalError, IUnboundLocalError) + + +class IUnicodeError(IValueError): + "Interface for `UnicodeError`" +classImplements(UnicodeError, IUnicodeError) + + +class IUserWarning(IWarning): + "Interface for `UserWarning`" +classImplements(UserWarning, IUserWarning) + + +class IZeroDivisionError(IArithmeticError): + "Interface for `ZeroDivisionError`" +classImplements(ZeroDivisionError, IZeroDivisionError) diff --git a/thesisenv/lib/python3.6/site-packages/zope/interface/common/mapping.py b/thesisenv/lib/python3.6/site-packages/zope/interface/common/mapping.py new file mode 100644 index 0000000..1c5661a --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/interface/common/mapping.py @@ -0,0 +1,150 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. 
+# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Mapping Interfaces. + +Importing this module does *not* mark any standard classes +as implementing any of these interfaces. +""" +from zope.interface import Interface + +class IItemMapping(Interface): + """Simplest readable mapping object + """ + + def __getitem__(key): + """Get a value for a key + + A `KeyError` is raised if there is no value for the key. + """ + + +class IReadMapping(IItemMapping): + """Basic mapping interface + """ + + def get(key, default=None): + """Get a value for a key + + The default is returned if there is no value for the key. + """ + + def __contains__(key): + """Tell if a key exists in the mapping.""" + + +class IWriteMapping(Interface): + """Mapping methods for changing data""" + + def __delitem__(key): + """Delete a value from the mapping using the key.""" + + def __setitem__(key, value): + """Set a new item in the mapping.""" + + +class IEnumerableMapping(IReadMapping): + """Mapping objects whose items can be enumerated. + """ + + def keys(): + """Return the keys of the mapping object. + """ + + def __iter__(): + """Return an iterator for the keys of the mapping object. + """ + + def values(): + """Return the values of the mapping object. + """ + + def items(): + """Return the items of the mapping object. + """ + + def __len__(): + """Return the number of items. 
+ """ + +class IMapping(IWriteMapping, IEnumerableMapping): + ''' Simple mapping interface ''' + +class IIterableMapping(IEnumerableMapping): + """A mapping that has distinct methods for iterating + without copying. + + On Python 2, a `dict` has these methods, but on Python 3 + the methods defined in `IEnumerableMapping` already iterate + without copying. + """ + + def iterkeys(): + "iterate over keys; equivalent to ``__iter__``" + + def itervalues(): + "iterate over values" + + def iteritems(): + "iterate over items" + +class IClonableMapping(Interface): + """Something that can produce a copy of itself. + + This is available in `dict`. + """ + + def copy(): + "return copy of dict" + +class IExtendedReadMapping(IIterableMapping): + """ + Something with a particular method equivalent to ``__contains__``. + + On Python 2, `dict` provides this method, but it was removed + in Python 3. + """ + + def has_key(key): + """Tell if a key exists in the mapping; equivalent to ``__contains__``""" + +class IExtendedWriteMapping(IWriteMapping): + """Additional mutation methods. + + These are all provided by `dict`. + """ + + def clear(): + "delete all items" + + def update(d): + " Update D from E: for k in E.keys(): D[k] = E[k]" + + def setdefault(key, default=None): + "D.setdefault(k[,d]) -> D.get(k,d), also set D[k]=d if k not in D" + + def pop(k, *args): + """Remove specified key and return the corresponding value. + + ``*args`` may contain a single default value, or may not be supplied. 
+ If key is not found, default is returned if given, otherwise + `KeyError` is raised""" + + def popitem(): + """remove and return some (key, value) pair as a + 2-tuple; but raise KeyError if mapping is empty""" + +class IFullMapping( + IExtendedReadMapping, IExtendedWriteMapping, IClonableMapping, IMapping): + ''' Full mapping interface ''' # IMapping included so tests for IMapping + # succeed with IFullMapping diff --git a/thesisenv/lib/python3.6/site-packages/zope/interface/common/sequence.py b/thesisenv/lib/python3.6/site-packages/zope/interface/common/sequence.py new file mode 100644 index 0000000..393918e --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/interface/common/sequence.py @@ -0,0 +1,165 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Sequence Interfaces + +Importing this module does *not* mark any standard classes +as implementing any of these interfaces. +""" + +__docformat__ = 'restructuredtext' +from zope.interface import Interface + +class IMinimalSequence(Interface): + """Most basic sequence interface. + + All sequences are iterable. 
This requires at least one of the + following: + + - a `__getitem__()` method that takes a single argument; integer + values starting at 0 must be supported, and `IndexError` should + be raised for the first index for which there is no value, or + + - an `__iter__()` method that returns an iterator as defined in + the Python documentation (http://docs.python.org/lib/typeiter.html). + + """ + + def __getitem__(index): + """``x.__getitem__(index) <==> x[index]`` + + Declaring this interface does not specify whether `__getitem__` + supports slice objects.""" + +class IFiniteSequence(IMinimalSequence): + + def __len__(): + """``x.__len__() <==> len(x)``""" + +class IReadSequence(IFiniteSequence): + """read interface shared by tuple and list""" + + def __contains__(item): + """``x.__contains__(item) <==> item in x``""" + + def __lt__(other): + """``x.__lt__(other) <==> x < other``""" + + def __le__(other): + """``x.__le__(other) <==> x <= other``""" + + def __eq__(other): + """``x.__eq__(other) <==> x == other``""" + + def __ne__(other): + """``x.__ne__(other) <==> x != other``""" + + def __gt__(other): + """``x.__gt__(other) <==> x > other``""" + + def __ge__(other): + """``x.__ge__(other) <==> x >= other``""" + + def __add__(other): + """``x.__add__(other) <==> x + other``""" + + def __mul__(n): + """``x.__mul__(n) <==> x * n``""" + + def __rmul__(n): + """``x.__rmul__(n) <==> n * x``""" + + def __getslice__(i, j): + """``x.__getslice__(i, j) <==> x[i:j]`` + + Use of negative indices is not supported. + + Deprecated since Python 2.0 but still a part of `UserList`. 
+ """ + +class IExtendedReadSequence(IReadSequence): + """Full read interface for lists""" + + def count(item): + """Return number of occurrences of value""" + + def index(item, *args): + """index(value, [start, [stop]]) -> int + + Return first index of *value* + """ + +class IUniqueMemberWriteSequence(Interface): + """The write contract for a sequence that may enforce unique members""" + + def __setitem__(index, item): + """``x.__setitem__(index, item) <==> x[index] = item`` + + Declaring this interface does not specify whether `__setitem__` + supports slice objects. + """ + + def __delitem__(index): + """``x.__delitem__(index) <==> del x[index]`` + + Declaring this interface does not specify whether `__delitem__` + supports slice objects. + """ + + def __setslice__(i, j, other): + """``x.__setslice__(i, j, other) <==> x[i:j] = other`` + + Use of negative indices is not supported. + + Deprecated since Python 2.0 but still a part of `UserList`. + """ + + def __delslice__(i, j): + """``x.__delslice__(i, j) <==> del x[i:j]`` + + Use of negative indices is not supported. + + Deprecated since Python 2.0 but still a part of `UserList`. 
+ """ + def __iadd__(y): + """``x.__iadd__(y) <==> x += y``""" + + def append(item): + """Append item to end""" + + def insert(index, item): + """Insert item before index""" + + def pop(index=-1): + """Remove and return item at index (default last)""" + + def remove(item): + """Remove first occurrence of value""" + + def reverse(): + """Reverse *IN PLACE*""" + + def sort(cmpfunc=None): + """Stable sort *IN PLACE*; `cmpfunc(x, y)` -> -1, 0, 1""" + + def extend(iterable): + """Extend list by appending elements from the iterable""" + +class IWriteSequence(IUniqueMemberWriteSequence): + """Full write contract for sequences""" + + def __imul__(n): + """``x.__imul__(n) <==> x *= n``""" + +class ISequence(IReadSequence, IWriteSequence): + """Full sequence contract""" diff --git a/thesisenv/lib/python3.6/site-packages/zope/interface/common/tests/__init__.py b/thesisenv/lib/python3.6/site-packages/zope/interface/common/tests/__init__.py new file mode 100644 index 0000000..b711d36 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/interface/common/tests/__init__.py @@ -0,0 +1,2 @@ +# +# This file is necessary to make this directory a package. diff --git a/thesisenv/lib/python3.6/site-packages/zope/interface/common/tests/basemapping.py b/thesisenv/lib/python3.6/site-packages/zope/interface/common/tests/basemapping.py new file mode 100644 index 0000000..b756dca --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/interface/common/tests/basemapping.py @@ -0,0 +1,107 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Base Mapping tests +""" +from operator import __getitem__ + +def testIReadMapping(self, inst, state, absent): + for key in state: + self.assertEqual(inst[key], state[key]) + self.assertEqual(inst.get(key, None), state[key]) + self.assertTrue(key in inst) + + for key in absent: + self.assertEqual(inst.get(key, None), None) + self.assertEqual(inst.get(key), None) + self.assertEqual(inst.get(key, self), self) + self.assertRaises(KeyError, __getitem__, inst, key) + + +def test_keys(self, inst, state): + # Return the keys of the mapping object + inst_keys = list(inst.keys()); inst_keys.sort() + state_keys = list(state.keys()) ; state_keys.sort() + self.assertEqual(inst_keys, state_keys) + +def test_iter(self, inst, state): + # Return the keys of the mapping object + inst_keys = list(inst); inst_keys.sort() + state_keys = list(state.keys()) ; state_keys.sort() + self.assertEqual(inst_keys, state_keys) + +def test_values(self, inst, state): + # Return the values of the mapping object + inst_values = list(inst.values()); inst_values.sort() + state_values = list(state.values()) ; state_values.sort() + self.assertEqual(inst_values, state_values) + +def test_items(self, inst, state): + # Return the items of the mapping object + inst_items = list(inst.items()); inst_items.sort() + state_items = list(state.items()) ; state_items.sort() + self.assertEqual(inst_items, state_items) + +def test___len__(self, inst, state): + # Return the number of items + self.assertEqual(len(inst), len(state)) + +def testIEnumerableMapping(self, inst, state): + test_keys(self, inst, state) + test_items(self, inst, state) + test_values(self, inst, state) + 
test___len__(self, inst, state) + + +class BaseTestIReadMapping(object): + def testIReadMapping(self): + inst = self._IReadMapping__sample() + state = self._IReadMapping__stateDict() + absent = self._IReadMapping__absentKeys() + testIReadMapping(self, inst, state, absent) + + +class BaseTestIEnumerableMapping(BaseTestIReadMapping): + # Mapping objects whose items can be enumerated + def test_keys(self): + # Return the keys of the mapping object + inst = self._IEnumerableMapping__sample() + state = self._IEnumerableMapping__stateDict() + test_keys(self, inst, state) + + def test_values(self): + # Return the values of the mapping object + inst = self._IEnumerableMapping__sample() + state = self._IEnumerableMapping__stateDict() + test_values(self, inst, state) + + def test_items(self): + # Return the items of the mapping object + inst = self._IEnumerableMapping__sample() + state = self._IEnumerableMapping__stateDict() + test_items(self, inst, state) + + def test___len__(self): + # Return the number of items + inst = self._IEnumerableMapping__sample() + state = self._IEnumerableMapping__stateDict() + test___len__(self, inst, state) + + def _IReadMapping__stateDict(self): + return self._IEnumerableMapping__stateDict() + + def _IReadMapping__sample(self): + return self._IEnumerableMapping__sample() + + def _IReadMapping__absentKeys(self): + return self._IEnumerableMapping__absentKeys() diff --git a/thesisenv/lib/python3.6/site-packages/zope/interface/common/tests/test_idatetime.py b/thesisenv/lib/python3.6/site-packages/zope/interface/common/tests/test_idatetime.py new file mode 100644 index 0000000..496a5c9 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/interface/common/tests/test_idatetime.py @@ -0,0 +1,37 @@ +############################################################################## +# +# Copyright (c) 2003 Zope Foundation and Contributors. +# All Rights Reserved. 
+# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Test for datetime interfaces +""" + +import unittest + +from zope.interface.verify import verifyObject, verifyClass +from zope.interface.common.idatetime import ITimeDelta, ITimeDeltaClass +from zope.interface.common.idatetime import IDate, IDateClass +from zope.interface.common.idatetime import IDateTime, IDateTimeClass +from zope.interface.common.idatetime import ITime, ITimeClass, ITZInfo +from datetime import timedelta, date, datetime, time, tzinfo + +class TestDateTimeInterfaces(unittest.TestCase): + + def test_interfaces(self): + verifyObject(ITimeDelta, timedelta(minutes=20)) + verifyObject(IDate, date(2000, 1, 2)) + verifyObject(IDateTime, datetime(2000, 1, 2, 10, 20)) + verifyObject(ITime, time(20, 30, 15, 1234)) + verifyObject(ITZInfo, tzinfo()) + verifyClass(ITimeDeltaClass, timedelta) + verifyClass(IDateClass, date) + verifyClass(IDateTimeClass, datetime) + verifyClass(ITimeClass, time) diff --git a/thesisenv/lib/python3.6/site-packages/zope/interface/common/tests/test_import_interfaces.py b/thesisenv/lib/python3.6/site-packages/zope/interface/common/tests/test_import_interfaces.py new file mode 100644 index 0000000..fe3766f --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/interface/common/tests/test_import_interfaces.py @@ -0,0 +1,20 @@ +############################################################################## +# +# Copyright (c) 2006 Zope Foundation and Contributors. +# All Rights Reserved. 
+# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +import unittest + +class TestInterfaceImport(unittest.TestCase): + + def test_import(self): + import zope.interface.common.interfaces as x + self.assertIsNotNone(x) diff --git a/thesisenv/lib/python3.6/site-packages/zope/interface/declarations.py b/thesisenv/lib/python3.6/site-packages/zope/interface/declarations.py new file mode 100644 index 0000000..b80245f --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/interface/declarations.py @@ -0,0 +1,929 @@ +############################################################################## +# Copyright (c) 2003 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +############################################################################## +"""Implementation of interface declarations + +There are three flavors of declarations: + + - Declarations are used to simply name declared interfaces. + + - ImplementsDeclarations are used to express the interfaces that a + class implements (that instances of the class provides). + + Implements specifications support inheriting interfaces. 
+ + - ProvidesDeclarations are used to express interfaces directly + provided by objects. + +""" +__docformat__ = 'restructuredtext' + +import sys +from types import FunctionType +from types import MethodType +from types import ModuleType +import weakref + +from zope.interface.advice import addClassAdvisor +from zope.interface.interface import InterfaceClass +from zope.interface.interface import SpecificationBase +from zope.interface.interface import Specification +from zope.interface._compat import CLASS_TYPES as DescriptorAwareMetaClasses +from zope.interface._compat import PYTHON3 + +# Registry of class-implementation specifications +BuiltinImplementationSpecifications = {} + +_ADVICE_ERROR = ('Class advice impossible in Python3. ' + 'Use the @%s class decorator instead.') + +_ADVICE_WARNING = ('The %s API is deprecated, and will not work in Python3 ' + 'Use the @%s class decorator instead.') + +class named(object): + + def __init__(self, name): + self.name = name + + def __call__(self, ob): + ob.__component_name__ = self.name + return ob + +class Declaration(Specification): + """Interface declarations""" + + def __init__(self, *interfaces): + Specification.__init__(self, _normalizeargs(interfaces)) + + def changed(self, originally_changed): + Specification.changed(self, originally_changed) + try: + del self._v_attrs + except AttributeError: + pass + + def __contains__(self, interface): + """Test whether an interface is in the specification + """ + + return self.extends(interface) and interface in self.interfaces() + + def __iter__(self): + """Return an iterator for the interfaces in the specification + """ + return self.interfaces() + + def flattened(self): + """Return an iterator of all included and extended interfaces + """ + return iter(self.__iro__) + + def __sub__(self, other): + """Remove interfaces from a specification + """ + return Declaration( + *[i for i in self.interfaces() + if not [j for j in other.interfaces() + if i.extends(j, 0)] + ] + ) + + 
def __add__(self, other): + """Add two specifications or a specification and an interface + """ + seen = {} + result = [] + for i in self.interfaces(): + seen[i] = 1 + result.append(i) + for i in other.interfaces(): + if i not in seen: + seen[i] = 1 + result.append(i) + + return Declaration(*result) + + __radd__ = __add__ + + +############################################################################## +# +# Implementation specifications +# +# These specify interfaces implemented by instances of classes + +class Implements(Declaration): + + # class whose specification should be used as additional base + inherit = None + + # interfaces actually declared for a class + declared = () + + __name__ = '?' + + @classmethod + def named(cls, name, *interfaces): + # Implementation method: Produce an Implements interface with + # a fully fleshed out __name__ before calling the constructor, which + # sets bases to the given interfaces and which may pass this object to + # other objects (e.g., to adjust dependents). If they're sorting or comparing + # by name, this needs to be set. + inst = cls.__new__(cls) + inst.__name__ = name + inst.__init__(*interfaces) + return inst + + def __repr__(self): + return '' % (self.__name__) + + def __reduce__(self): + return implementedBy, (self.inherit, ) + + def __cmp(self, other): + # Yes, I did mean to name this __cmp, rather than __cmp__. + # It is a private method used by __lt__ and __gt__. + # This is based on, and compatible with, InterfaceClass. + # (The two must be mutually comparable to be able to work in e.g., BTrees.) + # Instances of this class generally don't have a __module__ other than + # `zope.interface.declarations`, whereas they *do* have a __name__ that is the + # fully qualified name of the object they are representing. + + # Note, though, that equality and hashing are still identity based. 
This + # accounts for things like nested objects that have the same name (typically + # only in tests) and is consistent with pickling. As far as comparisons to InterfaceClass + # goes, we'll never have equal name and module to those, so we're still consistent there. + # Instances of this class are essentially intended to be unique and are + # heavily cached (note how our __reduce__ handles this) so having identity + # based hash and eq should also work. + if other is None: + return -1 + + n1 = (self.__name__, self.__module__) + n2 = (getattr(other, '__name__', ''), getattr(other, '__module__', '')) + + # This spelling works under Python3, which doesn't have cmp(). + return (n1 > n2) - (n1 < n2) + + def __hash__(self): + return Declaration.__hash__(self) + + # We want equality to be based on identity. However, we can't actually + # implement __eq__/__ne__ to do this because sometimes we get wrapped in a proxy. + # We need to let the proxy types implement these methods so they can handle unwrapping + # and then rely on: (1) the interpreter automatically changing `implements == proxy` into + # `proxy == implements` (which will call proxy.__eq__ to do the unwrapping) and then + # (2) the default equality semantics being identity based. + + def __lt__(self, other): + c = self.__cmp(other) + return c < 0 + + def __le__(self, other): + c = self.__cmp(other) + return c <= 0 + + def __gt__(self, other): + c = self.__cmp(other) + return c > 0 + + def __ge__(self, other): + c = self.__cmp(other) + return c >= 0 + +def _implements_name(ob): + # Return the __name__ attribute to be used by its __implemented__ + # property. + # This must be stable for the "same" object across processes + # because it is used for sorting. It needn't be unique, though, in cases + # like nested classes named Foo created by different functions, because + # equality and hashing is still based on identity. 
+ # It might be nice to use __qualname__ on Python 3, but that would produce + # different values between Py2 and Py3. + return (getattr(ob, '__module__', '?') or '?') + \ + '.' + (getattr(ob, '__name__', '?') or '?') + +def implementedByFallback(cls): + """Return the interfaces implemented for a class' instances + + The value returned is an `~zope.interface.interfaces.IDeclaration`. + """ + try: + spec = cls.__dict__.get('__implemented__') + except AttributeError: + + # we can't get the class dict. This is probably due to a + # security proxy. If this is the case, then probably no + # descriptor was installed for the class. + + # We don't want to depend directly on zope.security in + # zope.interface, but we'll try to make reasonable + # accommodations in an indirect way. + + # We'll check to see if there's an implements: + + spec = getattr(cls, '__implemented__', None) + if spec is None: + # There's no spec stred in the class. Maybe its a builtin: + spec = BuiltinImplementationSpecifications.get(cls) + if spec is not None: + return spec + return _empty + + if spec.__class__ == Implements: + # we defaulted to _empty or there was a spec. Good enough. + # Return it. + return spec + + # TODO: need old style __implements__ compatibility? + # Hm, there's an __implemented__, but it's not a spec. Must be + # an old-style declaration. Just compute a spec for it + return Declaration(*_normalizeargs((spec, ))) + + if isinstance(spec, Implements): + return spec + + if spec is None: + spec = BuiltinImplementationSpecifications.get(cls) + if spec is not None: + return spec + + # TODO: need old style __implements__ compatibility? 
+ spec_name = _implements_name(cls) + if spec is not None: + # old-style __implemented__ = foo declaration + spec = (spec, ) # tuplefy, as it might be just an int + spec = Implements.named(spec_name, *_normalizeargs(spec)) + spec.inherit = None # old-style implies no inherit + del cls.__implemented__ # get rid of the old-style declaration + else: + try: + bases = cls.__bases__ + except AttributeError: + if not callable(cls): + raise TypeError("ImplementedBy called for non-factory", cls) + bases = () + + spec = Implements.named(spec_name, *[implementedBy(c) for c in bases]) + spec.inherit = cls + + try: + cls.__implemented__ = spec + if not hasattr(cls, '__providedBy__'): + cls.__providedBy__ = objectSpecificationDescriptor + + if (isinstance(cls, DescriptorAwareMetaClasses) + and + '__provides__' not in cls.__dict__): + # Make sure we get a __provides__ descriptor + cls.__provides__ = ClassProvides( + cls, + getattr(cls, '__class__', type(cls)), + ) + + except TypeError: + if not isinstance(cls, type): + raise TypeError("ImplementedBy called for non-type", cls) + BuiltinImplementationSpecifications[cls] = spec + + return spec + +implementedBy = implementedByFallback + +def classImplementsOnly(cls, *interfaces): + """Declare the only interfaces implemented by instances of a class + + The arguments after the class are one or more interfaces or interface + specifications (`~zope.interface.interfaces.IDeclaration` objects). + + The interfaces given (including the interfaces in the specifications) + replace any previous declarations. + """ + spec = implementedBy(cls) + spec.declared = () + spec.inherit = None + classImplements(cls, *interfaces) + +def classImplements(cls, *interfaces): + """Declare additional interfaces implemented for instances of a class + + The arguments after the class are one or more interfaces or + interface specifications (`~zope.interface.interfaces.IDeclaration` objects). 
+ + The interfaces given (including the interfaces in the specifications) + are added to any interfaces previously declared. + """ + spec = implementedBy(cls) + spec.declared += tuple(_normalizeargs(interfaces)) + + # compute the bases + bases = [] + seen = {} + for b in spec.declared: + if b not in seen: + seen[b] = 1 + bases.append(b) + + if spec.inherit is not None: + + for c in spec.inherit.__bases__: + b = implementedBy(c) + if b not in seen: + seen[b] = 1 + bases.append(b) + + spec.__bases__ = tuple(bases) + +def _implements_advice(cls): + interfaces, classImplements = cls.__dict__['__implements_advice_data__'] + del cls.__implements_advice_data__ + classImplements(cls, *interfaces) + return cls + + +class implementer(object): + """Declare the interfaces implemented by instances of a class. + + This function is called as a class decorator. + + The arguments are one or more interfaces or interface + specifications (`~zope.interface.interfaces.IDeclaration` objects). + + The interfaces given (including the interfaces in the + specifications) are added to any interfaces previously + declared. + + Previous declarations include declarations for base classes + unless implementsOnly was used. + + This function is provided for convenience. It provides a more + convenient way to call `classImplements`. For example:: + + @implementer(I1) + class C(object): + pass + + is equivalent to calling:: + + classImplements(C, I1) + + after the class has been created. 
+ """ + + def __init__(self, *interfaces): + self.interfaces = interfaces + + def __call__(self, ob): + if isinstance(ob, DescriptorAwareMetaClasses): + classImplements(ob, *self.interfaces) + return ob + + spec_name = _implements_name(ob) + spec = Implements.named(spec_name, *self.interfaces) + try: + ob.__implemented__ = spec + except AttributeError: + raise TypeError("Can't declare implements", ob) + return ob + +class implementer_only(object): + """Declare the only interfaces implemented by instances of a class + + This function is called as a class decorator. + + The arguments are one or more interfaces or interface + specifications (`~zope.interface.interfaces.IDeclaration` objects). + + Previous declarations including declarations for base classes + are overridden. + + This function is provided for convenience. It provides a more + convenient way to call `classImplementsOnly`. For example:: + + @implementer_only(I1) + class C(object): pass + + is equivalent to calling:: + + classImplementsOnly(I1) + + after the class has been created. + """ + + def __init__(self, *interfaces): + self.interfaces = interfaces + + def __call__(self, ob): + if isinstance(ob, (FunctionType, MethodType)): + # XXX Does this decorator make sense for anything but classes? + # I don't think so. There can be no inheritance of interfaces + # on a method pr function.... + raise ValueError('The implementer_only decorator is not ' + 'supported for methods or functions.') + else: + # Assume it's a class: + classImplementsOnly(ob, *self.interfaces) + return ob + +def _implements(name, interfaces, classImplements): + # This entire approach is invalid under Py3K. Don't even try to fix + # the coverage for this block there. :( + frame = sys._getframe(2) + locals = frame.f_locals + + # Try to make sure we were called from a class def. In 2.2.0 we can't + # check for __module__ since it doesn't seem to be added to the locals + # until later on. 
+ if locals is frame.f_globals or '__module__' not in locals: + raise TypeError(name+" can be used only from a class definition.") + + if '__implements_advice_data__' in locals: + raise TypeError(name+" can be used only once in a class definition.") + + locals['__implements_advice_data__'] = interfaces, classImplements + addClassAdvisor(_implements_advice, depth=3) + +def implements(*interfaces): + """Declare interfaces implemented by instances of a class + + This function is called in a class definition. + + The arguments are one or more interfaces or interface + specifications (`~zope.interface.interfaces.IDeclaration` objects). + + The interfaces given (including the interfaces in the + specifications) are added to any interfaces previously + declared. + + Previous declarations include declarations for base classes + unless `implementsOnly` was used. + + This function is provided for convenience. It provides a more + convenient way to call `classImplements`. For example:: + + implements(I1) + + is equivalent to calling:: + + classImplements(C, I1) + + after the class has been created. + """ + # This entire approach is invalid under Py3K. Don't even try to fix + # the coverage for this block there. :( + if PYTHON3: + raise TypeError(_ADVICE_ERROR % 'implementer') + _implements("implements", interfaces, classImplements) + +def implementsOnly(*interfaces): + """Declare the only interfaces implemented by instances of a class + + This function is called in a class definition. + + The arguments are one or more interfaces or interface + specifications (`~zope.interface.interfaces.IDeclaration` objects). + + Previous declarations including declarations for base classes + are overridden. + + This function is provided for convenience. It provides a more + convenient way to call `classImplementsOnly`. For example:: + + implementsOnly(I1) + + is equivalent to calling:: + + classImplementsOnly(I1) + + after the class has been created. 
+ """ + # This entire approach is invalid under Py3K. Don't even try to fix + # the coverage for this block there. :( + if PYTHON3: + raise TypeError(_ADVICE_ERROR % 'implementer_only') + _implements("implementsOnly", interfaces, classImplementsOnly) + +############################################################################## +# +# Instance declarations + +class Provides(Declaration): # Really named ProvidesClass + """Implement ``__provides__``, the instance-specific specification + + When an object is pickled, we pickle the interfaces that it implements. + """ + + def __init__(self, cls, *interfaces): + self.__args = (cls, ) + interfaces + self._cls = cls + Declaration.__init__(self, *(interfaces + (implementedBy(cls), ))) + + def __reduce__(self): + return Provides, self.__args + + __module__ = 'zope.interface' + + def __get__(self, inst, cls): + """Make sure that a class __provides__ doesn't leak to an instance + """ + if inst is None and cls is self._cls: + # We were accessed through a class, so we are the class' + # provides spec. Just return this object, but only if we are + # being called on the same class that we were defined for: + return self + + raise AttributeError('__provides__') + +ProvidesClass = Provides + +# Registry of instance declarations +# This is a memory optimization to allow objects to share specifications. +InstanceDeclarations = weakref.WeakValueDictionary() + +def Provides(*interfaces): + """Cache instance declarations + + Instance declarations are shared among instances that have the same + declaration. The declarations are cached in a weak value dictionary. 
+ """ + spec = InstanceDeclarations.get(interfaces) + if spec is None: + spec = ProvidesClass(*interfaces) + InstanceDeclarations[interfaces] = spec + + return spec + +Provides.__safe_for_unpickling__ = True + + +def directlyProvides(object, *interfaces): + """Declare interfaces declared directly for an object + + The arguments after the object are one or more interfaces or interface + specifications (`~zope.interface.interfaces.IDeclaration` objects). + + The interfaces given (including the interfaces in the specifications) + replace interfaces previously declared for the object. + """ + cls = getattr(object, '__class__', None) + if cls is not None and getattr(cls, '__class__', None) is cls: + # It's a meta class (well, at least it it could be an extension class) + # Note that we can't get here from Py3k tests: there is no normal + # class which isn't descriptor aware. + if not isinstance(object, + DescriptorAwareMetaClasses): + raise TypeError("Attempt to make an interface declaration on a " + "non-descriptor-aware class") + + interfaces = _normalizeargs(interfaces) + if cls is None: + cls = type(object) + + issub = False + for damc in DescriptorAwareMetaClasses: + if issubclass(cls, damc): + issub = True + break + if issub: + # we have a class or type. We'll use a special descriptor + # that provides some extra caching + object.__provides__ = ClassProvides(object, cls, *interfaces) + else: + object.__provides__ = Provides(cls, *interfaces) + + +def alsoProvides(object, *interfaces): + """Declare interfaces declared directly for an object + + The arguments after the object are one or more interfaces or interface + specifications (`~zope.interface.interfaces.IDeclaration` objects). + + The interfaces given (including the interfaces in the specifications) are + added to the interfaces previously declared for the object. 
+ """ + directlyProvides(object, directlyProvidedBy(object), *interfaces) + +def noLongerProvides(object, interface): + """ Removes a directly provided interface from an object. + """ + directlyProvides(object, directlyProvidedBy(object) - interface) + if interface.providedBy(object): + raise ValueError("Can only remove directly provided interfaces.") + +class ClassProvidesBaseFallback(object): + + def __get__(self, inst, cls): + if cls is self._cls: + # We only work if called on the class we were defined for + + if inst is None: + # We were accessed through a class, so we are the class' + # provides spec. Just return this object as is: + return self + + return self._implements + + raise AttributeError('__provides__') + +ClassProvidesBasePy = ClassProvidesBaseFallback # BBB +ClassProvidesBase = ClassProvidesBaseFallback + +# Try to get C base: +try: + import zope.interface._zope_interface_coptimizations +except ImportError: + pass +else: + from zope.interface._zope_interface_coptimizations import ClassProvidesBase + + +class ClassProvides(Declaration, ClassProvidesBase): + """Special descriptor for class ``__provides__`` + + The descriptor caches the implementedBy info, so that + we can get declarations for objects without instance-specific + interfaces a bit quicker. + """ + def __init__(self, cls, metacls, *interfaces): + self._cls = cls + self._implements = implementedBy(cls) + self.__args = (cls, metacls, ) + interfaces + Declaration.__init__(self, *(interfaces + (implementedBy(metacls), ))) + + def __reduce__(self): + return self.__class__, self.__args + + # Copy base-class method for speed + __get__ = ClassProvidesBase.__get__ + +def directlyProvidedBy(object): + """Return the interfaces directly provided by the given object + + The value returned is an `~zope.interface.interfaces.IDeclaration`. + """ + provides = getattr(object, "__provides__", None) + if (provides is None # no spec + or + # We might have gotten the implements spec, as an + # optimization. 
If so, it's like having only one base, that we + # lop off to exclude class-supplied declarations: + isinstance(provides, Implements) + ): + return _empty + + # Strip off the class part of the spec: + return Declaration(provides.__bases__[:-1]) + +def classProvides(*interfaces): + """Declare interfaces provided directly by a class + + This function is called in a class definition. + + The arguments are one or more interfaces or interface specifications + (`~zope.interface.interfaces.IDeclaration` objects). + + The given interfaces (including the interfaces in the specifications) + are used to create the class's direct-object interface specification. + An error will be raised if the module class has an direct interface + specification. In other words, it is an error to call this function more + than once in a class definition. + + Note that the given interfaces have nothing to do with the interfaces + implemented by instances of the class. + + This function is provided for convenience. It provides a more convenient + way to call `directlyProvides` for a class. For example:: + + classProvides(I1) + + is equivalent to calling:: + + directlyProvides(theclass, I1) + + after the class has been created. + """ + # This entire approach is invalid under Py3K. Don't even try to fix + # the coverage for this block there. :( + + if PYTHON3: + raise TypeError(_ADVICE_ERROR % 'provider') + + frame = sys._getframe(1) + locals = frame.f_locals + + # Try to make sure we were called from a class def + if (locals is frame.f_globals) or ('__module__' not in locals): + raise TypeError("classProvides can be used only from a " + "class definition.") + + if '__provides__' in locals: + raise TypeError( + "classProvides can only be used once in a class definition.") + + locals["__provides__"] = _normalizeargs(interfaces) + + addClassAdvisor(_classProvides_advice, depth=2) + +def _classProvides_advice(cls): + # This entire approach is invalid under Py3K. 
Don't even try to fix + # the coverage for this block there. :( + interfaces = cls.__dict__['__provides__'] + del cls.__provides__ + directlyProvides(cls, *interfaces) + return cls + +class provider(object): + """Class decorator version of classProvides""" + + def __init__(self, *interfaces): + self.interfaces = interfaces + + def __call__(self, ob): + directlyProvides(ob, *self.interfaces) + return ob + +def moduleProvides(*interfaces): + """Declare interfaces provided by a module + + This function is used in a module definition. + + The arguments are one or more interfaces or interface specifications + (`~zope.interface.interfaces.IDeclaration` objects). + + The given interfaces (including the interfaces in the specifications) are + used to create the module's direct-object interface specification. An + error will be raised if the module already has an interface specification. + In other words, it is an error to call this function more than once in a + module definition. + + This function is provided for convenience. It provides a more convenient + way to call directlyProvides. For example:: + + moduleImplements(I1) + + is equivalent to:: + + directlyProvides(sys.modules[__name__], I1) + """ + frame = sys._getframe(1) + locals = frame.f_locals + + # Try to make sure we were called from a class def + if (locals is not frame.f_globals) or ('__name__' not in locals): + raise TypeError( + "moduleProvides can only be used from a module definition.") + + if '__provides__' in locals: + raise TypeError( + "moduleProvides can only be used once in a module definition.") + + locals["__provides__"] = Provides(ModuleType, + *_normalizeargs(interfaces)) + +############################################################################## +# +# Declaration querying support + +# XXX: is this a fossil? Nobody calls it, no unit tests exercise it, no +# doctests import it, and the package __init__ doesn't import it. 
+def ObjectSpecification(direct, cls): + """Provide object specifications + + These combine information for the object and for it's classes. + """ + return Provides(cls, direct) # pragma: no cover fossil + +def getObjectSpecificationFallback(ob): + + provides = getattr(ob, '__provides__', None) + if provides is not None: + if isinstance(provides, SpecificationBase): + return provides + + try: + cls = ob.__class__ + except AttributeError: + # We can't get the class, so just consider provides + return _empty + + return implementedBy(cls) + +getObjectSpecification = getObjectSpecificationFallback + +def providedByFallback(ob): + + # Here we have either a special object, an old-style declaration + # or a descriptor + + # Try to get __providedBy__ + try: + r = ob.__providedBy__ + except AttributeError: + # Not set yet. Fall back to lower-level thing that computes it + return getObjectSpecification(ob) + + try: + # We might have gotten a descriptor from an instance of a + # class (like an ExtensionClass) that doesn't support + # descriptors. We'll make sure we got one by trying to get + # the only attribute, which all specs have. + r.extends + + except AttributeError: + + # The object's class doesn't understand descriptors. + # Sigh. We need to get an object descriptor, but we have to be + # careful. We want to use the instance's __provides__, if + # there is one, but only if it didn't come from the class. + + try: + r = ob.__provides__ + except AttributeError: + # No __provides__, so just fall back to implementedBy + return implementedBy(ob.__class__) + + # We need to make sure we got the __provides__ from the + # instance. We'll do this by making sure we don't get the same + # thing from the class: + + try: + cp = ob.__class__.__provides__ + except AttributeError: + # The ob doesn't have a class or the class has no + # provides, assume we're done: + return r + + if r is cp: + # Oops, we got the provides from the class. This means + # the object doesn't have it's own. 
def _normalizeargs(sequence, output=None):
    """Normalize declaration arguments

    Normalization arguments might contain Declarations, tuples, or single
    interfaces.

    Anything but individual interfaces or implements specs will be expanded.

    Returns a flat list of interfaces / implements specs, accumulating into
    *output* when given (used by the recursive calls).
    """
    if output is None:
        output = []

    cls = sequence.__class__
    # Interfaces and Implements specs are leaves; anything else is treated
    # as an iterable of declarations and flattened recursively.
    if InterfaceClass in cls.__mro__ or Implements in cls.__mro__:
        output.append(sequence)
    else:
        for v in sequence:
            _normalizeargs(v, output)

    return output
def asStructuredText(I, munge=0, rst=False):
    """ Output structured text format.  Note, this will whack any existing
    'structured' format of the text.

    If `rst=True`, then the output will quote all code as inline literals in
    accordance with 'reStructuredText' markup principles.
    """
    if rst:
        def inline_literal(s):
            return "``%s``" % (s,)
    else:
        def inline_literal(s):
            return s

    parts = [inline_literal(I.getName())]
    level = 1

    if I.getDoc():
        parts.append(_justify_and_indent(_trim_doc_string(I.getDoc()), level))

    bases = [base
             for base in I.__bases__
             if base is not zope.interface.Interface]
    if bases:
        parts.append(_justify_and_indent("This interface extends:",
                                         level, munge))
        for base in bases:
            entry = "o %s" % inline_literal(base.getName())
            parts.append(_justify_and_indent(_trim_doc_string(entry),
                                             level + 1, munge))

    names_and_descriptions = sorted(I.namesAndDescriptions())

    # Plain attributes first (anything without a method signature) ...
    parts.append(_justify_and_indent("Attributes:", level, munge))
    for name, desc in names_and_descriptions:
        if not hasattr(desc, 'getSignatureString'):  # ugh...
            entry = "%s -- %s" % (inline_literal(desc.getName()),
                                  desc.getDoc() or 'no documentation')
            parts.append(_justify_and_indent(_trim_doc_string(entry),
                                             level + 1, munge))

    # ... then methods, rendered with their signatures.
    parts.append(_justify_and_indent("Methods:", level, munge))
    for name, desc in names_and_descriptions:
        if hasattr(desc, 'getSignatureString'):  # ugh...
            call = "%s%s" % (desc.getName(), desc.getSignatureString())
            entry = "%s -- %s" % (inline_literal(call),
                                  desc.getDoc() or 'no documentation')
            parts.append(_justify_and_indent(_trim_doc_string(entry),
                                             level + 1, munge))

    return "\n\n".join(parts) + "\n\n"


def asReStructuredText(I, munge=0):
    """ Output reStructuredText format.  Note, this will whack any existing
    'structured' format of the text."""
    return asStructuredText(I, munge=munge, rst=True)
""" + + lines = text.replace('\r\n', '\n').split('\n') + nlines = [lines.pop(0)] + if lines: + min_indent = min([len(line) - len(line.lstrip()) + for line in lines]) + for line in lines: + nlines.append(line[min_indent:]) + + return '\n'.join(nlines) + + +def _justify_and_indent(text, level, munge=0, width=72): + """ indent and justify text, rejustify (munge) if specified """ + + indent = " " * level + + if munge: + lines = [] + line = indent + text = text.split() + + for word in text: + line = ' '.join([line, word]) + if len(line) > width: + lines.append(line) + line = indent + else: + lines.append(line) + + return '\n'.join(lines) + + else: + return indent + \ + text.strip().replace("\r\n", "\n") .replace("\n", "\n" + indent) diff --git a/thesisenv/lib/python3.6/site-packages/zope/interface/exceptions.py b/thesisenv/lib/python3.6/site-packages/zope/interface/exceptions.py new file mode 100644 index 0000000..e9a4788 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/interface/exceptions.py @@ -0,0 +1,67 @@ +############################################################################## +# +# Copyright (c) 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. 
class Invalid(Exception):
    """A specification is violated

    Root of the interface-violation exception hierarchy.
    """

class DoesNotImplement(Invalid):
    """This object does not implement the given interface."""

    def __init__(self, interface):
        self.interface = interface

    def __str__(self):
        return """An object does not implement interface %(interface)s

        """ % self.__dict__

class BrokenImplementation(Invalid):
    """An attribute is not completely implemented.
    """

    def __init__(self, interface, name):
        self.interface = interface
        self.name = name

    def __str__(self):
        return """An object has failed to implement interface %(interface)s

        The %(name)s attribute was not provided.
        """ % self.__dict__

class BrokenMethodImplementation(Invalid):
    """A method is not completely implemented.
    """

    def __init__(self, method, mess):
        self.method = method
        self.mess = mess

    def __str__(self):
        return """The implementation of %(method)s violates its contract
        because %(mess)s.
        """ % self.__dict__

class InvalidInterface(Exception):
    """The interface has invalid contents
    """

class BadImplements(TypeError):
    """An implementation assertion is invalid

    because it doesn't contain an interface or a sequence of valid
    implementation assertions.
    """
class Element(object):
    """
    Default implementation of `zope.interface.interfaces.IElement`.
    """

    # We can't declare ``implements(IElement)`` here because the
    # declaration machinery is not available this early.

    def __init__(self, __name__, __doc__=''):
        # A "name" containing whitespace is really prose: treat it as the
        # doc string and leave the element unnamed.
        if not __doc__ and ' ' in __name__:
            __doc__ = __name__
            __name__ = None

        self.__name__ = __name__
        self.__doc__ = __doc__
        self.__tagged_values = {}

    def getName(self):
        """Return the name of the element."""
        return self.__name__

    def getDoc(self):
        """Return the documentation string of the element."""
        return self.__doc__

    def getTaggedValue(self, tag):
        """Return the value stored for *tag*; raise ``KeyError`` if unset."""
        return self.__tagged_values[tag]

    def queryTaggedValue(self, tag, default=None):
        """Return the value stored for *tag*, or *default* if unset."""
        return self.__tagged_values.get(tag, default)

    def getTaggedValueTags(self):
        """Return all tags that currently have an associated value."""
        return self.__tagged_values.keys()

    def setTaggedValue(self, tag, value):
        """Store *value* under *tag*, replacing any previous value."""
        self.__tagged_values[tag] = value
class Specification(SpecificationBase):
    """Specifications

    An interface specification is used to track interface declarations
    and component registrations.

    This class is a base class for both interfaces themselves and for
    interface specifications (declarations).

    Specifications are mutable.  If you reassign their bases, their
    relations with other specifications are adjusted accordingly.
    """

    # Copy some base class methods for speed
    isOrExtends = SpecificationBase.isOrExtends
    providedBy = SpecificationBase.providedBy

    def __init__(self, bases=()):
        # Maps every implied (ancestor) specification to mostly-unused
        # data; rebuilt from scratch by changed().
        self._implied = {}
        # Specs derived from this one, keyed weakly so we don't keep
        # them alive; values are subscription reference counts.
        self.dependents = weakref.WeakKeyDictionary()
        self.__bases__ = tuple(bases)

    def subscribe(self, dependent):
        # Reference-counted: the same dependent may subscribe repeatedly.
        self.dependents[dependent] = self.dependents.get(dependent, 0) + 1

    def unsubscribe(self, dependent):
        # Decrement the subscription count; drop the entry at zero and
        # complain if the dependent was never subscribed.
        n = self.dependents.get(dependent, 0) - 1
        if not n:
            del self.dependents[dependent]
        elif n > 0:
            self.dependents[dependent] = n
        else:
            raise KeyError(dependent)

    def __setBases(self, bases):
        # Unregister ourselves as a dependent of our old bases
        for b in self.__bases__:
            b.unsubscribe(self)

        # Register ourselves as a dependent of our new bases, then
        # recompute all derived state (sro/iro/_implied) via changed().
        self.__dict__['__bases__'] = bases
        for b in bases:
            b.subscribe(self)

        self.changed(self)

    # __bases__ is a property so that reassignment triggers the
    # subscribe/unsubscribe bookkeeping above.
    __bases__ = property(

        lambda self: self.__dict__.get('__bases__', ()),
        __setBases,
        )

    def changed(self, originally_changed):
        """We, or something we depend on, have changed
        """
        # Drop the attribute-lookup cache; it is rebuilt lazily by get().
        try:
            del self._v_attrs
        except AttributeError:
            pass

        implied = self._implied
        implied.clear()

        ancestors = ro(self)

        try:
            if Interface not in ancestors:
                ancestors.append(Interface)
        except NameError:
            pass # defining Interface itself

        # Specification-resolution order (all ancestors) and
        # interface-resolution order (interface ancestors only).
        self.__sro__ = tuple(ancestors)
        self.__iro__ = tuple([ancestor for ancestor in ancestors
                              if isinstance(ancestor, InterfaceClass)
                              ])

        for ancestor in ancestors:
            # We directly imply our ancestors:
            implied[ancestor] = ()

        # Now, advise our dependents of change:
        for dependent in tuple(self.dependents.keys()):
            dependent.changed(originally_changed)


    def interfaces(self):
        """Return an iterator for the interfaces in the specification.
        """
        # ``seen`` de-duplicates interfaces reachable through several bases.
        seen = {}
        for base in self.__bases__:
            for interface in base.interfaces():
                if interface not in seen:
                    seen[interface] = 1
                    yield interface


    def extends(self, interface, strict=True):
        """Does the specification extend the given interface?

        Test whether an interface in the specification extends the
        given interface
        """
        return ((interface in self._implied)
                and
                ((not strict) or (self != interface))
                )

    def weakref(self, callback=None):
        # Needed so security-proxied specifications can be weak-referenced.
        return weakref.ref(self, callback)

    def get(self, name, default=None):
        """Query for an attribute description
        """
        # Lazily (re)build the per-spec attribute cache; changed() clears it.
        try:
            attrs = self._v_attrs
        except AttributeError:
            attrs = self._v_attrs = {}
        attr = attrs.get(name)
        if attr is None:
            # Walk the interface-resolution order: first definition wins.
            for iface in self.__iro__:
                attr = iface.direct(name)
                if attr is not None:
                    attrs[name] = attr
                    break

        if attr is None:
            return default
        else:
            return attr
:-/ + __module__ = sys._getframe(1).f_globals['__name__'] + except (AttributeError, KeyError): # pragma: no cover + pass + + self.__module__ = __module__ + + d = attrs.get('__doc__') + if d is not None: + if not isinstance(d, Attribute): + if __doc__ is None: + __doc__ = d + del attrs['__doc__'] + + if __doc__ is None: + __doc__ = '' + + Element.__init__(self, name, __doc__) + + tagged_data = attrs.pop(TAGGED_DATA, None) + if tagged_data is not None: + for key, val in tagged_data.items(): + self.setTaggedValue(key, val) + + for base in bases: + if not isinstance(base, InterfaceClass): + raise TypeError('Expected base interfaces') + + Specification.__init__(self, bases) + + # Make sure that all recorded attributes (and methods) are of type + # `Attribute` and `Method` + for name, attr in list(attrs.items()): + if name in ('__locals__', '__qualname__', '__annotations__'): + # __locals__: Python 3 sometimes adds this. + # __qualname__: PEP 3155 (Python 3.3+) + # __annotations__: PEP 3107 (Python 3.0+) + del attrs[name] + continue + if isinstance(attr, Attribute): + attr.interface = self + if not attr.__name__: + attr.__name__ = name + elif isinstance(attr, FunctionType): + attrs[name] = fromFunction(attr, self, name=name) + elif attr is _decorator_non_return: + del attrs[name] + else: + raise InvalidInterface("Concrete attribute, " + name) + + self.__attrs = attrs + + self.__identifier__ = "%s.%s" % (self.__module__, self.__name__) + + def interfaces(self): + """Return an iterator for the interfaces in the specification. 
+ """ + yield self + + def getBases(self): + return self.__bases__ + + def isEqualOrExtendedBy(self, other): + """Same interface or extends?""" + return self == other or other.extends(self) + + def names(self, all=False): + """Return the attribute names defined by the interface.""" + if not all: + return self.__attrs.keys() + + r = self.__attrs.copy() + + for base in self.__bases__: + r.update(dict.fromkeys(base.names(all))) + + return r.keys() + + def __iter__(self): + return iter(self.names(all=True)) + + def namesAndDescriptions(self, all=False): + """Return attribute names and descriptions defined by interface.""" + if not all: + return self.__attrs.items() + + r = {} + for base in self.__bases__[::-1]: + r.update(dict(base.namesAndDescriptions(all))) + + r.update(self.__attrs) + + return r.items() + + def getDescriptionFor(self, name): + """Return the attribute description for the given name.""" + r = self.get(name) + if r is not None: + return r + + raise KeyError(name) + + __getitem__ = getDescriptionFor + + def __contains__(self, name): + return self.get(name) is not None + + def direct(self, name): + return self.__attrs.get(name) + + def queryDescriptionFor(self, name, default=None): + return self.get(name, default) + + def validateInvariants(self, obj, errors=None): + """validate object to defined invariants.""" + for call in self.queryTaggedValue('invariants', []): + try: + call(obj) + except Invalid as e: + if errors is None: + raise + else: + errors.append(e) + for base in self.__bases__: + try: + base.validateInvariants(obj, errors) + except Invalid: + if errors is None: + raise + if errors: + raise Invalid(errors) + + def __repr__(self): # pragma: no cover + try: + return self._v_repr + except AttributeError: + name = self.__name__ + m = self.__module__ + if m: + name = '%s.%s' % (m, name) + r = "<%s %s>" % (self.__class__.__name__, name) + self._v_repr = r + return r + + def _call_conform(self, conform): + try: + return conform(self) + except 
TypeError: # pragma: no cover + # We got a TypeError. It might be an error raised by + # the __conform__ implementation, or *we* may have + # made the TypeError by calling an unbound method + # (object is a class). In the later case, we behave + # as though there is no __conform__ method. We can + # detect this case by checking whether there is more + # than one traceback object in the traceback chain: + if sys.exc_info()[2].tb_next is not None: + # There is more than one entry in the chain, so + # reraise the error: + raise + # This clever trick is from Phillip Eby + + return None # pragma: no cover + + def __reduce__(self): + return self.__name__ + + def __cmp(self, other): + # Yes, I did mean to name this __cmp, rather than __cmp__. + # It is a private method used by __lt__ and __gt__. + # I don't want to override __eq__ because I want the default + # __eq__, which is really fast. + """Make interfaces sortable + + TODO: It would ne nice if: + + More specific interfaces should sort before less specific ones. + Otherwise, sort on name and module. + + But this is too complicated, and we're going to punt on it + for now. + + For now, sort on interface and module name. + + None is treated as a pseudo interface that implies the loosest + contact possible, no contract. For that reason, all interfaces + sort before None. + + """ + if other is None: + return -1 + + n1 = (getattr(self, '__name__', ''), getattr(self, '__module__', '')) + n2 = (getattr(other, '__name__', ''), getattr(other, '__module__', '')) + + # This spelling works under Python3, which doesn't have cmp(). 
+ return (n1 > n2) - (n1 < n2) + + def __hash__(self): + d = self.__dict__ + if '__module__' not in d or '__name__' not in d: # pragma: no cover + warnings.warn('Hashing uninitialized InterfaceClass instance') + return 1 + return hash((self.__name__, self.__module__)) + + def __eq__(self, other): + c = self.__cmp(other) + return c == 0 + + def __ne__(self, other): + c = self.__cmp(other) + return c != 0 + + def __lt__(self, other): + c = self.__cmp(other) + return c < 0 + + def __le__(self, other): + c = self.__cmp(other) + return c <= 0 + + def __gt__(self, other): + c = self.__cmp(other) + return c > 0 + + def __ge__(self, other): + c = self.__cmp(other) + return c >= 0 + + +Interface = InterfaceClass("Interface", __module__ = 'zope.interface') + +class Attribute(Element): + """Attribute descriptions + """ + + # We can't say this yet because we don't have enough + # infrastructure in place. + # + # implements(IAttribute) + + interface = None + + +class Method(Attribute): + """Method interfaces + + The idea here is that you have objects that describe methods. + This provides an opportunity for rich meta-data. + """ + + # We can't say this yet because we don't have enough + # infrastructure in place. 
+ # + # implements(IMethod) + + positional = required = () + _optional = varargs = kwargs = None + def _get_optional(self): + if self._optional is None: + return {} + return self._optional + def _set_optional(self, opt): + self._optional = opt + def _del_optional(self): + self._optional = None + optional = property(_get_optional, _set_optional, _del_optional) + + def __call__(self, *args, **kw): + raise BrokenImplementation(self.interface, self.__name__) + + def getSignatureInfo(self): + return {'positional': self.positional, + 'required': self.required, + 'optional': self.optional, + 'varargs': self.varargs, + 'kwargs': self.kwargs, + } + + def getSignatureString(self): + sig = [] + for v in self.positional: + sig.append(v) + if v in self.optional.keys(): + sig[-1] += "=" + repr(self.optional[v]) + if self.varargs: + sig.append("*" + self.varargs) + if self.kwargs: + sig.append("**" + self.kwargs) + + return "(%s)" % ", ".join(sig) + +def fromFunction(func, interface=None, imlevel=0, name=None): + name = name or func.__name__ + method = Method(name, func.__doc__) + defaults = getattr(func, '__defaults__', None) or () + code = func.__code__ + # Number of positional arguments + na = code.co_argcount-imlevel + names = code.co_varnames[imlevel:] + opt = {} + # Number of required arguments + nr = na-len(defaults) + if nr < 0: + defaults=defaults[-nr:] + nr = 0 + + # Determine the optional arguments. + opt.update(dict(zip(names[nr:], defaults))) + + method.positional = names[:na] + method.required = names[:nr] + method.optional = opt + + argno = na + + # Determine the function's variable argument's name (i.e. *args) + if code.co_flags & CO_VARARGS: + method.varargs = names[argno] + argno = argno + 1 + else: + method.varargs = None + + # Determine the function's keyword argument's name (i.e. 
**kw) + if code.co_flags & CO_VARKEYWORDS: + method.kwargs = names[argno] + else: + method.kwargs = None + + method.interface = interface + + for key, value in func.__dict__.items(): + method.setTaggedValue(key, value) + + return method + + +def fromMethod(meth, interface=None, name=None): + if isinstance(meth, MethodType): + func = meth.__func__ + else: + func = meth + return fromFunction(func, interface, imlevel=1, name=name) + + +# Now we can create the interesting interfaces and wire them up: +def _wire(): + from zope.interface.declarations import classImplements + + from zope.interface.interfaces import IAttribute + classImplements(Attribute, IAttribute) + + from zope.interface.interfaces import IMethod + classImplements(Method, IMethod) + + from zope.interface.interfaces import IInterface + classImplements(InterfaceClass, IInterface) + + from zope.interface.interfaces import ISpecification + classImplements(Specification, ISpecification) + +# We import this here to deal with module dependencies. +from zope.interface.declarations import implementedBy +from zope.interface.declarations import providedBy +from zope.interface.exceptions import InvalidInterface +from zope.interface.exceptions import BrokenImplementation diff --git a/thesisenv/lib/python3.6/site-packages/zope/interface/interfaces.py b/thesisenv/lib/python3.6/site-packages/zope/interface/interfaces.py new file mode 100644 index 0000000..27e64e9 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/interface/interfaces.py @@ -0,0 +1,1282 @@ +############################################################################## +# +# Copyright (c) 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
class IElement(Interface):
    """Objects that have basic documentation and tagged values.
    """

    __name__ = Attribute('__name__', 'The object name')
    __doc__ = Attribute('__doc__', 'The object doc string')

    def getTaggedValue(tag):
        """Returns the value associated with `tag`.

        Raise a `KeyError` if the tag isn't set.
        """

    def queryTaggedValue(tag, default=None):
        """Returns the value associated with `tag`.

        Return the default value if the tag isn't set.
        """

    def getTaggedValueTags():
        """Returns a list of all tags."""

    def setTaggedValue(tag, value):
        """Associates `value` with `key`."""


class IAttribute(IElement):
    """Attribute descriptors"""

    interface = Attribute('interface',
                          'Stores the interface instance in which the '
                          'attribute is located.')
class ISpecification(Interface):
    """Object Behavioral specifications"""

    def providedBy(object):
        """Test whether the interface is implemented by the object

        Return true if the object asserts that it implements the
        interface, including asserting that it implements an extended
        interface.
        """

    def implementedBy(class_):
        """Test whether the interface is implemented by instances of the class

        Return true if the class asserts that its instances implement the
        interface, including asserting that they implement an extended
        interface.
        """

    def isOrExtends(other):
        """Test whether the specification is or extends another
        """

    def extends(other, strict=True):
        """Test whether a specification extends another

        The specification extends other if it has other as a base
        interface or if one of its bases extends other.

        If strict is false, then the specification extends itself.
        """

    def weakref(callback=None):
        """Return a weakref to the specification

        This method is, regrettably, needed to allow weakrefs to be
        computed to security-proxied specifications.  While the
        zope.interface package does not require zope.security or
        zope.proxy, it has to be able to coexist with it.

        """

    __bases__ = Attribute("""Base specifications

    A tuple of specifications from which this specification is
    directly derived.

    """)

    __sro__ = Attribute("""Specification-resolution order

    A tuple of the specification and all of its ancestor
    specifications from most specific to least specific.

    (This is similar to the method-resolution order for new-style classes.)
    """)

    __iro__ = Attribute("""Interface-resolution order

    A tuple of the specification's ancestor interfaces from
    most specific to least specific.  The specification itself is
    included if it is an interface.

    (This is similar to the method-resolution order for new-style classes.)
    """)

    def get(name, default=None):
        """Look up the description for a name

        If the named attribute is not defined, the default is
        returned.
        """
+ + Interfaces are created with the Python class statement using + either `zope.interface.Interface` or another interface, as in:: + + from zope.interface import Interface + + class IMyInterface(Interface): + '''Interface documentation''' + + def meth(arg1, arg2): + '''Documentation for meth''' + + # Note that there is no self argument + + class IMySubInterface(IMyInterface): + '''Interface documentation''' + + def meth2(): + '''Documentation for meth2''' + + You use interfaces in two ways: + + - You assert that your object implement the interfaces. + + There are several ways that you can assert that an object + implements an interface: + + 1. Call `zope.interface.implements` in your class definition. + + 2. Call `zope.interfaces.directlyProvides` on your object. + + 3. Call `zope.interface.classImplements` to assert that instances + of a class implement an interface. + + For example:: + + from zope.interface import classImplements + + classImplements(some_class, some_interface) + + This approach is useful when it is not an option to modify + the class source. Note that this doesn't affect what the + class itself implements, but only what its instances + implement. + + - You query interface meta-data. See the IInterface methods and + attributes for details. + + """ + + def names(all=False): + """Get the interface attribute names + + Return a sequence of the names of the attributes, including + methods, included in the interface definition. + + Normally, only directly defined attributes are included. If + a true positional or keyword argument is given, then + attributes defined by base classes will be included. + """ + + def namesAndDescriptions(all=False): + """Get the interface attribute names and descriptions + + Return a sequence of the names and descriptions of the + attributes, including methods, as name-value pairs, included + in the interface definition. + + Normally, only directly defined attributes are included. 
If + a true positional or keyword argument is given, then + attributes defined by base classes will be included. + """ + + def __getitem__(name): + """Get the description for a name + + If the named attribute is not defined, a `KeyError` is raised. + """ + + def direct(name): + """Get the description for the name if it was defined by the interface + + If the interface doesn't define the name, returns None. + """ + + def validateInvariants(obj, errors=None): + """Validate invariants + + Validate object to defined invariants. If errors is None, + raises first Invalid error; if errors is a list, appends all errors + to list, then raises Invalid with the errors as the first element + of the "args" tuple.""" + + def __contains__(name): + """Test whether the name is defined by the interface""" + + def __iter__(): + """Return an iterator over the names defined by the interface + + The names iterated include all of the names defined by the + interface directly and indirectly by base interfaces. + """ + + __module__ = Attribute("""The name of the module defining the interface""") + +class IDeclaration(ISpecification): + """Interface declaration + + Declarations are used to express the interfaces implemented by + classes or provided by objects. + """ + + def __contains__(interface): + """Test whether an interface is in the specification + + Return true if the given interface is one of the interfaces in + the specification and false otherwise. + """ + + def __iter__(): + """Return an iterator for the interfaces in the specification + """ + + def flattened(): + """Return an iterator of all included and extended interfaces + + An iterator is returned for all interfaces either included in + or extended by interfaces included in the specifications + without duplicates. The interfaces are in "interface + resolution order". 
The interface resolution order is such that + base interfaces are listed after interfaces that extend them + and, otherwise, interfaces are included in the order that they + were defined in the specification. + """ + + def __sub__(interfaces): + """Create an interface specification with some interfaces excluded + + The argument can be an interface or an interface + specifications. The interface or interfaces given in a + specification are subtracted from the interface specification. + + Removing an interface that is not in the specification does + not raise an error. Doing so has no effect. + + Removing an interface also removes sub-interfaces of the interface. + + """ + + def __add__(interfaces): + """Create an interface specification with some interfaces added + + The argument can be an interface or an interface + specifications. The interface or interfaces given in a + specification are added to the interface specification. + + Adding an interface that is already in the specification does + not raise an error. Doing so has no effect. + """ + + def __nonzero__(): + """Return a true value of the interface specification is non-empty + """ + +class IInterfaceDeclaration(Interface): + """Declare and check the interfaces of objects + + The functions defined in this interface are used to declare the + interfaces that objects provide and to query the interfaces that have + been declared. + + Interfaces can be declared for objects in two ways: + + - Interfaces are declared for instances of the object's class + + - Interfaces are declared for the object directly. + + The interfaces declared for an object are, therefore, the union of + interfaces declared for the object directly and the interfaces + declared for instances of the object's class. + + Note that we say that a class implements the interfaces provided + by it's instances. An instance can also provide interfaces + directly. 
The interfaces provided by an object are the union of + the interfaces provided directly and the interfaces implemented by + the class. + """ + + def providedBy(ob): + """Return the interfaces provided by an object + + This is the union of the interfaces directly provided by an + object and interfaces implemented by it's class. + + The value returned is an `IDeclaration`. + """ + + def implementedBy(class_): + """Return the interfaces implemented for a class' instances + + The value returned is an `IDeclaration`. + """ + + def classImplements(class_, *interfaces): + """Declare additional interfaces implemented for instances of a class + + The arguments after the class are one or more interfaces or + interface specifications (`IDeclaration` objects). + + The interfaces given (including the interfaces in the + specifications) are added to any interfaces previously + declared. + + Consider the following example:: + + class C(A, B): + ... + + classImplements(C, I1, I2) + + + Instances of ``C`` provide ``I1``, ``I2``, and whatever interfaces + instances of ``A`` and ``B`` provide. + """ + + def implementer(*interfaces): + """Create a decorator for declaring interfaces implemented by a factory. + + A callable is returned that makes an implements declaration on + objects passed to it. + """ + + def classImplementsOnly(class_, *interfaces): + """Declare the only interfaces implemented by instances of a class + + The arguments after the class are one or more interfaces or + interface specifications (`IDeclaration` objects). + + The interfaces given (including the interfaces in the + specifications) replace any previous declarations. + + Consider the following example:: + + class C(A, B): + ... + + classImplements(C, IA, IB. IC) + classImplementsOnly(C. I1, I2) + + Instances of ``C`` provide only ``I1``, ``I2``, and regardless of + whatever interfaces instances of ``A`` and ``B`` implement. 
+ """ + + def implementer_only(*interfaces): + """Create a decorator for declaring the only interfaces implemented + + A callable is returned that makes an implements declaration on + objects passed to it. + """ + + def directlyProvidedBy(object): + """Return the interfaces directly provided by the given object + + The value returned is an `IDeclaration`. + """ + + def directlyProvides(object, *interfaces): + """Declare interfaces declared directly for an object + + The arguments after the object are one or more interfaces or + interface specifications (`IDeclaration` objects). + + The interfaces given (including the interfaces in the + specifications) replace interfaces previously + declared for the object. + + Consider the following example:: + + class C(A, B): + ... + + ob = C() + directlyProvides(ob, I1, I2) + + The object, ``ob`` provides ``I1``, ``I2``, and whatever interfaces + instances have been declared for instances of ``C``. + + To remove directly provided interfaces, use `directlyProvidedBy` and + subtract the unwanted interfaces. For example:: + + directlyProvides(ob, directlyProvidedBy(ob)-I2) + + removes I2 from the interfaces directly provided by + ``ob``. The object, ``ob`` no longer directly provides ``I2``, + although it might still provide ``I2`` if it's class + implements ``I2``. + + To add directly provided interfaces, use `directlyProvidedBy` and + include additional interfaces. For example:: + + directlyProvides(ob, directlyProvidedBy(ob), I2) + + adds I2 to the interfaces directly provided by ob. 
+ """ + + def alsoProvides(object, *interfaces): + """Declare additional interfaces directly for an object:: + + alsoProvides(ob, I1) + + is equivalent to:: + + directlyProvides(ob, directlyProvidedBy(ob), I1) + """ + + def noLongerProvides(object, interface): + """Remove an interface from the list of an object's directly + provided interfaces:: + + noLongerProvides(ob, I1) + + is equivalent to:: + + directlyProvides(ob, directlyProvidedBy(ob) - I1) + + with the exception that if ``I1`` is an interface that is + provided by ``ob`` through the class's implementation, + `ValueError` is raised. + """ + + def implements(*interfaces): + """Declare interfaces implemented by instances of a class + + This function is called in a class definition (Python 2.x only). + + The arguments are one or more interfaces or interface + specifications (`IDeclaration` objects). + + The interfaces given (including the interfaces in the + specifications) are added to any interfaces previously + declared. + + Previous declarations include declarations for base classes + unless implementsOnly was used. + + This function is provided for convenience. It provides a more + convenient way to call `classImplements`. For example:: + + implements(I1) + + is equivalent to calling:: + + classImplements(C, I1) + + after the class has been created. + + Consider the following example (Python 2.x only):: + + class C(A, B): + implements(I1, I2) + + + Instances of ``C`` implement ``I1``, ``I2``, and whatever interfaces + instances of ``A`` and ``B`` implement. + """ + + def implementsOnly(*interfaces): + """Declare the only interfaces implemented by instances of a class + + This function is called in a class definition (Python 2.x only). + + The arguments are one or more interfaces or interface + specifications (`IDeclaration` objects). + + Previous declarations including declarations for base classes + are overridden. + + This function is provided for convenience. 
It provides a more + convenient way to call `classImplementsOnly`. For example:: + + implementsOnly(I1) + + is equivalent to calling:: + + classImplementsOnly(I1) + + after the class has been created. + + Consider the following example (Python 2.x only):: + + class C(A, B): + implementsOnly(I1, I2) + + + Instances of ``C`` implement ``I1``, ``I2``, regardless of what + instances of ``A`` and ``B`` implement. + """ + + def classProvides(*interfaces): + """Declare interfaces provided directly by a class + + This function is called in a class definition. + + The arguments are one or more interfaces or interface + specifications (`IDeclaration` objects). + + The given interfaces (including the interfaces in the + specifications) are used to create the class's direct-object + interface specification. An error will be raised if the module + class has an direct interface specification. In other words, it is + an error to call this function more than once in a class + definition. + + Note that the given interfaces have nothing to do with the + interfaces implemented by instances of the class. + + This function is provided for convenience. It provides a more + convenient way to call `directlyProvides` for a class. For example:: + + classProvides(I1) + + is equivalent to calling:: + + directlyProvides(theclass, I1) + + after the class has been created. + """ + def provider(*interfaces): + """A class decorator version of `classProvides`""" + + def moduleProvides(*interfaces): + """Declare interfaces provided by a module + + This function is used in a module definition. + + The arguments are one or more interfaces or interface + specifications (`IDeclaration` objects). + + The given interfaces (including the interfaces in the + specifications) are used to create the module's direct-object + interface specification. An error will be raised if the module + already has an interface specification. 
In other words, it is + an error to call this function more than once in a module + definition. + + This function is provided for convenience. It provides a more + convenient way to call `directlyProvides` for a module. For example:: + + moduleImplements(I1) + + is equivalent to:: + + directlyProvides(sys.modules[__name__], I1) + """ + + def Declaration(*interfaces): + """Create an interface specification + + The arguments are one or more interfaces or interface + specifications (`IDeclaration` objects). + + A new interface specification (`IDeclaration`) with + the given interfaces is returned. + """ + +class IAdapterRegistry(Interface): + """Provide an interface-based registry for adapters + + This registry registers objects that are in some sense "from" a + sequence of specification to an interface and a name. + + No specific semantics are assumed for the registered objects, + however, the most common application will be to register factories + that adapt objects providing required specifications to a provided + interface. + """ + + def register(required, provided, name, value): + """Register a value + + A value is registered for a *sequence* of required specifications, a + provided interface, and a name, which must be text. + """ + + def registered(required, provided, name=_BLANK): + """Return the component registered for the given interfaces and name + + name must be text. + + Unlike the lookup method, this methods won't retrieve + components registered for more specific required interfaces or + less specific provided interfaces. + + If no component was registered exactly for the given + interfaces and name, then None is returned. + + """ + + def lookup(required, provided, name='', default=None): + """Lookup a value + + A value is looked up based on a *sequence* of required + specifications, a provided interface, and a name, which must be + text. 
+ """ + + def queryMultiAdapter(objects, provided, name=_BLANK, default=None): + """Adapt a sequence of objects to a named, provided, interface + """ + + def lookup1(required, provided, name=_BLANK, default=None): + """Lookup a value using a single required interface + + A value is looked up based on a single required + specifications, a provided interface, and a name, which must be + text. + """ + + def queryAdapter(object, provided, name=_BLANK, default=None): + """Adapt an object using a registered adapter factory. + """ + + def adapter_hook(provided, object, name=_BLANK, default=None): + """Adapt an object using a registered adapter factory. + + name must be text. + """ + + def lookupAll(required, provided): + """Find all adapters from the required to the provided interfaces + + An iterable object is returned that provides name-value two-tuples. + """ + + def names(required, provided): + """Return the names for which there are registered objects + """ + + def subscribe(required, provided, subscriber, name=_BLANK): + """Register a subscriber + + A subscriber is registered for a *sequence* of required + specifications, a provided interface, and a name. + + Multiple subscribers may be registered for the same (or + equivalent) interfaces. + """ + + def subscriptions(required, provided, name=_BLANK): + """Get a sequence of subscribers + + Subscribers for a *sequence* of required interfaces, and a provided + interface are returned. + """ + + def subscribers(objects, provided, name=_BLANK): + """Get a sequence of subscription adapters + """ + +# begin formerly in zope.component + +class ComponentLookupError(LookupError): + """A component could not be found.""" + +class Invalid(Exception): + """A component doesn't satisfy a promise.""" + +class IObjectEvent(Interface): + """An event related to an object. + + The object that generated this event is not necessarily the object + refered to by location. 
+ """ + + object = Attribute("The subject of the event.") + + +@implementer(IObjectEvent) +class ObjectEvent(object): + + def __init__(self, object): + self.object = object + +class IComponentLookup(Interface): + """Component Manager for a Site + + This object manages the components registered at a particular site. The + definition of a site is intentionally vague. + """ + + adapters = Attribute( + "Adapter Registry to manage all registered adapters.") + + utilities = Attribute( + "Adapter Registry to manage all registered utilities.") + + def queryAdapter(object, interface, name=_BLANK, default=None): + """Look for a named adapter to an interface for an object + + If a matching adapter cannot be found, returns the default. + """ + + def getAdapter(object, interface, name=_BLANK): + """Look for a named adapter to an interface for an object + + If a matching adapter cannot be found, a `ComponentLookupError` + is raised. + """ + + def queryMultiAdapter(objects, interface, name=_BLANK, default=None): + """Look for a multi-adapter to an interface for multiple objects + + If a matching adapter cannot be found, returns the default. + """ + + def getMultiAdapter(objects, interface, name=_BLANK): + """Look for a multi-adapter to an interface for multiple objects + + If a matching adapter cannot be found, a `ComponentLookupError` + is raised. + """ + + def getAdapters(objects, provided): + """Look for all matching adapters to a provided interface for objects + + Return an iterable of name-adapter pairs for adapters that + provide the given interface. + """ + + def subscribers(objects, provided): + """Get subscribers + + Subscribers are returned that provide the provided interface + and that depend on and are comuted from the sequence of + required objects. + """ + + def handle(*objects): + """Call handlers for the given objects + + Handlers registered for the given objects are called. 
+ """ + + def queryUtility(interface, name='', default=None): + """Look up a utility that provides an interface. + + If one is not found, returns default. + """ + + def getUtilitiesFor(interface): + """Look up the registered utilities that provide an interface. + + Returns an iterable of name-utility pairs. + """ + + def getAllUtilitiesRegisteredFor(interface): + """Return all registered utilities for an interface + + This includes overridden utilities. + + An iterable of utility instances is returned. No names are + returned. + """ + +class IRegistration(Interface): + """A registration-information object + """ + + registry = Attribute("The registry having the registration") + + name = Attribute("The registration name") + + info = Attribute("""Information about the registration + + This is information deemed useful to people browsing the + configuration of a system. It could, for example, include + commentary or information about the source of the configuration. + """) + +class IUtilityRegistration(IRegistration): + """Information about the registration of a utility + """ + + factory = Attribute("The factory used to create the utility. Optional.") + component = Attribute("The object registered") + provided = Attribute("The interface provided by the component") + +class _IBaseAdapterRegistration(IRegistration): + """Information about the registration of an adapter + """ + + factory = Attribute("The factory used to create adapters") + + required = Attribute("""The adapted interfaces + + This is a sequence of interfaces adapters by the registered + factory. The factory will be caled with a sequence of objects, as + positional arguments, that provide these interfaces. + """) + + provided = Attribute("""The interface provided by the adapters. 
+ + This interface is implemented by the factory + """) + +class IAdapterRegistration(_IBaseAdapterRegistration): + """Information about the registration of an adapter + """ + +class ISubscriptionAdapterRegistration(_IBaseAdapterRegistration): + """Information about the registration of a subscription adapter + """ + +class IHandlerRegistration(IRegistration): + + handler = Attribute("An object called used to handle an event") + + required = Attribute("""The handled interfaces + + This is a sequence of interfaces handled by the registered + handler. The handler will be caled with a sequence of objects, as + positional arguments, that provide these interfaces. + """) + +class IRegistrationEvent(IObjectEvent): + """An event that involves a registration""" + + +@implementer(IRegistrationEvent) +class RegistrationEvent(ObjectEvent): + """There has been a change in a registration + """ + def __repr__(self): + return "%s event:\n%r" % (self.__class__.__name__, self.object) + +class IRegistered(IRegistrationEvent): + """A component or factory was registered + """ + +@implementer(IRegistered) +class Registered(RegistrationEvent): + pass + +class IUnregistered(IRegistrationEvent): + """A component or factory was unregistered + """ + +@implementer(IUnregistered) +class Unregistered(RegistrationEvent): + """A component or factory was unregistered + """ + pass + +class IComponentRegistry(Interface): + """Register components + """ + + def registerUtility(component=None, provided=None, name=_BLANK, + info=_BLANK, factory=None): + """Register a utility + + :param factory: + Factory for the component to be registered. + + :param component: + The registered component + + :param provided: + This is the interface provided by the utility. If the + component provides a single interface, then this + argument is optional and the component-implemented + interface will be used. + + :param name: + The utility name. 
+ + :param info: + An object that can be converted to a string to provide + information about the registration. + + Only one of *component* and *factory* can be used. + + A `IRegistered` event is generated with an `IUtilityRegistration`. + """ + + def unregisterUtility(component=None, provided=None, name=_BLANK, + factory=None): + """Unregister a utility + + :returns: + A boolean is returned indicating whether the registry was + changed. If the given *component* is None and there is no + component registered, or if the given *component* is not + None and is not registered, then the function returns + False, otherwise it returns True. + + :param factory: + Factory for the component to be unregistered. + + :param component: + The registered component The given component can be + None, in which case any component registered to provide + the given provided interface with the given name is + unregistered. + + :param provided: + This is the interface provided by the utility. If the + component is not None and provides a single interface, + then this argument is optional and the + component-implemented interface will be used. + + :param name: + The utility name. + + Only one of *component* and *factory* can be used. + An `IUnregistered` event is generated with an `IUtilityRegistration`. + """ + + def registeredUtilities(): + """Return an iterable of `IUtilityRegistration` instances. + + These registrations describe the current utility registrations + in the object. + """ + + def registerAdapter(factory, required=None, provided=None, name=_BLANK, + info=_BLANK): + """Register an adapter factory + + :param factory: + The object used to compute the adapter + + :param required: + This is a sequence of specifications for objects to be + adapted. If omitted, then the value of the factory's + ``__component_adapts__`` attribute will be used. The + ``__component_adapts__`` attribute is + normally set in class definitions using + the `.adapter` + decorator. 
If the factory doesn't have a + ``__component_adapts__`` adapts attribute, then this + argument is required. + + :param provided: + This is the interface provided by the adapter and + implemented by the factory. If the factory + implements a single interface, then this argument is + optional and the factory-implemented interface will be + used. + + :param name: + The adapter name. + + :param info: + An object that can be converted to a string to provide + information about the registration. + + A `IRegistered` event is generated with an `IAdapterRegistration`. + """ + + def unregisterAdapter(factory=None, required=None, + provided=None, name=_BLANK): + """Unregister an adapter factory + + :returns: + A boolean is returned indicating whether the registry was + changed. If the given component is None and there is no + component registered, or if the given component is not + None and is not registered, then the function returns + False, otherwise it returns True. + + :param factory: + This is the object used to compute the adapter. The + factory can be None, in which case any factory + registered to implement the given provided interface + for the given required specifications with the given + name is unregistered. + + :param required: + This is a sequence of specifications for objects to be + adapted. If the factory is not None and the required + arguments is omitted, then the value of the factory's + __component_adapts__ attribute will be used. The + __component_adapts__ attribute attribute is normally + set in class definitions using adapts function, or for + callables using the adapter decorator. If the factory + is None or doesn't have a __component_adapts__ adapts + attribute, then this argument is required. + + :param provided: + This is the interface provided by the adapter and + implemented by the factory. If the factory is not + None and implements a single interface, then this + argument is optional and the factory-implemented + interface will be used. 
+ + :param name: + The adapter name. + + An `IUnregistered` event is generated with an `IAdapterRegistration`. + """ + + def registeredAdapters(): + """Return an iterable of `IAdapterRegistration` instances. + + These registrations describe the current adapter registrations + in the object. + """ + + def registerSubscriptionAdapter(factory, required=None, provides=None, + name=_BLANK, info=''): + """Register a subscriber factory + + :param factory: + The object used to compute the adapter + + :param required: + This is a sequence of specifications for objects to be + adapted. If omitted, then the value of the factory's + ``__component_adapts__`` attribute will be used. The + ``__component_adapts__`` attribute is + normally set using the adapter + decorator. If the factory doesn't have a + ``__component_adapts__`` adapts attribute, then this + argument is required. + + :param provided: + This is the interface provided by the adapter and + implemented by the factory. If the factory implements + a single interface, then this argument is optional and + the factory-implemented interface will be used. + + :param name: + The adapter name. + + Currently, only the empty string is accepted. Other + strings will be accepted in the future when support for + named subscribers is added. + + :param info: + An object that can be converted to a string to provide + information about the registration. + + A `IRegistered` event is generated with an + `ISubscriptionAdapterRegistration`. + """ + + def unregisterSubscriptionAdapter(factory=None, required=None, + provides=None, name=_BLANK): + """Unregister a subscriber factory. + + :returns: + A boolean is returned indicating whether the registry was + changed. If the given component is None and there is no + component registered, or if the given component is not + None and is not registered, then the function returns + False, otherwise it returns True. + + :param factory: + This is the object used to compute the adapter. 
The + factory can be None, in which case any factories + registered to implement the given provided interface + for the given required specifications with the given + name are unregistered. + + :param required: + This is a sequence of specifications for objects to be + adapted. If omitted, then the value of the factory's + ``__component_adapts__`` attribute will be used. The + ``__component_adapts__`` attribute is + normally set using the adapter + decorator. If the factory doesn't have a + ``__component_adapts__`` adapts attribute, then this + argument is required. + + :param provided: + This is the interface provided by the adapter and + implemented by the factory. If the factory is not + None implements a single interface, then this argument + is optional and the factory-implemented interface will + be used. + + :param name: + The adapter name. + + Currently, only the empty string is accepted. Other + strings will be accepted in the future when support for + named subscribers is added. + + An `IUnregistered` event is generated with an + `ISubscriptionAdapterRegistration`. + """ + + def registeredSubscriptionAdapters(): + """Return an iterable of `ISubscriptionAdapterRegistration` instances. + + These registrations describe the current subscription adapter + registrations in the object. + """ + + def registerHandler(handler, required=None, name=_BLANK, info=''): + """Register a handler. + + A handler is a subscriber that doesn't compute an adapter + but performs some function when called. + + :param handler: + The object used to handle some event represented by + the objects passed to it. + + :param required: + This is a sequence of specifications for objects to be + adapted. If omitted, then the value of the factory's + ``__component_adapts__`` attribute will be used. The + ``__component_adapts__`` attribute is + normally set using the adapter + decorator. 
If the factory doesn't have a + ``__component_adapts__`` adapts attribute, then this + argument is required. + + :param name: + The handler name. + + Currently, only the empty string is accepted. Other + strings will be accepted in the future when support for + named handlers is added. + + :param info: + An object that can be converted to a string to provide + information about the registration. + + + A `IRegistered` event is generated with an `IHandlerRegistration`. + """ + + def unregisterHandler(handler=None, required=None, name=_BLANK): + """Unregister a handler. + + A handler is a subscriber that doesn't compute an adapter + but performs some function when called. + + :returns: A boolean is returned indicating whether the registry was + changed. + + :param handler: + This is the object used to handle some event + represented by the objects passed to it. The handler + can be None, in which case any handlers registered for + the given required specifications with the given are + unregistered. + + :param required: + This is a sequence of specifications for objects to be + adapted. If omitted, then the value of the factory's + ``__component_adapts__`` attribute will be used. The + ``__component_adapts__`` attribute is + normally set using the adapter + decorator. If the factory doesn't have a + ``__component_adapts__`` adapts attribute, then this + argument is required. + + :param name: + The handler name. + + Currently, only the empty string is accepted. Other + strings will be accepted in the future when support for + named handlers is added. + + An `IUnregistered` event is generated with an `IHandlerRegistration`. + """ + + def registeredHandlers(): + """Return an iterable of `IHandlerRegistration` instances. + + These registrations describe the current handler registrations + in the object. 
+ """ + + +class IComponents(IComponentLookup, IComponentRegistry): + """Component registration and access + """ + + +# end formerly in zope.component diff --git a/thesisenv/lib/python3.6/site-packages/zope/interface/registry.py b/thesisenv/lib/python3.6/site-packages/zope/interface/registry.py new file mode 100644 index 0000000..bba0267 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/interface/registry.py @@ -0,0 +1,654 @@ +############################################################################## +# +# Copyright (c) 2006 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. 
+# +############################################################################## +"""Basic components support +""" +from collections import defaultdict + +try: + from zope.event import notify +except ImportError: # pragma: no cover + def notify(*arg, **kw): pass + +from zope.interface.interfaces import ISpecification +from zope.interface.interfaces import ComponentLookupError +from zope.interface.interfaces import IAdapterRegistration +from zope.interface.interfaces import IComponents +from zope.interface.interfaces import IHandlerRegistration +from zope.interface.interfaces import ISubscriptionAdapterRegistration +from zope.interface.interfaces import IUtilityRegistration +from zope.interface.interfaces import Registered +from zope.interface.interfaces import Unregistered + +from zope.interface.interface import Interface +from zope.interface.declarations import implementedBy +from zope.interface.declarations import implementer +from zope.interface.declarations import implementer_only +from zope.interface.declarations import providedBy +from zope.interface.adapter import AdapterRegistry +from zope.interface._compat import CLASS_TYPES +from zope.interface._compat import STRING_TYPES + + +class _UnhashableComponentCounter(object): + # defaultdict(int)-like object for unhashable components + + def __init__(self, otherdict): + # [(component, count)] + self._data = [item for item in otherdict.items()] + + def __getitem__(self, key): + for component, count in self._data: + if component == key: + return count + return 0 + + def __setitem__(self, component, count): + for i, data in enumerate(self._data): + if data[0] == component: + self._data[i] = component, count + return + self._data.append((component, count)) + + def __delitem__(self, component): + for i, data in enumerate(self._data): + if data[0] == component: + del self._data[i] + return + raise KeyError(component) # pragma: no cover + +def _defaultdict_int(): + return defaultdict(int) + +class 
_UtilityRegistrations(object): + + def __init__(self, utilities, utility_registrations): + # {provided -> {component: count}} + self._cache = defaultdict(_defaultdict_int) + self._utilities = utilities + self._utility_registrations = utility_registrations + + self.__populate_cache() + + def __populate_cache(self): + for ((p, _), data) in iter(self._utility_registrations.items()): + component = data[0] + self.__cache_utility(p, component) + + def __cache_utility(self, provided, component): + try: + self._cache[provided][component] += 1 + except TypeError: + # The component is not hashable, and we have a dict. Switch to a strategy + # that doesn't use hashing. + prov = self._cache[provided] = _UnhashableComponentCounter(self._cache[provided]) + prov[component] += 1 + + def __uncache_utility(self, provided, component): + provided = self._cache[provided] + # It seems like this line could raise a TypeError if component isn't + # hashable and we haven't yet switched to _UnhashableComponentCounter. However, + # we can't actually get in that situation. In order to get here, we would + # have had to cache the utility already which would have switched + # the datastructure if needed. 
+ count = provided[component] + count -= 1 + if count == 0: + del provided[component] + else: + provided[component] = count + return count > 0 + + def _is_utility_subscribed(self, provided, component): + try: + return self._cache[provided][component] > 0 + except TypeError: + # Not hashable and we're still using a dict + return False + + def registerUtility(self, provided, name, component, info, factory): + subscribed = self._is_utility_subscribed(provided, component) + + self._utility_registrations[(provided, name)] = component, info, factory + self._utilities.register((), provided, name, component) + + if not subscribed: + self._utilities.subscribe((), provided, component) + + self.__cache_utility(provided, component) + + def unregisterUtility(self, provided, name, component): + del self._utility_registrations[(provided, name)] + self._utilities.unregister((), provided, name) + + subscribed = self.__uncache_utility(provided, component) + + if not subscribed: + self._utilities.unsubscribe((), provided, component) + + +@implementer(IComponents) +class Components(object): + + _v_utility_registrations_cache = None + + def __init__(self, name='', bases=()): + # __init__ is used for test cleanup as well as initialization. + # XXX add a separate API for test cleanup. + assert isinstance(name, STRING_TYPES) + self.__name__ = name + self._init_registries() + self._init_registrations() + self.__bases__ = tuple(bases) + self._v_utility_registrations_cache = None + + def __repr__(self): + return "<%s %s>" % (self.__class__.__name__, self.__name__) + + def __reduce__(self): + # Mimic what a persistent.Persistent object does and elide + # _v_ attributes so that they don't get saved in ZODB. + # This allows us to store things that cannot be pickled in such + # attributes. 
+ reduction = super(Components, self).__reduce__() + # (callable, args, state, listiter, dictiter) + # We assume the state is always a dict; the last three items + # are technically optional and can be missing or None. + filtered_state = {k: v for k, v in reduction[2].items() + if not k.startswith('_v_')} + reduction = list(reduction) + reduction[2] = filtered_state + return tuple(reduction) + + def _init_registries(self): + # Subclasses have never been required to call this method + # if they override it, merely to fill in these two attributes. + self.adapters = AdapterRegistry() + self.utilities = AdapterRegistry() + + def _init_registrations(self): + self._utility_registrations = {} + self._adapter_registrations = {} + self._subscription_registrations = [] + self._handler_registrations = [] + + @property + def _utility_registrations_cache(self): + # We use a _v_ attribute internally so that data aren't saved in ZODB, + # because this object cannot be pickled. + cache = self._v_utility_registrations_cache + if (cache is None + or cache._utilities is not self.utilities + or cache._utility_registrations is not self._utility_registrations): + cache = self._v_utility_registrations_cache = _UtilityRegistrations( + self.utilities, + self._utility_registrations) + return cache + + def _getBases(self): + # Subclasses might override + return self.__dict__.get('__bases__', ()) + + def _setBases(self, bases): + # Subclasses might override + self.adapters.__bases__ = tuple([ + base.adapters for base in bases]) + self.utilities.__bases__ = tuple([ + base.utilities for base in bases]) + self.__dict__['__bases__'] = tuple(bases) + + __bases__ = property( + lambda self: self._getBases(), + lambda self, bases: self._setBases(bases), + ) + + def registerUtility(self, component=None, provided=None, name=u'', + info=u'', event=True, factory=None): + if factory: + if component: + raise TypeError("Can't specify factory and component.") + component = factory() + + if provided is None: 
+ provided = _getUtilityProvided(component) + + if name == u'': + name = _getName(component) + + reg = self._utility_registrations.get((provided, name)) + if reg is not None: + if reg[:2] == (component, info): + # already registered + return + self.unregisterUtility(reg[0], provided, name) + + self._utility_registrations_cache.registerUtility( + provided, name, component, info, factory) + + if event: + notify(Registered( + UtilityRegistration(self, provided, name, component, info, + factory) + )) + + def unregisterUtility(self, component=None, provided=None, name=u'', + factory=None): + if factory: + if component: + raise TypeError("Can't specify factory and component.") + component = factory() + + if provided is None: + if component is None: + raise TypeError("Must specify one of component, factory and " + "provided") + provided = _getUtilityProvided(component) + + old = self._utility_registrations.get((provided, name)) + if (old is None) or ((component is not None) and + (component != old[0])): + return False + + if component is None: + component = old[0] + + # Note that component is now the old thing registered + self._utility_registrations_cache.unregisterUtility( + provided, name, component) + + notify(Unregistered( + UtilityRegistration(self, provided, name, component, *old[1:]) + )) + + return True + + def registeredUtilities(self): + for ((provided, name), data + ) in iter(self._utility_registrations.items()): + yield UtilityRegistration(self, provided, name, *data) + + def queryUtility(self, provided, name=u'', default=None): + return self.utilities.lookup((), provided, name, default) + + def getUtility(self, provided, name=u''): + utility = self.utilities.lookup((), provided, name) + if utility is None: + raise ComponentLookupError(provided, name) + return utility + + def getUtilitiesFor(self, interface): + for name, utility in self.utilities.lookupAll((), interface): + yield name, utility + + def getAllUtilitiesRegisteredFor(self, interface): + return 
self.utilities.subscriptions((), interface) + + def registerAdapter(self, factory, required=None, provided=None, + name=u'', info=u'', event=True): + if provided is None: + provided = _getAdapterProvided(factory) + required = _getAdapterRequired(factory, required) + if name == u'': + name = _getName(factory) + self._adapter_registrations[(required, provided, name) + ] = factory, info + self.adapters.register(required, provided, name, factory) + + if event: + notify(Registered( + AdapterRegistration(self, required, provided, name, + factory, info) + )) + + + def unregisterAdapter(self, factory=None, + required=None, provided=None, name=u'', + ): + if provided is None: + if factory is None: + raise TypeError("Must specify one of factory and provided") + provided = _getAdapterProvided(factory) + + if (required is None) and (factory is None): + raise TypeError("Must specify one of factory and required") + + required = _getAdapterRequired(factory, required) + old = self._adapter_registrations.get((required, provided, name)) + if (old is None) or ((factory is not None) and + (factory != old[0])): + return False + + del self._adapter_registrations[(required, provided, name)] + self.adapters.unregister(required, provided, name) + + notify(Unregistered( + AdapterRegistration(self, required, provided, name, + *old) + )) + + return True + + def registeredAdapters(self): + for ((required, provided, name), (component, info) + ) in iter(self._adapter_registrations.items()): + yield AdapterRegistration(self, required, provided, name, + component, info) + + def queryAdapter(self, object, interface, name=u'', default=None): + return self.adapters.queryAdapter(object, interface, name, default) + + def getAdapter(self, object, interface, name=u''): + adapter = self.adapters.queryAdapter(object, interface, name) + if adapter is None: + raise ComponentLookupError(object, interface, name) + return adapter + + def queryMultiAdapter(self, objects, interface, name=u'', + default=None): + 
return self.adapters.queryMultiAdapter( + objects, interface, name, default) + + def getMultiAdapter(self, objects, interface, name=u''): + adapter = self.adapters.queryMultiAdapter(objects, interface, name) + if adapter is None: + raise ComponentLookupError(objects, interface, name) + return adapter + + def getAdapters(self, objects, provided): + for name, factory in self.adapters.lookupAll( + list(map(providedBy, objects)), + provided): + adapter = factory(*objects) + if adapter is not None: + yield name, adapter + + def registerSubscriptionAdapter(self, + factory, required=None, provided=None, + name=u'', info=u'', + event=True): + if name: + raise TypeError("Named subscribers are not yet supported") + if provided is None: + provided = _getAdapterProvided(factory) + required = _getAdapterRequired(factory, required) + self._subscription_registrations.append( + (required, provided, name, factory, info) + ) + self.adapters.subscribe(required, provided, factory) + + if event: + notify(Registered( + SubscriptionRegistration(self, required, provided, name, + factory, info) + )) + + def registeredSubscriptionAdapters(self): + for data in self._subscription_registrations: + yield SubscriptionRegistration(self, *data) + + def unregisterSubscriptionAdapter(self, factory=None, + required=None, provided=None, name=u'', + ): + if name: + raise TypeError("Named subscribers are not yet supported") + if provided is None: + if factory is None: + raise TypeError("Must specify one of factory and provided") + provided = _getAdapterProvided(factory) + + if (required is None) and (factory is None): + raise TypeError("Must specify one of factory and required") + + required = _getAdapterRequired(factory, required) + + if factory is None: + new = [(r, p, n, f, i) + for (r, p, n, f, i) + in self._subscription_registrations + if not (r == required and p == provided) + ] + else: + new = [(r, p, n, f, i) + for (r, p, n, f, i) + in self._subscription_registrations + if not (r == required and 
p == provided and f == factory) + ] + + if len(new) == len(self._subscription_registrations): + return False + + + self._subscription_registrations[:] = new + self.adapters.unsubscribe(required, provided, factory) + + notify(Unregistered( + SubscriptionRegistration(self, required, provided, name, + factory, '') + )) + + return True + + def subscribers(self, objects, provided): + return self.adapters.subscribers(objects, provided) + + def registerHandler(self, + factory, required=None, + name=u'', info=u'', + event=True): + if name: + raise TypeError("Named handlers are not yet supported") + required = _getAdapterRequired(factory, required) + self._handler_registrations.append( + (required, name, factory, info) + ) + self.adapters.subscribe(required, None, factory) + + if event: + notify(Registered( + HandlerRegistration(self, required, name, factory, info) + )) + + def registeredHandlers(self): + for data in self._handler_registrations: + yield HandlerRegistration(self, *data) + + def unregisterHandler(self, factory=None, required=None, name=u''): + if name: + raise TypeError("Named subscribers are not yet supported") + + if (required is None) and (factory is None): + raise TypeError("Must specify one of factory and required") + + required = _getAdapterRequired(factory, required) + + if factory is None: + new = [(r, n, f, i) + for (r, n, f, i) + in self._handler_registrations + if r != required + ] + else: + new = [(r, n, f, i) + for (r, n, f, i) + in self._handler_registrations + if not (r == required and f == factory) + ] + + if len(new) == len(self._handler_registrations): + return False + + self._handler_registrations[:] = new + self.adapters.unsubscribe(required, None, factory) + + notify(Unregistered( + HandlerRegistration(self, required, name, factory, '') + )) + + return True + + def handle(self, *objects): + self.adapters.subscribers(objects, None) + + +def _getName(component): + try: + return component.__component_name__ + except AttributeError: + return 
u'' + +def _getUtilityProvided(component): + provided = list(providedBy(component)) + if len(provided) == 1: + return provided[0] + raise TypeError( + "The utility doesn't provide a single interface " + "and no provided interface was specified.") + +def _getAdapterProvided(factory): + provided = list(implementedBy(factory)) + if len(provided) == 1: + return provided[0] + raise TypeError( + "The adapter factory doesn't implement a single interface " + "and no provided interface was specified.") + +def _getAdapterRequired(factory, required): + if required is None: + try: + required = factory.__component_adapts__ + except AttributeError: + raise TypeError( + "The adapter factory doesn't have a __component_adapts__ " + "attribute and no required specifications were specified" + ) + elif ISpecification.providedBy(required): + raise TypeError("the required argument should be a list of " + "interfaces, not a single interface") + + result = [] + for r in required: + if r is None: + r = Interface + elif not ISpecification.providedBy(r): + if isinstance(r, CLASS_TYPES): + r = implementedBy(r) + else: + raise TypeError("Required specification must be a " + "specification or class." 
+ ) + result.append(r) + return tuple(result) + + +@implementer(IUtilityRegistration) +class UtilityRegistration(object): + + def __init__(self, registry, provided, name, component, doc, factory=None): + (self.registry, self.provided, self.name, self.component, self.info, + self.factory + ) = registry, provided, name, component, doc, factory + + def __repr__(self): + return '%s(%r, %s, %r, %s, %r, %r)' % ( + self.__class__.__name__, + self.registry, + getattr(self.provided, '__name__', None), self.name, + getattr(self.component, '__name__', repr(self.component)), + self.factory, self.info, + ) + + def __hash__(self): + return id(self) + + def __eq__(self, other): + return repr(self) == repr(other) + + def __ne__(self, other): + return repr(self) != repr(other) + + def __lt__(self, other): + return repr(self) < repr(other) + + def __le__(self, other): + return repr(self) <= repr(other) + + def __gt__(self, other): + return repr(self) > repr(other) + + def __ge__(self, other): + return repr(self) >= repr(other) + +@implementer(IAdapterRegistration) +class AdapterRegistration(object): + + def __init__(self, registry, required, provided, name, component, doc): + (self.registry, self.required, self.provided, self.name, + self.factory, self.info + ) = registry, required, provided, name, component, doc + + def __repr__(self): + return '%s(%r, %s, %s, %r, %s, %r)' % ( + self.__class__.__name__, + self.registry, + '[' + ", ".join([r.__name__ for r in self.required]) + ']', + getattr(self.provided, '__name__', None), self.name, + getattr(self.factory, '__name__', repr(self.factory)), self.info, + ) + + def __hash__(self): + return id(self) + + def __eq__(self, other): + return repr(self) == repr(other) + + def __ne__(self, other): + return repr(self) != repr(other) + + def __lt__(self, other): + return repr(self) < repr(other) + + def __le__(self, other): + return repr(self) <= repr(other) + + def __gt__(self, other): + return repr(self) > repr(other) + + def __ge__(self, 
other): + return repr(self) >= repr(other) + +@implementer_only(ISubscriptionAdapterRegistration) +class SubscriptionRegistration(AdapterRegistration): + pass + + +@implementer_only(IHandlerRegistration) +class HandlerRegistration(AdapterRegistration): + + def __init__(self, registry, required, name, handler, doc): + (self.registry, self.required, self.name, self.handler, self.info + ) = registry, required, name, handler, doc + + @property + def factory(self): + return self.handler + + provided = None + + def __repr__(self): + return '%s(%r, %s, %r, %s, %r)' % ( + self.__class__.__name__, + self.registry, + '[' + ", ".join([r.__name__ for r in self.required]) + ']', + self.name, + getattr(self.factory, '__name__', repr(self.factory)), self.info, + ) diff --git a/thesisenv/lib/python3.6/site-packages/zope/interface/ro.py b/thesisenv/lib/python3.6/site-packages/zope/interface/ro.py new file mode 100644 index 0000000..84f10dc --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/interface/ro.py @@ -0,0 +1,64 @@ +############################################################################## +# +# Copyright (c) 2003 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. 
+# +############################################################################## +"""Compute a resolution order for an object and its bases +""" +__docformat__ = 'restructuredtext' + +def _mergeOrderings(orderings): + """Merge multiple orderings so that within-ordering order is preserved + + Orderings are constrained in such a way that if an object appears + in two or more orderings, then the suffix that begins with the + object must be in both orderings. + + For example: + + >>> _mergeOrderings([ + ... ['x', 'y', 'z'], + ... ['q', 'z'], + ... [1, 3, 5], + ... ['z'] + ... ]) + ['x', 'y', 'q', 1, 3, 5, 'z'] + + """ + + seen = {} + result = [] + for ordering in reversed(orderings): + for o in reversed(ordering): + if o not in seen: + seen[o] = 1 + result.insert(0, o) + + return result + +def _flatten(ob): + result = [ob] + i = 0 + for ob in iter(result): + i += 1 + # The recursive calls can be avoided by inserting the base classes + # into the dynamically growing list directly after the currently + # considered object; the iterator makes sure this will keep working + # in the future, since it cannot rely on the length of the list + # by definition. + result[i:i] = ob.__bases__ + return result + + +def ro(object): + """Compute a "resolution order" for an object + """ + return _mergeOrderings([_flatten(object)]) diff --git a/thesisenv/lib/python3.6/site-packages/zope/interface/tests/__init__.py b/thesisenv/lib/python3.6/site-packages/zope/interface/tests/__init__.py new file mode 100644 index 0000000..15259c1 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/interface/tests/__init__.py @@ -0,0 +1 @@ +# Make this directory a package. 
diff --git a/thesisenv/lib/python3.6/site-packages/zope/interface/tests/advisory_testing.py b/thesisenv/lib/python3.6/site-packages/zope/interface/tests/advisory_testing.py new file mode 100644 index 0000000..b159e93 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/interface/tests/advisory_testing.py @@ -0,0 +1,42 @@ +############################################################################## +# +# Copyright (c) 2003 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +import sys + +from zope.interface.advice import addClassAdvisor +from zope.interface.advice import getFrameInfo + +my_globals = globals() + +def ping(log, value): + + def pong(klass): + log.append((value,klass)) + return [klass] + + addClassAdvisor(pong) + +try: + from types import ClassType + + class ClassicClass: + __metaclass__ = ClassType + classLevelFrameInfo = getFrameInfo(sys._getframe()) +except ImportError: + ClassicClass = None + +class NewStyleClass: + __metaclass__ = type + classLevelFrameInfo = getFrameInfo(sys._getframe()) + +moduleLevelFrameInfo = getFrameInfo(sys._getframe()) diff --git a/thesisenv/lib/python3.6/site-packages/zope/interface/tests/dummy.py b/thesisenv/lib/python3.6/site-packages/zope/interface/tests/dummy.py new file mode 100644 index 0000000..6b142ff --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/interface/tests/dummy.py @@ -0,0 +1,23 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope 
Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +""" Dummy Module +""" +from zope.interface import moduleProvides +from zope.interface.tests.idummy import IDummyModule + +moduleProvides(IDummyModule) + +def bar(baz): + # Note: no 'self', because the module provides the interface directly. + raise NotImplementedError() diff --git a/thesisenv/lib/python3.6/site-packages/zope/interface/tests/idummy.py b/thesisenv/lib/python3.6/site-packages/zope/interface/tests/idummy.py new file mode 100644 index 0000000..1e34fe0 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/interface/tests/idummy.py @@ -0,0 +1,23 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +""" Interface describing API of zope.interface.tests.dummy test module +""" +from zope.interface import Interface + +class IDummyModule(Interface): + """ Dummy interface for unit tests. + """ + def bar(baz): + """ Just a note. 
+ """ diff --git a/thesisenv/lib/python3.6/site-packages/zope/interface/tests/ifoo.py b/thesisenv/lib/python3.6/site-packages/zope/interface/tests/ifoo.py new file mode 100644 index 0000000..29a7877 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/interface/tests/ifoo.py @@ -0,0 +1,26 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""IFoo test module +""" +from zope.interface import Interface + +class IFoo(Interface): + """ + Dummy interface for unit tests. + """ + + def bar(baz): + """ + Just a note. + """ diff --git a/thesisenv/lib/python3.6/site-packages/zope/interface/tests/ifoo_other.py b/thesisenv/lib/python3.6/site-packages/zope/interface/tests/ifoo_other.py new file mode 100644 index 0000000..29a7877 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/interface/tests/ifoo_other.py @@ -0,0 +1,26 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""IFoo test module +""" +from zope.interface import Interface + +class IFoo(Interface): + """ + Dummy interface for unit tests. + """ + + def bar(baz): + """ + Just a note. + """ diff --git a/thesisenv/lib/python3.6/site-packages/zope/interface/tests/m1.py b/thesisenv/lib/python3.6/site-packages/zope/interface/tests/m1.py new file mode 100644 index 0000000..d311fb4 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/interface/tests/m1.py @@ -0,0 +1,21 @@ +############################################################################## +# +# Copyright (c) 2004 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. 
+# +############################################################################## +"""Test module that declares an interface +""" +from zope.interface import Interface, moduleProvides + +class I1(Interface): pass +class I2(Interface): pass + +moduleProvides(I1, I2) diff --git a/thesisenv/lib/python3.6/site-packages/zope/interface/tests/m2.py b/thesisenv/lib/python3.6/site-packages/zope/interface/tests/m2.py new file mode 100644 index 0000000..511cd9c --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/interface/tests/m2.py @@ -0,0 +1,15 @@ +############################################################################## +# +# Copyright (c) 2004 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Test module that doesn't declare an interface +""" diff --git a/thesisenv/lib/python3.6/site-packages/zope/interface/tests/odd.py b/thesisenv/lib/python3.6/site-packages/zope/interface/tests/odd.py new file mode 100644 index 0000000..74c6158 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/interface/tests/odd.py @@ -0,0 +1,128 @@ +############################################################################## +# +# Copyright (c) 2003 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Odd meta class that doesn't subclass type. + +This is used for testing support for ExtensionClass in new interfaces. + + >>> class A(object): + ... __metaclass__ = MetaClass + ... a = 1 + ... + >>> A.__name__ + 'A' + >>> A.__bases__ == (object,) + True + >>> class B(object): + ... __metaclass__ = MetaClass + ... b = 1 + ... + >>> class C(A, B): pass + ... + >>> C.__name__ + 'C' + >>> int(C.__bases__ == (A, B)) + 1 + >>> a = A() + >>> aa = A() + >>> a.a + 1 + >>> aa.a + 1 + >>> aa.a = 2 + >>> a.a + 1 + >>> aa.a + 2 + >>> c = C() + >>> c.a + 1 + >>> c.b + 1 + >>> c.b = 2 + >>> c.b + 2 + >>> C.c = 1 + >>> c.c + 1 + >>> import sys + >>> if sys.version[0] == '2': # This test only makes sense under Python 2.x + ... from types import ClassType + ... 
assert not isinstance(C, (type, ClassType)) + + >>> int(C.__class__.__class__ is C.__class__) + 1 +""" + +# class OddClass is an odd meta class + +class MetaMetaClass(type): + + def __getattribute__(cls, name): + if name == '__class__': + return cls + # Under Python 3.6, __prepare__ gets requested + return type.__getattribute__(cls, name) + + +class MetaClass(object): + """Odd classes + """ + + def __init__(self, name, bases, dict): + self.__name__ = name + self.__bases__ = bases + self.__dict__.update(dict) + + def __call__(self): + return OddInstance(self) + + def __getattr__(self, name): + for b in self.__bases__: + v = getattr(b, name, self) + if v is not self: + return v + raise AttributeError(name) + + def __repr__(self): # pragma: no cover + return "" % (self.__name__, hex(id(self))) + + +MetaClass = MetaMetaClass('MetaClass', + MetaClass.__bases__, + {k: v for k, v in MetaClass.__dict__.items() + if k not in ('__dict__',)}) + +class OddInstance(object): + + def __init__(self, cls): + self.__dict__['__class__'] = cls + + def __getattribute__(self, name): + dict = object.__getattribute__(self, '__dict__') + if name == '__dict__': + return dict + v = dict.get(name, self) + if v is not self: + return v + return getattr(dict['__class__'], name) + + def __setattr__(self, name, v): + self.__dict__[name] = v + + def __delattr__(self, name): + raise NotImplementedError() + + def __repr__(self): # pragma: no cover + return "" % ( + self.__class__.__name__, hex(id(self))) diff --git a/thesisenv/lib/python3.6/site-packages/zope/interface/tests/test_adapter.py b/thesisenv/lib/python3.6/site-packages/zope/interface/tests/test_adapter.py new file mode 100644 index 0000000..41c618c --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/interface/tests/test_adapter.py @@ -0,0 +1,1419 @@ +############################################################################## +# +# Copyright (c) 2003 Zope Foundation and Contributors. +# All Rights Reserved. 
+# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Adapter registry tests +""" +import unittest + + +def _makeInterfaces(): + from zope.interface import Interface + + class IB0(Interface): pass + class IB1(IB0): pass + class IB2(IB0): pass + class IB3(IB2, IB1): pass + class IB4(IB1, IB2): pass + + class IF0(Interface): pass + class IF1(IF0): pass + + class IR0(Interface): pass + class IR1(IR0): pass + + return IB0, IB1, IB2, IB3, IB4, IF0, IF1, IR0, IR1 + + +class BaseAdapterRegistryTests(unittest.TestCase): + + def _getTargetClass(self): + from zope.interface.adapter import BaseAdapterRegistry + class _CUT(BaseAdapterRegistry): + class LookupClass(object): + _changed = _extendors = () + def __init__(self, reg): + pass + def changed(self, orig): + self._changed += (orig,) + def add_extendor(self, provided): + self._extendors += (provided,) + def remove_extendor(self, provided): + self._extendors = tuple([x for x in self._extendors + if x != provided]) + for name in BaseAdapterRegistry._delegated: + setattr(_CUT.LookupClass, name, object()) + return _CUT + + def _makeOne(self): + return self._getTargetClass()() + + def test_lookup_delegation(self): + CUT = self._getTargetClass() + registry = CUT() + for name in CUT._delegated: + self.assertTrue( + getattr(registry, name) is getattr(registry._v_lookup, name)) + + def test__generation_on_first_creation(self): + registry = self._makeOne() + # Bumped to 1 in BaseAdapterRegistry.__init__ + self.assertEqual(registry._generation, 1) + + def test__generation_after_calling_changed(self): + 
registry = self._makeOne() + orig = object() + registry.changed(orig) + # Bumped to 1 in BaseAdapterRegistry.__init__ + self.assertEqual(registry._generation, 2) + self.assertEqual(registry._v_lookup._changed, (registry, orig,)) + + def test__generation_after_changing___bases__(self): + class _Base(object): pass + registry = self._makeOne() + registry.__bases__ = (_Base,) + self.assertEqual(registry._generation, 2) + + def test_register(self): + IB0, IB1, IB2, IB3, IB4, IF0, IF1, IR0, IR1 = _makeInterfaces() + registry = self._makeOne() + registry.register([IB0], IR0, '', 'A1') + self.assertEqual(registry.registered([IB0], IR0, ''), 'A1') + self.assertEqual(len(registry._adapters), 2) #order 0 and order 1 + self.assertEqual(registry._generation, 2) + + def test_register_with_invalid_name(self): + IB0, IB1, IB2, IB3, IB4, IF0, IF1, IR0, IR1 = _makeInterfaces() + registry = self._makeOne() + with self.assertRaises(ValueError): + registry.register([IB0], IR0, object(), 'A1') + + def test_register_with_value_None_unregisters(self): + IB0, IB1, IB2, IB3, IB4, IF0, IF1, IR0, IR1 = _makeInterfaces() + registry = self._makeOne() + registry.register([None], IR0, '', 'A1') + registry.register([None], IR0, '', None) + self.assertEqual(len(registry._adapters), 0) + + def test_register_with_same_value(self): + IB0, IB1, IB2, IB3, IB4, IF0, IF1, IR0, IR1 = _makeInterfaces() + registry = self._makeOne() + _value = object() + registry.register([None], IR0, '', _value) + _before = registry._generation + registry.register([None], IR0, '', _value) + self.assertEqual(registry._generation, _before) # skipped changed() + + def test_registered_empty(self): + registry = self._makeOne() + self.assertEqual(registry.registered([None], None, ''), None) + + def test_registered_non_empty_miss(self): + IB0, IB1, IB2, IB3, IB4, IF0, IF1, IR0, IR1 = _makeInterfaces() + registry = self._makeOne() + registry.register([IB1], None, '', 'A1') + self.assertEqual(registry.registered([IB2], None, ''), 
None) + + def test_registered_non_empty_hit(self): + registry = self._makeOne() + registry.register([None], None, '', 'A1') + self.assertEqual(registry.registered([None], None, ''), 'A1') + + def test_unregister_empty(self): + registry = self._makeOne() + registry.unregister([None], None, '') #doesn't raise + self.assertEqual(registry.registered([None], None, ''), None) + + def test_unregister_non_empty_miss_on_required(self): + IB0, IB1, IB2, IB3, IB4, IF0, IF1, IR0, IR1 = _makeInterfaces() + registry = self._makeOne() + registry.register([IB1], None, '', 'A1') + registry.unregister([IB2], None, '') #doesn't raise + self.assertEqual(registry.registered([IB1], None, ''), 'A1') + + def test_unregister_non_empty_miss_on_name(self): + IB0, IB1, IB2, IB3, IB4, IF0, IF1, IR0, IR1 = _makeInterfaces() + registry = self._makeOne() + registry.register([IB1], None, '', 'A1') + registry.unregister([IB1], None, 'nonesuch') #doesn't raise + self.assertEqual(registry.registered([IB1], None, ''), 'A1') + + def test_unregister_with_value_not_None_miss(self): + IB0, IB1, IB2, IB3, IB4, IF0, IF1, IR0, IR1 = _makeInterfaces() + registry = self._makeOne() + orig = object() + nomatch = object() + registry.register([IB1], None, '', orig) + registry.unregister([IB1], None, '', nomatch) #doesn't raise + self.assertTrue(registry.registered([IB1], None, '') is orig) + + def test_unregister_hit_clears_empty_subcomponents(self): + IB0, IB1, IB2, IB3, IB4, IF0, IF1, IR0, IR1 = _makeInterfaces() + registry = self._makeOne() + one = object() + another = object() + registry.register([IB1, IB2], None, '', one) + registry.register([IB1, IB3], None, '', another) + self.assertTrue(IB2 in registry._adapters[2][IB1]) + self.assertTrue(IB3 in registry._adapters[2][IB1]) + registry.unregister([IB1, IB3], None, '', another) + self.assertTrue(IB2 in registry._adapters[2][IB1]) + self.assertFalse(IB3 in registry._adapters[2][IB1]) + + def test_unsubscribe_empty(self): + registry = self._makeOne() + 
registry.unsubscribe([None], None, '') #doesn't raise + self.assertEqual(registry.registered([None], None, ''), None) + + def test_unsubscribe_hit(self): + IB0, IB1, IB2, IB3, IB4, IF0, IF1, IR0, IR1 = _makeInterfaces() + registry = self._makeOne() + orig = object() + registry.subscribe([IB1], None, orig) + registry.unsubscribe([IB1], None, orig) #doesn't raise + self.assertEqual(len(registry._subscribers), 0) + + def test_unsubscribe_after_multiple(self): + IB0, IB1, IB2, IB3, IB4, IF0, IF1, IR0, IR1 = _makeInterfaces() + registry = self._makeOne() + first = object() + second = object() + third = object() + fourth = object() + registry.subscribe([IB1], None, first) + registry.subscribe([IB1], None, second) + registry.subscribe([IB1], IR0, third) + registry.subscribe([IB1], IR0, fourth) + registry.unsubscribe([IB1], IR0, fourth) + registry.unsubscribe([IB1], IR0, third) + registry.unsubscribe([IB1], None, second) + registry.unsubscribe([IB1], None, first) + self.assertEqual(len(registry._subscribers), 0) + + def test_unsubscribe_w_None_after_multiple(self): + IB0, IB1, IB2, IB3, IB4, IF0, IF1, IR0, IR1 = _makeInterfaces() + registry = self._makeOne() + first = object() + second = object() + third = object() + registry.subscribe([IB1], None, first) + registry.subscribe([IB1], None, second) + registry.unsubscribe([IB1], None) + self.assertEqual(len(registry._subscribers), 0) + + def test_unsubscribe_non_empty_miss_on_required(self): + IB0, IB1, IB2, IB3, IB4, IF0, IF1, IR0, IR1 = _makeInterfaces() + registry = self._makeOne() + registry.subscribe([IB1], None, 'A1') + self.assertEqual(len(registry._subscribers), 2) + registry.unsubscribe([IB2], None, '') #doesn't raise + self.assertEqual(len(registry._subscribers), 2) + + def test_unsubscribe_non_empty_miss_on_value(self): + IB0, IB1, IB2, IB3, IB4, IF0, IF1, IR0, IR1 = _makeInterfaces() + registry = self._makeOne() + registry.subscribe([IB1], None, 'A1') + self.assertEqual(len(registry._subscribers), 2) + 
registry.unsubscribe([IB1], None, 'A2') #doesn't raise + self.assertEqual(len(registry._subscribers), 2) + + def test_unsubscribe_with_value_not_None_miss(self): + IB0, IB1, IB2, IB3, IB4, IF0, IF1, IR0, IR1 = _makeInterfaces() + registry = self._makeOne() + orig = object() + nomatch = object() + registry.subscribe([IB1], None, orig) + registry.unsubscribe([IB1], None, nomatch) #doesn't raise + self.assertEqual(len(registry._subscribers), 2) + + def _instance_method_notify_target(self): + self.fail("Example method, not intended to be called.") + + def test_unsubscribe_instance_method(self): + IB0, IB1, IB2, IB3, IB4, IF0, IF1, IR0, IR1 = _makeInterfaces() + registry = self._makeOne() + self.assertEqual(len(registry._subscribers), 0) + registry.subscribe([IB1], None, self._instance_method_notify_target) + registry.unsubscribe([IB1], None, self._instance_method_notify_target) + self.assertEqual(len(registry._subscribers), 0) + + +class LookupBaseFallbackTests(unittest.TestCase): + + def _getTargetClass(self): + from zope.interface.adapter import LookupBaseFallback + return LookupBaseFallback + + def _makeOne(self, uc_lookup=None, uc_lookupAll=None, + uc_subscriptions=None): + if uc_lookup is None: + def uc_lookup(self, required, provided, name): + pass + if uc_lookupAll is None: + def uc_lookupAll(self, required, provided): + raise NotImplementedError() + if uc_subscriptions is None: + def uc_subscriptions(self, required, provided): + raise NotImplementedError() + class Derived(self._getTargetClass()): + _uncached_lookup = uc_lookup + _uncached_lookupAll = uc_lookupAll + _uncached_subscriptions = uc_subscriptions + return Derived() + + def test_lookup_w_invalid_name(self): + def _lookup(self, required, provided, name): + self.fail("This should never be called") + lb = self._makeOne(uc_lookup=_lookup) + with self.assertRaises(ValueError): + lb.lookup(('A',), 'B', object()) + + def test_lookup_miss_no_default(self): + _called_with = [] + def _lookup(self, required, 
provided, name): + _called_with.append((required, provided, name)) + return None + lb = self._makeOne(uc_lookup=_lookup) + found = lb.lookup(('A',), 'B', 'C') + self.assertTrue(found is None) + self.assertEqual(_called_with, [(('A',), 'B', 'C')]) + + def test_lookup_miss_w_default(self): + _called_with = [] + _default = object() + def _lookup(self, required, provided, name): + _called_with.append((required, provided, name)) + return None + lb = self._makeOne(uc_lookup=_lookup) + found = lb.lookup(('A',), 'B', 'C', _default) + self.assertTrue(found is _default) + self.assertEqual(_called_with, [(('A',), 'B', 'C')]) + + def test_lookup_not_cached(self): + _called_with = [] + a, b, c = object(), object(), object() + _results = [a, b, c] + def _lookup(self, required, provided, name): + _called_with.append((required, provided, name)) + return _results.pop(0) + lb = self._makeOne(uc_lookup=_lookup) + found = lb.lookup(('A',), 'B', 'C') + self.assertTrue(found is a) + self.assertEqual(_called_with, [(('A',), 'B', 'C')]) + self.assertEqual(_results, [b, c]) + + def test_lookup_cached(self): + _called_with = [] + a, b, c = object(), object(), object() + _results = [a, b, c] + def _lookup(self, required, provided, name): + _called_with.append((required, provided, name)) + return _results.pop(0) + lb = self._makeOne(uc_lookup=_lookup) + found = lb.lookup(('A',), 'B', 'C') + found = lb.lookup(('A',), 'B', 'C') + self.assertTrue(found is a) + self.assertEqual(_called_with, [(('A',), 'B', 'C')]) + self.assertEqual(_results, [b, c]) + + def test_lookup_not_cached_multi_required(self): + _called_with = [] + a, b, c = object(), object(), object() + _results = [a, b, c] + def _lookup(self, required, provided, name): + _called_with.append((required, provided, name)) + return _results.pop(0) + lb = self._makeOne(uc_lookup=_lookup) + found = lb.lookup(('A', 'D'), 'B', 'C') + self.assertTrue(found is a) + self.assertEqual(_called_with, [(('A', 'D'), 'B', 'C')]) + 
self.assertEqual(_results, [b, c]) + + def test_lookup_cached_multi_required(self): + _called_with = [] + a, b, c = object(), object(), object() + _results = [a, b, c] + def _lookup(self, required, provided, name): + _called_with.append((required, provided, name)) + return _results.pop(0) + lb = self._makeOne(uc_lookup=_lookup) + found = lb.lookup(('A', 'D'), 'B', 'C') + found = lb.lookup(('A', 'D'), 'B', 'C') + self.assertTrue(found is a) + self.assertEqual(_called_with, [(('A', 'D'), 'B', 'C')]) + self.assertEqual(_results, [b, c]) + + def test_lookup_not_cached_after_changed(self): + _called_with = [] + a, b, c = object(), object(), object() + _results = [a, b, c] + def _lookup(self, required, provided, name): + _called_with.append((required, provided, name)) + return _results.pop(0) + lb = self._makeOne(uc_lookup=_lookup) + found = lb.lookup(('A',), 'B', 'C') + lb.changed(lb) + found = lb.lookup(('A',), 'B', 'C') + self.assertTrue(found is b) + self.assertEqual(_called_with, + [(('A',), 'B', 'C'), (('A',), 'B', 'C')]) + self.assertEqual(_results, [c]) + + def test_lookup1_w_invalid_name(self): + def _lookup(self, required, provided, name): + self.fail("This should never be called") + + lb = self._makeOne(uc_lookup=_lookup) + with self.assertRaises(ValueError): + lb.lookup1('A', 'B', object()) + + def test_lookup1_miss_no_default(self): + _called_with = [] + def _lookup(self, required, provided, name): + _called_with.append((required, provided, name)) + return None + lb = self._makeOne(uc_lookup=_lookup) + found = lb.lookup1('A', 'B', 'C') + self.assertTrue(found is None) + self.assertEqual(_called_with, [(('A',), 'B', 'C')]) + + def test_lookup1_miss_w_default(self): + _called_with = [] + _default = object() + def _lookup(self, required, provided, name): + _called_with.append((required, provided, name)) + return None + lb = self._makeOne(uc_lookup=_lookup) + found = lb.lookup1('A', 'B', 'C', _default) + self.assertTrue(found is _default) + 
self.assertEqual(_called_with, [(('A',), 'B', 'C')]) + + def test_lookup1_miss_w_default_negative_cache(self): + _called_with = [] + _default = object() + def _lookup(self, required, provided, name): + _called_with.append((required, provided, name)) + return None + lb = self._makeOne(uc_lookup=_lookup) + found = lb.lookup1('A', 'B', 'C', _default) + self.assertTrue(found is _default) + found = lb.lookup1('A', 'B', 'C', _default) + self.assertTrue(found is _default) + self.assertEqual(_called_with, [(('A',), 'B', 'C')]) + + def test_lookup1_not_cached(self): + _called_with = [] + a, b, c = object(), object(), object() + _results = [a, b, c] + def _lookup(self, required, provided, name): + _called_with.append((required, provided, name)) + return _results.pop(0) + lb = self._makeOne(uc_lookup=_lookup) + found = lb.lookup1('A', 'B', 'C') + self.assertTrue(found is a) + self.assertEqual(_called_with, [(('A',), 'B', 'C')]) + self.assertEqual(_results, [b, c]) + + def test_lookup1_cached(self): + _called_with = [] + a, b, c = object(), object(), object() + _results = [a, b, c] + def _lookup(self, required, provided, name): + _called_with.append((required, provided, name)) + return _results.pop(0) + lb = self._makeOne(uc_lookup=_lookup) + found = lb.lookup1('A', 'B', 'C') + found = lb.lookup1('A', 'B', 'C') + self.assertTrue(found is a) + self.assertEqual(_called_with, [(('A',), 'B', 'C')]) + self.assertEqual(_results, [b, c]) + + def test_lookup1_not_cached_after_changed(self): + _called_with = [] + a, b, c = object(), object(), object() + _results = [a, b, c] + def _lookup(self, required, provided, name): + _called_with.append((required, provided, name)) + return _results.pop(0) + lb = self._makeOne(uc_lookup=_lookup) + found = lb.lookup1('A', 'B', 'C') + lb.changed(lb) + found = lb.lookup1('A', 'B', 'C') + self.assertTrue(found is b) + self.assertEqual(_called_with, + [(('A',), 'B', 'C'), (('A',), 'B', 'C')]) + self.assertEqual(_results, [c]) + + def 
test_adapter_hook_w_invalid_name(self): + req, prv = object(), object() + lb = self._makeOne() + with self.assertRaises(ValueError): + lb.adapter_hook(prv, req, object()) + + def test_adapter_hook_miss_no_default(self): + req, prv = object(), object() + lb = self._makeOne() + found = lb.adapter_hook(prv, req, '') + self.assertTrue(found is None) + + def test_adapter_hook_miss_w_default(self): + req, prv, _default = object(), object(), object() + lb = self._makeOne() + found = lb.adapter_hook(prv, req, '', _default) + self.assertTrue(found is _default) + + def test_adapter_hook_hit_factory_returns_None(self): + _f_called_with = [] + def _factory(context): + _f_called_with.append(context) + return None + def _lookup(self, required, provided, name): + return _factory + req, prv, _default = object(), object(), object() + lb = self._makeOne(uc_lookup=_lookup) + adapted = lb.adapter_hook(prv, req, 'C', _default) + self.assertTrue(adapted is _default) + self.assertEqual(_f_called_with, [req]) + + def test_adapter_hook_hit_factory_returns_adapter(self): + _f_called_with = [] + _adapter = object() + def _factory(context): + _f_called_with.append(context) + return _adapter + def _lookup(self, required, provided, name): + return _factory + req, prv, _default = object(), object(), object() + lb = self._makeOne(uc_lookup=_lookup) + adapted = lb.adapter_hook(prv, req, 'C', _default) + self.assertTrue(adapted is _adapter) + self.assertEqual(_f_called_with, [req]) + + def test_queryAdapter(self): + _f_called_with = [] + _adapter = object() + def _factory(context): + _f_called_with.append(context) + return _adapter + def _lookup(self, required, provided, name): + return _factory + req, prv, _default = object(), object(), object() + lb = self._makeOne(uc_lookup=_lookup) + adapted = lb.queryAdapter(req, prv, 'C', _default) + self.assertTrue(adapted is _adapter) + self.assertEqual(_f_called_with, [req]) + + def test_lookupAll_uncached(self): + _called_with = [] + _results = [object(), 
object(), object()] + def _lookupAll(self, required, provided): + _called_with.append((required, provided)) + return tuple(_results) + lb = self._makeOne(uc_lookupAll=_lookupAll) + found = lb.lookupAll('A', 'B') + self.assertEqual(found, tuple(_results)) + self.assertEqual(_called_with, [(('A',), 'B')]) + + def test_lookupAll_cached(self): + _called_with = [] + _results = [object(), object(), object()] + def _lookupAll(self, required, provided): + _called_with.append((required, provided)) + return tuple(_results) + lb = self._makeOne(uc_lookupAll=_lookupAll) + found = lb.lookupAll('A', 'B') + found = lb.lookupAll('A', 'B') + self.assertEqual(found, tuple(_results)) + self.assertEqual(_called_with, [(('A',), 'B')]) + + def test_subscriptions_uncached(self): + _called_with = [] + _results = [object(), object(), object()] + def _subscriptions(self, required, provided): + _called_with.append((required, provided)) + return tuple(_results) + lb = self._makeOne(uc_subscriptions=_subscriptions) + found = lb.subscriptions('A', 'B') + self.assertEqual(found, tuple(_results)) + self.assertEqual(_called_with, [(('A',), 'B')]) + + def test_subscriptions_cached(self): + _called_with = [] + _results = [object(), object(), object()] + def _subscriptions(self, required, provided): + _called_with.append((required, provided)) + return tuple(_results) + lb = self._makeOne(uc_subscriptions=_subscriptions) + found = lb.subscriptions('A', 'B') + found = lb.subscriptions('A', 'B') + self.assertEqual(found, tuple(_results)) + self.assertEqual(_called_with, [(('A',), 'B')]) + + +class LookupBaseTests(LookupBaseFallbackTests): + + def _getTargetClass(self): + from zope.interface.adapter import LookupBase + return LookupBase + + def test_optimizations(self): + from zope.interface.adapter import LookupBaseFallback + try: + import zope.interface._zope_interface_coptimizations + except ImportError: + self.assertIs(self._getTargetClass(), LookupBaseFallback) + else: + 
self.assertIsNot(self._getTargetClass(), LookupBaseFallback) + + +class VerifyingBaseFallbackTests(unittest.TestCase): + + def _getTargetClass(self): + from zope.interface.adapter import VerifyingBaseFallback + return VerifyingBaseFallback + + def _makeOne(self, registry, uc_lookup=None, uc_lookupAll=None, + uc_subscriptions=None): + if uc_lookup is None: + def uc_lookup(self, required, provided, name): + raise NotImplementedError() + if uc_lookupAll is None: + def uc_lookupAll(self, required, provided): + raise NotImplementedError() + if uc_subscriptions is None: + def uc_subscriptions(self, required, provided): + raise NotImplementedError() + class Derived(self._getTargetClass()): + _uncached_lookup = uc_lookup + _uncached_lookupAll = uc_lookupAll + _uncached_subscriptions = uc_subscriptions + def __init__(self, registry): + super(Derived, self).__init__() + self._registry = registry + derived = Derived(registry) + derived.changed(derived) # init. '_verify_ro' / '_verify_generations' + return derived + + def _makeRegistry(self, depth): + class WithGeneration(object): + _generation = 1 + class Registry: + def __init__(self, depth): + self.ro = [WithGeneration() for i in range(depth)] + return Registry(depth) + + def test_lookup(self): + _called_with = [] + a, b, c = object(), object(), object() + _results = [a, b, c] + def _lookup(self, required, provided, name): + _called_with.append((required, provided, name)) + return _results.pop(0) + reg = self._makeRegistry(3) + lb = self._makeOne(reg, uc_lookup=_lookup) + found = lb.lookup(('A',), 'B', 'C') + found = lb.lookup(('A',), 'B', 'C') + self.assertTrue(found is a) + self.assertEqual(_called_with, [(('A',), 'B', 'C')]) + self.assertEqual(_results, [b, c]) + reg.ro[1]._generation += 1 + found = lb.lookup(('A',), 'B', 'C') + self.assertTrue(found is b) + self.assertEqual(_called_with, + [(('A',), 'B', 'C'), (('A',), 'B', 'C')]) + self.assertEqual(_results, [c]) + + def test_lookup1(self): + _called_with = [] + a, b, 
c = object(), object(), object() + _results = [a, b, c] + def _lookup(self, required, provided, name): + _called_with.append((required, provided, name)) + return _results.pop(0) + reg = self._makeRegistry(3) + lb = self._makeOne(reg, uc_lookup=_lookup) + found = lb.lookup1('A', 'B', 'C') + found = lb.lookup1('A', 'B', 'C') + self.assertTrue(found is a) + self.assertEqual(_called_with, [(('A',), 'B', 'C')]) + self.assertEqual(_results, [b, c]) + reg.ro[1]._generation += 1 + found = lb.lookup1('A', 'B', 'C') + self.assertTrue(found is b) + self.assertEqual(_called_with, + [(('A',), 'B', 'C'), (('A',), 'B', 'C')]) + self.assertEqual(_results, [c]) + + def test_adapter_hook(self): + a, b, _c = [object(), object(), object()] + def _factory1(context): + return a + def _factory2(context): + return b + def _factory3(context): + self.fail("This should never be called") + _factories = [_factory1, _factory2, _factory3] + def _lookup(self, required, provided, name): + return _factories.pop(0) + req, prv, _default = object(), object(), object() + reg = self._makeRegistry(3) + lb = self._makeOne(reg, uc_lookup=_lookup) + adapted = lb.adapter_hook(prv, req, 'C', _default) + self.assertTrue(adapted is a) + adapted = lb.adapter_hook(prv, req, 'C', _default) + self.assertTrue(adapted is a) + reg.ro[1]._generation += 1 + adapted = lb.adapter_hook(prv, req, 'C', _default) + self.assertTrue(adapted is b) + + def test_queryAdapter(self): + a, b, _c = [object(), object(), object()] + def _factory1(context): + return a + def _factory2(context): + return b + def _factory3(context): + self.fail("This should never be called") + _factories = [_factory1, _factory2, _factory3] + def _lookup(self, required, provided, name): + return _factories.pop(0) + req, prv, _default = object(), object(), object() + reg = self._makeRegistry(3) + lb = self._makeOne(reg, uc_lookup=_lookup) + adapted = lb.queryAdapter(req, prv, 'C', _default) + self.assertTrue(adapted is a) + adapted = lb.queryAdapter(req, prv, 
'C', _default) + self.assertTrue(adapted is a) + reg.ro[1]._generation += 1 + adapted = lb.adapter_hook(prv, req, 'C', _default) + self.assertTrue(adapted is b) + + def test_lookupAll(self): + _results_1 = [object(), object(), object()] + _results_2 = [object(), object(), object()] + _results = [_results_1, _results_2] + def _lookupAll(self, required, provided): + return tuple(_results.pop(0)) + reg = self._makeRegistry(3) + lb = self._makeOne(reg, uc_lookupAll=_lookupAll) + found = lb.lookupAll('A', 'B') + self.assertEqual(found, tuple(_results_1)) + found = lb.lookupAll('A', 'B') + self.assertEqual(found, tuple(_results_1)) + reg.ro[1]._generation += 1 + found = lb.lookupAll('A', 'B') + self.assertEqual(found, tuple(_results_2)) + + def test_subscriptions(self): + _results_1 = [object(), object(), object()] + _results_2 = [object(), object(), object()] + _results = [_results_1, _results_2] + def _subscriptions(self, required, provided): + return tuple(_results.pop(0)) + reg = self._makeRegistry(3) + lb = self._makeOne(reg, uc_subscriptions=_subscriptions) + found = lb.subscriptions('A', 'B') + self.assertEqual(found, tuple(_results_1)) + found = lb.subscriptions('A', 'B') + self.assertEqual(found, tuple(_results_1)) + reg.ro[1]._generation += 1 + found = lb.subscriptions('A', 'B') + self.assertEqual(found, tuple(_results_2)) + + +class VerifyingBaseTests(VerifyingBaseFallbackTests): + + def _getTargetClass(self): + from zope.interface.adapter import VerifyingBase + return VerifyingBase + + def test_optimizations(self): + from zope.interface.adapter import VerifyingBaseFallback + try: + import zope.interface._zope_interface_coptimizations + except ImportError: + self.assertIs(self._getTargetClass(), VerifyingBaseFallback) + else: + self.assertIsNot(self._getTargetClass(), VerifyingBaseFallback) + + +class AdapterLookupBaseTests(unittest.TestCase): + + def _getTargetClass(self): + from zope.interface.adapter import AdapterLookupBase + return AdapterLookupBase + + 
def _makeOne(self, registry): + return self._getTargetClass()(registry) + + def _makeSubregistry(self, *provided): + class Subregistry: + def __init__(self): + self._adapters = [] + self._subscribers = [] + return Subregistry() + + def _makeRegistry(self, *provided): + class Registry: + def __init__(self, provided): + self._provided = provided + self.ro = [] + return Registry(provided) + + def test_ctor_empty_registry(self): + registry = self._makeRegistry() + alb = self._makeOne(registry) + self.assertEqual(alb._extendors, {}) + + def test_ctor_w_registry_provided(self): + from zope.interface import Interface + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass('IFoo') + IBar = InterfaceClass('IBar', IFoo) + registry = self._makeRegistry(IFoo, IBar) + alb = self._makeOne(registry) + self.assertEqual(sorted(alb._extendors.keys()), + sorted([IBar, IFoo, Interface])) + self.assertEqual(alb._extendors[IFoo], [IFoo]) + self.assertEqual(alb._extendors[IBar], [IBar]) + self.assertEqual(sorted(alb._extendors[Interface]), + sorted([IFoo, IBar])) + + def test_changed_empty_required(self): + # ALB.changed expects to call a mixed in changed. + class Mixin(object): + def changed(self, *other): + pass + class Derived(self._getTargetClass(), Mixin): + pass + registry = self._makeRegistry() + alb = Derived(registry) + alb.changed(alb) + + def test_changed_w_required(self): + # ALB.changed expects to call a mixed in changed. 
+ class Mixin(object): + def changed(self, *other): + pass + class Derived(self._getTargetClass(), Mixin): + pass + class FauxWeakref(object): + _unsub = None + def __init__(self, here): + self._here = here + def __call__(self): + if self._here: + return self + def unsubscribe(self, target): + self._unsub = target + gone = FauxWeakref(False) + here = FauxWeakref(True) + registry = self._makeRegistry() + alb = Derived(registry) + alb._required[gone] = 1 + alb._required[here] = 1 + alb.changed(alb) + self.assertEqual(len(alb._required), 0) + self.assertEqual(gone._unsub, None) + self.assertEqual(here._unsub, alb) + + def test_init_extendors_after_registry_update(self): + from zope.interface import Interface + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass('IFoo') + IBar = InterfaceClass('IBar', IFoo) + registry = self._makeRegistry() + alb = self._makeOne(registry) + registry._provided = [IFoo, IBar] + alb.init_extendors() + self.assertEqual(sorted(alb._extendors.keys()), + sorted([IBar, IFoo, Interface])) + self.assertEqual(alb._extendors[IFoo], [IFoo]) + self.assertEqual(alb._extendors[IBar], [IBar]) + self.assertEqual(sorted(alb._extendors[Interface]), + sorted([IFoo, IBar])) + + def test_add_extendor(self): + from zope.interface import Interface + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass('IFoo') + IBar = InterfaceClass('IBar', IFoo) + registry = self._makeRegistry() + alb = self._makeOne(registry) + alb.add_extendor(IFoo) + alb.add_extendor(IBar) + self.assertEqual(sorted(alb._extendors.keys()), + sorted([IBar, IFoo, Interface])) + self.assertEqual(alb._extendors[IFoo], [IFoo]) + self.assertEqual(alb._extendors[IBar], [IBar]) + self.assertEqual(sorted(alb._extendors[Interface]), + sorted([IFoo, IBar])) + + def test_remove_extendor(self): + from zope.interface import Interface + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass('IFoo') + IBar = InterfaceClass('IBar', IFoo) + 
registry = self._makeRegistry(IFoo, IBar) + alb = self._makeOne(registry) + alb.remove_extendor(IFoo) + self.assertEqual(sorted(alb._extendors.keys()), + sorted([IFoo, IBar, Interface])) + self.assertEqual(alb._extendors[IFoo], []) + self.assertEqual(alb._extendors[IBar], [IBar]) + self.assertEqual(sorted(alb._extendors[Interface]), + sorted([IBar])) + + # test '_subscribe' via its callers, '_uncached_lookup', etc. + + def test__uncached_lookup_empty_ro(self): + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass('IFoo') + IBar = InterfaceClass('IBar', IFoo) + registry = self._makeRegistry() + alb = self._makeOne(registry) + result = alb._uncached_lookup((IFoo,), IBar) + self.assertEqual(result, None) + self.assertEqual(len(alb._required), 1) + self.assertTrue(IFoo.weakref() in alb._required) + + def test__uncached_lookup_order_miss(self): + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass('IFoo') + IBar = InterfaceClass('IBar', IFoo) + registry = self._makeRegistry(IFoo, IBar) + subr = self._makeSubregistry() + registry.ro.append(subr) + alb = self._makeOne(registry) + result = alb._uncached_lookup((IFoo,), IBar) + self.assertEqual(result, None) + + def test__uncached_lookup_extendors_miss(self): + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass('IFoo') + IBar = InterfaceClass('IBar', IFoo) + registry = self._makeRegistry() + subr = self._makeSubregistry() + subr._adapters = [{}, {}] #utilities, single adapters + registry.ro.append(subr) + alb = self._makeOne(registry) + subr._v_lookup = alb + result = alb._uncached_lookup((IFoo,), IBar) + self.assertEqual(result, None) + + def test__uncached_lookup_components_miss_wrong_iface(self): + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass('IFoo') + IBar = InterfaceClass('IBar', IFoo) + IQux = InterfaceClass('IQux') + registry = self._makeRegistry(IFoo, IBar) + subr = self._makeSubregistry() + irrelevant = object() 
+ subr._adapters = [ #utilities, single adapters + {}, + {IFoo: {IQux: {'': irrelevant}, + }}, + ] + registry.ro.append(subr) + alb = self._makeOne(registry) + subr._v_lookup = alb + result = alb._uncached_lookup((IFoo,), IBar) + self.assertEqual(result, None) + + def test__uncached_lookup_components_miss_wrong_name(self): + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass('IFoo') + IBar = InterfaceClass('IBar', IFoo) + registry = self._makeRegistry(IFoo, IBar) + subr = self._makeSubregistry() + irrelevant = object() + wrongname = object() + subr._adapters = [ #utilities, single adapters + {}, + {IFoo: {IBar: {'wrongname': wrongname}, + }}, + ] + registry.ro.append(subr) + alb = self._makeOne(registry) + subr._v_lookup = alb + result = alb._uncached_lookup((IFoo,), IBar) + self.assertEqual(result, None) + + def test__uncached_lookup_simple_hit(self): + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass('IFoo') + IBar = InterfaceClass('IBar', IFoo) + registry = self._makeRegistry(IFoo, IBar) + subr = self._makeSubregistry() + _expected = object() + subr._adapters = [ #utilities, single adapters + {}, + {IFoo: {IBar: {'': _expected}}}, + ] + registry.ro.append(subr) + alb = self._makeOne(registry) + subr._v_lookup = alb + result = alb._uncached_lookup((IFoo,), IBar) + self.assertTrue(result is _expected) + + def test__uncached_lookup_repeated_hit(self): + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass('IFoo') + IBar = InterfaceClass('IBar', IFoo) + registry = self._makeRegistry(IFoo, IBar) + subr = self._makeSubregistry() + _expected = object() + subr._adapters = [ #utilities, single adapters + {}, + {IFoo: {IBar: {'': _expected}}}, + ] + registry.ro.append(subr) + alb = self._makeOne(registry) + subr._v_lookup = alb + result = alb._uncached_lookup((IFoo,), IBar) + result2 = alb._uncached_lookup((IFoo,), IBar) + self.assertTrue(result is _expected) + self.assertTrue(result2 is _expected) 
+ + def test_queryMultiAdaptor_lookup_miss(self): + from zope.interface.declarations import implementer + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass('IFoo') + IBar = InterfaceClass('IBar', IFoo) + @implementer(IFoo) + class Foo(object): + pass + foo = Foo() + registry = self._makeRegistry() + subr = self._makeSubregistry() + subr._adapters = [ #utilities, single adapters + {}, + {}, + ] + registry.ro.append(subr) + alb = self._makeOne(registry) + alb.lookup = alb._uncached_lookup # provided by derived + subr._v_lookup = alb + _default = object() + result = alb.queryMultiAdapter((foo,), IBar, default=_default) + self.assertTrue(result is _default) + + def test_queryMultiAdaptor_factory_miss(self): + from zope.interface.declarations import implementer + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass('IFoo') + IBar = InterfaceClass('IBar', IFoo) + @implementer(IFoo) + class Foo(object): + pass + foo = Foo() + registry = self._makeRegistry(IFoo, IBar) + subr = self._makeSubregistry() + _expected = object() + _called_with = [] + def _factory(context): + _called_with.append(context) + return None + subr._adapters = [ #utilities, single adapters + {}, + {IFoo: {IBar: {'': _factory}}}, + ] + registry.ro.append(subr) + alb = self._makeOne(registry) + alb.lookup = alb._uncached_lookup # provided by derived + subr._v_lookup = alb + _default = object() + result = alb.queryMultiAdapter((foo,), IBar, default=_default) + self.assertTrue(result is _default) + self.assertEqual(_called_with, [foo]) + + def test_queryMultiAdaptor_factory_hit(self): + from zope.interface.declarations import implementer + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass('IFoo') + IBar = InterfaceClass('IBar', IFoo) + @implementer(IFoo) + class Foo(object): + pass + foo = Foo() + registry = self._makeRegistry(IFoo, IBar) + subr = self._makeSubregistry() + _expected = object() + _called_with = [] + def 
_factory(context): + _called_with.append(context) + return _expected + subr._adapters = [ #utilities, single adapters + {}, + {IFoo: {IBar: {'': _factory}}}, + ] + registry.ro.append(subr) + alb = self._makeOne(registry) + alb.lookup = alb._uncached_lookup # provided by derived + subr._v_lookup = alb + _default = object() + result = alb.queryMultiAdapter((foo,), IBar, default=_default) + self.assertTrue(result is _expected) + self.assertEqual(_called_with, [foo]) + + def test__uncached_lookupAll_empty_ro(self): + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass('IFoo') + IBar = InterfaceClass('IBar', IFoo) + registry = self._makeRegistry() + alb = self._makeOne(registry) + result = alb._uncached_lookupAll((IFoo,), IBar) + self.assertEqual(result, ()) + self.assertEqual(len(alb._required), 1) + self.assertTrue(IFoo.weakref() in alb._required) + + def test__uncached_lookupAll_order_miss(self): + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass('IFoo') + IBar = InterfaceClass('IBar', IFoo) + registry = self._makeRegistry(IFoo, IBar) + subr = self._makeSubregistry() + registry.ro.append(subr) + alb = self._makeOne(registry) + subr._v_lookup = alb + result = alb._uncached_lookupAll((IFoo,), IBar) + self.assertEqual(result, ()) + + def test__uncached_lookupAll_extendors_miss(self): + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass('IFoo') + IBar = InterfaceClass('IBar', IFoo) + registry = self._makeRegistry() + subr = self._makeSubregistry() + subr._adapters = [{}, {}] #utilities, single adapters + registry.ro.append(subr) + alb = self._makeOne(registry) + subr._v_lookup = alb + result = alb._uncached_lookupAll((IFoo,), IBar) + self.assertEqual(result, ()) + + def test__uncached_lookupAll_components_miss(self): + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass('IFoo') + IBar = InterfaceClass('IBar', IFoo) + IQux = InterfaceClass('IQux') + registry = 
self._makeRegistry(IFoo, IBar) + subr = self._makeSubregistry() + irrelevant = object() + subr._adapters = [ #utilities, single adapters + {}, + {IFoo: {IQux: {'': irrelevant}}}, + ] + registry.ro.append(subr) + alb = self._makeOne(registry) + subr._v_lookup = alb + result = alb._uncached_lookupAll((IFoo,), IBar) + self.assertEqual(result, ()) + + def test__uncached_lookupAll_simple_hit(self): + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass('IFoo') + IBar = InterfaceClass('IBar', IFoo) + registry = self._makeRegistry(IFoo, IBar) + subr = self._makeSubregistry() + _expected = object() + _named = object() + subr._adapters = [ #utilities, single adapters + {}, + {IFoo: {IBar: {'': _expected, 'named': _named}}}, + ] + registry.ro.append(subr) + alb = self._makeOne(registry) + subr._v_lookup = alb + result = alb._uncached_lookupAll((IFoo,), IBar) + self.assertEqual(sorted(result), [('', _expected), ('named', _named)]) + + def test_names(self): + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass('IFoo') + IBar = InterfaceClass('IBar', IFoo) + registry = self._makeRegistry(IFoo, IBar) + subr = self._makeSubregistry() + _expected = object() + _named = object() + subr._adapters = [ #utilities, single adapters + {}, + {IFoo: {IBar: {'': _expected, 'named': _named}}}, + ] + registry.ro.append(subr) + alb = self._makeOne(registry) + alb.lookupAll = alb._uncached_lookupAll + subr._v_lookup = alb + result = alb.names((IFoo,), IBar) + self.assertEqual(sorted(result), ['', 'named']) + + def test__uncached_subscriptions_empty_ro(self): + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass('IFoo') + IBar = InterfaceClass('IBar', IFoo) + registry = self._makeRegistry() + alb = self._makeOne(registry) + result = alb._uncached_subscriptions((IFoo,), IBar) + self.assertEqual(result, []) + self.assertEqual(len(alb._required), 1) + self.assertTrue(IFoo.weakref() in alb._required) + + def 
test__uncached_subscriptions_order_miss(self): + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass('IFoo') + IBar = InterfaceClass('IBar', IFoo) + registry = self._makeRegistry(IFoo, IBar) + subr = self._makeSubregistry() + registry.ro.append(subr) + alb = self._makeOne(registry) + subr._v_lookup = alb + result = alb._uncached_subscriptions((IFoo,), IBar) + self.assertEqual(result, []) + + def test__uncached_subscriptions_extendors_miss(self): + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass('IFoo') + IBar = InterfaceClass('IBar', IFoo) + registry = self._makeRegistry() + subr = self._makeSubregistry() + subr._subscribers = [{}, {}] #utilities, single adapters + registry.ro.append(subr) + alb = self._makeOne(registry) + subr._v_lookup = alb + result = alb._uncached_subscriptions((IFoo,), IBar) + self.assertEqual(result, []) + + def test__uncached_subscriptions_components_miss_wrong_iface(self): + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass('IFoo') + IBar = InterfaceClass('IBar', IFoo) + IQux = InterfaceClass('IQux') + registry = self._makeRegistry(IFoo, IBar) + subr = self._makeSubregistry() + irrelevant = object() + subr._subscribers = [ #utilities, single adapters + {}, + {IFoo: {IQux: {'': irrelevant}}}, + ] + registry.ro.append(subr) + alb = self._makeOne(registry) + subr._v_lookup = alb + result = alb._uncached_subscriptions((IFoo,), IBar) + self.assertEqual(result, []) + + def test__uncached_subscriptions_components_miss_wrong_name(self): + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass('IFoo') + IBar = InterfaceClass('IBar', IFoo) + registry = self._makeRegistry(IFoo, IBar) + subr = self._makeSubregistry() + wrongname = object() + subr._subscribers = [ #utilities, single adapters + {}, + {IFoo: {IBar: {'wrongname': wrongname}}}, + ] + registry.ro.append(subr) + alb = self._makeOne(registry) + subr._v_lookup = alb + result = 
alb._uncached_subscriptions((IFoo,), IBar) + self.assertEqual(result, []) + + def test__uncached_subscriptions_simple_hit(self): + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass('IFoo') + IBar = InterfaceClass('IBar', IFoo) + registry = self._makeRegistry(IFoo, IBar) + subr = self._makeSubregistry() + class Foo(object): + def __lt__(self, other): + return True + _exp1, _exp2 = Foo(), Foo() + subr._subscribers = [ #utilities, single adapters + {}, + {IFoo: {IBar: {'': (_exp1, _exp2)}}}, + ] + registry.ro.append(subr) + alb = self._makeOne(registry) + subr._v_lookup = alb + result = alb._uncached_subscriptions((IFoo,), IBar) + self.assertEqual(sorted(result), sorted([_exp1, _exp2])) + + def test_subscribers_wo_provided(self): + from zope.interface.declarations import implementer + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass('IFoo') + IBar = InterfaceClass('IBar', IFoo) + @implementer(IFoo) + class Foo(object): + pass + foo = Foo() + registry = self._makeRegistry(IFoo, IBar) + registry = self._makeRegistry(IFoo, IBar) + subr = self._makeSubregistry() + _called = {} + def _factory1(context): + _called.setdefault('_factory1', []).append(context) + def _factory2(context): + _called.setdefault('_factory2', []).append(context) + subr._subscribers = [ #utilities, single adapters + {}, + {IFoo: {None: {'': (_factory1, _factory2)}}}, + ] + registry.ro.append(subr) + alb = self._makeOne(registry) + alb.subscriptions = alb._uncached_subscriptions + subr._v_lookup = alb + result = alb.subscribers((foo,), None) + self.assertEqual(result, ()) + self.assertEqual(_called, {'_factory1': [foo], '_factory2': [foo]}) + + def test_subscribers_w_provided(self): + from zope.interface.declarations import implementer + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass('IFoo') + IBar = InterfaceClass('IBar', IFoo) + @implementer(IFoo) + class Foo(object): + pass + foo = Foo() + registry = 
self._makeRegistry(IFoo, IBar) + registry = self._makeRegistry(IFoo, IBar) + subr = self._makeSubregistry() + _called = {} + _exp1, _exp2 = object(), object() + def _factory1(context): + _called.setdefault('_factory1', []).append(context) + return _exp1 + def _factory2(context): + _called.setdefault('_factory2', []).append(context) + return _exp2 + def _side_effect_only(context): + _called.setdefault('_side_effect_only', []).append(context) + return None + subr._subscribers = [ #utilities, single adapters + {}, + {IFoo: {IBar: {'': (_factory1, _factory2, _side_effect_only)}}}, + ] + registry.ro.append(subr) + alb = self._makeOne(registry) + alb.subscriptions = alb._uncached_subscriptions + subr._v_lookup = alb + result = alb.subscribers((foo,), IBar) + self.assertEqual(result, [_exp1, _exp2]) + self.assertEqual(_called, + {'_factory1': [foo], + '_factory2': [foo], + '_side_effect_only': [foo], + }) + + +class AdapterRegistryTests(unittest.TestCase): + + def _getTargetClass(self): + from zope.interface.adapter import AdapterRegistry + return AdapterRegistry + + def _makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + def test_ctor_no_bases(self): + ar = self._makeOne() + self.assertEqual(len(ar._v_subregistries), 0) + + def test_ctor_w_bases(self): + base = self._makeOne() + sub = self._makeOne([base]) + self.assertEqual(len(sub._v_subregistries), 0) + self.assertEqual(len(base._v_subregistries), 1) + self.assertTrue(sub in base._v_subregistries) + + # test _addSubregistry / _removeSubregistry via only caller, _setBases + + def test__setBases_removing_existing_subregistry(self): + before = self._makeOne() + after = self._makeOne() + sub = self._makeOne([before]) + sub.__bases__ = [after] + self.assertEqual(len(before._v_subregistries), 0) + self.assertEqual(len(after._v_subregistries), 1) + self.assertTrue(sub in after._v_subregistries) + + def test__setBases_wo_stray_entry(self): + before = self._makeOne() + stray = self._makeOne() + after 
= self._makeOne() + sub = self._makeOne([before]) + sub.__dict__['__bases__'].append(stray) + sub.__bases__ = [after] + self.assertEqual(len(before._v_subregistries), 0) + self.assertEqual(len(after._v_subregistries), 1) + self.assertTrue(sub in after._v_subregistries) + + def test__setBases_w_existing_entry_continuing(self): + before = self._makeOne() + after = self._makeOne() + sub = self._makeOne([before]) + sub.__bases__ = [before, after] + self.assertEqual(len(before._v_subregistries), 1) + self.assertEqual(len(after._v_subregistries), 1) + self.assertTrue(sub in before._v_subregistries) + self.assertTrue(sub in after._v_subregistries) + + def test_changed_w_subregistries(self): + base = self._makeOne() + class Derived(object): + _changed = None + def changed(self, originally_changed): + self._changed = originally_changed + derived1, derived2 = Derived(), Derived() + base._addSubregistry(derived1) + base._addSubregistry(derived2) + orig = object() + base.changed(orig) + self.assertTrue(derived1._changed is orig) + self.assertTrue(derived2._changed is orig) + + +class Test_utils(unittest.TestCase): + + def test__convert_None_to_Interface_w_None(self): + from zope.interface.adapter import _convert_None_to_Interface + from zope.interface.interface import Interface + self.assertTrue(_convert_None_to_Interface(None) is Interface) + + def test__convert_None_to_Interface_w_other(self): + from zope.interface.adapter import _convert_None_to_Interface + other = object() + self.assertTrue(_convert_None_to_Interface(other) is other) + + def test__normalize_name_str(self): + import sys + from zope.interface.adapter import _normalize_name + STR = b'str' + if sys.version_info[0] < 3: + self.assertEqual(_normalize_name(STR), unicode(STR)) + else: + self.assertEqual(_normalize_name(STR), str(STR, 'ascii')) + + def test__normalize_name_unicode(self): + from zope.interface.adapter import _normalize_name + + USTR = u'ustr' + self.assertEqual(_normalize_name(USTR), USTR) + + def 
test__normalize_name_other(self): + from zope.interface.adapter import _normalize_name + for other in 1, 1.0, (), [], {}, object(): + self.assertRaises(TypeError, _normalize_name, other) + + # _lookup, _lookupAll, and _subscriptions tested via their callers + # (AdapterLookupBase.{lookup,lookupAll,subscriptions}). diff --git a/thesisenv/lib/python3.6/site-packages/zope/interface/tests/test_advice.py b/thesisenv/lib/python3.6/site-packages/zope/interface/tests/test_advice.py new file mode 100644 index 0000000..0739ac1 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/interface/tests/test_advice.py @@ -0,0 +1,355 @@ +############################################################################## +# +# Copyright (c) 2003 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Tests for advice + +This module was adapted from 'protocols.tests.advice', part of the Python +Enterprise Application Kit (PEAK). Please notify the PEAK authors +(pje@telecommunity.com and tsarna@sarna.org) if bugs are found or +Zope-specific changes are required, so that the PEAK version of this module +can be kept in sync. + +PEAK is a Python application framework that interoperates with (but does +not require) Zope 3 and Twisted. It provides tools for manipulating UML +models, object-relational persistence, aspect-oriented programming, and more. +Visit the PEAK home page at http://peak.telecommunity.com for more information. 
+""" + +import unittest +import sys + +from zope.interface._compat import _skip_under_py2 +from zope.interface._compat import _skip_under_py3k + + +class FrameInfoTest(unittest.TestCase): + + def test_w_module(self): + from zope.interface.tests import advisory_testing + (kind, module, + f_locals, f_globals) = advisory_testing.moduleLevelFrameInfo + self.assertEqual(kind, "module") + for d in module.__dict__, f_locals, f_globals: + self.assertTrue(d is advisory_testing.my_globals) + + @_skip_under_py3k + def test_w_ClassicClass(self): + from zope.interface.tests import advisory_testing + (kind, + module, + f_locals, + f_globals) = advisory_testing.ClassicClass.classLevelFrameInfo + self.assertEqual(kind, "class") + + self.assertTrue( + f_locals is advisory_testing.ClassicClass.__dict__) # ??? + for d in module.__dict__, f_globals: + self.assertTrue(d is advisory_testing.my_globals) + + def test_w_NewStyleClass(self): + from zope.interface.tests import advisory_testing + (kind, + module, + f_locals, + f_globals) = advisory_testing.NewStyleClass.classLevelFrameInfo + self.assertEqual(kind, "class") + + for d in module.__dict__, f_globals: + self.assertTrue(d is advisory_testing.my_globals) + + def test_inside_function_call(self): + from zope.interface.advice import getFrameInfo + kind, module, f_locals, f_globals = getFrameInfo(sys._getframe()) + self.assertEqual(kind, "function call") + self.assertTrue(f_locals is locals()) # ??? 
+ for d in module.__dict__, f_globals: + self.assertTrue(d is globals()) + + def test_inside_exec(self): + from zope.interface.advice import getFrameInfo + _globals = {'getFrameInfo': getFrameInfo} + _locals = {} + exec(_FUNKY_EXEC, _globals, _locals) + self.assertEqual(_locals['kind'], "exec") + self.assertTrue(_locals['f_locals'] is _locals) + self.assertTrue(_locals['module'] is None) + self.assertTrue(_locals['f_globals'] is _globals) + + +_FUNKY_EXEC = """\ +import sys +kind, module, f_locals, f_globals = getFrameInfo(sys._getframe()) +""" + +class AdviceTests(unittest.TestCase): + + @_skip_under_py3k + def test_order(self): + from zope.interface.tests.advisory_testing import ping + log = [] + class Foo(object): + ping(log, 1) + ping(log, 2) + ping(log, 3) + + # Strip the list nesting + for i in 1, 2, 3: + self.assertTrue(isinstance(Foo, list)) + Foo, = Foo + + self.assertEqual(log, [(1, Foo), (2, [Foo]), (3, [[Foo]])]) + + @_skip_under_py3k + def test_single_explicit_meta(self): + from zope.interface.tests.advisory_testing import ping + + class Metaclass(type): + pass + + class Concrete(Metaclass): + __metaclass__ = Metaclass + ping([],1) + + Concrete, = Concrete + self.assertTrue(Concrete.__class__ is Metaclass) + + + @_skip_under_py3k + def test_mixed_metas(self): + from zope.interface.tests.advisory_testing import ping + + class Metaclass1(type): + pass + + class Metaclass2(type): + pass + + class Base1: + __metaclass__ = Metaclass1 + + class Base2: + __metaclass__ = Metaclass2 + + try: + class Derived(Base1, Base2): + ping([], 1) + self.fail("Should have gotten incompatibility error") + except TypeError: + pass + + class Metaclass3(Metaclass1, Metaclass2): + pass + + class Derived(Base1, Base2): + __metaclass__ = Metaclass3 + ping([], 1) + + self.assertTrue(isinstance(Derived, list)) + Derived, = Derived + self.assertTrue(isinstance(Derived, Metaclass3)) + + @_skip_under_py3k + def test_meta_no_bases(self): + from zope.interface.tests.advisory_testing 
import ping + from types import ClassType + class Thing: + ping([], 1) + klass, = Thing # unpack list created by pong + self.assertEqual(type(klass), ClassType) + + +class Test_isClassAdvisor(unittest.TestCase): + + def _callFUT(self, *args, **kw): + from zope.interface.advice import isClassAdvisor + return isClassAdvisor(*args, **kw) + + def test_w_non_function(self): + self.assertEqual(self._callFUT(self), False) + + def test_w_normal_function(self): + def foo(): + raise NotImplementedError() + self.assertEqual(self._callFUT(foo), False) + + def test_w_advisor_function(self): + def bar(): + raise NotImplementedError() + bar.previousMetaclass = object() + self.assertEqual(self._callFUT(bar), True) + + +class Test_determineMetaclass(unittest.TestCase): + + def _callFUT(self, *args, **kw): + from zope.interface.advice import determineMetaclass + return determineMetaclass(*args, **kw) + + @_skip_under_py3k + def test_empty(self): + from types import ClassType + self.assertEqual(self._callFUT(()), ClassType) + + def test_empty_w_explicit_metatype(self): + class Meta(type): + pass + self.assertEqual(self._callFUT((), Meta), Meta) + + def test_single(self): + class Meta(type): + pass + self.assertEqual(self._callFUT((Meta,)), type) + + @_skip_under_py3k + def test_meta_of_class(self): + class Metameta(type): + pass + + class Meta(type): + __metaclass__ = Metameta + + self.assertEqual(self._callFUT((Meta, type)), Metameta) + + @_skip_under_py2 + def test_meta_of_class_py3k(self): + # Work around SyntaxError under Python2. 
+ EXEC = '\n'.join([ + 'class Metameta(type):', + ' pass', + 'class Meta(type, metaclass=Metameta):', + ' pass', + ]) + globs = {} + exec(EXEC, globs) + Meta = globs['Meta'] + Metameta = globs['Metameta'] + + self.assertEqual(self._callFUT((Meta, type)), Metameta) + + @_skip_under_py3k + def test_multiple_in_hierarchy(self): + class Meta_A(type): + pass + class Meta_B(Meta_A): + pass + class A(type): + __metaclass__ = Meta_A + class B(type): + __metaclass__ = Meta_B + self.assertEqual(self._callFUT((A, B,)), Meta_B) + + @_skip_under_py2 + def test_multiple_in_hierarchy_py3k(self): + # Work around SyntaxError under Python2. + EXEC = '\n'.join([ + 'class Meta_A(type):', + ' pass', + 'class Meta_B(Meta_A):', + ' pass', + 'class A(type, metaclass=Meta_A):', + ' pass', + 'class B(type, metaclass=Meta_B):', + ' pass', + ]) + globs = {} + exec(EXEC, globs) + Meta_A = globs['Meta_A'] + Meta_B = globs['Meta_B'] + A = globs['A'] + B = globs['B'] + self.assertEqual(self._callFUT((A, B)), Meta_B) + + @_skip_under_py3k + def test_multiple_not_in_hierarchy(self): + class Meta_A(type): + pass + class Meta_B(type): + pass + class A(type): + __metaclass__ = Meta_A + class B(type): + __metaclass__ = Meta_B + self.assertRaises(TypeError, self._callFUT, (A, B,)) + + @_skip_under_py2 + def test_multiple_not_in_hierarchy_py3k(self): + # Work around SyntaxError under Python2. 
+ EXEC = '\n'.join([ + 'class Meta_A(type):', + ' pass', + 'class Meta_B(type):', + ' pass', + 'class A(type, metaclass=Meta_A):', + ' pass', + 'class B(type, metaclass=Meta_B):', + ' pass', + ]) + globs = {} + exec(EXEC, globs) + Meta_A = globs['Meta_A'] + Meta_B = globs['Meta_B'] + A = globs['A'] + B = globs['B'] + self.assertRaises(TypeError, self._callFUT, (A, B)) + + +class Test_minimalBases(unittest.TestCase): + + def _callFUT(self, klasses): + from zope.interface.advice import minimalBases + return minimalBases(klasses) + + def test_empty(self): + self.assertEqual(self._callFUT([]), []) + + @_skip_under_py3k + def test_w_oldstyle_meta(self): + class C: + pass + self.assertEqual(self._callFUT([type(C)]), []) + + @_skip_under_py3k + def test_w_oldstyle_class(self): + class C: + pass + self.assertEqual(self._callFUT([C]), [C]) + + def test_w_newstyle_meta(self): + self.assertEqual(self._callFUT([type]), [type]) + + def test_w_newstyle_class(self): + class C(object): + pass + self.assertEqual(self._callFUT([C]), [C]) + + def test_simple_hierarchy_skips_implied(self): + class A(object): + pass + class B(A): + pass + class C(B): + pass + class D(object): + pass + self.assertEqual(self._callFUT([A, B, C]), [C]) + self.assertEqual(self._callFUT([A, C]), [C]) + self.assertEqual(self._callFUT([B, C]), [C]) + self.assertEqual(self._callFUT([A, B]), [B]) + self.assertEqual(self._callFUT([D, B, D]), [B, D]) + + def test_repeats_kicked_to_end_of_queue(self): + class A(object): + pass + class B(object): + pass + self.assertEqual(self._callFUT([A, B, A]), [B, A]) diff --git a/thesisenv/lib/python3.6/site-packages/zope/interface/tests/test_declarations.py b/thesisenv/lib/python3.6/site-packages/zope/interface/tests/test_declarations.py new file mode 100644 index 0000000..43f95c8 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/interface/tests/test_declarations.py @@ -0,0 +1,1658 @@ +############################################################################## 
+# +# Copyright (c) 2003 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Test the new API for making and checking interface declarations +""" +import unittest + +from zope.interface._compat import _skip_under_py3k + + +class _Py3ClassAdvice(object): + + def _run_generated_code(self, code, globs, locs, + fails_under_py3k=True, + ): + import warnings + from zope.interface._compat import PYTHON3 + with warnings.catch_warnings(record=True) as log: + warnings.resetwarnings() + if not PYTHON3: + exec(code, globs, locs) + self.assertEqual(len(log), 0) # no longer warn + return True + else: + try: + exec(code, globs, locs) + except TypeError: + return False + else: + if fails_under_py3k: + self.fail("Didn't raise TypeError") + + +class NamedTests(unittest.TestCase): + + def test_class(self): + from zope.interface.declarations import named + + @named(u'foo') + class Foo(object): + pass + + self.assertEqual(Foo.__component_name__, u'foo') + + def test_function(self): + from zope.interface.declarations import named + + @named(u'foo') + def doFoo(o): + raise NotImplementedError() + + self.assertEqual(doFoo.__component_name__, u'foo') + + def test_instance(self): + from zope.interface.declarations import named + + class Foo(object): + pass + foo = Foo() + named(u'foo')(foo) + + self.assertEqual(foo.__component_name__, u'foo') + + +class DeclarationTests(unittest.TestCase): + + def _getTargetClass(self): + from zope.interface.declarations import Declaration + return Declaration + + def 
_makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + def test_ctor_no_bases(self): + decl = self._makeOne() + self.assertEqual(list(decl.__bases__), []) + + def test_ctor_w_interface_in_bases(self): + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass('IFoo') + decl = self._makeOne(IFoo) + self.assertEqual(list(decl.__bases__), [IFoo]) + + def test_ctor_w_implements_in_bases(self): + from zope.interface.declarations import Implements + impl = Implements() + decl = self._makeOne(impl) + self.assertEqual(list(decl.__bases__), [impl]) + + def test_changed_wo_existing__v_attrs(self): + decl = self._makeOne() + decl.changed(decl) # doesn't raise + self.assertFalse('_v_attrs' in decl.__dict__) + + def test_changed_w_existing__v_attrs(self): + decl = self._makeOne() + decl._v_attrs = object() + decl.changed(decl) + self.assertFalse('_v_attrs' in decl.__dict__) + + def test___contains__w_self(self): + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass('IFoo') + decl = self._makeOne() + self.assertFalse(decl in decl) + + def test___contains__w_unrelated_iface(self): + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass('IFoo') + decl = self._makeOne() + self.assertFalse(IFoo in decl) + + def test___contains__w_base_interface(self): + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass('IFoo') + decl = self._makeOne(IFoo) + self.assertTrue(IFoo in decl) + + def test___iter___empty(self): + decl = self._makeOne() + self.assertEqual(list(decl), []) + + def test___iter___single_base(self): + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass('IFoo') + decl = self._makeOne(IFoo) + self.assertEqual(list(decl), [IFoo]) + + def test___iter___multiple_bases(self): + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass('IFoo') + IBar = InterfaceClass('IBar') + decl = self._makeOne(IFoo, IBar) + 
self.assertEqual(list(decl), [IFoo, IBar]) + + def test___iter___inheritance(self): + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass('IFoo') + IBar = InterfaceClass('IBar', (IFoo,)) + decl = self._makeOne(IBar) + self.assertEqual(list(decl), [IBar]) #IBar.interfaces() omits bases + + def test___iter___w_nested_sequence_overlap(self): + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass('IFoo') + IBar = InterfaceClass('IBar') + decl = self._makeOne(IBar, (IFoo, IBar)) + self.assertEqual(list(decl), [IBar, IFoo]) + + def test_flattened_empty(self): + from zope.interface.interface import Interface + decl = self._makeOne() + self.assertEqual(list(decl.flattened()), [Interface]) + + def test_flattened_single_base(self): + from zope.interface.interface import Interface + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass('IFoo') + decl = self._makeOne(IFoo) + self.assertEqual(list(decl.flattened()), [IFoo, Interface]) + + def test_flattened_multiple_bases(self): + from zope.interface.interface import Interface + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass('IFoo') + IBar = InterfaceClass('IBar') + decl = self._makeOne(IFoo, IBar) + self.assertEqual(list(decl.flattened()), [IFoo, IBar, Interface]) + + def test_flattened_inheritance(self): + from zope.interface.interface import Interface + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass('IFoo') + IBar = InterfaceClass('IBar', (IFoo,)) + decl = self._makeOne(IBar) + self.assertEqual(list(decl.flattened()), [IBar, IFoo, Interface]) + + def test_flattened_w_nested_sequence_overlap(self): + from zope.interface.interface import Interface + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass('IFoo') + IBar = InterfaceClass('IBar') + decl = self._makeOne(IBar, (IFoo, IBar)) + # Note that decl.__iro__ has IFoo first. 
+ self.assertEqual(list(decl.flattened()), [IFoo, IBar, Interface]) + + def test___sub___unrelated_interface(self): + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass('IFoo') + IBar = InterfaceClass('IBar') + before = self._makeOne(IFoo) + after = before - IBar + self.assertTrue(isinstance(after, self._getTargetClass())) + self.assertEqual(list(after), [IFoo]) + + def test___sub___related_interface(self): + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass('IFoo') + before = self._makeOne(IFoo) + after = before - IFoo + self.assertEqual(list(after), []) + + def test___sub___related_interface_by_inheritance(self): + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass('IFoo') + IBar = InterfaceClass('IBar', (IFoo,)) + before = self._makeOne(IBar) + after = before - IBar + self.assertEqual(list(after), []) + + def test___add___unrelated_interface(self): + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass('IFoo') + IBar = InterfaceClass('IBar') + before = self._makeOne(IFoo) + after = before + IBar + self.assertTrue(isinstance(after, self._getTargetClass())) + self.assertEqual(list(after), [IFoo, IBar]) + + def test___add___related_interface(self): + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass('IFoo') + IBar = InterfaceClass('IBar') + IBaz = InterfaceClass('IBaz') + before = self._makeOne(IFoo, IBar) + other = self._makeOne(IBar, IBaz) + after = before + other + self.assertEqual(list(after), [IFoo, IBar, IBaz]) + + +class TestImplements(unittest.TestCase): + + def _getTargetClass(self): + from zope.interface.declarations import Implements + return Implements + + def _makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + def test_ctor_no_bases(self): + impl = self._makeOne() + self.assertEqual(impl.inherit, None) + self.assertEqual(impl.declared, ()) + self.assertEqual(impl.__name__, '?') + 
self.assertEqual(list(impl.__bases__), []) + + def test___repr__(self): + impl = self._makeOne() + impl.__name__ = 'Testing' + self.assertEqual(repr(impl), '') + + def test___reduce__(self): + from zope.interface.declarations import implementedBy + impl = self._makeOne() + self.assertEqual(impl.__reduce__(), (implementedBy, (None,))) + + def test_sort(self): + from zope.interface.declarations import implementedBy + class A(object): + pass + class B(object): + pass + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass('IFoo') + + self.assertEqual(implementedBy(A), implementedBy(A)) + self.assertEqual(hash(implementedBy(A)), hash(implementedBy(A))) + self.assertTrue(implementedBy(A) < None) + self.assertTrue(None > implementedBy(A)) + self.assertTrue(implementedBy(A) < implementedBy(B)) + self.assertTrue(implementedBy(A) > IFoo) + self.assertTrue(implementedBy(A) <= implementedBy(B)) + self.assertTrue(implementedBy(A) >= IFoo) + self.assertTrue(implementedBy(A) != IFoo) + + def test_proxy_equality(self): + # https://github.com/zopefoundation/zope.interface/issues/55 + class Proxy(object): + def __init__(self, wrapped): + self._wrapped = wrapped + + def __getattr__(self, name): + raise NotImplementedError() + + def __eq__(self, other): + return self._wrapped == other + + def __ne__(self, other): + return self._wrapped != other + + from zope.interface.declarations import implementedBy + class A(object): + pass + + class B(object): + pass + + implementedByA = implementedBy(A) + implementedByB = implementedBy(B) + proxy = Proxy(implementedByA) + + # The order of arguments to the operators matters, + # test both + self.assertTrue(implementedByA == implementedByA) + self.assertTrue(implementedByA != implementedByB) + self.assertTrue(implementedByB != implementedByA) + + self.assertTrue(proxy == implementedByA) + self.assertTrue(implementedByA == proxy) + self.assertFalse(proxy != implementedByA) + self.assertFalse(implementedByA != proxy) + + 
self.assertTrue(proxy != implementedByB) + self.assertTrue(implementedByB != proxy) + + +class Test_implementedByFallback(unittest.TestCase): + + def _callFUT(self, *args, **kw): + from zope.interface.declarations import implementedByFallback + return implementedByFallback(*args, **kw) + + def test_dictless_wo_existing_Implements_wo_registrations(self): + class Foo(object): + __slots__ = ('__implemented__',) + foo = Foo() + foo.__implemented__ = None + self.assertEqual(list(self._callFUT(foo)), []) + + def test_dictless_wo_existing_Implements_cant_assign___implemented__(self): + class Foo(object): + def _get_impl(self): + raise NotImplementedError() + def _set_impl(self, val): + raise TypeError + __implemented__ = property(_get_impl, _set_impl) + def __call__(self): + # act like a factory + raise NotImplementedError() + foo = Foo() + self.assertRaises(TypeError, self._callFUT, foo) + + def test_dictless_wo_existing_Implements_w_registrations(self): + from zope.interface import declarations + class Foo(object): + __slots__ = ('__implemented__',) + foo = Foo() + foo.__implemented__ = None + reg = object() + with _MonkeyDict(declarations, + 'BuiltinImplementationSpecifications') as specs: + specs[foo] = reg + self.assertTrue(self._callFUT(foo) is reg) + + def test_dictless_w_existing_Implements(self): + from zope.interface.declarations import Implements + impl = Implements() + class Foo(object): + __slots__ = ('__implemented__',) + foo = Foo() + foo.__implemented__ = impl + self.assertTrue(self._callFUT(foo) is impl) + + def test_dictless_w_existing_not_Implements(self): + from zope.interface.interface import InterfaceClass + class Foo(object): + __slots__ = ('__implemented__',) + foo = Foo() + IFoo = InterfaceClass('IFoo') + foo.__implemented__ = (IFoo,) + self.assertEqual(list(self._callFUT(foo)), [IFoo]) + + def test_w_existing_attr_as_Implements(self): + from zope.interface.declarations import Implements + impl = Implements() + class Foo(object): + __implemented__ 
= impl + self.assertTrue(self._callFUT(Foo) is impl) + + def test_builtins_added_to_cache(self): + from zope.interface import declarations + from zope.interface.declarations import Implements + from zope.interface._compat import _BUILTINS + with _MonkeyDict(declarations, + 'BuiltinImplementationSpecifications') as specs: + self.assertEqual(list(self._callFUT(tuple)), []) + self.assertEqual(list(self._callFUT(list)), []) + self.assertEqual(list(self._callFUT(dict)), []) + for typ in (tuple, list, dict): + spec = specs[typ] + self.assertTrue(isinstance(spec, Implements)) + self.assertEqual(repr(spec), + '' + % (_BUILTINS, typ.__name__)) + + def test_builtins_w_existing_cache(self): + from zope.interface import declarations + t_spec, l_spec, d_spec = object(), object(), object() + with _MonkeyDict(declarations, + 'BuiltinImplementationSpecifications') as specs: + specs[tuple] = t_spec + specs[list] = l_spec + specs[dict] = d_spec + self.assertTrue(self._callFUT(tuple) is t_spec) + self.assertTrue(self._callFUT(list) is l_spec) + self.assertTrue(self._callFUT(dict) is d_spec) + + def test_oldstyle_class_no_assertions(self): + # TODO: Figure out P3 story + class Foo: + pass + self.assertEqual(list(self._callFUT(Foo)), []) + + def test_no_assertions(self): + # TODO: Figure out P3 story + class Foo(object): + pass + self.assertEqual(list(self._callFUT(Foo)), []) + + def test_w_None_no_bases_not_factory(self): + class Foo(object): + __implemented__ = None + foo = Foo() + self.assertRaises(TypeError, self._callFUT, foo) + + def test_w_None_no_bases_w_factory(self): + from zope.interface.declarations import objectSpecificationDescriptor + class Foo(object): + __implemented__ = None + def __call__(self): + raise NotImplementedError() + + foo = Foo() + foo.__name__ = 'foo' + spec = self._callFUT(foo) + self.assertEqual(spec.__name__, + 'zope.interface.tests.test_declarations.foo') + self.assertTrue(spec.inherit is foo) + self.assertTrue(foo.__implemented__ is spec) + 
self.assertTrue(foo.__providedBy__ is objectSpecificationDescriptor) + self.assertFalse('__provides__' in foo.__dict__) + + def test_w_None_no_bases_w_class(self): + from zope.interface.declarations import ClassProvides + class Foo(object): + __implemented__ = None + spec = self._callFUT(Foo) + self.assertEqual(spec.__name__, + 'zope.interface.tests.test_declarations.Foo') + self.assertTrue(spec.inherit is Foo) + self.assertTrue(Foo.__implemented__ is spec) + self.assertTrue(isinstance(Foo.__providedBy__, ClassProvides)) + self.assertTrue(isinstance(Foo.__provides__, ClassProvides)) + self.assertEqual(Foo.__provides__, Foo.__providedBy__) + + def test_w_existing_Implements(self): + from zope.interface.declarations import Implements + impl = Implements() + class Foo(object): + __implemented__ = impl + self.assertTrue(self._callFUT(Foo) is impl) + + +class Test_implementedBy(Test_implementedByFallback): + # Repeat tests for C optimizations + + def _callFUT(self, *args, **kw): + from zope.interface.declarations import implementedBy + return implementedBy(*args, **kw) + + def test_optimizations(self): + from zope.interface.declarations import implementedByFallback + from zope.interface.declarations import implementedBy + try: + import zope.interface._zope_interface_coptimizations + except ImportError: + self.assertIs(implementedBy, implementedByFallback) + else: + self.assertIsNot(implementedBy, implementedByFallback) + + +class Test_classImplementsOnly(unittest.TestCase): + + def _callFUT(self, *args, **kw): + from zope.interface.declarations import classImplementsOnly + return classImplementsOnly(*args, **kw) + + def test_no_existing(self): + from zope.interface.declarations import ClassProvides + from zope.interface.interface import InterfaceClass + class Foo(object): + pass + ifoo = InterfaceClass('IFoo') + self._callFUT(Foo, ifoo) + spec = Foo.__implemented__ + self.assertEqual(spec.__name__, + 'zope.interface.tests.test_declarations.Foo') + 
self.assertTrue(spec.inherit is None) + self.assertTrue(Foo.__implemented__ is spec) + self.assertTrue(isinstance(Foo.__providedBy__, ClassProvides)) + self.assertTrue(isinstance(Foo.__provides__, ClassProvides)) + self.assertEqual(Foo.__provides__, Foo.__providedBy__) + + def test_w_existing_Implements(self): + from zope.interface.declarations import Implements + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass('IFoo') + IBar = InterfaceClass('IBar') + impl = Implements(IFoo) + impl.declared = (IFoo,) + class Foo(object): + __implemented__ = impl + impl.inherit = Foo + self._callFUT(Foo, IBar) + # Same spec, now different values + self.assertTrue(Foo.__implemented__ is impl) + self.assertEqual(impl.inherit, None) + self.assertEqual(impl.declared, (IBar,)) + + +class Test_classImplements(unittest.TestCase): + + def _callFUT(self, *args, **kw): + from zope.interface.declarations import classImplements + return classImplements(*args, **kw) + + def test_no_existing(self): + from zope.interface.declarations import ClassProvides + from zope.interface.interface import InterfaceClass + class Foo(object): + pass + IFoo = InterfaceClass('IFoo') + self._callFUT(Foo, IFoo) + spec = Foo.__implemented__ + self.assertEqual(spec.__name__, + 'zope.interface.tests.test_declarations.Foo') + self.assertTrue(spec.inherit is Foo) + self.assertTrue(Foo.__implemented__ is spec) + self.assertTrue(isinstance(Foo.__providedBy__, ClassProvides)) + self.assertTrue(isinstance(Foo.__provides__, ClassProvides)) + self.assertEqual(Foo.__provides__, Foo.__providedBy__) + + def test_w_existing_Implements(self): + from zope.interface.declarations import Implements + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass('IFoo') + IBar = InterfaceClass('IBar') + impl = Implements(IFoo) + impl.declared = (IFoo,) + class Foo(object): + __implemented__ = impl + impl.inherit = Foo + self._callFUT(Foo, IBar) + # Same spec, now different values + 
self.assertTrue(Foo.__implemented__ is impl) + self.assertEqual(impl.inherit, Foo) + self.assertEqual(impl.declared, (IFoo, IBar,)) + + def test_w_existing_Implements_w_bases(self): + from zope.interface.declarations import Implements + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass('IFoo') + IBar = InterfaceClass('IBar') + IBaz = InterfaceClass('IBaz', IFoo) + b_impl = Implements(IBaz) + impl = Implements(IFoo) + impl.declared = (IFoo,) + class Base1(object): + __implemented__ = b_impl + class Base2(object): + __implemented__ = b_impl + class Foo(Base1, Base2): + __implemented__ = impl + impl.inherit = Foo + self._callFUT(Foo, IBar) + # Same spec, now different values + self.assertTrue(Foo.__implemented__ is impl) + self.assertEqual(impl.inherit, Foo) + self.assertEqual(impl.declared, (IFoo, IBar,)) + self.assertEqual(impl.__bases__, (IFoo, IBar, b_impl)) + + +class Test__implements_advice(unittest.TestCase): + + def _callFUT(self, *args, **kw): + from zope.interface.declarations import _implements_advice + return _implements_advice(*args, **kw) + + def test_no_existing_implements(self): + from zope.interface.declarations import classImplements + from zope.interface.declarations import Implements + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass('IFoo') + class Foo(object): + __implements_advice_data__ = ((IFoo,), classImplements) + self._callFUT(Foo) + self.assertFalse('__implements_advice_data__' in Foo.__dict__) + self.assertTrue(isinstance(Foo.__implemented__, Implements)) + self.assertEqual(list(Foo.__implemented__), [IFoo]) + + +class Test_implementer(unittest.TestCase): + + def _getTargetClass(self): + from zope.interface.declarations import implementer + return implementer + + def _makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + def test_oldstyle_class(self): + # TODO Py3 story + from zope.interface.declarations import ClassProvides + from zope.interface.interface import 
InterfaceClass + IFoo = InterfaceClass('IFoo') + class Foo: + pass + decorator = self._makeOne(IFoo) + returned = decorator(Foo) + self.assertTrue(returned is Foo) + spec = Foo.__implemented__ + self.assertEqual(spec.__name__, + 'zope.interface.tests.test_declarations.Foo') + self.assertTrue(spec.inherit is Foo) + self.assertTrue(Foo.__implemented__ is spec) + self.assertTrue(isinstance(Foo.__providedBy__, ClassProvides)) + self.assertTrue(isinstance(Foo.__provides__, ClassProvides)) + self.assertEqual(Foo.__provides__, Foo.__providedBy__) + + def test_newstyle_class(self): + from zope.interface.declarations import ClassProvides + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass('IFoo') + class Foo(object): + pass + decorator = self._makeOne(IFoo) + returned = decorator(Foo) + self.assertTrue(returned is Foo) + spec = Foo.__implemented__ + self.assertEqual(spec.__name__, + 'zope.interface.tests.test_declarations.Foo') + self.assertTrue(spec.inherit is Foo) + self.assertTrue(Foo.__implemented__ is spec) + self.assertTrue(isinstance(Foo.__providedBy__, ClassProvides)) + self.assertTrue(isinstance(Foo.__provides__, ClassProvides)) + self.assertEqual(Foo.__provides__, Foo.__providedBy__) + + def test_nonclass_cannot_assign_attr(self): + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass('IFoo') + decorator = self._makeOne(IFoo) + self.assertRaises(TypeError, decorator, object()) + + def test_nonclass_can_assign_attr(self): + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass('IFoo') + class Foo(object): + pass + foo = Foo() + decorator = self._makeOne(IFoo) + returned = decorator(foo) + self.assertTrue(returned is foo) + spec = foo.__implemented__ + self.assertEqual(spec.__name__, 'zope.interface.tests.test_declarations.?') + self.assertTrue(spec.inherit is None) + self.assertTrue(foo.__implemented__ is spec) + + +class Test_implementer_only(unittest.TestCase): + + def _getTargetClass(self): + 
from zope.interface.declarations import implementer_only + return implementer_only + + def _makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + def test_function(self): + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass('IFoo') + decorator = self._makeOne(IFoo) + def _function(): + raise NotImplementedError() + self.assertRaises(ValueError, decorator, _function) + + def test_method(self): + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass('IFoo') + decorator = self._makeOne(IFoo) + class Bar: + def _method(): + raise NotImplementedError() + self.assertRaises(ValueError, decorator, Bar._method) + + def test_oldstyle_class(self): + # TODO Py3 story + from zope.interface.declarations import Implements + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass('IFoo') + IBar = InterfaceClass('IBar') + old_spec = Implements(IBar) + class Foo: + __implemented__ = old_spec + decorator = self._makeOne(IFoo) + returned = decorator(Foo) + self.assertTrue(returned is Foo) + spec = Foo.__implemented__ + self.assertEqual(spec.__name__, '?') + self.assertTrue(spec.inherit is None) + self.assertTrue(Foo.__implemented__ is spec) + + def test_newstyle_class(self): + from zope.interface.declarations import Implements + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass('IFoo') + IBar = InterfaceClass('IBar') + old_spec = Implements(IBar) + class Foo(object): + __implemented__ = old_spec + decorator = self._makeOne(IFoo) + returned = decorator(Foo) + self.assertTrue(returned is Foo) + spec = Foo.__implemented__ + self.assertEqual(spec.__name__, '?') + self.assertTrue(spec.inherit is None) + self.assertTrue(Foo.__implemented__ is spec) + + +# Test '_implements' by way of 'implements{,Only}', its only callers. 
+ +class Test_implementsOnly(unittest.TestCase, _Py3ClassAdvice): + + def test_simple(self): + import warnings + from zope.interface.declarations import implementsOnly + from zope.interface._compat import PYTHON3 + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass("IFoo") + globs = {'implementsOnly': implementsOnly, + 'IFoo': IFoo, + } + locs = {} + CODE = "\n".join([ + 'class Foo(object):' + ' implementsOnly(IFoo)', + ]) + with warnings.catch_warnings(record=True) as log: + warnings.resetwarnings() + try: + exec(CODE, globs, locs) + except TypeError: + self.assertTrue(PYTHON3, "Must be Python 3") + else: + if PYTHON3: + self.fail("Didn't raise TypeError") + Foo = locs['Foo'] + spec = Foo.__implemented__ + self.assertEqual(list(spec), [IFoo]) + self.assertEqual(len(log), 0) # no longer warn + + def test_called_once_from_class_w_bases(self): + from zope.interface.declarations import implements + from zope.interface.declarations import implementsOnly + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass("IFoo") + IBar = InterfaceClass("IBar") + globs = {'implements': implements, + 'implementsOnly': implementsOnly, + 'IFoo': IFoo, + 'IBar': IBar, + } + locs = {} + CODE = "\n".join([ + 'class Foo(object):', + ' implements(IFoo)', + 'class Bar(Foo):' + ' implementsOnly(IBar)', + ]) + if self._run_generated_code(CODE, globs, locs): + Bar = locs['Bar'] + spec = Bar.__implemented__ + self.assertEqual(list(spec), [IBar]) + + +class Test_implements(unittest.TestCase, _Py3ClassAdvice): + + def test_called_from_function(self): + import warnings + from zope.interface.declarations import implements + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass("IFoo") + globs = {'implements': implements, 'IFoo': IFoo} + locs = {} + CODE = "\n".join([ + 'def foo():', + ' implements(IFoo)' + ]) + if self._run_generated_code(CODE, globs, locs, False): + foo = locs['foo'] + with warnings.catch_warnings(record=True) as 
log: + warnings.resetwarnings() + self.assertRaises(TypeError, foo) + self.assertEqual(len(log), 0) # no longer warn + + def test_called_twice_from_class(self): + import warnings + from zope.interface.declarations import implements + from zope.interface.interface import InterfaceClass + from zope.interface._compat import PYTHON3 + IFoo = InterfaceClass("IFoo") + IBar = InterfaceClass("IBar") + globs = {'implements': implements, 'IFoo': IFoo, 'IBar': IBar} + locs = {} + CODE = "\n".join([ + 'class Foo(object):', + ' implements(IFoo)', + ' implements(IBar)', + ]) + with warnings.catch_warnings(record=True) as log: + warnings.resetwarnings() + try: + exec(CODE, globs, locs) + except TypeError: + if not PYTHON3: + self.assertEqual(len(log), 0) # no longer warn + else: + self.fail("Didn't raise TypeError") + + def test_called_once_from_class(self): + from zope.interface.declarations import implements + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass("IFoo") + globs = {'implements': implements, 'IFoo': IFoo} + locs = {} + CODE = "\n".join([ + 'class Foo(object):', + ' implements(IFoo)', + ]) + if self._run_generated_code(CODE, globs, locs): + Foo = locs['Foo'] + spec = Foo.__implemented__ + self.assertEqual(list(spec), [IFoo]) + + +class ProvidesClassTests(unittest.TestCase): + + def _getTargetClass(self): + from zope.interface.declarations import ProvidesClass + return ProvidesClass + + def _makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + def test_simple_class_one_interface(self): + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass("IFoo") + class Foo(object): + pass + spec = self._makeOne(Foo, IFoo) + self.assertEqual(list(spec), [IFoo]) + + def test___reduce__(self): + from zope.interface.declarations import Provides # the function + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass("IFoo") + class Foo(object): + pass + spec = self._makeOne(Foo, IFoo) + klass, 
args = spec.__reduce__() + self.assertTrue(klass is Provides) + self.assertEqual(args, (Foo, IFoo)) + + def test___get___class(self): + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass("IFoo") + class Foo(object): + pass + spec = self._makeOne(Foo, IFoo) + Foo.__provides__ = spec + self.assertTrue(Foo.__provides__ is spec) + + def test___get___instance(self): + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass("IFoo") + class Foo(object): + pass + spec = self._makeOne(Foo, IFoo) + Foo.__provides__ = spec + def _test(): + foo = Foo() + return foo.__provides__ + self.assertRaises(AttributeError, _test) + + +class Test_Provides(unittest.TestCase): + + def _callFUT(self, *args, **kw): + from zope.interface.declarations import Provides + return Provides(*args, **kw) + + def test_no_cached_spec(self): + from zope.interface import declarations + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass("IFoo") + cache = {} + class Foo(object): + pass + with _Monkey(declarations, InstanceDeclarations=cache): + spec = self._callFUT(Foo, IFoo) + self.assertEqual(list(spec), [IFoo]) + self.assertTrue(cache[(Foo, IFoo)] is spec) + + def test_w_cached_spec(self): + from zope.interface import declarations + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass("IFoo") + prior = object() + class Foo(object): + pass + cache = {(Foo, IFoo): prior} + with _Monkey(declarations, InstanceDeclarations=cache): + spec = self._callFUT(Foo, IFoo) + self.assertTrue(spec is prior) + + +class Test_directlyProvides(unittest.TestCase): + + def _callFUT(self, *args, **kw): + from zope.interface.declarations import directlyProvides + return directlyProvides(*args, **kw) + + def test_w_normal_object(self): + from zope.interface.declarations import ProvidesClass + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass("IFoo") + class Foo(object): + pass + obj = Foo() + self._callFUT(obj, 
IFoo) + self.assertTrue(isinstance(obj.__provides__, ProvidesClass)) + self.assertEqual(list(obj.__provides__), [IFoo]) + + def test_w_class(self): + from zope.interface.declarations import ClassProvides + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass("IFoo") + class Foo(object): + pass + self._callFUT(Foo, IFoo) + self.assertTrue(isinstance(Foo.__provides__, ClassProvides)) + self.assertEqual(list(Foo.__provides__), [IFoo]) + + @_skip_under_py3k + def test_w_non_descriptor_aware_metaclass(self): + # There are no non-descriptor-aware types in Py3k + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass("IFoo") + class MetaClass(type): + def __getattribute__(cls, name): + # Emulate metaclass whose base is not the type object. + if name == '__class__': + return cls + # Under certain circumstances, the implementedByFallback + # can get here for __dict__ + return type.__getattribute__(cls, name) # pragma: no cover + + class Foo(object): + __metaclass__ = MetaClass + obj = Foo() + self.assertRaises(TypeError, self._callFUT, obj, IFoo) + + def test_w_classless_object(self): + from zope.interface.declarations import ProvidesClass + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass("IFoo") + the_dict = {} + class Foo(object): + def __getattribute__(self, name): + # Emulate object w/o any class + if name == '__class__': + return None + raise NotImplementedError(name) + def __setattr__(self, name, value): + the_dict[name] = value + obj = Foo() + self._callFUT(obj, IFoo) + self.assertTrue(isinstance(the_dict['__provides__'], ProvidesClass)) + self.assertEqual(list(the_dict['__provides__']), [IFoo]) + + +class Test_alsoProvides(unittest.TestCase): + + def _callFUT(self, *args, **kw): + from zope.interface.declarations import alsoProvides + return alsoProvides(*args, **kw) + + def test_wo_existing_provides(self): + from zope.interface.declarations import ProvidesClass + from zope.interface.interface 
import InterfaceClass + IFoo = InterfaceClass("IFoo") + class Foo(object): + pass + obj = Foo() + self._callFUT(obj, IFoo) + self.assertTrue(isinstance(obj.__provides__, ProvidesClass)) + self.assertEqual(list(obj.__provides__), [IFoo]) + + def test_w_existing_provides(self): + from zope.interface.declarations import directlyProvides + from zope.interface.declarations import ProvidesClass + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass("IFoo") + IBar = InterfaceClass("IBar") + class Foo(object): + pass + obj = Foo() + directlyProvides(obj, IFoo) + self._callFUT(obj, IBar) + self.assertTrue(isinstance(obj.__provides__, ProvidesClass)) + self.assertEqual(list(obj.__provides__), [IFoo, IBar]) + + +class Test_noLongerProvides(unittest.TestCase): + + def _callFUT(self, *args, **kw): + from zope.interface.declarations import noLongerProvides + return noLongerProvides(*args, **kw) + + def test_wo_existing_provides(self): + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass("IFoo") + class Foo(object): + pass + obj = Foo() + self._callFUT(obj, IFoo) + self.assertEqual(list(obj.__provides__), []) + + def test_w_existing_provides_hit(self): + from zope.interface.declarations import directlyProvides + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass("IFoo") + class Foo(object): + pass + obj = Foo() + directlyProvides(obj, IFoo) + self._callFUT(obj, IFoo) + self.assertEqual(list(obj.__provides__), []) + + def test_w_existing_provides_miss(self): + from zope.interface.declarations import directlyProvides + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass("IFoo") + IBar = InterfaceClass("IBar") + class Foo(object): + pass + obj = Foo() + directlyProvides(obj, IFoo) + self._callFUT(obj, IBar) + self.assertEqual(list(obj.__provides__), [IFoo]) + + def test_w_iface_implemented_by_class(self): + from zope.interface.declarations import implementer + from 
zope.interface.interface import InterfaceClass + IFoo = InterfaceClass("IFoo") + @implementer(IFoo) + class Foo(object): + pass + obj = Foo() + self.assertRaises(ValueError, self._callFUT, obj, IFoo) + + +class ClassProvidesBaseFallbackTests(unittest.TestCase): + + def _getTargetClass(self): + from zope.interface.declarations import ClassProvidesBaseFallback + return ClassProvidesBaseFallback + + def _makeOne(self, klass, implements): + # Don't instantiate directly: the C version can't have attributes + # assigned. + class Derived(self._getTargetClass()): + def __init__(self, k, i): + self._cls = k + self._implements = i + return Derived(klass, implements) + + def test_w_same_class_via_class(self): + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass("IFoo") + class Foo(object): + pass + cpbp = Foo.__provides__ = self._makeOne(Foo, IFoo) + self.assertTrue(Foo.__provides__ is cpbp) + + def test_w_same_class_via_instance(self): + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass("IFoo") + class Foo(object): + pass + foo = Foo() + cpbp = Foo.__provides__ = self._makeOne(Foo, IFoo) + self.assertTrue(foo.__provides__ is IFoo) + + def test_w_different_class(self): + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass("IFoo") + class Foo(object): + pass + class Bar(Foo): + pass + bar = Bar() + cpbp = Foo.__provides__ = self._makeOne(Foo, IFoo) + self.assertRaises(AttributeError, getattr, Bar, '__provides__') + self.assertRaises(AttributeError, getattr, bar, '__provides__') + + +class ClassProvidesBaseTests(ClassProvidesBaseFallbackTests): + # Repeat tests for C optimizations + + def _getTargetClass(self): + from zope.interface.declarations import ClassProvidesBase + return ClassProvidesBase + + def test_optimizations(self): + from zope.interface.declarations import ClassProvidesBaseFallback + try: + import zope.interface._zope_interface_coptimizations + except ImportError: + 
self.assertIs(self._getTargetClass(), ClassProvidesBaseFallback) + else: + self.assertIsNot(self._getTargetClass(), ClassProvidesBaseFallback) + + +class ClassProvidesTests(unittest.TestCase): + + def _getTargetClass(self): + from zope.interface.declarations import ClassProvides + return ClassProvides + + def _makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + def test_w_simple_metaclass(self): + from zope.interface.declarations import implementer + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass("IFoo") + IBar = InterfaceClass("IBar") + @implementer(IFoo) + class Foo(object): + pass + cp = Foo.__provides__ = self._makeOne(Foo, type(Foo), IBar) + self.assertTrue(Foo.__provides__ is cp) + self.assertEqual(list(Foo().__provides__), [IFoo]) + + def test___reduce__(self): + from zope.interface.declarations import implementer + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass("IFoo") + IBar = InterfaceClass("IBar") + @implementer(IFoo) + class Foo(object): + pass + cp = Foo.__provides__ = self._makeOne(Foo, type(Foo), IBar) + self.assertEqual(cp.__reduce__(), + (self._getTargetClass(), (Foo, type(Foo), IBar))) + + +class Test_directlyProvidedBy(unittest.TestCase): + + def _callFUT(self, *args, **kw): + from zope.interface.declarations import directlyProvidedBy + return directlyProvidedBy(*args, **kw) + + def test_wo_declarations_in_class_or_instance(self): + class Foo(object): + pass + foo = Foo() + self.assertEqual(list(self._callFUT(foo)), []) + + def test_w_declarations_in_class_but_not_instance(self): + from zope.interface.declarations import implementer + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass("IFoo") + @implementer(IFoo) + class Foo(object): + pass + foo = Foo() + self.assertEqual(list(self._callFUT(foo)), []) + + def test_w_declarations_in_instance_but_not_class(self): + from zope.interface.declarations import directlyProvides + from 
zope.interface.interface import InterfaceClass + IFoo = InterfaceClass("IFoo") + class Foo(object): + pass + foo = Foo() + directlyProvides(foo, IFoo) + self.assertEqual(list(self._callFUT(foo)), [IFoo]) + + def test_w_declarations_in_instance_and_class(self): + from zope.interface.declarations import directlyProvides + from zope.interface.declarations import implementer + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass("IFoo") + IBar = InterfaceClass("IBar") + @implementer(IFoo) + class Foo(object): + pass + foo = Foo() + directlyProvides(foo, IBar) + self.assertEqual(list(self._callFUT(foo)), [IBar]) + + +class Test_classProvides(unittest.TestCase, _Py3ClassAdvice): + + def test_called_from_function(self): + import warnings + from zope.interface.declarations import classProvides + from zope.interface.interface import InterfaceClass + from zope.interface._compat import PYTHON3 + IFoo = InterfaceClass("IFoo") + globs = {'classProvides': classProvides, 'IFoo': IFoo} + locs = {} + CODE = "\n".join([ + 'def foo():', + ' classProvides(IFoo)' + ]) + exec(CODE, globs, locs) + foo = locs['foo'] + with warnings.catch_warnings(record=True) as log: + warnings.resetwarnings() + self.assertRaises(TypeError, foo) + if not PYTHON3: + self.assertEqual(len(log), 0) # no longer warn + + def test_called_twice_from_class(self): + import warnings + from zope.interface.declarations import classProvides + from zope.interface.interface import InterfaceClass + from zope.interface._compat import PYTHON3 + IFoo = InterfaceClass("IFoo") + IBar = InterfaceClass("IBar") + globs = {'classProvides': classProvides, 'IFoo': IFoo, 'IBar': IBar} + locs = {} + CODE = "\n".join([ + 'class Foo(object):', + ' classProvides(IFoo)', + ' classProvides(IBar)', + ]) + with warnings.catch_warnings(record=True) as log: + warnings.resetwarnings() + try: + exec(CODE, globs, locs) + except TypeError: + if not PYTHON3: + self.assertEqual(len(log), 0) # no longer warn + else: + 
self.fail("Didn't raise TypeError") + + def test_called_once_from_class(self): + from zope.interface.declarations import classProvides + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass("IFoo") + globs = {'classProvides': classProvides, 'IFoo': IFoo} + locs = {} + CODE = "\n".join([ + 'class Foo(object):', + ' classProvides(IFoo)', + ]) + if self._run_generated_code(CODE, globs, locs): + Foo = locs['Foo'] + spec = Foo.__providedBy__ + self.assertEqual(list(spec), [IFoo]) + +# Test _classProvides_advice through classProvides, its only caller. + + +class Test_provider(unittest.TestCase): + + def _getTargetClass(self): + from zope.interface.declarations import provider + return provider + + def _makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + def test_w_class(self): + from zope.interface.declarations import ClassProvides + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass("IFoo") + @self._makeOne(IFoo) + class Foo(object): + pass + self.assertTrue(isinstance(Foo.__provides__, ClassProvides)) + self.assertEqual(list(Foo.__provides__), [IFoo]) + + +class Test_moduleProvides(unittest.TestCase): + + def test_called_from_function(self): + from zope.interface.declarations import moduleProvides + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass("IFoo") + globs = {'__name__': 'zope.interface.tests.foo', + 'moduleProvides': moduleProvides, 'IFoo': IFoo} + locs = {} + CODE = "\n".join([ + 'def foo():', + ' moduleProvides(IFoo)' + ]) + exec(CODE, globs, locs) + foo = locs['foo'] + self.assertRaises(TypeError, foo) + + def test_called_from_class(self): + from zope.interface.declarations import moduleProvides + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass("IFoo") + globs = {'__name__': 'zope.interface.tests.foo', + 'moduleProvides': moduleProvides, 'IFoo': IFoo} + locs = {} + CODE = "\n".join([ + 'class Foo(object):', + ' 
moduleProvides(IFoo)', + ]) + with self.assertRaises(TypeError): + exec(CODE, globs, locs) + + def test_called_once_from_module_scope(self): + from zope.interface.declarations import moduleProvides + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass("IFoo") + globs = {'__name__': 'zope.interface.tests.foo', + 'moduleProvides': moduleProvides, 'IFoo': IFoo} + CODE = "\n".join([ + 'moduleProvides(IFoo)', + ]) + exec(CODE, globs) + spec = globs['__provides__'] + self.assertEqual(list(spec), [IFoo]) + + def test_called_twice_from_module_scope(self): + from zope.interface.declarations import moduleProvides + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass("IFoo") + globs = {'__name__': 'zope.interface.tests.foo', + 'moduleProvides': moduleProvides, 'IFoo': IFoo} + locs = {} + CODE = "\n".join([ + 'moduleProvides(IFoo)', + 'moduleProvides(IFoo)', + ]) + with self.assertRaises(TypeError): + exec(CODE, globs) + + +class Test_getObjectSpecificationFallback(unittest.TestCase): + + def _callFUT(self, *args, **kw): + from zope.interface.declarations import getObjectSpecificationFallback + return getObjectSpecificationFallback(*args, **kw) + + def test_wo_existing_provides_classless(self): + the_dict = {} + class Foo(object): + def __getattribute__(self, name): + # Emulate object w/o any class + if name == '__class__': + raise AttributeError(name) + try: + return the_dict[name] + except KeyError: + raise AttributeError(name) + def __setattr__(self, name, value): + raise NotImplementedError() + foo = Foo() + spec = self._callFUT(foo) + self.assertEqual(list(spec), []) + + def test_existing_provides_is_spec(self): + from zope.interface.declarations import directlyProvides + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass("IFoo") + def foo(): + raise NotImplementedError() + directlyProvides(foo, IFoo) + spec = self._callFUT(foo) + self.assertTrue(spec is foo.__provides__) + + def 
test_existing_provides_is_not_spec(self): + def foo(): + raise NotImplementedError() + foo.__provides__ = object() # not a valid spec + spec = self._callFUT(foo) + self.assertEqual(list(spec), []) + + def test_existing_provides(self): + from zope.interface.declarations import directlyProvides + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass("IFoo") + class Foo(object): + pass + foo = Foo() + directlyProvides(foo, IFoo) + spec = self._callFUT(foo) + self.assertEqual(list(spec), [IFoo]) + + def test_wo_provides_on_class_w_implements(self): + from zope.interface.declarations import implementer + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass("IFoo") + @implementer(IFoo) + class Foo(object): + pass + foo = Foo() + spec = self._callFUT(foo) + self.assertEqual(list(spec), [IFoo]) + + def test_wo_provides_on_class_wo_implements(self): + class Foo(object): + pass + foo = Foo() + spec = self._callFUT(foo) + self.assertEqual(list(spec), []) + + +class Test_getObjectSpecification(Test_getObjectSpecificationFallback): + # Repeat tests for C optimizations + + def _callFUT(self, *args, **kw): + from zope.interface.declarations import getObjectSpecification + return getObjectSpecification(*args, **kw) + + def test_optimizations(self): + from zope.interface.declarations import getObjectSpecificationFallback + from zope.interface.declarations import getObjectSpecification + try: + import zope.interface._zope_interface_coptimizations + except ImportError: + self.assertIs(getObjectSpecification, + getObjectSpecificationFallback) + else: + self.assertIsNot(getObjectSpecification, + getObjectSpecificationFallback) + + +class Test_providedByFallback(unittest.TestCase): + + def _callFUT(self, *args, **kw): + from zope.interface.declarations import providedByFallback + return providedByFallback(*args, **kw) + + def test_wo_providedBy_on_class_wo_implements(self): + class Foo(object): + pass + foo = Foo() + spec = self._callFUT(foo) 
+ self.assertEqual(list(spec), []) + + def test_w_providedBy_valid_spec(self): + from zope.interface.declarations import Provides + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass("IFoo") + class Foo(object): + pass + foo = Foo() + foo.__providedBy__ = Provides(Foo, IFoo) + spec = self._callFUT(foo) + self.assertEqual(list(spec), [IFoo]) + + def test_w_providedBy_invalid_spec(self): + class Foo(object): + pass + foo = Foo() + foo.__providedBy__ = object() + spec = self._callFUT(foo) + self.assertEqual(list(spec), []) + + def test_w_providedBy_invalid_spec_class_w_implements(self): + from zope.interface.declarations import implementer + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass("IFoo") + @implementer(IFoo) + class Foo(object): + pass + foo = Foo() + foo.__providedBy__ = object() + spec = self._callFUT(foo) + self.assertEqual(list(spec), [IFoo]) + + def test_w_providedBy_invalid_spec_w_provides_no_provides_on_class(self): + class Foo(object): + pass + foo = Foo() + foo.__providedBy__ = object() + expected = foo.__provides__ = object() + spec = self._callFUT(foo) + self.assertTrue(spec is expected) + + def test_w_providedBy_invalid_spec_w_provides_diff_provides_on_class(self): + class Foo(object): + pass + foo = Foo() + foo.__providedBy__ = object() + expected = foo.__provides__ = object() + Foo.__provides__ = object() + spec = self._callFUT(foo) + self.assertTrue(spec is expected) + + def test_w_providedBy_invalid_spec_w_provides_same_provides_on_class(self): + from zope.interface.declarations import implementer + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass("IFoo") + @implementer(IFoo) + class Foo(object): + pass + foo = Foo() + foo.__providedBy__ = object() + foo.__provides__ = Foo.__provides__ = object() + spec = self._callFUT(foo) + self.assertEqual(list(spec), [IFoo]) + + +class Test_providedBy(Test_providedByFallback): + # Repeat tests for C optimizations + + def 
_callFUT(self, *args, **kw): + from zope.interface.declarations import providedBy + return providedBy(*args, **kw) + + def test_optimizations(self): + from zope.interface.declarations import providedByFallback + from zope.interface.declarations import providedBy + try: + import zope.interface._zope_interface_coptimizations + except ImportError: + self.assertIs(providedBy, providedByFallback) + else: + self.assertIsNot(providedBy, providedByFallback) + + +class ObjectSpecificationDescriptorFallbackTests(unittest.TestCase): + + def _getTargetClass(self): + from zope.interface.declarations \ + import ObjectSpecificationDescriptorFallback + return ObjectSpecificationDescriptorFallback + + def _makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + def test_accessed_via_class(self): + from zope.interface.declarations import Provides + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass("IFoo") + class Foo(object): + pass + Foo.__provides__ = Provides(Foo, IFoo) + Foo.__providedBy__ = self._makeOne() + self.assertEqual(list(Foo.__providedBy__), [IFoo]) + + def test_accessed_via_inst_wo_provides(self): + from zope.interface.declarations import implementer + from zope.interface.declarations import Provides + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass("IFoo") + IBar = InterfaceClass("IBar") + @implementer(IFoo) + class Foo(object): + pass + Foo.__provides__ = Provides(Foo, IBar) + Foo.__providedBy__ = self._makeOne() + foo = Foo() + self.assertEqual(list(foo.__providedBy__), [IFoo]) + + def test_accessed_via_inst_w_provides(self): + from zope.interface.declarations import directlyProvides + from zope.interface.declarations import implementer + from zope.interface.declarations import Provides + from zope.interface.interface import InterfaceClass + IFoo = InterfaceClass("IFoo") + IBar = InterfaceClass("IBar") + IBaz = InterfaceClass("IBaz") + @implementer(IFoo) + class Foo(object): + pass + 
Foo.__provides__ = Provides(Foo, IBar) + Foo.__providedBy__ = self._makeOne() + foo = Foo() + directlyProvides(foo, IBaz) + self.assertEqual(list(foo.__providedBy__), [IBaz, IFoo]) + + +class ObjectSpecificationDescriptorTests( + ObjectSpecificationDescriptorFallbackTests): + # Repeat tests for C optimizations + + def _getTargetClass(self): + from zope.interface.declarations import ObjectSpecificationDescriptor + return ObjectSpecificationDescriptor + + def test_optimizations(self): + from zope.interface.declarations import ( + ObjectSpecificationDescriptorFallback) + try: + import zope.interface._zope_interface_coptimizations + except ImportError: + self.assertIs(self._getTargetClass(), + ObjectSpecificationDescriptorFallback) + else: + self.assertIsNot(self._getTargetClass(), + ObjectSpecificationDescriptorFallback) + + +# Test _normalizeargs through its callers. + + +class _Monkey(object): + # context-manager for replacing module names in the scope of a test. + def __init__(self, module, **kw): + self.module = module + self.to_restore = dict([(key, getattr(module, key)) for key in kw]) + for key, value in kw.items(): + setattr(module, key, value) + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + for key, value in self.to_restore.items(): + setattr(self.module, key, value) + + +class _MonkeyDict(object): + # context-manager for restoring a dict w/in a module in the scope of a test. 
+ def __init__(self, module, attrname, **kw): + self.module = module + self.target = getattr(module, attrname) + self.to_restore = self.target.copy() + self.target.clear() + self.target.update(kw) + + def __enter__(self): + return self.target + + def __exit__(self, exc_type, exc_val, exc_tb): + self.target.clear() + self.target.update(self.to_restore) diff --git a/thesisenv/lib/python3.6/site-packages/zope/interface/tests/test_document.py b/thesisenv/lib/python3.6/site-packages/zope/interface/tests/test_document.py new file mode 100644 index 0000000..bffe6a2 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/interface/tests/test_document.py @@ -0,0 +1,505 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Documentation tests. 
+""" +import unittest + + +class Test_asStructuredText(unittest.TestCase): + + def _callFUT(self, iface): + from zope.interface.document import asStructuredText + return asStructuredText(iface) + + def test_asStructuredText_no_docstring(self): + from zope.interface import Interface + EXPECTED = '\n\n'.join([ + "INoDocstring", + " Attributes:", + " Methods:", + "" + ]) + class INoDocstring(Interface): + pass + self.assertEqual(self._callFUT(INoDocstring), EXPECTED) + + def test_asStructuredText_empty_with_docstring(self): + from zope.interface import Interface + EXPECTED = '\n\n'.join([ + "IEmpty", + " This is an empty interface.", + " Attributes:", + " Methods:", + "" + ]) + class IEmpty(Interface): + """ This is an empty interface. + """ + self.assertEqual(self._callFUT(IEmpty), EXPECTED) + + def test_asStructuredText_empty_with_multiline_docstring(self): + from zope.interface import Interface + EXPECTED = '\n'.join([ + "IEmpty", + "", + " This is an empty interface.", + " ", + (" It can be used to annotate any class or object, " + "because it promises"), + " nothing.", + "", + " Attributes:", + "", + " Methods:", + "", + "" + ]) + class IEmpty(Interface): + """ This is an empty interface. + + It can be used to annotate any class or object, because it promises + nothing. + """ + self.assertEqual(self._callFUT(IEmpty), EXPECTED) + + def test_asStructuredText_with_attribute_no_docstring(self): + from zope.interface import Attribute + from zope.interface import Interface + EXPECTED = '\n\n'.join([ + "IHasAttribute", + " This interface has an attribute.", + " Attributes:", + " an_attribute -- no documentation", + " Methods:", + "" + ]) + class IHasAttribute(Interface): + """ This interface has an attribute. 
+ """ + an_attribute = Attribute('an_attribute') + + self.assertEqual(self._callFUT(IHasAttribute), EXPECTED) + + def test_asStructuredText_with_attribute_with_docstring(self): + from zope.interface import Attribute + from zope.interface import Interface + EXPECTED = '\n\n'.join([ + "IHasAttribute", + " This interface has an attribute.", + " Attributes:", + " an_attribute -- This attribute is documented.", + " Methods:", + "" + ]) + class IHasAttribute(Interface): + """ This interface has an attribute. + """ + an_attribute = Attribute('an_attribute', + 'This attribute is documented.') + + self.assertEqual(self._callFUT(IHasAttribute), EXPECTED) + + def test_asStructuredText_with_method_no_args_no_docstring(self): + from zope.interface import Interface + EXPECTED = '\n\n'.join([ + "IHasMethod", + " This interface has a method.", + " Attributes:", + " Methods:", + " aMethod() -- no documentation", + "" + ]) + class IHasMethod(Interface): + """ This interface has a method. + """ + def aMethod(): + pass + + self.assertEqual(self._callFUT(IHasMethod), EXPECTED) + + def test_asStructuredText_with_method_positional_args_no_docstring(self): + from zope.interface import Interface + EXPECTED = '\n\n'.join([ + "IHasMethod", + " This interface has a method.", + " Attributes:", + " Methods:", + " aMethod(first, second) -- no documentation", + "" + ]) + class IHasMethod(Interface): + """ This interface has a method. + """ + def aMethod(first, second): + pass + + self.assertEqual(self._callFUT(IHasMethod), EXPECTED) + + def test_asStructuredText_with_method_starargs_no_docstring(self): + from zope.interface import Interface + EXPECTED = '\n\n'.join([ + "IHasMethod", + " This interface has a method.", + " Attributes:", + " Methods:", + " aMethod(first, second, *rest) -- no documentation", + "" + ]) + class IHasMethod(Interface): + """ This interface has a method. 
+ """ + def aMethod(first, second, *rest): + pass + + self.assertEqual(self._callFUT(IHasMethod), EXPECTED) + + def test_asStructuredText_with_method_kwargs_no_docstring(self): + from zope.interface import Interface + EXPECTED = '\n\n'.join([ + "IHasMethod", + " This interface has a method.", + " Attributes:", + " Methods:", + " aMethod(first, second, **kw) -- no documentation", + "" + ]) + class IHasMethod(Interface): + """ This interface has a method. + """ + def aMethod(first, second, **kw): + pass + + self.assertEqual(self._callFUT(IHasMethod), EXPECTED) + + def test_asStructuredText_with_method_with_docstring(self): + from zope.interface import Interface + EXPECTED = '\n\n'.join([ + "IHasMethod", + " This interface has a method.", + " Attributes:", + " Methods:", + " aMethod() -- This method is documented.", + "" + ]) + class IHasMethod(Interface): + """ This interface has a method. + """ + def aMethod(): + """This method is documented. + """ + + self.assertEqual(self._callFUT(IHasMethod), EXPECTED) + + def test_asStructuredText_derived_ignores_base(self): + from zope.interface import Attribute + from zope.interface import Interface + EXPECTED = '\n\n'.join([ + "IDerived", + " IDerived doc", + " This interface extends:", + " o IBase", + " Attributes:", + " attr1 -- no documentation", + " attr2 -- attr2 doc", + " Methods:", + " method3() -- method3 doc", + " method4() -- no documentation", + " method5() -- method5 doc", + "", + ]) + + class IBase(Interface): + def method1(): + pass + def method2(): + pass + + class IDerived(IBase): + "IDerived doc" + attr1 = Attribute('attr1') + attr2 = Attribute('attr2', 'attr2 doc') + + def method3(): + "method3 doc" + def method4(): + pass + def method5(): + "method5 doc" + + self.assertEqual(self._callFUT(IDerived), EXPECTED) + + +class Test_asReStructuredText(unittest.TestCase): + + def _callFUT(self, iface): + from zope.interface.document import asReStructuredText + return asReStructuredText(iface) + + def 
test_asReStructuredText_no_docstring(self): + from zope.interface import Interface + EXPECTED = '\n\n'.join([ + "``INoDocstring``", + " Attributes:", + " Methods:", + "" + ]) + class INoDocstring(Interface): + pass + self.assertEqual(self._callFUT(INoDocstring), EXPECTED) + + def test_asReStructuredText_empty_with_docstring(self): + from zope.interface import Interface + EXPECTED = '\n\n'.join([ + "``IEmpty``", + " This is an empty interface.", + " Attributes:", + " Methods:", + "" + ]) + class IEmpty(Interface): + """ This is an empty interface. + """ + self.assertEqual(self._callFUT(IEmpty), EXPECTED) + + def test_asReStructuredText_empty_with_multiline_docstring(self): + from zope.interface import Interface + EXPECTED = '\n'.join([ + "``IEmpty``", + "", + " This is an empty interface.", + " ", + (" It can be used to annotate any class or object, " + "because it promises"), + " nothing.", + "", + " Attributes:", + "", + " Methods:", + "", + "" + ]) + class IEmpty(Interface): + """ This is an empty interface. + + It can be used to annotate any class or object, because it promises + nothing. + """ + self.assertEqual(self._callFUT(IEmpty), EXPECTED) + + def test_asReStructuredText_with_attribute_no_docstring(self): + from zope.interface import Attribute + from zope.interface import Interface + EXPECTED = '\n\n'.join([ + "``IHasAttribute``", + " This interface has an attribute.", + " Attributes:", + " ``an_attribute`` -- no documentation", + " Methods:", + "" + ]) + class IHasAttribute(Interface): + """ This interface has an attribute. 
+ """ + an_attribute = Attribute('an_attribute') + + self.assertEqual(self._callFUT(IHasAttribute), EXPECTED) + + def test_asReStructuredText_with_attribute_with_docstring(self): + from zope.interface import Attribute + from zope.interface import Interface + EXPECTED = '\n\n'.join([ + "``IHasAttribute``", + " This interface has an attribute.", + " Attributes:", + " ``an_attribute`` -- This attribute is documented.", + " Methods:", + "" + ]) + class IHasAttribute(Interface): + """ This interface has an attribute. + """ + an_attribute = Attribute('an_attribute', + 'This attribute is documented.') + + self.assertEqual(self._callFUT(IHasAttribute), EXPECTED) + + def test_asReStructuredText_with_method_no_args_no_docstring(self): + from zope.interface import Interface + EXPECTED = '\n\n'.join([ + "``IHasMethod``", + " This interface has a method.", + " Attributes:", + " Methods:", + " ``aMethod()`` -- no documentation", + "" + ]) + class IHasMethod(Interface): + """ This interface has a method. + """ + def aMethod(): + pass + + self.assertEqual(self._callFUT(IHasMethod), EXPECTED) + + def test_asReStructuredText_with_method_positional_args_no_docstring(self): + from zope.interface import Interface + EXPECTED = '\n\n'.join([ + "``IHasMethod``", + " This interface has a method.", + " Attributes:", + " Methods:", + " ``aMethod(first, second)`` -- no documentation", + "" + ]) + class IHasMethod(Interface): + """ This interface has a method. + """ + def aMethod(first, second): + pass + + self.assertEqual(self._callFUT(IHasMethod), EXPECTED) + + def test_asReStructuredText_with_method_starargs_no_docstring(self): + from zope.interface import Interface + EXPECTED = '\n\n'.join([ + "``IHasMethod``", + " This interface has a method.", + " Attributes:", + " Methods:", + " ``aMethod(first, second, *rest)`` -- no documentation", + "" + ]) + class IHasMethod(Interface): + """ This interface has a method. 
+ """ + def aMethod(first, second, *rest): + pass + + self.assertEqual(self._callFUT(IHasMethod), EXPECTED) + + def test_asReStructuredText_with_method_kwargs_no_docstring(self): + from zope.interface import Interface + EXPECTED = '\n\n'.join([ + "``IHasMethod``", + " This interface has a method.", + " Attributes:", + " Methods:", + " ``aMethod(first, second, **kw)`` -- no documentation", + "" + ]) + class IHasMethod(Interface): + """ This interface has a method. + """ + def aMethod(first, second, **kw): + pass + + self.assertEqual(self._callFUT(IHasMethod), EXPECTED) + + def test_asReStructuredText_with_method_with_docstring(self): + from zope.interface import Interface + EXPECTED = '\n\n'.join([ + "``IHasMethod``", + " This interface has a method.", + " Attributes:", + " Methods:", + " ``aMethod()`` -- This method is documented.", + "" + ]) + class IHasMethod(Interface): + """ This interface has a method. + """ + def aMethod(): + """This method is documented. + """ + + self.assertEqual(self._callFUT(IHasMethod), EXPECTED) + + def test_asReStructuredText_derived_ignores_base(self): + from zope.interface import Attribute + from zope.interface import Interface + EXPECTED = '\n\n'.join([ + "``IDerived``", + " IDerived doc", + " This interface extends:", + " o ``IBase``", + " Attributes:", + " ``attr1`` -- no documentation", + " ``attr2`` -- attr2 doc", + " Methods:", + " ``method3()`` -- method3 doc", + " ``method4()`` -- no documentation", + " ``method5()`` -- method5 doc", + "", + ]) + + class IBase(Interface): + def method1(): + pass + def method2(): + pass + + class IDerived(IBase): + "IDerived doc" + attr1 = Attribute('attr1') + attr2 = Attribute('attr2', 'attr2 doc') + + def method3(): + "method3 doc" + def method4(): + pass + def method5(): + "method5 doc" + + self.assertEqual(self._callFUT(IDerived), EXPECTED) + + +class Test__justify_and_indent(unittest.TestCase): + + def _callFUT(self, text, level, **kw): + from zope.interface.document import 
_justify_and_indent + return _justify_and_indent(text, level, **kw) + + def test_simple_level_0(self): + LINES = ['Three blind mice', 'See how they run'] + text = '\n'.join(LINES) + self.assertEqual(self._callFUT(text, 0), text) + + def test_simple_level_1(self): + LINES = ['Three blind mice', 'See how they run'] + text = '\n'.join(LINES) + self.assertEqual(self._callFUT(text, 1), + '\n'.join([' ' + line for line in LINES])) + + def test_simple_level_2(self): + LINES = ['Three blind mice', 'See how they run'] + text = '\n'.join(LINES) + self.assertEqual(self._callFUT(text, 1), + '\n'.join([' ' + line for line in LINES])) + + def test_simple_w_CRLF(self): + LINES = ['Three blind mice', 'See how they run'] + text = '\r\n'.join(LINES) + self.assertEqual(self._callFUT(text, 1), + '\n'.join([' ' + line for line in LINES])) + + def test_with_munge(self): + TEXT = ("This is a piece of text longer than 15 characters, \n" + "and split across multiple lines.") + EXPECTED = (" This is a piece\n" + " of text longer\n" + " than 15 characters,\n" + " and split across\n" + " multiple lines.\n" + " ") + self.assertEqual(self._callFUT(TEXT, 1, munge=1, width=15), EXPECTED) diff --git a/thesisenv/lib/python3.6/site-packages/zope/interface/tests/test_element.py b/thesisenv/lib/python3.6/site-packages/zope/interface/tests/test_element.py new file mode 100644 index 0000000..eb003cd --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/interface/tests/test_element.py @@ -0,0 +1,31 @@ +############################################################################## +# +# Copyright (c) 2003 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Test Element meta-class. +""" + +import unittest +from zope.interface.interface import Element + +class TestElement(unittest.TestCase): + + def test_taggedValues(self): + """Test that we can update tagged values of more than one element + """ + + e1 = Element("foo") + e2 = Element("bar") + e1.setTaggedValue("x", 1) + e2.setTaggedValue("x", 2) + self.assertEqual(e1.getTaggedValue("x"), 1) + self.assertEqual(e2.getTaggedValue("x"), 2) diff --git a/thesisenv/lib/python3.6/site-packages/zope/interface/tests/test_exceptions.py b/thesisenv/lib/python3.6/site-packages/zope/interface/tests/test_exceptions.py new file mode 100644 index 0000000..ae73f9c --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/interface/tests/test_exceptions.py @@ -0,0 +1,72 @@ +############################################################################## +# +# Copyright (c) 2010 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. 
+# +############################################################################## +""" zope.interface.exceptions unit tests +""" +import unittest + +def _makeIface(): + from zope.interface import Interface + class IDummy(Interface): + pass + return IDummy + +class DoesNotImplementTests(unittest.TestCase): + + def _getTargetClass(self): + from zope.interface.exceptions import DoesNotImplement + return DoesNotImplement + + def _makeOne(self): + iface = _makeIface() + return self._getTargetClass()(iface) + + def test___str__(self): + dni = self._makeOne() + # XXX The trailing newlines and blank spaces are a stupid artifact. + self.assertEqual(str(dni), + 'An object does not implement interface \n\n ') + +class BrokenImplementationTests(unittest.TestCase): + + def _getTargetClass(self): + from zope.interface.exceptions import BrokenImplementation + return BrokenImplementation + + def _makeOne(self, name='missing'): + iface = _makeIface() + return self._getTargetClass()(iface, name) + + def test___str__(self): + dni = self._makeOne() + # XXX The trailing newlines and blank spaces are a stupid artifact. 
+ self.assertEqual(str(dni), + 'An object has failed to implement interface \n\n' + ' The missing attribute was not provided.\n ') + +class BrokenMethodImplementationTests(unittest.TestCase): + + def _getTargetClass(self): + from zope.interface.exceptions import BrokenMethodImplementation + return BrokenMethodImplementation + + def _makeOne(self, method='aMethod', mess='I said so'): + return self._getTargetClass()(method, mess) + + def test___str__(self): + dni = self._makeOne() + self.assertEqual(str(dni), + 'The implementation of aMethod violates its contract\n' + ' because I said so.\n ') diff --git a/thesisenv/lib/python3.6/site-packages/zope/interface/tests/test_interface.py b/thesisenv/lib/python3.6/site-packages/zope/interface/tests/test_interface.py new file mode 100644 index 0000000..2bb3d1c --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/interface/tests/test_interface.py @@ -0,0 +1,2123 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. 
+# +############################################################################## +"""Test Interface implementation +""" +# pylint:disable=protected-access +import unittest + +from zope.interface._compat import _skip_under_py3k + +_marker = object() + + +class Test_invariant(unittest.TestCase): + + def test_w_single(self): + from zope.interface.interface import invariant + from zope.interface.interface import TAGGED_DATA + + def _check(*args, **kw): + raise NotImplementedError() + + class Foo(object): + invariant(_check) + + self.assertEqual(getattr(Foo, TAGGED_DATA, None), + {'invariants': [_check]}) + + def test_w_multiple(self): + from zope.interface.interface import invariant + from zope.interface.interface import TAGGED_DATA + + def _check(*args, **kw): + raise NotImplementedError() + + def _another_check(*args, **kw): + raise NotImplementedError() + + class Foo(object): + invariant(_check) + invariant(_another_check) + + self.assertEqual(getattr(Foo, TAGGED_DATA, None), + {'invariants': [_check, _another_check]}) + + +class Test_taggedValue(unittest.TestCase): + + def test_w_single(self): + from zope.interface.interface import taggedValue + from zope.interface.interface import TAGGED_DATA + + class Foo(object): + taggedValue('bar', ['baz']) + + self.assertEqual(getattr(Foo, TAGGED_DATA, None), + {'bar': ['baz']}) + + def test_w_multiple(self): + from zope.interface.interface import taggedValue + from zope.interface.interface import TAGGED_DATA + + class Foo(object): + taggedValue('bar', ['baz']) + taggedValue('qux', 'spam') + + self.assertEqual(getattr(Foo, TAGGED_DATA, None), + {'bar': ['baz'], 'qux': 'spam'}) + + def test_w_multiple_overwriting(self): + from zope.interface.interface import taggedValue + from zope.interface.interface import TAGGED_DATA + + class Foo(object): + taggedValue('bar', ['baz']) + taggedValue('qux', 'spam') + taggedValue('bar', 'frob') + + self.assertEqual(getattr(Foo, TAGGED_DATA, None), + {'bar': 'frob', 'qux': 'spam'}) + + 
+class ElementTests(unittest.TestCase): + + DEFAULT_NAME = 'AnElement' + + def _getTargetClass(self): + from zope.interface.interface import Element + return Element + + def _makeOne(self, name=None): + if name is None: + name = self.DEFAULT_NAME + return self._getTargetClass()(name) + + def test_ctor_defaults(self): + element = self._makeOne() + self.assertEqual(element.__name__, self.DEFAULT_NAME) + self.assertEqual(element.getName(), self.DEFAULT_NAME) + self.assertEqual(element.__doc__, '') + self.assertEqual(element.getDoc(), '') + self.assertEqual(list(element.getTaggedValueTags()), []) + + def test_ctor_no_doc_space_in_name(self): + element = self._makeOne('An Element') + self.assertEqual(element.__name__, None) + self.assertEqual(element.__doc__, 'An Element') + + def test_getTaggedValue_miss(self): + element = self._makeOne() + self.assertRaises(KeyError, element.getTaggedValue, 'nonesuch') + + def test_queryTaggedValue_miss(self): + element = self._makeOne() + self.assertEqual(element.queryTaggedValue('nonesuch'), None) + + def test_queryTaggedValue_miss_w_default(self): + element = self._makeOne() + self.assertEqual(element.queryTaggedValue('nonesuch', 'bar'), 'bar') + + def test_setTaggedValue(self): + element = self._makeOne() + element.setTaggedValue('foo', 'bar') + self.assertEqual(list(element.getTaggedValueTags()), ['foo']) + self.assertEqual(element.getTaggedValue('foo'), 'bar') + self.assertEqual(element.queryTaggedValue('foo'), 'bar') + + +class SpecificationBasePyTests(unittest.TestCase): + + def _getTargetClass(self): + from zope.interface.interface import SpecificationBasePy + return SpecificationBasePy + + def _makeOne(self): + return self._getTargetClass()() + + def test_providedBy_miss(self): + from zope.interface import interface + from zope.interface.declarations import _empty + sb = self._makeOne() + def _providedBy(obj): + return _empty + with _Monkey(interface, providedBy=_providedBy): + self.assertFalse(sb.providedBy(object())) + + 
def test_providedBy_hit(self): + from zope.interface import interface + sb = self._makeOne() + class _Decl(object): + _implied = {sb: {},} + def _providedBy(obj): + return _Decl() + with _Monkey(interface, providedBy=_providedBy): + self.assertTrue(sb.providedBy(object())) + + def test_implementedBy_miss(self): + from zope.interface import interface + from zope.interface.declarations import _empty + sb = self._makeOne() + def _implementedBy(obj): + return _empty + with _Monkey(interface, implementedBy=_implementedBy): + self.assertFalse(sb.implementedBy(object())) + + def test_implementedBy_hit(self): + from zope.interface import interface + sb = self._makeOne() + class _Decl(object): + _implied = {sb: {},} + def _implementedBy(obj): + return _Decl() + with _Monkey(interface, implementedBy=_implementedBy): + self.assertTrue(sb.implementedBy(object())) + + def test_isOrExtends_miss(self): + sb = self._makeOne() + sb._implied = {} # not defined by SpecificationBasePy + self.assertFalse(sb.isOrExtends(object())) + + def test_isOrExtends_hit(self): + sb = self._makeOne() + testing = object() + sb._implied = {testing: {}} # not defined by SpecificationBasePy + self.assertTrue(sb(testing)) + + def test___call___miss(self): + sb = self._makeOne() + sb._implied = {} # not defined by SpecificationBasePy + self.assertFalse(sb.isOrExtends(object())) + + def test___call___hit(self): + sb = self._makeOne() + testing = object() + sb._implied = {testing: {}} # not defined by SpecificationBasePy + self.assertTrue(sb(testing)) + + +class SpecificationBaseTests(unittest.TestCase): + + def _getTargetClass(self): + from zope.interface.interface import SpecificationBase + return SpecificationBase + + def test_optimizations(self): + from zope.interface.interface import SpecificationBasePy + try: + import zope.interface._zope_interface_coptimizations + except ImportError: + self.assertIs(self._getTargetClass(), SpecificationBasePy) + else: + self.assertIsNot(self._getTargetClass(), 
SpecificationBasePy) + + +class InterfaceBasePyTests(unittest.TestCase): + + def _getTargetClass(self): + from zope.interface.interface import InterfaceBasePy + return InterfaceBasePy + + def _makeOne(self, object_should_provide): + class IB(self._getTargetClass()): + def _call_conform(self, conform): + return conform(self) + def providedBy(self, obj): + return object_should_provide + return IB() + + def test___call___w___conform___returning_value(self): + ib = self._makeOne(False) + conformed = object() + class _Adapted(object): + def __conform__(self, iface): + return conformed + self.assertTrue(ib(_Adapted()) is conformed) + + def test___call___w___conform___miss_ob_provides(self): + ib = self._makeOne(True) + class _Adapted(object): + def __conform__(self, iface): + return None + adapted = _Adapted() + self.assertTrue(ib(adapted) is adapted) + + def test___call___wo___conform___ob_no_provides_w_alternate(self): + ib = self._makeOne(False) + adapted = object() + alternate = object() + self.assertTrue(ib(adapted, alternate) is alternate) + + def test___call___w___conform___ob_no_provides_wo_alternate(self): + ib = self._makeOne(False) + adapted = object() + self.assertRaises(TypeError, ib, adapted) + + def test___adapt___ob_provides(self): + ib = self._makeOne(True) + adapted = object() + self.assertTrue(ib.__adapt__(adapted) is adapted) + + def test___adapt___ob_no_provides_uses_hooks(self): + from zope.interface import interface + ib = self._makeOne(False) + adapted = object() + _missed = [] + def _hook_miss(iface, obj): + _missed.append((iface, obj)) + return None + def _hook_hit(iface, obj): + return obj + with _Monkey(interface, adapter_hooks=[_hook_miss, _hook_hit]): + self.assertTrue(ib.__adapt__(adapted) is adapted) + self.assertEqual(_missed, [(ib, adapted)]) + + +class InterfaceBaseTests(unittest.TestCase): + + def _getTargetClass(self): + from zope.interface.interface import InterfaceBase + return InterfaceBase + + def test_optimizations(self): + from 
zope.interface.interface import InterfaceBasePy + try: + import zope.interface._zope_interface_coptimizations + except ImportError: + self.assertIs(self._getTargetClass(), InterfaceBasePy) + else: + self.assertIsNot(self._getTargetClass(), InterfaceBasePy) + + +class SpecificationTests(unittest.TestCase): + + def _getTargetClass(self): + from zope.interface.interface import Specification + return Specification + + def _makeOne(self, bases=_marker): + if bases is _marker: + return self._getTargetClass()() + return self._getTargetClass()(bases) + + def test_ctor(self): + from zope.interface.interface import Interface + spec = self._makeOne() + self.assertEqual(spec.__bases__, ()) + self.assertEqual(len(spec._implied), 2) + self.assertTrue(spec in spec._implied) + self.assertTrue(Interface in spec._implied) + self.assertEqual(len(spec.dependents), 0) + + def test_subscribe_first_time(self): + spec = self._makeOne() + dep = DummyDependent() + spec.subscribe(dep) + self.assertEqual(len(spec.dependents), 1) + self.assertEqual(spec.dependents[dep], 1) + + def test_subscribe_again(self): + spec = self._makeOne() + dep = DummyDependent() + spec.subscribe(dep) + spec.subscribe(dep) + self.assertEqual(spec.dependents[dep], 2) + + def test_unsubscribe_miss(self): + spec = self._makeOne() + dep = DummyDependent() + self.assertRaises(KeyError, spec.unsubscribe, dep) + + def test_unsubscribe(self): + spec = self._makeOne() + dep = DummyDependent() + spec.subscribe(dep) + spec.subscribe(dep) + spec.unsubscribe(dep) + self.assertEqual(spec.dependents[dep], 1) + spec.unsubscribe(dep) + self.assertFalse(dep in spec.dependents) + + def test___setBases_subscribes_bases_and_notifies_dependents(self): + from zope.interface.interface import Interface + spec = self._makeOne() + dep = DummyDependent() + spec.subscribe(dep) + class I(Interface): + pass + class J(Interface): + pass + spec.__bases__ = (I,) + self.assertEqual(dep._changed, [spec]) + self.assertEqual(I.dependents[spec], 1) + 
spec.__bases__ = (J,) + self.assertEqual(I.dependents.get(spec), None) + self.assertEqual(J.dependents[spec], 1) + + def test_changed_clears_volatiles_and_implied(self): + from zope.interface.interface import Interface + class I(Interface): + pass + spec = self._makeOne() + spec._v_attrs = 'Foo' + spec._implied[I] = () + spec.changed(spec) + self.assertTrue(getattr(spec, '_v_attrs', self) is self) + self.assertFalse(I in spec._implied) + + def test_interfaces_skips_already_seen(self): + from zope.interface.interface import Interface + class IFoo(Interface): + pass + spec = self._makeOne([IFoo, IFoo]) + self.assertEqual(list(spec.interfaces()), [IFoo]) + + def test_extends_strict_wo_self(self): + from zope.interface.interface import Interface + class IFoo(Interface): + pass + spec = self._makeOne(IFoo) + self.assertFalse(spec.extends(IFoo, strict=True)) + + def test_extends_strict_w_self(self): + spec = self._makeOne() + self.assertFalse(spec.extends(spec, strict=True)) + + def test_extends_non_strict_w_self(self): + spec = self._makeOne() + self.assertTrue(spec.extends(spec, strict=False)) + + def test_get_hit_w__v_attrs(self): + spec = self._makeOne() + foo = object() + spec._v_attrs = {'foo': foo} + self.assertTrue(spec.get('foo') is foo) + + def test_get_hit_from_base_wo__v_attrs(self): + from zope.interface.interface import Attribute + from zope.interface.interface import Interface + class IFoo(Interface): + foo = Attribute('foo') + class IBar(Interface): + bar = Attribute('bar') + spec = self._makeOne([IFoo, IBar]) + self.assertTrue(spec.get('foo') is IFoo.get('foo')) + self.assertTrue(spec.get('bar') is IBar.get('bar')) + +class InterfaceClassTests(unittest.TestCase): + + def _getTargetClass(self): + from zope.interface.interface import InterfaceClass + return InterfaceClass + + def _makeOne(self, name='ITest', bases=(), attrs=None, __doc__=None, + __module__=None): + return self._getTargetClass()(name, bases, attrs, __doc__, __module__) + + def 
test_ctor_defaults(self): + klass = self._getTargetClass() + inst = klass('ITesting') + self.assertEqual(inst.__name__, 'ITesting') + self.assertEqual(inst.__doc__, '') + self.assertEqual(inst.__bases__, ()) + self.assertEqual(inst.getBases(), ()) + + def test_ctor_bad_bases(self): + klass = self._getTargetClass() + self.assertRaises(TypeError, klass, 'ITesting', (object(),)) + + def test_ctor_w_attrs_attrib_methods(self): + from zope.interface.interface import Attribute + from zope.interface.interface import fromFunction + def _bar(): + """DOCSTRING""" + ATTRS = {'foo': Attribute('Foo', ''), + 'bar': fromFunction(_bar), + } + klass = self._getTargetClass() + inst = klass('ITesting', attrs=ATTRS) + self.assertEqual(inst.__name__, 'ITesting') + self.assertEqual(inst.__doc__, '') + self.assertEqual(inst.__bases__, ()) + self.assertEqual(inst.names(), ATTRS.keys()) + + def test_ctor_attrs_w___locals__(self): + ATTRS = {'__locals__': {}} + klass = self._getTargetClass() + inst = klass('ITesting', attrs=ATTRS) + self.assertEqual(inst.__name__, 'ITesting') + self.assertEqual(inst.__doc__, '') + self.assertEqual(inst.__bases__, ()) + self.assertEqual(inst.names(), ATTRS.keys()) + + def test_ctor_attrs_w___annotations__(self): + ATTRS = {'__annotations__': {}} + klass = self._getTargetClass() + inst = klass('ITesting', attrs=ATTRS) + self.assertEqual(inst.__name__, 'ITesting') + self.assertEqual(inst.__doc__, '') + self.assertEqual(inst.__bases__, ()) + self.assertEqual(inst.names(), ATTRS.keys()) + + def test_ctor_attrs_w__decorator_non_return(self): + from zope.interface.interface import _decorator_non_return + ATTRS = {'dropme': _decorator_non_return} + klass = self._getTargetClass() + inst = klass('ITesting', attrs=ATTRS) + self.assertEqual(inst.__name__, 'ITesting') + self.assertEqual(inst.__doc__, '') + self.assertEqual(inst.__bases__, ()) + self.assertEqual(list(inst.names()), []) + + def test_ctor_attrs_w_invalid_attr_type(self): + from zope.interface.exceptions 
import InvalidInterface + ATTRS = {'invalid': object()} + klass = self._getTargetClass() + self.assertRaises(InvalidInterface, klass, 'ITesting', attrs=ATTRS) + + def test_ctor_w_explicit___doc__(self): + ATTRS = {'__doc__': 'ATTR'} + klass = self._getTargetClass() + inst = klass('ITesting', attrs=ATTRS, __doc__='EXPLICIT') + self.assertEqual(inst.__doc__, 'EXPLICIT') + + def test_interfaces(self): + iface = self._makeOne() + self.assertEqual(list(iface.interfaces()), [iface]) + + def test_getBases(self): + iface = self._makeOne() + sub = self._makeOne('ISub', bases=(iface,)) + self.assertEqual(sub.getBases(), (iface,)) + + def test_isEqualOrExtendedBy_identity(self): + iface = self._makeOne() + self.assertTrue(iface.isEqualOrExtendedBy(iface)) + + def test_isEqualOrExtendedBy_subiface(self): + iface = self._makeOne() + sub = self._makeOne('ISub', bases=(iface,)) + self.assertTrue(iface.isEqualOrExtendedBy(sub)) + self.assertFalse(sub.isEqualOrExtendedBy(iface)) + + def test_isEqualOrExtendedBy_unrelated(self): + one = self._makeOne('One') + another = self._makeOne('Another') + self.assertFalse(one.isEqualOrExtendedBy(another)) + self.assertFalse(another.isEqualOrExtendedBy(one)) + + def test_names_w_all_False_ignores_bases(self): + from zope.interface.interface import Attribute + from zope.interface.interface import fromFunction + def _bar(): + """DOCSTRING""" + BASE_ATTRS = {'foo': Attribute('Foo', ''), + 'bar': fromFunction(_bar), + } + DERIVED_ATTRS = {'baz': Attribute('Baz', ''), + } + base = self._makeOne('IBase', attrs=BASE_ATTRS) + derived = self._makeOne('IDerived', bases=(base,), attrs=DERIVED_ATTRS) + self.assertEqual(sorted(derived.names(all=False)), ['baz']) + + def test_names_w_all_True_no_bases(self): + from zope.interface.interface import Attribute + from zope.interface.interface import fromFunction + def _bar(): + """DOCSTRING""" + ATTRS = {'foo': Attribute('Foo', ''), + 'bar': fromFunction(_bar), + } + one = self._makeOne(attrs=ATTRS) + 
self.assertEqual(sorted(one.names(all=True)), ['bar', 'foo']) + + def test_names_w_all_True_w_bases_simple(self): + from zope.interface.interface import Attribute + from zope.interface.interface import fromFunction + def _bar(): + """DOCSTRING""" + BASE_ATTRS = {'foo': Attribute('Foo', ''), + 'bar': fromFunction(_bar), + } + DERIVED_ATTRS = {'baz': Attribute('Baz', ''), + } + base = self._makeOne('IBase', attrs=BASE_ATTRS) + derived = self._makeOne('IDerived', bases=(base,), attrs=DERIVED_ATTRS) + self.assertEqual(sorted(derived.names(all=True)), ['bar', 'baz', 'foo']) + + def test_names_w_all_True_bases_w_same_names(self): + from zope.interface.interface import Attribute + from zope.interface.interface import fromFunction + def _bar(): + """DOCSTRING""" + def _foo(): + """DOCSTRING""" + BASE_ATTRS = {'foo': Attribute('Foo', ''), + 'bar': fromFunction(_bar), + } + DERIVED_ATTRS = {'foo': fromFunction(_foo), + 'baz': Attribute('Baz', ''), + } + base = self._makeOne('IBase', attrs=BASE_ATTRS) + derived = self._makeOne('IDerived', bases=(base,), attrs=DERIVED_ATTRS) + self.assertEqual(sorted(derived.names(all=True)), ['bar', 'baz', 'foo']) + + def test___iter__(self): + from zope.interface.interface import Attribute + from zope.interface.interface import fromFunction + def _bar(): + """DOCSTRING""" + def _foo(): + """DOCSTRING""" + BASE_ATTRS = {'foo': Attribute('Foo', ''), + 'bar': fromFunction(_bar), + } + DERIVED_ATTRS = {'foo': fromFunction(_foo), + 'baz': Attribute('Baz', ''), + } + base = self._makeOne('IBase', attrs=BASE_ATTRS) + derived = self._makeOne('IDerived', bases=(base,), attrs=DERIVED_ATTRS) + self.assertEqual(sorted(derived), ['bar', 'baz', 'foo']) + + def test_namesAndDescriptions_w_all_False_ignores_bases(self): + from zope.interface.interface import Attribute + from zope.interface.interface import fromFunction + def _bar(): + """DOCSTRING""" + BASE_ATTRS = {'foo': Attribute('Foo', ''), + 'bar': fromFunction(_bar), + } + DERIVED_ATTRS = {'baz': 
Attribute('Baz', ''), + } + base = self._makeOne('IBase', attrs=BASE_ATTRS) + derived = self._makeOne('IDerived', bases=(base,), attrs=DERIVED_ATTRS) + self.assertEqual(sorted(derived.namesAndDescriptions(all=False)), + [('baz', DERIVED_ATTRS['baz']), + ]) + + def test_namesAndDescriptions_w_all_True_no_bases(self): + from zope.interface.interface import Attribute + from zope.interface.interface import fromFunction + def _bar(): + """DOCSTRING""" + ATTRS = {'foo': Attribute('Foo', ''), + 'bar': fromFunction(_bar), + } + one = self._makeOne(attrs=ATTRS) + self.assertEqual(sorted(one.namesAndDescriptions(all=False)), + [('bar', ATTRS['bar']), + ('foo', ATTRS['foo']), + ]) + + def test_namesAndDescriptions_w_all_True_simple(self): + from zope.interface.interface import Attribute + from zope.interface.interface import fromFunction + def _bar(): + """DOCSTRING""" + BASE_ATTRS = {'foo': Attribute('Foo', ''), + 'bar': fromFunction(_bar), + } + DERIVED_ATTRS = {'baz': Attribute('Baz', ''), + } + base = self._makeOne('IBase', attrs=BASE_ATTRS) + derived = self._makeOne('IDerived', bases=(base,), attrs=DERIVED_ATTRS) + self.assertEqual(sorted(derived.namesAndDescriptions(all=True)), + [('bar', BASE_ATTRS['bar']), + ('baz', DERIVED_ATTRS['baz']), + ('foo', BASE_ATTRS['foo']), + ]) + + def test_namesAndDescriptions_w_all_True_bases_w_same_names(self): + from zope.interface.interface import Attribute + from zope.interface.interface import fromFunction + def _bar(): + """DOCSTRING""" + def _foo(): + """DOCSTRING""" + BASE_ATTRS = {'foo': Attribute('Foo', ''), + 'bar': fromFunction(_bar), + } + DERIVED_ATTRS = {'foo': fromFunction(_foo), + 'baz': Attribute('Baz', ''), + } + base = self._makeOne('IBase', attrs=BASE_ATTRS) + derived = self._makeOne('IDerived', bases=(base,), attrs=DERIVED_ATTRS) + self.assertEqual(sorted(derived.namesAndDescriptions(all=True)), + [('bar', BASE_ATTRS['bar']), + ('baz', DERIVED_ATTRS['baz']), + ('foo', DERIVED_ATTRS['foo']), + ]) + + def 
test_getDescriptionFor_miss(self): + one = self._makeOne() + self.assertRaises(KeyError, one.getDescriptionFor, 'nonesuch') + + def test_getDescriptionFor_hit(self): + from zope.interface.interface import Attribute + from zope.interface.interface import fromFunction + def _bar(): + """DOCSTRING""" + ATTRS = {'foo': Attribute('Foo', ''), + 'bar': fromFunction(_bar), + } + one = self._makeOne(attrs=ATTRS) + self.assertEqual(one.getDescriptionFor('foo'), ATTRS['foo']) + self.assertEqual(one.getDescriptionFor('bar'), ATTRS['bar']) + + def test___getitem___miss(self): + one = self._makeOne() + def _test(): + return one['nonesuch'] + self.assertRaises(KeyError, _test) + + def test___getitem___hit(self): + from zope.interface.interface import Attribute + from zope.interface.interface import fromFunction + def _bar(): + """DOCSTRING""" + ATTRS = {'foo': Attribute('Foo', ''), + 'bar': fromFunction(_bar), + } + one = self._makeOne(attrs=ATTRS) + self.assertEqual(one['foo'], ATTRS['foo']) + self.assertEqual(one['bar'], ATTRS['bar']) + + def test___contains___miss(self): + one = self._makeOne() + self.assertFalse('nonesuch' in one) + + def test___contains___hit(self): + from zope.interface.interface import Attribute + from zope.interface.interface import fromFunction + def _bar(): + """DOCSTRING""" + ATTRS = {'foo': Attribute('Foo', ''), + 'bar': fromFunction(_bar), + } + one = self._makeOne(attrs=ATTRS) + self.assertTrue('foo' in one) + self.assertTrue('bar' in one) + + def test_direct_miss(self): + one = self._makeOne() + self.assertEqual(one.direct('nonesuch'), None) + + def test_direct_hit_local_miss_bases(self): + from zope.interface.interface import Attribute + from zope.interface.interface import fromFunction + def _bar(): + """DOCSTRING""" + def _foo(): + """DOCSTRING""" + BASE_ATTRS = {'foo': Attribute('Foo', ''), + 'bar': fromFunction(_bar), + } + DERIVED_ATTRS = {'foo': fromFunction(_foo), + 'baz': Attribute('Baz', ''), + } + base = self._makeOne('IBase', 
attrs=BASE_ATTRS) + derived = self._makeOne('IDerived', bases=(base,), attrs=DERIVED_ATTRS) + self.assertEqual(derived.direct('foo'), DERIVED_ATTRS['foo']) + self.assertEqual(derived.direct('baz'), DERIVED_ATTRS['baz']) + self.assertEqual(derived.direct('bar'), None) + + def test_queryDescriptionFor_miss(self): + iface = self._makeOne() + self.assertEqual(iface.queryDescriptionFor('nonesuch'), None) + + def test_queryDescriptionFor_hit(self): + from zope.interface import Attribute + ATTRS = {'attr': Attribute('Title', 'Description')} + iface = self._makeOne(attrs=ATTRS) + self.assertEqual(iface.queryDescriptionFor('attr'), ATTRS['attr']) + + def test_validateInvariants_pass(self): + _called_with = [] + def _passable(*args, **kw): + _called_with.append((args, kw)) + return True + iface = self._makeOne() + obj = object() + iface.setTaggedValue('invariants', [_passable]) + self.assertEqual(iface.validateInvariants(obj), None) + self.assertEqual(_called_with, [((obj,), {})]) + + def test_validateInvariants_fail_wo_errors_passed(self): + from zope.interface.exceptions import Invalid + _passable_called_with = [] + def _passable(*args, **kw): + _passable_called_with.append((args, kw)) + return True + _fail_called_with = [] + def _fail(*args, **kw): + _fail_called_with.append((args, kw)) + raise Invalid + iface = self._makeOne() + obj = object() + iface.setTaggedValue('invariants', [_passable, _fail]) + self.assertRaises(Invalid, iface.validateInvariants, obj) + self.assertEqual(_passable_called_with, [((obj,), {})]) + self.assertEqual(_fail_called_with, [((obj,), {})]) + + def test_validateInvariants_fail_w_errors_passed(self): + from zope.interface.exceptions import Invalid + _errors = [] + _fail_called_with = [] + def _fail(*args, **kw): + _fail_called_with.append((args, kw)) + raise Invalid + iface = self._makeOne() + obj = object() + iface.setTaggedValue('invariants', [_fail]) + self.assertRaises(Invalid, iface.validateInvariants, obj, _errors) + 
self.assertEqual(_fail_called_with, [((obj,), {})]) + self.assertEqual(len(_errors), 1) + self.assertTrue(isinstance(_errors[0], Invalid)) + + def test_validateInvariants_fail_in_base_wo_errors_passed(self): + from zope.interface.exceptions import Invalid + _passable_called_with = [] + def _passable(*args, **kw): + _passable_called_with.append((args, kw)) + return True + _fail_called_with = [] + def _fail(*args, **kw): + _fail_called_with.append((args, kw)) + raise Invalid + base = self._makeOne('IBase') + derived = self._makeOne('IDerived', (base,)) + obj = object() + base.setTaggedValue('invariants', [_fail]) + derived.setTaggedValue('invariants', [_passable]) + self.assertRaises(Invalid, derived.validateInvariants, obj) + self.assertEqual(_passable_called_with, [((obj,), {})]) + self.assertEqual(_fail_called_with, [((obj,), {})]) + + def test_validateInvariants_fail_in_base_w_errors_passed(self): + from zope.interface.exceptions import Invalid + _errors = [] + _passable_called_with = [] + def _passable(*args, **kw): + _passable_called_with.append((args, kw)) + return True + _fail_called_with = [] + def _fail(*args, **kw): + _fail_called_with.append((args, kw)) + raise Invalid + base = self._makeOne('IBase') + derived = self._makeOne('IDerived', (base,)) + obj = object() + base.setTaggedValue('invariants', [_fail]) + derived.setTaggedValue('invariants', [_passable]) + self.assertRaises(Invalid, derived.validateInvariants, obj, _errors) + self.assertEqual(_passable_called_with, [((obj,), {})]) + self.assertEqual(_fail_called_with, [((obj,), {})]) + self.assertEqual(len(_errors), 1) + self.assertTrue(isinstance(_errors[0], Invalid)) + + def test___reduce__(self): + iface = self._makeOne('PickleMe') + self.assertEqual(iface.__reduce__(), 'PickleMe') + + def test___hash___normal(self): + iface = self._makeOne('HashMe') + self.assertEqual(hash(iface), + hash((('HashMe', + 'zope.interface.tests.test_interface')))) + + def test___hash___missing_required_attrs(self): + 
import warnings + from warnings import catch_warnings + + class Derived(self._getTargetClass()): + def __init__(self): + pass # Don't call base class. + derived = Derived() + with catch_warnings(record=True) as warned: + warnings.simplefilter('always') # see LP #825249 + self.assertEqual(hash(derived), 1) + self.assertEqual(len(warned), 1) + self.assertTrue(warned[0].category is UserWarning) + self.assertEqual(str(warned[0].message), + 'Hashing uninitialized InterfaceClass instance') + + def test_comparison_with_None(self): + iface = self._makeOne() + self.assertTrue(iface < None) + self.assertTrue(iface <= None) + self.assertFalse(iface == None) + self.assertTrue(iface != None) + self.assertFalse(iface >= None) + self.assertFalse(iface > None) + + self.assertFalse(None < iface) + self.assertFalse(None <= iface) + self.assertFalse(None == iface) + self.assertTrue(None != iface) + self.assertTrue(None >= iface) + self.assertTrue(None > iface) + + def test_comparison_with_same_instance(self): + iface = self._makeOne() + + self.assertFalse(iface < iface) + self.assertTrue(iface <= iface) + self.assertTrue(iface == iface) + self.assertFalse(iface != iface) + self.assertTrue(iface >= iface) + self.assertFalse(iface > iface) + + def test_comparison_with_same_named_instance_in_other_module(self): + + one = self._makeOne('IName', __module__='zope.interface.tests.one') + other = self._makeOne('IName', __module__='zope.interface.tests.other') + + self.assertTrue(one < other) + self.assertFalse(other < one) + self.assertTrue(one <= other) + self.assertFalse(other <= one) + self.assertFalse(one == other) + self.assertFalse(other == one) + self.assertTrue(one != other) + self.assertTrue(other != one) + self.assertFalse(one >= other) + self.assertTrue(other >= one) + self.assertFalse(one > other) + self.assertTrue(other > one) + + +class InterfaceTests(unittest.TestCase): + + def test_attributes_link_to_interface(self): + from zope.interface import Interface + from 
zope.interface import Attribute + + class I1(Interface): + attr = Attribute("My attr") + + self.assertTrue(I1['attr'].interface is I1) + + def test_methods_link_to_interface(self): + from zope.interface import Interface + + class I1(Interface): + + def method(foo, bar, bingo): + "A method" + + self.assertTrue(I1['method'].interface is I1) + + def test_classImplements_simple(self): + from zope.interface import Interface + from zope.interface import implementedBy + from zope.interface import providedBy + + class ICurrent(Interface): + def method1(a, b): + pass + def method2(a, b): + pass + + class IOther(Interface): + pass + + class Current(object): + __implemented__ = ICurrent + def method1(self, a, b): + raise NotImplementedError() + def method2(self, a, b): + raise NotImplementedError() + + current = Current() + + self.assertTrue(ICurrent.implementedBy(Current)) + self.assertFalse(IOther.implementedBy(Current)) + self.assertTrue(ICurrent in implementedBy(Current)) + self.assertFalse(IOther in implementedBy(Current)) + self.assertTrue(ICurrent in providedBy(current)) + self.assertFalse(IOther in providedBy(current)) + + def test_classImplements_base_not_derived(self): + from zope.interface import Interface + from zope.interface import implementedBy + from zope.interface import providedBy + class IBase(Interface): + def method(): + pass + class IDerived(IBase): + pass + class Current(): + __implemented__ = IBase + def method(self): + raise NotImplementedError() + current = Current() + + self.assertTrue(IBase.implementedBy(Current)) + self.assertFalse(IDerived.implementedBy(Current)) + self.assertTrue(IBase in implementedBy(Current)) + self.assertFalse(IDerived in implementedBy(Current)) + self.assertTrue(IBase in providedBy(current)) + self.assertFalse(IDerived in providedBy(current)) + + def test_classImplements_base_and_derived(self): + from zope.interface import Interface + from zope.interface import implementedBy + from zope.interface import providedBy + + class 
IBase(Interface): + def method(): + pass + + class IDerived(IBase): + pass + + class Current(object): + __implemented__ = IDerived + def method(self): + raise NotImplementedError() + + current = Current() + + self.assertTrue(IBase.implementedBy(Current)) + self.assertTrue(IDerived.implementedBy(Current)) + self.assertFalse(IBase in implementedBy(Current)) + self.assertTrue(IBase in implementedBy(Current).flattened()) + self.assertTrue(IDerived in implementedBy(Current)) + self.assertFalse(IBase in providedBy(current)) + self.assertTrue(IBase in providedBy(current).flattened()) + self.assertTrue(IDerived in providedBy(current)) + + def test_classImplements_multiple(self): + from zope.interface import Interface + from zope.interface import implementedBy + from zope.interface import providedBy + + class ILeft(Interface): + def method(): + pass + + class IRight(ILeft): + pass + + class Left(object): + __implemented__ = ILeft + + def method(self): + raise NotImplementedError() + + class Right(object): + __implemented__ = IRight + + class Ambi(Left, Right): + pass + + ambi = Ambi() + + self.assertTrue(ILeft.implementedBy(Ambi)) + self.assertTrue(IRight.implementedBy(Ambi)) + self.assertTrue(ILeft in implementedBy(Ambi)) + self.assertTrue(IRight in implementedBy(Ambi)) + self.assertTrue(ILeft in providedBy(ambi)) + self.assertTrue(IRight in providedBy(ambi)) + + def test_classImplements_multiple_w_explict_implements(self): + from zope.interface import Interface + from zope.interface import implementedBy + from zope.interface import providedBy + + class ILeft(Interface): + + def method(): + pass + + class IRight(ILeft): + pass + + class IOther(Interface): + pass + + class Left(): + __implemented__ = ILeft + + def method(self): + raise NotImplementedError() + + class Right(object): + __implemented__ = IRight + + class Other(object): + __implemented__ = IOther + + class Mixed(Left, Right): + __implemented__ = Left.__implemented__, Other.__implemented__ + + mixed = Mixed() + 
+ self.assertTrue(ILeft.implementedBy(Mixed)) + self.assertFalse(IRight.implementedBy(Mixed)) + self.assertTrue(IOther.implementedBy(Mixed)) + self.assertTrue(ILeft in implementedBy(Mixed)) + self.assertFalse(IRight in implementedBy(Mixed)) + self.assertTrue(IOther in implementedBy(Mixed)) + self.assertTrue(ILeft in providedBy(mixed)) + self.assertFalse(IRight in providedBy(mixed)) + self.assertTrue(IOther in providedBy(mixed)) + + def testInterfaceExtendsInterface(self): + from zope.interface import Interface + + new = Interface.__class__ + FunInterface = new('FunInterface') + BarInterface = new('BarInterface', [FunInterface]) + BobInterface = new('BobInterface') + BazInterface = new('BazInterface', [BobInterface, BarInterface]) + + self.assertTrue(BazInterface.extends(BobInterface)) + self.assertTrue(BazInterface.extends(BarInterface)) + self.assertTrue(BazInterface.extends(FunInterface)) + self.assertFalse(BobInterface.extends(FunInterface)) + self.assertFalse(BobInterface.extends(BarInterface)) + self.assertTrue(BarInterface.extends(FunInterface)) + self.assertFalse(BarInterface.extends(BazInterface)) + + def test_verifyClass(self): + from zope.interface import Attribute + from zope.interface import Interface + from zope.interface.verify import verifyClass + + + class ICheckMe(Interface): + attr = Attribute(u'My attr') + + def method(): + "A method" + + class CheckMe(object): + __implemented__ = ICheckMe + attr = 'value' + + def method(self): + raise NotImplementedError() + + self.assertTrue(verifyClass(ICheckMe, CheckMe)) + + def test_verifyObject(self): + from zope.interface import Attribute + from zope.interface import Interface + from zope.interface.verify import verifyObject + + + class ICheckMe(Interface): + attr = Attribute(u'My attr') + + def method(): + "A method" + + class CheckMe(object): + __implemented__ = ICheckMe + attr = 'value' + + def method(self): + raise NotImplementedError() + + check_me = CheckMe() + + 
self.assertTrue(verifyObject(ICheckMe, check_me)) + + def test_interface_object_provides_Interface(self): + from zope.interface import Interface + + class AnInterface(Interface): + pass + + self.assertTrue(Interface.providedBy(AnInterface)) + + def test_names_simple(self): + from zope.interface import Attribute + from zope.interface import Interface + + + class ISimple(Interface): + attr = Attribute(u'My attr') + + def method(): + pass + + self.assertEqual(sorted(ISimple.names()), ['attr', 'method']) + + def test_names_derived(self): + from zope.interface import Attribute + from zope.interface import Interface + + + class IBase(Interface): + attr = Attribute(u'My attr') + + def method(): + pass + + class IDerived(IBase): + attr2 = Attribute(u'My attr2') + + def method(): + pass + + def method2(): + pass + + self.assertEqual(sorted(IDerived.names()), + ['attr2', 'method', 'method2']) + self.assertEqual(sorted(IDerived.names(all=True)), + ['attr', 'attr2', 'method', 'method2']) + + def test_namesAndDescriptions_simple(self): + from zope.interface import Attribute + from zope.interface.interface import Method + from zope.interface import Interface + + + class ISimple(Interface): + attr = Attribute(u'My attr') + + def method(): + "My method" + + name_values = sorted(ISimple.namesAndDescriptions()) + + self.assertEqual(len(name_values), 2) + self.assertEqual(name_values[0][0], 'attr') + self.assertTrue(isinstance(name_values[0][1], Attribute)) + self.assertEqual(name_values[0][1].__name__, 'attr') + self.assertEqual(name_values[0][1].__doc__, 'My attr') + self.assertEqual(name_values[1][0], 'method') + self.assertTrue(isinstance(name_values[1][1], Method)) + self.assertEqual(name_values[1][1].__name__, 'method') + self.assertEqual(name_values[1][1].__doc__, 'My method') + + def test_namesAndDescriptions_derived(self): + from zope.interface import Attribute + from zope.interface import Interface + from zope.interface.interface import Method + + + class IBase(Interface): 
+ attr = Attribute(u'My attr') + + def method(): + "My method" + + class IDerived(IBase): + attr2 = Attribute(u'My attr2') + + def method(): + "My method, overridden" + + def method2(): + "My method2" + + name_values = sorted(IDerived.namesAndDescriptions()) + + self.assertEqual(len(name_values), 3) + self.assertEqual(name_values[0][0], 'attr2') + self.assertTrue(isinstance(name_values[0][1], Attribute)) + self.assertEqual(name_values[0][1].__name__, 'attr2') + self.assertEqual(name_values[0][1].__doc__, 'My attr2') + self.assertEqual(name_values[1][0], 'method') + self.assertTrue(isinstance(name_values[1][1], Method)) + self.assertEqual(name_values[1][1].__name__, 'method') + self.assertEqual(name_values[1][1].__doc__, 'My method, overridden') + self.assertEqual(name_values[2][0], 'method2') + self.assertTrue(isinstance(name_values[2][1], Method)) + self.assertEqual(name_values[2][1].__name__, 'method2') + self.assertEqual(name_values[2][1].__doc__, 'My method2') + + name_values = sorted(IDerived.namesAndDescriptions(all=True)) + + self.assertEqual(len(name_values), 4) + self.assertEqual(name_values[0][0], 'attr') + self.assertTrue(isinstance(name_values[0][1], Attribute)) + self.assertEqual(name_values[0][1].__name__, 'attr') + self.assertEqual(name_values[0][1].__doc__, 'My attr') + self.assertEqual(name_values[1][0], 'attr2') + self.assertTrue(isinstance(name_values[1][1], Attribute)) + self.assertEqual(name_values[1][1].__name__, 'attr2') + self.assertEqual(name_values[1][1].__doc__, 'My attr2') + self.assertEqual(name_values[2][0], 'method') + self.assertTrue(isinstance(name_values[2][1], Method)) + self.assertEqual(name_values[2][1].__name__, 'method') + self.assertEqual(name_values[2][1].__doc__, 'My method, overridden') + self.assertEqual(name_values[3][0], 'method2') + self.assertTrue(isinstance(name_values[3][1], Method)) + self.assertEqual(name_values[3][1].__name__, 'method2') + self.assertEqual(name_values[3][1].__doc__, 'My method2') + + def 
test_getDescriptionFor_nonesuch_no_default(self): + from zope.interface import Interface + + class IEmpty(Interface): + pass + + self.assertRaises(KeyError, IEmpty.getDescriptionFor, 'nonesuch') + + def test_getDescriptionFor_simple(self): + from zope.interface import Attribute + from zope.interface.interface import Method + from zope.interface import Interface + + + class ISimple(Interface): + attr = Attribute(u'My attr') + + def method(): + "My method" + + a_desc = ISimple.getDescriptionFor('attr') + self.assertTrue(isinstance(a_desc, Attribute)) + self.assertEqual(a_desc.__name__, 'attr') + self.assertEqual(a_desc.__doc__, 'My attr') + + m_desc = ISimple.getDescriptionFor('method') + self.assertTrue(isinstance(m_desc, Method)) + self.assertEqual(m_desc.__name__, 'method') + self.assertEqual(m_desc.__doc__, 'My method') + + def test_getDescriptionFor_derived(self): + from zope.interface import Attribute + from zope.interface.interface import Method + from zope.interface import Interface + + + class IBase(Interface): + attr = Attribute(u'My attr') + + def method(): + "My method" + + class IDerived(IBase): + attr2 = Attribute(u'My attr2') + + def method(): + "My method, overridden" + + def method2(): + "My method2" + + a_desc = IDerived.getDescriptionFor('attr') + self.assertTrue(isinstance(a_desc, Attribute)) + self.assertEqual(a_desc.__name__, 'attr') + self.assertEqual(a_desc.__doc__, 'My attr') + + m_desc = IDerived.getDescriptionFor('method') + self.assertTrue(isinstance(m_desc, Method)) + self.assertEqual(m_desc.__name__, 'method') + self.assertEqual(m_desc.__doc__, 'My method, overridden') + + a2_desc = IDerived.getDescriptionFor('attr2') + self.assertTrue(isinstance(a2_desc, Attribute)) + self.assertEqual(a2_desc.__name__, 'attr2') + self.assertEqual(a2_desc.__doc__, 'My attr2') + + m2_desc = IDerived.getDescriptionFor('method2') + self.assertTrue(isinstance(m2_desc, Method)) + self.assertEqual(m2_desc.__name__, 'method2') + 
self.assertEqual(m2_desc.__doc__, 'My method2') + + def test___getitem__nonesuch(self): + from zope.interface import Interface + + class IEmpty(Interface): + pass + + self.assertRaises(KeyError, IEmpty.__getitem__, 'nonesuch') + + def test___getitem__simple(self): + from zope.interface import Attribute + from zope.interface.interface import Method + from zope.interface import Interface + + + class ISimple(Interface): + attr = Attribute(u'My attr') + + def method(): + "My method" + + a_desc = ISimple['attr'] + self.assertTrue(isinstance(a_desc, Attribute)) + self.assertEqual(a_desc.__name__, 'attr') + self.assertEqual(a_desc.__doc__, 'My attr') + + m_desc = ISimple['method'] + self.assertTrue(isinstance(m_desc, Method)) + self.assertEqual(m_desc.__name__, 'method') + self.assertEqual(m_desc.__doc__, 'My method') + + def test___getitem___derived(self): + from zope.interface import Attribute + from zope.interface.interface import Method + from zope.interface import Interface + + + class IBase(Interface): + attr = Attribute(u'My attr') + + def method(): + "My method" + + class IDerived(IBase): + attr2 = Attribute(u'My attr2') + + def method(): + "My method, overridden" + + def method2(): + "My method2" + + a_desc = IDerived['attr'] + self.assertTrue(isinstance(a_desc, Attribute)) + self.assertEqual(a_desc.__name__, 'attr') + self.assertEqual(a_desc.__doc__, 'My attr') + + m_desc = IDerived['method'] + self.assertTrue(isinstance(m_desc, Method)) + self.assertEqual(m_desc.__name__, 'method') + self.assertEqual(m_desc.__doc__, 'My method, overridden') + + a2_desc = IDerived['attr2'] + self.assertTrue(isinstance(a2_desc, Attribute)) + self.assertEqual(a2_desc.__name__, 'attr2') + self.assertEqual(a2_desc.__doc__, 'My attr2') + + m2_desc = IDerived['method2'] + self.assertTrue(isinstance(m2_desc, Method)) + self.assertEqual(m2_desc.__name__, 'method2') + self.assertEqual(m2_desc.__doc__, 'My method2') + + def test___contains__nonesuch(self): + from zope.interface import 
Interface + + class IEmpty(Interface): + pass + + self.assertFalse('nonesuch' in IEmpty) + + def test___contains__simple(self): + from zope.interface import Attribute + from zope.interface import Interface + + + class ISimple(Interface): + attr = Attribute(u'My attr') + + def method(): + "My method" + + self.assertTrue('attr' in ISimple) + self.assertTrue('method' in ISimple) + + def test___contains__derived(self): + from zope.interface import Attribute + from zope.interface import Interface + + + class IBase(Interface): + attr = Attribute(u'My attr') + + def method(): + "My method" + + class IDerived(IBase): + attr2 = Attribute(u'My attr2') + + def method(): + "My method, overridden" + + def method2(): + "My method2" + + self.assertTrue('attr' in IDerived) + self.assertTrue('method' in IDerived) + self.assertTrue('attr2' in IDerived) + self.assertTrue('method2' in IDerived) + + def test___iter__empty(self): + from zope.interface import Interface + + class IEmpty(Interface): + pass + + self.assertEqual(list(IEmpty), []) + + def test___iter__simple(self): + from zope.interface import Attribute + from zope.interface import Interface + + + class ISimple(Interface): + attr = Attribute(u'My attr') + + def method(): + "My method" + + self.assertEqual(sorted(list(ISimple)), ['attr', 'method']) + + def test___iter__derived(self): + from zope.interface import Attribute + from zope.interface import Interface + + + class IBase(Interface): + attr = Attribute(u'My attr') + + def method(): + "My method" + + class IDerived(IBase): + attr2 = Attribute(u'My attr2') + + def method(): + "My method, overridden" + + def method2(): + "My method2" + + self.assertEqual(sorted(list(IDerived)), + ['attr', 'attr2', 'method', 'method2']) + + def test_function_attributes_become_tagged_values(self): + from zope.interface import Interface + + class ITagMe(Interface): + def method(): + pass + method.optional = 1 + + method = ITagMe['method'] + self.assertEqual(method.getTaggedValue('optional'), 
1) + + def test___doc___non_element(self): + from zope.interface import Interface + + class IHaveADocString(Interface): + "xxx" + + self.assertEqual(IHaveADocString.__doc__, "xxx") + self.assertEqual(list(IHaveADocString), []) + + def test___doc___as_element(self): + from zope.interface import Attribute + from zope.interface import Interface + + class IHaveADocString(Interface): + "xxx" + __doc__ = Attribute('the doc') + + self.assertEqual(IHaveADocString.__doc__, "") + self.assertEqual(list(IHaveADocString), ['__doc__']) + + def _errorsEqual(self, has_invariant, error_len, error_msgs, iface): + from zope.interface.exceptions import Invalid + self.assertRaises(Invalid, iface.validateInvariants, has_invariant) + e = [] + try: + iface.validateInvariants(has_invariant, e) + self.fail("validateInvariants should always raise") + except Invalid as error: + self.assertEqual(error.args[0], e) + + self.assertEqual(len(e), error_len) + msgs = [error.args[0] for error in e] + msgs.sort() + for msg in msgs: + self.assertEqual(msg, error_msgs.pop(0)) + + def test_invariant_simple(self): + from zope.interface import Attribute + from zope.interface import Interface + from zope.interface import directlyProvides + from zope.interface import invariant + + class IInvariant(Interface): + foo = Attribute('foo') + bar = Attribute('bar; must eval to Boolean True if foo does') + invariant(_ifFooThenBar) + + class HasInvariant(object): + pass + + # set up + has_invariant = HasInvariant() + directlyProvides(has_invariant, IInvariant) + + # the tests + self.assertEqual(IInvariant.getTaggedValue('invariants'), + [_ifFooThenBar]) + self.assertEqual(IInvariant.validateInvariants(has_invariant), None) + has_invariant.bar = 27 + self.assertEqual(IInvariant.validateInvariants(has_invariant), None) + has_invariant.foo = 42 + self.assertEqual(IInvariant.validateInvariants(has_invariant), None) + del has_invariant.bar + self._errorsEqual(has_invariant, 1, ['If Foo, then Bar!'], + IInvariant) + + def 
test_invariant_nested(self): + from zope.interface import Attribute + from zope.interface import Interface + from zope.interface import directlyProvides + from zope.interface import invariant + + class IInvariant(Interface): + foo = Attribute('foo') + bar = Attribute('bar; must eval to Boolean True if foo does') + invariant(_ifFooThenBar) + + class ISubInvariant(IInvariant): + invariant(_barGreaterThanFoo) + + class HasInvariant(object): + pass + + # nested interfaces with invariants: + self.assertEqual(ISubInvariant.getTaggedValue('invariants'), + [_barGreaterThanFoo]) + has_invariant = HasInvariant() + directlyProvides(has_invariant, ISubInvariant) + has_invariant.foo = 42 + # even though the interface has changed, we should still only have one + # error. + self._errorsEqual(has_invariant, 1, ['If Foo, then Bar!'], + ISubInvariant) + # however, if we set foo to 0 (Boolean False) and bar to a negative + # number then we'll get the new error + has_invariant.foo = 2 + has_invariant.bar = 1 + self._errorsEqual(has_invariant, 1, + ['Please, Boo MUST be greater than Foo!'], + ISubInvariant) + # and if we set foo to a positive number and boo to 0, we'll + # get both errors! 
+ has_invariant.foo = 1 + has_invariant.bar = 0 + self._errorsEqual(has_invariant, 2, + ['If Foo, then Bar!', + 'Please, Boo MUST be greater than Foo!'], + ISubInvariant) + # for a happy ending, we'll make the invariants happy + has_invariant.foo = 1 + has_invariant.bar = 2 + self.assertEqual(IInvariant.validateInvariants(has_invariant), None) + + def test_invariant_mutandis(self): + from zope.interface import Attribute + from zope.interface import Interface + from zope.interface import directlyProvides + from zope.interface import invariant + + class IInvariant(Interface): + foo = Attribute('foo') + bar = Attribute('bar; must eval to Boolean True if foo does') + invariant(_ifFooThenBar) + + class HasInvariant(object): + pass + + # now we'll do two invariants on the same interface, + # just to make sure that a small + # multi-invariant interface is at least minimally tested. + has_invariant = HasInvariant() + directlyProvides(has_invariant, IInvariant) + has_invariant.foo = 42 + + # if you really need to mutate, then this would be the way to do it. + # Probably a bad idea, though. :-) + old_invariants = IInvariant.getTaggedValue('invariants') + invariants = old_invariants[:] + invariants.append(_barGreaterThanFoo) + IInvariant.setTaggedValue('invariants', invariants) + + # even though the interface has changed, we should still only have one + # error. + self._errorsEqual(has_invariant, 1, ['If Foo, then Bar!'], + IInvariant) + # however, if we set foo to 0 (Boolean False) and bar to a negative + # number then we'll get the new error + has_invariant.foo = 2 + has_invariant.bar = 1 + self._errorsEqual(has_invariant, 1, + ['Please, Boo MUST be greater than Foo!'], IInvariant) + # and if we set foo to a positive number and boo to 0, we'll + # get both errors! 
+ has_invariant.foo = 1 + has_invariant.bar = 0 + self._errorsEqual(has_invariant, 2, + ['If Foo, then Bar!', + 'Please, Boo MUST be greater than Foo!'], + IInvariant) + # for another happy ending, we'll make the invariants happy again + has_invariant.foo = 1 + has_invariant.bar = 2 + self.assertEqual(IInvariant.validateInvariants(has_invariant), None) + # clean up + IInvariant.setTaggedValue('invariants', old_invariants) + + def test___doc___element(self): + from zope.interface import Interface + from zope.interface import Attribute + class I(Interface): + "xxx" + + self.assertEqual(I.__doc__, "xxx") + self.assertEqual(list(I), []) + + class I(Interface): + "xxx" + + __doc__ = Attribute('the doc') + + self.assertEqual(I.__doc__, "") + self.assertEqual(list(I), ['__doc__']) + + @_skip_under_py3k + def testIssue228(self): + # Test for http://collector.zope.org/Zope3-dev/228 + # Old style classes don't have a '__class__' attribute + # No old style classes in Python 3, so the test becomes moot. 
+ import sys + + from zope.interface import Interface + + class I(Interface): + "xxx" + + class OldStyle: + __providedBy__ = None + + self.assertRaises(AttributeError, I.providedBy, OldStyle) + + def test_invariant_as_decorator(self): + from zope.interface import Interface + from zope.interface import Attribute + from zope.interface import implementer + from zope.interface import invariant + from zope.interface.exceptions import Invalid + + class IRange(Interface): + min = Attribute("Lower bound") + max = Attribute("Upper bound") + + @invariant + def range_invariant(ob): + if ob.max < ob.min: + raise Invalid('max < min') + + @implementer(IRange) + class Range(object): + + def __init__(self, min, max): + self.min, self.max = min, max + + IRange.validateInvariants(Range(1,2)) + IRange.validateInvariants(Range(1,1)) + try: + IRange.validateInvariants(Range(2,1)) + except Invalid as e: + self.assertEqual(str(e), 'max < min') + + def test_taggedValue(self): + from zope.interface import Attribute + from zope.interface import Interface + from zope.interface import taggedValue + + class ITagged(Interface): + foo = Attribute('foo') + bar = Attribute('bar; must eval to Boolean True if foo does') + taggedValue('qux', 'Spam') + + class HasInvariant(object): + pass + + self.assertEqual(ITagged.getTaggedValue('qux'), 'Spam') + self.assertTrue('qux' in ITagged.getTaggedValueTags()) + + def test_description_cache_management(self): + # See https://bugs.launchpad.net/zope.interface/+bug/185974 + # There was a bug where the cache used by Specification.get() was not + # cleared when the bases were changed. 
+ from zope.interface import Interface + from zope.interface import Attribute + + class I1(Interface): + a = Attribute('a') + + class I2(I1): + pass + + class I3(I2): + pass + + self.assertTrue(I3.get('a') is I1.get('a')) + + I2.__bases__ = (Interface,) + self.assertTrue(I3.get('a') is None) + + def test___call___defers_to___conform___(self): + from zope.interface import Interface + from zope.interface import implementer + + class I(Interface): + pass + + @implementer(I) + class C(object): + def __conform__(self, proto): + return 0 + + self.assertEqual(I(C()), 0) + + def test___call___object_implements(self): + from zope.interface import Interface + from zope.interface import implementer + + class I(Interface): + pass + + @implementer(I) + class C(object): + pass + + c = C() + self.assertTrue(I(c) is c) + + def test___call___miss_wo_alternate(self): + from zope.interface import Interface + + class I(Interface): + pass + + class C(object): + pass + + c = C() + self.assertRaises(TypeError, I, c) + + def test___call___miss_w_alternate(self): + from zope.interface import Interface + + class I(Interface): + pass + + class C(object): + pass + + c = C() + self.assertTrue(I(c, self) is self) + + def test___call___w_adapter_hook(self): + from zope.interface import Interface + from zope.interface.interface import adapter_hooks + old_hooks = adapter_hooks[:] + + def _miss(iface, obj): + pass + + def _hit(iface, obj): + return self + + class I(Interface): + pass + + class C(object): + pass + + c = C() + + old_adapter_hooks = adapter_hooks[:] + adapter_hooks[:] = [_miss, _hit] + try: + self.assertTrue(I(c) is self) + finally: + adapter_hooks[:] = old_adapter_hooks + + +class AttributeTests(ElementTests): + + DEFAULT_NAME = 'TestAttribute' + + def _getTargetClass(self): + from zope.interface.interface import Attribute + return Attribute + + +class MethodTests(AttributeTests): + + DEFAULT_NAME = 'TestMethod' + + def _getTargetClass(self): + from zope.interface.interface import 
Method + return Method + + def test_optional_as_property(self): + method = self._makeOne() + self.assertEqual(method.optional, {}) + method.optional = {'foo': 'bar'} + self.assertEqual(method.optional, {'foo': 'bar'}) + del method.optional + self.assertEqual(method.optional, {}) + + def test___call___raises_BrokenImplementation(self): + from zope.interface.exceptions import BrokenImplementation + method = self._makeOne() + try: + method() + except BrokenImplementation as e: + self.assertEqual(e.interface, None) + self.assertEqual(e.name, self.DEFAULT_NAME) + else: + self.fail('__call__ should raise BrokenImplementation') + + def test_getSignatureInfo_bare(self): + method = self._makeOne() + info = method.getSignatureInfo() + self.assertEqual(list(info['positional']), []) + self.assertEqual(list(info['required']), []) + self.assertEqual(info['optional'], {}) + self.assertEqual(info['varargs'], None) + self.assertEqual(info['kwargs'], None) + + def test_getSignatureString_bare(self): + method = self._makeOne() + self.assertEqual(method.getSignatureString(), '()') + + def test_getSignatureString_w_only_required(self): + method = self._makeOne() + method.positional = method.required = ['foo'] + self.assertEqual(method.getSignatureString(), '(foo)') + + def test_getSignatureString_w_optional(self): + method = self._makeOne() + method.positional = method.required = ['foo'] + method.optional = {'foo': 'bar'} + self.assertEqual(method.getSignatureString(), "(foo='bar')") + + def test_getSignatureString_w_varargs(self): + method = self._makeOne() + method.varargs = 'args' + self.assertEqual(method.getSignatureString(), "(*args)") + + def test_getSignatureString_w_kwargs(self): + method = self._makeOne() + method.kwargs = 'kw' + self.assertEqual(method.getSignatureString(), "(**kw)") + + +class Test_fromFunction(unittest.TestCase): + + def _callFUT(self, *args, **kw): + from zope.interface.interface import fromFunction + return fromFunction(*args, **kw) + + def 
test_bare(self): + def _func(): + "DOCSTRING" + method = self._callFUT(_func) + self.assertEqual(method.getName(), '_func') + self.assertEqual(method.getDoc(), 'DOCSTRING') + self.assertEqual(method.interface, None) + self.assertEqual(list(method.getTaggedValueTags()), []) + info = method.getSignatureInfo() + self.assertEqual(list(info['positional']), []) + self.assertEqual(list(info['required']), []) + self.assertEqual(info['optional'], {}) + self.assertEqual(info['varargs'], None) + self.assertEqual(info['kwargs'], None) + + def test_w_interface(self): + from zope.interface.interface import InterfaceClass + class IFoo(InterfaceClass): + pass + def _func(): + "DOCSTRING" + method = self._callFUT(_func, interface=IFoo) + self.assertEqual(method.interface, IFoo) + + def test_w_name(self): + def _func(): + "DOCSTRING" + method = self._callFUT(_func, name='anotherName') + self.assertEqual(method.getName(), 'anotherName') + + def test_w_only_required(self): + def _func(foo): + "DOCSTRING" + method = self._callFUT(_func) + info = method.getSignatureInfo() + self.assertEqual(list(info['positional']), ['foo']) + self.assertEqual(list(info['required']), ['foo']) + self.assertEqual(info['optional'], {}) + self.assertEqual(info['varargs'], None) + self.assertEqual(info['kwargs'], None) + + def test_w_optional(self): + def _func(foo='bar'): + "DOCSTRING" + method = self._callFUT(_func) + info = method.getSignatureInfo() + self.assertEqual(list(info['positional']), ['foo']) + self.assertEqual(list(info['required']), []) + self.assertEqual(info['optional'], {'foo': 'bar'}) + self.assertEqual(info['varargs'], None) + self.assertEqual(info['kwargs'], None) + + def test_w_optional_self(self): + # XXX This is a weird case, trying to cover the following code in + # FUT:: + # + # nr = na-len(defaults) + # if nr < 0: + # defaults=defaults[-nr:] + # nr = 0 + def _func(self='bar'): + "DOCSTRING" + method = self._callFUT(_func, imlevel=1) + info = method.getSignatureInfo() + 
self.assertEqual(list(info['positional']), []) + self.assertEqual(list(info['required']), []) + self.assertEqual(info['optional'], {}) + self.assertEqual(info['varargs'], None) + self.assertEqual(info['kwargs'], None) + + def test_w_varargs(self): + def _func(*args): + "DOCSTRING" + method = self._callFUT(_func) + info = method.getSignatureInfo() + self.assertEqual(list(info['positional']), []) + self.assertEqual(list(info['required']), []) + self.assertEqual(info['optional'], {}) + self.assertEqual(info['varargs'], 'args') + self.assertEqual(info['kwargs'], None) + + def test_w_kwargs(self): + def _func(**kw): + "DOCSTRING" + method = self._callFUT(_func) + info = method.getSignatureInfo() + self.assertEqual(list(info['positional']), []) + self.assertEqual(list(info['required']), []) + self.assertEqual(info['optional'], {}) + self.assertEqual(info['varargs'], None) + self.assertEqual(info['kwargs'], 'kw') + + def test_full_spectrum(self): + def _func(foo, bar='baz', *args, **kw): + "DOCSTRING" + method = self._callFUT(_func) + info = method.getSignatureInfo() + self.assertEqual(list(info['positional']), ['foo', 'bar']) + self.assertEqual(list(info['required']), ['foo']) + self.assertEqual(info['optional'], {'bar': 'baz'}) + self.assertEqual(info['varargs'], 'args') + self.assertEqual(info['kwargs'], 'kw') + + +class Test_fromMethod(unittest.TestCase): + + def _callFUT(self, *args, **kw): + from zope.interface.interface import fromMethod + return fromMethod(*args, **kw) + + def test_no_args(self): + class Foo(object): + def bar(self): + "DOCSTRING" + method = self._callFUT(Foo.bar) + self.assertEqual(method.getName(), 'bar') + self.assertEqual(method.getDoc(), 'DOCSTRING') + self.assertEqual(method.interface, None) + self.assertEqual(list(method.getTaggedValueTags()), []) + info = method.getSignatureInfo() + self.assertEqual(list(info['positional']), []) + self.assertEqual(list(info['required']), []) + self.assertEqual(info['optional'], {}) + 
self.assertEqual(info['varargs'], None) + self.assertEqual(info['kwargs'], None) + + def test_full_spectrum(self): + class Foo(object): + def bar(self, foo, bar='baz', *args, **kw): + "DOCSTRING" + method = self._callFUT(Foo.bar) + info = method.getSignatureInfo() + self.assertEqual(list(info['positional']), ['foo', 'bar']) + self.assertEqual(list(info['required']), ['foo']) + self.assertEqual(info['optional'], {'bar': 'baz'}) + self.assertEqual(info['varargs'], 'args') + self.assertEqual(info['kwargs'], 'kw') + + def test_w_non_method(self): + def foo(): + "DOCSTRING" + method = self._callFUT(foo) + self.assertEqual(method.getName(), 'foo') + self.assertEqual(method.getDoc(), 'DOCSTRING') + self.assertEqual(method.interface, None) + self.assertEqual(list(method.getTaggedValueTags()), []) + info = method.getSignatureInfo() + self.assertEqual(list(info['positional']), []) + self.assertEqual(list(info['required']), []) + self.assertEqual(info['optional'], {}) + self.assertEqual(info['varargs'], None) + self.assertEqual(info['kwargs'], None) + +class DummyDependent(object): + + def __init__(self): + self._changed = [] + + def changed(self, originally_changed): + self._changed.append(originally_changed) + + +def _barGreaterThanFoo(obj): + from zope.interface.exceptions import Invalid + foo = getattr(obj, 'foo', None) + bar = getattr(obj, 'bar', None) + if foo is not None and isinstance(foo, type(bar)): + # type checking should be handled elsewhere (like, say, + # schema); these invariants should be intra-interface + # constraints. This is a hacky way to do it, maybe, but you + # get the idea + if not bar > foo: + raise Invalid('Please, Boo MUST be greater than Foo!') + +def _ifFooThenBar(obj): + from zope.interface.exceptions import Invalid + if getattr(obj, 'foo', None) and not getattr(obj, 'bar', None): + raise Invalid('If Foo, then Bar!') + + +class _Monkey(object): + # context-manager for replacing module names in the scope of a test. 
+ def __init__(self, module, **kw): + self.module = module + self.to_restore = dict([(key, getattr(module, key)) for key in kw]) + for key, value in kw.items(): + setattr(module, key, value) + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + for key, value in self.to_restore.items(): + setattr(self.module, key, value) diff --git a/thesisenv/lib/python3.6/site-packages/zope/interface/tests/test_interfaces.py b/thesisenv/lib/python3.6/site-packages/zope/interface/tests/test_interfaces.py new file mode 100644 index 0000000..285d857 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/interface/tests/test_interfaces.py @@ -0,0 +1,95 @@ +import unittest + + +class _ConformsToIObjectEvent(object): + + def _makeOne(self, target=None): + if target is None: + target = object() + return self._getTargetClass()(target) + + def test_class_conforms_to_IObjectEvent(self): + from zope.interface.interfaces import IObjectEvent + from zope.interface.verify import verifyClass + verifyClass(IObjectEvent, self._getTargetClass()) + + def test_instance_conforms_to_IObjectEvent(self): + from zope.interface.interfaces import IObjectEvent + from zope.interface.verify import verifyObject + verifyObject(IObjectEvent, self._makeOne()) + + +class _ConformsToIRegistrationEvent(_ConformsToIObjectEvent): + + def test_class_conforms_to_IRegistrationEvent(self): + from zope.interface.interfaces import IRegistrationEvent + from zope.interface.verify import verifyClass + verifyClass(IRegistrationEvent, self._getTargetClass()) + + def test_instance_conforms_to_IRegistrationEvent(self): + from zope.interface.interfaces import IRegistrationEvent + from zope.interface.verify import verifyObject + verifyObject(IRegistrationEvent, self._makeOne()) + + +class ObjectEventTests(unittest.TestCase, _ConformsToIObjectEvent): + + def _getTargetClass(self): + from zope.interface.interfaces import ObjectEvent + return ObjectEvent + + def test_ctor(self): + target = 
object() + event = self._makeOne(target) + self.assertTrue(event.object is target) + + +class RegistrationEventTests(unittest.TestCase, + _ConformsToIRegistrationEvent): + + def _getTargetClass(self): + from zope.interface.interfaces import RegistrationEvent + return RegistrationEvent + + def test___repr__(self): + target = object() + event = self._makeOne(target) + r = repr(event) + self.assertEqual(r.splitlines(), + ['RegistrationEvent event:', repr(target)]) + + +class RegisteredTests(unittest.TestCase, + _ConformsToIRegistrationEvent): + + def _getTargetClass(self): + from zope.interface.interfaces import Registered + return Registered + + def test_class_conforms_to_IRegistered(self): + from zope.interface.interfaces import IRegistered + from zope.interface.verify import verifyClass + verifyClass(IRegistered, self._getTargetClass()) + + def test_instance_conforms_to_IRegistered(self): + from zope.interface.interfaces import IRegistered + from zope.interface.verify import verifyObject + verifyObject(IRegistered, self._makeOne()) + + +class UnregisteredTests(unittest.TestCase, + _ConformsToIRegistrationEvent): + + def _getTargetClass(self): + from zope.interface.interfaces import Unregistered + return Unregistered + + def test_class_conforms_to_IUnregistered(self): + from zope.interface.interfaces import IUnregistered + from zope.interface.verify import verifyClass + verifyClass(IUnregistered, self._getTargetClass()) + + def test_instance_conforms_to_IUnregistered(self): + from zope.interface.interfaces import IUnregistered + from zope.interface.verify import verifyObject + verifyObject(IUnregistered, self._makeOne()) diff --git a/thesisenv/lib/python3.6/site-packages/zope/interface/tests/test_odd_declarations.py b/thesisenv/lib/python3.6/site-packages/zope/interface/tests/test_odd_declarations.py new file mode 100644 index 0000000..46e7675 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/interface/tests/test_odd_declarations.py @@ -0,0 +1,268 @@ 
+############################################################################## +# +# Copyright (c) 2003 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Test interface declarations against ExtensionClass-like classes. + +These tests are to make sure we do something sane in the presence of +classic ExtensionClass classes and instances. +""" +import unittest + +from zope.interface.tests import odd +from zope.interface import Interface +from zope.interface import implementer +from zope.interface import directlyProvides +from zope.interface import providedBy +from zope.interface import directlyProvidedBy +from zope.interface import classImplements +from zope.interface import classImplementsOnly +from zope.interface import implementedBy +from zope.interface._compat import _skip_under_py3k + +class I1(Interface): pass +class I2(Interface): pass +class I3(Interface): pass +class I31(I3): pass +class I4(Interface): pass +class I5(Interface): pass + +class Odd(object): + pass +Odd = odd.MetaClass('Odd', Odd.__bases__, {}) + + +class B(Odd): __implemented__ = I2 + + +# TODO: We are going to need more magic to make classProvides work with odd +# classes. This will work in the next iteration. For now, we'll use +# a different mechanism. 
+ +# from zope.interface import classProvides +class A(Odd): + pass +classImplements(A, I1) + +class C(A, B): + pass +classImplements(C, I31) + + +class Test(unittest.TestCase): + + def test_ObjectSpecification(self): + c = C() + directlyProvides(c, I4) + self.assertEqual([i.getName() for i in providedBy(c)], + ['I4', 'I31', 'I1', 'I2'] + ) + self.assertEqual([i.getName() for i in providedBy(c).flattened()], + ['I4', 'I31', 'I3', 'I1', 'I2', 'Interface'] + ) + self.assertTrue(I1 in providedBy(c)) + self.assertFalse(I3 in providedBy(c)) + self.assertTrue(providedBy(c).extends(I3)) + self.assertTrue(providedBy(c).extends(I31)) + self.assertFalse(providedBy(c).extends(I5)) + + class COnly(A, B): + pass + classImplementsOnly(COnly, I31) + + class D(COnly): + pass + classImplements(D, I5) + + classImplements(D, I5) + + c = D() + directlyProvides(c, I4) + self.assertEqual([i.getName() for i in providedBy(c)], + ['I4', 'I5', 'I31']) + self.assertEqual([i.getName() for i in providedBy(c).flattened()], + ['I4', 'I5', 'I31', 'I3', 'Interface']) + self.assertFalse(I1 in providedBy(c)) + self.assertFalse(I3 in providedBy(c)) + self.assertTrue(providedBy(c).extends(I3)) + self.assertFalse(providedBy(c).extends(I1)) + self.assertTrue(providedBy(c).extends(I31)) + self.assertTrue(providedBy(c).extends(I5)) + + class COnly(A, B): __implemented__ = I31 + class D(COnly): + pass + classImplements(D, I5) + + classImplements(D, I5) + c = D() + directlyProvides(c, I4) + self.assertEqual([i.getName() for i in providedBy(c)], + ['I4', 'I5', 'I31']) + self.assertEqual([i.getName() for i in providedBy(c).flattened()], + ['I4', 'I5', 'I31', 'I3', 'Interface']) + self.assertFalse(I1 in providedBy(c)) + self.assertFalse(I3 in providedBy(c)) + self.assertTrue(providedBy(c).extends(I3)) + self.assertFalse(providedBy(c).extends(I1)) + self.assertTrue(providedBy(c).extends(I31)) + self.assertTrue(providedBy(c).extends(I5)) + + def test_classImplements(self): + + @implementer(I3) + class A(Odd): + 
pass + + @implementer(I4) + class B(Odd): + pass + + class C(A, B): + pass + classImplements(C, I1, I2) + self.assertEqual([i.getName() for i in implementedBy(C)], + ['I1', 'I2', 'I3', 'I4']) + classImplements(C, I5) + self.assertEqual([i.getName() for i in implementedBy(C)], + ['I1', 'I2', 'I5', 'I3', 'I4']) + + def test_classImplementsOnly(self): + @implementer(I3) + class A(Odd): + pass + + @implementer(I4) + class B(Odd): + pass + + class C(A, B): + pass + classImplementsOnly(C, I1, I2) + self.assertEqual([i.__name__ for i in implementedBy(C)], + ['I1', 'I2']) + + + def test_directlyProvides(self): + class IA1(Interface): pass + class IA2(Interface): pass + class IB(Interface): pass + class IC(Interface): pass + class A(Odd): + pass + classImplements(A, IA1, IA2) + + class B(Odd): + pass + classImplements(B, IB) + + class C(A, B): + pass + classImplements(C, IC) + + + ob = C() + directlyProvides(ob, I1, I2) + self.assertTrue(I1 in providedBy(ob)) + self.assertTrue(I2 in providedBy(ob)) + self.assertTrue(IA1 in providedBy(ob)) + self.assertTrue(IA2 in providedBy(ob)) + self.assertTrue(IB in providedBy(ob)) + self.assertTrue(IC in providedBy(ob)) + + directlyProvides(ob, directlyProvidedBy(ob)-I2) + self.assertTrue(I1 in providedBy(ob)) + self.assertFalse(I2 in providedBy(ob)) + self.assertFalse(I2 in providedBy(ob)) + directlyProvides(ob, directlyProvidedBy(ob), I2) + self.assertTrue(I2 in providedBy(ob)) + + @_skip_under_py3k + def test_directlyProvides_fails_for_odd_class(self): + self.assertRaises(TypeError, directlyProvides, C, I5) + + # see above + #def TODO_test_classProvides_fails_for_odd_class(self): + # try: + # class A(Odd): + # classProvides(I1) + # except TypeError: + # pass # Sucess + # self.assert_(False, + # "Shouldn't be able to use directlyProvides on odd class." 
+ # ) + + def test_implementedBy(self): + class I2(I1): pass + + class C1(Odd): + pass + classImplements(C1, I2) + + class C2(C1): + pass + classImplements(C2, I3) + + self.assertEqual([i.getName() for i in implementedBy(C2)], + ['I3', 'I2']) + + def test_odd_metaclass_that_doesnt_subclass_type(self): + # This was originally a doctest in odd.py. + # It verifies that the metaclass the rest of these tests use + # works as expected. + + # This is used for testing support for ExtensionClass in new interfaces. + + class A(object): + a = 1 + + A = odd.MetaClass('A', A.__bases__, A.__dict__) + + class B(object): + b = 1 + + B = odd.MetaClass('B', B.__bases__, B.__dict__) + + class C(A, B): + pass + + self.assertEqual(C.__bases__, (A, B)) + + a = A() + aa = A() + self.assertEqual(a.a, 1) + self.assertEqual(aa.a, 1) + + aa.a = 2 + self.assertEqual(a.a, 1) + self.assertEqual(aa.a, 2) + + c = C() + self.assertEqual(c.a, 1) + self.assertEqual(c.b, 1) + + c.b = 2 + self.assertEqual(c.b, 2) + + C.c = 1 + self.assertEqual(c.c, 1) + c.c + + try: + from types import ClassType + except ImportError: + pass + else: + # This test only makes sense under Python 2.x + assert not isinstance(C, (type, ClassType)) + + self.assertIs(C.__class__.__class__, C.__class__) diff --git a/thesisenv/lib/python3.6/site-packages/zope/interface/tests/test_registry.py b/thesisenv/lib/python3.6/site-packages/zope/interface/tests/test_registry.py new file mode 100644 index 0000000..e5a8eb0 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/interface/tests/test_registry.py @@ -0,0 +1,2788 @@ +############################################################################## +# +# Copyright (c) 2001, 2002, 2009 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Component Registry Tests""" +# pylint:disable=protected-access +import unittest + +from zope.interface import Interface +from zope.interface.adapter import VerifyingAdapterRegistry + +from zope.interface.registry import Components + +class ComponentsTests(unittest.TestCase): + + def _getTargetClass(self): + return Components + + def _makeOne(self, name='test', *args, **kw): + return self._getTargetClass()(name, *args, **kw) + + def _wrapEvents(self): + from zope.interface import registry + _events = [] + def _notify(*args, **kw): + _events.append((args, kw)) + _monkey = _Monkey(registry, notify=_notify) + return _monkey, _events + + def test_ctor_no_bases(self): + from zope.interface.adapter import AdapterRegistry + comp = self._makeOne('testing') + self.assertEqual(comp.__name__, 'testing') + self.assertEqual(comp.__bases__, ()) + self.assertTrue(isinstance(comp.adapters, AdapterRegistry)) + self.assertTrue(isinstance(comp.utilities, AdapterRegistry)) + self.assertEqual(comp.adapters.__bases__, ()) + self.assertEqual(comp.utilities.__bases__, ()) + self.assertEqual(comp._utility_registrations, {}) + self.assertEqual(comp._adapter_registrations, {}) + self.assertEqual(comp._subscription_registrations, []) + self.assertEqual(comp._handler_registrations, []) + + def test_ctor_w_base(self): + base = self._makeOne('base') + comp = self._makeOne('testing', (base,)) + self.assertEqual(comp.__name__, 'testing') + self.assertEqual(comp.__bases__, (base,)) + self.assertEqual(comp.adapters.__bases__, (base.adapters,)) + self.assertEqual(comp.utilities.__bases__, (base.utilities,)) + + def test___repr__(self): + comp = self._makeOne('testing') + 
self.assertEqual(repr(comp), '') + + # test _init_registries / _init_registrations via only caller, __init__. + + def test_assign_to___bases__(self): + base1 = self._makeOne('base1') + base2 = self._makeOne('base2') + comp = self._makeOne() + comp.__bases__ = (base1, base2) + self.assertEqual(comp.__bases__, (base1, base2)) + self.assertEqual(comp.adapters.__bases__, + (base1.adapters, base2.adapters)) + self.assertEqual(comp.utilities.__bases__, + (base1.utilities, base2.utilities)) + + def test_registerUtility_with_component_name(self): + from zope.interface.declarations import named, InterfaceClass + + + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + + @named(u'foo') + class Foo(object): + pass + foo = Foo() + _info = u'info' + + comp = self._makeOne() + comp.registerUtility(foo, ifoo, info=_info) + self.assertEqual( + comp._utility_registrations[ifoo, u'foo'], + (foo, _info, None)) + + def test_registerUtility_both_factory_and_component(self): + def _factory(): + raise NotImplementedError() + _to_reg = object() + comp = self._makeOne() + self.assertRaises(TypeError, comp.registerUtility, + component=_to_reg, factory=_factory) + + def test_registerUtility_w_component(self): + from zope.interface.declarations import InterfaceClass + from zope.interface.interfaces import Registered + from zope.interface.registry import UtilityRegistration + + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + _info = u'info' + _name = u'name' + _to_reg = object() + comp = self._makeOne() + _monkey, _events = self._wrapEvents() + with _monkey: + comp.registerUtility(_to_reg, ifoo, _name, _info) + self.assertTrue(comp.utilities._adapters[0][ifoo][_name] is _to_reg) + self.assertEqual(comp._utility_registrations[ifoo, _name], + (_to_reg, _info, None)) + self.assertEqual(comp.utilities._subscribers[0][ifoo][''], (_to_reg,)) + self.assertEqual(len(_events), 1) + args, kw = _events[0] + event, = args + self.assertEqual(kw, {}) + self.assertTrue(isinstance(event, 
Registered)) + self.assertTrue(isinstance(event.object, UtilityRegistration)) + self.assertTrue(event.object.registry is comp) + self.assertTrue(event.object.provided is ifoo) + self.assertTrue(event.object.name is _name) + self.assertTrue(event.object.component is _to_reg) + self.assertTrue(event.object.info is _info) + self.assertTrue(event.object.factory is None) + + def test_registerUtility_w_factory(self): + from zope.interface.declarations import InterfaceClass + from zope.interface.interfaces import Registered + from zope.interface.registry import UtilityRegistration + + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + _info = u'info' + _name = u'name' + _to_reg = object() + def _factory(): + return _to_reg + comp = self._makeOne() + _monkey, _events = self._wrapEvents() + with _monkey: + comp.registerUtility(None, ifoo, _name, _info, factory=_factory) + self.assertEqual(len(_events), 1) + args, kw = _events[0] + event, = args + self.assertEqual(kw, {}) + self.assertTrue(isinstance(event, Registered)) + self.assertTrue(isinstance(event.object, UtilityRegistration)) + self.assertTrue(event.object.registry is comp) + self.assertTrue(event.object.provided is ifoo) + self.assertTrue(event.object.name is _name) + self.assertTrue(event.object.component is _to_reg) + self.assertTrue(event.object.info is _info) + self.assertTrue(event.object.factory is _factory) + + def test_registerUtility_no_provided_available(self): + class Foo(object): + pass + + _info = u'info' + _name = u'name' + _to_reg = Foo() + comp = self._makeOne() + self.assertRaises(TypeError, + comp.registerUtility, _to_reg, None, _name, _info) + + def test_registerUtility_wo_provided(self): + from zope.interface.declarations import directlyProvides + from zope.interface.declarations import InterfaceClass + from zope.interface.interfaces import Registered + from zope.interface.registry import UtilityRegistration + + class IFoo(InterfaceClass): + pass + class Foo(object): + pass + ifoo = 
IFoo('IFoo') + _info = u'info' + _name = u'name' + _to_reg = Foo() + directlyProvides(_to_reg, ifoo) + comp = self._makeOne() + _monkey, _events = self._wrapEvents() + with _monkey: + comp.registerUtility(_to_reg, None, _name, _info) + self.assertEqual(len(_events), 1) + args, kw = _events[0] + event, = args + self.assertEqual(kw, {}) + self.assertTrue(isinstance(event, Registered)) + self.assertTrue(isinstance(event.object, UtilityRegistration)) + self.assertTrue(event.object.registry is comp) + self.assertTrue(event.object.provided is ifoo) + self.assertTrue(event.object.name is _name) + self.assertTrue(event.object.component is _to_reg) + self.assertTrue(event.object.info is _info) + self.assertTrue(event.object.factory is None) + + def test_registerUtility_duplicates_existing_reg(self): + from zope.interface.declarations import InterfaceClass + + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + _info = u'info' + _name = u'name' + _to_reg = object() + comp = self._makeOne() + comp.registerUtility(_to_reg, ifoo, _name, _info) + _monkey, _events = self._wrapEvents() + with _monkey: + comp.registerUtility(_to_reg, ifoo, _name, _info) + self.assertEqual(len(_events), 0) + + def test_registerUtility_w_different_info(self): + from zope.interface.declarations import InterfaceClass + + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + _info1 = u'info1' + _info2 = u'info2' + _name = u'name' + _to_reg = object() + comp = self._makeOne() + comp.registerUtility(_to_reg, ifoo, _name, _info1) + _monkey, _events = self._wrapEvents() + with _monkey: + comp.registerUtility(_to_reg, ifoo, _name, _info2) + self.assertEqual(len(_events), 2) # unreg, reg + self.assertEqual(comp._utility_registrations[(ifoo, _name)], + (_to_reg, _info2, None)) # replaced + self.assertEqual(comp.utilities._subscribers[0][ifoo][u''], + (_to_reg,)) + + def test_registerUtility_w_different_names_same_component(self): + from zope.interface.declarations import InterfaceClass + + class 
IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + _info = u'info' + _name1 = u'name1' + _name2 = u'name2' + _other_reg = object() + _to_reg = object() + comp = self._makeOne() + comp.registerUtility(_other_reg, ifoo, _name1, _info) + _monkey, _events = self._wrapEvents() + with _monkey: + comp.registerUtility(_to_reg, ifoo, _name2, _info) + self.assertEqual(len(_events), 1) # reg + self.assertEqual(comp._utility_registrations[(ifoo, _name1)], + (_other_reg, _info, None)) + self.assertEqual(comp._utility_registrations[(ifoo, _name2)], + (_to_reg, _info, None)) + self.assertEqual(comp.utilities._subscribers[0][ifoo][u''], + (_other_reg, _to_reg,)) + + def test_registerUtility_replaces_existing_reg(self): + from zope.interface.declarations import InterfaceClass + from zope.interface.interfaces import Unregistered + from zope.interface.interfaces import Registered + from zope.interface.registry import UtilityRegistration + + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + _info = u'info' + _name = u'name' + _before, _after = object(), object() + comp = self._makeOne() + comp.registerUtility(_before, ifoo, _name, _info) + _monkey, _events = self._wrapEvents() + with _monkey: + comp.registerUtility(_after, ifoo, _name, _info) + self.assertEqual(len(_events), 2) + args, kw = _events[0] + event, = args + self.assertEqual(kw, {}) + self.assertTrue(isinstance(event, Unregistered)) + self.assertTrue(isinstance(event.object, UtilityRegistration)) + self.assertTrue(event.object.registry is comp) + self.assertTrue(event.object.provided is ifoo) + self.assertTrue(event.object.name is _name) + self.assertTrue(event.object.component is _before) + self.assertTrue(event.object.info is _info) + self.assertTrue(event.object.factory is None) + args, kw = _events[1] + event, = args + self.assertEqual(kw, {}) + self.assertTrue(isinstance(event, Registered)) + self.assertTrue(isinstance(event.object, UtilityRegistration)) + self.assertTrue(event.object.registry is comp) + 
self.assertTrue(event.object.provided is ifoo) + self.assertTrue(event.object.name is _name) + self.assertTrue(event.object.component is _after) + self.assertTrue(event.object.info is _info) + self.assertTrue(event.object.factory is None) + + def test_registerUtility_w_existing_subscr(self): + from zope.interface.declarations import InterfaceClass + + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + _info = u'info' + _name1 = u'name1' + _name2 = u'name2' + _to_reg = object() + comp = self._makeOne() + comp.registerUtility(_to_reg, ifoo, _name1, _info) + _monkey, _events = self._wrapEvents() + with _monkey: + comp.registerUtility(_to_reg, ifoo, _name2, _info) + self.assertEqual(comp.utilities._subscribers[0][ifoo][''], (_to_reg,)) + + def test_registerUtility_wo_event(self): + from zope.interface.declarations import InterfaceClass + + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + _info = u'info' + _name = u'name' + _to_reg = object() + comp = self._makeOne() + _monkey, _events = self._wrapEvents() + with _monkey: + comp.registerUtility(_to_reg, ifoo, _name, _info, False) + self.assertEqual(len(_events), 0) + + def test_registerUtility_changes_object_identity_after(self): + # If a subclass changes the identity of the _utility_registrations, + # the cache is updated and the right thing still happens. 
+ class CompThatChangesAfter1Reg(self._getTargetClass()): + reg_count = 0 + def registerUtility(self, *args): + self.reg_count += 1 + super(CompThatChangesAfter1Reg, self).registerUtility(*args) + if self.reg_count == 1: + self._utility_registrations = dict(self._utility_registrations) + + comp = CompThatChangesAfter1Reg() + comp.registerUtility(object(), Interface) + + self.assertEqual(len(list(comp.registeredUtilities())), 1) + + class IFoo(Interface): + pass + + comp.registerUtility(object(), IFoo) + self.assertEqual(len(list(comp.registeredUtilities())), 2) + + def test_registerUtility_changes_object_identity_before(self): + # If a subclass changes the identity of the _utility_registrations, + # the cache is updated and the right thing still happens. + class CompThatChangesAfter2Reg(self._getTargetClass()): + reg_count = 0 + def registerUtility(self, *args): + self.reg_count += 1 + if self.reg_count == 2: + self._utility_registrations = dict(self._utility_registrations) + + super(CompThatChangesAfter2Reg, self).registerUtility(*args) + + comp = CompThatChangesAfter2Reg() + comp.registerUtility(object(), Interface) + + self.assertEqual(len(list(comp.registeredUtilities())), 1) + + class IFoo(Interface): + pass + + comp.registerUtility(object(), IFoo) + self.assertEqual(len(list(comp.registeredUtilities())), 2) + + + class IBar(Interface): + pass + + comp.registerUtility(object(), IBar) + self.assertEqual(len(list(comp.registeredUtilities())), 3) + + + def test_unregisterUtility_neither_factory_nor_component_nor_provided(self): + comp = self._makeOne() + self.assertRaises(TypeError, comp.unregisterUtility, + component=None, provided=None, factory=None) + + def test_unregisterUtility_both_factory_and_component(self): + def _factory(): + raise NotImplementedError() + _to_reg = object() + comp = self._makeOne() + self.assertRaises(TypeError, comp.unregisterUtility, + component=_to_reg, factory=_factory) + + def test_unregisterUtility_w_component_miss(self): + from 
zope.interface.declarations import InterfaceClass + + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + _name = u'name' + _to_reg = object() + comp = self._makeOne() + _monkey, _events = self._wrapEvents() + with _monkey: + unreg = comp.unregisterUtility(_to_reg, ifoo, _name) + self.assertFalse(unreg) + self.assertFalse(_events) + + def test_unregisterUtility_w_component(self): + from zope.interface.declarations import InterfaceClass + from zope.interface.interfaces import Unregistered + from zope.interface.registry import UtilityRegistration + + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + _name = u'name' + _to_reg = object() + comp = self._makeOne() + comp.registerUtility(_to_reg, ifoo, _name) + _monkey, _events = self._wrapEvents() + with _monkey: + unreg = comp.unregisterUtility(_to_reg, ifoo, _name) + self.assertTrue(unreg) + self.assertFalse(comp.utilities._adapters) # all erased + self.assertFalse((ifoo, _name) in comp._utility_registrations) + self.assertFalse(comp.utilities._subscribers) + self.assertEqual(len(_events), 1) + args, kw = _events[0] + event, = args + self.assertEqual(kw, {}) + self.assertTrue(isinstance(event, Unregistered)) + self.assertTrue(isinstance(event.object, UtilityRegistration)) + self.assertTrue(event.object.registry is comp) + self.assertTrue(event.object.provided is ifoo) + self.assertTrue(event.object.name is _name) + self.assertTrue(event.object.component is _to_reg) + self.assertTrue(event.object.factory is None) + + def test_unregisterUtility_w_factory(self): + from zope.interface.declarations import InterfaceClass + from zope.interface.interfaces import Unregistered + from zope.interface.registry import UtilityRegistration + + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + _info = u'info' + _name = u'name' + _to_reg = object() + def _factory(): + return _to_reg + comp = self._makeOne() + comp.registerUtility(None, ifoo, _name, _info, factory=_factory) + _monkey, _events = self._wrapEvents() 
+ with _monkey: + unreg = comp.unregisterUtility(None, ifoo, _name, factory=_factory) + self.assertTrue(unreg) + self.assertEqual(len(_events), 1) + args, kw = _events[0] + event, = args + self.assertEqual(kw, {}) + self.assertTrue(isinstance(event, Unregistered)) + self.assertTrue(isinstance(event.object, UtilityRegistration)) + self.assertTrue(event.object.registry is comp) + self.assertTrue(event.object.provided is ifoo) + self.assertTrue(event.object.name is _name) + self.assertTrue(event.object.component is _to_reg) + self.assertTrue(event.object.factory is _factory) + + def test_unregisterUtility_wo_explicit_provided(self): + from zope.interface.declarations import directlyProvides + from zope.interface.declarations import InterfaceClass + from zope.interface.interfaces import Unregistered + from zope.interface.registry import UtilityRegistration + + class IFoo(InterfaceClass): + pass + class Foo(object): + pass + ifoo = IFoo('IFoo') + _info = u'info' + _name = u'name' + _to_reg = Foo() + directlyProvides(_to_reg, ifoo) + comp = self._makeOne() + comp.registerUtility(_to_reg, ifoo, _name, _info) + _monkey, _events = self._wrapEvents() + with _monkey: + unreg = comp.unregisterUtility(_to_reg, None, _name) + self.assertTrue(unreg) + self.assertEqual(len(_events), 1) + args, kw = _events[0] + event, = args + self.assertEqual(kw, {}) + self.assertTrue(isinstance(event, Unregistered)) + self.assertTrue(isinstance(event.object, UtilityRegistration)) + self.assertTrue(event.object.registry is comp) + self.assertTrue(event.object.provided is ifoo) + self.assertTrue(event.object.name is _name) + self.assertTrue(event.object.component is _to_reg) + self.assertTrue(event.object.info is _info) + self.assertTrue(event.object.factory is None) + + def test_unregisterUtility_wo_component_or_factory(self): + from zope.interface.declarations import directlyProvides + from zope.interface.declarations import InterfaceClass + from zope.interface.interfaces import Unregistered + 
from zope.interface.registry import UtilityRegistration + + class IFoo(InterfaceClass): + pass + class Foo(object): + pass + ifoo = IFoo('IFoo') + _info = u'info' + _name = u'name' + _to_reg = Foo() + directlyProvides(_to_reg, ifoo) + comp = self._makeOne() + comp.registerUtility(_to_reg, ifoo, _name, _info) + _monkey, _events = self._wrapEvents() + with _monkey: + # Just pass the interface / name + unreg = comp.unregisterUtility(provided=ifoo, name=_name) + self.assertTrue(unreg) + self.assertEqual(len(_events), 1) + args, kw = _events[0] + event, = args + self.assertEqual(kw, {}) + self.assertTrue(isinstance(event, Unregistered)) + self.assertTrue(isinstance(event.object, UtilityRegistration)) + self.assertTrue(event.object.registry is comp) + self.assertTrue(event.object.provided is ifoo) + self.assertTrue(event.object.name is _name) + self.assertTrue(event.object.component is _to_reg) + self.assertTrue(event.object.info is _info) + self.assertTrue(event.object.factory is None) + + def test_unregisterUtility_w_existing_subscr(self): + from zope.interface.declarations import InterfaceClass + + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + _info = u'info' + _name1 = u'name1' + _name2 = u'name2' + _to_reg = object() + comp = self._makeOne() + comp.registerUtility(_to_reg, ifoo, _name1, _info) + comp.registerUtility(_to_reg, ifoo, _name2, _info) + _monkey, _events = self._wrapEvents() + with _monkey: + comp.unregisterUtility(_to_reg, ifoo, _name2) + self.assertEqual(comp.utilities._subscribers[0][ifoo][''], (_to_reg,)) + + def test_unregisterUtility_w_existing_subscr_non_hashable(self): + from zope.interface.declarations import InterfaceClass + + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + _info = u'info' + _name1 = u'name1' + _name2 = u'name2' + _to_reg = dict() + comp = self._makeOne() + comp.registerUtility(_to_reg, ifoo, _name1, _info) + comp.registerUtility(_to_reg, ifoo, _name2, _info) + _monkey, _events = self._wrapEvents() + with 
_monkey: + comp.unregisterUtility(_to_reg, ifoo, _name2) + self.assertEqual(comp.utilities._subscribers[0][ifoo][''], (_to_reg,)) + + def test_unregisterUtility_w_existing_subscr_non_hashable_fresh_cache(self): + # We correctly populate the cache of registrations if it has gone away + # (for example, the Components was unpickled) + from zope.interface.declarations import InterfaceClass + from zope.interface.registry import _UtilityRegistrations + + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + _info = u'info' + _name1 = u'name1' + _name2 = u'name2' + _to_reg = dict() + comp = self._makeOne() + comp.registerUtility(_to_reg, ifoo, _name1, _info) + comp.registerUtility(_to_reg, ifoo, _name2, _info) + + _monkey, _events = self._wrapEvents() + with _monkey: + comp.unregisterUtility(_to_reg, ifoo, _name2) + self.assertEqual(comp.utilities._subscribers[0][ifoo][''], (_to_reg,)) + + def test_unregisterUtility_w_existing_subscr_non_hashable_reinitted(self): + # We correctly populate the cache of registrations if the base objects change + # out from under us + from zope.interface.declarations import InterfaceClass + + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + _info = u'info' + _name1 = u'name1' + _name2 = u'name2' + _to_reg = dict() + comp = self._makeOne() + comp.registerUtility(_to_reg, ifoo, _name1, _info) + comp.registerUtility(_to_reg, ifoo, _name2, _info) + + # zope.component.testing does this + comp.__init__('base') + + comp.registerUtility(_to_reg, ifoo, _name2, _info) + + _monkey, _events = self._wrapEvents() + with _monkey: + # Nothing to do, but we don't break either + comp.unregisterUtility(_to_reg, ifoo, _name2) + self.assertEqual(0, len(comp.utilities._subscribers)) + + def test_unregisterUtility_w_existing_subscr_other_component(self): + from zope.interface.declarations import InterfaceClass + + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + _info = u'info' + _name1 = u'name1' + _name2 = u'name2' + _other_reg = 
object() + _to_reg = object() + comp = self._makeOne() + comp.registerUtility(_other_reg, ifoo, _name1, _info) + comp.registerUtility(_to_reg, ifoo, _name2, _info) + _monkey, _events = self._wrapEvents() + with _monkey: + comp.unregisterUtility(_to_reg, ifoo, _name2) + self.assertEqual(comp.utilities._subscribers[0][ifoo][''], + (_other_reg,)) + + def test_unregisterUtility_w_existing_subscr_other_component_mixed_hash(self): + from zope.interface.declarations import InterfaceClass + + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + _info = u'info' + _name1 = u'name1' + _name2 = u'name2' + # First register something hashable + _other_reg = object() + # Then it transfers to something unhashable + _to_reg = dict() + comp = self._makeOne() + comp.registerUtility(_other_reg, ifoo, _name1, _info) + comp.registerUtility(_to_reg, ifoo, _name2, _info) + _monkey, _events = self._wrapEvents() + with _monkey: + comp.unregisterUtility(_to_reg, ifoo, _name2) + self.assertEqual(comp.utilities._subscribers[0][ifoo][''], + (_other_reg,)) + + def test_registeredUtilities_empty(self): + comp = self._makeOne() + self.assertEqual(list(comp.registeredUtilities()), []) + + def test_registeredUtilities_notempty(self): + from zope.interface.declarations import InterfaceClass + + from zope.interface.registry import UtilityRegistration + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + _info = u'info' + _name1 = u'name1' + _name2 = u'name2' + _to_reg = object() + comp = self._makeOne() + comp.registerUtility(_to_reg, ifoo, _name1, _info) + comp.registerUtility(_to_reg, ifoo, _name2, _info) + reg = sorted(comp.registeredUtilities(), key=lambda r: r.name) + self.assertEqual(len(reg), 2) + self.assertTrue(isinstance(reg[0], UtilityRegistration)) + self.assertTrue(reg[0].registry is comp) + self.assertTrue(reg[0].provided is ifoo) + self.assertTrue(reg[0].name is _name1) + self.assertTrue(reg[0].component is _to_reg) + self.assertTrue(reg[0].info is _info) + 
self.assertTrue(reg[0].factory is None) + self.assertTrue(isinstance(reg[1], UtilityRegistration)) + self.assertTrue(reg[1].registry is comp) + self.assertTrue(reg[1].provided is ifoo) + self.assertTrue(reg[1].name is _name2) + self.assertTrue(reg[1].component is _to_reg) + self.assertTrue(reg[1].info is _info) + self.assertTrue(reg[1].factory is None) + + def test_queryUtility_miss_no_default(self): + from zope.interface.declarations import InterfaceClass + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + comp = self._makeOne() + self.assertTrue(comp.queryUtility(ifoo) is None) + + def test_queryUtility_miss_w_default(self): + from zope.interface.declarations import InterfaceClass + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + comp = self._makeOne() + _default = object() + self.assertTrue(comp.queryUtility(ifoo, default=_default) is _default) + + def test_queryUtility_hit(self): + from zope.interface.declarations import InterfaceClass + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + _to_reg = object() + comp = self._makeOne() + comp.registerUtility(_to_reg, ifoo) + self.assertTrue(comp.queryUtility(ifoo) is _to_reg) + + def test_getUtility_miss(self): + from zope.interface.declarations import InterfaceClass + from zope.interface.interfaces import ComponentLookupError + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + comp = self._makeOne() + self.assertRaises(ComponentLookupError, comp.getUtility, ifoo) + + def test_getUtility_hit(self): + from zope.interface.declarations import InterfaceClass + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + _to_reg = object() + comp = self._makeOne() + comp.registerUtility(_to_reg, ifoo) + self.assertTrue(comp.getUtility(ifoo) is _to_reg) + + def test_getUtilitiesFor_miss(self): + from zope.interface.declarations import InterfaceClass + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + comp = self._makeOne() + self.assertEqual(list(comp.getUtilitiesFor(ifoo)), []) 
+ + def test_getUtilitiesFor_hit(self): + from zope.interface.declarations import InterfaceClass + + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + _name1 = u'name1' + _name2 = u'name2' + _to_reg = object() + comp = self._makeOne() + comp.registerUtility(_to_reg, ifoo, name=_name1) + comp.registerUtility(_to_reg, ifoo, name=_name2) + self.assertEqual(sorted(comp.getUtilitiesFor(ifoo)), + [(_name1, _to_reg), (_name2, _to_reg)]) + + def test_getAllUtilitiesRegisteredFor_miss(self): + from zope.interface.declarations import InterfaceClass + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + comp = self._makeOne() + self.assertEqual(list(comp.getAllUtilitiesRegisteredFor(ifoo)), []) + + def test_getAllUtilitiesRegisteredFor_hit(self): + from zope.interface.declarations import InterfaceClass + + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + _name1 = u'name1' + _name2 = u'name2' + _to_reg = object() + comp = self._makeOne() + comp.registerUtility(_to_reg, ifoo, name=_name1) + comp.registerUtility(_to_reg, ifoo, name=_name2) + self.assertEqual(list(comp.getAllUtilitiesRegisteredFor(ifoo)), + [_to_reg]) + + def test_registerAdapter_with_component_name(self): + from zope.interface.declarations import named, InterfaceClass + + + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + ibar = IFoo('IBar') + + @named(u'foo') + class Foo(object): + pass + _info = u'info' + + comp = self._makeOne() + comp.registerAdapter(Foo, (ibar,), ifoo, info=_info) + + self.assertEqual( + comp._adapter_registrations[(ibar,), ifoo, u'foo'], + (Foo, _info)) + + def test_registerAdapter_w_explicit_provided_and_required(self): + from zope.interface.declarations import InterfaceClass + from zope.interface.interfaces import Registered + from zope.interface.registry import AdapterRegistration + + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + ibar = IFoo('IBar') + _info = u'info' + _name = u'name' + + def _factory(context): + raise 
NotImplementedError() + comp = self._makeOne() + _monkey, _events = self._wrapEvents() + with _monkey: + comp.registerAdapter(_factory, (ibar,), ifoo, _name, _info) + self.assertTrue(comp.adapters._adapters[1][ibar][ifoo][_name] + is _factory) + self.assertEqual(comp._adapter_registrations[(ibar,), ifoo, _name], + (_factory, _info)) + self.assertEqual(len(_events), 1) + args, kw = _events[0] + event, = args + self.assertEqual(kw, {}) + self.assertTrue(isinstance(event, Registered)) + self.assertTrue(isinstance(event.object, AdapterRegistration)) + self.assertTrue(event.object.registry is comp) + self.assertTrue(event.object.provided is ifoo) + self.assertEqual(event.object.required, (ibar,)) + self.assertTrue(event.object.name is _name) + self.assertTrue(event.object.info is _info) + self.assertTrue(event.object.factory is _factory) + + def test_registerAdapter_no_provided_available(self): + from zope.interface.declarations import InterfaceClass + + class IFoo(InterfaceClass): + pass + + ibar = IFoo('IBar') + _info = u'info' + _name = u'name' + _to_reg = object() + class _Factory(object): + pass + + comp = self._makeOne() + self.assertRaises(TypeError, comp.registerAdapter, _Factory, (ibar,), + name=_name, info=_info) + + def test_registerAdapter_wo_explicit_provided(self): + from zope.interface.declarations import InterfaceClass + from zope.interface.declarations import implementer + from zope.interface.interfaces import Registered + from zope.interface.registry import AdapterRegistration + + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + ibar = IFoo('IBar') + _info = u'info' + _name = u'name' + _to_reg = object() + + @implementer(ifoo) + class _Factory(object): + pass + + comp = self._makeOne() + _monkey, _events = self._wrapEvents() + with _monkey: + comp.registerAdapter(_Factory, (ibar,), name=_name, info=_info) + self.assertTrue(comp.adapters._adapters[1][ibar][ifoo][_name] + is _Factory) + self.assertEqual(comp._adapter_registrations[(ibar,), 
ifoo, _name], + (_Factory, _info)) + self.assertEqual(len(_events), 1) + args, kw = _events[0] + event, = args + self.assertEqual(kw, {}) + self.assertTrue(isinstance(event, Registered)) + self.assertTrue(isinstance(event.object, AdapterRegistration)) + self.assertTrue(event.object.registry is comp) + self.assertTrue(event.object.provided is ifoo) + self.assertEqual(event.object.required, (ibar,)) + self.assertTrue(event.object.name is _name) + self.assertTrue(event.object.info is _info) + self.assertTrue(event.object.factory is _Factory) + + def test_registerAdapter_no_required_available(self): + from zope.interface.declarations import InterfaceClass + + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + + _info = u'info' + _name = u'name' + class _Factory(object): + pass + + comp = self._makeOne() + self.assertRaises(TypeError, comp.registerAdapter, _Factory, + provided=ifoo, name=_name, info=_info) + + def test_registerAdapter_w_invalid_required(self): + from zope.interface.declarations import InterfaceClass + + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + ibar = IFoo('IBar') + _info = u'info' + _name = u'name' + class _Factory(object): + pass + comp = self._makeOne() + self.assertRaises(TypeError, comp.registerAdapter, _Factory, + ibar, provided=ifoo, name=_name, info=_info) + + def test_registerAdapter_w_required_containing_None(self): + from zope.interface.declarations import InterfaceClass + from zope.interface.interface import Interface + from zope.interface.interfaces import Registered + from zope.interface.registry import AdapterRegistration + + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + _info = u'info' + _name = u'name' + class _Factory(object): + pass + comp = self._makeOne() + _monkey, _events = self._wrapEvents() + with _monkey: + comp.registerAdapter(_Factory, [None], provided=ifoo, + name=_name, info=_info) + self.assertTrue(comp.adapters._adapters[1][Interface][ifoo][_name] + is _Factory) + 
self.assertEqual(comp._adapter_registrations[(Interface,), ifoo, _name], + (_Factory, _info)) + self.assertEqual(len(_events), 1) + args, kw = _events[0] + event, = args + self.assertEqual(kw, {}) + self.assertTrue(isinstance(event, Registered)) + self.assertTrue(isinstance(event.object, AdapterRegistration)) + self.assertTrue(event.object.registry is comp) + self.assertTrue(event.object.provided is ifoo) + self.assertEqual(event.object.required, (Interface,)) + self.assertTrue(event.object.name is _name) + self.assertTrue(event.object.info is _info) + self.assertTrue(event.object.factory is _Factory) + + def test_registerAdapter_w_required_containing_class(self): + from zope.interface.declarations import InterfaceClass + from zope.interface.declarations import implementer + from zope.interface.declarations import implementedBy + from zope.interface.interfaces import Registered + from zope.interface.registry import AdapterRegistration + + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + ibar = IFoo('IBar') + _info = u'info' + _name = u'name' + class _Factory(object): + pass + + @implementer(ibar) + class _Context(object): + pass + _ctx_impl = implementedBy(_Context) + comp = self._makeOne() + _monkey, _events = self._wrapEvents() + with _monkey: + comp.registerAdapter(_Factory, [_Context], provided=ifoo, + name=_name, info=_info) + self.assertTrue(comp.adapters._adapters[1][_ctx_impl][ifoo][_name] + is _Factory) + self.assertEqual(comp._adapter_registrations[(_ctx_impl,), ifoo, _name], + (_Factory, _info)) + self.assertEqual(len(_events), 1) + args, kw = _events[0] + event, = args + self.assertEqual(kw, {}) + self.assertTrue(isinstance(event, Registered)) + self.assertTrue(isinstance(event.object, AdapterRegistration)) + self.assertTrue(event.object.registry is comp) + self.assertTrue(event.object.provided is ifoo) + self.assertEqual(event.object.required, (_ctx_impl,)) + self.assertTrue(event.object.name is _name) + self.assertTrue(event.object.info is 
_info) + self.assertTrue(event.object.factory is _Factory) + + def test_registerAdapter_w_required_containing_junk(self): + from zope.interface.declarations import InterfaceClass + + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + + _info = u'info' + _name = u'name' + class _Factory(object): + pass + comp = self._makeOne() + self.assertRaises(TypeError, comp.registerAdapter, _Factory, [object()], + provided=ifoo, name=_name, info=_info) + + def test_registerAdapter_wo_explicit_required(self): + from zope.interface.declarations import InterfaceClass + from zope.interface.interfaces import Registered + from zope.interface.registry import AdapterRegistration + + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + ibar = IFoo('IBar') + _info = u'info' + _name = u'name' + class _Factory(object): + __component_adapts__ = (ibar,) + + comp = self._makeOne() + _monkey, _events = self._wrapEvents() + with _monkey: + comp.registerAdapter(_Factory, provided=ifoo, name=_name, + info=_info) + self.assertTrue(comp.adapters._adapters[1][ibar][ifoo][_name] + is _Factory) + self.assertEqual(comp._adapter_registrations[(ibar,), ifoo, _name], + (_Factory, _info)) + self.assertEqual(len(_events), 1) + args, kw = _events[0] + event, = args + self.assertEqual(kw, {}) + self.assertTrue(isinstance(event, Registered)) + self.assertTrue(isinstance(event.object, AdapterRegistration)) + self.assertTrue(event.object.registry is comp) + self.assertTrue(event.object.provided is ifoo) + self.assertEqual(event.object.required, (ibar,)) + self.assertTrue(event.object.name is _name) + self.assertTrue(event.object.info is _info) + self.assertTrue(event.object.factory is _Factory) + + def test_registerAdapter_wo_event(self): + from zope.interface.declarations import InterfaceClass + + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + ibar = IFoo('IBar') + _info = u'info' + _name = u'name' + + def _factory(context): + raise NotImplementedError() + comp = self._makeOne() + 
_monkey, _events = self._wrapEvents() + with _monkey: + comp.registerAdapter(_factory, (ibar,), ifoo, _name, _info, + event=False) + self.assertEqual(len(_events), 0) + + def test_unregisterAdapter_neither_factory_nor_provided(self): + comp = self._makeOne() + self.assertRaises(TypeError, comp.unregisterAdapter, + factory=None, provided=None) + + def test_unregisterAdapter_neither_factory_nor_required(self): + from zope.interface.declarations import InterfaceClass + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + comp = self._makeOne() + self.assertRaises(TypeError, comp.unregisterAdapter, + factory=None, provided=ifoo, required=None) + + def test_unregisterAdapter_miss(self): + from zope.interface.declarations import InterfaceClass + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + ibar = IFoo('IBar') + class _Factory(object): + pass + + comp = self._makeOne() + _monkey, _events = self._wrapEvents() + with _monkey: + unreg = comp.unregisterAdapter(_Factory, (ibar,), ifoo) + self.assertFalse(unreg) + + def test_unregisterAdapter_hit_w_explicit_provided_and_required(self): + from zope.interface.declarations import InterfaceClass + from zope.interface.interfaces import Unregistered + from zope.interface.registry import AdapterRegistration + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + ibar = IFoo('IBar') + class _Factory(object): + pass + + comp = self._makeOne() + comp.registerAdapter(_Factory, (ibar,), ifoo) + _monkey, _events = self._wrapEvents() + with _monkey: + unreg = comp.unregisterAdapter(_Factory, (ibar,), ifoo) + self.assertTrue(unreg) + self.assertFalse(comp.adapters._adapters) + self.assertFalse(comp._adapter_registrations) + self.assertEqual(len(_events), 1) + args, kw = _events[0] + event, = args + self.assertEqual(kw, {}) + self.assertTrue(isinstance(event, Unregistered)) + self.assertTrue(isinstance(event.object, AdapterRegistration)) + self.assertTrue(event.object.registry is comp) + 
self.assertTrue(event.object.provided is ifoo) + self.assertEqual(event.object.required, (ibar,)) + self.assertEqual(event.object.name, '') + self.assertEqual(event.object.info, '') + self.assertTrue(event.object.factory is _Factory) + + def test_unregisterAdapter_wo_explicit_provided(self): + from zope.interface.declarations import InterfaceClass + from zope.interface.declarations import implementer + from zope.interface.interfaces import Unregistered + from zope.interface.registry import AdapterRegistration + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + ibar = IFoo('IBar') + @implementer(ifoo) + class _Factory(object): + pass + + comp = self._makeOne() + comp.registerAdapter(_Factory, (ibar,), ifoo) + _monkey, _events = self._wrapEvents() + with _monkey: + unreg = comp.unregisterAdapter(_Factory, (ibar,)) + self.assertTrue(unreg) + self.assertEqual(len(_events), 1) + args, kw = _events[0] + event, = args + self.assertEqual(kw, {}) + self.assertTrue(isinstance(event, Unregistered)) + self.assertTrue(isinstance(event.object, AdapterRegistration)) + self.assertTrue(event.object.registry is comp) + self.assertTrue(event.object.provided is ifoo) + self.assertEqual(event.object.required, (ibar,)) + self.assertEqual(event.object.name, '') + self.assertEqual(event.object.info, '') + self.assertTrue(event.object.factory is _Factory) + + def test_unregisterAdapter_wo_explicit_required(self): + from zope.interface.declarations import InterfaceClass + from zope.interface.interfaces import Unregistered + from zope.interface.registry import AdapterRegistration + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + ibar = IFoo('IBar') + class _Factory(object): + __component_adapts__ = (ibar,) + + comp = self._makeOne() + comp.registerAdapter(_Factory, (ibar,), ifoo) + _monkey, _events = self._wrapEvents() + with _monkey: + unreg = comp.unregisterAdapter(_Factory, provided=ifoo) + self.assertTrue(unreg) + self.assertEqual(len(_events), 1) + args, kw = 
_events[0] + event, = args + self.assertEqual(kw, {}) + self.assertTrue(isinstance(event, Unregistered)) + self.assertTrue(isinstance(event.object, AdapterRegistration)) + self.assertTrue(event.object.registry is comp) + self.assertTrue(event.object.provided is ifoo) + self.assertEqual(event.object.required, (ibar,)) + self.assertEqual(event.object.name, '') + self.assertEqual(event.object.info, '') + self.assertTrue(event.object.factory is _Factory) + + def test_registeredAdapters_empty(self): + comp = self._makeOne() + self.assertEqual(list(comp.registeredAdapters()), []) + + def test_registeredAdapters_notempty(self): + from zope.interface.declarations import InterfaceClass + + from zope.interface.registry import AdapterRegistration + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + ibar = IFoo('IFoo') + _info = u'info' + _name1 = u'name1' + _name2 = u'name2' + class _Factory(object): + pass + + comp = self._makeOne() + comp.registerAdapter(_Factory, (ibar,), ifoo, _name1, _info) + comp.registerAdapter(_Factory, (ibar,), ifoo, _name2, _info) + reg = sorted(comp.registeredAdapters(), key=lambda r: r.name) + self.assertEqual(len(reg), 2) + self.assertTrue(isinstance(reg[0], AdapterRegistration)) + self.assertTrue(reg[0].registry is comp) + self.assertTrue(reg[0].provided is ifoo) + self.assertEqual(reg[0].required, (ibar,)) + self.assertTrue(reg[0].name is _name1) + self.assertTrue(reg[0].info is _info) + self.assertTrue(reg[0].factory is _Factory) + self.assertTrue(isinstance(reg[1], AdapterRegistration)) + self.assertTrue(reg[1].registry is comp) + self.assertTrue(reg[1].provided is ifoo) + self.assertEqual(reg[1].required, (ibar,)) + self.assertTrue(reg[1].name is _name2) + self.assertTrue(reg[1].info is _info) + self.assertTrue(reg[1].factory is _Factory) + + def test_queryAdapter_miss_no_default(self): + from zope.interface.declarations import InterfaceClass + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + comp = self._makeOne() + 
_context = object() + self.assertTrue(comp.queryAdapter(_context, ifoo) is None) + + def test_queryAdapter_miss_w_default(self): + from zope.interface.declarations import InterfaceClass + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + comp = self._makeOne() + _context = object() + _default = object() + self.assertTrue( + comp.queryAdapter(_context, ifoo, default=_default) is _default) + + def test_queryAdapter_hit(self): + from zope.interface.declarations import InterfaceClass + from zope.interface.declarations import implementer + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + ibar = IFoo('IBar') + class _Factory(object): + def __init__(self, context): + self.context = context + @implementer(ibar) + class _Context(object): + pass + _context = _Context() + comp = self._makeOne() + comp.registerAdapter(_Factory, (ibar,), ifoo) + adapter = comp.queryAdapter(_context, ifoo) + self.assertTrue(isinstance(adapter, _Factory)) + self.assertTrue(adapter.context is _context) + + def test_getAdapter_miss(self): + from zope.interface.declarations import InterfaceClass + from zope.interface.declarations import implementer + from zope.interface.interfaces import ComponentLookupError + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + ibar = IFoo('IBar') + @implementer(ibar) + class _Context(object): + pass + _context = _Context() + comp = self._makeOne() + self.assertRaises(ComponentLookupError, + comp.getAdapter, _context, ifoo) + + def test_getAdapter_hit(self): + from zope.interface.declarations import InterfaceClass + from zope.interface.declarations import implementer + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + ibar = IFoo('IBar') + class _Factory(object): + def __init__(self, context): + self.context = context + @implementer(ibar) + class _Context(object): + pass + _context = _Context() + comp = self._makeOne() + comp.registerAdapter(_Factory, (ibar,), ifoo) + adapter = comp.getAdapter(_context, ifoo) + 
self.assertTrue(isinstance(adapter, _Factory)) + self.assertTrue(adapter.context is _context) + + def test_queryMultiAdapter_miss(self): + from zope.interface.declarations import InterfaceClass + from zope.interface.declarations import implementer + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + ibar = IFoo('IBar') + ibaz = IFoo('IBaz') + @implementer(ibar) + class _Context1(object): + pass + @implementer(ibaz) + class _Context2(object): + pass + _context1 = _Context1() + _context2 = _Context2() + comp = self._makeOne() + self.assertEqual(comp.queryMultiAdapter((_context1, _context2), ifoo), + None) + + def test_queryMultiAdapter_miss_w_default(self): + from zope.interface.declarations import InterfaceClass + from zope.interface.declarations import implementer + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + ibar = IFoo('IBar') + ibaz = IFoo('IBaz') + @implementer(ibar) + class _Context1(object): + pass + @implementer(ibaz) + class _Context2(object): + pass + _context1 = _Context1() + _context2 = _Context2() + _default = object() + comp = self._makeOne() + self.assertTrue( + comp.queryMultiAdapter((_context1, _context2), ifoo, + default=_default) is _default) + + def test_queryMultiAdapter_hit(self): + from zope.interface.declarations import InterfaceClass + from zope.interface.declarations import implementer + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + ibar = IFoo('IBar') + ibaz = IFoo('IBaz') + @implementer(ibar) + class _Context1(object): + pass + @implementer(ibaz) + class _Context2(object): + pass + _context1 = _Context1() + _context2 = _Context2() + class _Factory(object): + def __init__(self, context1, context2): + self.context = context1, context2 + comp = self._makeOne() + comp.registerAdapter(_Factory, (ibar, ibaz), ifoo) + adapter = comp.queryMultiAdapter((_context1, _context2), ifoo) + self.assertTrue(isinstance(adapter, _Factory)) + self.assertEqual(adapter.context, (_context1, _context2)) + + def 
test_getMultiAdapter_miss(self): + from zope.interface.declarations import InterfaceClass + from zope.interface.declarations import implementer + from zope.interface.interfaces import ComponentLookupError + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + ibar = IFoo('IBar') + ibaz = IFoo('IBaz') + @implementer(ibar) + class _Context1(object): + pass + @implementer(ibaz) + class _Context2(object): + pass + _context1 = _Context1() + _context2 = _Context2() + comp = self._makeOne() + self.assertRaises(ComponentLookupError, + comp.getMultiAdapter, (_context1, _context2), ifoo) + + def test_getMultiAdapter_hit(self): + from zope.interface.declarations import InterfaceClass + from zope.interface.declarations import implementer + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + ibar = IFoo('IBar') + ibaz = IFoo('IBaz') + @implementer(ibar) + class _Context1(object): + pass + @implementer(ibaz) + class _Context2(object): + pass + _context1 = _Context1() + _context2 = _Context2() + class _Factory(object): + def __init__(self, context1, context2): + self.context = context1, context2 + comp = self._makeOne() + comp.registerAdapter(_Factory, (ibar, ibaz), ifoo) + adapter = comp.getMultiAdapter((_context1, _context2), ifoo) + self.assertTrue(isinstance(adapter, _Factory)) + self.assertEqual(adapter.context, (_context1, _context2)) + + def test_getAdapters_empty(self): + from zope.interface.declarations import InterfaceClass + from zope.interface.declarations import implementer + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + ibar = IFoo('IBar') + ibaz = IFoo('IBaz') + @implementer(ibar) + class _Context1(object): + pass + @implementer(ibaz) + class _Context2(object): + pass + _context1 = _Context1() + _context2 = _Context2() + comp = self._makeOne() + self.assertEqual( + list(comp.getAdapters((_context1, _context2), ifoo)), []) + + def test_getAdapters_factory_returns_None(self): + from zope.interface.declarations import InterfaceClass + from 
zope.interface.declarations import implementer + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + ibar = IFoo('IBar') + ibaz = IFoo('IBaz') + @implementer(ibar) + class _Context1(object): + pass + @implementer(ibaz) + class _Context2(object): + pass + _context1 = _Context1() + _context2 = _Context2() + comp = self._makeOne() + _called_with = [] + def _side_effect_only(context1, context2): + _called_with.append((context1, context2)) + return None + comp.registerAdapter(_side_effect_only, (ibar, ibaz), ifoo) + self.assertEqual( + list(comp.getAdapters((_context1, _context2), ifoo)), []) + self.assertEqual(_called_with, [(_context1, _context2)]) + + def test_getAdapters_non_empty(self): + from zope.interface.declarations import InterfaceClass + from zope.interface.declarations import implementer + + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + ibar = IFoo('IBar') + ibaz = IFoo('IBaz') + @implementer(ibar) + class _Context1(object): + pass + @implementer(ibaz) + class _Context2(object): + pass + _context1 = _Context1() + _context2 = _Context2() + class _Factory1(object): + def __init__(self, context1, context2): + self.context = context1, context2 + class _Factory2(object): + def __init__(self, context1, context2): + self.context = context1, context2 + _name1 = u'name1' + _name2 = u'name2' + comp = self._makeOne() + comp.registerAdapter(_Factory1, (ibar, ibaz), ifoo, name=_name1) + comp.registerAdapter(_Factory2, (ibar, ibaz), ifoo, name=_name2) + found = sorted(comp.getAdapters((_context1, _context2), ifoo)) + self.assertEqual(len(found), 2) + self.assertEqual(found[0][0], _name1) + self.assertTrue(isinstance(found[0][1], _Factory1)) + self.assertEqual(found[1][0], _name2) + self.assertTrue(isinstance(found[1][1], _Factory2)) + + def test_registerSubscriptionAdapter_w_nonblank_name(self): + from zope.interface.declarations import InterfaceClass + + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + ibar = IFoo('IBar') + _name = u'name' 
+ _info = u'info' + def _factory(context): + raise NotImplementedError() + + comp = self._makeOne() + self.assertRaises(TypeError, comp.registerSubscriptionAdapter, + _factory, (ibar,), ifoo, _name, _info) + + def test_registerSubscriptionAdapter_w_explicit_provided_and_required(self): + from zope.interface.declarations import InterfaceClass + from zope.interface.interfaces import Registered + from zope.interface.registry import SubscriptionRegistration + + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + ibar = IFoo('IBar') + _blank = u'' + _info = u'info' + def _factory(context): + raise NotImplementedError() + comp = self._makeOne() + _monkey, _events = self._wrapEvents() + with _monkey: + comp.registerSubscriptionAdapter(_factory, (ibar,), ifoo, + info=_info) + reg = comp.adapters._subscribers[1][ibar][ifoo][_blank] + self.assertEqual(len(reg), 1) + self.assertTrue(reg[0] is _factory) + self.assertEqual(comp._subscription_registrations, + [((ibar,), ifoo, _blank, _factory, _info)]) + self.assertEqual(len(_events), 1) + args, kw = _events[0] + event, = args + self.assertEqual(kw, {}) + self.assertTrue(isinstance(event, Registered)) + self.assertTrue(isinstance(event.object, SubscriptionRegistration)) + self.assertTrue(event.object.registry is comp) + self.assertTrue(event.object.provided is ifoo) + self.assertEqual(event.object.required, (ibar,)) + self.assertEqual(event.object.name, _blank) + self.assertTrue(event.object.info is _info) + self.assertTrue(event.object.factory is _factory) + + def test_registerSubscriptionAdapter_wo_explicit_provided(self): + from zope.interface.declarations import InterfaceClass + from zope.interface.declarations import implementer + from zope.interface.interfaces import Registered + from zope.interface.registry import SubscriptionRegistration + + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + ibar = IFoo('IBar') + _info = u'info' + _blank = u'' + + @implementer(ifoo) + class _Factory(object): + pass + + 
comp = self._makeOne() + _monkey, _events = self._wrapEvents() + with _monkey: + comp.registerSubscriptionAdapter(_Factory, (ibar,), info=_info) + reg = comp.adapters._subscribers[1][ibar][ifoo][_blank] + self.assertEqual(len(reg), 1) + self.assertTrue(reg[0] is _Factory) + self.assertEqual(comp._subscription_registrations, + [((ibar,), ifoo, _blank, _Factory, _info)]) + self.assertEqual(len(_events), 1) + args, kw = _events[0] + event, = args + self.assertEqual(kw, {}) + self.assertTrue(isinstance(event, Registered)) + self.assertTrue(isinstance(event.object, SubscriptionRegistration)) + self.assertTrue(event.object.registry is comp) + self.assertTrue(event.object.provided is ifoo) + self.assertEqual(event.object.required, (ibar,)) + self.assertEqual(event.object.name, _blank) + self.assertTrue(event.object.info is _info) + self.assertTrue(event.object.factory is _Factory) + + def test_registerSubscriptionAdapter_wo_explicit_required(self): + from zope.interface.declarations import InterfaceClass + from zope.interface.interfaces import Registered + from zope.interface.registry import SubscriptionRegistration + + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + ibar = IFoo('IBar') + _info = u'info' + _blank = u'' + class _Factory(object): + __component_adapts__ = (ibar,) + + comp = self._makeOne() + _monkey, _events = self._wrapEvents() + with _monkey: + comp.registerSubscriptionAdapter( + _Factory, provided=ifoo, info=_info) + reg = comp.adapters._subscribers[1][ibar][ifoo][_blank] + self.assertEqual(len(reg), 1) + self.assertTrue(reg[0] is _Factory) + self.assertEqual(comp._subscription_registrations, + [((ibar,), ifoo, _blank, _Factory, _info)]) + self.assertEqual(len(_events), 1) + args, kw = _events[0] + event, = args + self.assertEqual(kw, {}) + self.assertTrue(isinstance(event, Registered)) + self.assertTrue(isinstance(event.object, SubscriptionRegistration)) + self.assertTrue(event.object.registry is comp) + self.assertTrue(event.object.provided 
is ifoo) + self.assertEqual(event.object.required, (ibar,)) + self.assertEqual(event.object.name, _blank) + self.assertTrue(event.object.info is _info) + self.assertTrue(event.object.factory is _Factory) + + def test_registerSubscriptionAdapter_wo_event(self): + from zope.interface.declarations import InterfaceClass + + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + ibar = IFoo('IBar') + _blank = u'' + _info = u'info' + + def _factory(context): + raise NotImplementedError() + + comp = self._makeOne() + _monkey, _events = self._wrapEvents() + with _monkey: + comp.registerSubscriptionAdapter(_factory, (ibar,), ifoo, + info=_info, event=False) + self.assertEqual(len(_events), 0) + + def test_registeredSubscriptionAdapters_empty(self): + comp = self._makeOne() + self.assertEqual(list(comp.registeredSubscriptionAdapters()), []) + + def test_registeredSubscriptionAdapters_notempty(self): + from zope.interface.declarations import InterfaceClass + + from zope.interface.registry import SubscriptionRegistration + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + ibar = IFoo('IFoo') + _info = u'info' + _blank = u'' + class _Factory(object): + pass + + comp = self._makeOne() + comp.registerSubscriptionAdapter(_Factory, (ibar,), ifoo, info=_info) + comp.registerSubscriptionAdapter(_Factory, (ibar,), ifoo, info=_info) + reg = list(comp.registeredSubscriptionAdapters()) + self.assertEqual(len(reg), 2) + self.assertTrue(isinstance(reg[0], SubscriptionRegistration)) + self.assertTrue(reg[0].registry is comp) + self.assertTrue(reg[0].provided is ifoo) + self.assertEqual(reg[0].required, (ibar,)) + self.assertEqual(reg[0].name, _blank) + self.assertTrue(reg[0].info is _info) + self.assertTrue(reg[0].factory is _Factory) + self.assertTrue(isinstance(reg[1], SubscriptionRegistration)) + self.assertTrue(reg[1].registry is comp) + self.assertTrue(reg[1].provided is ifoo) + self.assertEqual(reg[1].required, (ibar,)) + self.assertEqual(reg[1].name, _blank) + 
self.assertTrue(reg[1].info is _info) + self.assertTrue(reg[1].factory is _Factory) + + def test_unregisterSubscriptionAdapter_w_nonblank_name(self): + from zope.interface.declarations import InterfaceClass + + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + ibar = IFoo('IBar') + _nonblank = u'nonblank' + comp = self._makeOne() + self.assertRaises(TypeError, comp.unregisterSubscriptionAdapter, + required=ifoo, provided=ibar, name=_nonblank) + + def test_unregisterSubscriptionAdapter_neither_factory_nor_provided(self): + comp = self._makeOne() + self.assertRaises(TypeError, comp.unregisterSubscriptionAdapter, + factory=None, provided=None) + + def test_unregisterSubscriptionAdapter_neither_factory_nor_required(self): + from zope.interface.declarations import InterfaceClass + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + comp = self._makeOne() + self.assertRaises(TypeError, comp.unregisterSubscriptionAdapter, + factory=None, provided=ifoo, required=None) + + def test_unregisterSubscriptionAdapter_miss(self): + from zope.interface.declarations import InterfaceClass + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + ibar = IFoo('IBar') + class _Factory(object): + pass + + comp = self._makeOne() + _monkey, _events = self._wrapEvents() + with _monkey: + unreg = comp.unregisterSubscriptionAdapter(_Factory, (ibar,), ifoo) + self.assertFalse(unreg) + self.assertFalse(_events) + + def test_unregisterSubscriptionAdapter_hit_wo_factory(self): + from zope.interface.declarations import InterfaceClass + from zope.interface.interfaces import Unregistered + from zope.interface.registry import SubscriptionRegistration + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + ibar = IFoo('IBar') + class _Factory(object): + pass + + comp = self._makeOne() + comp.registerSubscriptionAdapter(_Factory, (ibar,), ifoo) + _monkey, _events = self._wrapEvents() + with _monkey: + unreg = comp.unregisterSubscriptionAdapter(None, (ibar,), ifoo) + 
self.assertTrue(unreg) + self.assertFalse(comp.adapters._subscribers) + self.assertFalse(comp._subscription_registrations) + self.assertEqual(len(_events), 1) + args, kw = _events[0] + event, = args + self.assertEqual(kw, {}) + self.assertTrue(isinstance(event, Unregistered)) + self.assertTrue(isinstance(event.object, SubscriptionRegistration)) + self.assertTrue(event.object.registry is comp) + self.assertTrue(event.object.provided is ifoo) + self.assertEqual(event.object.required, (ibar,)) + self.assertEqual(event.object.name, '') + self.assertEqual(event.object.info, '') + self.assertTrue(event.object.factory is None) + + def test_unregisterSubscriptionAdapter_hit_w_factory(self): + from zope.interface.declarations import InterfaceClass + from zope.interface.interfaces import Unregistered + from zope.interface.registry import SubscriptionRegistration + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + ibar = IFoo('IBar') + class _Factory(object): + pass + + comp = self._makeOne() + comp.registerSubscriptionAdapter(_Factory, (ibar,), ifoo) + _monkey, _events = self._wrapEvents() + with _monkey: + unreg = comp.unregisterSubscriptionAdapter(_Factory, (ibar,), ifoo) + self.assertTrue(unreg) + self.assertFalse(comp.adapters._subscribers) + self.assertFalse(comp._subscription_registrations) + self.assertEqual(len(_events), 1) + args, kw = _events[0] + event, = args + self.assertEqual(kw, {}) + self.assertTrue(isinstance(event, Unregistered)) + self.assertTrue(isinstance(event.object, SubscriptionRegistration)) + self.assertTrue(event.object.registry is comp) + self.assertTrue(event.object.provided is ifoo) + self.assertEqual(event.object.required, (ibar,)) + self.assertEqual(event.object.name, '') + self.assertEqual(event.object.info, '') + self.assertTrue(event.object.factory is _Factory) + + def test_unregisterSubscriptionAdapter_wo_explicit_provided(self): + from zope.interface.declarations import InterfaceClass + from zope.interface.declarations import 
implementer + from zope.interface.interfaces import Unregistered + from zope.interface.registry import SubscriptionRegistration + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + ibar = IFoo('IBar') + @implementer(ifoo) + class _Factory(object): + pass + + comp = self._makeOne() + comp.registerSubscriptionAdapter(_Factory, (ibar,), ifoo) + _monkey, _events = self._wrapEvents() + with _monkey: + unreg = comp.unregisterSubscriptionAdapter(_Factory, (ibar,)) + self.assertTrue(unreg) + self.assertEqual(len(_events), 1) + args, kw = _events[0] + event, = args + self.assertEqual(kw, {}) + self.assertTrue(isinstance(event, Unregistered)) + self.assertTrue(isinstance(event.object, SubscriptionRegistration)) + self.assertTrue(event.object.registry is comp) + self.assertTrue(event.object.provided is ifoo) + self.assertEqual(event.object.required, (ibar,)) + self.assertEqual(event.object.name, '') + self.assertEqual(event.object.info, '') + self.assertTrue(event.object.factory is _Factory) + + def test_unregisterSubscriptionAdapter_wo_explicit_required(self): + from zope.interface.declarations import InterfaceClass + from zope.interface.interfaces import Unregistered + from zope.interface.registry import SubscriptionRegistration + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + ibar = IFoo('IBar') + class _Factory(object): + __component_adapts__ = (ibar,) + + comp = self._makeOne() + comp.registerSubscriptionAdapter(_Factory, (ibar,), ifoo) + _monkey, _events = self._wrapEvents() + with _monkey: + unreg = comp.unregisterSubscriptionAdapter(_Factory, provided=ifoo) + self.assertTrue(unreg) + self.assertEqual(len(_events), 1) + args, kw = _events[0] + event, = args + self.assertEqual(kw, {}) + self.assertTrue(isinstance(event, Unregistered)) + self.assertTrue(isinstance(event.object, SubscriptionRegistration)) + self.assertTrue(event.object.registry is comp) + self.assertTrue(event.object.provided is ifoo) + self.assertEqual(event.object.required, (ibar,)) 
+ self.assertEqual(event.object.name, '') + self.assertEqual(event.object.info, '') + self.assertTrue(event.object.factory is _Factory) + + def test_subscribers_empty(self): + from zope.interface.declarations import InterfaceClass + from zope.interface.declarations import implementer + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + ibar = IFoo('IBar') + comp = self._makeOne() + @implementer(ibar) + class Bar(object): + pass + bar = Bar() + self.assertEqual(list(comp.subscribers((bar,), ifoo)), []) + + def test_subscribers_non_empty(self): + from zope.interface.declarations import InterfaceClass + from zope.interface.declarations import implementer + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + ibar = IFoo('IBar') + class _Factory(object): + __component_adapts__ = (ibar,) + def __init__(self, context): + self._context = context + class _Derived(_Factory): + pass + comp = self._makeOne() + comp.registerSubscriptionAdapter(_Factory, (ibar,), ifoo) + comp.registerSubscriptionAdapter(_Derived, (ibar,), ifoo) + @implementer(ibar) + class Bar(object): + pass + bar = Bar() + subscribers = comp.subscribers((bar,), ifoo) + def _klassname(x): + return x.__class__.__name__ + subscribers = sorted(subscribers, key=_klassname) + self.assertEqual(len(subscribers), 2) + self.assertTrue(isinstance(subscribers[0], _Derived)) + self.assertTrue(isinstance(subscribers[1], _Factory)) + + def test_registerHandler_w_nonblank_name(self): + from zope.interface.declarations import InterfaceClass + + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + _nonblank = u'nonblank' + comp = self._makeOne() + def _factory(context): + raise NotImplementedError() + + self.assertRaises(TypeError, comp.registerHandler, _factory, + required=ifoo, name=_nonblank) + + def test_registerHandler_w_explicit_required(self): + from zope.interface.declarations import InterfaceClass + from zope.interface.interfaces import Registered + from zope.interface.registry import 
HandlerRegistration + + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + _blank = u'' + _info = u'info' + def _factory(context): + raise NotImplementedError() + + comp = self._makeOne() + _monkey, _events = self._wrapEvents() + with _monkey: + comp.registerHandler(_factory, (ifoo,), info=_info) + reg = comp.adapters._subscribers[1][ifoo][None][_blank] + self.assertEqual(len(reg), 1) + self.assertTrue(reg[0] is _factory) + self.assertEqual(comp._handler_registrations, + [((ifoo,), _blank, _factory, _info)]) + self.assertEqual(len(_events), 1) + args, kw = _events[0] + event, = args + self.assertEqual(kw, {}) + self.assertTrue(isinstance(event, Registered)) + self.assertTrue(isinstance(event.object, HandlerRegistration)) + self.assertTrue(event.object.registry is comp) + self.assertEqual(event.object.required, (ifoo,)) + self.assertEqual(event.object.name, _blank) + self.assertTrue(event.object.info is _info) + self.assertTrue(event.object.factory is _factory) + + def test_registerHandler_wo_explicit_required_no_event(self): + from zope.interface.declarations import InterfaceClass + + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + _info = u'info' + _blank = u'' + class _Factory(object): + __component_adapts__ = (ifoo,) + pass + + comp = self._makeOne() + _monkey, _events = self._wrapEvents() + with _monkey: + comp.registerHandler(_Factory, info=_info, event=False) + reg = comp.adapters._subscribers[1][ifoo][None][_blank] + self.assertEqual(len(reg), 1) + self.assertTrue(reg[0] is _Factory) + self.assertEqual(comp._handler_registrations, + [((ifoo,), _blank, _Factory, _info)]) + self.assertEqual(len(_events), 0) + + def test_registeredHandlers_empty(self): + comp = self._makeOne() + self.assertFalse(list(comp.registeredHandlers())) + + def test_registeredHandlers_non_empty(self): + from zope.interface.declarations import InterfaceClass + from zope.interface.registry import HandlerRegistration + class IFoo(InterfaceClass): + pass + ifoo = 
IFoo('IFoo') + def _factory1(context): + raise NotImplementedError() + def _factory2(context): + raise NotImplementedError() + comp = self._makeOne() + comp.registerHandler(_factory1, (ifoo,)) + comp.registerHandler(_factory2, (ifoo,)) + def _factory_name(x): + return x.factory.__code__.co_name + subscribers = sorted(comp.registeredHandlers(), key=_factory_name) + self.assertEqual(len(subscribers), 2) + self.assertTrue(isinstance(subscribers[0], HandlerRegistration)) + self.assertEqual(subscribers[0].required, (ifoo,)) + self.assertEqual(subscribers[0].name, '') + self.assertEqual(subscribers[0].factory, _factory1) + self.assertEqual(subscribers[0].info, '') + self.assertTrue(isinstance(subscribers[1], HandlerRegistration)) + self.assertEqual(subscribers[1].required, (ifoo,)) + self.assertEqual(subscribers[1].name, '') + self.assertEqual(subscribers[1].factory, _factory2) + self.assertEqual(subscribers[1].info, '') + + def test_unregisterHandler_w_nonblank_name(self): + from zope.interface.declarations import InterfaceClass + + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + _nonblank = u'nonblank' + comp = self._makeOne() + self.assertRaises(TypeError, comp.unregisterHandler, + required=(ifoo,), name=_nonblank) + + def test_unregisterHandler_neither_factory_nor_required(self): + comp = self._makeOne() + self.assertRaises(TypeError, comp.unregisterHandler) + + def test_unregisterHandler_miss(self): + from zope.interface.declarations import InterfaceClass + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + comp = self._makeOne() + unreg = comp.unregisterHandler(required=(ifoo,)) + self.assertFalse(unreg) + + def test_unregisterHandler_hit_w_factory_and_explicit_provided(self): + from zope.interface.declarations import InterfaceClass + from zope.interface.interfaces import Unregistered + from zope.interface.registry import HandlerRegistration + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + comp = self._makeOne() + def 
_factory(context): + raise NotImplementedError() + comp = self._makeOne() + comp.registerHandler(_factory, (ifoo,)) + _monkey, _events = self._wrapEvents() + with _monkey: + unreg = comp.unregisterHandler(_factory, (ifoo,)) + self.assertTrue(unreg) + self.assertEqual(len(_events), 1) + args, kw = _events[0] + event, = args + self.assertEqual(kw, {}) + self.assertTrue(isinstance(event, Unregistered)) + self.assertTrue(isinstance(event.object, HandlerRegistration)) + self.assertTrue(event.object.registry is comp) + self.assertEqual(event.object.required, (ifoo,)) + self.assertEqual(event.object.name, '') + self.assertTrue(event.object.factory is _factory) + + def test_unregisterHandler_hit_w_only_explicit_provided(self): + from zope.interface.declarations import InterfaceClass + from zope.interface.interfaces import Unregistered + from zope.interface.registry import HandlerRegistration + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + comp = self._makeOne() + def _factory(context): + raise NotImplementedError() + comp = self._makeOne() + comp.registerHandler(_factory, (ifoo,)) + _monkey, _events = self._wrapEvents() + with _monkey: + unreg = comp.unregisterHandler(required=(ifoo,)) + self.assertTrue(unreg) + self.assertEqual(len(_events), 1) + args, kw = _events[0] + event, = args + self.assertEqual(kw, {}) + self.assertTrue(isinstance(event, Unregistered)) + self.assertTrue(isinstance(event.object, HandlerRegistration)) + self.assertTrue(event.object.registry is comp) + self.assertEqual(event.object.required, (ifoo,)) + self.assertEqual(event.object.name, '') + self.assertTrue(event.object.factory is None) + + def test_unregisterHandler_wo_explicit_required(self): + from zope.interface.declarations import InterfaceClass + from zope.interface.interfaces import Unregistered + from zope.interface.registry import HandlerRegistration + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + class _Factory(object): + __component_adapts__ = (ifoo,) + + comp = 
self._makeOne() + comp.registerHandler(_Factory) + _monkey, _events = self._wrapEvents() + with _monkey: + unreg = comp.unregisterHandler(_Factory) + self.assertTrue(unreg) + self.assertEqual(len(_events), 1) + args, kw = _events[0] + event, = args + self.assertEqual(kw, {}) + self.assertTrue(isinstance(event, Unregistered)) + self.assertTrue(isinstance(event.object, HandlerRegistration)) + self.assertTrue(event.object.registry is comp) + self.assertEqual(event.object.required, (ifoo,)) + self.assertEqual(event.object.name, '') + self.assertEqual(event.object.info, '') + self.assertTrue(event.object.factory is _Factory) + + def test_handle_empty(self): + from zope.interface.declarations import InterfaceClass + from zope.interface.declarations import implementer + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + comp = self._makeOne() + @implementer(ifoo) + class Bar(object): + pass + bar = Bar() + comp.handle((bar,)) # doesn't raise + + def test_handle_non_empty(self): + from zope.interface.declarations import InterfaceClass + from zope.interface.declarations import implementer + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + _called_1 = [] + def _factory_1(context): + _called_1.append(context) + _called_2 = [] + def _factory_2(context): + _called_2.append(context) + comp = self._makeOne() + comp.registerHandler(_factory_1, (ifoo,)) + comp.registerHandler(_factory_2, (ifoo,)) + @implementer(ifoo) + class Bar(object): + pass + bar = Bar() + comp.handle(bar) + self.assertEqual(_called_1, [bar]) + self.assertEqual(_called_2, [bar]) + + +class UnhashableComponentsTests(ComponentsTests): + + def _getTargetClass(self): + # Mimic what pyramid does to create an unhashable + # registry + class Components(super(UnhashableComponentsTests, self)._getTargetClass(), dict): + pass + return Components + +# Test _getUtilityProvided, _getAdapterProvided, _getAdapterRequired via their +# callers (Component.registerUtility, Component.registerAdapter). 
+ + +class UtilityRegistrationTests(unittest.TestCase): + + def _getTargetClass(self): + from zope.interface.registry import UtilityRegistration + return UtilityRegistration + + def _makeOne(self, component=None, factory=None): + from zope.interface.declarations import InterfaceClass + + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + class _Registry(object): + def __repr__(self): + return '_REGISTRY' + registry = _Registry() + name = u'name' + doc = 'DOCSTRING' + klass = self._getTargetClass() + return (klass(registry, ifoo, name, component, doc, factory), + registry, + name, + ) + + def test_class_conforms_to_IUtilityRegistration(self): + from zope.interface.verify import verifyClass + from zope.interface.interfaces import IUtilityRegistration + verifyClass(IUtilityRegistration, self._getTargetClass()) + + def test_instance_conforms_to_IUtilityRegistration(self): + from zope.interface.verify import verifyObject + from zope.interface.interfaces import IUtilityRegistration + ur, _, _ = self._makeOne() + verifyObject(IUtilityRegistration, ur) + + def test___repr__(self): + class _Component(object): + __name__ = 'TEST' + _component = _Component() + ur, _registry, _name = self._makeOne(_component) + self.assertEqual(repr(ur), + "UtilityRegistration(_REGISTRY, IFoo, %r, TEST, None, 'DOCSTRING')" + % (_name)) + + def test___repr___provided_wo_name(self): + class _Component(object): + def __repr__(self): + return 'TEST' + _component = _Component() + ur, _registry, _name = self._makeOne(_component) + ur.provided = object() + self.assertEqual(repr(ur), + "UtilityRegistration(_REGISTRY, None, %r, TEST, None, 'DOCSTRING')" + % (_name)) + + def test___repr___component_wo_name(self): + class _Component(object): + def __repr__(self): + return 'TEST' + _component = _Component() + ur, _registry, _name = self._makeOne(_component) + ur.provided = object() + self.assertEqual(repr(ur), + "UtilityRegistration(_REGISTRY, None, %r, TEST, None, 'DOCSTRING')" + % (_name)) + + 
def test___hash__(self): + _component = object() + ur, _registry, _name = self._makeOne(_component) + self.assertEqual(ur.__hash__(), id(ur)) + + def test___eq___identity(self): + _component = object() + ur, _registry, _name = self._makeOne(_component) + self.assertTrue(ur == ur) + + def test___eq___hit(self): + _component = object() + ur, _registry, _name = self._makeOne(_component) + ur2, _, _ = self._makeOne(_component) + self.assertTrue(ur == ur2) + + def test___eq___miss(self): + _component = object() + _component2 = object() + ur, _registry, _name = self._makeOne(_component) + ur2, _, _ = self._makeOne(_component2) + self.assertFalse(ur == ur2) + + def test___ne___identity(self): + _component = object() + ur, _registry, _name = self._makeOne(_component) + self.assertFalse(ur != ur) + + def test___ne___hit(self): + _component = object() + ur, _registry, _name = self._makeOne(_component) + ur2, _, _ = self._makeOne(_component) + self.assertFalse(ur != ur2) + + def test___ne___miss(self): + _component = object() + _component2 = object() + ur, _registry, _name = self._makeOne(_component) + ur2, _, _ = self._makeOne(_component2) + self.assertTrue(ur != ur2) + + def test___lt___identity(self): + _component = object() + ur, _registry, _name = self._makeOne(_component) + self.assertFalse(ur < ur) + + def test___lt___hit(self): + _component = object() + ur, _registry, _name = self._makeOne(_component) + ur2, _, _ = self._makeOne(_component) + self.assertFalse(ur < ur2) + + def test___lt___miss(self): + _component = object() + _component2 = object() + ur, _registry, _name = self._makeOne(_component) + ur2, _, _ = self._makeOne(_component2) + ur2.name = _name + '2' + self.assertTrue(ur < ur2) + + def test___le___identity(self): + _component = object() + ur, _registry, _name = self._makeOne(_component) + self.assertTrue(ur <= ur) + + def test___le___hit(self): + _component = object() + ur, _registry, _name = self._makeOne(_component) + ur2, _, _ = 
self._makeOne(_component) + self.assertTrue(ur <= ur2) + + def test___le___miss(self): + _component = object() + _component2 = object() + ur, _registry, _name = self._makeOne(_component) + ur2, _, _ = self._makeOne(_component2) + ur2.name = _name + '2' + self.assertTrue(ur <= ur2) + + def test___gt___identity(self): + _component = object() + ur, _registry, _name = self._makeOne(_component) + self.assertFalse(ur > ur) + + def test___gt___hit(self): + _component = object() + _component2 = object() + ur, _registry, _name = self._makeOne(_component) + ur2, _, _ = self._makeOne(_component2) + ur2.name = _name + '2' + self.assertTrue(ur2 > ur) + + def test___gt___miss(self): + _component = object() + ur, _registry, _name = self._makeOne(_component) + ur2, _, _ = self._makeOne(_component) + self.assertFalse(ur2 > ur) + + def test___ge___identity(self): + _component = object() + ur, _registry, _name = self._makeOne(_component) + self.assertTrue(ur >= ur) + + def test___ge___miss(self): + _component = object() + _component2 = object() + ur, _registry, _name = self._makeOne(_component) + ur2, _, _ = self._makeOne(_component2) + ur2.name = _name + '2' + self.assertFalse(ur >= ur2) + + def test___ge___hit(self): + _component = object() + ur, _registry, _name = self._makeOne(_component) + ur2, _, _ = self._makeOne(_component) + ur2.name = _name + '2' + self.assertTrue(ur2 >= ur) + + +class AdapterRegistrationTests(unittest.TestCase): + + def _getTargetClass(self): + from zope.interface.registry import AdapterRegistration + return AdapterRegistration + + def _makeOne(self, component=None): + from zope.interface.declarations import InterfaceClass + + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + ibar = IFoo('IBar') + class _Registry(object): + def __repr__(self): + return '_REGISTRY' + registry = _Registry() + name = u'name' + doc = 'DOCSTRING' + klass = self._getTargetClass() + return (klass(registry, (ibar,), ifoo, name, component, doc), + registry, + name, + ) + + 
def test_class_conforms_to_IAdapterRegistration(self): + from zope.interface.verify import verifyClass + from zope.interface.interfaces import IAdapterRegistration + verifyClass(IAdapterRegistration, self._getTargetClass()) + + def test_instance_conforms_to_IAdapterRegistration(self): + from zope.interface.verify import verifyObject + from zope.interface.interfaces import IAdapterRegistration + ar, _, _ = self._makeOne() + verifyObject(IAdapterRegistration, ar) + + def test___repr__(self): + class _Component(object): + __name__ = 'TEST' + _component = _Component() + ar, _registry, _name = self._makeOne(_component) + self.assertEqual(repr(ar), + ("AdapterRegistration(_REGISTRY, [IBar], IFoo, %r, TEST, " + + "'DOCSTRING')") % (_name)) + + def test___repr___provided_wo_name(self): + class _Component(object): + def __repr__(self): + return 'TEST' + _component = _Component() + ar, _registry, _name = self._makeOne(_component) + ar.provided = object() + self.assertEqual(repr(ar), + ("AdapterRegistration(_REGISTRY, [IBar], None, %r, TEST, " + + "'DOCSTRING')") % (_name)) + + def test___repr___component_wo_name(self): + class _Component(object): + def __repr__(self): + return 'TEST' + _component = _Component() + ar, _registry, _name = self._makeOne(_component) + ar.provided = object() + self.assertEqual(repr(ar), + ("AdapterRegistration(_REGISTRY, [IBar], None, %r, TEST, " + + "'DOCSTRING')") % (_name)) + + def test___hash__(self): + _component = object() + ar, _registry, _name = self._makeOne(_component) + self.assertEqual(ar.__hash__(), id(ar)) + + def test___eq___identity(self): + _component = object() + ar, _registry, _name = self._makeOne(_component) + self.assertTrue(ar == ar) + + def test___eq___hit(self): + _component = object() + ar, _registry, _name = self._makeOne(_component) + ar2, _, _ = self._makeOne(_component) + self.assertTrue(ar == ar2) + + def test___eq___miss(self): + _component = object() + _component2 = object() + ar, _registry, _name = 
self._makeOne(_component) + ar2, _, _ = self._makeOne(_component2) + self.assertFalse(ar == ar2) + + def test___ne___identity(self): + _component = object() + ar, _registry, _name = self._makeOne(_component) + self.assertFalse(ar != ar) + + def test___ne___miss(self): + _component = object() + ar, _registry, _name = self._makeOne(_component) + ar2, _, _ = self._makeOne(_component) + self.assertFalse(ar != ar2) + + def test___ne___hit_component(self): + _component = object() + _component2 = object() + ar, _registry, _name = self._makeOne(_component) + ar2, _, _ = self._makeOne(_component2) + self.assertTrue(ar != ar2) + + def test___ne___hit_provided(self): + from zope.interface.declarations import InterfaceClass + class IFoo(InterfaceClass): + pass + ibaz = IFoo('IBaz') + _component = object() + ar, _registry, _name = self._makeOne(_component) + ar2, _, _ = self._makeOne(_component) + ar2.provided = ibaz + self.assertTrue(ar != ar2) + + def test___ne___hit_required(self): + from zope.interface.declarations import InterfaceClass + class IFoo(InterfaceClass): + pass + ibaz = IFoo('IBaz') + _component = object() + _component2 = object() + ar, _registry, _name = self._makeOne(_component) + ar2, _, _ = self._makeOne(_component2) + ar2.required = (ibaz,) + self.assertTrue(ar != ar2) + + def test___lt___identity(self): + _component = object() + ar, _registry, _name = self._makeOne(_component) + self.assertFalse(ar < ar) + + def test___lt___hit(self): + _component = object() + ar, _registry, _name = self._makeOne(_component) + ar2, _, _ = self._makeOne(_component) + self.assertFalse(ar < ar2) + + def test___lt___miss(self): + _component = object() + _component2 = object() + ar, _registry, _name = self._makeOne(_component) + ar2, _, _ = self._makeOne(_component2) + ar2.name = _name + '2' + self.assertTrue(ar < ar2) + + def test___le___identity(self): + _component = object() + ar, _registry, _name = self._makeOne(_component) + self.assertTrue(ar <= ar) + + def 
test___le___hit(self): + _component = object() + ar, _registry, _name = self._makeOne(_component) + ar2, _, _ = self._makeOne(_component) + self.assertTrue(ar <= ar2) + + def test___le___miss(self): + _component = object() + _component2 = object() + ar, _registry, _name = self._makeOne(_component) + ar2, _, _ = self._makeOne(_component2) + ar2.name = _name + '2' + self.assertTrue(ar <= ar2) + + def test___gt___identity(self): + _component = object() + ar, _registry, _name = self._makeOne(_component) + self.assertFalse(ar > ar) + + def test___gt___hit(self): + _component = object() + _component2 = object() + ar, _registry, _name = self._makeOne(_component) + ar2, _, _ = self._makeOne(_component2) + ar2.name = _name + '2' + self.assertTrue(ar2 > ar) + + def test___gt___miss(self): + _component = object() + ar, _registry, _name = self._makeOne(_component) + ar2, _, _ = self._makeOne(_component) + self.assertFalse(ar2 > ar) + + def test___ge___identity(self): + _component = object() + ar, _registry, _name = self._makeOne(_component) + self.assertTrue(ar >= ar) + + def test___ge___miss(self): + _component = object() + _component2 = object() + ar, _registry, _name = self._makeOne(_component) + ar2, _, _ = self._makeOne(_component2) + ar2.name = _name + '2' + self.assertFalse(ar >= ar2) + + def test___ge___hit(self): + _component = object() + ar, _registry, _name = self._makeOne(_component) + ar2, _, _ = self._makeOne(_component) + ar2.name = _name + '2' + self.assertTrue(ar2 >= ar) + + +class SubscriptionRegistrationTests(unittest.TestCase): + + def _getTargetClass(self): + from zope.interface.registry import SubscriptionRegistration + return SubscriptionRegistration + + def _makeOne(self, component=None): + from zope.interface.declarations import InterfaceClass + + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + ibar = IFoo('IBar') + class _Registry(object): + def __repr__(self): # pragma: no cover + return '_REGISTRY' + registry = _Registry() + name = 
u'name' + doc = 'DOCSTRING' + klass = self._getTargetClass() + return (klass(registry, (ibar,), ifoo, name, component, doc), + registry, + name, + ) + + def test_class_conforms_to_ISubscriptionAdapterRegistration(self): + from zope.interface.verify import verifyClass + from zope.interface.interfaces import ISubscriptionAdapterRegistration + verifyClass(ISubscriptionAdapterRegistration, self._getTargetClass()) + + def test_instance_conforms_to_ISubscriptionAdapterRegistration(self): + from zope.interface.verify import verifyObject + from zope.interface.interfaces import ISubscriptionAdapterRegistration + sar, _, _ = self._makeOne() + verifyObject(ISubscriptionAdapterRegistration, sar) + + +class HandlerRegistrationTests(unittest.TestCase): + + def _getTargetClass(self): + from zope.interface.registry import HandlerRegistration + return HandlerRegistration + + def _makeOne(self, component=None): + from zope.interface.declarations import InterfaceClass + + class IFoo(InterfaceClass): + pass + ifoo = IFoo('IFoo') + class _Registry(object): + def __repr__(self): + return '_REGISTRY' + registry = _Registry() + name = u'name' + doc = 'DOCSTRING' + klass = self._getTargetClass() + return (klass(registry, (ifoo,), name, component, doc), + registry, + name, + ) + + def test_class_conforms_to_IHandlerRegistration(self): + from zope.interface.verify import verifyClass + from zope.interface.interfaces import IHandlerRegistration + verifyClass(IHandlerRegistration, self._getTargetClass()) + + def test_instance_conforms_to_IHandlerRegistration(self): + from zope.interface.verify import verifyObject + from zope.interface.interfaces import IHandlerRegistration + hr, _, _ = self._makeOne() + verifyObject(IHandlerRegistration, hr) + + def test_properties(self): + def _factory(context): + raise NotImplementedError() + hr, _, _ = self._makeOne(_factory) + self.assertTrue(hr.handler is _factory) + self.assertTrue(hr.factory is hr.handler) + self.assertTrue(hr.provided is None) + + def 
test___repr___factory_w_name(self): + class _Factory(object): + __name__ = 'TEST' + hr, _registry, _name = self._makeOne(_Factory()) + self.assertEqual(repr(hr), + ("HandlerRegistration(_REGISTRY, [IFoo], %r, TEST, " + + "'DOCSTRING')") % (_name)) + + def test___repr___factory_wo_name(self): + class _Factory(object): + def __repr__(self): + return 'TEST' + hr, _registry, _name = self._makeOne(_Factory()) + self.assertEqual(repr(hr), + ("HandlerRegistration(_REGISTRY, [IFoo], %r, TEST, " + + "'DOCSTRING')") % (_name)) + +class PersistentAdapterRegistry(VerifyingAdapterRegistry): + + def __getstate__(self): + state = self.__dict__.copy() + for k in list(state): + if k in self._delegated or k.startswith('_v'): + state.pop(k) + state.pop('ro', None) + return state + + def __setstate__(self, state): + bases = state.pop('__bases__', ()) + self.__dict__.update(state) + self._createLookup() + self.__bases__ = bases + self._v_lookup.changed(self) + +class PersistentComponents(Components): + # Mimic zope.component.persistentregistry.PersistentComponents: + # we should be picklalable, but not persistent.Persistent ourself. 
+ + def _init_registries(self): + self.adapters = PersistentAdapterRegistry() + self.utilities = PersistentAdapterRegistry() + +class PersistentDictComponents(PersistentComponents, dict): + # Like Pyramid's Registry, we subclass Components and dict + pass + + +class PersistentComponentsDict(dict, PersistentComponents): + # Like the above, but inheritance is flipped + def __init__(self, name): + dict.__init__(self) + PersistentComponents.__init__(self, name) + +class TestPersistentComponents(unittest.TestCase): + + def _makeOne(self): + return PersistentComponents('test') + + def _check_equality_after_pickle(self, made): + pass + + def test_pickles_empty(self): + import pickle + comp = self._makeOne() + pickle.dumps(comp) + comp2 = pickle.loads(pickle.dumps(comp)) + + self.assertEqual(comp2.__name__, 'test') + + def test_pickles_with_utility_registration(self): + import pickle + comp = self._makeOne() + utility = object() + comp.registerUtility( + utility, + Interface) + + self.assertIs(utility, + comp.getUtility(Interface)) + + comp2 = pickle.loads(pickle.dumps(comp)) + self.assertEqual(comp2.__name__, 'test') + + # The utility is still registered + self.assertIsNotNone(comp2.getUtility(Interface)) + + # We can register another one + comp2.registerUtility( + utility, + Interface) + self.assertIs(utility, + comp2.getUtility(Interface)) + + self._check_equality_after_pickle(comp2) + + +class TestPersistentDictComponents(TestPersistentComponents): + + def _getTargetClass(self): + return PersistentDictComponents + + def _makeOne(self): + comp = self._getTargetClass()(name='test') + comp['key'] = 42 + return comp + + def _check_equality_after_pickle(self, made): + self.assertIn('key', made) + self.assertEqual(made['key'], 42) + +class TestPersistentComponentsDict(TestPersistentDictComponents): + + def _getTargetClass(self): + return PersistentComponentsDict + +class _Monkey(object): + # context-manager for replacing module names in the scope of a test. 
+ def __init__(self, module, **kw): + self.module = module + self.to_restore = dict([(key, getattr(module, key)) for key in kw]) + for key, value in kw.items(): + setattr(module, key, value) + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + for key, value in self.to_restore.items(): + setattr(self.module, key, value) diff --git a/thesisenv/lib/python3.6/site-packages/zope/interface/tests/test_ro.py b/thesisenv/lib/python3.6/site-packages/zope/interface/tests/test_ro.py new file mode 100644 index 0000000..0756c6d --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/interface/tests/test_ro.py @@ -0,0 +1,115 @@ +############################################################################## +# +# Copyright (c) 2014 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. 
+# +############################################################################## +"""Resolution ordering utility tests""" +import unittest + + +class Test__mergeOrderings(unittest.TestCase): + + def _callFUT(self, orderings): + from zope.interface.ro import _mergeOrderings + return _mergeOrderings(orderings) + + def test_empty(self): + self.assertEqual(self._callFUT([]), []) + + def test_single(self): + self.assertEqual(self._callFUT(['a', 'b', 'c']), ['a', 'b', 'c']) + + def test_w_duplicates(self): + self.assertEqual(self._callFUT([['a'], ['b', 'a']]), ['b', 'a']) + + def test_suffix_across_multiple_duplicats(self): + O1 = ['x', 'y', 'z'] + O2 = ['q', 'z'] + O3 = [1, 3, 5] + O4 = ['z'] + self.assertEqual(self._callFUT([O1, O2, O3, O4]), + ['x', 'y', 'q', 1, 3, 5, 'z']) + + +class Test__flatten(unittest.TestCase): + + def _callFUT(self, ob): + from zope.interface.ro import _flatten + return _flatten(ob) + + def test_w_empty_bases(self): + class Foo(object): + pass + foo = Foo() + foo.__bases__ = () + self.assertEqual(self._callFUT(foo), [foo]) + + def test_w_single_base(self): + class Foo(object): + pass + self.assertEqual(self._callFUT(Foo), [Foo, object]) + + def test_w_bases(self): + class Foo(object): + pass + class Bar(Foo): + pass + self.assertEqual(self._callFUT(Bar), [Bar, Foo, object]) + + def test_w_diamond(self): + class Foo(object): + pass + class Bar(Foo): + pass + class Baz(Foo): + pass + class Qux(Bar, Baz): + pass + self.assertEqual(self._callFUT(Qux), + [Qux, Bar, Foo, object, Baz, Foo, object]) + + +class Test_ro(unittest.TestCase): + + def _callFUT(self, ob): + from zope.interface.ro import ro + return ro(ob) + + def test_w_empty_bases(self): + class Foo(object): + pass + foo = Foo() + foo.__bases__ = () + self.assertEqual(self._callFUT(foo), [foo]) + + def test_w_single_base(self): + class Foo(object): + pass + self.assertEqual(self._callFUT(Foo), [Foo, object]) + + def test_w_bases(self): + class Foo(object): + pass + class Bar(Foo): + pass 
+ self.assertEqual(self._callFUT(Bar), [Bar, Foo, object]) + + def test_w_diamond(self): + class Foo(object): + pass + class Bar(Foo): + pass + class Baz(Foo): + pass + class Qux(Bar, Baz): + pass + self.assertEqual(self._callFUT(Qux), + [Qux, Bar, Baz, Foo, object]) diff --git a/thesisenv/lib/python3.6/site-packages/zope/interface/tests/test_sorting.py b/thesisenv/lib/python3.6/site-packages/zope/interface/tests/test_sorting.py new file mode 100644 index 0000000..73613d0 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/interface/tests/test_sorting.py @@ -0,0 +1,47 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. 
+# +############################################################################## +"""Test interface sorting +""" + +import unittest + +from zope.interface import Interface + +class I1(Interface): pass +class I2(I1): pass +class I3(I1): pass +class I4(Interface): pass +class I5(I4): pass +class I6(I2): pass + + +class Test(unittest.TestCase): + + def test(self): + l = [I1, I3, I5, I6, I4, I2] + l.sort() + self.assertEqual(l, [I1, I2, I3, I4, I5, I6]) + + def test_w_None(self): + l = [I1, None, I3, I5, I6, I4, I2] + l.sort() + self.assertEqual(l, [I1, I2, I3, I4, I5, I6, None]) + + def test_w_equal_names(self): + # interfaces with equal names but different modules should sort by + # module name + from zope.interface.tests.m1 import I1 as m1_I1 + l = [I1, m1_I1] + l.sort() + self.assertEqual(l, [m1_I1, I1]) diff --git a/thesisenv/lib/python3.6/site-packages/zope/interface/tests/test_verify.py b/thesisenv/lib/python3.6/site-packages/zope/interface/tests/test_verify.py new file mode 100644 index 0000000..5ad8bff --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/interface/tests/test_verify.py @@ -0,0 +1,582 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. 
+# +############################################################################## +""" zope.interface.verify unit tests +""" +import unittest + + +class Test_verifyClass(unittest.TestCase): + + def _callFUT(self, iface, klass): + from zope.interface.verify import verifyClass + return verifyClass(iface, klass) + + def test_class_doesnt_implement(self): + from zope.interface import Interface + from zope.interface.exceptions import DoesNotImplement + + class ICurrent(Interface): + pass + + class Current(object): + pass + + self.assertRaises(DoesNotImplement, self._callFUT, ICurrent, Current) + + def test_class_doesnt_implement_but_classImplements_later(self): + from zope.interface import Interface + from zope.interface import classImplements + + class ICurrent(Interface): + pass + + class Current(object): + pass + + classImplements(Current, ICurrent) + + self._callFUT(ICurrent, Current) + + def test_class_doesnt_have_required_method_simple(self): + from zope.interface import Interface + from zope.interface import implementer + from zope.interface.exceptions import BrokenImplementation + + class ICurrent(Interface): + def method(): pass + + @implementer(ICurrent) + class Current(object): + pass + + self.assertRaises(BrokenImplementation, + self._callFUT, ICurrent, Current) + + def test_class_has_required_method_simple(self): + from zope.interface import Interface + from zope.interface import implementer + + class ICurrent(Interface): + def method(): pass + + @implementer(ICurrent) + class Current(object): + + def method(self): + raise NotImplementedError() + + self._callFUT(ICurrent, Current) + + def test_class_doesnt_have_required_method_derived(self): + from zope.interface import Interface + from zope.interface import implementer + from zope.interface.exceptions import BrokenImplementation + + class IBase(Interface): + def method(): + pass + + class IDerived(IBase): + pass + + @implementer(IDerived) + class Current(object): + pass + + 
self.assertRaises(BrokenImplementation, + self._callFUT, IDerived, Current) + + def test_class_has_required_method_derived(self): + from zope.interface import Interface + from zope.interface import implementer + + class IBase(Interface): + def method(): + pass + + class IDerived(IBase): + pass + + @implementer(IDerived) + class Current(object): + + def method(self): + raise NotImplementedError() + + self._callFUT(IDerived, Current) + + def test_method_takes_wrong_arg_names_but_OK(self): + # We no longer require names to match. + from zope.interface import Interface + from zope.interface import implementer + + class ICurrent(Interface): + + def method(a): + pass + + @implementer(ICurrent) + class Current(object): + + def method(self, b): + raise NotImplementedError() + + self._callFUT(ICurrent, Current) + + def test_method_takes_not_enough_args(self): + from zope.interface import Interface + from zope.interface import implementer + from zope.interface.exceptions import BrokenMethodImplementation + + class ICurrent(Interface): + + def method(a): + pass + + @implementer(ICurrent) + class Current(object): + + def method(self): + raise NotImplementedError() + + self.assertRaises(BrokenMethodImplementation, + self._callFUT, ICurrent, Current) + + def test_method_doesnt_take_required_starargs(self): + from zope.interface import Interface + from zope.interface import implementer + from zope.interface.exceptions import BrokenMethodImplementation + + class ICurrent(Interface): + + def method(*args): + pass + + @implementer(ICurrent) + class Current(object): + + def method(self): + raise NotImplementedError() + + self.assertRaises(BrokenMethodImplementation, + self._callFUT, ICurrent, Current) + + def test_method_doesnt_take_required_only_kwargs(self): + from zope.interface import Interface + from zope.interface import implementer + from zope.interface.exceptions import BrokenMethodImplementation + + class ICurrent(Interface): + + def method(**kw): + pass + + 
@implementer(ICurrent) + class Current(object): + + def method(self): + raise NotImplementedError() + + self.assertRaises(BrokenMethodImplementation, + self._callFUT, ICurrent, Current) + + def test_method_takes_extra_arg(self): + from zope.interface import Interface + from zope.interface import implementer + from zope.interface.exceptions import BrokenMethodImplementation + + class ICurrent(Interface): + + def method(a): + pass + + @implementer(ICurrent) + class Current(object): + + def method(self, a, b): + raise NotImplementedError() + + self.assertRaises(BrokenMethodImplementation, + self._callFUT, ICurrent, Current) + + def test_method_takes_extra_arg_with_default(self): + from zope.interface import Interface + from zope.interface import implementer + + class ICurrent(Interface): + + def method(a): + pass + + @implementer(ICurrent) + class Current(object): + + def method(self, a, b=None): + raise NotImplementedError() + + self._callFUT(ICurrent, Current) + + def test_method_takes_only_positional_args(self): + from zope.interface import Interface + from zope.interface import implementer + + class ICurrent(Interface): + + def method(a): + pass + + @implementer(ICurrent) + class Current(object): + + def method(self, *args): + raise NotImplementedError() + + self._callFUT(ICurrent, Current) + + def test_method_takes_only_kwargs(self): + from zope.interface import Interface + from zope.interface import implementer + from zope.interface.exceptions import BrokenMethodImplementation + + class ICurrent(Interface): + + def method(a): + pass + + @implementer(ICurrent) + class Current(object): + + def method(self, **kw): + raise NotImplementedError() + + self.assertRaises(BrokenMethodImplementation, + self._callFUT, ICurrent, Current) + + def test_method_takes_extra_starargs(self): + from zope.interface import Interface + from zope.interface import implementer + + class ICurrent(Interface): + + def method(a): + pass + + @implementer(ICurrent) + class Current(object): + + 
def method(self, a, *args): + raise NotImplementedError() + + self._callFUT(ICurrent, Current) + + def test_method_takes_extra_starargs_and_kwargs(self): + from zope.interface import Interface + from zope.interface import implementer + + class ICurrent(Interface): + + def method(a): + pass + + @implementer(ICurrent) + class Current(object): + + def method(self, a, *args, **kw): + raise NotImplementedError() + + self._callFUT(ICurrent, Current) + + def test_method_doesnt_take_required_positional_and_starargs(self): + from zope.interface import Interface + from zope.interface import implementer + from zope.interface.exceptions import BrokenMethodImplementation + + class ICurrent(Interface): + + def method(a, *args): + pass + + @implementer(ICurrent) + class Current(object): + + def method(self, a): + raise NotImplementedError() + + self.assertRaises(BrokenMethodImplementation, + self._callFUT, ICurrent, Current) + + def test_method_takes_required_positional_and_starargs(self): + from zope.interface import Interface + from zope.interface import implementer + + class ICurrent(Interface): + + def method(a, *args): + pass + + @implementer(ICurrent) + class Current(object): + + def method(self, a, *args): + raise NotImplementedError() + + self._callFUT(ICurrent, Current) + + def test_method_takes_only_starargs(self): + from zope.interface import Interface + from zope.interface import implementer + + class ICurrent(Interface): + + def method(a, *args): + pass + + @implementer(ICurrent) + class Current(object): + + def method(self, *args): + raise NotImplementedError() + + self._callFUT(ICurrent, Current) + + def test_method_takes_required_kwargs(self): + from zope.interface import Interface + from zope.interface import implementer + + class ICurrent(Interface): + + def method(**kwargs): + pass + + @implementer(ICurrent) + class Current(object): + + def method(self, **kw): + raise NotImplementedError() + + self._callFUT(ICurrent, Current) + + def 
test_method_takes_positional_plus_required_starargs(self): + from zope.interface import Interface + from zope.interface import implementer + from zope.interface.exceptions import BrokenMethodImplementation + + class ICurrent(Interface): + + def method(*args): + pass + + @implementer(ICurrent) + class Current(object): + + def method(self, a, *args): + raise NotImplementedError() + + self.assertRaises(BrokenMethodImplementation, + self._callFUT, ICurrent, Current) + + + def test_method_doesnt_take_required_kwargs(self): + from zope.interface import Interface + from zope.interface import implementer + from zope.interface.exceptions import BrokenMethodImplementation + + class ICurrent(Interface): + + def method(**kwargs): + pass + + @implementer(ICurrent) + class Current(object): + + def method(self, a): + raise NotImplementedError() + + self.assertRaises(BrokenMethodImplementation, + self._callFUT, ICurrent, Current) + + + def test_class_has_method_for_iface_attr(self): + from zope.interface import Attribute + from zope.interface import Interface + from zope.interface import implementer + + class ICurrent(Interface): + attr = Attribute("The foo Attribute") + + @implementer(ICurrent) + class Current: + + def attr(self): + raise NotImplementedError() + + self._callFUT(ICurrent, Current) + + def test_class_has_nonmethod_for_method(self): + from zope.interface import Interface + from zope.interface import implementer + from zope.interface.exceptions import BrokenMethodImplementation + + class ICurrent(Interface): + def method(): + pass + + @implementer(ICurrent) + class Current: + method = 1 + + self.assertRaises(BrokenMethodImplementation, + self._callFUT, ICurrent, Current) + + def test_class_has_attribute_for_attribute(self): + from zope.interface import Attribute + from zope.interface import Interface + from zope.interface import implementer + + class ICurrent(Interface): + attr = Attribute("The foo Attribute") + + @implementer(ICurrent) + class Current: + + attr = 1 
+ + self._callFUT(ICurrent, Current) + + def test_class_misses_attribute_for_attribute(self): + # This check *passes* for verifyClass + from zope.interface import Attribute + from zope.interface import Interface + from zope.interface import implementer + + class ICurrent(Interface): + attr = Attribute("The foo Attribute") + + @implementer(ICurrent) + class Current: + pass + + self._callFUT(ICurrent, Current) + + def test_w_callable_non_func_method(self): + from zope.interface.interface import Method + from zope.interface import Interface + from zope.interface import implementer + + class QuasiMethod(Method): + def __call__(self, *args, **kw): + raise NotImplementedError() + + class QuasiCallable(object): + def __call__(self, *args, **kw): + raise NotImplementedError() + + class ICurrent(Interface): + attr = QuasiMethod('This is callable') + + @implementer(ICurrent) + class Current: + attr = QuasiCallable() + + self._callFUT(ICurrent, Current) + + + def test_w_decorated_method(self): + from zope.interface import Interface + from zope.interface import implementer + + def decorator(func): + # this is, in fact, zope.proxy.non_overridable + return property(lambda self: func.__get__(self)) + + class ICurrent(Interface): + + def method(a): + pass + + @implementer(ICurrent) + class Current(object): + + @decorator + def method(self, a): + raise NotImplementedError() + + self._callFUT(ICurrent, Current) + +class Test_verifyObject(Test_verifyClass): + + def _callFUT(self, iface, target): + from zope.interface.verify import verifyObject + if isinstance(target, (type, type(OldSkool))): + target = target() + return verifyObject(iface, target) + + def test_class_misses_attribute_for_attribute(self): + # This check *fails* for verifyObject + from zope.interface import Attribute + from zope.interface import Interface + from zope.interface import implementer + from zope.interface.exceptions import BrokenImplementation + + class ICurrent(Interface): + attr = Attribute("The foo 
Attribute") + + @implementer(ICurrent) + class Current: + pass + + self.assertRaises(BrokenImplementation, + self._callFUT, ICurrent, Current) + + def test_module_hit(self): + from zope.interface.tests.idummy import IDummyModule + from zope.interface.tests import dummy + + self._callFUT(IDummyModule, dummy) + + def test_module_miss(self): + from zope.interface import Interface + from zope.interface.tests import dummy + from zope.interface.exceptions import DoesNotImplement + + # same name, different object + class IDummyModule(Interface): + pass + + self.assertRaises(DoesNotImplement, + self._callFUT, IDummyModule, dummy) + + def test_staticmethod_hit_on_class(self): + from zope.interface import Interface + from zope.interface import provider + from zope.interface.verify import verifyObject + + class IFoo(Interface): + + def bar(a, b): + "The bar method" + + @provider(IFoo) + class Foo(object): + + @staticmethod + def bar(a, b): + raise AssertionError("We're never actually called") + + # Don't use self._callFUT, we don't want to instantiate the + # class. + verifyObject(IFoo, Foo) + +class OldSkool: + pass diff --git a/thesisenv/lib/python3.6/site-packages/zope/interface/verify.py b/thesisenv/lib/python3.6/site-packages/zope/interface/verify.py new file mode 100644 index 0000000..62bb64c --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/interface/verify.py @@ -0,0 +1,123 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. 
+# +############################################################################## +"""Verify interface implementations +""" +from zope.interface.exceptions import BrokenImplementation, DoesNotImplement +from zope.interface.exceptions import BrokenMethodImplementation +from types import FunctionType, MethodType +from zope.interface.interface import fromMethod, fromFunction, Method +import sys + +# This will be monkey-patched when running under Zope 2, so leave this +# here: +MethodTypes = (MethodType, ) + + +def _verify(iface, candidate, tentative=0, vtype=None): + """Verify that 'candidate' might correctly implement 'iface'. + + This involves: + + o Making sure the candidate defines all the necessary methods + + o Making sure the methods have the correct signature + + o Making sure the candidate asserts that it implements the interface + + Note that this isn't the same as verifying that the class does + implement the interface. + + If optional tentative is true, suppress the "is implemented by" test. + """ + + if vtype == 'c': + tester = iface.implementedBy + else: + tester = iface.providedBy + + if not tentative and not tester(candidate): + raise DoesNotImplement(iface) + + # Here the `desc` is either an `Attribute` or `Method` instance + for name, desc in iface.namesAndDescriptions(1): + try: + attr = getattr(candidate, name) + except AttributeError: + if (not isinstance(desc, Method)) and vtype == 'c': + # We can't verify non-methods on classes, since the + # class may provide attrs in its __init__. + continue + + raise BrokenImplementation(iface, name) + + if not isinstance(desc, Method): + # If it's not a method, there's nothing else we can test + continue + + if isinstance(attr, FunctionType): + if sys.version_info[0] >= 3 and isinstance(candidate, type) and vtype == 'c': + # This is an "unbound method" in Python 3. 
+ # Only unwrap this if we're verifying implementedBy; + # otherwise we can unwrap @staticmethod on classes that directly + # provide an interface. + meth = fromFunction(attr, iface, name=name, + imlevel=1) + else: + # Nope, just a normal function + meth = fromFunction(attr, iface, name=name) + elif (isinstance(attr, MethodTypes) + and type(attr.__func__) is FunctionType): + meth = fromMethod(attr, iface, name) + elif isinstance(attr, property) and vtype == 'c': + # Without an instance we cannot be sure it's not a + # callable. + continue + else: + if not callable(attr): + raise BrokenMethodImplementation(name, "Not a method") + # sigh, it's callable, but we don't know how to introspect it, so + # we have to give it a pass. + continue + + # Make sure that the required and implemented method signatures are + # the same. + desc = desc.getSignatureInfo() + meth = meth.getSignatureInfo() + + mess = _incompat(desc, meth) + if mess: + raise BrokenMethodImplementation(name, mess) + + return True + +def verifyClass(iface, candidate, tentative=0): + return _verify(iface, candidate, tentative, vtype='c') + +def verifyObject(iface, candidate, tentative=0): + return _verify(iface, candidate, tentative, vtype='o') + +def _incompat(required, implemented): + #if (required['positional'] != + # implemented['positional'][:len(required['positional'])] + # and implemented['kwargs'] is None): + # return 'implementation has different argument names' + if len(implemented['required']) > len(required['required']): + return 'implementation requires too many arguments' + if ((len(implemented['positional']) < len(required['positional'])) + and not implemented['varargs']): + return "implementation doesn't allow enough arguments" + if required['kwargs'] and not implemented['kwargs']: + return "implementation doesn't support keyword arguments" + if required['varargs'] and not implemented['varargs']: + return "implementation doesn't support variable arguments" diff --git 
a/thesisenv/lib/python3.6/site-packages/zope/lifecycleevent/README.rst b/thesisenv/lib/python3.6/site-packages/zope/lifecycleevent/README.rst new file mode 100644 index 0000000..6402318 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/lifecycleevent/README.rst @@ -0,0 +1,302 @@ +============= + Quick Start +============= + +.. module:: zope.lifecycleevent + +This document describes the various event types defined by this +package and provides some basic examples of using them to inform parts +of the system about object changes. + +All events have three components: an *interface* defining the event's +structure, a default *implementation* of that interface (the *event +object*), and a high-level *convenience function* (defined by the +:class:`~.IZopeLifecycleEvent` interface) for easily sending that +event in a single function call. + +.. note:: The convenience functions are simple wrappers for + constructing an event object and sending it via + :func:`zope.event.notify`. Here we will only discuss using these + functions; for more information on the advanced usage of when and + how to construct and send event objects manually, see + :doc:`manual`. + +.. note:: This document will not discuss actually *handling* these + events (setting up *subscribers* for them). For information on + that topic, see :doc:`handling`. + +We will go through the events in approximate order of how they would +be used to follow the life-cycle of an object. + +Creation +======== + +The first event is :class:`~.IObjectCreatedEvent`, implemented by +:class:`~.ObjectCreatedEvent`, which is used to communicate that a single object +has been created. It can be sent with the +:func:`zope.lifecycleevent.created` function. + + +For example: + + >>> from zope.lifecycleevent import created + + >>> obj = {} + >>> created(obj) + +Copying +======= + +Copying an object is a special case of creating one. 
It can happen at +any time and is implemented with :class:`~.IObjectCopiedEvent`, +:class:`~.ObjectCopiedEvent`, or the API +:func:`zope.lifecycleevent.copied`. + + >>> from zope.lifecycleevent import copied + >>> import pickle + >>> copy = pickle.loads(pickle.dumps(obj)) + >>> copied(copy, obj) + +.. note:: + Handlers for :class:`~.IObjectCreatedEvent` can expect to + receive events for :class:`~.IObjectCopiedEvent` as well. + +.. _addition: + +Addition +======== + +After objects are created, it is common to *add* them somewhere for +storage or access. This can be accomplished with the +:class:`~.IObjectAddedEvent` and its implementation +:class:`~.ObjectAddedEvent`, or the API +:func:`zope.lifecycleevent.added`. + + >>> from zope.lifecycleevent import ObjectAddedEvent + >>> from zope.lifecycleevent import added + + >>> container = {} + >>> container['name'] = obj + >>> added(obj, container, 'name') + +If the object being added has a non-None ``__name__`` or ``__parent__`` +attribute, we can omit those values when we call ``added`` and the +attributes will be used. + + >>> class Location(object): + ... __parent__ = None + ... __name__ = None + + >>> location = Location() + >>> location.__name__ = "location" + >>> location.__parent__ = container + >>> container[location.__name__] = location + >>> added(location) + +.. tip:: + The interface :class:`zope.location.interfaces.ILocation` + defines these attributes (although we don't require the object to + implement that interface), and containers that implement + :class:`zope.container.interfaces.IWriteContainer` are expected to + set them (such containers will also automatically send the + :class:`~.IObjectAddedEvent`). + + +Modification +============ + +One of the most common types of events used from this package is the +:class:`~.IObjectModifiedEvent` (implemented by +:class:`~.ObjectModifiedEvent`) that represents object modification. 
+ +In the simplest case, it may be enough to simply notify interested +parties that the object has changed. Like the other events, this can +be done manually or through the convenience API +(:func:`zope.lifecycleevent.modified`): + + >>> obj['key'] = 42 + + >>> from zope.lifecycleevent import modified + >>> modified(obj) + +Providing Additional Information +-------------------------------- + +Some event consumers like indexes (catalogs) and caches may need more +information to update themselves in an efficient manner. The necessary +information can be provided as optional "modification descriptions" of +the :class:`~.ObjectModifiedEvent` (or again, via the +:func:`~zope.lifecycleevent.modified` function). + +This package doesn't strictly define what a "modification description" +must be. The most common (and thus most interoperable) descriptions +are based on interfaces. + +We could simply pass an interface itself to say "something about the +way this object implements the interface changed": + + >>> from zope.interface import Interface, Attribute, implementer + >>> class IFile(Interface): + ... data = Attribute("The data of the file.") + ... name = Attribute("The name of the file.") + + >>> @implementer(IFile) + ... class File(object): + ... data = '' + ... name = '' + + >>> file = File() + >>> created(file) + >>> file.data = "123" + >>> modified(file, IFile) + +Attributes +~~~~~~~~~~ + +We can also be more specific in a case like this where we know exactly +what attribute of the interface we modified. 
There is a helper class +:class:`zope.lifecycleevent.Attributes` that assists: + + >>> from zope.lifecycleevent import Attributes + >>> file.data = "abc" + >>> modified(file, Attributes(IFile, "data")) + +If we modify multiple attributes of an interface at the same time, we +can include that information in a single ``Attributes`` object: + + >>> file.data = "123" + >>> file.name = "123.txt" + >>> modified(file, Attributes(IFile, "data", "name")) + +Sometimes we may change attributes from multiple interfaces at the +same time. We can also represent this by including more than one +``Attributes`` instance: + + >>> import time + >>> class IModified(Interface): + ... lastModified = Attribute("The timestamp when the object was modified.") + + >>> @implementer(IModified) + ... class ModifiedFile(File): + ... lastModified = 0 + + >>> file = ModifiedFile() + >>> created(file) + + >>> file.data = "abc" + >>> file.lastModified = time.time() + >>> modified(file, + ... Attributes(IFile, "data"), + ... Attributes(IModified, "lastModified")) + + +Sequences +~~~~~~~~~ + +When an object is a sequence or container, we can specify +the individual indexes or keys that we changed using +:class:`zope.lifecycleevent.Sequence`. + +First we'll need to define a sequence and create an instance: + + >>> from zope.interface.common.sequence import ISequence + >>> class IFileList(ISequence): + ... "A sequence of IFile objects." + >>> @implementer(IFileList) + ... class FileList(list): + ... pass + + >>> files = FileList() + >>> created(files) + +Now we can modify the sequence by adding an object to it: + + >>> files.append(File()) + >>> from zope.lifecycleevent import Sequence + >>> modified(files, Sequence(IFileList, len(files) - 1)) + +We can also replace an existing object: + + >>> files[0] = File() + >>> modified(files, Sequence(IFileList, 0)) + +Of course ``Attributes`` and ``Sequences`` can be combined in any +order and length necessary to describe the modifications fully. 
+ +Modification Descriptions +~~~~~~~~~~~~~~~~~~~~~~~~~ + +Although this package does not require any particular definition or +implementation of modification descriptions, it provides the two that +we've already seen: :class:`~zope.lifecycleevent.Attributes` and +:class:`~zope.lifecycleevent.Sequence`. Both of these classes +implement the marker interface +:class:`~zope.lifecycleevent.interfaces.IModificationDescription`. If +you implement custom modification descriptions, consider implementing +this marker interface. + +Movement +======== + +Sometimes objects move from one place to another. This can be +described with the interface :class:`~.IObjectMovedEvent`, its +implementation :class:`~.ObjectMovedEvent` or the API +:func:`zope.lifecycleevent.moved`. + +Objects may move within a single container by changing their name: + + >>> from zope.lifecycleevent import moved + >>> container['new name'] = obj + >>> del container['name'] + >>> moved(obj, + ... oldParent=container, oldName='name', + ... newParent=container, newName='new name') + +Or they may move to a new container (under the same name, or a +different name): + + >>> container2 = {} + >>> container2['new name'] = obj + >>> del container['new name'] + >>> moved(obj, + ... oldParent=container, oldName='new name', + ... newParent=container2, newName='new name') + +Unlike :ref:`addition `, any ``__name__`` and ``__parent__`` +attribute on the object are ignored and must be provided explicitly. + +.. tip:: + Much like the addition of objects, + :class:`zope.container.interfaces.IWriteContainer` implementations + are expected to update the ``__name__`` and ``__parent__`` + attributes automatically, and to automatically send the appropriate + movement event. + +Removal +======= + +Finally, objects can be removed from the system altogether with +:class:`IObjectRemovedEvent`, :class:`ObjectRemovedEvent` and +:func:`zope.lifecycleevent.removed`. 
+ + >>> from zope.lifecycleevent import removed + >>> del container2['new name'] + >>> removed(obj, container2, 'new name') + +.. note:: + This is a special case of movement where the new parent and + new name are always ``None``. Handlers for + :class:`~.IObjectMovedEvent` can expect to receive events for + :class:`~.IObjectRemovedEvent` as well. + +If the object being removed provides the ``__name__`` or +``__parent__`` attribute, those arguments can be omitted and the +attributes will be used instead. + + >>> location = container['location'] + >>> del container[location.__name__] + >>> removed(location) + +.. tip:: + Once again, :class:`~zope.container.interfaces.IWriteContainer` + implementations will send the correct event automatically. diff --git a/thesisenv/lib/python3.6/site-packages/zope/lifecycleevent/__init__.py b/thesisenv/lib/python3.6/site-packages/zope/lifecycleevent/__init__.py new file mode 100644 index 0000000..7fec8f1 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/lifecycleevent/__init__.py @@ -0,0 +1,163 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Life cycle events. + +This module provides the :class:`~.IZopeLifecycleEvent` interface, +in addition to concrete classes implementing the various event interfaces. 
+""" +__docformat__ = 'restructuredtext' + +from zope.interface.interfaces import ObjectEvent +from zope.interface import implementer, moduleProvides +from zope.event import notify + +from zope.lifecycleevent.interfaces import IZopeLifecycleEvent +from zope.lifecycleevent.interfaces import IObjectCreatedEvent +from zope.lifecycleevent.interfaces import IObjectModifiedEvent +from zope.lifecycleevent.interfaces import IObjectCopiedEvent +from zope.lifecycleevent.interfaces import IObjectMovedEvent +from zope.lifecycleevent.interfaces import IObjectAddedEvent +from zope.lifecycleevent.interfaces import IObjectRemovedEvent +from zope.lifecycleevent.interfaces import IAttributes +from zope.lifecycleevent.interfaces import ISequence + + +moduleProvides(IZopeLifecycleEvent) + +@implementer(IObjectCreatedEvent) +class ObjectCreatedEvent(ObjectEvent): + """An object has been created""" + + +def created(object): + "See :meth:`.IZopeLifecycleEvent.created`" + notify(ObjectCreatedEvent(object)) + + +@implementer(IAttributes) +class Attributes(object): + """Describes modified attributes of an interface.""" + + def __init__(self, interface, *attributes): + self.interface = interface + self.attributes = attributes + + +@implementer(ISequence) +class Sequence(object): + """Describes modified keys of an interface.""" + + def __init__(self, interface, *keys): + self.interface = interface + self.keys = keys + + +@implementer(IObjectModifiedEvent) +class ObjectModifiedEvent(ObjectEvent): + """An object has been modified""" + + def __init__(self, object, *descriptions): + """Init with a list of modification descriptions.""" + super(ObjectModifiedEvent, self).__init__(object) + self.descriptions = descriptions + + +def modified(object, *descriptions): + "See :meth:`.IZopeLifecycleEvent.modified`" + notify(ObjectModifiedEvent(object, *descriptions)) + + +@implementer(IObjectCopiedEvent) +class ObjectCopiedEvent(ObjectCreatedEvent): + """An object has been copied""" + + def __init__(self, 
object, original): + super(ObjectCopiedEvent, self).__init__(object) + self.original = original + + +def copied(object, original): + "See :meth:`.IZopeLifecycleEvent.copied`" + notify(ObjectCopiedEvent(object, original)) + + +@implementer(IObjectMovedEvent) +class ObjectMovedEvent(ObjectEvent): + """An object has been moved""" + + def __init__(self, object, oldParent, oldName, newParent, newName): + ObjectEvent.__init__(self, object) + self.oldParent = oldParent + self.oldName = oldName + self.newParent = newParent + self.newName = newName + + +def moved(object, oldParent, oldName, newParent, newName): + "See :meth:`.IZopeLifecycleEvent.moved`" + notify(ObjectMovedEvent(object, oldParent, oldName, newParent, newName)) + + +@implementer(IObjectAddedEvent) +class ObjectAddedEvent(ObjectMovedEvent): + """An object has been added to a container. + + If ``newParent`` or ``newName`` is not provided or is ``None``, + they will be taken from the values of ``object.__parent__`` or + ``object.__name__``, respectively. + """ + + def __init__(self, object, newParent=None, newName=None): + if newParent is None: + newParent = object.__parent__ + if newName is None: + newName = object.__name__ + ObjectMovedEvent.__init__(self, object, None, None, newParent, newName) + + +def added(object, newParent=None, newName=None): + "See :meth:`.IZopeLifecycleEvent.added`" + notify(ObjectAddedEvent(object, newParent, newName)) + + +@implementer(IObjectRemovedEvent) +class ObjectRemovedEvent(ObjectMovedEvent): + """An object has been removed from a container. + + If ``oldParent`` or ``oldName`` is not provided or is ``None``, + they will be taken from the values of ``object.__parent__`` or + ``object.__name__``, respectively. 
+ """ + + def __init__(self, object, oldParent=None, oldName=None): + if oldParent is None: + oldParent = object.__parent__ + if oldName is None: + oldName = object.__name__ + ObjectMovedEvent.__init__(self, object, oldParent, oldName, None, None) + + +def removed(object, oldParent=None, oldName=None): + "See :meth:`.IZopeLifecycleEvent.removed`" + notify(ObjectRemovedEvent(object, oldParent, oldName)) + + + +def _copy_docs(): + for func_name, func_value in IZopeLifecycleEvent.namesAndDescriptions(): + func = globals()[func_name] + func.__doc__ = func_value.__doc__ + +_copy_docs() +del _copy_docs diff --git a/thesisenv/lib/python3.6/site-packages/zope/lifecycleevent/handling.rst b/thesisenv/lib/python3.6/site-packages/zope/lifecycleevent/handling.rst new file mode 100644 index 0000000..dac99ba --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/lifecycleevent/handling.rst @@ -0,0 +1,246 @@ +================= + Handling Events +================= + +This document provides information on how to handle the lifecycle +events defined and sent by this package. + +Background information on handling events is found in +:mod:`zope.event's documentation `. + +Class Based Handling +==================== + +:mod:`zope.event` includes `a simple framework`_ for dispatching +events based on the class of the event. This could be used to provide +handlers for each of the event classes defined by this package +(:class:`ObjectCreatedEvent`, etc). However, it doesn't allow +configuring handlers based on the kind of *object* the event contains. +To do that, we need another level of dispatching. + +Fortunately, that level of dispatching already exists within +:mod:`zope.component`. + +.. 
_a simple framework: https://zopeevent.readthedocs.io/en/latest/classhandler.html + + +Component Based Handling +======================== + +:mod:`zope.component` includes an `event dispatching framework`_ that +lets us dispatch events based not just on the kind of the event, but +also on the kind of object the event contains. + +All of the events defined by this package are implementations of +:class:`zope.interface.interfaces.IObjectEvent`. :mod:`zope.component` +`includes special support`_ for these kinds of events. That document +walks through a generic example in Python code. Here we will show an +example specific to life cycle events using the type of configuration +that is more likely to be used in a real application. + +For this to work, it's important that :mod:`zope.component` is configured +correctly. Usually this is done with ZCML executed at startup time (we +will be using strings in this documentation, but usually this resides +in files, most often named ``configure.zcml``): + + >>> from zope.configuration import xmlconfig + >>> _ = xmlconfig.string(""" + ... + ... + ... + ... """) + +First we will define an object we're interested in getting events for: + + >>> from zope.interface import Interface, Attribute, implementer + >>> class IFile(Interface): + ... data = Attribute("The data of the file.") + ... name = Attribute("The name of the file.") + >>> @implementer(IFile) + ... class File(object): + ... data = '' + ... name = '' + + +Next, we will write our subscriber. Normally, ``zope.event`` +subscribers take just one argument, the event object. But when we use +the automatic dispatching that ``zope.component`` provides, our +function will receive *two* arguments: the object of the event, and +the event. We can use the decorators that ``zope.component`` supplies +to annotate the function with the kinds of arguments it wants to +handle. 
Alternatively, we could specify that information when we +register the handler with zope.component (we'll see an example of that +later). + + >>> from zope.component import adapter + >>> from zope.lifecycleevent import IObjectCreatedEvent + >>> @adapter(IFile, IObjectCreatedEvent) + ... def on_file_created(file, event): + ... print("A file of type '%s' was created" % (file.__class__.__name__)) + +Finally, we will register our handler with zope.component. This is +also usually done with ZCML executed at startup time: + + >>> _ = xmlconfig.string(""" + ... + ... + ... + ... + ... """) + +Now we can send an event noting that a file was created, and our handler +will be called: + + >>> from zope.lifecycleevent import created + >>> file = File() + >>> created(file) + A file of type 'File' was created + +Other types of objects don't trigger our handler: + + >>> created(object) + +The hierarchy is respected, so if we define a subclass of ``File`` and +indeed, even a sub-interface of ``IFile``, our handler will be +invoked. + + >>> class SubFile(File): pass + >>> created(SubFile()) + A file of type 'SubFile' was created + + >>> class ISubFile(IFile): pass + >>> @implementer(ISubFile) + ... class IndependentSubFile(object): + ... data = name = '' + >>> created(IndependentSubFile()) + A file of type 'IndependentSubFile' was created + +We can further register a handler just for the subinterface we +created. Here we'll also demonstrate supplying this information in +ZCML. + + >>> def generic_object_event(obj, event): + ... print("Got '%s' for an object of type '%s'" % (event.__class__.__name__, obj.__class__.__name__)) + >>> _ = xmlconfig.string(""" + ... + ... + ... + ... + ... """) + +Now both handlers will be called for implementations of ``ISubFile``, +but still only the original implementation will be called for base ``IFiles``. 
+ + >>> created(IndependentSubFile()) + A file of type 'IndependentSubFile' was created + Got 'ObjectCreatedEvent' for an object of type 'IndependentSubFile' + >>> created(File()) + A file of type 'File' was created + +Projects That Rely on Dispatched Events +--------------------------------------- + +Handlers for life cycle events are commonly registered with +``zope.component`` as a means for keeping projects uncoupled. This +section provides a partial list of such projects for reference. + +As mentioned in :doc:`quickstart`, the containers provided by +`zope.container`_ generally automatically send the correct life +cycle events. + +At a low-level, there are utilities that assign integer IDs to objects +as they are created such as `zope.intid`_ and `zc.intid`_. +``zc.intid``, in particular, `documents the way it uses events`_. + +``zope.catalog`` can `automatically index documents`_ as part of +handling life cycle events. + +Containers and Sublocations +--------------------------- + +The events :class:`~ObjectAddedEvent` and :class:`~ObjectRemovedEvent` +usually need to be (eventually) sent in pairs for any given object. +That is, when an added event is sent for an object, for symmetry +eventually a removed event should be sent too. This makes sure that +proper cleanup can happen. + +Sometimes one object can be said to contain other objects. This is +obvious in the case of lists, dictionaries and the container objects +provided by `zope.container`_, but the same can sometimes be said for +other types of objects too that reference objects in their own +attributes. + +What happens when a life cycle event for such an object is sent? By +default, *nothing*. This may leave the system in an inconsistent +state. + +For example, lets create a container and add some objects to +it. First we'll set up a generic event handler so we can see the +events that go out. + + >>> _ = xmlconfig.string(""" + ... + ... + ... + ... + ... """) + Got... 
+ >>> from zope.lifecycleevent import added + >>> container = {} + >>> created(container) + Got 'ObjectCreatedEvent' for an object of type 'dict' + >>> object1 = object() + >>> container['object1'] = object1 + >>> added(object1, container, 'object1') + Got 'ObjectAddedEvent' for an object of type 'object' + +We can see that we got an "added" event for the object we stored in +the container. What happens when we remove the container? + + >>> from zope.lifecycleevent import removed + >>> tmp = container + >>> del container + >>> removed(tmp, '', '') + Got 'ObjectRemovedEvent' for an object of type 'dict' + >>> del tmp + +We only got an event for the container, not the objects it contained! +If the handlers that fired when we added "object1" had done anything +that needed to be *undone* for symmetry when "object1" was removed +(e.g., if it had been indexed and needed to be unindexed) the system +is now corrupt because those handlers never got the +``ObjectRemovedEvent`` for "object1". + + +The solution to this problem comes from `zope.container`_. It defines +the concept of :class:`~zope.container.interfaces.ISubLocations`: a +way for any given object to inform other objects about the objects it +contains (and it provides a :class:`default implementation of +ISubLocations ` for +containers). It also provides :func:`a function +` that will send +events that happen to the *parent* object for all the *child* objects +it contains. + +In this way, its possible for any arbitrary life cycle event to +automatically be propagated to its children without any specific +caller of ``remove``, say, needing to have any specific knowledge +about containment relationships. + +For this to work, two things must be done: + +1. Configure `zope.container`_. This too is usually done in ZCML with + ````. +2. Provide an adapter to :class:`~.ISubLocations` when some object can + contain other objects that need events. + + +.. _zope.intid: https://zopeintid.readthedocs.io/en/latest/ +.. 
_zc.intid: https://zcintid.readthedocs.io/en/latest/ +.. _documents the way it uses events: https://zcintid.readthedocs.io/en/latest/subscribers.html +.. _automatically index documents: https://zopecatalog.readthedocs.io/en/latest/events.html +.. _zope.container: https://zopecontainer.readthedocs.io/en/latest/ +.. _event dispatching framework: https://zopecomponent.readthedocs.io/en/latest/event.html +.. _includes special support : https://zopecomponent.readthedocs.io/en/latest/event.html#object-events diff --git a/thesisenv/lib/python3.6/site-packages/zope/lifecycleevent/interfaces.py b/thesisenv/lib/python3.6/site-packages/zope/lifecycleevent/interfaces.py new file mode 100644 index 0000000..2ec60a0 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/lifecycleevent/interfaces.py @@ -0,0 +1,152 @@ +############################################################################## +# +# Copyright (c) 2002, 2003 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Event-related interfaces +""" +__docformat__ = 'restructuredtext' + +from zope.interface import Interface, Attribute +from zope.interface import interfaces + + +class IZopeLifecycleEvent(Interface): + """ + High-level functions for sending events. + + These are implemented by the :mod:`zope.lifecycleevent` module. + """ + + def created(object): + """Send an :class:`~.IObjectCreatedEvent` for ``object``.""" + + def modified(object, *descriptions): + """Send an :class:`~.IObjectModifiedEvent` for ``object``. 
+ + ``descriptions`` is a sequence of interfaces or fields which were + updated. The :class:`IAttributes` and :class:`ISequence` helpers + can be used. + + """ + + def copied(object, original): + """Send an :class:`~.IObjectCopiedEvent` for ``object``. + + ``original`` is the object the copy was created from. + + """ + + def moved(object, oldParent, oldName, newParent, newName): + """Send an :class:`~.IObjectMovedEvent` for ``object``. + + ``oldParent`` is the container ``object`` was removed from. + ``oldName`` was the name used to store ``object`` in ``oldParent``. + ``newParent`` is the container ``object`` was added to. + ``newName`` is the name used to store ``object`` in ``newParent``. + + Note that ``newParent`` and ``oldParent`` may be the same if the names + are different, and vice versa. + + """ + + def added(object, newParent=None, newName=None): + """Send an :class:`~.IObjectAddedEvent` for ``object``. + + ``newParent`` is the container ``object`` was added to. + ``newName`` is the name used to store ``object`` in the container. + + If either of these is not provided or is ``None``, they will + be taken from the values of ``object.__parent__`` or + ``object.__name__``, respectively. + """ + + def removed(object, oldParent=None, oldName=None): + """Send an :class:`~.IObjectRemovedEvent` for ``object``. + + ``oldParent`` is the container ``object`` was removed from. + ``oldName`` was the name used to store ``object`` in `oldParent`. + + If either of these is not provided or is ``None``, they will + be taken from the values of ``object.__parent__`` or + ``object.__name__``, respectively. + """ + + +class IObjectCreatedEvent(interfaces.IObjectEvent): + """An object has been created. + + The ``object`` attribute will commonly have a value of ``None`` + for its ``__name__`` and ``__parent__`` values (if it has those attributes + at all). 
+ """ + + +class IObjectCopiedEvent(IObjectCreatedEvent): + """An object has been copied.""" + + original = Attribute("The original from which the copy was made.") + + +class IObjectModifiedEvent(interfaces.IObjectEvent): + """An object has been modified""" + + descriptions = Attribute("""The supplied modification descriptions. + + These may be interfaces or implementations of :class:`IModificationDescription` + such as :class:`~.Attributes` or :class:`~.Sequence`""") + + +class IModificationDescription(Interface): + """Marker interface for descriptions of object modifications. + + Can be used as a parameter of an IObjectModifiedEvent.""" + + +class IAttributes(IModificationDescription): + """Describes the attributes of an interface.""" + + interface = Attribute("The involved interface.") + attributes = Attribute("A sequence of modified attributes.") + + +class ISequence(IModificationDescription): + """Describes the modified keys of a sequence-like interface.""" + + interface = Attribute("The involved interface.") + keys = Attribute("A sequence of modified keys.") + + +############################################################################## +# Moving Objects + +class IObjectMovedEvent(interfaces.IObjectEvent): + """An object has been moved.""" + + oldParent = Attribute("The old location parent for the object.") + oldName = Attribute("The old location name for the object.") + newParent = Attribute("The new location parent for the object.") + newName = Attribute("The new location name for the object.") + + +############################################################################## +# Adding objects + +class IObjectAddedEvent(IObjectMovedEvent): + """An object has been added to a container.""" + + +############################################################################## +# Removing objects + + +class IObjectRemovedEvent(IObjectMovedEvent): + """An object has been removed from a container.""" diff --git 
a/thesisenv/lib/python3.6/site-packages/zope/lifecycleevent/manual.rst b/thesisenv/lib/python3.6/site-packages/zope/lifecycleevent/manual.rst new file mode 100644 index 0000000..0f9c712 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/lifecycleevent/manual.rst @@ -0,0 +1,64 @@ +============================= + Creating and Sending Events +============================= + +As discussed in :doc:`quickstart`, most uses of +``zope.lifecycleevent`` will be satisfied with the high level API +described by +:class:`~zope.lifecycleevent.interfaces.IZopeLifecycleEvent`, but it is +possible to create and send events manually, both those defined here +and your own subclasses. + +Provided Events +=============== + +All of the functions described in :doc:`quickstart` are very simple +wrappers that create an event object defined by this package and then +use :func:`zope.event.notify` to send it. You can do the same, as +shown below, but there is usually little reason to do so. + + >>> from zope.event import notify + >>> from zope.lifecycleevent import ObjectCreatedEvent + >>> from zope.lifecycleevent import ObjectCopiedEvent + >>> from zope.lifecycleevent import ObjectModifiedEvent + >>> from zope.lifecycleevent import ObjectMovedEvent + >>> from zope.lifecycleevent import ObjectRemovedEvent + + >>> obj = object() + >>> notify(ObjectCreatedEvent(obj)) + >>> notify(ObjectCopiedEvent(object(), obj)) + >>> notify(ObjectMovedEvent(obj, + ... None, 'oldName', + ... None, 'newName')) + >>> notify(ObjectModifiedEvent(obj, "description 1", "description 2")) + >>> notify(ObjectRemovedEvent(obj, "oldParent", "oldName")) + +Subclassing Events +================== + +It can sometimes be helpful to subclass one of the provided event +classes. If you then want to send a notification of that class, you +must manually construct and notify it. 
+ +One reason to create a subclass is to be able to add additional +attributes to the event object, perhaps changing the constructor +signature in the process. Another reason to create a subclass is to be +able to easily subscribe to all events that are *just* of that class. +The class :class:`zope.container.contained.ContainerModifiedEvent` is +used for this reason. + + +For example, in an application with distinct users, we might want to +let subscribers know which user created the object. We might also want +to be able to distinguish between objects that are created by a user +and those that are automatically created as part of system operation +or administration. The following subclass lets us do both. + + >>> class ObjectCreatedByEvent(ObjectCreatedEvent): + ... "A created event that tells you who created the object." + ... def __init__(self, object, created_by): + ... super(ObjectCreatedByEvent, self).__init__(object) + ... self.created_by = created_by + + >>> obj = object() + >>> notify(ObjectCreatedByEvent(obj, "Black Night")) diff --git a/thesisenv/lib/python3.6/site-packages/zope/lifecycleevent/tests.py b/thesisenv/lib/python3.6/site-packages/zope/lifecycleevent/tests.py new file mode 100644 index 0000000..0d99530 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/lifecycleevent/tests.py @@ -0,0 +1,296 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. 
+# +############################################################################## +"""Object Event Tests +""" +import doctest +import unittest + +from zope import interface + +from zope.component import testing +from zope.testing import module + +from zope.lifecycleevent import ObjectCreatedEvent, created +from zope.lifecycleevent import Attributes, Sequence +from zope.lifecycleevent import ObjectModifiedEvent, modified +from zope.lifecycleevent import ObjectCopiedEvent, copied +from zope.lifecycleevent import ObjectMovedEvent, moved +from zope.lifecycleevent import ObjectRemovedEvent, removed +from zope.lifecycleevent import ObjectAddedEvent, added + +from zope.interface.verify import verifyObject +from zope.interface.verify import verifyClass + + +class Context(object): + pass + + +class _AbstractListenerCase(object): + + def setUp(self): + super(_AbstractListenerCase, self).setUp() + from zope.event import subscribers + self._old_subscribers = subscribers[:] + self.listener = [] + subscribers[:] = [self.listener.append] + + def tearDown(self): + from zope.event import subscribers + subscribers[:] = self._old_subscribers + super(_AbstractListenerCase, self).tearDown() + + +class _AbstractEventCase(_AbstractListenerCase): + + klass = None + object = object() + notifier = None + + def _getTargetClass(self): + return self.klass + + def _getInitArgs(self): + return (self.object,) + + def _makeOne(self): + return self._getTargetClass()(*self._getInitArgs()) + + def setUp(self): + super(_AbstractEventCase, self).setUp() + self.event = self._makeOne() + + def testGetObject(self): + self.assertEqual(self.event.object, self.object) + + def test_verifyObject(self): + iface = list(interface.providedBy(self.event).flattened())[0] + verifyObject(iface, self.event) + + def test_verifyClass(self): + iface = list(interface.implementedBy(type(self.event)).flattened())[0] + verifyClass(iface, self._getTargetClass()) + + def test_notify(self): + notifier = type(self).notifier + 
try: + notifier = notifier.__func__ + except AttributeError: + pass # Python 3 + notifier(*self._getInitArgs()) + self.assertEqual(len(self.listener), 1) + self.assertEqual(self.listener[-1].object, self.object) + return self.listener[-1] + + +class TestSequence(unittest.TestCase): + + def testSequence(self): + + from zope.interface import Interface, Attribute + + class ISample(Interface): + field1 = Attribute("A test field") + field2 = Attribute("A test field") + field3 = Attribute("A test field") + + desc = Sequence(ISample, 'field1', 'field2') + self.assertEqual(desc.interface, ISample) + self.assertEqual(desc.keys, ('field1', 'field2')) + + +class TestAttributes(unittest.TestCase): + + def testAttributes(self): + from zope.lifecycleevent.interfaces import IObjectMovedEvent + desc = Attributes(IObjectMovedEvent, "newName", "newParent") + self.assertEqual(desc.interface, IObjectMovedEvent) + self.assertEqual(desc.attributes, ('newName', 'newParent')) + + +class TestObjectCreatedEvent(_AbstractEventCase, + unittest.TestCase): + + klass = ObjectCreatedEvent + notifier = created + +class TestObjectModifiedEvent(_AbstractEventCase, + unittest.TestCase): + + klass = ObjectModifiedEvent + notifier = modified + + def testAttributes(self): + from zope.interface import implementer, Interface, Attribute + + class ISample(Interface): + field = Attribute("A test field") + + @implementer(ISample) + class Sample(object): + pass + obj = Sample() + obj.field = 42 + attrs = Attributes(ISample, "field") + + modified(obj, attrs) + self.assertEqual(self.listener[-1].object, obj) + self.assertEqual(self.listener[-1].descriptions, (attrs,)) + + +class TestObjectCopiedEvent(_AbstractEventCase, + unittest.TestCase): + + klass = ObjectCopiedEvent + original = object() + notifier = copied + + def _getInitArgs(self): + return (self.object, self.original) + + def test_notify(self): + delivered = super(TestObjectCopiedEvent, self).test_notify() + self.assertEqual(delivered.original, 
self.original) + + +class TestObjectMovedEvent(_AbstractEventCase, + unittest.TestCase): + + klass = ObjectMovedEvent + object = Context() + old_parent = Context() + new_parent = Context() + notifier = moved + + def _getInitArgs(self): + return (self.object, + self.old_parent, 'old_name', + self.new_parent, 'new_name') + + def test_it(self): + event = self.event + self.assertEqual(event.object, self.object) + self.assertEqual(event.oldParent, self.old_parent) + self.assertEqual(event.newParent, self.new_parent) + self.assertEqual(event.newName, 'new_name') + self.assertEqual(event.oldName, 'old_name') + + +class TestObjectAddedEvent(_AbstractEventCase, + unittest.TestCase): + + klass = ObjectAddedEvent + parent = Context() + name = 'new_name' + notifier = added + + def _getInitArgs(self): + return (self.object, self.parent, self.name) + + def test_it(self): + ob = self.object + new_parent = self.parent + event = self.event + self.assertEqual(event.object, ob) + self.assertEqual(event.newParent, new_parent) + self.assertEqual(event.newName, 'new_name') + self.assertEqual(event.oldParent, None) + self.assertEqual(event.oldName, None) + + def test_it_Nones(self): + self.object = ob = Context() + new_parent = Context() + self.parent = None + self.name = None + ob.__parent__ = new_parent + ob.__name__ = 'new_name' + event = self._makeOne() + self.assertEqual(event.object, ob) + self.assertEqual(event.newParent, new_parent) + self.assertEqual(event.newName, 'new_name') + self.assertEqual(event.oldParent, None) + self.assertEqual(event.oldName, None) + + +class TestObjectRemovedEvent(_AbstractEventCase, + unittest.TestCase): + + klass = ObjectRemovedEvent + old_parent = Context() + name = 'name' + notifier = removed + + def _getInitArgs(self): + return (self.object, self.old_parent, self.name) + + def test_it(self): + ob = self.object + parent = self.old_parent + event = self.event + self.assertEqual(event.object, ob) + self.assertEqual(event.newParent, None) + 
self.assertEqual(event.newName, None) + self.assertEqual(event.oldParent, parent) + self.assertEqual(event.oldName, 'name') + + def test_it_Nones(self): + self.object = ob = Context() + parent = Context() + self.old_parent = None + self.name = None + ob.__parent__ = parent + ob.__name__ = 'name' + event = self._makeOne() + self.assertEqual(event.object, ob) + self.assertEqual(event.newParent, None) + self.assertEqual(event.newName, None) + self.assertEqual(event.oldParent, parent) + self.assertEqual(event.oldName, 'name') + + +class TestMoved(_AbstractListenerCase, + unittest.TestCase): + + def test_it(self): + moved('object', 'oldParent', 'oldName', 'newParent', 'newName') + self.assertEqual(1, len(self.listener)) + event = self.listener[0] + self.assertTrue(isinstance(event, ObjectMovedEvent)) + self.assertEqual(event.object, 'object') + self.assertEqual(event.oldParent, 'oldParent') + self.assertEqual(event.oldName, 'oldName') + self.assertEqual(event.newParent, 'newParent') + self.assertEqual(event.newName, 'newName') + +def setUp(test): + testing.setUp(test) + module.setUp(test) + +def tearDown(test): + module.tearDown(test) + testing.tearDown(test) + +def test_suite(): + return unittest.TestSuite(( + unittest.defaultTestLoader.loadTestsFromName(__name__), + doctest.DocFileSuite('README.rst'), + doctest.DocFileSuite('manual.rst'), + doctest.DocFileSuite('handling.rst', + setUp=setUp, + tearDown=tearDown, + optionflags=doctest.ELLIPSIS), + )) + +if __name__ == '__main__': + unittest.main(defaultTest='test_suite') diff --git a/thesisenv/lib/python3.6/site-packages/zope/location/__init__.py b/thesisenv/lib/python3.6/site-packages/zope/location/__init__.py new file mode 100644 index 0000000..104c1ac --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/location/__init__.py @@ -0,0 +1,20 @@ +############################################################################## +# +# Copyright (c) 2003-2009 Zope Foundation and Contributors. +# All Rights Reserved. 
+# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Locations +""" +__docformat__ = 'restructuredtext' + +from zope.location.interfaces import ILocation +from zope.location.location import Location, locate, LocationIterator +from zope.location.location import inside, LocationProxy diff --git a/thesisenv/lib/python3.6/site-packages/zope/location/configure.zcml b/thesisenv/lib/python3.6/site-packages/zope/location/configure.zcml new file mode 100644 index 0000000..94b3ce5 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/location/configure.zcml @@ -0,0 +1,23 @@ + + + + + + + + + + + + + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/location/interfaces.py b/thesisenv/lib/python3.6/site-packages/zope/location/interfaces.py new file mode 100644 index 0000000..b490a09 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/location/interfaces.py @@ -0,0 +1,132 @@ +############################################################################## +# +# Copyright (c) 2003-2009 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. 
+# +############################################################################## +"""Location framework interfaces +""" +__docformat__ = 'restructuredtext' + +from zope.interface import Interface +from zope.interface import Attribute +from zope.schema import TextLine + + +class ILocation(Interface): + """Objects that can be located in a hierachy. + + Given a parent and a name an object can be located within that parent. The + locatable object's `__name__` and `__parent__` attributes store this + information. + + Located objects form a hierarchy that can be used to build file-system-like + structures. For example in Zope `ILocation` is used to build URLs and to + support security machinery. + + To retrieve an object from its parent using its name, the `ISublocation` + interface provides the `sublocations` method to iterate over all objects + located within the parent. The object searched for can be found by reading + each sublocation's __name__ attribute. + + """ + + __parent__ = Attribute("The parent in the location hierarchy.") + + __name__ = TextLine( + title=(u"The name within the parent"), + description=(u"The object can be looked up from the parent's " + u"sublocations using this name."), + required=False, + default=None) + +# The IContained interface was moved from zope.container to here in +# zope.container 3.8.2 to break dependency cycles. It is not actually +# used within this package, but is depended upon by external +# consumers. + +class IContained(ILocation): + """Objects contained in containers.""" + +class ILocationInfo(Interface): + """Provides supplemental information for located objects. + + Requires that the object has been given a location in a hierarchy. + + """ + + def getRoot(): + """Return the root object of the hierarchy.""" + + def getPath(): + """Return the physical path to the object as a string. + + Uses '/' as the path segment separator. + + """ + + def getParent(): + """Returns the container the object was traversed via. 
+ + Returns None if the object is a containment root. + Raises TypeError if the object doesn't have enough context to get the + parent. + + """ + + def getParents(): + """Returns a list starting with the object's parent followed by + each of its parents. + + Raises a TypeError if the object is not connected to a containment + root. + + """ + + def getName(): + """Return the last segment of the physical path.""" + + def getNearestSite(): + """Return the site the object is contained in + + If the object is a site, the object itself is returned. + + """ + + +class ISublocations(Interface): + """Provide access to sublocations of an object. + + All objects with the same parent object are called the ``sublocations`` of + that parent. + + """ + + def sublocations(): + """Return an iterable of the object's sublocations.""" + + +class IRoot(Interface): + """Marker interface to designate root objects within a location hierarchy. + """ + + +class LocationError(KeyError, LookupError): + """There is no object for a given location.""" + +# Soft dependency on zope.component. +# +# Also, these interfaces used to be defined here directly, so this provides +# backward-compatibility +try: + from zope.component.interfaces import ISite +except ImportError: # pragma: no cover + class ISite(Interface): + pass diff --git a/thesisenv/lib/python3.6/site-packages/zope/location/location.py b/thesisenv/lib/python3.6/site-packages/zope/location/location.py new file mode 100644 index 0000000..3554c51 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/location/location.py @@ -0,0 +1,126 @@ +############################################################################## +# +# Copyright (c) 2003-2009 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Location support +""" +__docformat__ = 'restructuredtext' + +from zope.interface import implementer +from zope.proxy import ProxyBase +from zope.proxy import getProxiedObject +from zope.proxy import non_overridable +from zope.proxy.decorator import DecoratorSpecificationDescriptor + +from zope.location.interfaces import ILocation + +@implementer(ILocation) +class Location(object): + """Mix-in that implements ILocation. + + It provides the `__parent__` and `__name__` attributes. + """ + + __parent__ = None + __name__ = None + + +def locate(obj, parent, name=None): + """Update a location's coordinates.""" + obj.__parent__ = parent + obj.__name__ = name + + +def located(obj, parent, name=None): + """Ensure and return the location of an object. + + Updates the location's coordinates. + """ + location = ILocation(obj) + locate(location, parent, name) + return location + + +def LocationIterator(object): + """Iterate over an object and all of its parents.""" + while object is not None: + yield object + object = getattr(object, '__parent__', None) + + +def inside(l1, l2): + """Test whether l1 is a successor of l2. + + l1 is a successor of l2 if l2 is in the chain of parents of l1 or l2 + is l1. 
+ + """ + while l1 is not None: + if l1 is l2: + return True + l1 = getattr(l1, '__parent__', None) + return False + +class ClassAndInstanceDescr(object): + + def __init__(self, *args): + self.funcs = args + + def __get__(self, inst, cls): + if inst is None: + return self.funcs[1](cls) + return self.funcs[0](inst) + + +@implementer(ILocation) +class LocationProxy(ProxyBase): + """Location-object proxy + + This is a non-picklable proxy that can be put around objects that + don't implement `ILocation`. + """ + __slots__ = ('__parent__', '__name__') + __safe_for_unpickling__ = True + + __doc__ = ClassAndInstanceDescr( + lambda inst: getProxiedObject(inst).__doc__, + lambda cls, __doc__ = __doc__: __doc__, + ) + + def __new__(self, ob, container=None, name=None): + return ProxyBase.__new__(self, ob) + + def __init__(self, ob, container=None, name=None): + ProxyBase.__init__(self, ob) + self.__parent__ = container + self.__name__ = name + + def __getattribute__(self, name): + if name in LocationProxy.__dict__: + return object.__getattribute__(self, name) + return ProxyBase.__getattribute__(self, name) + + def __setattr__(self, name, value): + if name in self.__slots__ + getattr(ProxyBase, '__slots__', ()): + #('_wrapped', '__parent__', '__name__'): + try: + return object.__setattr__(self, name, value) + except TypeError: #pragma NO COVER C Optimization + return ProxyBase.__setattr__(self, name, value) + return ProxyBase.__setattr__(self, name, value) + + @non_overridable + def __reduce__(self, proto=None): + raise TypeError("Not picklable") + __reduce_ex__ = __reduce__ + + __providedBy__ = DecoratorSpecificationDescriptor() diff --git a/thesisenv/lib/python3.6/site-packages/zope/location/pickling.py b/thesisenv/lib/python3.6/site-packages/zope/location/pickling.py new file mode 100644 index 0000000..46e6217 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/location/pickling.py @@ -0,0 +1,39 @@ 
+############################################################################## +# +# Copyright (c) 2003-2009 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Location copying/pickling support +""" +__docformat__ = 'restructuredtext' + +from zope.interface import implementer +from zope.location.location import inside + +try: + from zope.copy.interfaces import ICopyHook, ResumeCopy +except ImportError: # pragma: no cover + raise NotImplementedError("zope.location.pickling is not supported " + "because zope.copy is not available") + + +@implementer(ICopyHook) +class LocationCopyHook(object): + """Copy hook to preserve copying referenced objects that are not + located inside object that's being copied. 
+ """ + def __init__(self, context): + self.context = context + + def __call__(self, toplevel, register): + if not inside(self.context, toplevel): + return self.context + raise ResumeCopy diff --git a/thesisenv/lib/python3.6/site-packages/zope/location/tests/__init__.py b/thesisenv/lib/python3.6/site-packages/zope/location/tests/__init__.py new file mode 100644 index 0000000..d3173e6 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/location/tests/__init__.py @@ -0,0 +1 @@ +#package diff --git a/thesisenv/lib/python3.6/site-packages/zope/location/tests/test_configure.py b/thesisenv/lib/python3.6/site-packages/zope/location/tests/test_configure.py new file mode 100644 index 0000000..ec31efb --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/location/tests/test_configure.py @@ -0,0 +1,37 @@ +############################################################################## +# +# Copyright (c) 2003-2009 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. 
+# +############################################################################## +"""Test ZCML loading +""" +import unittest + +class Test_ZCML_loads(unittest.TestCase): + + def test_it(self): + import zope.component # no registrations made if not present + ADAPTERS_REGISTERED = 4 + from zope.configuration.xmlconfig import _clearContext + from zope.configuration.xmlconfig import _getContext + from zope.configuration.xmlconfig import XMLConfig + import zope.location + + _clearContext() + context = _getContext() + XMLConfig('configure.zcml', zope.location) + adapters = ([x for x in context.actions + if x['discriminator'] is not None]) + self.assertEqual(len(adapters), ADAPTERS_REGISTERED) + + +def test_suite(): + return unittest.defaultTestLoader.loadTestsFromName(__name__) diff --git a/thesisenv/lib/python3.6/site-packages/zope/location/tests/test_location.py b/thesisenv/lib/python3.6/site-packages/zope/location/tests/test_location.py new file mode 100644 index 0000000..ea60099 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/location/tests/test_location.py @@ -0,0 +1,408 @@ +############################################################################## +# +# Copyright (c) 2012 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. 
+# +############################################################################## +import unittest + + +class ConformsToILocation(object): + + def test_class_conforms_to_ILocation(self): + from zope.interface.verify import verifyClass + from zope.location.interfaces import ILocation + verifyClass(ILocation, self._getTargetClass()) + + def test_instance_conforms_to_ILocation(self): + from zope.interface.verify import verifyObject + from zope.location.interfaces import ILocation + verifyObject(ILocation, self._makeOne()) + + +class LocationTests(unittest.TestCase, ConformsToILocation): + + def _getTargetClass(self): + from zope.location.location import Location + return Location + + def _makeOne(self): + return self._getTargetClass()() + + def test_ctor(self): + loc = self._makeOne() + self.assertEqual(loc.__parent__, None) + self.assertEqual(loc.__name__, None) + + +class Test_locate(unittest.TestCase): + + def _callFUT(self, obj, *args, **kw): + from zope.location.location import locate + return locate(obj, *args, **kw) + + def test_wo_name(self): + class Dummy(object): + pass + parent = Dummy() + dummy = Dummy() + self._callFUT(dummy, parent) + self.assertTrue(dummy.__parent__ is parent) + self.assertEqual(dummy.__name__, None) + + def test_w_name(self): + class Dummy(object): + pass + parent = Dummy() + dummy = Dummy() + self._callFUT(dummy, parent, 'name') + self.assertTrue(dummy.__parent__ is parent) + self.assertEqual(dummy.__name__, 'name') + + +class Test_located(unittest.TestCase): + + def _callFUT(self, obj, *args, **kw): + from zope.location.location import located + return located(obj, *args, **kw) + + def test_wo_name_obj_implements_ILocation(self): + from zope.interface import implementer + from zope.location.interfaces import ILocation + @implementer(ILocation) + class Dummy(object): + __parent__ = None + __name__ = object() + parent = Dummy() + dummy = Dummy() + self._callFUT(dummy, parent) + self.assertTrue(dummy.__parent__ is parent) + 
self.assertEqual(dummy.__name__, None) + + def test_w_name_adaptable_to_ILocation(self): + from zope.interface.interface import adapter_hooks + from zope.location.interfaces import ILocation + _hooked = [] + def _hook(iface, obj): + _hooked.append((iface, obj)) + return obj + class Dummy(object): + pass + parent = Dummy() + dummy = Dummy() + before = adapter_hooks[:] + adapter_hooks.insert(0, _hook) + try: + self._callFUT(dummy, parent, 'name') + finally: + adapter_hooks[:] = before + self.assertTrue(dummy.__parent__ is parent) + self.assertEqual(dummy.__name__, 'name') + self.assertEqual(len(_hooked), 1) + self.assertEqual(_hooked[0], (ILocation, dummy)) + + def test_wo_name_not_adaptable_to_ILocation(self): + class Dummy(object): + __parent__ = None + __name__ = 'before' + parent = Dummy() + dummy = Dummy() + self.assertRaises(TypeError, self._callFUT, dummy, parent, 'name') + self.assertEqual(dummy.__parent__, None) + self.assertEqual(dummy.__name__, 'before') + + +class Test_LocationIterator(unittest.TestCase): + + def _callFUT(self, obj): + from zope.location.location import LocationIterator + return LocationIterator(obj) + + def test_w_None(self): + self.assertEqual(list(self._callFUT(None)), []) + + def test_w_non_location_object(self): + island = object() + self.assertEqual(list(self._callFUT(island)), [island]) + + def test_w_isolated_location_object(self): + class Dummy(object): + __parent__ = None + __name__ = 'before' + island = Dummy() + self.assertEqual(list(self._callFUT(island)), [island]) + + def test_w_nested_location_object(self): + class Dummy(object): + __parent__ = None + __name__ = 'before' + parent = Dummy() + child = Dummy() + child.__parent__ = parent + grand = Dummy() + grand.__parent__ = child + self.assertEqual(list(self._callFUT(grand)), [grand, child, parent]) + + +class Test_inside(unittest.TestCase): + + def _callFUT(self, i1, i2): + from zope.location.location import inside + return inside(i1, i2) + + def 
test_w_non_location_objects(self): + island = object() + atoll = object() + self.assertTrue(self._callFUT(island, island)) + self.assertFalse(self._callFUT(island, atoll)) + self.assertFalse(self._callFUT(atoll, island)) + self.assertTrue(self._callFUT(atoll, atoll)) + + def test_w_isolated_location_objects(self): + class Dummy(object): + __parent__ = None + __name__ = 'before' + island = Dummy() + atoll = Dummy() + self.assertTrue(self._callFUT(island, island)) + self.assertFalse(self._callFUT(island, atoll)) + self.assertFalse(self._callFUT(atoll, island)) + self.assertTrue(self._callFUT(atoll, atoll)) + + def test_w_nested_location_object(self): + class Dummy(object): + __parent__ = None + __name__ = 'before' + parent = Dummy() + child = Dummy() + child.__parent__ = parent + grand = Dummy() + grand.__parent__ = child + self.assertTrue(self._callFUT(child, parent)) + self.assertFalse(self._callFUT(parent, child)) + self.assertTrue(self._callFUT(child, child)) + self.assertTrue(self._callFUT(grand, parent)) + self.assertFalse(self._callFUT(parent, grand)) + self.assertTrue(self._callFUT(grand, child)) + self.assertFalse(self._callFUT(child, grand)) + self.assertTrue(self._callFUT(grand, grand)) + + +class ClassAndInstanceDescrTests(unittest.TestCase): + + def _getTargetClass(self): + from zope.location.location import ClassAndInstanceDescr + return ClassAndInstanceDescr + + def _makeOne(self, _inst, _class): + return self._getTargetClass()(_inst, _class) + + def _makeScaffold(self): + _inst_called = [] + def _inst(*args, **kw): + _inst_called.append((args, kw)) + return 'INST' + _class_called = [] + def _class(*args, **kw): + _class_called.append((args, kw)) + return 'CLASS' + class Foo(object): + descr = self._makeOne(_inst, _class) + return Foo, _class_called, _inst_called + + def test_fetched_from_class(self): + Foo, _class_called, _inst_called = self._makeScaffold() + self.assertEqual(Foo.descr, 'CLASS') + self.assertEqual(_class_called, [((Foo,),{})]) + 
self.assertEqual(_inst_called, []) + + def test_fetched_from_instance(self): + Foo, _class_called, _inst_called = self._makeScaffold() + foo = Foo() + self.assertEqual(foo.descr, 'INST') + self.assertEqual(_class_called, []) + self.assertEqual(_inst_called, [((foo,),{})]) + + +_MARKER = object() + + +class LocationProxyTests(unittest.TestCase, ConformsToILocation): + + def _getTargetClass(self): + from zope.location.location import LocationProxy + return LocationProxy + + def _makeOne(self, obj=None, container=_MARKER, name=_MARKER): + if obj is None: + obj = object() + if container is _MARKER: + self.assertIs(name, _MARKER) + return self._getTargetClass()(obj) + self.assertIsNot(name, _MARKER) + return self._getTargetClass()(obj, container, name) + + def test_ctor_defaults(self): + dummy = object() # can't setattr + proxy = self._makeOne(dummy) + self.assertEqual(proxy.__parent__, None) + self.assertEqual(proxy.__name__, None) + + def test_ctor_explicit(self): + dummy = object() # can't setattr + parent = object() + proxy = self._makeOne(dummy, parent, 'name') + self.assertTrue(proxy.__parent__ is parent) + self.assertEqual(proxy.__name__, 'name') + + def test___getattribute___wrapped(self): + class Context(object): + attr = 'ATTR' + context = Context() + proxy = self._makeOne(context) + self.assertEqual(proxy.attr, 'ATTR') + + def test___setattr___wrapped(self): + class Context(object): + attr = 'BEFORE' + context = Context() + proxy = self._makeOne(context) + proxy.attr = 'AFTER' + self.assertEqual(context.attr, 'AFTER') + + def test___doc___from_derived_class(self): + klass = self._getTargetClass() + class Derived(klass): + """DERIVED""" + self.assertEqual(Derived.__doc__, 'DERIVED') + + def test___doc___from_target_class(self): + klass = self._getTargetClass() + class Context(object): + """CONTEXT""" + proxy = self._makeOne(Context()) + self.assertEqual(proxy.__doc__, 'CONTEXT') + + def test___doc___from_target_instance(self): + klass = self._getTargetClass() 
+ class Context(object): + """CONTEXT""" + context = Context() + context.__doc__ = 'INSTANCE' + proxy = self._makeOne(context) + self.assertEqual(proxy.__doc__, 'INSTANCE') + + def test___reduce__(self): + proxy = self._makeOne() + self.assertRaises(TypeError, proxy.__reduce__) + + def test___reduce_ex__(self): + proxy = self._makeOne() + self.assertRaises(TypeError, proxy.__reduce_ex__, 1) + + def test___reduce___via_pickling(self): + import pickle + class Context(object): + def __reduce__(self): + raise AssertionError("This is not called") + proxy = self._makeOne(Context()) + # XXX: this TypeError is not due to LocationProxy.__reduce__: + # it's descriptor (under pure Python) isn't begin triggered + # properly + self.assertRaises(TypeError, pickle.dumps, proxy) + + def test__providedBy___class(self): + from zope.interface import Interface + from zope.interface import implementer + from zope.interface import providedBy + from zope.interface import provider + class IProxyFactory(Interface): + pass + class IProxy(Interface): + pass + @provider(IProxyFactory) + @implementer(IProxy) + class Foo(self._getTargetClass()): + pass + self.assertEqual(list(providedBy(Foo)), [IProxyFactory]) + + def test__providedBy___instance(self): + from zope.interface import Interface + from zope.interface import implementer + from zope.interface import providedBy + from zope.interface import provider + from zope.location.interfaces import ILocation + class IProxyFactory(Interface): + pass + class IProxy(Interface): + pass + class IContextFactory(Interface): + pass + class IContext(Interface): + pass + @provider(IProxyFactory) + @implementer(IProxy) + class Proxy(self._getTargetClass()): + pass + @provider(IContextFactory) + @implementer(IContext) + class Context(object): + pass + context = Context() + proxy = Proxy(context) + self.assertEqual(list(providedBy(proxy)), [IContext, IProxy, ILocation]) + + +class LocationPyProxyTests(LocationProxyTests): + + def setUp(self): + import sys + 
for mod in ('zope.location.location', + 'zope.proxy.decorator'): + try: + del sys.modules[mod] + except KeyError: # pragma: no cover + pass + import zope.proxy + self.orig = (zope.proxy.ProxyBase, + zope.proxy.getProxiedObject, + zope.proxy.setProxiedObject, + zope.proxy.isProxy, + zope.proxy.sameProxiedObjects, + zope.proxy.queryProxy, + zope.proxy.queryInnerProxy, + zope.proxy.removeAllProxies, + zope.proxy.non_overridable) + zope.proxy.ProxyBase = zope.proxy.PyProxyBase + zope.proxy.getProxiedObject = zope.proxy.py_getProxiedObject + zope.proxy.setProxiedObject = zope.proxy.py_setProxiedObject + zope.proxy.isProxy = zope.proxy.py_isProxy + zope.proxy.sameProxiedObjects = zope.proxy.py_sameProxiedObjects + zope.proxy.queryProxy = zope.proxy.py_queryProxy + zope.proxy.queryInnerProxy = zope.proxy.py_queryInnerProxy + zope.proxy.removeAllProxies = zope.proxy.py_removeAllProxies + zope.proxy.non_overridable = zope.proxy.PyNonOverridable + + + def tearDown(self): + import zope.proxy + (zope.proxy.ProxyBase, + zope.proxy.getProxiedObject, + zope.proxy.setProxiedObject, + zope.proxy.isProxy, + zope.proxy.sameProxiedObjects, + zope.proxy.queryProxy, + zope.proxy.queryInnerProxy, + zope.proxy.removeAllProxies, + zope.proxy.non_overridable) = self.orig + + +def test_suite(): + return unittest.defaultTestLoader.loadTestsFromName(__name__) diff --git a/thesisenv/lib/python3.6/site-packages/zope/location/tests/test_pickling.py b/thesisenv/lib/python3.6/site-packages/zope/location/tests/test_pickling.py new file mode 100644 index 0000000..063c9f6 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/location/tests/test_pickling.py @@ -0,0 +1,61 @@ +############################################################################## +# +# Copyright (c) 2012 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +import unittest + + +import zope.copy + +class LocationCopyHookTests(unittest.TestCase): + + def _getTargetClass(self): + from zope.location.pickling import LocationCopyHook + return LocationCopyHook + + def _makeOne(self, obj=None): + if obj is None: + obj = object() + return self._getTargetClass()(obj) + + def test_class_conforms_to_ICopyHook(self): + from zope.interface.verify import verifyClass + from zope.copy.interfaces import ICopyHook + verifyClass(ICopyHook, self._getTargetClass()) + + def test_instance_conforms_to_ICopyHook(self): + from zope.interface.verify import verifyObject + from zope.copy.interfaces import ICopyHook + verifyObject(ICopyHook, self._makeOne()) + + def test___call___w_context_inside_toplevel(self): + from zope.copy.interfaces import ResumeCopy + class Dummy(object): + __parent__ = __name__ = None + top_level = Dummy() + context = Dummy() + context.__parent__ = top_level + hook = self._makeOne(context) + self.assertRaises(ResumeCopy, hook, top_level, object()) + + def test___call___w_context_outside_toplevel(self): + class Dummy(object): + __parent__ = __name__ = None + top_level = Dummy() + context = Dummy() + hook = self._makeOne(context) + self.assertTrue(hook(top_level, object()) is context) + + + +def test_suite(): + return unittest.defaultTestLoader.loadTestsFromName(__name__) diff --git a/thesisenv/lib/python3.6/site-packages/zope/location/tests/test_traversing.py b/thesisenv/lib/python3.6/site-packages/zope/location/tests/test_traversing.py new file mode 100644 index 0000000..ed82e4b --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/location/tests/test_traversing.py @@ -0,0 +1,307 @@ 
+############################################################################## +# +# Copyright (c) 2012 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +import unittest + + +class ConformsToILocationInfo(object): + + def test_class_conforms_to_ILocationInfo(self): + from zope.interface.verify import verifyClass + from zope.location.interfaces import ILocationInfo + verifyClass(ILocationInfo, self._getTargetClass()) + + def test_instance_conforms_to_ILocationInfo(self): + from zope.interface.verify import verifyObject + from zope.location.interfaces import ILocationInfo + verifyObject(ILocationInfo, self._makeOne()) + + +class LocationPhysicallyLocatableTests( + unittest.TestCase, ConformsToILocationInfo): + + def _getTargetClass(self): + from zope.location.traversing import LocationPhysicallyLocatable + return LocationPhysicallyLocatable + + def _makeOne(self, obj=None): + if obj is None: + obj = object() + return self._getTargetClass()(obj) + + def test_getRoot_not_location_aware(self): + proxy = self._makeOne(object()) + self.assertRaises(AttributeError, proxy.getRoot) + + def test_getRoot_location_but_no_IRoot(self): + class Dummy(object): + __parent__ = None + proxy = self._makeOne(Dummy()) + self.assertRaises(TypeError, proxy.getRoot) + + def test_getRoot_wo_cycle(self): + from zope.interface import directlyProvides + from zope.location.interfaces import IRoot + class Dummy(object): + __parent__ = None + one = Dummy() + directlyProvides(one, IRoot) + two = Dummy() + 
two.__parent__ = one + three = Dummy() + three.__parent__ = two + proxy = self._makeOne(three) + self.assertTrue(proxy.getRoot() is one) + + def test_getRoot_w_cycle(self): + class Dummy(object): + __parent__ = None + one = Dummy() + two = Dummy() + two.__parent__ = one + three = Dummy() + three.__parent__ = two + one.__parent__ = three + proxy = self._makeOne(two) + self.assertRaises(TypeError, proxy.getRoot) + + def test_getPath_not_location_aware(self): + proxy = self._makeOne(object()) + self.assertRaises(AttributeError, proxy.getPath) + + def test_getPath_location_but_no_IRoot(self): + class Dummy(object): + __parent__ = __name__ = None + proxy = self._makeOne(Dummy()) + self.assertRaises(TypeError, proxy.getPath) + + def test_getPath_at_root(self): + from zope.interface import directlyProvides + from zope.location.interfaces import IRoot + class Dummy(object): + __parent__ = __name__ = None + one = Dummy() + directlyProvides(one, IRoot) + proxy = self._makeOne(one) + self.assertEqual(proxy.getPath(), '/') + + def test_getPath_wo_cycle(self): + from zope.interface import directlyProvides + from zope.location.interfaces import IRoot + class Dummy(object): + __parent__ = __name__ = None + one = Dummy() + directlyProvides(one, IRoot) + two = Dummy() + two.__parent__ = one + two.__name__ = 'two' + three = Dummy() + three.__parent__ = two + three.__name__ = 'three' + proxy = self._makeOne(three) + self.assertEqual(proxy.getPath(), '/two/three') + + def test_getPath_w_cycle(self): + class Dummy(object): + __parent__ = __name__ = None + one = Dummy() + two = Dummy() + two.__parent__ = one + two.__name__ = 'two' + three = Dummy() + three.__parent__ = two + three.__name__ = 'three' + one.__parent__ = three + proxy = self._makeOne(two) + self.assertRaises(TypeError, proxy.getPath) + + def test_getParent_not_location_aware(self): + proxy = self._makeOne(object()) + self.assertRaises(TypeError, proxy.getParent) + + def test_getParent_location_but_no_IRoot(self): + class 
Dummy(object): + __parent__ = __name__ = None + proxy = self._makeOne(Dummy()) + self.assertRaises(TypeError, proxy.getParent) + + def test_getParent_at_root(self): + from zope.interface import directlyProvides + from zope.location.interfaces import IRoot + class Dummy(object): + __parent__ = __name__ = None + one = Dummy() + directlyProvides(one, IRoot) + proxy = self._makeOne(one) + self.assertRaises(TypeError, proxy.getParent) + + def test_getParent_wo_cycle(self): + from zope.interface import directlyProvides + from zope.location.interfaces import IRoot + class Dummy(object): + __parent__ = __name__ = None + one = Dummy() + directlyProvides(one, IRoot) + two = Dummy() + two.__parent__ = one + two.__name__ = 'two' + three = Dummy() + three.__parent__ = two + three.__name__ = 'three' + proxy = self._makeOne(three) + self.assertTrue(proxy.getParent() is two) + + def test_getParents_not_location_aware(self): + proxy = self._makeOne(object()) + self.assertRaises(TypeError, proxy.getParents) + + def test_getParents_location_but_no_IRoot(self): + class Dummy(object): + __parent__ = __name__ = None + proxy = self._makeOne(Dummy()) + self.assertRaises(TypeError, proxy.getParents) + + def test_getParents_at_root(self): + from zope.interface import directlyProvides + from zope.location.interfaces import IRoot + class Dummy(object): + __parent__ = __name__ = None + one = Dummy() + directlyProvides(one, IRoot) + proxy = self._makeOne(one) + self.assertRaises(TypeError, proxy.getParents) + + def test_getParents_wo_cycle(self): + from zope.interface import directlyProvides + from zope.location.interfaces import IRoot + class Dummy(object): + __parent__ = __name__ = None + one = Dummy() + directlyProvides(one, IRoot) + two = Dummy() + two.__parent__ = one + two.__name__ = 'two' + three = Dummy() + three.__parent__ = two + three.__name__ = 'three' + proxy = self._makeOne(three) + self.assertEqual(proxy.getParents(), [two, one]) + + def test_getName_not_location_aware(self): + 
proxy = self._makeOne(object()) + self.assertRaises(AttributeError, proxy.getName) + + def test_getName_location(self): + class Dummy(object): + __name__ = None + proxy = self._makeOne(Dummy()) + self.assertEqual(proxy.getName(), None) + + def test_getName_location_w_name(self): + class Dummy(object): + __name__ = 'name' + proxy = self._makeOne(Dummy()) + self.assertEqual(proxy.getName(), 'name') + + def test_getNearestSite_context_is_site(self): + from zope.location.interfaces import ISite # zope.component, if present + from zope.interface import directlyProvides + class Dummy(object): + pass + context = Dummy() + directlyProvides(context, ISite) + proxy = self._makeOne(context) + self.assertTrue(proxy.getNearestSite() is context) + + def test_getNearestSite_ancestor_is_site(self): + from zope.location.interfaces import ISite # zope.component, if present + from zope.interface import directlyProvides + from zope.location.interfaces import IRoot + class Dummy(object): + pass + one = Dummy() + directlyProvides(one, (ISite, IRoot)) + two = Dummy() + two.__parent__ = one + two.__name__ = 'two' + three = Dummy() + three.__parent__ = two + three.__name__ = 'three' + proxy = self._makeOne(three) + self.assertTrue(proxy.getNearestSite() is one) + + def test_getNearestSite_no_site(self): + from zope.interface import directlyProvides + from zope.location.interfaces import IRoot + class Dummy(object): + __parent__ = __name__ = None + one = Dummy() + directlyProvides(one, IRoot) + two = Dummy() + two.__parent__ = one + two.__name__ = 'two' + three = Dummy() + three.__parent__ = two + three.__name__ = 'three' + proxy = self._makeOne(three) + self.assertTrue(proxy.getNearestSite() is one) + + +class RootPhysicallyLocatableTests( + unittest.TestCase, ConformsToILocationInfo): + + def _getTargetClass(self): + from zope.location.traversing import RootPhysicallyLocatable + return RootPhysicallyLocatable + + def _makeOne(self, obj=None): + if obj is None: + obj = object() + return 
self._getTargetClass()(obj) + + def test_getRoot(self): + context = object() + proxy = self._makeOne(context) + self.assertTrue(proxy.getRoot() is context) + + def test_getPath(self): + context = object() + proxy = self._makeOne(context) + self.assertEqual(proxy.getPath(), '/') + + def test_getParent(self): + context = object() + proxy = self._makeOne(context) + self.assertEqual(proxy.getParent(), None) + + def test_getParents(self): + context = object() + proxy = self._makeOne(context) + self.assertEqual(proxy.getParents(), []) + + def test_getName(self): + context = object() + proxy = self._makeOne(context) + self.assertEqual(proxy.getName(), '') + + def test_getNearestSite(self): + context = object() + proxy = self._makeOne(context) + self.assertTrue(proxy.getNearestSite() is context) + + +def test_suite(): + return unittest.TestSuite(( + unittest.makeSuite(LocationPhysicallyLocatableTests), + unittest.makeSuite(RootPhysicallyLocatableTests), + )) diff --git a/thesisenv/lib/python3.6/site-packages/zope/location/traversing.py b/thesisenv/lib/python3.6/site-packages/zope/location/traversing.py new file mode 100644 index 0000000..2e6836c --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/location/traversing.py @@ -0,0 +1,152 @@ +############################################################################## +# +# Copyright (c) 2003-2009 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. 
+# +############################################################################## +"""Classes to support implenting IContained +""" +__docformat__ = 'restructuredtext' + +from zope.interface import implementer + +from zope.location.interfaces import ILocationInfo +from zope.location.interfaces import IRoot +from zope.location.interfaces import ISite # zope.component, if present + + +@implementer(ILocationInfo) +class LocationPhysicallyLocatable(object): + """Provide location information for location objects + """ + def __init__(self, context): + self.context = context + + def getRoot(self): + """See ILocationInfo. + """ + context = self.context + max = 9999 + while context is not None: + if IRoot.providedBy(context): + return context + context = context.__parent__ + max -= 1 + if max < 1: + raise TypeError("Maximum location depth exceeded, " + "probably due to a a location cycle.") + + raise TypeError("Not enough context to determine location root") + + def getPath(self): + """See ILocationInfo. + """ + path = [] + context = self.context + max = 9999 + while context is not None: + if IRoot.providedBy(context): + if path: + path.append('') + path.reverse() + return u'/'.join(path) + return u'/' + path.append(context.__name__) + context = context.__parent__ + max -= 1 + if max < 1: + raise TypeError("Maximum location depth exceeded, " + "probably due to a a location cycle.") + + raise TypeError("Not enough context to determine location root") + + def getParent(self): + """See ILocationInfo. + """ + parent = getattr(self.context, '__parent__', None) + if parent is not None: + return parent + + raise TypeError('Not enough context information to get parent', + self.context) + + def getParents(self): + """See ILocationInfo. + """ + # XXX Merge this implementation with getPath. This was refactored + # from zope.traversing. 
+ parents = [] + w = self.context + while 1: + w = getattr(w, '__parent__', None) + if w is None: + break + parents.append(w) + + if parents and IRoot.providedBy(parents[-1]): + return parents + + raise TypeError("Not enough context information to get all parents") + + def getName(self): + """See ILocationInfo + """ + return self.context.__name__ + + def getNearestSite(self): + """See ILocationInfo + """ + if ISite.providedBy(self.context): + return self.context + for parent in self.getParents(): + if ISite.providedBy(parent): + return parent + return self.getRoot() + +@implementer(ILocationInfo) +class RootPhysicallyLocatable(object): + """Provide location information for the root object + + This adapter is very simple, because there's no places to search + for parents and nearest sites, so we are only working with context + object, knowing that its the root object already. + """ + def __init__(self, context): + self.context = context + + def getRoot(self): + """See ILocationInfo + """ + return self.context + + def getPath(self): + """See ILocationInfo + """ + return u'/' + + def getParent(self): + """See ILocationInfo. + """ + return None + + def getParents(self): + """See ILocationInfo + """ + return [] + + def getName(self): + """See ILocationInfo + """ + return u'' + + def getNearestSite(self): + """See ILocationInfo + """ + return self.context diff --git a/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/__init__.py b/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/__init__.py new file mode 100644 index 0000000..8a804a4 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/__init__.py @@ -0,0 +1,15 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Page Templates +""" diff --git a/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/engine.py b/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/engine.py new file mode 100644 index 0000000..138109b --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/engine.py @@ -0,0 +1,531 @@ +############################################################################## +# +# Copyright (c) 2002-2009 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## +"""Expression engine configuration and registration. + +Each expression engine can have its own expression types and base names. 
+""" +__docformat__ = 'restructuredtext' + +import sys + +from zope import component +from zope.interface import implementer +from zope.interface.interfaces import ComponentLookupError +from zope.proxy import isProxy +from zope.traversing.interfaces import IPathAdapter, ITraversable +from zope.traversing.interfaces import TraversalError +from zope.traversing.adapters import traversePathElement +from zope.security.proxy import ProxyFactory, removeSecurityProxy +from zope.i18n import translate + +try: + from zope.untrustedpython import rcompile + from zope.untrustedpython.builtins import SafeBuiltins + HAVE_UNTRUSTED = True +except ImportError: # pragma: no cover + HAVE_UNTRUSTED = False + +# PyPy doesn't support assigning to '__builtins__', even when +# using eval() (http://pypy.readthedocs.org/en/latest/cpython_differences.html), +# so don't try to use it. It won't work. +if HAVE_UNTRUSTED: + import platform + if platform.python_implementation() == 'PyPy': # pragma: no cover + HAVE_UNTRUSTED = False + del rcompile + del SafeBuiltins + +from zope.tales.expressions import PathExpr, StringExpr, NotExpr, DeferExpr +from zope.tales.expressions import SimpleModuleImporter +from zope.tales.pythonexpr import PythonExpr +from zope.tales.tales import ExpressionEngine, Context + +from zope.pagetemplate.i18n import ZopeMessageFactory as _ + +class InlineCodeError(Exception): + pass + +class ZopeTraverser(object): + + def __init__(self, proxify=None): + if proxify is None: + self.proxify = lambda x: x + else: + self.proxify = proxify + + def __call__(self, object, path_items, econtext): + """Traverses a sequence of names, first trying attributes then items. 
+ """ + request = getattr(econtext, 'request', None) + path_items = list(path_items) + path_items.reverse() + + while path_items: + name = path_items.pop() + + # special-case dicts for performance reasons + if getattr(object, '__class__', None) == dict: + object = object[name] + elif isinstance(object, dict) and not isProxy(object): + object = object[name] + else: + object = traversePathElement(object, name, path_items, + request=request) + object = self.proxify(object) + return object + +zopeTraverser = ZopeTraverser(ProxyFactory) + +class ZopePathExpr(PathExpr): + + def __init__(self, name, expr, engine): + super(ZopePathExpr, self).__init__(name, expr, engine, zopeTraverser) + +trustedZopeTraverser = ZopeTraverser() + +class TrustedZopePathExpr(PathExpr): + + def __init__(self, name, expr, engine): + super(TrustedZopePathExpr, self).__init__(name, expr, engine, + trustedZopeTraverser) + + +# Create a version of the restricted built-ins that uses a safe +# version of getattr() that wraps values in security proxies where +# appropriate: + + +class ZopePythonExpr(PythonExpr): + + if HAVE_UNTRUSTED: + + def __call__(self, econtext): + __traceback_info__ = self.text + vars = self._bind_used_names(econtext, SafeBuiltins) + return eval(self._code, vars) + + def _compile(self, text, filename): + return rcompile.compile(text, filename, 'eval') + +def _get_iinterpreter(): + from zope.app.interpreter.interfaces import IInterpreter + return IInterpreter # pragma: no cover + +class ZopeContextBase(Context): + """Base class for both trusted and untrusted evaluation contexts.""" + + request = None + + def translate(self, msgid, domain=None, mapping=None, default=None): + return translate(msgid, domain, mapping, + context=self.request, default=default) + + evaluateInlineCode = False + + def evaluateCode(self, lang, code): + if not self.evaluateInlineCode: + raise InlineCodeError( + _('Inline Code Evaluation is deactivated, which means that ' + 'you cannot have inline code 
snippets in your Page ' + 'Template. Activate Inline Code Evaluation and try again.')) + + # TODO This is only needed when self.evaluateInlineCode is true, + # so should only be needed for zope.app.pythonpage. + IInterpreter = _get_iinterpreter() + interpreter = component.queryUtility(IInterpreter, lang) + if interpreter is None: + error = _('No interpreter named "${lang_name}" was found.', + mapping={'lang_name': lang}) + raise InlineCodeError(error) + + globs = self.vars.copy() + result = interpreter.evaluateRawCode(code, globs) + # Add possibly new global variables. + old_names = self.vars.keys() + for name, value in globs.items(): + if name not in old_names: + self.setGlobal(name, value) + return result + + +class ZopeContext(ZopeContextBase): + """Evaluation context for untrusted programs.""" + + def evaluateMacro(self, expr): + """evaluateMacro gets security-proxied macro programs when this + is run with the zopeTraverser, and in other untrusted + situations. This will cause evaluation to fail in + zope.tal.talinterpreter, which knows nothing of security proxies. + Therefore, this method removes any proxy from the evaluated + expression. + + >>> from zope.pagetemplate.engine import ZopeContext + >>> from zope.tales.tales import ExpressionEngine + >>> from zope.security.proxy import ProxyFactory + >>> output = [('version', 'xxx'), ('mode', 'html'), ('other', 'things')] + >>> def expression(context): + ... return ProxyFactory(output) + ... + >>> zc = ZopeContext(ExpressionEngine, {}) + >>> out = zc.evaluateMacro(expression) + >>> type(out) is list + True + + The method does some trivial checking to make sure we are getting + back a macro like we expect: it must be a sequence of sequences, in + which the first sequence must start with 'version', and the second + must start with 'mode'. + + >>> del output[0] + >>> zc.evaluateMacro(expression) # doctest: +ELLIPSIS + Traceback (most recent call last): + ... 
+ ValueError: ('unexpected result from macro evaluation.', ...) + + >>> del output[:] + >>> zc.evaluateMacro(expression) # doctest: +ELLIPSIS + Traceback (most recent call last): + ... + ValueError: ('unexpected result from macro evaluation.', ...) + + >>> output = None + >>> zc.evaluateMacro(expression) # doctest: +ELLIPSIS + Traceback (most recent call last): + ... + ValueError: ('unexpected result from macro evaluation.', ...) + """ + macro = removeSecurityProxy(Context.evaluateMacro(self, expr)) + # we'll do some basic checks that it is the sort of thing we expect + problem = False + try: + problem = macro[0][0] != 'version' or macro[1][0] != 'mode' + except (TypeError, IndexError): + problem = True + if problem: + raise ValueError('unexpected result from macro evaluation.', macro) + return macro + + def setContext(self, name, value): + # Hook to allow subclasses to do things like adding security proxies + Context.setContext(self, name, ProxyFactory(value)) + + +class TrustedZopeContext(ZopeContextBase): + """Evaluation context for trusted programs.""" + + +class AdapterNamespaces(object): + """Simulate tales function namespaces with adapter lookup. + + When we are asked for a namespace, we return an object that + actually computes an adapter when called: + + To demonstrate this, we need to register an adapter: + + >>> from zope.component.testing import setUp, tearDown + >>> setUp() + >>> from zope.component import provideAdapter + >>> def adapter1(ob): + ... 
return 1 + >>> adapter1.__component_adapts__ = (None,) + >>> from zope.traversing.interfaces import IPathAdapter + >>> provideAdapter(adapter1, None, IPathAdapter, 'a1') + + Now, with this adapter in place, we can try out the namespaces: + + >>> ob = object() + >>> from zope.pagetemplate.engine import AdapterNamespaces + >>> namespaces = AdapterNamespaces() + >>> namespace = namespaces['a1'] + >>> namespace(ob) + 1 + >>> namespace = namespaces['a2'] + >>> namespace(ob) + Traceback (most recent call last): + ... + KeyError: 'a2' + + + Cleanup: + + >>> tearDown() + """ + + def __init__(self): + self.namespaces = {} + + def __getitem__(self, name): + namespace = self.namespaces.get(name) + if namespace is None: + def namespace(object): + try: + return component.getAdapter(object, IPathAdapter, name) + except ComponentLookupError: + raise KeyError(name) + + self.namespaces[name] = namespace + return namespace + + +class ZopeBaseEngine(ExpressionEngine): + + _create_context = ZopeContext + + def __init__(self): + ExpressionEngine.__init__(self) + self.namespaces = AdapterNamespaces() + + def getContext(self, __namespace=None, **namespace): + if __namespace: + if namespace: + namespace.update(__namespace) + else: + namespace = __namespace + + context = self._create_context(self, namespace) + + # Put request into context so path traversal can find it + if 'request' in namespace: + context.request = namespace['request'] + + # Put context into context so path traversal can find it + if 'context' in namespace: + context.context = namespace['context'] + + return context + +class ZopeEngine(ZopeBaseEngine): + """ + Untrusted expression engine. 
+ + This engine does not allow modules to be imported; only modules + already available may be accessed:: + + >>> from zope.pagetemplate.engine import _Engine + >>> modname = 'zope.pagetemplate.tests.trusted' + >>> engine = _Engine() + >>> context = engine.getContext(engine.getBaseNames()) + + >>> modname in sys.modules + False + >>> context.evaluate('modules/' + modname) + Traceback (most recent call last): + ... + KeyError: 'zope.pagetemplate.tests.trusted' + + (The use of ``KeyError`` is an unfortunate implementation detail; I + think this should be a ``TraversalError``.) + + Modules which have already been imported by trusted code are + available, wrapped in security proxies:: + + >>> m = context.evaluate('modules/sys') + >>> m.__name__ + 'sys' + >>> m._getframe + Traceback (most recent call last): + ... + ForbiddenAttribute: ('_getframe', ) + + The results of Python expressions evaluated by this engine are + wrapped in security proxies if the 'untrusted' extra is installed:: + + >>> r = context.evaluate('python: {12: object()}.values') + >>> str(type(r).__name__) in ( + ... ('_Proxy',) if HAVE_UNTRUSTED else + ... ('builtin_function_or_method', 'method')) + True + + >>> r = context.evaluate('python: {12: object()}[12].__class__') + >>> str(type(r).__name__) == '_Proxy' or not HAVE_UNTRUSTED + True + + General path expressions provide objects that are wrapped in + security proxies as well:: + + >>> from zope.component.testing import setUp, tearDown + >>> from zope.security.checker import NamesChecker, defineChecker + + >>> @implementer(ITraversable) + ... class Container(dict): + ... def traverse(self, name, further_path): + ... 
return self[name] + + >>> setUp() + >>> defineChecker(Container, NamesChecker(['traverse'])) + >>> d = engine.getBaseNames() + >>> foo = Container() + >>> foo.__name__ = 'foo' + >>> d['foo'] = ProxyFactory(foo) + >>> foo['bar'] = bar = Container() + >>> bar.__name__ = 'bar' + >>> bar.__parent__ = foo + >>> bar['baz'] = baz = Container() + >>> baz.__name__ = 'baz' + >>> baz.__parent__ = bar + >>> context = engine.getContext(d) + + >>> o1 = context.evaluate('foo/bar') + >>> o1.__name__ + 'bar' + >>> type(o1) + + + >>> o2 = context.evaluate('foo/bar/baz') + >>> o2.__name__ + 'baz' + >>> type(o2) + + >>> o3 = o2.__parent__ + >>> type(o3) + + >>> o1 == o3 + True + + >>> o1 is o2 + False + + Note that this engine special-cases dicts during path traversal: + it traverses only to their items, but not to their attributes + (e.g. methods on dicts), because of performance reasons:: + + >>> d = engine.getBaseNames() + >>> d['adict'] = {'items': 123} + >>> d['anotherdict'] = {} + >>> context = engine.getContext(d) + >>> context.evaluate('adict/items') + 123 + >>> context.evaluate('anotherdict/keys') + Traceback (most recent call last): + ... + KeyError: 'keys' + + This special-casing also applies to non-proxied dict subclasses:: + + >>> class TraverserDict(dict): + ... def __init__(self): + ... self.item_requested = None + ... def __getitem__(self, item): + ... self.item_requested = item + ... return dict.__getitem__(self, item) + + >>> d = engine.getBaseNames() + >>> foo = TraverserDict() + >>> d['foo'] = foo + >>> foo['bar'] = 'baz' + >>> context = engine.getContext(d) + >>> context.evaluate('foo/bar') + 'baz' + >>> foo.item_requested + 'bar' + + >>> tearDown() + + """ + + def getFunctionNamespace(self, namespacename): + """ Returns the function namespace """ + return ProxyFactory( + super(ZopeEngine, self).getFunctionNamespace(namespacename)) + +class TrustedZopeEngine(ZopeBaseEngine): + """ + Trusted expression engine. 
+ + This engine allows modules to be imported:: + + >>> from zope.pagetemplate.engine import _TrustedEngine + >>> modname = 'zope.pagetemplate.tests.trusted' + >>> engine = _TrustedEngine() + >>> context = engine.getContext(engine.getBaseNames()) + + >>> modname in sys.modules + False + >>> m = context.evaluate('modules/' + modname) + >>> m.__name__ == modname + True + >>> modname in sys.modules + True + + Since this is trusted code, we can look at whatever is in the + module, not just ``__name__`` or what's declared in a security + assertion:: + + >>> m.x + 42 + + Clean up after ourselves:: + + >>> del sys.modules[modname] + + """ + + _create_context = TrustedZopeContext + + +@implementer(ITraversable) +class TraversableModuleImporter(SimpleModuleImporter): + + def traverse(self, name, further_path): + try: + return self[name] + except ImportError: + raise TraversalError(self, name) + + +def _Engine(engine=None): + if engine is None: + engine = ZopeEngine() + engine = _create_base_engine(engine, ZopePathExpr) + engine.registerType('python', ZopePythonExpr) + + # Using a proxy around sys.modules allows page templates to use + # modules for which security declarations have been made, but + # disallows execution of any import-time code for modules, which + # should not be allowed to happen during rendering. 
+ engine.registerBaseName('modules', ProxyFactory(sys.modules)) + + return engine + +def _TrustedEngine(engine=None): + if engine is None: + engine = TrustedZopeEngine() + engine = _create_base_engine(engine, TrustedZopePathExpr) + engine.registerType('python', PythonExpr) + engine.registerBaseName('modules', TraversableModuleImporter()) + return engine + +def _create_base_engine(engine, pathtype): + for pt in pathtype._default_type_names: + engine.registerType(pt, pathtype) + engine.registerType('string', StringExpr) + engine.registerType('not', NotExpr) + engine.registerType('defer', DeferExpr) + return engine + + +Engine = _Engine() +TrustedEngine = _TrustedEngine() + + +class AppPT(object): + + def pt_getEngine(self): + return Engine + + +class TrustedAppPT(object): + + def pt_getEngine(self): + return TrustedEngine diff --git a/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/i18n.py b/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/i18n.py new file mode 100644 index 0000000..fc47f88 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/i18n.py @@ -0,0 +1,20 @@ +############################################################################## +# +# Copyright (c) 2003 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. 
+# +############################################################################## +"""Customization of zope.i18n for the Zope application server +""" +__docformat__ = 'restructuredtext' + +# import this as _ to create i18n messages in the zope domain +from zope.i18nmessageid import MessageFactory +ZopeMessageFactory = MessageFactory('zope') diff --git a/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/interfaces.py b/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/interfaces.py new file mode 100644 index 0000000..9a0fe71 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/interfaces.py @@ -0,0 +1,156 @@ +############################################################################## +# +# Copyright (c) 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Interface that describes the 'macros' attribute of a PageTemplate. +""" +from zope.interface import Interface, Attribute + + +class IPageTemplate(Interface): + """Objects that can render page templates + """ + + def __call__(*args, **kw): + """Render a page template + + The argument handling is specific to particular + implementations. Normally, however, positional arguments are + bound to the top-level ``args`` variable and keyword arguments + are bound to the top-level ``options`` variable. + """ + + def pt_edit(source, content_type): + """Set the source and content type + """ + + def pt_errors(namespace): + """Return a sequence of strings that describe errors in the template. 
+ + The errors may occur when the template is compiled or + rendered. + + *namespace* is the set of names passed to the TALES expression + evaluator, similar to what's returned by pt_getContext(). + + This can be used to let a template author know what went wrong + when an attempt was made to render the template. + """ + + def read(): + """Get the template source + """ + + macros = Attribute("An object that implements the ``__getitem__`` " + "protocol (e.g., a :class:`dict`), containing page template macros.") + +class IPageTemplateSubclassing(IPageTemplate): + """Behavior that may be overridden or used by subclasses + """ + + + def pt_getContext(**kw): + """Compute a dictionary of top-level template names + + Responsible for returning the set of + top-level names supported in path expressions + + """ + + def pt_getEngine(): + """Returns the TALES expression evaluator. + """ + + def pt_getEngineContext(namespace): + """Return an execution context from the expression engine.""" + + def __call__(*args, **kw): + """Render a page template + + This is sometimes overridden to provide additional argument + binding. + """ + + def pt_source_file(): + """return some text describing where a bit of ZPT code came from. + + This could be a file path, a object path, etc. + """ + + def _cook(): + """Compile the source + + Results are saved in the variables: ``_v_errors``, ``_v_warnings``, + ``_v_program``, and ``_v_macros``, and the flag ``_v_cooked`` is set. + """ + def _cook_check(): + """Compiles the source if necessary + + Subclasses might override this to influence the decision about + whether compilation is necessary. + """ + + content_type = Attribute("The content-type of the generated output") + + expand = Attribute( + "Flag indicating whether the read method should expand macros") + + +class IPageTemplateEngine(Interface): + """Template engine implementation. + + The engine must provide a ``cook`` method to return a cooked + template program from a source input. 
+ """ + + def cook(source_file, text, engine, content_type): + """Parse text and return prepared template program and macros. + + Note that while *source_file* is provided to name the source + of the input *text*, it should not be relied on to be an + actual filename (it may be an application-specific, virtual + path). + + The return type is a tuple ``(program, macros)``. + """ + + +class IPageTemplateProgram(Interface): + """Cooked template program.""" + + def __call__( + context, macros, debug=0, wrap=60, metal=1, tal=1, showtal=-1, + strictinsert=1, stackLimit=100, i18nInterpolate=1, + sourceAnnotations=0): + """ + Render template in the provided template *context*. + + Optional arguments: + + :keyword bool debug: enable debugging output to sys.stderr (off by default). + :keyword int wrap: try to wrap attributes on opening tags to this number of + column (default: 60). + :keyword bool metal: enable METAL macro processing (on by default). + :keyword bool tal: enable TAL processing (on by default). + :keyword int showtal: do not strip away TAL directives. A special value of + -1 (which is the default setting) enables showtal when TAL + processing is disabled, and disables showtal when TAL processing is + enabled. Note that you must use 0, 1, or -1; true boolean values + are not supported (for historical reasons). + :keyword bool strictinsert: enable TAL processing and stricter HTML/XML + checking on text produced by structure inserts (on by default). + Note that Zope turns this value off by default. + :keyword int stackLimit: set macro nesting limit (default: 100). + :keyword bool i18nInterpolate: enable i18n translations (default: on). + :keyword bool sourceAnnotations: enable source annotations with HTML comments + (default: off). 
+ """ diff --git a/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/pagetemplate.py b/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/pagetemplate.py new file mode 100644 index 0000000..81b2c8b --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/pagetemplate.py @@ -0,0 +1,295 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Page Template module + +HTML- and XML-based template objects using TAL, TALES, and METAL. +""" +import sys +import six +from zope.tal.talparser import TALParser +from zope.tal.htmltalparser import HTMLTALParser +from zope.tal.talgenerator import TALGenerator +from zope.tal.talinterpreter import TALInterpreter +from zope.tales.engine import Engine +from zope.component import queryUtility + +from zope.pagetemplate.interfaces import IPageTemplateSubclassing +from zope.pagetemplate.interfaces import IPageTemplateEngine +from zope.pagetemplate.interfaces import IPageTemplateProgram +from zope.interface import implementer +from zope.interface import provider + +_default_options = {} + + +class StringIO(list): + # Unicode aware append-only version of StringIO. 
+ write = list.append + + def __init__(self, value=None): + list.__init__(self) + if value is not None: + self.append(value) + + def getvalue(self): + return u''.join(self) + + +@implementer(IPageTemplateSubclassing) +class PageTemplate(object): + """ + Page Templates using TAL, TALES, and METAL. + + **Subclassing** + + This class implements :class:`~zope.pagetemplate.interfaces.IPageTemplateSubclassing`. + + The following methods have certain internal responsibilities. + + ``pt_getContext(**keywords)`` + Should ignore keyword arguments that it doesn't care about, + and construct the namespace passed to the TALES expression + engine. This method is free to use the keyword arguments it + receives. + + ``pt_render(namespace, source=False, sourceAnnotations=False, showtal=False)`` + Responsible the TAL interpreter to perform the rendering. The + namespace argument is a mapping which defines the top-level + namespaces passed to the TALES expression engine. + + ``__call__(*args, **keywords)`` + Calls pt_getContext() to construct the top-level namespace + passed to the TALES expression engine, then calls pt_render() + to perform the rendering. 
+ """ + + _error_start = '' + _newline = '\n' + + content_type = 'text/html' + expand = 1 + _v_errors = () + _v_cooked = 0 + _v_macros = None + _v_program = None + _text = '' + + @property + def macros(self): + self._cook_check() + return self._v_macros + + def pt_edit(self, text, content_type): + if content_type: + self.content_type = str(content_type) + if hasattr(text, 'read'): + text = text.read() + self.write(text) + + def pt_getContext(self, args=(), options=_default_options, **ignored): + rval = {'template': self, + 'options': options, + 'args': args, + 'nothing': None, + } + rval.update(self.pt_getEngine().getBaseNames()) + return rval + + def __call__(self, *args, **kwargs): + return self.pt_render(self.pt_getContext(args, kwargs)) + + def pt_getEngineContext(self, namespace): + return self.pt_getEngine().getContext(namespace) + + def pt_getEngine(self): + return Engine + + def pt_render(self, namespace, source=False, sourceAnnotations=False, + showtal=False): + """Render this Page Template""" + self._cook_check() + + __traceback_supplement__ = ( + PageTemplateTracebackSupplement, self, namespace + ) + + if self._v_errors: + raise PTRuntimeError(str(self._v_errors)) + + context = self.pt_getEngineContext(namespace) + + return self._v_program( + context, self._v_macros, tal=not source, showtal=showtal, + strictinsert=0, sourceAnnotations=sourceAnnotations + ) + + def pt_errors(self, namespace, check_macro_expansion=True): + self._cook_check() + err = self._v_errors + if err: + return err + if check_macro_expansion: + try: + self.pt_render(namespace, source=1) + except Exception: + return ('Macro expansion failed', '%s: %s' % sys.exc_info()[:2]) + + def _convert(self, string, text): + """Adjust the string type to the type of text""" + if isinstance(text, six.binary_type) and not isinstance(string, six.binary_type): + return string.encode('utf-8') + + if isinstance(text, six.text_type) and not isinstance(string, six.text_type): + return string.decode('utf-8') 
+ + return string + + def write(self, text): + # We accept both, since the text can either come from a file (and the + # parser will take care of the encoding) or from a TTW template, in + # which case we already have unicode. + assert isinstance(text, (six.string_types, six.binary_type)) + + def bs(s): + """Bytes or str""" + return self._convert(s, text) + + if text.startswith(bs(self._error_start)): + errend = text.find(bs(self._error_end)) + if errend >= 0: + text = text[errend + 3:] + if text[:1] == bs(self._newline): + text = text[1:] + if self._text != text: + self._text = text + + # Always cook on an update, even if the source is the same; + # the content-type might have changed. + self._cook() + + def read(self, request=None): + """Gets the source, sometimes with macros expanded.""" + self._cook_check() + def bs(s): + """Bytes or str""" + return self._convert(s, self._text) + if not self._v_errors: + if not self.expand: + return self._text + try: + # This gets called, if macro expansion is turned on. + # Note that an empty dictionary is fine for the context at + # this point, since we are not evaluating the template. + context = self.pt_getContext(self, request) + return self.pt_render(context, source=1) + except: + return (bs('%s\n Macro expansion failed\n %s\n-->\n' % + (self._error_start, "%s: %s" % sys.exc_info()[:2])) + + self._text) + + return bs('%s\n %s\n-->\n' % (self._error_start, + '\n'.join(self._v_errors))) + \ + self._text + + def pt_source_file(self): + """To be overridden.""" + return None + + def _cook_check(self): + if not self._v_cooked: + self._cook() + + def _cook(self): + """Compile the TAL and METAL statments. + + Cooking must not fail due to compilation errors in templates. 
+ """ + + pt_engine = self.pt_getEngine() + source_file = self.pt_source_file() + + self._v_errors = () + + try: + engine = queryUtility( + IPageTemplateEngine, default=PageTemplateEngine + ) + self._v_program, self._v_macros = engine.cook( + source_file, self._text, pt_engine, self.content_type) + except: + etype, e = sys.exc_info()[:2] + self._v_errors = [ + "Compilation failed", + "%s.%s: %s" % (etype.__module__, etype.__name__, e) + ] + + self._v_cooked = 1 + + +class PTRuntimeError(RuntimeError): + '''The Page Template has template errors that prevent it from rendering.''' + pass + + +@implementer(IPageTemplateProgram) +@provider(IPageTemplateEngine) +class PageTemplateEngine(object): + """ + Page template engine that uses the TAL interpreter to render. + + This class implements :class:`zope.pagetemplate.interfaces.IPageTemplateProgram`. + """ + + + def __init__(self, program): + self.program = program + + def __call__(self, context, macros, **options): + output = StringIO(u'') + interpreter = TALInterpreter( + self.program, macros, context, + stream=output, **options + ) + interpreter() + return output.getvalue() + + @classmethod + def cook(cls, source_file, text, engine, content_type): + if content_type == 'text/html': + gen = TALGenerator(engine, xml=0, source_file=source_file) + parser = HTMLTALParser(gen) + else: + gen = TALGenerator(engine, source_file=source_file) + parser = TALParser(gen) + + parser.parseString(text) + program, macros = parser.getCode() + + return cls(program), macros + + +#@implementer(ITracebackSupplement) +class PageTemplateTracebackSupplement(object): + + def __init__(self, pt, namespace): + self.manageable_object = pt + self.warnings = [] + try: + e = pt.pt_errors(namespace, check_macro_expansion=False) + except TypeError: + # Old page template. 
+ e = pt.pt_errors(namespace) + if e: + self.warnings.extend(e) diff --git a/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/pagetemplatefile.py b/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/pagetemplatefile.py new file mode 100644 index 0000000..4cf0bf3 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/pagetemplatefile.py @@ -0,0 +1,128 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Filesystem Page Template module + +Zope object encapsulating a Page Template from the filesystem. 
+""" + +__all__ = ("PageTemplateFile",) + +import os +import sys +import re +import logging + +from zope.pagetemplate.pagetemplate import PageTemplate + +logger = logging.getLogger(__name__) + +DEFAULT_ENCODING = "utf-8" + +meta_pattern = re.compile( + br'\s*\s*', + re.IGNORECASE) + +def package_home(gdict): + filename = gdict["__file__"] + return os.path.dirname(filename) + +class PageTemplateFile(PageTemplate): + "Zope wrapper for filesystem Page Template using TAL, TALES, and METAL" + + _v_last_read = 0 + _v_debug = __debug__ + + def __init__(self, filename, _prefix=None): + path = self.get_path_from_prefix(_prefix) + self.filename = os.path.join(path, filename) + if not os.path.isfile(self.filename): + raise ValueError("No such file", self.filename) + + def get_path_from_prefix(self, _prefix): + if isinstance(_prefix, str): + path = _prefix + else: + if _prefix is None: + _prefix = sys._getframe(2).f_globals + path = package_home(_prefix) + return path + + def _prepare_html(self, text): + match = meta_pattern.search(text) + if match is not None: + type_, encoding = (x.decode('utf-8') for x in match.groups()) + # TODO: Shouldn't / stripping + # be in PageTemplate.__call__()? 
+ text = meta_pattern.sub(b"", text) + else: + type_ = None + encoding = DEFAULT_ENCODING + text = text.decode(encoding) + return text, type_ + + def _read_file(self): + __traceback_info__ = self.filename + with open(self.filename, "rb") as f: + text = f.read(XML_PREFIX_MAX_LENGTH) + type_ = sniff_type(text) + text += f.read() + + if type_ != "text/xml": + text, type_ = self._prepare_html(text) + + return text, type_ + + def _cook_check(self): + if self._v_last_read and not self._v_debug: + return + __traceback_info__ = self.filename + try: + mtime = os.path.getmtime(self.filename) + except OSError: + mtime = 0 + if self._v_program is not None and mtime == self._v_last_read: + return + text, type_ = self._read_file() + self.pt_edit(text, type_) + assert self._v_cooked + if self._v_errors: + logger.error('PageTemplateFile: Error in template %s: %s', + self.filename, '\n'.join(self._v_errors)) + return + self._v_last_read = mtime + + def pt_source_file(self): + return self.filename + + def __getstate__(self): + raise TypeError("non-picklable object") + +XML_PREFIXES = [ + b" self._last: + raise IndexError(index) + return self._sequence[index + self._first] + +def opt(start, end, size, orphan, sequence): + assert size >= 1 + assert start > 0 + + start = len(sequence) if start - 1 >= len(sequence) else start + assert end <= 0 + end = start + size - 1 + + assert end + orphan - 1 < len(sequence) + + return start, end, size diff --git a/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/tests/input/__init__.py b/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/tests/input/__init__.py new file mode 100644 index 0000000..b711d36 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/tests/input/__init__.py @@ -0,0 +1,2 @@ +# +# This file is necessary to make this directory a package. 
diff --git a/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/tests/input/checknotexpression.html b/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/tests/input/checknotexpression.html new file mode 100644 index 0000000..81f3735 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/tests/input/checknotexpression.html @@ -0,0 +1,9 @@ + + + +
    not:python:0
    +
    not:python:1
    +
    not: python:1
    +
    not:python:range(1,20)
    + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/tests/input/checknothing.html b/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/tests/input/checknothing.html new file mode 100644 index 0000000..bb531ba --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/tests/input/checknothing.html @@ -0,0 +1,7 @@ + + + + Hello World! + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/tests/input/checkpathalt.html b/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/tests/input/checkpathalt.html new file mode 100644 index 0000000..1407d24 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/tests/input/checkpathalt.html @@ -0,0 +1,17 @@ + + +
    +

    1

    +

    2

    +

    3

    +

    4

    +

    5

    + +

    Z

    +

    Z

    +

    Z

    + +

    Z

    +
    + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/tests/input/checkpathnothing.html b/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/tests/input/checkpathnothing.html new file mode 100644 index 0000000..99f88be --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/tests/input/checkpathnothing.html @@ -0,0 +1,7 @@ + + + + Hello World! + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/tests/input/checkwithxmlheader.html b/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/tests/input/checkwithxmlheader.html new file mode 100644 index 0000000..c184b36 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/tests/input/checkwithxmlheader.html @@ -0,0 +1,5 @@ + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/tests/input/dtml1.html b/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/tests/input/dtml1.html new file mode 100644 index 0000000..421a1b1 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/tests/input/dtml1.html @@ -0,0 +1,19 @@ + + Test of documentation templates + + blah +
    +
    The arguments to this test program were:
    +
    +
      +
    • + Argument number 99 + is default +
    • +
    +
    +
    +

    No arguments were given.

    + And thats da trooth. + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/tests/input/dtml3.html b/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/tests/input/dtml3.html new file mode 100644 index 0000000..d3c84b2 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/tests/input/dtml3.html @@ -0,0 +1,33 @@ +Test of documentation templates + +
    + The arguments were: + + (previous start item-previous end item) + +
    + +
    ??.
    +
    Argument 99 was ??
    +
    +
    + + (next start item-next end item) + +
    +

    + No arguments were given. +

    + And I am 100% sure! + diff --git a/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/tests/input/globalsshadowlocals.html b/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/tests/input/globalsshadowlocals.html new file mode 100644 index 0000000..c3f99e7 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/tests/input/globalsshadowlocals.html @@ -0,0 +1,14 @@ + + + +
    + Should be 2 here! +
    +
    + Should be 1 here! +
    +
    + Should be 3 here! +
    + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/tests/input/loop1.html b/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/tests/input/loop1.html new file mode 100644 index 0000000..a4d0118 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/tests/input/loop1.html @@ -0,0 +1,15 @@ + + +Loop doc + + +

    Choose your type:

    + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/tests/input/recursive.html b/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/tests/input/recursive.html new file mode 100644 index 0000000..9726867 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/tests/input/recursive.html @@ -0,0 +1,7 @@ + +
      +
    • +
    • +
        + +
      diff --git a/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/tests/input/stringexpression.html b/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/tests/input/stringexpression.html new file mode 100644 index 0000000..a60e72d --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/tests/input/stringexpression.html @@ -0,0 +1,7 @@ + + + + This is the title + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/tests/input/teeshop1.html b/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/tests/input/teeshop1.html new file mode 100644 index 0000000..a915b0d --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/tests/input/teeshop1.html @@ -0,0 +1,75 @@ + + +Zope Stuff + + + + + + + + + +
      + + + + +
      +
      +
      + + + + + + + +
      apparelmugstoysmisc
      +
      +
      +
      + + + + +
      + + + + + + + + + + +
      + + + + + + + + + +
      Description: + This is the tee for those who LOVE Zope. Show your heart + on your tee. +

      +
      Price:12.99
      +
      + + + + + +
      +
      +
      +
      + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/tests/input/teeshop2.html b/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/tests/input/teeshop2.html new file mode 100644 index 0000000..7ca0cf1 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/tests/input/teeshop2.html @@ -0,0 +1,5 @@ + +
      +Body +
      + diff --git a/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/tests/input/teeshoplaf.html b/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/tests/input/teeshoplaf.html new file mode 100644 index 0000000..88d8a5b --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/tests/input/teeshoplaf.html @@ -0,0 +1,58 @@ + + +Zope Stuff + + + + + + + + + +
      + + + + +
      +
      +
      + + + + + + + +
      apparelmugstoysmisc
      +
      +
      +
      + + + + +
      + + + + + + + +

      + + + + + +
      This is the tee for those who LOVE Zope. Show your heart on + your tee. +

      +
      +
      +
      +
      + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/tests/input/translation.html b/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/tests/input/translation.html new file mode 100644 index 0000000..1df9bd5 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/tests/input/translation.html @@ -0,0 +1,2 @@ +

      Define and translate message id in ZPT

      +

      Insert Message object here

      diff --git a/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/tests/output/__init__.py b/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/tests/output/__init__.py new file mode 100644 index 0000000..b711d36 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/tests/output/__init__.py @@ -0,0 +1,2 @@ +# +# This file is necessary to make this directory a package. diff --git a/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/tests/output/checknotexpression.html b/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/tests/output/checknotexpression.html new file mode 100644 index 0000000..48e2ba0 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/tests/output/checknotexpression.html @@ -0,0 +1,9 @@ + + + +
      not:python:0
      + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/tests/output/checknothing.html b/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/tests/output/checknothing.html new file mode 100644 index 0000000..c65a966 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/tests/output/checknothing.html @@ -0,0 +1,7 @@ + + + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/tests/output/checkpathalt.html b/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/tests/output/checkpathalt.html new file mode 100644 index 0000000..ba33be1 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/tests/output/checkpathalt.html @@ -0,0 +1,17 @@ + + +
      +

      X

      +

      X

      +

      X

      +

      X

      +

      X

      + +

      Z

      +

      Z

      +

      Z

      + +

      c

      +
      + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/tests/output/checkpathnothing.html b/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/tests/output/checkpathnothing.html new file mode 100644 index 0000000..c65a966 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/tests/output/checkpathnothing.html @@ -0,0 +1,7 @@ + + + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/tests/output/checkwithxmlheader.html b/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/tests/output/checkwithxmlheader.html new file mode 100644 index 0000000..d2afeb0 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/tests/output/checkwithxmlheader.html @@ -0,0 +1,4 @@ + + +Hello! + diff --git a/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/tests/output/dtml1a.html b/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/tests/output/dtml1a.html new file mode 100644 index 0000000..f146b34 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/tests/output/dtml1a.html @@ -0,0 +1,34 @@ + + Test of documentation templates + + +
      +
      The arguments to this test program were:
      +
      +
        +
      • + Argument number 1 + is one +
      • + Argument number 2 + is two +
      • + Argument number 3 + is three +
      • + Argument number 4 + is cha +
      • + Argument number 5 + is cha +
      • + Argument number 6 + is cha +
      • +
      +
      +
      + + And thats da trooth. + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/tests/output/dtml1b.html b/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/tests/output/dtml1b.html new file mode 100644 index 0000000..edc7637 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/tests/output/dtml1b.html @@ -0,0 +1,7 @@ + + Test of documentation templates + +

      No arguments were given.

      + And thats da trooth. + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/tests/output/dtml3.html b/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/tests/output/dtml3.html new file mode 100644 index 0000000..fd774e1 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/tests/output/dtml3.html @@ -0,0 +1,30 @@ +Test of documentation templates + +
      + The arguments were: + +
      + +
      one.
      +
      Argument 1 was one
      +
      +
      two.
      +
      Argument 2 was two
      +
      +
      three.
      +
      Argument 3 was three
      +
      +
      four.
      +
      Argument 4 was four
      +
      +
      five.
      +
      Argument 5 was five
      +
      +
      + + (six-ten) + +
      + + And I am 100% sure! + diff --git a/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/tests/output/globalsshadowlocals.html b/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/tests/output/globalsshadowlocals.html new file mode 100644 index 0000000..e22e59b --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/tests/output/globalsshadowlocals.html @@ -0,0 +1,14 @@ + + + +
      + 2 +
      +
      + 1 +
      +
      + 3 +
      + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/tests/output/loop1.html b/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/tests/output/loop1.html new file mode 100644 index 0000000..f16ea89 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/tests/output/loop1.html @@ -0,0 +1,25 @@ + + +Loop doc + + +

      Choose your type:

      + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/tests/output/recursive.html b/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/tests/output/recursive.html new file mode 100644 index 0000000..a251dcf --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/tests/output/recursive.html @@ -0,0 +1,14 @@ + +
        +
      • root
      • +
      • +
          +
        • first
        • +
        +
      • +
      • +
          +
        • second
        • +
        +
      • +
      diff --git a/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/tests/output/stringexpression.html b/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/tests/output/stringexpression.html new file mode 100644 index 0000000..58b55f5 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/tests/output/stringexpression.html @@ -0,0 +1,7 @@ + + + + Hello World! + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/tests/output/teeshop1.html b/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/tests/output/teeshop1.html new file mode 100644 index 0000000..d2abcdb --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/tests/output/teeshop1.html @@ -0,0 +1,89 @@ + + +Zope Stuff + + + + + + + + + +
      + + + + +
      +
      +
      + + + + + + + +
      apparelmugstoysmisc
      +
      +
      +
      + + + + +
      + + + + + + + + + + + + + +
      + + + + + + + + + +
      Description: + This is the tee for those who LOVE Zope. Show your heart on your tee. +

      +
      Price:12.99
      +
      + + + + + + + + + +
      Description: + This is the tee for Jim Fulton. He's the Zope Pope! +

      +
      Price:11.99
      +
      + + + + + +
      +
      +
      +
      + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/tests/output/teeshop2.html b/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/tests/output/teeshop2.html new file mode 100644 index 0000000..9843453 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/tests/output/teeshop2.html @@ -0,0 +1,35 @@ + + +Zope Stuff + + + + + + + + + +
      + + + + +
      +
      +
      + + + + + + + +
      apparelmugstoysmisc
      +
      +
      +
      +Body +
      + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/tests/output/teeshoplaf.html b/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/tests/output/teeshoplaf.html new file mode 100644 index 0000000..5fb936c --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/tests/output/teeshoplaf.html @@ -0,0 +1,58 @@ + + +Zope Stuff + + + + + + + + + +
      + + + + +
      +
      +
      + + + + + + + +
      apparelmugstoysmisc
      +
      +
      +
      + + + + +
      + + + + + + + +

      + + + + + +
      This is the tee for those who LOVE Zope. Show your heart on + your tee. +

      +
      +
      +
      +
      + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/tests/output/translation.html b/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/tests/output/translation.html new file mode 100644 index 0000000..4335d11 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/tests/output/translation.html @@ -0,0 +1,2 @@ +

      Define and translate message id in ZPT

      +

      Translate this!

      diff --git a/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/tests/test_basictemplate.py b/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/tests/test_basictemplate.py new file mode 100644 index 0000000..f998f21 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/tests/test_basictemplate.py @@ -0,0 +1,276 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Basic Page Template tests +""" +import unittest + +from zope.pagetemplate.tests import util +import zope.pagetemplate.pagetemplate +import zope.component.testing + +class BasicTemplateTests(unittest.TestCase): + + def setUp(self): + zope.component.testing.setUp(self) + self.t = zope.pagetemplate.pagetemplate.PageTemplate() + + def tearDown(self): + zope.component.testing.tearDown(self) + + def test_if_in_var(self): + # DTML test 1: if, in, and var: + + # %(comment)[ blah %(comment)] + # Test of documentation templates + # + # %(if args)[ + #
      The arguments to this test program were:

      + #

      + #
        + # %(in args)[ + #
      • Argument number %(num)d was %(arg)s + # %(in args)] + #

      + # %(if args)] + # %(else args)[ + # No arguments were given.

      + # %(else args)] + # And thats da trooth. + # + + tal = util.read_input('dtml1.html') + self.t.write(tal) + + aa = util.argv(('one', 'two', 'three', 'cha', 'cha', 'cha')) + o = self.t(content=aa) + expect = util.read_output('dtml1a.html') + + util.check_xml(expect, o) + + aa = util.argv(()) + o = self.t(content=aa) + expect = util.read_output('dtml1b.html') + util.check_xml(expect, o) + + def test_pt_runtime_error(self): + self.t.write("xyz") + try: + self.t.pt_render({}) + except zope.pagetemplate.pagetemplate.PTRuntimeError as e: + self.assertEqual( + str(e), + "['Compilation failed', 'zope.tal.taldefs.TALError:" + " TAL attributes on require explicit" + " , at line 1, column 1']") + else: + self.fail("expected PTRuntimeError") + + def test_engine_utility_registration(self): + self.t.write("foo") + output = self.t.pt_render({}) + self.assertEqual(output, 'foo') + + from zope.pagetemplate.interfaces import IPageTemplateEngine + from zope.component import provideUtility + + class DummyProgram(object): + def __init__(self, *args): + self.args = args + + def __call__(self, *args, **kwargs): + return self.args, (self,) + args, kwargs + + class DummyEngine(object): + @staticmethod + def cook(*args): + return DummyProgram(*args), "macros" + + provideUtility(DummyEngine, IPageTemplateEngine) + self.t._cook() + + self.assertIsInstance(self.t._v_program, DummyProgram) + self.assertEqual(self.t._v_macros, "macros") + + # "Render" and unpack arguments passed for verification + ((source_file, text, _engine, content_type), + (program, _context, macros), + options) = self.t.pt_render({}) + + self.assertEqual(source_file, None) + self.assertEqual(text, 'foo') + self.assertEqual(content_type, 'text/html') + self.assertEqual(macros, 'macros') + self.assertIsInstance(program, DummyProgram) + self.assertEqual(options, { + 'tal': True, + 'showtal': False, + 'sourceAnnotations': False, + 'strictinsert': 0, + }) + + def test_batches_and_formatting(self): + # DTML test 3: batches 
and formatting: + + # Test of documentation templates + # + # + # The arguments were: + # + # + # (- + # ) + # + # + #

      + # + #
      .
      + #
      Argument was
      + # + # (- + # ) + # + # + #
      + # + # No arguments were given.

      + # + # And I\'m 100% sure! + # + + tal = util.read_input('dtml3.html') + self.t.write(tal) + + aa = util.argv(( + 'one', 'two', 'three', 'four', 'five', + 'six', 'seven', 'eight', 'nine', 'ten', + 'eleven', 'twelve', 'thirteen', 'fourteen', 'fifteen', + 'sixteen', 'seventeen', 'eighteen', 'nineteen', + 'twenty', + )) + from zope.pagetemplate.tests import batch + o = self.t(content=aa, batch=batch.batch(aa.args, 5)) + + expect = util.read_output('dtml3.html') + util.check_xml(expect, o) + + def test_on_error_in_slot_filler(self): + # The `here` isn't defined, so the macro definition is + # expected to catch the error that gets raised. + text = '''\ +

      +
      +
      + cool +
      +
      + +
      +
      +

      +

      +
      + ''' + self.t.write(text) + self.t() + + def test_on_error_in_slot_default(self): + # The `here` isn't defined, so the macro definition is + # expected to catch the error that gets raised. + text = '''\ +
      +
      +
      +
      +
      +
      +
      + +
      +
      + ''' + self.t.write(text) + self.t() + + def test_unicode_html(self): + text = u'

      \xe4\xf6\xfc\xdf

      ' + + # test with HTML parser + self.t.pt_edit(text, 'text/html') + self.assertEqual(self.t().strip(), text) + + # test with XML parser + self.t.pt_edit(text, 'text/xml') + self.assertEqual(self.t().strip(), text) + + def test_edit_with_read(self): + from io import BytesIO + self.t.pt_edit(BytesIO(b""), None) + self.assertEqual(self.t._text, b'') + + def test_errors(self): + self.t._v_cooked = True + self.t._v_errors = 1 + e = self.t.pt_errors(None) + self.assertEqual(e, 1) + + self.t._v_errors = () + e = self.t.pt_errors(None) + self.assertEqual(e[0], 'Macro expansion failed') + + def test_convert(self): + string = u'binary' + text = b'binary' + self.assertEqual(text, self.t._convert(string, text)) + + def test_write_error(self): + self.t.write(self.t._error_start + 'stuff' + self.t._error_end + self.t._newline) + self.assertEqual(self.t._text, '') + + def test_read_no_expand(self): + self.t.expand = False + self.t._text = self + self.t._v_cooked = True + + self.assertIs(self.t.read(), self) + + def test_read_error_expand(self): + self.t.expand = True + self.t._text = '' + self.t._v_cooked = True + text = self.t.read() + self.assertIn(self.t._error_start, text) + self.assertIn("Macro expansion failed", text) + + def test_macros(self): + self.assertEqual(self.t.macros, {}) + + +class TestPageTemplateTracebackSupplement(unittest.TestCase): + + def test_errors_old_style(self): + class PT(object): + def pt_errors(self, ns): + return (ns,) + + pts = zope.pagetemplate.pagetemplate.PageTemplateTracebackSupplement(PT(), 'ns') + + self.assertEqual(pts.warnings, ['ns']) + + def test_errors_none(self): + class PT(object): + def pt_errors(self, ns, check_macro_expansion=False): + return None + + pts = zope.pagetemplate.pagetemplate.PageTemplateTracebackSupplement(PT(), 'ns') + self.assertEqual(pts.warnings, []) diff --git a/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/tests/test_engine.py 
b/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/tests/test_engine.py new file mode 100644 index 0000000..97ec8f6 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/tests/test_engine.py @@ -0,0 +1,207 @@ +############################################################################## +# +# Copyright (c) 2004-2009 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Doc tests for the pagetemplate's 'engine' module +""" +import doctest +import re +import unittest +import zope.pagetemplate.engine +from zope.testing.renormalizing import RENormalizing +from zope.component.testing import PlacelessSetup + +class EngineTests(PlacelessSetup, + unittest.TestCase): + + def _makeOne(self): + return zope.pagetemplate.engine._Engine() + + def test_function_namespaces_return_secured_proxies(self): + # See https://bugs.launchpad.net/zope3/+bug/98323 + from zope.proxy import isProxy + engine = self._makeOne() + namespace = engine.getFunctionNamespace('test') + self.assertTrue(isProxy(namespace)) + + def test_getContext_namespace(self): + engine = self._makeOne() + ctx = engine.getContext({'a': 1}, b=2, request=3, context=4) + self.assertEqual(ctx.getValue('a'), 1) + self.assertEqual(ctx.getValue('b'), 2) + self.assertEqual(ctx.getValue('request'), 3) + self.assertEqual(ctx.getValue('context'), 4) + +class DummyEngine(object): + + def getTypes(self): + return {} + +class DummyContext(object): + + _engine = DummyEngine() + + def __init__(self, **kw): + self.vars = kw 
+ +class ZopePythonExprTests(unittest.TestCase): + + def test_simple(self): + from zope.pagetemplate.engine import ZopePythonExpr + expr = ZopePythonExpr('python', 'max(a,b)', DummyEngine()) + self.assertEqual(expr(DummyContext(a=1, b=2)), 2) + + def test_allowed_module_name(self): + from zope.pagetemplate.engine import ZopePythonExpr + expr = ZopePythonExpr('python', '__import__("sys").__name__', + DummyEngine()) + self.assertEqual(expr(DummyContext()), 'sys') + + @unittest.skipUnless(zope.pagetemplate.engine.HAVE_UNTRUSTED, + "Needs untrusted") + def test_forbidden_module_name(self): + from zope.pagetemplate.engine import ZopePythonExpr + from zope.security.interfaces import Forbidden + expr = ZopePythonExpr('python', '__import__("sys").exit', + DummyEngine()) + self.assertRaises(Forbidden, expr, DummyContext()) + + @unittest.skipUnless(zope.pagetemplate.engine.HAVE_UNTRUSTED, + "Needs untrusted") + def test_disallowed_builtin(self): + from zope.pagetemplate.engine import ZopePythonExpr + expr = ZopePythonExpr('python', 'open("x", "w")', DummyEngine()) + self.assertRaises(NameError, expr, DummyContext()) + + +class TestZopeContext(PlacelessSetup, + unittest.TestCase): + + assertRaisesRegex = getattr(unittest.TestCase, 'assertRaisesRegex', + getattr(unittest.TestCase, 'assertRaisesRegexp')) + + def _makeOne(self): + return zope.pagetemplate.engine.ZopeContext(None, {}) + + def test_translate(self): + ctx = self._makeOne() + self.assertEqual(ctx.translate('msgid'), 'msgid') + + def test_evaluate_error(self): + ctx = self._makeOne() + with self.assertRaisesRegex(zope.pagetemplate.engine.InlineCodeError, + "Inline Code Evaluation is deactivated"): + ctx.evaluateCode('lang', 'code') + + def test_evaluate_interpreter_not_importable(self): + ctx = self._makeOne() + ctx.evaluateInlineCode = True + with self.assertRaises(ImportError): + ctx.evaluateCode('lang', 'code') + + def test_evaluate_interpreter_not_found(self): + get = zope.pagetemplate.engine._get_iinterpreter + 
from zope import interface + class IInterpreter(interface.Interface): + pass + def mock_get(): + return IInterpreter + + ctx = self._makeOne() + ctx.evaluateInlineCode = True + zope.pagetemplate.engine._get_iinterpreter = mock_get + try: + with self.assertRaisesRegex(zope.pagetemplate.engine.InlineCodeError, + "No interpreter named"): + ctx.evaluateCode('lang', 'code') + finally: + zope.pagetemplate.engine._get_iinterpreter = get + + def test_evaluate_interpreter_found(self): + get = zope.pagetemplate.engine._get_iinterpreter + from zope import interface + from zope import component + class IInterpreter(interface.Interface): + pass + def mock_get(): + return IInterpreter + + @interface.implementer(IInterpreter) + class Interpreter(object): + def evaluateRawCode(self, code, globs): + globs['new'] = code + return 42 + + component.provideUtility(Interpreter(), name='lang') + + ctx = self._makeOne() + ctx.evaluateInlineCode = True + zope.pagetemplate.engine._get_iinterpreter = mock_get + try: + result = ctx.evaluateCode('lang', 'code') + finally: + zope.pagetemplate.engine._get_iinterpreter = get + + self.assertEqual(result, 42) + self.assertEqual('code', ctx.getValue('new')) + + +class TestTraversableModuleImporter(unittest.TestCase): + + def test_traverse_fails(self): + from zope.traversing.interfaces import TraversalError + + tmi = zope.pagetemplate.engine.TraversableModuleImporter() + with self.assertRaises(TraversalError): + tmi.traverse('zope.cannot exist', ()) + + with self.assertRaises(TraversalError): + tmi.traverse('zope.pagetemplate.engine.DNE', ()) + + + with self.assertRaises(TraversalError): + tmi.traverse('pickle.no_sub_module', ()) + + +class TestAppPT(unittest.TestCase): + + def test_apppt_engine(self): + self.assertIs(zope.pagetemplate.engine.AppPT().pt_getEngine(), + zope.pagetemplate.engine.Engine) + + def test_trustedapppt_engine(self): + self.assertIs(zope.pagetemplate.engine.TrustedAppPT().pt_getEngine(), + zope.pagetemplate.engine.TrustedEngine) 
+ + +def test_suite(): + + checker = RENormalizing([ + # Python 3 includes module name in exceptions + (re.compile(r"zope.security.interfaces.ForbiddenAttribute"), + "ForbiddenAttribute"), + (re.compile(r""), + ""), + (re.compile(r""), ""), + # PyPy/pure-Python implementation + (re.compile(r""), + ""), + ]) + + suite = unittest.defaultTestLoader.loadTestsFromName(__name__) + suite.addTest(doctest.DocTestSuite('zope.pagetemplate.engine', + checker=checker)) + return suite + + +if __name__ == '__main__': + unittest.main(defaultTest='test_suite') diff --git a/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/tests/test_htmltests.py b/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/tests/test_htmltests.py new file mode 100644 index 0000000..1550634 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/tests/test_htmltests.py @@ -0,0 +1,157 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Page Template HTML Tests +""" +import unittest + +from zope.pagetemplate.tests import util +from zope.pagetemplate.pagetemplate import PageTemplate + + +class Folder(object): + context = property(lambda self: self) + +class HTMLTests(unittest.TestCase): + + def setUp(self): + self.folder = f = Folder() + f.laf = PageTemplate() + f.t = PageTemplate() + + def getProducts(self): + return [ + { + 'description': ('This is the tee for those who LOVE Zope. 
' + 'Show your heart on your tee.'), + 'price': 12.99, 'image': 'smlatee.jpg' + }, + { + 'description': ('This is the tee for Jim Fulton. ' + 'He\'s the Zope Pope!'), + 'price': 11.99, 'image': 'smpztee.jpg' + }, + ] + + def test_1(self): + laf = self.folder.laf + laf.write(util.read_input('teeshoplaf.html')) + expect = util.read_output('teeshoplaf.html') + util.check_html(expect, laf()) + + def test_2(self): + self.folder.laf.write(util.read_input('teeshoplaf.html')) + + t = self.folder.t + t.write(util.read_input('teeshop2.html')) + expect = util.read_output('teeshop2.html') + out = t(laf=self.folder.laf, getProducts=self.getProducts) + util.check_html(expect, out) + + + def test_3(self): + self.folder.laf.write(util.read_input('teeshoplaf.html')) + + t = self.folder.t + t.write(util.read_input('teeshop1.html')) + expect = util.read_output('teeshop1.html') + out = t(laf=self.folder.laf, getProducts=self.getProducts) + util.check_html(expect, out) + + def test_SimpleLoop(self): + t = self.folder.t + t.write(util.read_input('loop1.html')) + expect = util.read_output('loop1.html') + out = t() + util.check_html(expect, out) + + def test_GlobalsShadowLocals(self): + t = self.folder.t + t.write(util.read_input('globalsshadowlocals.html')) + expect = util.read_output('globalsshadowlocals.html') + out = t() + util.check_html(expect, out) + + def test_StringExpressions(self): + t = self.folder.t + t.write(util.read_input('stringexpression.html')) + expect = util.read_output('stringexpression.html') + out = t() + util.check_html(expect, out) + + def test_ReplaceWithNothing(self): + t = self.folder.t + t.write(util.read_input('checknothing.html')) + expect = util.read_output('checknothing.html') + out = t() + util.check_html(expect, out) + + def test_WithXMLHeader(self): + t = self.folder.t + t.write(util.read_input('checkwithxmlheader.html')) + expect = util.read_output('checkwithxmlheader.html') + out = t() + util.check_html(expect, out) + + def test_NotExpression(self): 
+ t = self.folder.t + t.write(util.read_input('checknotexpression.html')) + expect = util.read_output('checknotexpression.html') + out = t() + util.check_html(expect, out) + + def test_PathNothing(self): + t = self.folder.t + t.write(util.read_input('checkpathnothing.html')) + expect = util.read_output('checkpathnothing.html') + out = t() + util.check_html(expect, out) + + def test_PathAlt(self): + t = self.folder.t + t.write(util.read_input('checkpathalt.html')) + expect = util.read_output('checkpathalt.html') + out = t() + util.check_html(expect, out) + + def test_translation(self): + from zope.i18nmessageid import MessageFactory + _ = MessageFactory('pttest') + msg = _("Translate this!") + + t = self.folder.t + t.write(util.read_input('translation.html')) + expect = util.read_output('translation.html') + out = t(msg=msg) + util.check_html(expect, out) + + def test_recursion(self): + t = self.folder.t + t.write(util.read_input('recursive.html')) + expect = util.read_output('recursive.html') + context = dict(name='root', + children=[dict(name='first', children=[]), + dict(name='second', children=[])]) + namespace = dict(template=t, options={}, args=(), + nothing=None, context=context) + out = t.pt_render(namespace) + # crude way of normalizing whitespace + expect = expect.replace(' ', '').replace('\n\n', '\n') + out = out.replace(' ', '').replace('\n\n', '\n') + util.check_html(expect, out) + # https://bugs.launchpad.net/zope.pagetemplate/+bug/732972 + errors = t.pt_errors(namespace, check_macro_expansion=False) + self.assertFalse(errors) + +def test_suite(): + return unittest.defaultTestLoader.loadTestsFromName(__name__) diff --git a/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/tests/test_ptfile.py b/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/tests/test_ptfile.py new file mode 100644 index 0000000..e5cc2e9 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/tests/test_ptfile.py @@ -0,0 +1,222 @@ 
+############################################################################## +# +# Copyright (c) 2004 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Tests of PageTemplateFile. +""" +import os +import tempfile +import unittest + +import six +from zope.pagetemplate.pagetemplatefile import PageTemplateFile + +class AbstractPTCase(object): + + def get_pt(self, text=b''): + with tempfile.NamedTemporaryFile(mode='wb', delete=False) as f: + f.write(text) + self.addCleanup(os.unlink, f.name) + pt = PageTemplateFile(f.name) + pt.read() + return pt + +class TypeSniffingTestCase(AbstractPTCase, + unittest.TestCase): + + def check_content_type(self, text, expected_type): + pt = self.get_pt(text) + self.assertEqual(pt.content_type, expected_type) + + def test_sniffer_xml_ascii(self): + self.check_content_type( + b"", + "text/xml") + self.check_content_type( + b"", + "text/xml") + + def test_sniffer_xml_utf8(self): + # w/out byte order mark + self.check_content_type( + b"", + "text/xml") + self.check_content_type( + b"", + "text/xml") + # with byte order mark + self.check_content_type( + b"\xef\xbb\xbf", + "text/xml") + self.check_content_type( + b"\xef\xbb\xbf", + "text/xml") + + def test_sniffer_xml_utf16_be(self): + # w/out byte order mark + self.check_content_type( + b"\0<\0?\0x\0m\0l\0 \0v\0e\0r\0s\0i\0o\0n\0=\0'\01\0.\0000\0'" + b"\0 \0e\0n\0c\0o\0d\0i\0n\0g\0=\0'\0u\0t\0f\0-\08\0'\0?\0>" + b"\0<\0d\0o\0c\0/\0>", + "text/xml") + self.check_content_type( + 
b"\0<\0?\0x\0m\0l\0\t\0v\0e\0r\0s\0i\0o\0n\0=\0'\01\0.\0000\0'" + b"\0 \0e\0n\0c\0o\0d\0i\0n\0g\0=\0'\0u\0t\0f\0-\08\0'\0?\0>" + b"\0<\0d\0o\0c\0/\0>", + "text/xml") + # with byte order mark + self.check_content_type( + b"\xfe\xff" + b"\0<\0?\0x\0m\0l\0 \0v\0e\0r\0s\0i\0o\0n\0=\0'\01\0.\0000\0'" + b"\0 \0e\0n\0c\0o\0d\0i\0n\0g\0=\0'\0u\0t\0f\0-\08\0'\0?\0>" + b"\0<\0d\0o\0c\0/\0>", + "text/xml") + self.check_content_type( + b"\xfe\xff" + b"\0<\0?\0x\0m\0l\0\t\0v\0e\0r\0s\0i\0o\0n\0=\0'\01\0.\0000\0'" + b"\0 \0e\0n\0c\0o\0d\0i\0n\0g\0=\0'\0u\0t\0f\0-\08\0'\0?\0>" + b"\0<\0d\0o\0c\0/\0>", + "text/xml") + + def test_sniffer_xml_utf16_le(self): + # w/out byte order mark + self.check_content_type( + b"<\0?\0x\0m\0l\0 \0v\0e\0r\0s\0i\0o\0n\0=\0'\01\0.\0000\0'\0" + b" \0e\0n\0c\0o\0d\0i\0n\0g\0=\0'\0u\0t\0f\0-\08\0'\0?\0>\0" + b"<\0d\0o\0c\0/\0>\n", + "text/xml") + self.check_content_type( + b"<\0?\0x\0m\0l\0\t\0v\0e\0r\0s\0i\0o\0n\0=\0'\01\0.\0000\0'\0" + b" \0e\0n\0c\0o\0d\0i\0n\0g\0=\0'\0u\0t\0f\0-\08\0'\0?\0>\0" + b"<\0d\0o\0c\0/\0>\0", + "text/xml") + # with byte order mark + self.check_content_type( + b"\xff\xfe" + b"<\0?\0x\0m\0l\0 \0v\0e\0r\0s\0i\0o\0n\0=\0'\01\0.\0000\0'\0" + b" \0e\0n\0c\0o\0d\0i\0n\0g\0=\0'\0u\0t\0f\0-\08\0'\0?\0>\0" + b"<\0d\0o\0c\0/\0>\0", + "text/xml") + self.check_content_type( + b"\xff\xfe" + b"<\0?\0x\0m\0l\0\t\0v\0e\0r\0s\0i\0o\0n\0=\0'\01\0.\0000\0'\0" + b" \0e\0n\0c\0o\0d\0i\0n\0g\0=\0'\0u\0t\0f\0-\08\0'\0?\0>\0" + b"<\0d\0o\0c\0/\0>\0", + "text/xml") + + HTML_PUBLIC_ID = "-//W3C//DTD HTML 4.01 Transitional//EN" + HTML_SYSTEM_ID = "http://www.w3.org/TR/html4/loose.dtd" + + def test_sniffer_html_ascii(self): + self.check_content_type( + ("" + % self.HTML_SYSTEM_ID).encode("utf-8"), + "text/html") + self.check_content_type( + b"sample document", + "text/html") + + @unittest.expectedFailure + def test_sniffer_xml_simple(self): + # TODO: This reflects a case that simply isn't handled by the + # sniffer; there are many, but it gets it right 
more often than + # before. This case actually returns text/html + self.check_content_type(b"", + "text/xml") + + def test_html_default_encoding(self): + pt = self.get_pt( + b"" + # 'Test' in russian (utf-8) + b"\xd0\xa2\xd0\xb5\xd1\x81\xd1\x82" + b"") + rendered = pt() + self.assertTrue(isinstance(rendered, six.text_type)) + self.assertEqual(rendered.strip(), + (u"" + u"\u0422\u0435\u0441\u0442" + u"")) + + def test_html_encoding_by_meta(self): + pt = self.get_pt( + b"" + # 'Test' in russian (windows-1251) + b"\xd2\xe5\xf1\xf2" + b'' + b"") + rendered = pt() + self.assertTrue(isinstance(rendered, six.text_type)) + self.assertEqual(rendered.strip(), + (u"" + u"\u0422\u0435\u0441\u0442" + u"")) + + def test_xhtml(self): + pt = self.get_pt( + b"" + # 'Test' in russian (windows-1251) + b"\xd2\xe5\xf1\xf2" + b'' + b"") + rendered = pt() + self.assertTrue(isinstance(rendered, six.text_type)) + self.assertEqual(rendered.strip(), + (u"" + u"\u0422\u0435\u0441\u0442" + u"")) + + +class TestPageTemplateFile(AbstractPTCase, + unittest.TestCase): + + def test_no_such_file(self): + with self.assertRaises(ValueError): + PageTemplateFile('this file does not exist') + + def test_prefix_str(self): + pt = PageTemplateFile(os.path.basename(__file__), + _prefix=os.path.dirname(__file__)) + self.assertEqual(pt.filename, __file__) + + + def test_cook_no_debug(self): + pt = self.get_pt() + pt._v_debug = False + pt._cook_check() + self.assertTrue(pt._v_last_read) + lr = pt._v_last_read + pt._cook_check() + self.assertEqual(lr, pt._v_last_read) + + + def test_cook_mtime_fails(self): + pt = self.get_pt() + + getmtime = os.path.getmtime + def bad(_path): + raise OSError() + os.path.getmtime = bad + try: + pt._cook_check() + finally: + os.path.getmtime = getmtime + + self.assertEqual(0, pt._v_last_read) + + def test_pickle_not_allowed(self): + import pickle + pt = self.get_pt() + + with self.assertRaises(TypeError): + pickle.dumps(pt) diff --git 
a/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/tests/trusted.py b/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/tests/trusted.py new file mode 100644 index 0000000..64bea91 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/tests/trusted.py @@ -0,0 +1,20 @@ +############################################################################## +# +# Copyright (c) 2004 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Sample of a module imported by a trusted module. + +This module won't be imported by an untrusted template using a +path:modules/... expression. +""" + +x = 42 diff --git a/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/tests/util.py b/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/tests/util.py new file mode 100644 index 0000000..5ef2ad3 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/pagetemplate/tests/util.py @@ -0,0 +1,87 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. 
+# +############################################################################## +"""Utilities +""" +from __future__ import print_function +import os +import re +import sys +import unittest +import zope.pagetemplate.tests + +class arg(object): + __allow_access_to_unprotected_subobjects__ = 1 + + def __init__(self, nn, aa): + self.num, self.arg = nn, aa + + def __str__(self): + return str(self.arg) + +class argv(object): + __allow_access_to_unprotected_subobjects__ = 1 + + def __init__(self, argv=None): + args = self.args = [] + argv = argv if argv is not None else sys.argv[1:] + for aa in argv: + args.append(arg(len(args) + 1, aa)) + + context = property(lambda self: self) + +class _Test(unittest.TestCase): + + def runTest(self): # pragma: no cover 2.7 compatibility + return + +_assertEqual = _Test().assertEqual +del _Test + +def check_html(s1, s2): + s1 = normalize_html(s1) + s2 = normalize_html(s2) + _assertEqual(s1, s2, "HTML Output Changed") + +def check_xml(s1, s2): + s1 = normalize_xml(s1) + s2 = normalize_xml(s2) + _assertEqual(s1, s2, 'XML Output Changed') + +def normalize_html(s): + s = re.sub(r"[ \t]+", " ", s) + s = re.sub(r"/>", ">", s) + return s + +def normalize_xml(s): + s = re.sub(r"\s+", " ", s) + s = re.sub(r"(?s)\s+<", "<", s) + s = re.sub(r"(?s)>\s+", ">", s) + return s + + + + +here = os.path.dirname(zope.pagetemplate.tests.__file__) +input_dir = os.path.join(here, 'input') +output_dir = os.path.join(here, 'output') + +def read_input(filename): + filename = os.path.join(input_dir, filename) + with open(filename, 'r') as f: + return f.read() + +def read_output(filename): + filename = os.path.join(output_dir, filename) + with open(filename, 'r') as f: + return f.read() diff --git a/thesisenv/lib/python3.6/site-packages/zope/proxy/__init__.py b/thesisenv/lib/python3.6/site-packages/zope/proxy/__init__.py new file mode 100644 index 0000000..620e565 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/proxy/__init__.py @@ -0,0 +1,552 @@ 
+############################################################################## +# +# Copyright (c) 2003 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""More convenience functions for dealing with proxies. +""" +import operator +import os +import pickle +import sys + +from zope.interface import moduleProvides +from zope.proxy.interfaces import IProxyIntrospection + +moduleProvides(IProxyIntrospection) +__all__ = tuple(IProxyIntrospection) + +def ProxyIterator(p): + yield p + while isProxy(p): + p = getProxiedObject(p) + yield p + + +_MARKER = object() + +def _WrapperType_Lookup(type_, name): + """ + Looks up information in class dictionaries in MRO + order, ignoring the proxy type itself. + + Returns the first found object, or _MARKER + """ + + for base in type_.mro(): + if base is AbstractPyProxyBase: + continue + res = base.__dict__.get(name, _MARKER) + if res is not _MARKER: + return res + return _MARKER + +def _get_wrapped(self): + """ + Helper method to access the wrapped object. + """ + return super(AbstractPyProxyBase, self).__getattribute__('_wrapped') + +class _EmptyInterfaceDescriptor(object): + """A descriptor for the attributes used on the class by the + Python implementation of `zope.interface`. + + When wrapping builtin types, these descriptors prevent the objects + we find in the AbstractPyProxyBase from being used. 
+ """ + + def __get__(self, inst, klass): + raise AttributeError() + + def __set__(self, inst, value): + raise TypeError() + + def __delete__(self, inst): + pass + + def __iter__(self): + return self + + def __next__(self): + raise StopIteration() + next = __next__ + +class _ProxyMetaclass(type): + # The metaclass is applied after the class definition + # for Py2/Py3 compatibility. + __implemented__ = _EmptyInterfaceDescriptor() + +class AbstractPyProxyBase(object): + """ + A reference implementation that cannot be instantiated. Most users + will want to use :class:`PyProxyBase`. + + This type is intended to be used in multiple-inheritance + scenarios, where another super class already has defined + ``__slots__``. In order to subclass both that class and this + class, you must include the ``_wrapped`` value in your own + ``__slots__`` definition (or else you will get the infamous + TypeError: "multiple bases have instance lay-out conflicts") + """ + __slots__ = () + + def __new__(cls, value=None): + # Some subclasses (zope.security.proxy) fail to pass the object + inst = super(AbstractPyProxyBase, cls).__new__(cls) + inst._wrapped = value + return inst + + def __init__(self, obj): + self._wrapped = obj + + def __call__(self, *args, **kw): + return self._wrapped(*args, **kw) + + def __repr__(self): + return repr(self._wrapped) + + def __str__(self): + return str(self._wrapped) + + def __unicode__(self): + return unicode(self._wrapped) + + def __reduce__(self): # pragma: no cover (__reduce_ex__ prevents normal) + raise pickle.PicklingError + + def __reduce_ex__(self, proto): + raise pickle.PicklingError + + # Rich comparison protocol + def __lt__(self, other): + return self._wrapped < other + + def __le__(self, other): + return self._wrapped <= other + + def __eq__(self, other): + return self._wrapped == other + + def __ne__(self, other): + return self._wrapped != other + + def __gt__(self, other): + return self._wrapped > other + + def __ge__(self, other): + return 
self._wrapped >= other + + def __nonzero__(self): + return bool(self._wrapped) + __bool__ = __nonzero__ # Python3 compat + + def __hash__(self): + return hash(self._wrapped) + + # Attribute protocol + def __getattribute__(self, name): + # Try to avoid accessing the _wrapped value until we need to. + # We don't know how subclasses may be storing it + # (e.g., persistent subclasses) + if name == '_wrapped': + return _get_wrapped(self) + + if name in ('__class__', '__module__'): + # __class__ and __module__ are special cased in the C + # implementation, because we will always find them on the + # type of this object if we are being subclassed + return getattr(_get_wrapped(self), name) + + if name in ('__reduce__', '__reduce_ex__'): + # These things we specifically override and no one + # can stop us, not even a subclass + return object.__getattribute__(self, name) + + # First, look for descriptors in this object's type + type_self = type(self) + descriptor = _WrapperType_Lookup(type_self, name) + if descriptor is _MARKER: + # Nothing in the class, go straight to the wrapped object + return getattr(_get_wrapped(self), name) + + if hasattr(descriptor, '__get__'): + if not hasattr(descriptor, '__set__'): + # Non-data-descriptor: call through to the wrapped object + # to see if it's there + try: + return getattr(_get_wrapped(self), name) + except AttributeError: + pass + # Data-descriptor on this type. 
Call it + return descriptor.__get__(self, type_self) + return descriptor + + def __getattr__(self, name): + return getattr(self._wrapped, name) + + def __setattr__(self, name, value): + if name == '_wrapped': + return super(AbstractPyProxyBase, self).__setattr__(name, value) + + # First, look for descriptors in this object's type + type_self = type(self) + descriptor = _WrapperType_Lookup(type_self, name) + if descriptor is _MARKER or not hasattr(descriptor, '__set__'): + # Nothing in the class that's a descriptor, + # go straight to the wrapped object + return setattr(self._wrapped, name, value) + + return object.__setattr__(self, name, value) + + def __delattr__(self, name): + if name == '_wrapped': + raise AttributeError() + delattr(self._wrapped, name) + + # Container protocols + + def __len__(self): + return len(self._wrapped) + + def __getslice__(self, start, stop): + try: + getslice = type(self._wrapped).__getslice__ + except AttributeError: + return self.__getitem__(slice(start, stop)) + return getslice(self._wrapped, start, stop) + + def __getitem__(self, key): + return self._wrapped[key] + + def __setslice__(self, start, stop, value): + try: + setslice = type(self._wrapped).__setslice__ + except AttributeError: + return self.__setitem__(slice(start, stop), value) + return setslice(self._wrapped, start, stop, value) + + def __setitem__(self, key, value): + self._wrapped[key] = value + + def __delitem__(self, key): + del self._wrapped[key] + + def __iter__(self): + # This handles a custom __iter__ and generator support at the same time. + return iter(self._wrapped) + + def next(self): + # Called when we wrap an iterator itself. 
+ return self._wrapped.next() + + def __next__(self): # pragma: no cover Python3 + return self._wrapped.__next__() + + # Python 2.7 won't let the C wrapper support __reversed__ :( + #def __reversed__(self): + # return reversed(self._wrapped) + + def __contains__(self, item): + return item in self._wrapped + + # Numeric protocol: unary operators + def __neg__(self): + return -self._wrapped + + def __pos__(self): + return +self._wrapped + + def __abs__(self): + return abs(self._wrapped) + + def __invert__(self): + return ~self._wrapped + + # Numeric protocol: unary conversions + def __complex__(self): + return complex(self._wrapped) + + def __int__(self): + return int(self._wrapped) + + def __long__(self): + return long(self._wrapped) + + def __float__(self): + return float(self._wrapped) + + def __oct__(self): + return oct(self._wrapped) + + def __hex__(self): + return hex(self._wrapped) + + def __index__(self): + return operator.index(self._wrapped) + + # Numeric protocol: binary coercion + def __coerce__(self, other): + left, right = coerce(self._wrapped, other) + if left == self._wrapped and type(left) is type(self._wrapped): + left = self + return left, right + + # Numeric protocol: binary arithmetic operators + def __add__(self, other): + return self._wrapped + other + + def __sub__(self, other): + return self._wrapped - other + + def __mul__(self, other): + return self._wrapped * other + + def __floordiv__(self, other): + return self._wrapped // other + + def __truediv__(self, other): # pragma: no cover + # Only one of __truediv__ and __div__ is meaningful at any one time. + return self._wrapped / other + + def __div__(self, other): # pragma: no cover + # Only one of __truediv__ and __div__ is meaningful at any one time. 
+ return self._wrapped / other + + def __mod__(self, other): + return self._wrapped % other + + def __divmod__(self, other): + return divmod(self._wrapped, other) + + def __pow__(self, other, modulus=None): + if modulus is None: + return pow(self._wrapped, other) + return pow(self._wrapped, other, modulus) + + def __radd__(self, other): + return other + self._wrapped + + def __rsub__(self, other): + return other - self._wrapped + + def __rmul__(self, other): + return other * self._wrapped + + def __rfloordiv__(self, other): + return other // self._wrapped + + def __rtruediv__(self, other): # pragma: no cover + # Only one of __rtruediv__ and __rdiv__ is meaningful at any one time. + return other / self._wrapped + + def __rdiv__(self, other): # pragma: no cover + # Only one of __rtruediv__ and __rdiv__ is meaningful at any one time. + return other / self._wrapped + + def __rmod__(self, other): + return other % self._wrapped + + def __rdivmod__(self, other): + return divmod(other, self._wrapped) + + def __rpow__(self, other, modulus=None): + if modulus is None: + return pow(other, self._wrapped) + # We can't actually get here, because we can't lie about our type() + return pow(other, self._wrapped, modulus) # pragma: no cover + + # Numeric protocol: binary bitwise operators + def __lshift__(self, other): + return self._wrapped << other + + def __rshift__(self, other): + return self._wrapped >> other + + def __and__(self, other): + return self._wrapped & other + + def __xor__(self, other): + return self._wrapped ^ other + + def __or__(self, other): + return self._wrapped | other + + def __rlshift__(self, other): + return other << self._wrapped + + def __rrshift__(self, other): + return other >> self._wrapped + + def __rand__(self, other): + return other & self._wrapped + + def __rxor__(self, other): + return other ^ self._wrapped + + def __ror__(self, other): + return other | self._wrapped + + # Numeric protocol: binary in-place operators + def __iadd__(self, other): + 
self._wrapped += other + return self + + def __isub__(self, other): + self._wrapped -= other + return self + + def __imul__(self, other): + self._wrapped *= other + return self + + def __idiv__(self, other): # pragma: no cover + # Only one of __itruediv__ and __idiv__ is meaningful at any one time. + self._wrapped /= other + return self + + def __itruediv__(self, other): # pragma: no cover + # Only one of __itruediv__ and __idiv__ is meaningful at any one time. + self._wrapped /= other + return self + + def __ifloordiv__(self, other): + self._wrapped //= other + return self + + def __imod__(self, other): + self._wrapped %= other + return self + + def __ilshift__(self, other): + self._wrapped <<= other + return self + + def __irshift__(self, other): + self._wrapped >>= other + return self + + def __iand__(self, other): + self._wrapped &= other + return self + + def __ixor__(self, other): + self._wrapped ^= other + return self + + def __ior__(self, other): + self._wrapped |= other + return self + + def __ipow__(self, other, modulus=None): + if modulus is None: + self._wrapped **= other + else: # pragma: no cover + # There is no syntax which triggers in-place pow w/ modulus + self._wrapped = pow(self._wrapped, other, modulus) + return self + +AbstractPyProxyBase = _ProxyMetaclass(str('AbstractPyProxyBase'), (), + dict(AbstractPyProxyBase.__dict__)) + +class PyProxyBase(AbstractPyProxyBase): + """Reference implementation. 
+ """ + __slots__ = ('_wrapped', ) + + +def py_getProxiedObject(obj): + if isinstance(obj, PyProxyBase): + return obj._wrapped + return obj + +def py_setProxiedObject(obj, new_value): + if not isinstance(obj, PyProxyBase): + raise TypeError('Not a proxy') + old, obj._wrapped = obj._wrapped, new_value + return old + +def py_isProxy(obj, klass=None): + if klass is None: + klass = PyProxyBase + return isinstance(obj, klass) + +def py_sameProxiedObjects(lhs, rhs): + while isinstance(lhs, PyProxyBase): + lhs = super(PyProxyBase, lhs).__getattribute__('_wrapped') + while isinstance(rhs, PyProxyBase): + rhs = super(PyProxyBase, rhs).__getattribute__('_wrapped') + return lhs is rhs + +def py_queryProxy(obj, klass=None, default=None): + if klass is None: + klass = PyProxyBase + while obj is not None and not isinstance(obj, klass): + obj = getattr(obj, '_wrapped', None) + if obj is not None: + return obj + return default + +def py_queryInnerProxy(obj, klass=None, default=None): + if klass is None: + klass = PyProxyBase + found = [] + while obj is not None: + if isinstance(obj, klass): + found.append(obj) # stack + obj = getattr(obj, '_wrapped', None) + if found: + return found[-1] + return default + +def py_removeAllProxies(obj): + while isinstance(obj, PyProxyBase): + obj = super(PyProxyBase, obj).__getattribute__('_wrapped') + return obj + +_c_available = False +if 'PURE_PYTHON' not in os.environ: + try: + from zope.proxy._zope_proxy_proxy import ProxyBase as _c_available + except ImportError: # pragma: no cover + pass + +class PyNonOverridable(object): + "Deprecated, only for BWC." 
+ def __init__(self, method_desc): # pragma: no cover PyPy + self.desc = method_desc + +if _c_available: + # Python API: not used in this module + from zope.proxy._zope_proxy_proxy import ProxyBase + from zope.proxy._zope_proxy_proxy import getProxiedObject + from zope.proxy._zope_proxy_proxy import setProxiedObject + from zope.proxy._zope_proxy_proxy import isProxy + from zope.proxy._zope_proxy_proxy import sameProxiedObjects + from zope.proxy._zope_proxy_proxy import queryProxy + from zope.proxy._zope_proxy_proxy import queryInnerProxy + from zope.proxy._zope_proxy_proxy import removeAllProxies + + # API for proxy-using C extensions. + from zope.proxy._zope_proxy_proxy import _CAPI + +else: # pragma: no cover + # no C extension available, fall back + ProxyBase = PyProxyBase + getProxiedObject = py_getProxiedObject + setProxiedObject = py_setProxiedObject + isProxy = py_isProxy + sameProxiedObjects = py_sameProxiedObjects + queryProxy = py_queryProxy + queryInnerProxy = py_queryInnerProxy + removeAllProxies = py_removeAllProxies + +def non_overridable(func): + return property(lambda self: func.__get__(self)) diff --git a/thesisenv/lib/python3.6/site-packages/zope/proxy/_compat.py b/thesisenv/lib/python3.6/site-packages/zope/proxy/_compat.py new file mode 100644 index 0000000..bbd91c3 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/proxy/_compat.py @@ -0,0 +1,3 @@ +import sys + +PY3 = sys.version_info[0] >= 3 diff --git a/thesisenv/lib/python3.6/site-packages/zope/proxy/_zope_proxy_proxy.c b/thesisenv/lib/python3.6/site-packages/zope/proxy/_zope_proxy_proxy.c new file mode 100644 index 0000000..b64874a --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/proxy/_zope_proxy_proxy.c @@ -0,0 +1,1225 @@ +/*############################################################################ +# +# Copyright (c) 2004 Zope Foundation and Contributors. +# All Rights Reserved. 
+# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################*/ + +/* + * This file is also used as a really extensive macro in + * ../container/_zope_container_contained.c. If you need to + * change this file, you need to "svn copy" it to ../container/. + * + * This approach is taken to allow the sources for the two packages + * to be compilable when the relative locations of these aren't + * related in the same way as they are in a checkout. + * + * This will be revisited in the future, but works for now. + */ + +#include "Python.h" +#include "modsupport.h" + +#define PROXY_MODULE +#include "proxy.h" + +static PyTypeObject ProxyType; + +#define Proxy_Check(wrapper) (PyObject_TypeCheck((wrapper), &ProxyType)) + +static PyObject * +empty_tuple = NULL; + + +#if PY_VERSION_HEX < 0x02070000 + #define PyCapsule_New(pointer, name, destr) \ + PyCObject_FromVoidPtr(pointer, destr) +#endif + +// Compatibility with Python 2 +#if PY_MAJOR_VERSION < 3 + #define MOD_ERROR_VAL + + #define MOD_SUCCESS_VAL(val) + + #define MOD_INIT(name) void init##name(void) + + #define MOD_DEF(ob, name, doc, methods) \ + ob = Py_InitModule3(name, methods, doc); + +#else + #define MOD_ERROR_VAL NULL + + #define MOD_SUCCESS_VAL(val) val + + #define MOD_INIT(name) PyMODINIT_FUNC PyInit_##name(void) + + #define MOD_DEF(ob, name, doc, methods) \ + static struct PyModuleDef moduledef = { \ + PyModuleDef_HEAD_INIT, name, doc, -1, methods, }; \ + ob = PyModule_Create(&moduledef); +#endif + + + +/* + * Slot methods. 
+ */ + +static PyObject * +wrap_new(PyTypeObject *type, PyObject *args, PyObject *kwds) +{ + PyObject *result = NULL; + PyObject *object; + + if (PyArg_UnpackTuple(args, "__new__", 1, 1, &object)) { + if (kwds != NULL && PyDict_Size(kwds) != 0) { + PyErr_SetString(PyExc_TypeError, + "proxy.__new__ does not accept keyword args"); + return NULL; + } + result = PyType_GenericNew(type, args, kwds); + if (result != NULL) { + ProxyObject *wrapper = (ProxyObject *) result; + Py_INCREF(object); + wrapper->proxy_object = object; + } + } + return result; +} + +static int +wrap_init(PyObject *self, PyObject *args, PyObject *kwds) +{ + int result = -1; + PyObject *object; + + if (PyArg_UnpackTuple(args, "__init__", 1, 1, &object)) { + ProxyObject *wrapper = (ProxyObject *)self; + if (kwds != NULL && PyDict_Size(kwds) != 0) { + PyErr_SetString(PyExc_TypeError, + "proxy.__init__ does not accept keyword args"); + return -1; + } + /* If the object in this proxy is not the one we + * received in args, replace it with the new one. 
+ */ + if (wrapper->proxy_object != object) { + PyObject *temp = wrapper->proxy_object; + Py_INCREF(object); + wrapper->proxy_object = object; + Py_DECREF(temp); + } + result = 0; + } + return result; +} + +static int +wrap_traverse(PyObject *self, visitproc visit, void *arg) +{ + PyObject *ob = Proxy_GET_OBJECT(self); + if (ob != NULL) + return visit(ob, arg); + else + return 0; +} + +static int +wrap_clear(PyObject *self) +{ + ProxyObject *proxy = (ProxyObject *)self; + PyObject *temp = proxy->proxy_object; + + if (temp != NULL) { + proxy->proxy_object = NULL; + Py_DECREF(temp); + } + return 0; +} + +static PyObject * +wrap_richcompare(PyObject* self, PyObject* other, int op) +{ + if (Proxy_Check(self)) { + self = Proxy_GET_OBJECT(self); + } + else { + other = Proxy_GET_OBJECT(other); + } + return PyObject_RichCompare(self, other, op); +} + +static PyObject * +wrap_iter(PyObject *self) +{ + return PyObject_GetIter(Proxy_GET_OBJECT(self)); +} + +static PyObject * +wrap_iternext(PyObject *self) +{ + return PyIter_Next(Proxy_GET_OBJECT(self)); +} + +static void +wrap_dealloc(PyObject *self) +{ + PyObject_GC_UnTrack(self); + (void) wrap_clear(self); + self->ob_type->tp_free(self); +} + +/* A variant of _PyType_Lookup that doesn't look in ProxyType. + * + * If argument search_wrappertype is nonzero, we can look in WrapperType. + */ +PyObject * +WrapperType_Lookup(PyTypeObject *type, PyObject *name) +{ + int i, n; + PyObject *mro, *res, *base, *dict; + + /* Look in tp_dict of types in MRO */ + mro = type->tp_mro; + + /* If mro is NULL, the type is either not yet initialized + by PyType_Ready(), or already cleared by type_clear(). + Either way the safest thing to do is to return NULL. */ + if (mro == NULL) + return NULL; + + assert(PyTuple_Check(mro)); + + n = PyTuple_GET_SIZE(mro) + - 1; /* We don't want to look at the last item, which is object. 
*/ + + for (i = 0; i < n; i++) { + base = PyTuple_GET_ITEM(mro, i); + + if (((PyTypeObject *)base) != &ProxyType) { +#if PY_MAJOR_VERSION < 3 + if (PyClass_Check(base)) + dict = ((PyClassObject *)base)->cl_dict; + else +#endif + { + assert(PyType_Check(base)); + dict = ((PyTypeObject *)base)->tp_dict; + } + + assert(dict && PyDict_Check(dict)); + res = PyDict_GetItem(dict, name); + if (res != NULL) + return res; + } + } + return NULL; +} + + +static PyObject * +wrap_getattro(PyObject *self, PyObject *name) +{ + PyObject *wrapped; + PyObject *descriptor; + PyObject *res = NULL; + const char *name_as_string; + int maybe_special_name; + +#if PY_MAJOR_VERSION < 3 + name_as_string = PyString_AsString(name); +#else + name_as_string = PyUnicode_AsUTF8(name); +#endif + + if (name_as_string == NULL) { + return NULL; + } + + wrapped = Proxy_GET_OBJECT(self); + if (wrapped == NULL) { + PyErr_Format(PyExc_RuntimeError, + "object is NULL; requested to get attribute '%s'", + name_as_string); + goto finally; + } + + maybe_special_name = name_as_string[0] == '_' && name_as_string[1] == '_'; + + if (!(maybe_special_name + && (strcmp(name_as_string, "__class__") == 0 + || strcmp(name_as_string, "__module__") == 0))) { + + descriptor = WrapperType_Lookup(self->ob_type, name); + + if (descriptor != NULL) { + if (descriptor->ob_type->tp_descr_get != NULL +#if PY_MAJOR_VERSION < 3 // Always true in Python 3 + && PyType_HasFeature(descriptor->ob_type, Py_TPFLAGS_HAVE_CLASS) +#endif + ){ + if (descriptor->ob_type->tp_descr_set == NULL) + { + res = PyObject_GetAttr(wrapped, name); + if (res != NULL) + goto finally; + if (PyErr_ExceptionMatches(PyExc_AttributeError)) + PyErr_Clear(); + else + goto finally; + } + + res = descriptor->ob_type->tp_descr_get( + descriptor, + self, + (PyObject *)self->ob_type); + } + else + { + Py_INCREF(descriptor); + res = descriptor; + } + + goto finally; + } + } + res = PyObject_GetAttr(wrapped, name); + +finally: + return res; +} + +static int 
+wrap_setattro(PyObject *self, PyObject *name, PyObject *value) +{ + PyObject *wrapped; + PyObject *descriptor; + const char *name_as_string; + int res = -1; + +#if PY_MAJOR_VERSION < 3 + name_as_string = PyString_AsString(name); +#else + name_as_string = PyUnicode_AsUTF8(name); +#endif + + if (name_as_string == NULL) { + return NULL; + } + + descriptor = WrapperType_Lookup(self->ob_type, name); + + if (descriptor != NULL +#if PY_MAJOR_VERSION < 3 // This is always true in Python 3 (I think) + && PyType_HasFeature(descriptor->ob_type, Py_TPFLAGS_HAVE_CLASS) +#endif + && descriptor->ob_type->tp_descr_set != NULL) + { + res = descriptor->ob_type->tp_descr_set(descriptor, self, value); + goto finally; + } + + wrapped = Proxy_GET_OBJECT(self); + if (wrapped == NULL) { + PyErr_Format(PyExc_RuntimeError, + "object is NULL; requested to set attribute '%s'", + name_as_string); + goto finally; + } + res = PyObject_SetAttr(wrapped, name, value); + +finally: + return res; +} + +static int +wrap_print(PyObject *wrapper, FILE *fp, int flags) +{ + return PyObject_Print(Proxy_GET_OBJECT(wrapper), fp, flags); +} + +static PyObject * +wrap_str(PyObject *wrapper) { + return PyObject_Str(Proxy_GET_OBJECT(wrapper)); +} + +static PyObject * +wrap_repr(PyObject *wrapper) +{ + return PyObject_Repr(Proxy_GET_OBJECT(wrapper)); +} + +#if PY_MAJOR_VERSION < 3 +static int +wrap_compare(PyObject *wrapper, PyObject *v) +{ + return PyObject_Compare(Proxy_GET_OBJECT(wrapper), v); +} +#endif + +static long +wrap_hash(PyObject *self) +{ + return PyObject_Hash(Proxy_GET_OBJECT(self)); +} + +static PyObject * +wrap_call(PyObject *self, PyObject *args, PyObject *kw) +{ + if (kw) + return PyEval_CallObjectWithKeywords(Proxy_GET_OBJECT(self), + args, kw); + else + return PyObject_CallObject(Proxy_GET_OBJECT(self), args); +} + +/* + * Number methods. 
+ */ + +static PyObject * +call_int(PyObject *self) +{ +#if PY_MAJOR_VERSION < 3 + return PyNumber_Int(self); +#else + return PyNumber_Long(self); +#endif +} + +#if PY_MAJOR_VERSION < 3 // Python 3 has no long, oct or hex methods. +static PyObject * +call_long(PyObject *self) +{ + return PyNumber_Long(self); +} + +static PyObject * +call_oct(PyObject *self) +{ + PyNumberMethods *nb = self->ob_type->tp_as_number; + if (nb == NULL || nb->nb_oct== NULL) { + PyErr_SetString(PyExc_TypeError, + "object can't be converted to oct"); + return NULL; + } + return nb->nb_oct(self); +} + +static PyObject * +call_hex(PyObject *self) +{ + PyNumberMethods *nb = self->ob_type->tp_as_number; + if (nb == NULL || nb->nb_hex == NULL) { + PyErr_SetString(PyExc_TypeError, + "object can't be converted to hex"); + return NULL; + } + return nb->nb_hex(self); +} + +#endif + +static PyObject * +call_index(PyObject *self) +{ + return PyNumber_Index(self); +} + +static PyObject * +call_float(PyObject *self) +{ + return PyNumber_Float(self); +} + +static PyObject * +call_ipow(PyObject *self, PyObject *other) +{ + /* PyNumber_InPlacePower has three args. How silly. :-) */ + return PyNumber_InPlacePower(self, other, Py_None); +} + +#if PY_MAJOR_VERSION < 3 +static PyObject * +call_unicode(PyObject *self) +{ + return PyObject_Unicode(self); +} +#endif + + +typedef PyObject *(*function1)(PyObject *); + +static PyObject * +check1(ProxyObject *self, char *opname, function1 operation) +{ + PyObject *result = NULL; + + result = operation(Proxy_GET_OBJECT(self)); +#if 0 + if (result != NULL) + /* ??? create proxy for result? 
*/ + ; +#endif + return result; +} + +static PyObject * +check2(PyObject *self, PyObject *other, + char *opname, char *ropname, binaryfunc operation) +{ + PyObject *result = NULL; + PyObject *object; + + if (Proxy_Check(self)) { + object = Proxy_GET_OBJECT(self); + result = operation(object, other); + } + else if (Proxy_Check(other)) { + object = Proxy_GET_OBJECT(other); + result = operation(self, object); + } + else { + Py_INCREF(Py_NotImplemented); + return Py_NotImplemented; + } +#if 0 + if (result != NULL) + /* ??? create proxy for result? */ + ; +#endif + return result; +} + +static PyObject * +check2i(ProxyObject *self, PyObject *other, + char *opname, binaryfunc operation) +{ + PyObject *result = NULL; + PyObject *object = Proxy_GET_OBJECT(self); + + result = operation(object, other); + if (result == object) { + /* If the operation was really carried out inplace, + don't create a new proxy, but use the old one. */ + Py_INCREF(self); + Py_DECREF(object); + result = (PyObject *)self; + } +#if 0 + else if (result != NULL) + /* ??? create proxy for result? 
*/ + ; +#endif + return result; +} + +#define UNOP(NAME, CALL) \ + static PyObject *wrap_##NAME(PyObject *self) \ + { return check1((ProxyObject *)self, "__"#NAME"__", CALL); } + +#define BINOP(NAME, CALL) \ + static PyObject *wrap_##NAME(PyObject *self, PyObject *other) \ + { return check2(self, other, "__"#NAME"__", "__r"#NAME"__", CALL); } + +#define INPLACE(NAME, CALL) \ + static PyObject *wrap_i##NAME(PyObject *self, PyObject *other) \ + { return check2i((ProxyObject *)self, other, "__i"#NAME"__", CALL); } + +BINOP(add, PyNumber_Add) +BINOP(sub, PyNumber_Subtract) +BINOP(mul, PyNumber_Multiply) +#if PY_MAJOR_VERSION < 3 // Python 3 doesn't support the old integer division +BINOP(div, PyNumber_Divide) +#endif +BINOP(mod, PyNumber_Remainder) +BINOP(divmod, PyNumber_Divmod) + +static PyObject * +wrap_pow(PyObject *self, PyObject *other, PyObject *modulus) +{ + PyObject *result = NULL; + PyObject *object; + + if (Proxy_Check(self)) { + object = Proxy_GET_OBJECT(self); + result = PyNumber_Power(object, other, modulus); + } + else if (Proxy_Check(other)) { + object = Proxy_GET_OBJECT(other); + result = PyNumber_Power(self, object, modulus); + } + else if (modulus != NULL && Proxy_Check(modulus)) { + object = Proxy_GET_OBJECT(modulus); + result = PyNumber_Power(self, other, modulus); + } + else { + Py_INCREF(Py_NotImplemented); + return Py_NotImplemented; + } + return result; +} + +BINOP(lshift, PyNumber_Lshift) +BINOP(rshift, PyNumber_Rshift) +BINOP(and, PyNumber_And) +BINOP(xor, PyNumber_Xor) +BINOP(or, PyNumber_Or) + +#if PY_MAJOR_VERSION < 3 // Coercion is gone in Python 3 +static int +wrap_coerce(PyObject **p_self, PyObject **p_other) +{ + PyObject *self = *p_self; + PyObject *other = *p_other; + PyObject *object; + PyObject *left; + PyObject *right; + int r; + + assert(Proxy_Check(self)); + object = Proxy_GET_OBJECT(self); + + left = object; + right = other; + r = PyNumber_CoerceEx(&left, &right); + if (r != 0) + return r; + /* Now left and right have been 
INCREF'ed. Any new value that + comes out is proxied; any unchanged value is left unchanged. */ + if (left == object) { + /* Keep the old proxy */ + Py_INCREF(self); + Py_DECREF(left); + left = self; + } +#if 0 + else { + /* ??? create proxy for left? */ + } + if (right != other) { + /* ??? create proxy for right? */ + } +#endif + *p_self = left; + *p_other = right; + return 0; +} +#endif + +UNOP(neg, PyNumber_Negative) +UNOP(pos, PyNumber_Positive) +UNOP(abs, PyNumber_Absolute) +UNOP(invert, PyNumber_Invert) + +UNOP(int, call_int) +UNOP(float, call_float) +#if PY_MAJOR_VERSION < 3 // Python 3 has no long, oct or hex methods +UNOP(long, call_long) +UNOP(oct, call_oct) +UNOP(hex, call_hex) +#endif + +INPLACE(add, PyNumber_InPlaceAdd) +INPLACE(sub, PyNumber_InPlaceSubtract) +INPLACE(mul, PyNumber_InPlaceMultiply) +#if PY_MAJOR_VERSION < 3 // The old integer division operator is gone in Python 3 +INPLACE(div, PyNumber_InPlaceDivide) +#endif +INPLACE(mod, PyNumber_InPlaceRemainder) +INPLACE(pow, call_ipow) +INPLACE(lshift, PyNumber_InPlaceLshift) +INPLACE(rshift, PyNumber_InPlaceRshift) +INPLACE(and, PyNumber_InPlaceAnd) +INPLACE(xor, PyNumber_InPlaceXor) +INPLACE(or, PyNumber_InPlaceOr) + +BINOP(floordiv, PyNumber_FloorDivide) +BINOP(truediv, PyNumber_TrueDivide) +INPLACE(floordiv, PyNumber_InPlaceFloorDivide) +INPLACE(truediv, PyNumber_InPlaceTrueDivide) +UNOP(index, call_index) + +#if PY_MAJOR_VERSION < 3 // Python 3 has no __unicode__ method +UNOP(unicode, call_unicode) +#endif + +static int +wrap_nonzero(PyObject *self) +{ + return PyObject_IsTrue(Proxy_GET_OBJECT(self)); +} + +/* + * Sequence methods + */ + +static Py_ssize_t +wrap_length(PyObject *self) +{ + return PyObject_Length(Proxy_GET_OBJECT(self)); +} + +static PyObject * +wrap_slice(PyObject *self, Py_ssize_t start, Py_ssize_t end) +{ + /* + * Note that we have arrived here through PySequence_GetSlice + * once already, which on Python 2 adjusted indices. 
We can't call + * PySequence_GetSlice again or they will be wrong. So we directly + * call the slice method the type provides. + */ + PyObject *obj = Proxy_GET_OBJECT(self); +#if PY_MAJOR_VERSION < 3 + PySequenceMethods *m; + + m = obj->ob_type->tp_as_sequence; + if (m && m->sq_slice) { + return m->sq_slice(obj, start, end); + } +#endif + return PySequence_GetSlice(obj, start, end); +} + +static int +wrap_ass_slice(PyObject *self, Py_ssize_t i, Py_ssize_t j, PyObject *value) +{ + PyObject *obj = Proxy_GET_OBJECT(self); + if (PyList_Check(obj)) { + return PyList_SetSlice(obj, i, j, value); + } + else { + return PySequence_SetSlice(obj, i, j, value); + } +} + +static int +wrap_contains(PyObject *self, PyObject *value) +{ + return PySequence_Contains(Proxy_GET_OBJECT(self), value); +} + +/* + * Mapping methods + */ + +static PyObject * +wrap_getitem(PyObject *wrapper, PyObject *v) { + return PyObject_GetItem(Proxy_GET_OBJECT(wrapper), v); +} + +static int +wrap_setitem(PyObject *self, PyObject *key, PyObject *value) +{ + if (value == NULL) + return PyObject_DelItem(Proxy_GET_OBJECT(self), key); + else + return PyObject_SetItem(Proxy_GET_OBJECT(self), key, value); +} + +/* + * Normal methods + */ + +static char +reduce__doc__[] = +"__reduce__()\n" +"Raise an exception; this prevents proxies from being picklable by\n" +"default, even if the underlying object is picklable."; + +static PyObject * +wrap_reduce(PyObject *self) +{ + PyObject *pickle_error = NULL; + PyObject *pickle = PyImport_ImportModule("pickle"); + + if (pickle == NULL) + PyErr_Clear(); + else { + pickle_error = PyObject_GetAttrString(pickle, "PicklingError"); + if (pickle_error == NULL) + PyErr_Clear(); + } + if (pickle_error == NULL) { + pickle_error = PyExc_RuntimeError; + Py_INCREF(pickle_error); + } + PyErr_SetString(pickle_error, + "proxy instances cannot be pickled"); + Py_DECREF(pickle_error); + return NULL; +} + +static PyNumberMethods +wrap_as_number = { + wrap_add, /* nb_add */ + wrap_sub, /* 
nb_subtract */ + wrap_mul, /* nb_multiply */ +#if PY_MAJOR_VERSION < 3 + wrap_div, /* nb_divide */ +#endif + wrap_mod, /* nb_remainder */ + wrap_divmod, /* nb_divmod */ + wrap_pow, /* nb_power */ + wrap_neg, /* nb_negative */ + wrap_pos, /* nb_positive */ + wrap_abs, /* nb_absolute */ + wrap_nonzero, /* nb_nonzero */ + wrap_invert, /* nb_invert */ + wrap_lshift, /* nb_lshift */ + wrap_rshift, /* nb_rshift */ + wrap_and, /* nb_and */ + wrap_xor, /* nb_xor */ + wrap_or, /* nb_or */ +#if PY_MAJOR_VERSION < 3 + wrap_coerce, /* nb_coerce */ +#endif + wrap_int, /* nb_int */ +#if PY_MAJOR_VERSION < 3 + wrap_long, /* nb_long */ +#else + 0, /* formerly known as nb_long */ +#endif + wrap_float, /* nb_float */ +#if PY_MAJOR_VERSION < 3 + wrap_oct, /* nb_oct */ + wrap_hex, /* nb_hex */ +#endif + + /* Added in release 2.0 */ + /* These require the Py_TPFLAGS_HAVE_INPLACEOPS flag */ + wrap_iadd, /* nb_inplace_add */ + wrap_isub, /* nb_inplace_subtract */ + wrap_imul, /* nb_inplace_multiply */ +#if PY_MAJOR_VERSION < 3 + wrap_idiv, /* nb_inplace_divide */ +#endif + wrap_imod, /* nb_inplace_remainder */ + (ternaryfunc)wrap_ipow, /* nb_inplace_power */ + wrap_ilshift, /* nb_inplace_lshift */ + wrap_irshift, /* nb_inplace_rshift */ + wrap_iand, /* nb_inplace_and */ + wrap_ixor, /* nb_inplace_xor */ + wrap_ior, /* nb_inplace_or */ + + /* Added in release 2.2 */ + /* These require the Py_TPFLAGS_HAVE_CLASS flag */ + wrap_floordiv, /* nb_floor_divide */ + wrap_truediv, /* nb_true_divide */ + wrap_ifloordiv, /* nb_inplace_floor_divide */ + wrap_itruediv, /* nb_inplace_true_divide */ + wrap_index, /* nb_index */ +}; + +static PySequenceMethods +wrap_as_sequence = { + wrap_length, /* sq_length */ + 0, /* sq_concat */ + 0, /* sq_repeat */ + 0, /* sq_item */ + wrap_slice, /* sq_slice */ + 0, /* sq_ass_item */ + wrap_ass_slice, /* sq_ass_slice */ + wrap_contains, /* sq_contains */ +}; + +static PyMappingMethods +wrap_as_mapping = { + wrap_length, /* mp_length */ + wrap_getitem, /* 
mp_subscript */ + wrap_setitem, /* mp_ass_subscript */ +}; + +static PyMethodDef +wrap_methods[] = { + {"__reduce__", (PyCFunction)wrap_reduce, METH_NOARGS, reduce__doc__}, +#if PY_MAJOR_VERSION < 3 + {"__unicode__", (PyCFunction)wrap_unicode, METH_NOARGS, "" }, +#endif + {NULL, NULL}, +}; + +/* + * Note that the numeric methods are not supported. This is primarily + * because of the way coercion-less operations are performed with + * new-style numbers; since we can't tell which side of the operation + * is 'self', we can't ensure we'd unwrap the right thing to perform + * the actual operation. We also can't afford to just unwrap both + * sides the way weakrefs do, since we don't know what semantics will + * be associated with the wrapper itself. + */ + + +static PyTypeObject +ProxyType = { + PyVarObject_HEAD_INIT(NULL, 0) + "zope.proxy.ProxyBase", + sizeof(ProxyObject), + 0, + wrap_dealloc, /* tp_dealloc */ + wrap_print, /* tp_print */ + 0, /* tp_getattr */ + 0, /* tp_setattr */ +#if PY_MAJOR_VERSION < 3 + wrap_compare, /* tp_compare */ +#else + 0, /* tp_reserved */ +#endif + wrap_repr, /* tp_repr */ + &wrap_as_number, /* tp_as_number */ + &wrap_as_sequence, /* tp_as_sequence */ + &wrap_as_mapping, /* tp_as_mapping */ + wrap_hash, /* tp_hash */ + wrap_call, /* tp_call */ + wrap_str, /* tp_str */ + wrap_getattro, /* tp_getattro */ + wrap_setattro, /* tp_setattro */ + 0, /* tp_as_buffer */ +#if PY_MAJOR_VERSION < 3 + Py_TPFLAGS_DEFAULT | + Py_TPFLAGS_HAVE_GC | + Py_TPFLAGS_CHECKTYPES | + Py_TPFLAGS_BASETYPE, /* tp_flags */ +#else // Py_TPFLAGS_CHECKTYPES is always true in Python 3 and removed. 
+ Py_TPFLAGS_DEFAULT | + Py_TPFLAGS_HAVE_GC | + Py_TPFLAGS_BASETYPE, /* tp_flags */ +#endif + 0, /* tp_doc */ + wrap_traverse, /* tp_traverse */ + wrap_clear, /* tp_clear */ + wrap_richcompare, /* tp_richcompare */ + 0, /* tp_weaklistoffset */ + wrap_iter, /* tp_iter */ + wrap_iternext, /* tp_iternext */ + wrap_methods, /* tp_methods */ + 0, /* tp_members */ + 0, /* tp_getset */ + 0, /* tp_base */ + 0, /* tp_dict */ + 0, /* tp_descr_get */ + 0, /* tp_descr_set */ + 0, /* tp_dictoffset */ + wrap_init, /* tp_init */ + 0, /* tp_alloc */ + wrap_new, /* tp_new */ + 0, /*PyObject_GC_Del,*/ /* tp_free */ +}; + +static PyObject * +create_proxy(PyObject *object) +{ + PyObject *result = NULL; + PyObject *args; + + args = PyTuple_New(1); + if (args != NULL) { + Py_INCREF(object); + PyTuple_SET_ITEM(args, 0, object); + result = PyObject_CallObject((PyObject *)&ProxyType, args); + Py_DECREF(args); + } + return result; +} + +static int +api_check(PyObject *obj) +{ + return obj ? Proxy_Check(obj) : 0; +} + +static PyObject * +api_create(PyObject *object) +{ + if (object == NULL) { + PyErr_SetString(PyExc_ValueError, + "cannot create proxy around NULL"); + return NULL; + } + return create_proxy(object); +} + +static PyObject * +api_getobject(PyObject *proxy) +{ + if (proxy == NULL) { + PyErr_SetString(PyExc_RuntimeError, + "cannot pass NULL to ProxyAPI.getobject()"); + return NULL; + } + if (Proxy_Check(proxy)) + return Proxy_GET_OBJECT(proxy); + else { + PyErr_Format(PyExc_TypeError, "expected proxy object, got %s", + proxy->ob_type->tp_name); + return NULL; + } +} + +static ProxyInterface +wrapper_capi = { + &ProxyType, + api_check, + api_create, + api_getobject, +}; + +static PyObject *api_object = NULL; + + +static char +getobject__doc__[] = +"getProxiedObject(proxy) --> object\n" +"\n" +"Get the underlying object for proxy, or the object itself, if it is\n" +"not a proxy."; + +static PyObject * +wrapper_getobject(PyObject *unused, PyObject *obj) +{ + if (Proxy_Check(obj)) + 
obj = Proxy_GET_OBJECT(obj); + + if (obj == NULL) + obj = Py_None; + + Py_INCREF(obj); + return obj; +} + +static char +setobject__doc__[] = +"setProxiedObject(proxy, object) --> object\n" +"\n" +"Set the underlying object for proxy, returning the old proxied object.\n" +"Raises TypeError if proxy is not a proxy.\n"; + +static PyObject * +wrapper_setobject(PyObject *unused, PyObject *args) +{ + PyObject *proxy; + PyObject *object; + PyObject *result = NULL; + if (PyArg_ParseTuple(args, "O!O:setProxiedObject", + &ProxyType, &proxy, &object)) { + result = Proxy_GET_OBJECT(proxy); + Py_INCREF(object); + ((ProxyObject *) proxy)->proxy_object = object; + } + return result; +} + +static char +isProxy__doc__[] = +"Check whether the given object is a proxy\n" +"\n" +"If proxytype is not None, checkes whether the object is\n" +"proxied by the given proxytype.\n" +; + +static PyObject * +wrapper_isProxy(PyObject *unused, PyObject *args) +{ + PyObject *obj, *result; + PyTypeObject *proxytype=&ProxyType; + + if (! 
PyArg_ParseTuple(args, "O|O!:isProxy", + &obj, &PyType_Type, &proxytype) + ) + return NULL; + + while (obj && Proxy_Check(obj)) + { + if (PyObject_TypeCheck(obj, proxytype)) + { + result = Py_True; + Py_INCREF(result); + return result; + } + obj = Proxy_GET_OBJECT(obj); + } + result = Py_False; + Py_INCREF(result); + return result; +} + +static char +removeAllProxies__doc__[] = +"removeAllProxies(proxy) --> object\n" +"\n" +"Get the proxied object with no proxies\n" +"\n" +"If obj is not a proxied object, return obj.\n" +"\n" +"The returned object has no proxies.\n" +; + +static PyObject * +wrapper_removeAllProxies(PyObject *unused, PyObject *obj) +{ + while (obj && Proxy_Check(obj)) + obj = Proxy_GET_OBJECT(obj); + + if (obj == NULL) + obj = Py_None; + + Py_INCREF(obj); + return obj; +} + +static char +sameProxiedObjects__doc__[] = +"Check whether two objects are the same or proxies of the same object"; + +static PyObject * +wrapper_sameProxiedObjects(PyObject *unused, PyObject *args) +{ + PyObject *ob1, *ob2; + + if (! PyArg_ParseTuple(args, "OO:sameProxiedObjects", &ob1, &ob2)) + return NULL; + + while (ob1 && Proxy_Check(ob1)) + ob1 = Proxy_GET_OBJECT(ob1); + + while (ob2 && Proxy_Check(ob2)) + ob2 = Proxy_GET_OBJECT(ob2); + + if (ob1 == ob2) + ob1 = Py_True; + else + ob1 = Py_False; + + Py_INCREF(ob1); + return ob1; +} + + +static char +queryProxy__doc__[] = +"Look for a proxy of the given type around the object\n" +"\n" +"If no such proxy can be found, return the default.\n" +; + +static PyObject * +wrapper_queryProxy(PyObject *unused, PyObject *args) +{ + PyObject *obj, *result=Py_None; + PyTypeObject *proxytype=&ProxyType; + + if (! 
PyArg_ParseTuple(args, "O|O!O:queryProxy", + &obj, &PyType_Type, &proxytype, &result) + ) + return NULL; + + while (obj && Proxy_Check(obj)) + { + if (PyObject_TypeCheck(obj, proxytype)) + { + Py_INCREF(obj); + return obj; + } + obj = Proxy_GET_OBJECT(obj); + } + + Py_INCREF(result); + return result; +} + +static char +queryInnerProxy__doc__[] = +"Look for the inner-most proxy of the given type around the object\n" +"\n" +"If no such proxy can be found, return the default.\n" +"\n" +"If there is such a proxy, return the inner-most one.\n" +; + +static PyObject * +wrapper_queryInnerProxy(PyObject *unused, PyObject *args) +{ + PyObject *obj, *result=Py_None; + PyTypeObject *proxytype=&ProxyType; + + if (! PyArg_ParseTuple(args, "O|O!O:queryInnerProxy", + &obj, &PyType_Type, &proxytype, &result) + ) + return NULL; + + while (obj && Proxy_Check(obj)) + { + if (PyObject_TypeCheck(obj, proxytype)) + result = obj; + obj = Proxy_GET_OBJECT(obj); + } + + Py_INCREF(result); + return result; +} + +/* Module initialization */ + +static char +module___doc__[] = +"Association between an object, a context object, and a dictionary.\n\ +\n\ +The context object and dictionary give additional context information\n\ +associated with a reference to the basic object. 
The wrapper objects\n\ +act as proxies for the original object."; + + +static PyMethodDef +module_functions[] = { + {"getProxiedObject", wrapper_getobject, METH_O, getobject__doc__}, + {"setProxiedObject", wrapper_setobject, METH_VARARGS, setobject__doc__}, + {"isProxy", wrapper_isProxy, METH_VARARGS, isProxy__doc__}, + {"sameProxiedObjects", wrapper_sameProxiedObjects, METH_VARARGS, + sameProxiedObjects__doc__}, + {"queryProxy", wrapper_queryProxy, METH_VARARGS, queryProxy__doc__}, + {"queryInnerProxy", wrapper_queryInnerProxy, METH_VARARGS, + queryInnerProxy__doc__}, + {"removeAllProxies", wrapper_removeAllProxies, METH_O, + removeAllProxies__doc__}, + {NULL} +}; + +MOD_INIT(_zope_proxy_proxy) +{ + PyObject *m; + + MOD_DEF(m, "_zope_proxy_proxy", module___doc__, module_functions) + + if (m == NULL) + return MOD_ERROR_VAL; + + if (empty_tuple == NULL) + empty_tuple = PyTuple_New(0); + + ProxyType.tp_free = PyObject_GC_Del; + + if (PyType_Ready(&ProxyType) < 0) + return MOD_ERROR_VAL; + + Py_INCREF(&ProxyType); + PyModule_AddObject(m, "ProxyBase", (PyObject *)&ProxyType); + + if (api_object == NULL) { + api_object = PyCapsule_New(&wrapper_capi, NULL, NULL); + if (api_object == NULL) + return MOD_ERROR_VAL; + } + Py_INCREF(api_object); + PyModule_AddObject(m, "_CAPI", api_object); + + return MOD_SUCCESS_VAL(m); + +} diff --git a/thesisenv/lib/python3.6/site-packages/zope/proxy/_zope_proxy_proxy.cpython-36m-darwin.so b/thesisenv/lib/python3.6/site-packages/zope/proxy/_zope_proxy_proxy.cpython-36m-darwin.so new file mode 100755 index 0000000..ad33759 Binary files /dev/null and b/thesisenv/lib/python3.6/site-packages/zope/proxy/_zope_proxy_proxy.cpython-36m-darwin.so differ diff --git a/thesisenv/lib/python3.6/site-packages/zope/proxy/decorator.py b/thesisenv/lib/python3.6/site-packages/zope/proxy/decorator.py new file mode 100644 index 0000000..9f2084f --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/proxy/decorator.py @@ -0,0 +1,49 @@ 
+############################################################################## +# +# Copyright (c) 2003 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Decorator support + +Decorators are proxies that are mostly transparent but that may provide +additional features. +""" +__docformat__ = "reStructuredText" + +from zope.proxy import getProxiedObject, ProxyBase +from zope.interface.declarations import ObjectSpecificationDescriptor +from zope.interface.declarations import getObjectSpecification +from zope.interface.declarations import ObjectSpecification +from zope.interface import providedBy + +class DecoratorSpecificationDescriptor(ObjectSpecificationDescriptor): + """Support for interface declarations on decorators + """ + def __get__(self, inst, cls=None): + if inst is None: + return getObjectSpecification(cls) + else: + provided = providedBy(getProxiedObject(inst)) + + # Use type rather than __class__ because inst is a proxy and + # will return the proxied object's class. 
+ cls = type(inst) + return ObjectSpecification(provided, cls) + + def __set__(self, inst, value): + raise TypeError("Can't set __providedBy__ on a decorated object") + + +class SpecificationDecoratorBase(ProxyBase): + """Base class for a proxy that provides additional interfaces.""" + + __providedBy__ = DecoratorSpecificationDescriptor() + diff --git a/thesisenv/lib/python3.6/site-packages/zope/proxy/interfaces.py b/thesisenv/lib/python3.6/site-packages/zope/proxy/interfaces.py new file mode 100644 index 0000000..71f113d --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/proxy/interfaces.py @@ -0,0 +1,66 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## +"""Proxy-related interfaces. +""" + +from zope.interface import Interface + +class IProxyIntrospection(Interface): + """Provides methods for indentifying proxies and extracting proxied objects + """ + + def isProxy(obj, proxytype=None): + """Check whether the given object is a proxy + + If proxytype is not None, checkes whether the object is + proxied by the given proxytype. + """ + + def sameProxiedObjects(ob1, ob2): + """Check whether ob1 and ob2 are the same or proxies of the same object + """ + + def getProxiedObject(obj): + """Get the proxied Object + + If the object isn't proxied, then just return the object. 
+ """ + + def setProxiedObject(ob1, ob2): + """Set the underlying object for ob1 to ob2, returning the old object. + + Raises TypeError if ob1 is not a proxy. + """ + + def removeAllProxies(obj): + """Get the proxied object with no proxies + + If obj is not a proxied object, return obj. + + The returned object has no proxies. + """ + + def queryProxy(obj, proxytype, default=None): + """Look for a proxy of the given type around the object + + If no such proxy can be found, return the default. + """ + + def queryInnerProxy(obj, proxytype, default=None): + """Look for the inner-most proxy of the given type around the object + + If no such proxy can be found, return the default. + + If there is such a proxy, return the inner-most one. + """ diff --git a/thesisenv/lib/python3.6/site-packages/zope/proxy/proxy.h b/thesisenv/lib/python3.6/site-packages/zope/proxy/proxy.h new file mode 100644 index 0000000..509564e --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/proxy/proxy.h @@ -0,0 +1,59 @@ +#ifndef _proxy_H_ +#define _proxy_H_ 1 + +typedef struct { + PyObject_HEAD + PyObject *proxy_object; +} ProxyObject; + +#define Proxy_GET_OBJECT(ob) (((ProxyObject *)(ob))->proxy_object) + +typedef struct { + PyTypeObject *proxytype; + int (*check)(PyObject *obj); + PyObject *(*create)(PyObject *obj); + PyObject *(*getobject)(PyObject *proxy); +} ProxyInterface; + +#ifndef PROXY_MODULE + +/* These are only defined in the public interface, and are not + * available within the module implementation. There we use the + * classic Python/C API only. 
+ */ + +static ProxyInterface *_proxy_api = NULL; + +static int +Proxy_Import(void) +{ + if (_proxy_api == NULL) { + PyObject *m = PyImport_ImportModule("zope.proxy"); + if (m != NULL) { + PyObject *tmp = PyObject_GetAttrString(m, "_CAPI"); + if (tmp != NULL) { +#if PY_VERSION_HEX < 0x02070000 + if (PyCObject_Check(tmp)) + _proxy_api = (ProxyInterface *) + PyCObject_AsVoidPtr(tmp); +#else + if (PyCapsule_CheckExact(tmp)) + _proxy_api = (ProxyInterface *) + PyCapsule_GetPointer(tmp, NULL); +#endif + Py_DECREF(tmp); + } + } + } + return (_proxy_api == NULL) ? -1 : 0; +} + +#define ProxyType (*_proxy_api->proxytype) +#define Proxy_Check(obj) (_proxy_api->check((obj))) +#define Proxy_CheckExact(obj) ((obj)->ob_type == ProxyType) +#define Proxy_New(obj) (_proxy_api->create((obj))) +#define Proxy_GetObject(proxy) (_proxy_api->getobject((proxy))) + +#endif /* PROXY_MODULE */ + +#endif /* _proxy_H_ */ diff --git a/thesisenv/lib/python3.6/site-packages/zope/proxy/tests/__init__.py b/thesisenv/lib/python3.6/site-packages/zope/proxy/tests/__init__.py new file mode 100644 index 0000000..b711d36 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/proxy/tests/__init__.py @@ -0,0 +1,2 @@ +# +# This file is necessary to make this directory a package. diff --git a/thesisenv/lib/python3.6/site-packages/zope/proxy/tests/test_decorator.py b/thesisenv/lib/python3.6/site-packages/zope/proxy/tests/test_decorator.py new file mode 100644 index 0000000..4003377 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/proxy/tests/test_decorator.py @@ -0,0 +1,167 @@ +############################################################################## +# +# Copyright (c) 2003 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Test Harness +""" +import unittest + + +class DecoratorSpecificationDescriptorTests(unittest.TestCase): + + def _getTargetClass(self): + from zope.proxy.decorator import DecoratorSpecificationDescriptor + return DecoratorSpecificationDescriptor + + def _makeOne(self): + return self._getTargetClass()() + + def test___get___w_class(self): + from zope.interface import Interface + from zope.interface import implementer + from zope.interface import provider + class IContextFactory(Interface): + pass + class IContext(Interface): + pass + @provider(IContextFactory) + @implementer(IContext) + class Context(object): + pass + dsd = self._makeOne() + self.assertEqual(list(dsd.__get__(None, Context)), [IContextFactory]) + + def test___get___w_inst_no_proxy(self): + from zope.interface import Interface + from zope.interface import implementer + from zope.interface import provider + class IContextFactory(Interface): + pass + class IContext(Interface): + pass + @provider(IContextFactory) + @implementer(IContext) + class Context(object): + pass + dsd = self._makeOne() + self.assertEqual(list(dsd.__get__(Context(), None)), [IContext]) + + def test___get___w_inst_w_proxy(self): + from zope.interface import Interface + from zope.interface import implementer + from zope.interface import provider + from zope.proxy import ProxyBase + class IContextFactory(Interface): + pass + class IContext(Interface): + pass + @provider(IContextFactory) + @implementer(IContext) + class Context(object): + pass + context = Context() + proxy = ProxyBase(context) + dsd = self._makeOne() + self.assertEqual(list(dsd.__get__(proxy, None)), [IContext]) + + def 
test___get___w_inst_w_derived_proxy(self): + from zope.interface import Interface + from zope.interface import implementer + from zope.interface import provider + from zope.proxy import ProxyBase + class IContextFactory(Interface): + pass + class IContext(Interface): + pass + @provider(IContextFactory) + @implementer(IContext) + class Context(object): + pass + class IProxyFactory(Interface): + pass + class IProxy(Interface): + pass + @provider(IProxyFactory) + @implementer(IProxy) + class Proxy(ProxyBase): + pass + context = Context() + proxy = Proxy(context) + dsd = self._makeOne() + self.assertEqual(list(dsd.__get__(proxy, None)), + [IContext, IProxy]) + + def test___set___not_allowed(self): + from zope.interface import Interface + from zope.interface import implementer + class IFoo(Interface): + pass + @implementer(IFoo) + class Foo(object): + pass + foo = Foo() + dsd = self._makeOne() + self.assertRaises(TypeError, dsd.__set__, foo, object()) + + +class SpecificationDecoratorBaseTests(unittest.TestCase): + + def _getTargetClass(self): + from zope.proxy.decorator import SpecificationDecoratorBase + return SpecificationDecoratorBase + + def _makeOne(self, wrapped): + return self._getTargetClass()(wrapped) + + def test_wrapped_instance(self): + from zope.interface import Interface + from zope.interface import implementer + from zope.interface import providedBy + class IFoo(Interface): + pass + @implementer(IFoo) + class Foo(object): + pass + foo = Foo() + proxy = self._makeOne(foo) + self.assertEqual(list(providedBy(proxy)), list(providedBy(foo))) + + def test_proxy_that_provides_interface_as_well_as_wrapped(self): + # If both the wrapper and the wrapped object provide + # interfaces, the wrapper provides the sum + from zope.interface import Interface + from zope.interface import implementer + from zope.interface import providedBy + class IFoo(Interface): + pass + @implementer(IFoo) + class Foo(object): + from_foo = 1 + + class IWrapper(Interface): + pass + 
@implementer(IWrapper) + class Proxy(self._getTargetClass()): + pass + + foo = Foo() + proxy = Proxy(foo) + + self.assertEqual(proxy.from_foo, 1) + self.assertEqual(list(providedBy(proxy)), [IFoo,IWrapper]) + + +def test_suite(): + return unittest.TestSuite(( + unittest.makeSuite(DecoratorSpecificationDescriptorTests), + unittest.makeSuite(SpecificationDecoratorBaseTests), + )) diff --git a/thesisenv/lib/python3.6/site-packages/zope/proxy/tests/test_proxy.py b/thesisenv/lib/python3.6/site-packages/zope/proxy/tests/test_proxy.py new file mode 100644 index 0000000..c96492d --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/proxy/tests/test_proxy.py @@ -0,0 +1,1508 @@ +############################################################################## +# +# Copyright (c) 2003 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Test base proxy class. 
+""" +import unittest + +try: + import zope.security +except ImportError: # pragma: no cover + _HAVE_ZOPE_SECURITY = False +else: + _HAVE_ZOPE_SECURITY = True + +from zope.proxy._compat import PY3 + +class ModuleConformanceCase(unittest.TestCase): + + def test_module_conforms_to_IProxyIntrospection(self): + from zope.interface.verify import verifyObject + import zope.proxy + from zope.proxy.interfaces import IProxyIntrospection + verifyObject(IProxyIntrospection, zope.proxy) + + +class PyProxyBaseTestCase(unittest.TestCase): + + # Names of special methods + getslice = '__getitem__' if PY3 else '__getslice__' + setslice = '__setitem__' if PY3 else '__setslice__' + + + def _getTargetClass(self): + from zope.proxy import PyProxyBase + return PyProxyBase + + def _makeOne(self, o): + return self._getTargetClass()(o) + + def test_constructor(self): + o = object() + self.assertRaises(TypeError, self._makeOne, o, o) + self.assertRaises(TypeError, self._makeOne, o, key='value') + self.assertRaises(TypeError, self._makeOne, key='value') + + def test_subclass_constructor(self): + class MyProxy(self._getTargetClass()): + def __new__(cls, *args, **kwds): + return super(MyProxy, cls).__new__(cls, *args, **kwds) + def __init__(self, *args, **kwds): + super(MyProxy, self).__init__(*args, **kwds) + o1 = object() + o2 = object() + o = MyProxy((o1, o2)) + + self.assertEqual(o1, o[0]) + self.assertEqual(o2, o[1]) + + self.assertRaises(TypeError, MyProxy, o1, o2) + self.assertRaises(TypeError, MyProxy, o1, key='value') + self.assertRaises(TypeError, MyProxy, key='value') + + # Check that are passed to __init__() overrides what's passed + # to __new__(). 
+ class MyProxy2(self._getTargetClass()): + def __new__(cls, *args, **kwds): + return super(MyProxy2, cls).__new__(cls, 'value') + + proxy = MyProxy2('splat!') + self.assertEqual(list(proxy), list('splat!')) + + class MyProxy3(MyProxy2): + def __init__(self, arg): + if list(self) != list('value'): + raise AssertionError("list(self) != list('value')") + super(MyProxy3, self).__init__('another') + + proxy = MyProxy3('notused') + self.assertEqual(list(proxy), list('another')) + + def test_custom_int_to_int(self): + class CustomClass(object): + def __int__(self): + return 42 + proxy = self._makeOne(CustomClass()) + self.assertEqual(42, int(proxy)) + + def test_string_to_float(self): + proxy = self._makeOne("14") + self.assertEqual(float("14"), float(proxy)) + + def test_incorrect_string_to_int(self): + proxy = self._makeOne("") + self.assertRaises(ValueError, int, proxy) + + def test_incorrect_string_to_float(self): + proxy = self._makeOne("") + self.assertRaises(ValueError, float, proxy) + + def test_custom_float_to_float(self): + class CustomClass(object): + def __float__(self): + return 42.0 + proxy = self._makeOne(CustomClass()) + self.assertEqual(42.0, float(proxy)) + + @unittest.skipIf(PY3, "Gone in Py3") + def test___unicode__of_unicode(self): + s = u'Hello, \u2603' + proxy = self._makeOne(s) + self.assertEqual(unicode(proxy), s) + + @unittest.skipIf(PY3, "Gone in Py3") + def test___unicode__of_custom_class(self): + class CustomClass(object): + def __unicode__(self): + return u'Hello, \u2603' + cc = CustomClass() + self.assertEqual(unicode(cc), u'Hello, \u2603') + proxy = self._makeOne(cc) + self.assertEqual(unicode(proxy), u'Hello, \u2603') + + @unittest.skipIf(PY3, "Gone in Py3") + def test___unicode__of_custom_class_no_unicode(self): + class CustomClass(object): + pass + cc = CustomClass() + cc_unicode = unicode(cc) + self.assertEqual(type(cc_unicode), unicode) + proxy = self._makeOne(cc) + self.assertEqual(unicode(proxy), cc_unicode) + + def 
test___call__(self): + def _foo(): + return 'FOO' + proxy = self._makeOne(_foo) + self.assertEqual(proxy(), 'FOO') + + @unittest.skipIf(PY3, "Gone in Py3") + def test_callable(self): + w = self._makeOne({}.get) + self.assertTrue(callable(w)) + + def test___repr__(self): + def _foo(): + raise AssertionError("Not called") + proxy = self._makeOne(_foo) + self.assertEqual(repr(proxy), repr(_foo)) + + def test___str__(self): + def _foo(): + raise AssertionError("Not called") + proxy = self._makeOne(_foo) + self.assertEqual(str(proxy), str(_foo)) + + @unittest.skipIf(PY3, "Gone in Py3") + def test___unicode__(self): + def _foo(): + raise AssertionError("Not called") + proxy = self._makeOne(_foo) + self.assertTrue(unicode(proxy).startswith(' w1) + self.assertTrue(w2 >= w1) + self.assertTrue(w2 > o1) + self.assertTrue(w2 >= o1) + self.assertTrue(o2 > w1) + self.assertTrue(o2 >= w2) + + def test___nonzero__(self): + w = self._makeOne(None) + self.assertFalse(w) + self.assertTrue(not w) + + def test___hash__(self): + w1 = self._makeOne(1) + self.assertEqual(hash(w1), hash(1)) + + def test___getattr__miss_both(self): + class Foo(object): + pass + o = Foo() + w = self._makeOne(o) + def _try(): + return w.nonesuch + self.assertRaises(AttributeError, _try) + + def test___getattr__delegates_to_wrapped(self): + class Foo(object): + pass + o = Foo() + o.foo = 1 + w = self._makeOne(o) + self.assertEqual(w.foo, 1) + + def test___getattr__delegates_to_wrapped_when_conflict(self): + class Proxy(self._getTargetClass()): + def foo(self): + raise AssertionError("Not called") + class Foo(object): + def foo(self): + return 'FOO' + o = Foo() + w = Proxy(o) + self.assertEqual(w.foo(), 'FOO') + + def test___setattr__delegates_to_wrapped(self): + class Foo(object): + pass + o = Foo() + w = self._makeOne(o) + w.foo = 1 + self.assertEqual(o.foo, 1) + + def test___setattr__sets_proxy_property(self): + class Proxy(self._getTargetClass()): + bar = property( + lambda s: s.__dict__.get('_bar'), + 
lambda s, v: s.__dict__.__setitem__('_bar', v) + ) + class Foo(object): + pass + o = Foo() + w = Proxy(o) + w.bar = 43 + self.assertEqual(w.bar, 43) + self.assertRaises(AttributeError, getattr, o, 'bar') + + def test___delattr___wrapped(self): + class Foo(object): + pass + o = Foo() + o.foo = 1 + w = self._makeOne(o) + def _try(): + del w._wrapped + self.assertRaises(AttributeError, _try) + + def test___delattr__delegates_to_wrapped(self): + class Foo(object): + pass + o = Foo() + o.foo = 1 + w = self._makeOne(o) + del w.foo + self.assertFalse('foo' in o.__dict__) + + def test___len__(self): + l = [] + w = self._makeOne(l) + self.assertEqual(len(w), 0) + l.append(0) + self.assertEqual(len(w), 1) + + def test___getitem_____setitem_____delitem__(self): + w = self._makeOne({}) + self.assertRaises(KeyError, lambda: w[1]) + w[1] = 'a' + self.assertEqual(w[1], 'a') + del w[1] + self.assertRaises(KeyError, lambda: w[1]) + def del_w_1(): + del w[1] + self.assertRaises(KeyError, del_w_1) + + def test___getitem__w_slice_against_list(self): + # Lists have special slicing behavior. + pList = self._makeOne([1, 2]) + self.assertEqual(pList[-1:], [2]) + self.assertEqual(pList[-2:], [1, 2]) + self.assertEqual(pList[-3:], [1, 2]) + + def test___getitem__w_slice_against_tuple(self): + # Tuples also have special slicing behavior. 
+ pTuple = self._makeOne((1, 2)) + self.assertEqual(pTuple[-1:], (2,)) + self.assertEqual(pTuple[-2:], (1, 2)) + self.assertEqual(pTuple[-3:], (1, 2)) + + def test___getitem__w_slice_against_derived_list(self): + data = [1, 2] + class DerivedList(list): + def __getslice__(self, start, stop): + return list.__getslice__(self, start, stop) + + pList = self._makeOne(DerivedList(data)) + + self.assertEqual(pList[-1:], data[-1:]) + self.assertEqual(pList[-2:], data[-2:]) + self.assertEqual(pList[-3:], data[-3:]) + + def test___getitem__w_slice_against_class_w_custom___getslice__(self): + import sys + test = self + class Slicer(object): + def __len__(self): + return 2 + + def __getslice__(self, start, end): + return (start, end) + + def __getitem__(self, a_slice): # pragma: no cover + test.assertTrue(PY3) + # On Python 3, we basically just return what the test expects. + # Mostly that's the computed indices (yay!) but there are + # a few special cases. + indices = a_slice.indices(len(self)) + return (indices[0] if a_slice.start != -3 else -1, + indices[-1] if a_slice.stop is not None else sys.maxsize) + + pSlicer = self._makeOne(Slicer()) + self.assertEqual(pSlicer[:1][0], 0) + self.assertEqual(pSlicer[:1][1], 1) + self.assertEqual(pSlicer[:-1][0], 0) + self.assertEqual(pSlicer[:-1][1], 1) + self.assertEqual(pSlicer[-1:][0], 1) + self.assertEqual(pSlicer[-2:][0], 0) + self.assertEqual(pSlicer[-3:], (-1, sys.maxsize)) + + def test___getslice___dne_uses_getitem(self): + class Missing(Exception): + pass + class Get(object): + def __getitem__(self, x): + raise Missing('__getitem__') + + target = Get() + proxy = self._makeOne(target) + with self.assertRaisesRegexp(Missing, + '__getitem__'): + proxy[1:2] + + def test___getslice___error_propagates(self): + test = self + class Missing(Exception): + pass + class Get(object): + def __getitem__(self, x): # pragma: no cover (only py3) + test.assertTrue(PY3) + raise Missing('__getitem__') + def __getslice__(self, start, stop): + raise 
Missing("__getslice__") + target = Get() + proxy = self._makeOne(target) + with self.assertRaisesRegexp(Missing, + self.getslice): + proxy[1:2] + + def test___setslice___against_list(self): + # Lists have special slicing bahvior for assignment as well. + pList = self._makeOne([1, 2]) + pList[-1:] = [3, 4] + self.assertEqual(pList, [1, 3, 4]) + pList = self._makeOne([1, 2]) + pList[-2:] = [3, 4] + self.assertEqual(pList, [3, 4]) + pList = self._makeOne([1, 2]) + pList[-3:] = [3, 4] + self.assertEqual(pList, [3, 4]) + + def test___setslice___against_derived_list(self): + # This behavior should be true for all list-derived classes. + class DerivedList(list): + pass + + pList = self._makeOne(DerivedList([1, 2])) + pList[-1:] = [3, 4] + self.assertEqual(pList, [1, 3, 4]) + pList = self._makeOne(DerivedList([1, 2])) + pList[-2:] = [3, 4] + self.assertEqual(pList, [3, 4]) + pList = self._makeOne(DerivedList([1, 2])) + pList[-3:] = [3, 4] + self.assertEqual(pList, [3, 4]) + + def test___setslice___error_propagates(self): + class Missing(Exception): + pass + class Set(object): + def __setitem__(self, k, v): + raise Missing('__setitem__') # pragma: no cover (only py3) + def __setslice__(self, start, stop, value): + raise Missing("__setslice__") + target = Set() + proxy = self._makeOne(target) + with self.assertRaisesRegexp(Missing, + self.setslice): + proxy[1:2] = 1 + + def test___setslice___dne_uses_setitem(self): + class Missing(Exception): + pass + class Set(object): + def __setitem__(self, k, v): + raise Missing('__setitem__') + + target = Set() + proxy = self._makeOne(target) + with self.assertRaisesRegexp(Missing, + '__setitem__'): + proxy[1:2] = 1 + + + def test___iter___w_wrapped_iterable(self): + a = [1, 2, 3] + b = [] + for x in self._makeOne(a): + b.append(x) + self.assertEqual(a, b) + + def test___iter___w_wrapped_iterator(self): + # Wrap an iterator before starting iteration. + # PyObject_GetIter() will still be called on the proxy. 
+ a = [1, 2, 3] + b = [] + for x in self._makeOne(iter(a)): + b.append(x) + self.assertEqual(a, b) + t = tuple(self._makeOne(iter(a))) + self.assertEqual(t, (1, 2, 3)) + + def test___iter___returns_self_if_defined(self): + # Return the wrapped object itself, if it is an iterator. + class MyIter(object): + def __iter__(self): + return self + def __next__(self): + raise AssertionError("Not called") + next = __next__ + myIter = MyIter() + p = self._makeOne(myIter) + self.assertEqual(iter(p), p) + self.assertTrue(isinstance(iter(p), MyIter)) + + def test___iter___next_when_returned_by_iterable(self): + # Wrap an iterator within the iteration protocol, expecting it + # still to work. PyObject_GetIter() will not be called on the + # proxy, so the tp_iter slot won't unwrap it. + + class Iterable(object): + def __init__(self, test, data): + self.test = test + self.data = data + def __iter__(self): + return self.test._makeOne(iter(self.data)) + + a = [1, 2, 3] + b = [] + for x in Iterable(self, a): + b.append(x) + self.assertEqual(a, b) + + # Python 2.7 won't let the C wrapper support __reversed__ :( + #def test___reversed__(self): + # w = self._makeOne([0, 1, 2, 3]) + # self.assertEqual(list(reversed(w)), [3, 2, 1, 0]) + + def test___contains__(self): + w = self._makeOne([0, 1, 2, 3]) + self.assertTrue(1 in w) + self.assertFalse(4 in w) + + def test___index__(self): + import operator + w = self._makeOne(42) + self.assertEqual(operator.index(w), 42) + + # Numeric ops. 
+ + @property + def unops(self): + ops = [ + "-x", + "+x", + "abs(x)", + "~x", + "int(x)", + "float(x)", + "complex(x)", + ] + if not PY3: # long is gone in Python 3 + ops.append("long(x)") + return ops + + def test_unops(self): + for expr in self.unops: + x = 1 + y = eval(expr) + x = self._makeOne(1) + z = eval(expr) + self.assertEqual(z, y, + "x=%r; expr=%r" % (x, expr)) + + def test_odd_unops(self): + # unops that don't return a proxy + funcs = (lambda x: not x,) + if not PY3: + funcs += (oct, hex) + for func in funcs: + self.assertEqual(func(self._makeOne(100)), func(100)) + + binops = [ + "x+y", "x-y", "x*y", "x/y", "x//y", "x%y", "divmod(x, y)", + "x**y", #"pow(x,y,3)" (RHS coercion not supported w/ modulus) + "x<>y", "x&y", "x|y", "x^y", + ] + + def test_binops(self): + for expr in self.binops: + first = 1 + for x in [1, self._makeOne(1)]: + for y in [2, self._makeOne(2)]: + if first: + z = eval(expr) + first = 0 + else: + self.assertEqual(eval(expr), z, + "x=%r; y=%r; expr=%r" % (x, y, expr)) + + def test_pow_w_modulus(self): + x = self._makeOne(2) + # Can't coerce 2nd / 3rd args in pure Python, because we can't + # lie about our type + self.assertEqual(pow(x, 3, 3), 2) + + def test_inplace(self): + # TODO: should test all inplace operators... 
+ pa = self._makeOne(1) + pa += 2 + self.assertEqual(pa, 3) + + a = [1, 2, 3] + pa = qa = self._makeOne(a) + pa += [4, 5, 6] + self.assertTrue(pa is qa) + self.assertEqual(a, [1, 2, 3, 4, 5, 6]) + + pa = self._makeOne(2) + pa -= 1 + self.assertEqual(pa, 1) + pa *= 4 + self.assertEqual(pa, 4) + pa /= 2 + self.assertEqual(pa, 2) + pa //= 2 + self.assertEqual(pa, 1) + pa += 2 + self.assertEqual(pa, 3) + pa %= 2 + self.assertEqual(pa, 1) + + pa = self._makeOne(2) + pa **= 2 + self.assertEqual(pa, 4) + pa <<= 1 + self.assertEqual(pa, 8) + pa >>= 2 + self.assertEqual(pa, 2) + + pa = self._makeOne(7) + pa &= 6 + self.assertEqual(pa, 6) + pa |= 16 + self.assertEqual(pa, 22) + pa ^= 2 + self.assertEqual(pa, 20) + + @unittest.skipIf(PY3, "No coercion in Py3") + def test_coerce(self): + # Before 2.3, coerce() of two proxies returns them unchanged + + x = self._makeOne(1) + y = self._makeOne(2) + a, b = coerce(x, y) + self.assertTrue(a is x and b is y) + + x = self._makeOne(1) + y = self._makeOne(2.1) + a, b = coerce(x, y) + self.assertTrue(isinstance(a, float)) # a was coerced + self.assertFalse(a is x) + self.assertEqual(a, float(x)) + self.assertTrue(b is y) + + x = self._makeOne(1.1) + y = self._makeOne(2) + a, b = coerce(x, y) + self.assertTrue(a is x) + self.assertTrue(isinstance(b, float)) # b was coerced + self.assertFalse(b is y) + self.assertEqual(b, float(y)) + + x = self._makeOne(1) + y = 2 + a, b = coerce(x, y) + self.assertTrue(a is x) # neither was coerced + self.assertTrue(b is y) + + x = self._makeOne(1) + y = 2.1 + a, b = coerce(x, y) + self.assertTrue(isinstance(a, float)) # a was coerced + self.assertFalse(a is x) + self.assertEqual(a, float(x)) + self.assertTrue(b is y) + + x = self._makeOne(1.1) + y = 2 + a, b = coerce(x, y) + self.assertTrue(a is x) + self.assertTrue(isinstance(b, float)) # b was coerced + self.assertFalse(b is y) + self.assertEqual(b,float(y)) + + x = 1 + y = self._makeOne(2) + a, b = coerce(x, y) + self.assertTrue(a is x) # neither was 
coerced + self.assertTrue(b is y) + + x = 1.1 + y = self._makeOne(2) + a, b = coerce(x, y) + self.assertTrue(a is x) + self.assertTrue(isinstance(b, float)) # b was coerced + self.assertFalse(b is y) + self.assertEqual(b, float(y)) + + x = 1 + y = self._makeOne(2.1) + a, b = coerce(x, y) + self.assertTrue(isinstance(a, float)) # a was coerced + self.assertFalse(a is x) + self.assertEqual(a, float(x)) + self.assertTrue(b is y) + + def test___class__(self): + o = object() + w = self._makeOne(o) + self.assertTrue(w.__class__ is o.__class__) + + def test_descriptor__set___only_in_proxy_subclass(self): + + class Descriptor(object): + value = None + instance = None + def __set__(self, instance, value): + self.value = value + self.instance = instance + + descriptor = Descriptor() + class Proxy(self._getTargetClass()): + attr = descriptor + + proxy = Proxy(object()) + proxy.attr = 42 + + self.assertEqual(proxy.attr, descriptor) + self.assertEqual(descriptor.value, 42) + self.assertEqual(descriptor.instance, proxy) + + def test_descriptor__get___set___in_proxy_subclass(self): + + class Descriptor(object): + value = None + instance = None + cls = None + + def __get__(self, instance, cls): + self.cls = cls + return self.value + + def __set__(self, instance, value): + self.value = value + self.instance = instance + + descriptor = Descriptor() + descriptor.value = "descriptor value" + class Proxy(self._getTargetClass()): + attr = descriptor + + proxy = Proxy(object()) + self.assertEqual(proxy.attr, "descriptor value") + self.assertEqual(descriptor.cls, Proxy) + + proxy.attr = 42 + + self.assertEqual(descriptor.value, 42) + self.assertEqual(descriptor.instance, proxy) + + def test_non_descriptor_in_proxy_subclass__dict__(self): + # Non-descriptors in the class dict of the subclass + # are always passed through to the wrapped instance + class Proxy(self._getTargetClass()): + attr = "constant value" + + proxy = Proxy(object()) + self.assertEqual(proxy.attr, "constant value") + + 
self.assertRaises(AttributeError, setattr, proxy, 'attr', 42) + self.assertEqual(proxy.attr, "constant value") + + def _check_wrapping_builtin_returns_correct_provided_by(self, proxy_class, builtin_type): + # We get the __implemented__ (fallback) of the type, not our own + from zope.interface import Interface + from zope.interface import classImplements + from zope.interface import classImplementsOnly + from zope.interface import implementedBy + from zope.interface import providedBy + from zope.interface import implementedBy + + # Set up the builtin interface + class IFoo(Interface): + pass + impl_before = list(implementedBy(builtin_type)) + + classImplements(builtin_type, IFoo) + + builtin = builtin_type() + self.assertTrue(IFoo in list(providedBy(builtin))) + self.assertTrue(IFoo in list(implementedBy(builtin_type))) + + try: + # The asserts must be before we remove the interface + # because there's a single object that gets mutated + + proxy_instance = proxy_class(builtin) + provided_instance = providedBy(proxy_instance) + self.assertTrue(IFoo in list(provided_instance)) + + proxy_type = proxy_class(builtin_type) + from zope.interface.declarations import BuiltinImplementationSpecifications + self.assertIn(proxy_type, BuiltinImplementationSpecifications) + self.assertIsNot(BuiltinImplementationSpecifications.get(proxy_type, self), + self) + provided_type = implementedBy(proxy_type) + self.assertTrue(IFoo in list(provided_type)) + finally: + classImplementsOnly(builtin_type, *impl_before) + + def test_wrapping_builtin_type_returns_correct_provided_by(self): + self._check_wrapping_builtin_returns_correct_provided_by(self._getTargetClass(), list) + + def _check_wrapping_builtin_with_subclass_returns_correct_provided_by(self, builtin_type): + class Proxy(self._getTargetClass()): + pass + + self._check_wrapping_builtin_returns_correct_provided_by(Proxy, builtin_type) + # Our new class did not gain an __implemented__ attribute, unless we're + # the pure-python version 
+ if hasattr(Proxy, '__implemented__'): # pragma: no cover + from zope.proxy import PyProxyBase + self.assertTrue(self._getTargetClass() is PyProxyBase) + + def test_wrapping_builtin_with_subclass_returns_correct_provided_by(self): + self._check_wrapping_builtin_with_subclass_returns_correct_provided_by(list) + + def test_method_in_proxy_subclass(self): + class Proxy(self._getTargetClass()): + def __getitem__(self, k): + return k + + proxy = Proxy(object()) + # Both when called by the interpreter, which bypasses + # __getattribute__ + self.assertEqual(proxy[42], 42) + # And when asked for as an attribute + self.assertNotEqual(getattr(proxy, '__getitem__'), self) + + def test_string_to_int(self): + proxy = self._makeOne("14") + self.assertEqual(14, int(proxy)) + +class ProxyBaseTestCase(PyProxyBaseTestCase): + + def _getTargetClass(self): + from zope.proxy import ProxyBase + return ProxyBase + +class Test_py__module(unittest.TestCase): + # Historically, proxying __module__ has been troublesome, + # especially when subclasses of the proxy class are involved; + # there was also a discrepancy between the C and Python implementations + # in that the C implementation only failed Test_subclass__module:test__module__in_instance, + # whereas the Python version failed every test. 
+ # See https://github.com/zopefoundation/zopetoolkit/pull/2#issuecomment-106075153 + # and https://github.com/zopefoundation/zope.proxy/pull/8 + + def _getTargetClass(self): + from zope.proxy import PyProxyBase + return PyProxyBase + + def _makeProxy(self, obj): + from zope.proxy import PyProxyBase + return self._getTargetClass()(obj) + + def _check_module(self, obj, expected): + self.assertEqual(expected, obj.__module__) + self.assertEqual(expected, self._makeProxy(obj).__module__) + + def test__module__in_instance(self): + # We can find __module__ in an instance dict + class Module(object): + def __init__(self): + self.__module__ = 'module' + + self._check_module(Module(), 'module') + + def test__module__in_class_instance(self): + # We can find module in an instance of a class + class Module(object): + pass + + self._check_module(Module(), __name__) + + def test__module__in_class(self): + # We can find module in a class itself + class Module(object): + pass + self._check_module(Module, __name__) + + def test__module_in_eq_transitive(self): + # An object that uses __module__ in its implementation + # of __eq__ is transitively equal to a proxy of itself. 
+ # Seen with zope.interface.interface.Interface + + class Module(object): + def __init__(self): + self.__module__ = __name__ + def __eq__(self, other): + return self.__module__ == other.__module__ + + module = Module() + # Sanity checks + self.assertEqual(module, module) + self.assertEqual(module.__module__, __name__) + + # transitive equal + self.assertEqual(module, self._makeProxy(module)) + self.assertEqual(self._makeProxy(module), module) + +class Test__module(Test_py__module): + + def _getTargetClass(self): + from zope.proxy import ProxyBase + return ProxyBase + +class Test_py_subclass__module(Test_py__module): + + def _getTargetClass(self): + class ProxySubclass(super(Test_py_subclass__module, self)._getTargetClass()): + pass + return ProxySubclass + +class Test_subclass__module(Test__module): + + def _getTargetClass(self): + class ProxySubclass(super(Test_subclass__module, self)._getTargetClass()): + pass + return ProxySubclass + + +class Test_py_getProxiedObject(unittest.TestCase): + + def _callFUT(self, *args): + from zope.proxy import py_getProxiedObject + return py_getProxiedObject(*args) + + def _makeProxy(self, obj): + from zope.proxy import PyProxyBase + return PyProxyBase(obj) + + def test_no_proxy(self): + class C(object): + pass + c = C() + self.assertTrue(self._callFUT(c) is c) + + def test_simple_proxy(self): + class C(object): + pass + c = C() + proxy = self._makeProxy(c) + self.assertTrue(self._callFUT(proxy) is c) + + def test_nested_proxy(self): + class C(object): + pass + c = C() + proxy = self._makeProxy(c) + proxy2 = self._makeProxy(proxy) + self.assertTrue(self._callFUT(proxy2) is proxy) + +class Test_getProxiedObject(Test_py_getProxiedObject): + + def _callFUT(self, *args): + from zope.proxy import getProxiedObject + return getProxiedObject(*args) + + def _makeProxy(self, obj): + from zope.proxy import ProxyBase + return ProxyBase(obj) + + +class Test_py_setProxiedObject(unittest.TestCase): + + def _callFUT(self, *args): + from 
zope.proxy import py_setProxiedObject + return py_setProxiedObject(*args) + + def _makeProxy(self, obj): + from zope.proxy import PyProxyBase + return PyProxyBase(obj) + + def test_no_proxy(self): + class C(object): + pass + c1 = C() + c2 = C() + self.assertRaises(TypeError, self._callFUT, c1, c2) + + def test_w_proxy(self): + class C(object): + def __init__(self, name): + self.name = name + c1 = C('c1') + c2 = C('c2') + proxy = self._makeProxy(c1) + self.assertEqual(proxy.name, 'c1') + old = self._callFUT(proxy, c2) + self.assertTrue(old is c1) + self.assertEqual(proxy.name, 'c2') + + def test_w_nested_proxy(self): + class C(object): + def __init__(self, name): + self.name = name + c1 = C('c1') + c2 = C('c2') + p1 = self._makeProxy(c1) + proxy2 = self._makeProxy(c2) + proxy = self._makeProxy(p1) + self.assertEqual(proxy.name, 'c1') + old = self._callFUT(proxy, proxy2) + self.assertTrue(old is p1) + self.assertEqual(proxy.name, 'c2') + + +class Test_setProxiedObject(Test_py_setProxiedObject): + + def _callFUT(self, *args): + from zope.proxy import setProxiedObject + return setProxiedObject(*args) + + def _makeProxy(self, obj): + from zope.proxy import ProxyBase + return ProxyBase(obj) + + +class Test_py_isProxy(unittest.TestCase): + + def _callFUT(self, *args): + from zope.proxy import py_isProxy + return py_isProxy(*args) + + def _proxyClass(self): + from zope.proxy import PyProxyBase + return PyProxyBase + + def test_bare_instance(self): + class C(object): + pass + c = C() + self.assertFalse(self._callFUT(c)) + + def test_proxy_no_class(self): + class P1(self._proxyClass()): + pass + class C(object): + pass + c = C() + p1 = P1(c) + self.assertTrue(self._callFUT(p1)) + + def test_proxy_w_same_class(self): + class P1(self._proxyClass()): + pass + class C(object): + pass + c = C() + p1 = P1(c) + self.assertTrue(self._callFUT(p1, P1)) + + def test_proxy_w_other_class(self): + class P1(self._proxyClass()): + pass + class P2(self._proxyClass()): + pass + class 
C(object): + pass + c = C() + p1 = P1(c) + self.assertFalse(self._callFUT(p1, P2)) + + +class Test_isProxy(Test_py_isProxy): + + def _callFUT(self, *args): + from zope.proxy import isProxy + return isProxy(*args) + + def _proxyClass(self): + from zope.proxy import ProxyBase + return ProxyBase + + +class Test_py_sameProxiedObjects(unittest.TestCase): + + def _callFUT(self, *args): + from zope.proxy import py_sameProxiedObjects + return py_sameProxiedObjects(*args) + + def _makeProxy(self, obj): + from zope.proxy import PyProxyBase + return PyProxyBase(obj) + + def _makeSecurityProxy(self, obj): + from zope.security.proxy import ProxyPy + from zope.security.checker import CheckerPy + checker = CheckerPy({}) + return ProxyPy(obj, checker) + + def test_bare_instance_identical(self): + class C(object): + pass + c1 = C() + self.assertTrue(self._callFUT(c1, c1)) + + def test_bare_instances_different(self): + class C(object): + pass + c1 = C() + c2 = C() + self.assertFalse(self._callFUT(c1, c2)) + self.assertFalse(self._callFUT(c2, c1)) + + def test_proxy_and_same_bare(self): + class C(object): + pass + c1 = C() + self.assertTrue(self._callFUT(self._makeProxy(c1), c1)) + self.assertTrue(self._callFUT(c1, self._makeProxy(c1))) + + def test_proxy_and_other_bare(self): + class C(object): + pass + c1 = C() + c2 = C() + self.assertFalse(self._callFUT(self._makeProxy(c1), c2)) + self.assertFalse(self._callFUT(c2, self._makeProxy(c1))) + + def test_proxies_w_same_bare(self): + _mP = self._makeProxy + class C(object): + pass + c1 = C() + self.assertTrue(self._callFUT(_mP(c1), _mP(c1))) + + def test_proxies_w_other_bare(self): + _mP = self._makeProxy + class C(object): + pass + c1 = C() + c2 = C() + self.assertFalse(self._callFUT(_mP(c1), _mP(c2))) + self.assertFalse(self._callFUT(_mP(c2), _mP(c1))) + + def test_nested_proxy_and_same_bare(self): + _mP = self._makeProxy + class C(object): + pass + c1 = C() + self.assertTrue(self._callFUT(_mP(_mP(c1)), c1)) + 
self.assertTrue(self._callFUT(c1, _mP(_mP(c1)))) + + def test_nested_proxy_and_other_bare(self): + _mP = self._makeProxy + class C(object): + pass + c1 = C() + c2 = C() + self.assertFalse(self._callFUT(_mP(_mP(c1)), c2)) + self.assertFalse(self._callFUT(c2, _mP(_mP(c1)))) + + @unittest.skipUnless(_HAVE_ZOPE_SECURITY, 'zope.security missing') + def test_security_proxy(self): + class C(object): + pass + c1 = C() + proxy1 = self._makeSecurityProxy(c1) + proxy1_2 = self._makeSecurityProxy(c1) + + self.assertTrue(self._callFUT(proxy1, proxy1)) + self.assertTrue(self._callFUT(proxy1, proxy1_2)) + + c2 = C() + proxy2 = self._makeSecurityProxy(c2) + self.assertFalse(self._callFUT(proxy1, proxy2)) + +class Test_sameProxiedObjects(Test_py_sameProxiedObjects): + + def _callFUT(self, *args): + from zope.proxy import sameProxiedObjects + return sameProxiedObjects(*args) + + def _makeProxy(self, obj): + from zope.proxy import ProxyBase + return ProxyBase(obj) + + def _makeSecurityProxy(self, obj): + from zope.security.proxy import Proxy + from zope.security.checker import Checker + checker = Checker({}) + return Proxy(obj, checker) + +class Test_py_queryProxy(unittest.TestCase): + + def _callFUT(self, *args): + from zope.proxy import py_queryProxy + return py_queryProxy(*args) + + def _proxyClass(self): + from zope.proxy import PyProxyBase + return PyProxyBase + + def test_bare_instance(self): + class C(object): + pass + c = C() + self.assertEqual(self._callFUT(c), None) + + def test_proxy_no_class(self): + class P1(self._proxyClass()): + pass + class C(object): + pass + c = C() + p1 = P1(c) + self.assertTrue(self._callFUT(p1) is p1) + + def test_proxy_w_same_class(self): + class P1(self._proxyClass()): + pass + class C(object): + pass + c = C() + p1 = P1(c) + self.assertTrue(self._callFUT(p1, P1) is p1) + self.assertTrue(self._callFUT(p1, P1, 42) is p1) + + def test_proxy_w_other_class(self): + class P1(self._proxyClass()): + pass + class P2(self._proxyClass()): + pass + class 
C(object): + pass + c = C() + p1 = P1(c) + self.assertEqual(self._callFUT(p1, P2), None) + self.assertEqual(self._callFUT(p1, P2, 42), 42) + + def test_proxy_w_base_class(self): + class P1(self._proxyClass()): + pass + class P2(self._proxyClass()): + pass + class C(object): + pass + c = C() + p1 = P1(c) + self.assertTrue(self._callFUT(p1, self._proxyClass()) is p1) + self.assertTrue(self._callFUT(p1, self._proxyClass(), 42) is p1) + + +class Test_queryProxy(Test_py_queryProxy): + + def _callFUT(self, *args): + from zope.proxy import queryProxy + return queryProxy(*args) + + def _proxyClass(self): + from zope.proxy import ProxyBase + return ProxyBase + + +class Test_py_queryInnerProxy(unittest.TestCase): + + def _callFUT(self, *args): + from zope.proxy import py_queryInnerProxy + return py_queryInnerProxy(*args) + + def _proxyClass(self): + from zope.proxy import PyProxyBase + return PyProxyBase + + def test_bare_instance(self): + class C(object): + pass + c = C() + self.assertEqual(self._callFUT(c), None) + + def test_proxy_no_class(self): + class P1(self._proxyClass()): + pass + class C(object): + pass + c = C() + p1 = P1(c) + self.assertTrue(self._callFUT(p1) is p1) + + def test_proxy_w_same_class(self): + class P1(self._proxyClass()): + pass + class C(object): + pass + c = C() + p1 = P1(c) + self.assertTrue(self._callFUT(p1, P1) is p1) + self.assertTrue(self._callFUT(p1, P1, 42) is p1) + + def test_nested_proxy(self): + class P1(self._proxyClass()): + pass + class P2(self._proxyClass()): + pass + class C(object): + pass + c = C() + p1 = P1(c) + proxy2 = P2(p1) + self.assertTrue(self._callFUT(proxy2, P1) is p1) + self.assertTrue(self._callFUT(proxy2, P1, 42) is p1) + self.assertTrue(self._callFUT(proxy2, P2) is proxy2) + self.assertTrue(self._callFUT(proxy2, P2, 42) is proxy2) + + def test_re_nested_proxy(self): + class P1(self._proxyClass()): + pass + class P2(self._proxyClass()): + pass + class C(object): + pass + c = C() + p1 = P1(c) + proxy2 = P2(p1) + proxy3 
= P1(proxy2) + self.assertTrue(self._callFUT(proxy3, P1) is p1) + self.assertTrue(self._callFUT(proxy3, P1, 42) is p1) + self.assertTrue(self._callFUT(proxy3, P2) is proxy2) + self.assertTrue(self._callFUT(proxy3, P2, 42) is proxy2) + + +class Test_queryInnerProxy(Test_py_queryInnerProxy): + + def _callFUT(self, *args): + from zope.proxy import queryInnerProxy + return queryInnerProxy(*args) + + def _proxyClass(self): + from zope.proxy import ProxyBase + return ProxyBase + + +class Test_py_removeAllProxies(unittest.TestCase): + + def _callFUT(self, *args): + from zope.proxy import py_removeAllProxies + return py_removeAllProxies(*args) + + def _makeProxy(self, obj): + from zope.proxy import PyProxyBase + return PyProxyBase(obj) + + def _makeSecurityProxy(self, obj): + from zope.security.proxy import ProxyPy + checker = object() + return ProxyPy(obj, checker) + + def test_no_proxy(self): + class C(object): + pass + c = C() + self.assertTrue(self._callFUT(c) is c) + + def test_simple_proxy(self): + class C(object): + pass + c = C() + proxy = self._makeProxy(c) + self.assertTrue(self._callFUT(proxy) is c) + + def test_nested_proxy(self): + class C(object): + pass + c = C() + proxy = self._makeProxy(c) + proxy2 = self._makeProxy(proxy) + self.assertTrue(self._callFUT(proxy2) is c) + + @unittest.skipUnless(_HAVE_ZOPE_SECURITY, 'zope.security missing') + def test_security_proxy(self): + class C(object): + pass + c = C() + proxy = self._makeSecurityProxy(c) + self.assertIs(self._callFUT(proxy), c) + +class Test_removeAllProxies(Test_py_removeAllProxies): + + def _callFUT(self, *args): + from zope.proxy import removeAllProxies + return removeAllProxies(*args) + + def _makeProxy(self, obj): + from zope.proxy import ProxyBase + return ProxyBase(obj) + + def _makeSecurityProxy(self, obj): + from zope.security.proxy import Proxy + checker = object() + return Proxy(obj, checker) + +class Test_ProxyIterator(unittest.TestCase): + + def _callFUT(self, *args): + from zope.proxy 
import ProxyIterator + return ProxyIterator(*args) + + def test_no_proxy(self): + class C(object): + pass + c = C() + self.assertEqual(list(self._callFUT(c)), [c]) + + def test_w_simple_proxy(self): + from zope.proxy import ProxyBase + class C(object): + pass + c = C() + proxy = ProxyBase(c) + self.assertEqual(list(self._callFUT(proxy)), [proxy, c]) + + def test_w_nested_proxies(self): + from zope.proxy import ProxyBase + class C(object): + pass + c = C() + proxy = ProxyBase(c) + proxy2 = ProxyBase(proxy) + proxy3 = ProxyBase(proxy2) + proxy4 = ProxyBase(proxy3) + self.assertEqual(list(self._callFUT(proxy4)), + [proxy4, proxy3, proxy2, proxy, c]) + + +class Test_nonOverridable(unittest.TestCase): + + def test_it(self): + from zope.proxy import ProxyBase + from zope.proxy import non_overridable + class Proxy(ProxyBase): + def who(self): + raise AssertionError("Not called") + @non_overridable + def what(self): + return 'PROXY' + class Foo(object): + def who(self): + return 'FOO' + def what(self): + return 'FOO' + p0 = ProxyBase(Foo()) + self.assertEqual(p0.who(), 'FOO') + self.assertEqual(p0.what(), 'FOO') + proxy = Proxy(Foo()) + self.assertEqual(proxy.who(), 'FOO') + self.assertEqual(proxy.what(), 'PROXY') + + +class TestEmptyInterfaceDescriptor(unittest.TestCase): + + def _makeOne(self): + from zope.proxy import _EmptyInterfaceDescriptor + class It(object): + feature = _EmptyInterfaceDescriptor() + return It() + + def test_set(self): + it = self._makeOne() + with self.assertRaises(TypeError): + it.feature = 42 + + def test_delete(self): + it = self._makeOne() + del it.feature + with self.assertRaises(AttributeError): + getattr(it, 'feature') + + def test_iter(self): + it = type(self._makeOne()) + feature = it.__dict__['feature'] + self.assertEqual([], list(feature)) + + +class Comparable(object): + def __init__(self, value): + self.value = value + + def __eq__(self, other): + return self.value == getattr(other, 'value', other) + + def __ne__(self, other): + return 
not self.__eq__(other) + + def __lt__(self, other): + return self.value < getattr(other, 'value', other) + + def __ge__(self, other): + return not self.__lt__(other) + + def __le__(self, other): + return self.value <= getattr(other, 'value', other) + + def __gt__(self, other): + return not self.__le__(other) + + def __repr__(self): # pragma: no cover + return "" % self.value + + +def test_suite(): + return unittest.defaultTestLoader.loadTestsFromName(__name__) diff --git a/thesisenv/lib/python3.6/site-packages/zope/publisher/__init__.py b/thesisenv/lib/python3.6/site-packages/zope/publisher/__init__.py new file mode 100644 index 0000000..b711d36 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/publisher/__init__.py @@ -0,0 +1,2 @@ +# +# This file is necessary to make this directory a package. diff --git a/thesisenv/lib/python3.6/site-packages/zope/publisher/_compat.py b/thesisenv/lib/python3.6/site-packages/zope/publisher/_compat.py new file mode 100644 index 0000000..043ff74 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/publisher/_compat.py @@ -0,0 +1,37 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Compatibility module for xmlrpclib + +This module unifies namespace for xmlrpclib, that changed its name in +python-3.x (became xmlrpc.client). + +The intention is to let xmlrpclib names to be importable from zcml. 
+""" +import sys +PYTHON2 = sys.version_info[0] == 2 +PYTHON3 = sys.version_info[0] == 3 + +if PYTHON2: + def _u(s, encoding='unicode_escape'): + return unicode(s, encoding) + from xmlrpclib import * + import types + CLASS_TYPES = (type, types.ClassType) +else: + def _u(s, encoding=None): + if encoding is None: + return s + return str(s, encoding) + CLASS_TYPES = (type,) + from xmlrpc.client import * diff --git a/thesisenv/lib/python3.6/site-packages/zope/publisher/base.py b/thesisenv/lib/python3.6/site-packages/zope/publisher/base.py new file mode 100644 index 0000000..ee6de47 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/publisher/base.py @@ -0,0 +1,487 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Base implementations of the Publisher objects + +Specifically, 'BaseRequest', 'BaseResponse', and 'DefaultPublication' are +specified here. 
+""" +from io import BytesIO, StringIO + +from zope.interface import implementer +from zope.interface.common.mapping import IReadMapping, IEnumerableMapping +from zope.exceptions.exceptionformatter import print_exception +from zope.security.proxy import removeSecurityProxy + +from zope.publisher.interfaces import IPublication, IHeld +from zope.publisher.interfaces import NotFound, DebugError, Unauthorized +from zope.publisher.interfaces import IRequest, IResponse, IDebugFlags +from zope.publisher.publish import mapply + +from zope.publisher._compat import PYTHON2 + +_marker = object() + +@implementer(IResponse) +class BaseResponse(object): + """Base Response Class + """ + + __slots__ = ( + '_result', # The result of the application call + '_request', # The associated request (if any) + ) + + + def __init__(self): + self._request = None + + def setResult(self, result): + 'See IPublisherResponse' + self._result = result + + def handleException(self, exc_info): + 'See IPublisherResponse' + # We want exception to be formatted to native strings. Pick + # respective io class depending on python version. 
+ f = BytesIO() if PYTHON2 else StringIO() + print_exception( + exc_info[0], exc_info[1], exc_info[2], 100, f) + self.setResult(f.getvalue()) + + def internalError(self): + 'See IPublisherResponse' + pass + + def reset(self): + 'See IPublisherResponse' + pass + + def retry(self): + 'See IPublisherResponse' + return self.__class__() + +@implementer(IReadMapping) +class RequestDataGetter(object): + + def __init__(self, request): + self.__get = getattr(request, self._gettrname) + + def __getitem__(self, name): + return self.__get(name) + + def get(self, name, default=None): + return self.__get(name, default) + + def __contains__(self, key): + lookup = self.get(key, self) + return lookup is not self + + has_key = __contains__ + +@implementer(IEnumerableMapping) +class RequestDataMapper(object): + + def __init__(self, request): + self.__map = getattr(request, self._mapname) + + def __getitem__(self, name): + return self.__map[name] + + def get(self, name, default=None): + return self.__map.get(name, default) + + def __contains__(self, key): + lookup = self.get(key, self) + return lookup is not self + + has_key = __contains__ + + def keys(self): + return self.__map.keys() + + def __iter__(self): + return iter(self.keys()) + + def items(self): + return self.__map.items() + + def values(self): + return self.__map.values() + + def __len__(self): + return len(self.__map) + +class RequestDataProperty(object): + + def __init__(self, gettr_class): + self.__gettr_class = gettr_class + + def __get__(self, request, rclass=None): + if request is not None: + return self.__gettr_class(request) + + def __set__(*args): + raise AttributeError('Unassignable attribute') + + +class RequestEnvironment(RequestDataMapper): + _mapname = '_environ' + + +@implementer(IDebugFlags) +class DebugFlags(object): + """Debugging flags.""" + + sourceAnnotations = False + showTAL = False + + +@implementer(IRequest) +class BaseRequest(object): + """Represents a publishing request. 
+ + This object provides access to request data. Request data may + vary depending on the protocol used. + + Request objects are created by the object publisher and will be + passed to published objects through the argument name, REQUEST. + + The request object is a mapping object that represents a + collection of variable to value mappings. + """ + + __slots__ = ( + '__provides__', # Allow request to directly provide interfaces + '_held', # Objects held until the request is closed + '_traversed_names', # The names that have been traversed + '_last_obj_traversed', # Object that was traversed last + '_traversal_stack', # Names to be traversed, in reverse order + '_environ', # The request environment variables + '_response', # The response + '_args', # positional arguments + '_body_instream', # input stream + '_body', # The request body as a string + '_publication', # publication object + '_principal', # request principal, set by publication + 'interaction', # interaction, set by interaction + 'debug', # debug flags + 'annotations', # per-package annotations + ) + + environment = RequestDataProperty(RequestEnvironment) + + def __init__(self, body_instream, environ, response=None, + positional=None): + self._traversal_stack = [] + self._last_obj_traversed = None + self._traversed_names = [] + self._environ = environ + + self._args = positional or () + + if response is None: + self._response = self._createResponse() + else: + self._response = response + + self._response._request = self + + self._body_instream = body_instream + self._held = () + self._principal = None + self.debug = DebugFlags() + self.interaction = None + self.annotations = {} + + def setPrincipal(self, principal): + self._principal = principal + + principal = property(lambda self: self._principal) + + def _getPublication(self): + 'See IPublisherRequest' + return getattr(self, '_publication', None) + + publication = property(_getPublication) + + def processInputs(self): + 'See IPublisherRequest' + # 
Nothing to do here + + def retry(self): + 'See IPublisherRequest' + raise TypeError('Retry is not supported') + + def setPublication(self, pub): + 'See IPublisherRequest' + self._publication = pub + + def supportsRetry(self): + 'See IPublisherRequest' + return 0 + + def traverse(self, obj): + 'See IPublisherRequest' + + publication = self.publication + + traversal_stack = self._traversal_stack + traversed_names = self._traversed_names + + prev_object = None + while True: + + self._last_obj_traversed = obj + + if removeSecurityProxy(obj) is not removeSecurityProxy(prev_object): + # Invoke hooks (but not more than once). + publication.callTraversalHooks(self, obj) + + if not traversal_stack: + # Finished traversal. + break + + prev_object = obj + + # Traverse to the next step. + entry_name = traversal_stack.pop() + traversed_names.append(entry_name) + obj = publication.traverseName(self, obj, entry_name) + + return obj + + def close(self): + 'See IPublicationRequest' + + for held in self._held: + if IHeld.providedBy(held): + held.release() + + self._held = None + self._body_instream = None + self._publication = None + + def getPositionalArguments(self): + 'See IPublicationRequest' + return self._args + + def _getResponse(self): + return self._response + + response = property(_getResponse) + + def getTraversalStack(self): + 'See IPublicationRequest' + return list(self._traversal_stack) # Return a copy + + def hold(self, object): + 'See IPublicationRequest' + self._held = self._held + (object,) + + def setTraversalStack(self, stack): + 'See IPublicationRequest' + self._traversal_stack[:] = list(stack) + + def _getBodyStream(self): + 'See zope.publisher.interfaces.IApplicationRequest' + return self._body_instream + + bodyStream = property(_getBodyStream) + + def __len__(self): + 'See Interface.Common.Mapping.IEnumerableMapping' + return len(self.keys()) + + def items(self): + 'See Interface.Common.Mapping.IEnumerableMapping' + result = [] + get = self.get + for k in 
self.keys(): + result.append((k, get(k))) + return result + + def keys(self): + 'See Interface.Common.Mapping.IEnumerableMapping' + return self._environ.keys() + + def __iter__(self): + return iter(self.keys()) + + def values(self): + 'See Interface.Common.Mapping.IEnumerableMapping' + result = [] + get = self.get + for k in self.keys(): + result.append(get(k)) + return result + + def __getitem__(self, key): + 'See Interface.Common.Mapping.IReadMapping' + result = self.get(key, _marker) + if result is _marker: + raise KeyError(key) + else: + return result + + def get(self, key, default=None): + 'See Interface.Common.Mapping.IReadMapping' + result = self._environ.get(key, _marker) + if result is not _marker: + return result + + return default + + def __contains__(self, key): + 'See Interface.Common.Mapping.IReadMapping' + lookup = self.get(key, self) + return lookup is not self + + has_key = __contains__ + + def _createResponse(self): + # Should be overridden by subclasses + return BaseResponse() + + def __bool__(self): + # This is here to avoid calling __len__ for boolean tests + return True + + __nonzero__ = __bool__ # Python 2 + + def __str__(self): + L1 = self.items() + L1.sort() + return "\n".join(map(lambda item: "%s:\t%s" % item, L1)) + + def _setupPath_helper(self, attr): + path = self.get(attr, "/") + if path.endswith('/'): + # Remove trailing backslash, so that we will not get an empty + # last entry when splitting the path. + path = path[:-1] + self._endswithslash = True + else: + self._endswithslash = False + + clean = [] + for item in path.split('/'): + if not item or item == '.': + continue + elif item == '..': + # try to remove the last name + try: + del clean[-1] + except IndexError: + # the list of names was empty, so do nothing and let the + # string '..' 
be placed on the list + pass + clean.append(item) + + clean.reverse() + self.setTraversalStack(clean) + + self._path_suffix = None + +class TestRequest(BaseRequest): + + __slots__ = ('_presentation_type', ) + + def __init__(self, path, body_instream=None, environ=None): + + if environ is None: + environ = {} + + environ['PATH_INFO'] = path + if body_instream is None: + body_instream = BytesIO(b'') + + super(TestRequest, self).__init__(body_instream, environ) + +@implementer(IPublication) +class DefaultPublication(object): + """A stub publication. + + This works just like Zope2's ZPublisher. It rejects any name + starting with an underscore and any objects (specifically: method) + that doesn't have a docstring. + """ + + require_docstrings = True + + def __init__(self, app): + self.app = app + + def beforeTraversal(self, request): + # Lop off leading and trailing empty names + stack = request.getTraversalStack() + while stack and not stack[-1]: + stack.pop() # toss a trailing empty name + while stack and not stack[0]: + stack.pop(0) # toss a leading empty name + request.setTraversalStack(stack) + + def getApplication(self, request): + return self.app + + def callTraversalHooks(self, request, ob): + pass + + def traverseName(self, request, ob, name, check_auth=1): + if name.startswith('_'): + raise Unauthorized(name) + if hasattr(ob, name): + subob = getattr(ob, name) + else: + try: + subob = ob[name] + except (KeyError, IndexError, + TypeError, AttributeError): + raise NotFound(ob, name, request) + if self.require_docstrings and not getattr(subob, '__doc__', None): + raise DebugError(subob, 'Missing or empty doc string') + return subob + + def getDefaultTraversal(self, request, ob): + return ob, () + + def afterTraversal(self, request, ob): + pass + + def callObject(self, request, ob): + return mapply(ob, request.getPositionalArguments(), request) + + def afterCall(self, request, ob): + pass + + def endRequest(self, request, ob): + pass + + def handleException(self, 
object, request, exc_info, retry_allowed=1): + # Let the response handle it as best it can. + request.response.reset() + request.response.handleException(exc_info) + + +class TestPublication(DefaultPublication): + + def traverseName(self, request, ob, name, check_auth=1): + if hasattr(ob, name): + subob = getattr(ob, name) + else: + try: + subob = ob[name] + except (KeyError, IndexError, + TypeError, AttributeError): + raise NotFound(ob, name, request) + return subob diff --git a/thesisenv/lib/python3.6/site-packages/zope/publisher/browser.py b/thesisenv/lib/python3.6/site-packages/zope/publisher/browser.py new file mode 100644 index 0000000..f9e11f8 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/publisher/browser.py @@ -0,0 +1,997 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Browser-specific Publisher classes + +Here we define the specific 'BrowserRequest' and 'BrowserResponse' class. The +big improvement of the 'BrowserRequest' to 'HTTPRequest' is that is can handle +HTML form data and convert them into a Python-native format. Even file data is +packaged into a nice, Python-friendly 'FileUpload' object. 
+""" +__docformat__ = 'restructuredtext' + +import re +from cgi import FieldStorage +import tempfile + +import zope.component +import zope.interface +from zope.interface import implementer, directlyProvides +from zope.i18n.interfaces import IUserPreferredLanguages +from zope.i18n.interfaces import IUserPreferredCharsets +from zope.i18n.interfaces import IModifiableUserPreferredLanguages +from zope.location import Location + +from zope.publisher.interfaces import NotFound +from zope.publisher.interfaces import IDefaultSkin +from zope.publisher.interfaces.browser import IBrowserRequest +from zope.publisher.interfaces.browser import IDefaultBrowserLayer +from zope.publisher.interfaces.browser import IBrowserApplicationRequest +from zope.publisher.interfaces.browser import IBrowserView +from zope.publisher.interfaces.browser import IBrowserPage +from zope.publisher.interfaces.browser import IBrowserSkinType +from zope.publisher.interfaces.http import IHTTPRequest +from zope.publisher.http import HTTPRequest, HTTPResponse, getCharsetUsingRequest + +# BBB imports, this compoennts get moved from this module +from zope.publisher.interfaces import ISkinType #BBB import +from zope.publisher.interfaces import ISkinChangedEvent #BBB import +from zope.publisher.skinnable import getDefaultSkin #BBB import +from zope.publisher.skinnable import setDefaultSkin #BBB import +from zope.publisher.skinnable import applySkin #BBB import +from zope.publisher.skinnable import SkinChangedEvent #BBB import + +from zope.publisher._compat import PYTHON2, _u + + +__ArrayTypes = (list, tuple) + +start_of_header_search=re.compile(b'(]*>)', re.I).search +base_re_search=re.compile(b'()',re.I).search +isRelative = re.compile("[-_.!~*a-zA-z0-9'()@&=+$,]+(/|$)").match +newlines = re.compile('\r\n|\n\r|\r') + +def is_text_html(content_type): + return content_type.startswith('text/html') + +# Flag Constants +SEQUENCE = 1 +DEFAULT = 2 +RECORD = 4 +RECORDS = 8 +REC = RECORD | RECORDS +CONVERTED = 32 
+DEFAULTABLE_METHODS = 'GET', 'POST', 'HEAD' + + +def field2string(v): + if hasattr(v, 'read'): + return v.read() + return str(v) + +def field2text(v, nl=newlines): + return nl.sub("\n", field2string(v)) + +def field2required(v): + v = field2string(v) + if not v.strip(): + raise ValueError('No input for required field

      ') + return v + +def field2int(v): + if isinstance(v, __ArrayTypes): + return list(map(field2int, v)) + v = field2string(v) + if not v: + raise ValueError('Empty entry when integer expected') + try: + return int(v) + except ValueError: + raise ValueError("An integer was expected in the value '%s'" % v) + +def field2float(v): + if isinstance(v, __ArrayTypes): + return list(map(field2float, v)) + v = field2string(v) + if not v: + raise ValueError( + 'Empty entry when floating-point number expected') + try: + return float(v) + except ValueError: + raise ValueError( + "A floating-point number was expected in the value '%s'" % v) + +def field2long(v): + if isinstance(v, __ArrayTypes): + return list(map(field2long, v)) + v = field2string(v) + + # handle trailing 'L' if present. + if v and v[-1].upper() == 'L': + v = v[:-1] + if not v: + raise ValueError('Empty entry when integer expected') + try: + return int(v) + except ValueError: + raise ValueError("A long integer was expected in the value '%s'" % v) + +def field2tokens(v): + return field2string(v).split() + +def field2lines(v): + if isinstance(v, __ArrayTypes): + return [str(item) for item in v] + return field2text(v).splitlines() + +def field2boolean(v): + return bool(v) + +type_converters = { + 'float': field2float, + 'int': field2int, + 'long': field2long, + 'string': field2string, + 'required': field2required, + 'tokens': field2tokens, + 'lines': field2lines, + 'text': field2text, + 'boolean': field2boolean, + } + +get_converter = type_converters.get + +def registerTypeConverter(field_type, converter, replace=False): + """Add a custom type converter to the registry. + + o If 'replace' is not true, raise a KeyError if a converter is + already registered for 'field_type'. 
+ """ + existing = type_converters.get(field_type) + + if existing is not None and not replace: + raise KeyError('Existing converter for field_type: %s' % field_type) + + type_converters[field_type] = converter + + +isCGI_NAME = lambda key: key in { + # These fields are placed in request.environ instead of request.form. + 'SERVER_SOFTWARE' : 1, + 'SERVER_NAME' : 1, + 'GATEWAY_INTERFACE' : 1, + 'SERVER_PROTOCOL' : 1, + 'SERVER_PORT' : 1, + 'REQUEST_METHOD' : 1, + 'PATH_INFO' : 1, + 'PATH_TRANSLATED' : 1, + 'SCRIPT_NAME' : 1, + 'QUERY_STRING' : 1, + 'REMOTE_HOST' : 1, + 'REMOTE_ADDR' : 1, + 'AUTH_TYPE' : 1, + 'REMOTE_USER' : 1, + 'REMOTE_IDENT' : 1, + 'CONTENT_TYPE' : 1, + 'CONTENT_LENGTH' : 1, + 'SERVER_URL': 1, + } + +hide_key=lambda key: key in { + 'HTTP_AUTHORIZATION':1, + 'HTTP_CGI_AUTHORIZATION': 1, + } + +class Record(object): + + _attrs = frozenset(('get', 'keys', 'items', 'values', 'copy', + 'has_key', '__contains__')) + + def __getattr__(self, key, default=None): + if key in self._attrs: + return getattr(self.__dict__, key) + raise AttributeError(key) + + def __getitem__(self, key): + return self.__dict__[key] + + def __str__(self): + items = list(self.__dict__.items()) + items.sort() + return "{" + ", ".join(["%s: %s" % item for item in items]) + "}" + + def __repr__(self): + items = list(self.__dict__.items()) + items.sort() + return ("{" + + ", ".join(["%s: %s" % (key, repr(value)) + for key, value in items]) + "}") + +_get_or_head = 'GET', 'HEAD' +@implementer(IBrowserRequest, IBrowserApplicationRequest) +class BrowserRequest(HTTPRequest): + + __slots__ = ( + '__provides__', # Allow request to directly provide interfaces + 'form', # Form data + 'charsets', # helper attribute + '__meth', + '__tuple_items', + '__defaults', + '__annotations__', + ) + + # Set this to True in a subclass to redirect GET requests when the + # effective and actual URLs differ. 
+ use_redirect = False + + def __init__(self, body_instream, environ, response=None): + self.form = {} + self.charsets = None + super(BrowserRequest, self).__init__(body_instream, environ, response) + + + def _createResponse(self): + return BrowserResponse() + + def _decode(self, text): + """Try to decode the text using one of the available charsets.""" + # According to PEP-3333, in python-3, QUERY_STRING is a string, + # representing 'latin-1' encoded byte array. So, if we are in python-3 + # context, encode text as 'latin-1' first, to try to decode + # resulting byte array using user-supplied charset. + if not isinstance(text, bytes): + text = text.encode('latin-1') + if self.charsets is None: + envadapter = IUserPreferredCharsets(self) + self.charsets = envadapter.getPreferredCharsets() or ['utf-8'] + self.charsets = [c for c in self.charsets if c != '*'] + for charset in self.charsets: + try: + text = _u(text, charset) + break + except UnicodeError: + pass + return text + + def processInputs(self): + 'See IPublisherRequest' + + if self.method not in _get_or_head: + # Process self.form if not a GET request. + fp = self._body_instream + if self.method == 'POST': + content_type = self._environ.get('CONTENT_TYPE') + if content_type and not ( + content_type.startswith('application/x-www-form-urlencoded') + or + content_type.startswith('multipart/') + ): + # for non-multi and non-form content types, FieldStorage + # consumes the body and we have no good place to put it. + # So we just won't call FieldStorage. :) + return + else: + fp = None + + # If 'QUERY_STRING' is not present in self._environ + # FieldStorage will try to get it from sys.argv[1] + # which is not what we need. + if 'QUERY_STRING' not in self._environ: + self._environ['QUERY_STRING'] = '' + + # The Python 2.6 cgi module mixes the query string and POST values + # together. We do not want this. 
+ env = self._environ + if self.method == 'POST' and self._environ['QUERY_STRING']: + env = env.copy() + del env['QUERY_STRING'] + + + args = {'encoding': 'utf-8'} if not PYTHON2 else {} + fs = ZopeFieldStorage(fp=fp, environ=env, + keep_blank_values=1, **args) + # On python 3.4 and up, FieldStorage explictly closes files + # when it is garbage collected + # see: + # http://bugs.python.org/issue18394 + # https://hg.python.org/cpython/rev/c0e9ba7b26d5 + # so we keep a reference to the FieldStorage till we are + # finished processing the request. + self.hold(fs) + + fslist = getattr(fs, 'list', None) + if fslist is not None: + self.__meth = None + self.__tuple_items = {} + self.__defaults = {} + + # process all entries in the field storage (form) + for item in fslist: + self.__processItem(item) + + if self.__defaults: + self.__insertDefaults() + + if self.__tuple_items: + self.__convertToTuples() + + if self.__meth: + self.setPathSuffix((self.__meth,)) + + _typeFormat = re.compile('([a-zA-Z][a-zA-Z0-9_]+|\\.[xy])$') + + def __processItem(self, item): + """Process item in the field storage.""" + + # Check whether this field is a file upload object + # Note: A field exists for files, even if no filename was + # passed in and no data was uploaded. Therefore we can only + # tell by the empty filename that no upload was made. + key = item.name + if (hasattr(item, 'file') and hasattr(item, 'filename') + and hasattr(item,'headers')): + if (item.file and + (item.filename is not None and item.filename != '' + # RFC 1867 says that all fields get a content-type. + # or 'content-type' in map(lower, item.headers.keys()) + )): + item = FileUpload(item) + else: + item = item.value + + flags = 0 + converter = None + + # Loop through the different types and set + # the appropriate flags + # Syntax: var_name:type_name + + # We'll search from the back to the front. + # We'll do the search in two steps. First, we'll + # do a string search, and then we'll check it with + # a re search. 
+ + while key: + pos = key.rfind(":") + if pos < 0: + break + match = self._typeFormat.match(key, pos + 1) + if match is None: + break + + key, type_name = key[:pos], key[pos + 1:] + + # find the right type converter + c = get_converter(type_name, None) + + if c is not None: + converter = c + flags |= CONVERTED + elif type_name == 'list': + flags |= SEQUENCE + elif type_name == 'tuple': + self.__tuple_items[key] = 1 + flags |= SEQUENCE + elif (type_name == 'method' or type_name == 'action'): + if key: + self.__meth = key + else: + self.__meth = item + elif (type_name == 'default_method' + or type_name == 'default_action') and not self.__meth: + if key: + self.__meth = key + else: + self.__meth = item + elif type_name == 'default': + flags |= DEFAULT + elif type_name == 'record': + flags |= RECORD + elif type_name == 'records': + flags |= RECORDS + elif type_name == 'ignore_empty' and not item: + # skip over empty fields + return + + if key is not None: + key = self._decode(key) + + if isinstance(item, (str, bytes)): + item = self._decode(item) + + if flags: + self.__setItemWithType(key, item, flags, converter) + else: + self.__setItemWithoutType(key, item) + + def __setItemWithoutType(self, key, item): + """Set item value without explicit type.""" + form = self.form + if key not in form: + form[key] = item + else: + found = form[key] + if isinstance(found, list): + found.append(item) + else: + form[key] = [found, item] + + def __setItemWithType(self, key, item, flags, converter): + """Set item value with explicit type.""" + #Split the key and its attribute + if flags & REC: + key, attr = self.__splitKey(key) + + # defer conversion + if flags & CONVERTED: + try: + item = converter(item) + except: + if item or flags & DEFAULT or key not in self.__defaults: + raise + item = self.__defaults[key] + if flags & RECORD: + item = getattr(item, attr) + elif flags & RECORDS: + item = getattr(item[-1], attr) + + # Determine which dictionary to use + if flags & DEFAULT: + form 
= self.__defaults + else: + form = self.form + + # Insert in dictionary + if key not in form: + if flags & SEQUENCE: + item = [item] + if flags & RECORD: + r = form[key] = Record() + setattr(r, attr, item) + elif flags & RECORDS: + r = Record() + setattr(r, attr, item) + form[key] = [r] + else: + form[key] = item + else: + r = form[key] + if flags & RECORD: + if not flags & SEQUENCE: + setattr(r, attr, item) + else: + if not hasattr(r, attr): + setattr(r, attr, [item]) + else: + getattr(r, attr).append(item) + elif flags & RECORDS: + last = r[-1] + if not hasattr(last, attr): + if flags & SEQUENCE: + item = [item] + setattr(last, attr, item) + else: + if flags & SEQUENCE: + getattr(last, attr).append(item) + else: + new = Record() + setattr(new, attr, item) + r.append(new) + else: + if isinstance(r, list): + r.append(item) + else: + form[key] = [r, item] + + def __splitKey(self, key): + """Split the key and its attribute.""" + i = key.rfind(".") + if i >= 0: + return key[:i], key[i + 1:] + return key, "" + + def __convertToTuples(self): + """Convert form values to tuples.""" + form = self.form + + for key in self.__tuple_items: + if key in form: + form[key] = tuple(form[key]) + else: + k, attr = self.__splitKey(key) + + # remove any type_names in the attr + i = attr.find(":") + if i >= 0: + attr = attr[:i] + + if k in form: + item = form[k] + if isinstance(item, Record): + if hasattr(item, attr): + setattr(item, attr, tuple(getattr(item, attr))) + else: + for v in item: + if hasattr(v, attr): + setattr(v, attr, tuple(getattr(v, attr))) + + def __insertDefaults(self): + """Insert defaults into form dictionary.""" + form = self.form + + for keys, values in self.__defaults.items(): + if not keys in form: + form[keys] = values + else: + item = form[keys] + if isinstance(values, Record): + for k, v in values.items(): + if not hasattr(item, k): + setattr(item, k, v) + elif isinstance(values, list): + for val in values: + if isinstance(val, Record): + for k, v in 
val.items(): + for r in item: + if not hasattr(r, k): + setattr(r, k, v) + elif not val in item: + item.append(val) + + def traverse(self, obj): + 'See IPublisherRequest' + + ob = super(BrowserRequest, self).traverse(obj) + method = self.method + + base_needed = 0 + if self._path_suffix: + # We had a :method variable, so we need to set the base, + # but we don't look for default documents any more. + base_needed = 1 + redirect = 0 + elif method in DEFAULTABLE_METHODS: + # We need to check for default documents + publication = self.publication + + nsteps = 0 + ob, add_steps = publication.getDefaultTraversal(self, ob) + while add_steps: + nsteps += len(add_steps) + add_steps = list(add_steps) + add_steps.reverse() + self.setTraversalStack(add_steps) + ob = super(BrowserRequest, self).traverse(ob) + ob, add_steps = publication.getDefaultTraversal(self, ob) + + if nsteps != self._endswithslash: + base_needed = 1 + redirect = self.use_redirect and method == 'GET' + + + if base_needed: + url = self.getURL() + response = self.response + if redirect: + response.redirect(url) + return '' + elif not response.getBase(): + response.setBase(url) + + return ob + + def keys(self): + 'See Interface.Common.Mapping.IEnumerableMapping' + d = {} + d.update(self._environ) + d.update(self._cookies) + d.update(self.form) + return list(d.keys()) + + + def get(self, key, default=None): + 'See Interface.Common.Mapping.IReadMapping' + marker = object() + result = self.form.get(key, marker) + if result is not marker: + return result + + return super(BrowserRequest, self).get(key, default) + +class ZopeFieldStorage(FieldStorage): + + def make_file(self, binary=None): + if PYTHON2 or self._binary_file: + return tempfile.NamedTemporaryFile("w+b") + else: + return tempfile.NamedTemporaryFile("w+", + encoding=self.encoding, newline='\n') + + +class FileUpload(object): + '''File upload objects + + File upload objects are used to represent file-uploaded data. 
+ + File upload objects can be used just like files. + + In addition, they have a 'headers' attribute that is a dictionary + containing the file-upload headers, and a 'filename' attribute + containing the name of the uploaded file. + ''' + + def __init__(self, aFieldStorage): + + file = aFieldStorage.file + if hasattr(file, '__methods__'): + methods = file.__methods__ + else: + methods = ['close', 'fileno', 'flush', 'isatty', + 'read', 'readline', 'readlines', 'seek', + 'tell', 'truncate', 'write', 'writelines', + 'name'] + + d = self.__dict__ + for m in methods: + if hasattr(file,m): + d[m] = getattr(file,m) + + self.headers = aFieldStorage.headers + filename = aFieldStorage.filename + if isinstance(aFieldStorage.filename, bytes): + filename = _u(aFieldStorage.filename, 'UTF-8') + # fix for IE full paths + filename = filename[filename.rfind('\\')+1:].strip() + self.filename = filename + +class RedirectingBrowserRequest(BrowserRequest): + """Browser requests that redirect when the actual and effective URLs differ + """ + + use_redirect = True + +class TestRequest(BrowserRequest): + """Browser request with a constructor convenient for testing + """ + + def __init__(self, body_instream=None, environ=None, form=None, + skin=None, **kw): + + _testEnv = { + 'SERVER_URL': 'http://127.0.0.1', + 'HTTP_HOST': '127.0.0.1', + 'CONTENT_LENGTH': '0', + 'GATEWAY_INTERFACE': 'TestFooInterface/1.0', + } + + if environ is not None: + _testEnv.update(environ) + + if kw: + _testEnv.update(kw) + if body_instream is None: + from io import BytesIO + body_instream = BytesIO() + + super(TestRequest, self).__init__(body_instream, _testEnv) + if form: + self.form.update(form) + + # Setup locale object + langs = BrowserLanguages(self).getPreferredLanguages() + from zope.i18n.locales import locales + if not langs or langs[0] == '': + self._locale = locales.getLocale(None, None, None) + else: + parts = (langs[0].split('-') + [None, None])[:3] + self._locale = locales.getLocale(*parts) + + if 
skin is not None: + directlyProvides(self, skin) + else: + directlyProvides(self, IDefaultBrowserLayer) + + + +class BrowserResponse(HTTPResponse): + """Browser response + """ + + __slots__ = ( + '_base', # The base href + ) + + def _implicitResult(self, body): + content_type = self.getHeader('content-type') + if content_type is None and self._status != 304: + if isHTML(body): + content_type = 'text/html' + else: + content_type = 'text/plain' + self.setHeader('x-content-type-warning', 'guessed from content') + self.setHeader('content-type', content_type) + + body, headers = super(BrowserResponse, self)._implicitResult(body) + body = self.__insertBase(body) + # Update the Content-Length header to account for the inserted + # tag. + headers = [ + (name, value) for name, value in headers + if name != 'content-length' + ] + headers.append(('content-length', str(len(body)))) + return body, headers + + + def __insertBase(self, body): + # Only insert a base tag if content appears to be html. + content_type = self.getHeader('content-type', '') + if content_type and not is_text_html(content_type): + return body + + if self.getBase(): + if body: + match = start_of_header_search(body) + if match is not None: + index = match.start(0) + len(match.group(0)) + ibase = base_re_search(body) + if ibase is None: + # Make sure the base URL is not a unicode string. 
+ base = self.getBase() + if not isinstance(base, bytes): + encoding = getCharsetUsingRequest(self._request) or 'utf-8' + base = self.getBase().encode(encoding) + #body = (b'%s\n\n%s' % + # (body[:index], base, body[index:])) + body = b''.join([body[:index], + b'\n\n', + body[index:]]) + return body + + def getBase(self): + return getattr(self, '_base', '') + + def setBase(self, base): + self._base = base + + def redirect(self, location, status=None, trusted=False): + base = getattr(self, '_base', '') + if base and isRelative(str(location)): + l = base.rfind('/') + if l >= 0: + base = base[:l+1] + else: + base += '/' + location = base + location + + # TODO: HTTP redirects must provide an absolute location, see + # http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.30 + # So, what if location is relative and base is unknown? Uncomment + # the following and you'll see that it actually happens. + # + # if isRelative(str(location)): + # raise AssertionError('Cannot determine absolute location') + + return super(BrowserResponse, self).redirect(location, status, trusted) + + def reset(self): + super(BrowserResponse, self).reset() + self._base = '' + +def isHTML(str): + """Try to determine whether str is HTML or not.""" + s = str.lstrip().lower() + if s.startswith(''): + return True + if s.startswith(' + + + + + + + + + + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/security/protectclass.py b/thesisenv/lib/python3.6/site-packages/zope/security/protectclass.py new file mode 100644 index 0000000..1b93994 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/security/protectclass.py @@ -0,0 +1,89 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Make assertions about permissions needed to access instance attributes +""" + +from zope.security.checker import Checker +from zope.security.checker import CheckerPublic +from zope.security.checker import defineChecker +from zope.security.checker import getCheckerForInstancesOf +from zope.security.interfaces import PUBLIC_PERMISSION_NAME as zope_Public + + +def protectName(class_, name, permission): + """Set a permission on a particular name.""" + + checker = getCheckerForInstancesOf(class_) + if checker is None: + checker = Checker({}, {}) + defineChecker(class_, checker) + + if permission == zope_Public: + # Translate public permission to CheckerPublic + permission = CheckerPublic + + # We know a dictionary was used because we set it + protections = checker.get_permissions + protections[name] = permission + +def protectSetAttribute(class_, name, permission): + """Set a permission on a particular name.""" + checker = getCheckerForInstancesOf(class_) + if checker is None: + checker = Checker({}, {}) + defineChecker(class_, checker) + + if permission == zope_Public: + # Translate public permission to CheckerPublic + permission = CheckerPublic + + # We know a dictionary was used because we set it + # Note however, that if a checker was created manually + # and the caller used say NamesChecker or MultiChecker, + # then set_permissions may be None here as Checker + # defaults a missing set_permissions parameter to None. + # Jim says this doensn't happens with the C version of the + # checkers because they use a 'shared dummy dict'. 
+ protections = checker.set_permissions + protections[name] = permission + +def protectLikeUnto(class_, like_unto): + """Use the protections from like_unto for the given class.""" + + unto_checker = getCheckerForInstancesOf(like_unto) + if unto_checker is None: + return + + # We know a dictionary was used because we set it + # Note however, that if a checker was created manually + # and the caller used say NamesChecker or MultiChecker, + # then set_permissions may be None here as Checker + # defaults a missing set_permissions parameter to None. + # Jim says this doensn't happens with the C version of the + # checkers because they use a 'shared dummy dict'. + unto_get_protections = unto_checker.get_permissions + unto_set_protections = unto_checker.set_permissions + + checker = getCheckerForInstancesOf(class_) + if checker is None: + checker = Checker({}, {}) + defineChecker(class_, checker) + + get_protections = checker.get_permissions + for name in unto_get_protections: + get_protections[name] = unto_get_protections[name] + + set_protections = checker.set_permissions + for name in unto_set_protections: + set_protections[name] = unto_set_protections[name] diff --git a/thesisenv/lib/python3.6/site-packages/zope/security/proxy.py b/thesisenv/lib/python3.6/site-packages/zope/security/proxy.py new file mode 100644 index 0000000..b64fd0a --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/security/proxy.py @@ -0,0 +1,416 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +""" +Helper functions for proxies. + +.. seealso:: :ref:`proxy-known-issues` +""" +import functools +import sys + +from zope.proxy import PyProxyBase +from zope.security._compat import PURE_PYTHON +from zope.security._compat import _BUILTINS +from zope.security.interfaces import ForbiddenAttribute + +def _check_name(meth, wrap_result=True): + name = meth.__name__ + def _wrapper(self, *args, **kw): + wrapped = super(PyProxyBase, self).__getattribute__('_wrapped') + checker = super(PyProxyBase, self).__getattribute__('_checker') + checker.check(wrapped, name) + res = meth(self, *args, **kw) + if not wrap_result: + return res + return checker.proxy(res) + return functools.update_wrapper(_wrapper, meth) + +def _check_name_inplace(meth): + name = meth.__name__ + def _wrapper(self, *args, **kw): + wrapped = super(PyProxyBase, self).__getattribute__('_wrapped') + checker = super(PyProxyBase, self).__getattribute__('_checker') + checker.check(wrapped, name) + w_meth = getattr(wrapped, name, None) + if w_meth is not None: + # The proxy object cannot change; we are modifying in place. + self._wrapped = w_meth(*args, **kw) + return self + x_name = '__%s__' % name[3:-2] + return ProxyPy(getattr(wrapped, x_name)(*args, **kw), checker) + return functools.update_wrapper(_wrapper, meth) + +def _fmt_address(obj): + # Try to replicate PyString_FromString("%p", obj), which actually uses + # the platform sprintf(buf, "%p", obj), which we cannot access from Python + # directly (and ctypes seems like overkill). 
+ if sys.platform != 'win32': + return '0x%0x' % id(obj) + if sys.maxsize < 2**32: # pragma: no cover + return '0x%08X' % id(obj) + return '0x%016X' % id(obj) # pragma: no cover + + +class ProxyPy(PyProxyBase): + """ + The pure-Python reference implementation of a security proxy. + + This should normally not be created directly, instead use the + :func:`~.ProxyFactory`. + + You can choose to use this implementation instead of the C implementation + by default by setting the ``PURE_PYTHON`` environment variable before + :mod:`zope.security` is imported. + """ + __slots__ = ('_wrapped', '_checker') + + def __new__(cls, value, checker): + inst = super(ProxyPy, cls).__new__(cls) + inst._wrapped = value + inst._checker = checker + return inst + + def __init__(self, value, checker): + if checker is None: + raise ValueError('checker may now be None') + self._wrapped = value + self._checker = checker + + # Attribute protocol + def __getattribute__(self, name): + if name in ('_wrapped', '_checker'): + # Only allow _wrapped and _checker to be accessed from inside. + if sys._getframe(1).f_locals.get('self') is not self: + raise AttributeError(name) + wrapped = super(ProxyPy, self).__getattribute__('_wrapped') + if name == '_wrapped': + return wrapped + checker = super(ProxyPy, self).__getattribute__('_checker') + if name == '_checker': + return checker + if name not in ('__cmp__', '__hash__', '__bool__', '__nonzero__', + '__lt__', '__le__', '__eq__', '__ne__', '__ge__', + '__gt__'): + checker.check_getattr(wrapped, name) + if name in ('__reduce__', '__reduce_ex__'): + # The superclass specifically denies access to __reduce__ + # and __reduce__ex__, not letting proxies be pickled. But + # for backwards compatibility, we need to be able to + # pickle proxies. See checker:Global for an example. 
+ val = getattr(wrapped, name) + elif name == '__module__': + # The superclass deals with descriptors found in the type + # of this object just like the Python language spec states, letting + # them have precedence over things found in the instance. This + # normally makes us a better proxy implementation. However, the + # C version of this code in _proxy doesn't take that same care and instead + # uses the generic object attribute access methods directly on + # the wrapped object. This is a behaviour difference; so far, it's + # only been noticed for the __module__ attribute, which checker:Global + # wants to override but couldn't because this object's type's __module__ would + # get in the way. That broke pickling, and checker:Global can't return + # anything more sophisticated than a str (a tuple) because it gets proxied + # and breaks pickling again. Our solution is to match the C version for this + # one attribute. + val = getattr(wrapped, name) + else: + val = super(ProxyPy, self).__getattribute__(name) + return checker.proxy(val) + + def __getattr__(self, name): + # We only get here if __getattribute__ has already raised an + # AttributeError (we have to implement this because the super + # class does). We expect that we will also raise that same + # error, one way or another---either it will be forbidden by + # the checker or it won't exist. However, if the underlying + # object is playing games in *its* + # __getattribute__/__getattr__, and we call getattr() on it, + # (maybe there are threads involved), we might actually + # succeed this time. + + # The C implementation *does not* do two checks; it only does + # one check, and raises either the ForbiddenAttribute or the + # underlying AttributeError, *without* invoking any defined + # __getattribute__/__getattr__ more than once. So we + # explicitly do the same. The consequence is that we lose a + # good stack trace if the object implemented its own methods + # but we're consistent. 
We would provide a better error + # message or even subclass of AttributeError, but that's liable to break + # (doc)tests. + wrapped = super(ProxyPy, self).__getattribute__('_wrapped') + checker = super(ProxyPy, self).__getattribute__('_checker') + checker.check_getattr(wrapped, name) + raise AttributeError(name) + + def __setattr__(self, name, value): + if name in ('_wrapped', '_checker'): + return super(ProxyPy, self).__setattr__(name, value) + wrapped = super(ProxyPy, self).__getattribute__('_wrapped') + checker = super(ProxyPy, self).__getattribute__('_checker') + checker.check_setattr(wrapped, name) + setattr(wrapped, name, value) + + def __delattr__(self, name): + if name in ('_wrapped', '_checker'): + raise AttributeError() + wrapped = super(ProxyPy, self).__getattribute__('_wrapped') + checker = super(ProxyPy, self).__getattribute__('_checker') + checker.check_setattr(wrapped, name) + delattr(wrapped, name) + + @_check_name + def __getslice__(self, start, end): + wrapped = object.__getattribute__(self, '_wrapped') + try: + getslice = wrapped.__getslice__ + except AttributeError: + return wrapped.__getitem__(slice(start, end)) + return getslice(start, end) + + @_check_name + def __setslice__(self, start, end, value): + wrapped = object.__getattribute__(self, '_wrapped') + try: + setslice = wrapped.__setslice__ + except AttributeError: + return wrapped.__setitem__(slice(start, end), value) + return setslice(start, end, value) + + def __cmp__(self, other): + # no check + wrapped = super(ProxyPy, self).__getattribute__('_wrapped') + return cmp(wrapped, other) + + def __lt__(self, other): + # no check + wrapped = super(ProxyPy, self).__getattribute__('_wrapped') + return wrapped < other + + def __le__(self, other): + # no check + wrapped = super(ProxyPy, self).__getattribute__('_wrapped') + return wrapped <= other + + def __eq__(self, other): + # no check + wrapped = super(ProxyPy, self).__getattribute__('_wrapped') + return wrapped == other + + def __ne__(self, 
other): + # no check + wrapped = super(ProxyPy, self).__getattribute__('_wrapped') + return wrapped != other + + def __ge__(self, other): + # no check + wrapped = super(ProxyPy, self).__getattribute__('_wrapped') + return wrapped >= other + + def __gt__(self, other): + # no check + wrapped = super(ProxyPy, self).__getattribute__('_wrapped') + return wrapped > other + + def __hash__(self): + # no check + wrapped = super(ProxyPy, self).__getattribute__('_wrapped') + return hash(wrapped) + + def __nonzero__(self): + # no check + wrapped = super(ProxyPy, self).__getattribute__('_wrapped') + return bool(wrapped) + __bool__ = __nonzero__ + + def __length_hint__(self): + # no check + wrapped = super(ProxyPy, self).__getattribute__('_wrapped') + try: + hint = wrapped.__length_hint__ + except AttributeError: + return NotImplemented + else: + return hint() + + def __coerce__(self, other): + # For some reason _check_name does not work for coerce() + wrapped = super(ProxyPy, self).__getattribute__('_wrapped') + checker = super(ProxyPy, self).__getattribute__('_checker') + checker.check(wrapped, '__coerce__') + return super(ProxyPy, self).__coerce__(other) + + def __str__(self): + try: + return _check_name(PyProxyBase.__str__)(self) + # The C implementation catches almost all exceptions; the + # exception is a TypeError that's raised when the repr returns + # the wrong type of object. + except TypeError: + raise + except: + # The C implementation catches all exceptions. + wrapped = super(ProxyPy, self).__getattribute__('_wrapped') + return '' %( + wrapped.__class__.__module__, wrapped.__class__.__name__, + _fmt_address(wrapped)) + + def __repr__(self): + try: + return _check_name(PyProxyBase.__repr__)(self) + # The C implementation catches almost all exceptions; the + # exception is a TypeError that's raised when the repr returns + # the wrong type of object. 
+ except TypeError: + raise + except: + wrapped = super(ProxyPy, self).__getattribute__('_wrapped') + return '' %( + wrapped.__class__.__module__, wrapped.__class__.__name__, + _fmt_address(wrapped)) + +for name in ['__call__', + #'__repr__', + #'__str__', + #'__unicode__', # Unchecked in C proxy + '__reduce__', + '__reduce_ex__', + #'__lt__', # Unchecked in C proxy (rich coparison) + #'__le__', # Unchecked in C proxy (rich coparison) + #'__eq__', # Unchecked in C proxy (rich coparison) + #'__ne__', # Unchecked in C proxy (rich coparison) + #'__ge__', # Unchecked in C proxy (rich coparison) + #'__gt__', # Unchecked in C proxy (rich coparison) + #'__nonzero__', # Unchecked in C proxy (rich coparison) + #'__bool__', # Unchecked in C proxy (rich coparison) + #'__hash__', # Unchecked in C proxy (rich coparison) + #'__cmp__', # Unchecked in C proxy + '__getitem__', + '__setitem__', + '__delitem__', + '__iter__', + '__next__', + 'next', + '__contains__', + '__neg__', + '__pos__', + '__abs__', + '__invert__', + '__complex__', + '__int__', + '__float__', + '__long__', + '__oct__', + '__hex__', + '__index__', + '__add__', + '__sub__', + '__mul__', + '__div__', + '__truediv__', + '__floordiv__', + '__mod__', + '__divmod__', + '__pow__', + '__radd__', + '__rsub__', + '__rmul__', + '__rdiv__', + '__rtruediv__', + '__rfloordiv__', + '__rmod__', + '__rdivmod__', + '__rpow__', + '__lshift__', + '__rshift__', + '__and__', + '__xor__', + '__or__', + '__rlshift__', + '__rrshift__', + '__rand__', + '__rxor__', + '__ror__', + ]: + meth = getattr(PyProxyBase, name) + setattr(ProxyPy, name, _check_name(meth)) + +for name in ( + '__len__', +): + meth = getattr(PyProxyBase, name) + setattr(ProxyPy, name, _check_name(meth, False)) + +for name in ['__iadd__', + '__isub__', + '__imul__', + '__idiv__', + '__itruediv__', + '__ifloordiv__', + '__imod__', + '__ilshift__', + '__irshift__', + '__iand__', + '__ixor__', + '__ior__', + '__ipow__', + ]: + meth = getattr(PyProxyBase, name) + 
setattr(ProxyPy, name, _check_name_inplace(meth)) + +def getCheckerPy(proxy): + return super(ProxyPy, proxy).__getattribute__('_checker') + + +_builtin_isinstance = sys.modules[_BUILTINS].isinstance + +def getObjectPy(proxy): + if not _builtin_isinstance(proxy, ProxyPy): + return proxy + return super(ProxyPy, proxy).__getattribute__('_wrapped') + + +_c_available = not PURE_PYTHON +if _c_available: + try: + from zope.security._proxy import _Proxy + except (ImportError, AttributeError): # pragma: no cover PyPy / PURE_PYTHON + _c_available = False + + +getChecker = getCheckerPy +getObject = getObjectPy +Proxy = ProxyPy + +if _c_available: + from zope.security._proxy import getChecker + from zope.security._proxy import getObject + Proxy = _Proxy + +removeSecurityProxy = getObject + +def getTestProxyItems(proxy): + """Return a sorted sequence of checker names and permissions for testing + """ + checker = getChecker(proxy) + return sorted(checker.get_permissions.items()) + + +def isinstance(object, cls): + """Test whether an *object* is an instance of a type. + + This works even if the object is security proxied. + """ + # The removeSecurityProxy call is OK here because it is *only* + # being used for isinstance + return _builtin_isinstance(removeSecurityProxy(object), cls) diff --git a/thesisenv/lib/python3.6/site-packages/zope/security/simplepolicies.py b/thesisenv/lib/python3.6/site-packages/zope/security/simplepolicies.py new file mode 100644 index 0000000..d1617ce --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/security/simplepolicies.py @@ -0,0 +1,78 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +""" +Simple :class:`zope.security.interfaces.ISecurityPolicy` implementations. + +As a reminder, ``ISecurityPolicy`` objects are factories for producing +:class:`zope.security.interfaces.IInteraction` objects. That means +that the classes themselves are implementations of +``ISecurityPolicy``. +""" +import zope.interface + +from zope.security.checker import CheckerPublic +from zope.security.interfaces import IInteraction +from zope.security.interfaces import ISecurityPolicy +from zope.security._definitions import system_user + + +@zope.interface.implementer(IInteraction) +@zope.interface.provider(ISecurityPolicy) +class ParanoidSecurityPolicy(object): + """ + Prohibit all access by any non-system principal, unless the item + is :data:`public `. + + This means that if there are no participations (and hence no + principals), then access is allowed. 
+ """ + + def __init__(self, *participations): + self.participations = [] + for participation in participations: + self.add(participation) + + def add(self, participation): + if participation.interaction is not None: + raise ValueError("%r already belongs to an interaction" + % participation) + participation.interaction = self + self.participations.append(participation) + + def remove(self, participation): + if participation.interaction is not self: + raise ValueError("%r does not belong to this interaction" + % participation) + self.participations.remove(participation) + participation.interaction = None + + def checkPermission(self, permission, object): + if permission is CheckerPublic: + return True + + users = [p.principal + for p in self.participations + if p.principal is not system_user] + + return not users + + +@zope.interface.provider(ISecurityPolicy) +class PermissiveSecurityPolicy(ParanoidSecurityPolicy): + """ + Allow all access. + """ + + def checkPermission(self, permission, object): + return True diff --git a/thesisenv/lib/python3.6/site-packages/zope/security/testing.py b/thesisenv/lib/python3.6/site-packages/zope/security/testing.py new file mode 100644 index 0000000..bd4b889 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/security/testing.py @@ -0,0 +1,112 @@ +############################################################################## +# +# Copyright (c) 2004-2011 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +""" +Testing support code. 
+ +This module provides some helper/stub objects for setting up interactions. +""" +import contextlib +import re + +from zope import interface, component + +from zope.security import interfaces +from zope.security.permission import Permission +import zope.security.management +from zope.security._compat import PYTHON2 as PY2 +from zope.security.interfaces import PUBLIC_PERMISSION_NAME + +from zope.testing import renormalizing + +_str_prefix = 'b' if PY2 else 'u' + +rules = [ + (re.compile(_str_prefix + "('.*?')"), r"\1"), + (re.compile(_str_prefix + '(".*?")'), r"\1"), +] +output_checker = renormalizing.RENormalizing(rules) + +@interface.implementer(interfaces.IPrincipal) +class Principal(object): + """ + A trivial implementation of :class:`zope.security.interfaces.IPrincipal`. + """ + + def __init__(self, id, title=None, description='', groups=None): + self.id = id + self.title = title or id + self.description = description + if groups is not None: + self.groups = groups + interface.directlyProvides(self, interfaces.IGroupAwarePrincipal) + + +@interface.implementer(interfaces.IParticipation) +class Participation(object): + """ + A trivial implementation of :class:`zope.security.interfaces.IParticipation`. + """ + def __init__(self, principal): + self.principal = principal + self.interaction = None + + +def addCheckerPublic(): + """ + Add the CheckerPublic permission as :data:`zope.Public + `. + """ + + perm = Permission( + PUBLIC_PERMISSION_NAME, + 'Public', + """Special permission used for resources that are always public + + The public permission is effectively an optimization, sine + it allows security computation to be bypassed. + """ + ) + gsm = component.getGlobalSiteManager() + gsm.registerUtility(perm, interfaces.IPermission, perm.id) + + return perm + +def create_interaction(principal_id, **kw): + """ + Create a new interaction for the given principal ID, make it the + :func:`current interaction + `, and return the + :class:`Principal` object. 
+ """ + principal = Principal(principal_id, **kw) + participation = Participation(principal) + zope.security.management.newInteraction(participation) + return principal + + +@contextlib.contextmanager +def interaction(principal_id, **kw): + """ + A context manager for running an interaction for the given + principal ID. + """ + if zope.security.management.queryInteraction(): + # There already is an interaction. Great. Leave it alone. + yield + else: + principal = create_interaction(principal_id, **kw) + try: + yield principal + finally: + zope.security.management.endInteraction() diff --git a/thesisenv/lib/python3.6/site-packages/zope/security/tests/__init__.py b/thesisenv/lib/python3.6/site-packages/zope/security/tests/__init__.py new file mode 100644 index 0000000..cab4c90 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/security/tests/__init__.py @@ -0,0 +1,16 @@ +import io + + +class QuietWatchingChecker(object): + # zope.testrunner does not support setUp/tearDownModule, + # so we use a mixin class to make sure we don't flood stderr + # with pointless printing when testing watching checkers + + def setUp(self): + from zope.security import checker + self.__old_file = checker.CheckerLoggingMixin._file + checker.CheckerLoggingMixin._file = io.StringIO() if bytes is not str else io.BytesIO() + + def tearDown(self): + from zope.security import checker + checker.CheckerLoggingMixin._file = self.__old_file diff --git a/thesisenv/lib/python3.6/site-packages/zope/security/tests/exampleclass.py b/thesisenv/lib/python3.6/site-packages/zope/security/tests/exampleclass.py new file mode 100644 index 0000000..1d73b4b --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/security/tests/exampleclass.py @@ -0,0 +1,28 @@ +############################################################################## +# +# Copyright (c) 2003 Zope Foundation and Contributors. +# All Rights Reserved. 
+# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Example test classes +""" +from zope.interface import Interface + +class ExampleClass(object): + pass + +class IExample(Interface): + pass + +class IExample2(Interface): + pass + +class IExampleContainer(Interface): + pass diff --git a/thesisenv/lib/python3.6/site-packages/zope/security/tests/module.py b/thesisenv/lib/python3.6/site-packages/zope/security/tests/module.py new file mode 100644 index 0000000..5b26ff5 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/security/tests/module.py @@ -0,0 +1,56 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Preliminaries to hookup a test suite with the external TestModule. + +This is necessary because the test framework interferes with seeing changes in +the running modules via the module namespace. This enables having some +subject classes, instances, permissions, etc, that don't live in the test +modules, themselves. 
+""" +from zope.interface import Interface +from zope.schema import Text + +class I(Interface): + def m1(): + "m1" + def m2(): + "m2" + +class I2(I): + def m4(): + "m4" + +class I3(Interface): + def m3(): + "m3" + +class I4(Interface): + def m2(): + "m2" + + +class S(Interface): + foo = Text() + bar = Text() + baro = Text(readonly=True) + +class S2(Interface): + foo2 = Text() + bar2 = Text() + + +template_bracket = """ + %s +""" diff --git a/thesisenv/lib/python3.6/site-packages/zope/security/tests/redefineperms.zcml b/thesisenv/lib/python3.6/site-packages/zope/security/tests/redefineperms.zcml new file mode 100644 index 0000000..cf5d2fb --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/security/tests/redefineperms.zcml @@ -0,0 +1,33 @@ + + + + + + + + + + + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/security/tests/test_adapter.py b/thesisenv/lib/python3.6/site-packages/zope/security/tests/test_adapter.py new file mode 100644 index 0000000..a3dd541 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/security/tests/test_adapter.py @@ -0,0 +1,476 @@ +############################################################################## +# +# Copyright (c) 2004 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. 
+# +############################################################################## +import unittest + +from zope.interface import directlyProvides +from zope.interface import implementer +from zope.location import ILocation +from zope.location import LocationProxy +from zope.proxy import getProxiedObject + +# pylint:disable=attribute-defined-outside-init,protected-access + +class Test_assertLocation(unittest.TestCase): + + def _callFUT(self, adapter, parent): + from zope.security.adapter import assertLocation + return assertLocation(adapter, parent) + + def test_w_non_ILocation(self): + class _NotAdapter(object): + pass + adapter = _NotAdapter() + parent = object() + returned = self._callFUT(adapter, parent) + self.assertTrue(isinstance(returned, LocationProxy)) + self.assertIs(getProxiedObject(returned), adapter) + self.assertIs(returned.__parent__, parent) + + def test_w_ILocation_no_parent(self): + @implementer(ILocation) + class _Adapter(object): + __parent__ = None + adapter = _Adapter() + parent = object() + returned = self._callFUT(adapter, parent) + self.assertIs(returned, adapter) + self.assertIs(returned.__parent__, parent) + + def test_w_ILocation_w_parent(self): + parent = object() + @implementer(ILocation) + class _Adapter(object): + __parent__ = parent + adapter = _Adapter() + new_parent = object() + returned = self._callFUT(adapter, new_parent) + self.assertIs(returned, adapter) + self.assertIs(returned.__parent__, parent) + + +class LocatingTrustedAdapterFactoryTests(unittest.TestCase): + + def _getTargetClass(self): + from zope.security.adapter import LocatingTrustedAdapterFactory + return LocatingTrustedAdapterFactory + + def _makeOne(self, factory): + return self._getTargetClass()(factory) + + def _makeFactory(self): + class _Factory(object): + __name__ = 'testing' + __module__ = 'zope.security.tests.test_adapter' + _called_with = () + def __call__(self, *args): + self._called_with = args + return self + return _Factory() + + def test_ctor(self): 
+ factory = self._makeFactory() + ltaf = self._makeOne(factory) + self.assertIs(ltaf.factory, factory) + self.assertEqual(ltaf.__name__, 'testing') + self.assertEqual(ltaf.__module__, 'zope.security.tests.test_adapter') + + def test__call__w_non_ILocation_non_spacesuit(self): + factory = self._makeFactory() + ltaf = self._makeOne(factory) + class _NotAdapter(object): + pass + adapter = _NotAdapter() + before = factory.__dict__.copy() + returned = ltaf(adapter) + self.assertIs(returned, factory) + after = {k: v for k, v in returned.__dict__.items() + if k != '_called_with'} + self.assertEqual(factory._called_with, (adapter,)) + self.assertEqual(after, before) # no added attrs + + def test__call__w_non_ILocation_non_spacesuit_multiple_args(self): + factory = self._makeFactory() + ltaf = self._makeOne(factory) + class _NotAdapter(object): + pass + adapter = _NotAdapter() + extra = object() + before = factory.__dict__.copy() + returned = ltaf(adapter, extra) + self.assertIs(returned, factory) + after = {k: v for k, v in returned.__dict__.items() + if k != '_called_with'} + self.assertEqual(factory._called_with, (adapter, extra)) + self.assertEqual(after, before) # no added attrs + + def test__call__w_ILocation_w_existing_parent_non_spacesuit(self): + factory = self._makeFactory() + parent = factory.__parent__ = object() + directlyProvides(factory, ILocation) + ltaf = self._makeOne(factory) + class _NotAdapter(object): + pass + adapter = _NotAdapter() + returned = ltaf(adapter) + self.assertIs(returned, factory) + self.assertIs(returned.__parent__, parent) + + def test__call__w_ILocation_wo_existing_parent_non_spacesuit(self): + factory = self._makeFactory() + factory.__parent__ = None + directlyProvides(factory, ILocation) + ltaf = self._makeOne(factory) + class _NotAdapter(object): + pass + adapter = _NotAdapter() + returned = ltaf(adapter) + self.assertIs(returned, factory) + self.assertIs(returned.__parent__, adapter) + + def 
test__call__w_non_ILocation_w_spacesuit(self): + from zope.security.proxy import ProxyFactory + from zope.security.proxy import removeSecurityProxy + factory = self._makeFactory() + ltaf = self._makeOne(factory) + class _NotAdapter(object): + pass + adapter = _NotAdapter() + proxy = ProxyFactory(adapter) + before = factory.__dict__.copy() + returned = ltaf(proxy) + self.assertFalse(returned is factory) + ploc = removeSecurityProxy(returned) + self.assertIs(ploc.__parent__, adapter) + unwrapped = getProxiedObject(ploc) + self.assertIs(unwrapped, factory) + after = {k: v for k, v in unwrapped.__dict__.items() + if k not in ('_called_with',)} + self.assertEqual(factory._called_with, (adapter,)) + self.assertEqual(after, before) # no added attrs + + def test__call__w_non_ILocation_w_spacesuit_multiple_args(self): + from zope.security.proxy import ProxyFactory + from zope.security.proxy import removeSecurityProxy + factory = self._makeFactory() + ltaf = self._makeOne(factory) + class _NotAdapter(object): + pass + adapter = _NotAdapter() + extra = object() + proxy = ProxyFactory(adapter) + before = factory.__dict__.copy() + returned = ltaf(proxy, extra) + self.assertFalse(returned is factory) + ploc = removeSecurityProxy(returned) + self.assertIs(ploc.__parent__, adapter) + unwrapped = getProxiedObject(ploc) + self.assertIs(unwrapped, factory) + after = {k: v for k, v in unwrapped.__dict__.items() + if k not in ('_called_with',)} + self.assertEqual(factory._called_with, (adapter, extra)) + self.assertEqual(after, before) # no added attrs + + def test__call__w_non_ILocation_multiple_args_extra_spacesuit(self): + from zope.security.proxy import ProxyFactory + from zope.security.proxy import removeSecurityProxy + factory = self._makeFactory() + ltaf = self._makeOne(factory) + class _NotAdapter(object): + pass + class _Extra(object): + pass + adapter = _NotAdapter() + extra = _Extra() + proxy = ProxyFactory(extra) + before = factory.__dict__.copy() + returned = ltaf(adapter, 
proxy) + self.assertFalse(returned is factory) + ploc = removeSecurityProxy(returned) + self.assertIs(ploc.__parent__, adapter) + unwrapped = getProxiedObject(ploc) + self.assertIs(unwrapped, factory) + after = {k: v for k, v in unwrapped.__dict__.items() + if k not in ('_called_with',)} + self.assertEqual(factory._called_with, (adapter, extra)) + self.assertEqual(after, before) # no added attrs + + def test__call__w_ILocation_w_spacesuit(self): + from zope.security.proxy import getObject + from zope.security.proxy import ProxyFactory + from zope.security.proxy import removeSecurityProxy + factory = self._makeFactory() + factory.__parent__ = factory.__name__ = None + directlyProvides(factory, ILocation) + ltaf = self._makeOne(factory) + class _Adapter(object): + pass + adapter = _Adapter() + proxy = ProxyFactory(adapter) + before = {k: v for k, v in factory.__dict__.items() + if k not in ('_called_with', '__parent__')} + returned = ltaf(proxy) + self.assertFalse(returned is factory) + ploc = removeSecurityProxy(returned) + self.assertIs(ploc.__parent__, adapter) + unwrapped = getObject(ploc) + self.assertIs(unwrapped, factory) + after = {k: v for k, v in unwrapped.__dict__.items() + if k not in ('_called_with', '__parent__')} + self.assertEqual(factory._called_with, (adapter,)) + self.assertIs(factory.__parent__, adapter) + self.assertEqual(after, before) # no added attrs + + def test__call__w_ILocation_w_spacesuit_w_existing_parent(self): + from zope.security.proxy import getObject + from zope.security.proxy import ProxyFactory + from zope.security.proxy import removeSecurityProxy + factory = self._makeFactory() + factory.__name__ = None + factory.__parent__ = parent = object() + directlyProvides(factory, ILocation) + ltaf = self._makeOne(factory) + class _Adapter(object): + pass + adapter = _Adapter() + proxy = ProxyFactory(adapter) + before = {k: v for k, v in factory.__dict__.items() + if k not in ('_called_with', '__parent__')} + returned = ltaf(proxy) + 
self.assertFalse(returned is factory) + ploc = removeSecurityProxy(returned) + self.assertIs(ploc.__parent__, parent) + unwrapped = getObject(ploc) + self.assertIs(unwrapped, factory) + after = {k: v for k, v in unwrapped.__dict__.items() + if k not in ('_called_with', '__parent__')} + self.assertEqual(factory._called_with, (adapter,)) + self.assertEqual(after, before) # no added attrs + + +class TrustedAdapterFactoryTests(unittest.TestCase): + + def _getTargetClass(self): + from zope.security.adapter import TrustedAdapterFactory + return TrustedAdapterFactory + + def _makeOne(self, factory): + return self._getTargetClass()(factory) + + def _makeFactory(self): + class _Factory(object): + __name__ = 'testing' + __module__ = 'zope.security.tests.test_adapter' + def __call__(self, *args): + self._called_with = args + return self + return _Factory() + + def test__call__w_non_ILocation_w_spacesuit(self): + from zope.security.proxy import ProxyFactory + from zope.security.proxy import removeSecurityProxy + factory = self._makeFactory() + ltaf = self._makeOne(factory) + class _NotAdapter(object): + pass + adapter = _NotAdapter() + proxy = ProxyFactory(adapter) + before = factory.__dict__.copy() + returned = ltaf(proxy) + self.assertFalse(returned is factory) + unwrapped = removeSecurityProxy(returned) + self.assertTrue('__parent__' not in unwrapped.__dict__) + self.assertIs(unwrapped, factory) + after = {k: v for k, v in unwrapped.__dict__.items() + if k not in ('_called_with',)} + self.assertEqual(factory._called_with, (adapter,)) + self.assertEqual(after, before) # no added attrs + + def test__call__w_non_ILocation_w_spacesuit_multiple_args(self): + from zope.security.proxy import ProxyFactory + from zope.security.proxy import removeSecurityProxy + factory = self._makeFactory() + ltaf = self._makeOne(factory) + class _NotAdapter(object): + pass + adapter = _NotAdapter() + extra = object() + proxy = ProxyFactory(adapter) + before = factory.__dict__.copy() + returned = 
ltaf(proxy, extra) + self.assertFalse(returned is factory) + unwrapped = removeSecurityProxy(returned) + self.assertTrue('__parent__' not in unwrapped.__dict__) + self.assertIs(unwrapped, factory) + after = {k: v for k, v in unwrapped.__dict__.items() + if k not in ('_called_with',)} + self.assertEqual(factory._called_with, (adapter, extra)) + self.assertEqual(after, before) # no added attrs + + def test__call__w_non_ILocation_multiple_args_extra_spacesuit(self): + from zope.security.proxy import ProxyFactory + from zope.security.proxy import removeSecurityProxy + factory = self._makeFactory() + ltaf = self._makeOne(factory) + class _NotAdapter(object): + pass + class _Extra(object): + pass + adapter = _NotAdapter() + extra = _Extra() + proxy = ProxyFactory(extra) + before = factory.__dict__.copy() + returned = ltaf(adapter, proxy) + self.assertFalse(returned is factory) + unwrapped = removeSecurityProxy(returned) + self.assertTrue('__parent__' not in unwrapped.__dict__) + self.assertIs(unwrapped, factory) + after = {k: v for k, v in unwrapped.__dict__.items() + if k not in ('_called_with',)} + self.assertEqual(factory._called_with, (adapter, extra)) + self.assertEqual(after, before) # no added attrs + + def test__call__w_ILocation_w_spacesuit(self): + from zope.security.proxy import ProxyFactory + from zope.security.proxy import removeSecurityProxy + factory = self._makeFactory() + factory.__parent__ = factory.__name__ = None + directlyProvides(factory, ILocation) + ltaf = self._makeOne(factory) + class _Adapter(object): + pass + adapter = _Adapter() + proxy = ProxyFactory(adapter) + before = {k: v for k, v in factory.__dict__.items() + if k not in ('_called_with', '__parent__')} + returned = ltaf(proxy) + self.assertFalse(returned is factory) + unwrapped = removeSecurityProxy(returned) + self.assertIs(unwrapped.__parent__, adapter) + self.assertIs(unwrapped, factory) + after = {k: v for k, v in unwrapped.__dict__.items() + if k not in ('_called_with', 
'__parent__')} + self.assertEqual(factory._called_with, (adapter,)) + self.assertEqual(after, before) # no added attrs + + def test__call__w_ILocation_w_spacesuit_w_existing_parent(self): + from zope.security.proxy import ProxyFactory + from zope.security.proxy import removeSecurityProxy + factory = self._makeFactory() + factory.__name__ = None + factory.__parent__ = parent = object() + directlyProvides(factory, ILocation) + ltaf = self._makeOne(factory) + class _Adapter(object): + pass + adapter = _Adapter() + proxy = ProxyFactory(adapter) + before = {k: v for k, v in factory.__dict__.items() + if k not in ('_called_with', '__parent__')} + returned = ltaf(proxy) + self.assertFalse(returned is factory) + unwrapped = removeSecurityProxy(returned) + self.assertIs(unwrapped.__parent__, parent) + self.assertIs(unwrapped, factory) + after = {k: v for k, v in unwrapped.__dict__.items() + if k not in ('_called_with', '__parent__')} + self.assertEqual(factory._called_with, (adapter,)) + self.assertEqual(after, before) # no added attrs + + +class LocatingUntrustedAdapterFactoryTests(unittest.TestCase): + + def _getTargetClass(self): + from zope.security.adapter import LocatingUntrustedAdapterFactory + return LocatingUntrustedAdapterFactory + + def _makeOne(self, factory): + return self._getTargetClass()(factory) + + def _makeFactory(self): + class _Factory(object): + __name__ = 'testing' + __module__ = 'zope.security.tests.test_adapter' + _called_with = () + def __call__(self, *args): + self._called_with = args + return self + return _Factory() + + def test_ctor(self): + factory = self._makeFactory() + ltaf = self._makeOne(factory) + self.assertIs(ltaf.factory, factory) + self.assertEqual(ltaf.__name__, 'testing') + self.assertEqual(ltaf.__module__, 'zope.security.tests.test_adapter') + + def test__call__w_non_ILocation(self): + factory = self._makeFactory() + ltaf = self._makeOne(factory) + class _NotAdapter(object): + pass + adapter = _NotAdapter() + before = 
factory.__dict__.copy() + returned = ltaf(adapter) + self.assertFalse(returned is factory) + unwrapped = getProxiedObject(returned) + self.assertIs(unwrapped, factory) + after = {k: v for k, v in returned.__dict__.items() + if k != '_called_with'} + self.assertEqual(factory._called_with, (adapter,)) + self.assertEqual(after, before) # no added attrs + + def test__call__w_non_ILocation_multiple_args(self): + factory = self._makeFactory() + ltaf = self._makeOne(factory) + class _NotAdapter(object): + pass + adapter = _NotAdapter() + extra = object() + before = factory.__dict__.copy() + returned = ltaf(adapter, extra) + self.assertFalse(returned is factory) + unwrapped = getProxiedObject(returned) + self.assertIs(unwrapped, factory) + after = {k: v for k, v in returned.__dict__.items() + if k != '_called_with'} + self.assertEqual(factory._called_with, (adapter, extra)) + self.assertEqual(after, before) # no added attrs + + def test__call__w_ILocation_w_existing_parent(self): + factory = self._makeFactory() + parent = factory.__parent__ = object() + directlyProvides(factory, ILocation) + ltaf = self._makeOne(factory) + class _NotAdapter(object): + pass + adapter = _NotAdapter() + returned = ltaf(adapter) + self.assertIs(returned, factory) + self.assertIs(returned.__parent__, parent) + + def test__call__w_ILocation_wo_existing_parent(self): + factory = self._makeFactory() + factory.__parent__ = None + directlyProvides(factory, ILocation) + ltaf = self._makeOne(factory) + class _NotAdapter(object): + pass + adapter = _NotAdapter() + returned = ltaf(adapter) + self.assertIs(returned, factory) + self.assertIs(returned.__parent__, adapter) + + +def test_suite(): + return unittest.defaultTestLoader.loadTestsFromName(__name__) diff --git a/thesisenv/lib/python3.6/site-packages/zope/security/tests/test_checker.py b/thesisenv/lib/python3.6/site-packages/zope/security/tests/test_checker.py new file mode 100644 index 0000000..14f6002 --- /dev/null +++ 
b/thesisenv/lib/python3.6/site-packages/zope/security/tests/test_checker.py @@ -0,0 +1,2315 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Tests for zope.security.checker +""" +import unittest + +from zope.security import checker as sec_checker +from zope.security.tests import QuietWatchingChecker +from zope.security._compat import PYTHON3 as PY3 +from zope.security._compat import PYTHON2 as PY2 + + +# pylint:disable=protected-access,inherit-non-class,no-method-argument,old-style-class +# pylint:disable=blacklisted-name,no-init + +class Test_ProxyFactory(unittest.TestCase): + + def _callFUT(self, obj, checker=None): + from zope.security.checker import ProxyFactory + return ProxyFactory(obj, checker) + + def test_w_already_proxied_no_checker(self): + from zope.security.proxy import Proxy, getChecker + obj = object() + def _check(*x): + raise AssertionError("Never called") + proxy = Proxy(obj, _check) + returned = self._callFUT(proxy, None) + self.assertIs(returned, proxy) + self.assertIs(getChecker(returned), _check) + + def test_w_already_proxied_same_checker(self): + from zope.security.proxy import Proxy, getChecker + obj = object() + def _check(*x): + raise AssertionError("Never called") + proxy = Proxy(obj, _check) + returned = self._callFUT(proxy, _check) + self.assertIs(returned, proxy) + self.assertIs(getChecker(returned), _check) + + def 
test_w_already_proxied_different_checker(self): + from zope.security.proxy import Proxy + obj = object() + def _check(*x): + raise AssertionError("Never called") + proxy = Proxy(obj, _check) + def _sneaky(*x): + raise AssertionError("Never called") + self.assertRaises(TypeError, self._callFUT, proxy, _sneaky) + + def test_w_explicit_checker(self): + from zope.security.proxy import getChecker + obj = object() + def _check(*x): + raise AssertionError("Never called") + returned = self._callFUT(obj, _check) + self.assertFalse(returned is obj) + self.assertIs(getChecker(returned), _check) + + def test_no_checker_no_dunder_no_select(self): + obj = object() + returned = self._callFUT(obj) + self.assertIs(returned, obj) + + def test_no_checker_w_dunder(self): + from zope.security.proxy import getChecker, getObject + _check = object() # don't use a func, due to bound method + class _WithChecker(object): + __Security_checker__ = _check + obj = _WithChecker() + returned = self._callFUT(obj) + self.assertFalse(returned is obj) + self.assertIs(getObject(returned), obj) + self.assertIs(getChecker(returned), _check) + + def test_no_checker_no_dunder_w_select(self): + from zope.security.checker import Checker + from zope.security.checker import _checkers + from zope.security.checker import _clear + from zope.security.proxy import getChecker, getObject + class _Obj(object): + pass + obj = _Obj() + _checker = Checker({}) + def _check(*args): + return _checker + _checkers[_Obj] = _check + try: + returned = self._callFUT(obj) + self.assertFalse(returned is obj) + self.assertIs(getObject(returned), obj) + self.assertIs(getChecker(returned), _checker) + finally: + _clear() + + +class Test_canWrite(unittest.TestCase): + + def _callFUT(self, obj, name): + from zope.security.checker import canWrite + return canWrite(obj, name) + + def _makeChecker(self, ch_get=None, ch_set=None): + class _Checker(object): + def check_getattr(self, obj, name): + if ch_get is not None: + raise ch_get() + def 
check_setattr(self, obj, name): + if ch_set is not None: + raise ch_set() + return _Checker() + + def test_ok(self): + from zope.security.proxy import Proxy + obj = object() + proxy = Proxy(obj, self._makeChecker()) + self.assertTrue(self._callFUT(proxy, 'whatever')) + + def test_w_setattr_unauth(self): + from zope.security.interfaces import Unauthorized + from zope.security.proxy import Proxy + obj = object() + proxy = Proxy(obj, self._makeChecker(ch_set=Unauthorized)) + self.assertFalse(self._callFUT(proxy, 'whatever')) + + def test_w_setattr_forbidden_getattr_allowed(self): + from zope.security.interfaces import ForbiddenAttribute + from zope.security.proxy import Proxy + obj = object() + proxy = Proxy(obj, self._makeChecker(ch_set=ForbiddenAttribute)) + self.assertFalse(self._callFUT(proxy, 'whatever')) + + def test_w_setattr_forbidden_getattr_unauth(self): + from zope.security.interfaces import ForbiddenAttribute + from zope.security.interfaces import Unauthorized + from zope.security.proxy import Proxy + obj = object() + proxy = Proxy(obj, self._makeChecker(ch_get=Unauthorized, + ch_set=ForbiddenAttribute)) + self.assertFalse(self._callFUT(proxy, 'whatever')) + + def test_w_setattr_forbidden_getattr_forbidden(self): + from zope.security.interfaces import ForbiddenAttribute + from zope.security.proxy import Proxy + obj = object() + proxy = Proxy(obj, self._makeChecker(ch_get=ForbiddenAttribute, + ch_set=ForbiddenAttribute)) + self.assertRaises(ForbiddenAttribute, self._callFUT, proxy, 'whatever') + + +class Test_canAccess(unittest.TestCase): + + def _callFUT(self, obj, name): + from zope.security.checker import canAccess + return canAccess(obj, name) + + def _makeChecker(self, ch_get=None): + class _Checker(object): + def check_getattr(self, obj, name): + if ch_get is not None: + raise ch_get() + return _Checker() + + def test_ok(self): + from zope.security.proxy import Proxy + obj = object() + proxy = Proxy(obj, self._makeChecker()) + 
self.assertTrue(self._callFUT(proxy, 'whatever')) + + def test_w_getattr_unauth(self): + from zope.security.interfaces import Unauthorized + from zope.security.proxy import Proxy + obj = object() + proxy = Proxy(obj, self._makeChecker(ch_get=Unauthorized)) + self.assertFalse(self._callFUT(proxy, 'whatever')) + + def test_w_setattr_forbidden_getattr_allowed(self): + from zope.security.interfaces import ForbiddenAttribute + from zope.security.proxy import Proxy + obj = object() + proxy = Proxy(obj, self._makeChecker(ch_get=ForbiddenAttribute)) + self.assertRaises(ForbiddenAttribute, self._callFUT, proxy, 'whatever') + + +_marker = object() + +class CheckerTestsBase(QuietWatchingChecker): + # pylint:disable=no-member + + def _getTargetClass(self): + raise NotImplementedError("Subclasses must define") + + def _makeOne(self, get_permissions=_marker, set_permissions=_marker): + if get_permissions is _marker: + get_permissions = {} + if set_permissions is _marker: + return self._getTargetClass()(get_permissions) + return self._getTargetClass()(get_permissions, set_permissions) + + def test_class_conforms_to_IChecker(self): + from zope.interface.verify import verifyClass + from zope.security.interfaces import IChecker + verifyClass(IChecker, self._getTargetClass()) + + def test_instance_conforms_to_IChecker(self): + from zope.interface.verify import verifyObject + from zope.security.interfaces import IChecker + verifyObject(IChecker, self._makeOne()) + + def test_ctor_w_non_dict_get_permissions(self): + self.assertRaises(TypeError, self._makeOne, object()) + + def test_ctor_w_non_dict_set_permissions(self): + self.assertRaises(TypeError, self._makeOne, {}, object()) + + def test_permission_id_miss(self): + checker = self._makeOne() + self.assertIsNone(checker.permission_id('nonesuch')) + + def test_permission_id_hit(self): + checker = self._makeOne({'name': 'PERMISSION'}) + self.assertEqual(checker.permission_id('name'), 'PERMISSION') + + def 
test_setattr_permission_id_miss_none_set(self): + checker = self._makeOne() + self.assertIsNone(checker.setattr_permission_id('nonesuch')) + + def test_setattr_permission_id_miss(self): + checker = self._makeOne(set_permissions={'name': 'PERMISSION'}) + self.assertIsNone(checker.setattr_permission_id('nonesuch')) + + def test_setattr_permission_id_hit(self): + checker = self._makeOne(set_permissions={'name': 'PERMISSION'}) + self.assertEqual(checker.setattr_permission_id('name'), 'PERMISSION') + + def test_check_setattr_miss_none_set(self): + from zope.security.interfaces import ForbiddenAttribute + checker = self._makeOne() + obj = object() + self.assertRaises(ForbiddenAttribute, + checker.check_setattr, obj, 'nonesuch') + + def test_check_setattr_miss(self): + from zope.security.interfaces import ForbiddenAttribute + checker = self._makeOne(set_permissions={'name': 'PERMISSION'}) + obj = object() + self.assertRaises(ForbiddenAttribute, + checker.check_setattr, obj, 'nonesuch') + + def test_check_setattr_public(self): + from zope.security.checker import CheckerPublic + checker = self._makeOne(set_permissions={'name': CheckerPublic}) + obj = object() + self.assertEqual(checker.check_setattr(obj, 'name'), None) + + def test_check_setattr_w_interaction_allows(self): + from zope.security._definitions import thread_local + class _Interaction(object): + def checkPermission(self, obj, perm): + return True + checker = self._makeOne(set_permissions={'name': 'view'}) + obj = object() + thread_local.interaction = _Interaction() + try: + self.assertEqual(checker.check_setattr(obj, 'name'), None) + finally: + del thread_local.interaction + + def test_check_setattr_w_interaction_denies(self): + from zope.security.interfaces import Unauthorized + from zope.security._definitions import thread_local + class _Interaction(object): + def checkPermission(self, obj, perm): + return False + checker = self._makeOne(set_permissions={'name': 'view'}) + obj = object() + 
thread_local.interaction = _Interaction() + try: + self.assertRaises(Unauthorized, + checker.check_setattr, obj, 'name') + finally: + del thread_local.interaction + + def test_check_miss(self): + from zope.security.interfaces import ForbiddenAttribute + checker = self._makeOne() + obj = object() + self.assertRaises(ForbiddenAttribute, + checker.check, obj, 'nonesuch') + + def test_check_available_by_default(self): + checker = self._makeOne() + obj = object() + self.assertEqual(checker.check(obj, '__repr__'), None) + + def test_check_public(self): + from zope.security.checker import CheckerPublic + checker = self._makeOne({'name': CheckerPublic}) + obj = object() + self.assertEqual(checker.check(obj, 'name'), None) + + def test_check_non_public_w_interaction_allows(self): + from zope.security._definitions import thread_local + class _Interaction(object): + def checkPermission(self, obj, perm): + return True + checker = self._makeOne({'name': 'view'}) + obj = object() + thread_local.interaction = _Interaction() + try: + self.assertEqual(checker.check(obj, 'name'), None) + finally: + del thread_local.interaction + + def test_check_non_public_w_interaction_denies(self): + from zope.security.interfaces import Unauthorized + from zope.security._definitions import thread_local + class _Interaction(object): + def checkPermission(self, obj, perm): + return False + checker = self._makeOne({'name': 'view'}) + obj = object() + thread_local.interaction = _Interaction() + try: + self.assertRaises(Unauthorized, + checker.check, obj, 'name') + finally: + del thread_local.interaction + + def test_proxy_already_proxied(self): + from zope.security.proxy import Proxy, getChecker + obj = object() + def _check(*x): + raise AssertionError("Never called") + proxy = Proxy(obj, _check) + checker = self._makeOne({'name': 'view'}) + returned = checker.proxy(proxy) + self.assertIs(returned, proxy) + self.assertIs(getChecker(returned), _check) + + def test_proxy_no_dunder_no_select(self): + obj 
= object() + checker = self._makeOne() + returned = checker.proxy(obj) + self.assertIs(returned, obj) + + def test_proxy_no_checker_w_dunder(self): + from zope.security.proxy import getChecker, getObject + _check = object() # don't use a func, due to bound method + class _WithChecker(object): + __Security_checker__ = _check + obj = _WithChecker() + checker = self._makeOne() + returned = checker.proxy(obj) + self.assertFalse(returned is obj) + self.assertIs(getObject(returned), obj) + self.assertIs(getChecker(returned), _check) + + def test_proxy_no_checker_no_dunder_w_select(self): + from zope.security.checker import Checker + from zope.security.checker import _checkers + from zope.security.checker import _clear + from zope.security.proxy import getChecker, getObject + class _Obj(object): + pass + obj = _Obj() + _checker = Checker({}) + def _check(*args): + return _checker + _checkers[_Obj] = _check + try: + checker = self._makeOne() + returned = checker.proxy(obj) + self.assertFalse(returned is obj) + self.assertIs(getObject(returned), obj) + self.assertIs(getChecker(returned), _checker) + finally: + _clear() + + def _check_iteration_of_dict_like(self, dict_like): + from zope.security.proxy import Proxy + from zope.security.checker import _default_checkers + + checker = _default_checkers[dict] + + proxy = Proxy(dict_like, checker) + # empty + self.assertEqual([], list(proxy.items())) + self.assertEqual([], list(proxy.keys())) + self.assertEqual([], list(proxy.values())) + self.assertEqual([], list(proxy)) + + # With an object + dict_like[1] = 2 + self.assertEqual([(1, 2)], list(proxy.items())) + self.assertEqual([1], list(proxy.keys())) + self.assertEqual([1], list(proxy)) + self.assertEqual([2], list(proxy.values())) + + + def test_iteration_of_btree_items_keys_values(self): + # iteration of BTree.items() is allowed by default. 
+ import BTrees + for name in ('IF', 'II', 'IO', 'OI', 'OO'): + for family_name in ('family32', 'family64'): + family = getattr(BTrees, family_name) + btree = getattr(family, name).BTree() + self._check_iteration_of_dict_like(btree) + + def test_iteration_of_odict_items_keys_values(self): + # iteration of OrderedDict.items() is allowed by default. + from collections import OrderedDict + + odict = OrderedDict() + self._check_iteration_of_dict_like(odict) + + def test_iteration_of_dict_items_keys_values(self): + # iteration of regular dict is allowed by default + self._check_iteration_of_dict_like(dict()) + + def test_iteration_of_interface_implementedBy(self): + # Iteration of implementedBy is allowed by default + # See https://github.com/zopefoundation/zope.security/issues/27 + from zope.security.proxy import Proxy + from zope.security.checker import Checker + + from zope.interface import providedBy + from zope.interface import implementer + from zope.interface import Interface + + class I1(Interface): + pass + + @implementer(I1) + class O(object): + pass + + o = O() + + checker = Checker({}) + + proxy = Proxy(o, checker) + + # Since the object itself doesn't have any interfaces, + # the providedBy will return the implementedBy of the class + l = list(providedBy(proxy)) + + self.assertEqual(l, [I1]) + + def test_iteration_of_interface_providesBy(self): + # Iteration of zope.interface.Provides is allowed by default + # See https://github.com/zopefoundation/zope.security/issues/27 + from zope.security.proxy import Proxy + from zope.security.checker import Checker + + from zope.interface import providedBy + from zope.interface import alsoProvides + from zope.interface import implementer + from zope.interface import Interface + + class I1(Interface): + pass + + class I2(Interface): + pass + + @implementer(I1) + class O(object): + pass + + o = O() + alsoProvides(o, I2) + + checker = Checker({}) + + proxy = Proxy(o, checker) + + # Since the object has its own interfaces, 
provided + # by will return a zope.interface.Provides object + l = list(providedBy(proxy)) + + self.assertEqual(l, [I2, I1]) + + def test_iteration_with_length_hint(self): + # PEP 424 implemented officially in Python 3.4 and + # unofficially before in cPython and PyPy that allows for a + # __length_hint__ method to be defined on iterators. It should + # be allowed by default. See + # https://github.com/zopefoundation/zope.security/issues/27 + from zope.security.proxy import Proxy + from zope.security.checker import _iteratorChecker + + class Iter(object): + __Security_checker__ = _iteratorChecker + + items = (0, 1, 2) + index = 0 + hint = len(items) + hint_called = False + + def __iter__(self): + return self + + def __next__(self): + try: + return self.items[self.index] + except IndexError: + raise StopIteration() + finally: + self.index += 1 + + next = __next__ + + def __length_hint__(self): + self.hint_called = True + return self.hint + + # The hint is called on raw objects + i = Iter() + list(i) + self.assertTrue(i.hint_called, "__length_hint__ should be called") + + # The hint is called when we proxy the root object + i = Iter() + proxy = Proxy(i, _iteratorChecker) + l = list(proxy) + self.assertEqual(l, [0, 1, 2]) + self.assertTrue(i.hint_called, "__length_hint__ should be called") + + # The hint is called when we proxy its iterator + i = Iter() + it = iter(i) + proxy = Proxy(it, _iteratorChecker) + l = list(proxy) + self.assertEqual(l, [0, 1, 2]) + self.assertTrue(i.hint_called, "__length_hint__ should be called") + + + def test_iteration_of_itertools_groupby(self): + # itertools.groupby is a custom iterator type. + # The groups it returns are also custom. 
+ from zope.security.checker import ProxyFactory + + from itertools import groupby + + group = groupby([0]) + list_group = list(group) + self.assertEqual(1, len(list_group)) + self.assertEqual(0, list_group[0][0]) + + proxy = ProxyFactory(groupby([0])) + list_group = list(proxy) + self.assertEqual(1, len(list_group)) + self.assertEqual(0, list_group[0][0]) + + # Note that groupby docs say: "The returned group is itself an + # iterator that shares the underlying iterable with groupby(). + # Because the source is shared, when the groupby() object is + # advanced, the previous group is no longer visible." + # For a one-item list, this doesn't make a difference on CPython, + # but it does on PyPy (if we use list(group), the list_group[0][0] is + # empty); probably this has to do with GC + proxy = ProxyFactory(groupby([0])) + _key, subiter = next(proxy) + self.assertEqual([0], list(subiter)) + + +class TestCheckerPy(CheckerTestsBase, unittest.TestCase): + + def _getTargetClass(self): + return sec_checker.CheckerPy + + +class TestChecker(CheckerTestsBase, unittest.TestCase): + + def _getTargetClass(self): + return sec_checker.Checker + +@unittest.skipIf(sec_checker.Checker is sec_checker.WatchingChecker, + "WatchingChecker is the default") +class TestWatchingChecker(TestChecker): + + def _getTargetClass(self): + return sec_checker.WatchingChecker + +class TestTracebackSupplement(unittest.TestCase): + + def _getTargetClass(self): + from zope.security.checker import TracebackSupplement + return TracebackSupplement + + def _makeOne(self, obj): + return self._getTargetClass()(obj) + + def test_getInfo_builtin_types(self): + from zope.security._compat import _BUILTINS + for val, typ in [('', 'str'), + (0, 'int'), + (1.0, 'float'), + ((), 'tuple'), + ([], 'list'), + ({}, 'dict'), + ]: + tbs = self._makeOne(val) + self.assertEqual(tbs.getInfo().splitlines(), + [' - class: %s.%s' % (_BUILTINS, typ), + ' - type: %s.%s' % (_BUILTINS, typ), + ]) + + def 
test_getInfo_newstyle_instance(self): + class C(object): + pass + tbs = self._makeOne(C()) + self.assertEqual(tbs.getInfo().splitlines(), + [' - class: %s.C' % self.__class__.__module__, + ' - type: %s.C' % self.__class__.__module__, + ]) + + def test_getInfo_classic_instance(self): + class C: + pass + tbs = self._makeOne(C()) + + lines = tbs.getInfo().splitlines() + self.assertEqual(lines[0], + ' - class: %s.C' % self.__class__.__module__) + kind = '__builtin__.instance' if PY2 else '%s.C' % self.__class__.__module__ + self.assertEqual(lines[1], + ' - type: ' + kind) + + +class TestGlobal(unittest.TestCase): + + def _getTargetClass(self): + from zope.security.checker import Global + return Global + + def _makeOne(self, name, module): + return self._getTargetClass()(name, module) + + def test_ctor_name_and_module(self): + glob = self._makeOne('foo', 'bar.baz') + self.assertEqual(glob.__name__, 'foo') + self.assertEqual(glob.__module__, 'bar.baz') + + def test___reduce__(self): + glob = self._makeOne('foo', 'bar.baz') + self.assertEqual(glob.__reduce__(), 'foo') + + def test___repr__(self): + glob = self._makeOne('foo', 'bar.baz') + self.assertEqual(repr(glob), 'Global(foo,bar.baz)') + + +class Test_NamesChecker(unittest.TestCase): + + def _callFUT(self, *args, **kw): + from zope.security.checker import NamesChecker + return NamesChecker(*args, **kw) + + def test_empty_names_no_kw(self): + from zope.interface.verify import verifyObject + from zope.security.interfaces import IChecker + checker = self._callFUT() + verifyObject(IChecker, checker) + self.assertIsNone(checker.permission_id('nonesuch')) + + def test_w_names_no_kw(self): + from zope.security.checker import CheckerPublic + checker = self._callFUT(('foo', 'bar', 'baz')) + self.assertIs(checker.permission_id('foo'), CheckerPublic) + self.assertIs(checker.permission_id('bar'), CheckerPublic) + self.assertIs(checker.permission_id('baz'), CheckerPublic) + self.assertIsNone(checker.permission_id('nonesuch')) + + 
def test_w_names_no_kw_explicit_permission(self): + other_perm = object() + checker = self._callFUT(('foo', 'bar', 'baz'), + permission_id=other_perm) + self.assertIs(checker.permission_id('foo'), other_perm) + self.assertIs(checker.permission_id('bar'), other_perm) + self.assertIs(checker.permission_id('baz'), other_perm) + self.assertIsNone(checker.permission_id('nonesuch')) + + def test_w_names_w_kw_no_clash(self): + from zope.security.checker import CheckerPublic + other_perm = object() + checker = self._callFUT(('foo', 'bar', 'baz'), bam=other_perm) + self.assertIs(checker.permission_id('foo'), CheckerPublic) + self.assertIs(checker.permission_id('bar'), CheckerPublic) + self.assertIs(checker.permission_id('baz'), CheckerPublic) + self.assertIs(checker.permission_id('bam'), other_perm) + self.assertIsNone(checker.permission_id('nonesuch')) + + def test_w_names_w_kw_w_clash(self): + from zope.security.checker import DuplicationError + other_perm = object() + self.assertRaises(DuplicationError, + self._callFUT, ('foo',), foo=other_perm) + + +class Test_InterfaceChecker(unittest.TestCase): + + def _callFUT(self, *args, **kw): + from zope.security.checker import InterfaceChecker + return InterfaceChecker(*args, **kw) + + def test_simple_iface_wo_kw(self): + from zope.interface import Attribute + from zope.interface import Interface + from zope.security.checker import CheckerPublic + class IFoo(Interface): + bar = Attribute('Bar') + checker = self._callFUT(IFoo) + self.assertIs(checker.permission_id('bar'), CheckerPublic) + self.assertIsNone(checker.permission_id('nonesuch')) + + def test_simple_iface_w_explicit_permission(self): + from zope.interface import Attribute + from zope.interface import Interface + class IFoo(Interface): + bar = Attribute('Bar') + other_perm = object() + checker = self._callFUT(IFoo, other_perm) + self.assertIs(checker.permission_id('bar'), other_perm) + + def test_simple_iface_w_kw(self): + from zope.interface import Attribute + from 
zope.interface import Interface + from zope.security.checker import CheckerPublic + class IFoo(Interface): + bar = Attribute('Bar') + other_perm = object() + checker = self._callFUT(IFoo, baz=other_perm) + self.assertIs(checker.permission_id('bar'), CheckerPublic) + self.assertIs(checker.permission_id('baz'), other_perm) + self.assertIsNone(checker.permission_id('nonesuch')) + + def test_derived_iface(self): + from zope.interface import Attribute + from zope.interface import Interface + from zope.security.checker import CheckerPublic + class IFoo(Interface): + bar = Attribute('Bar') + class IBar(IFoo): + baz = Attribute('Baz') + checker = self._callFUT(IBar) + self.assertIs(checker.permission_id('bar'), CheckerPublic) + self.assertIs(checker.permission_id('baz'), CheckerPublic) + self.assertIsNone(checker.permission_id('nonesuch')) + + def test_w_clash(self): + from zope.interface import Attribute + from zope.interface import Interface + from zope.security.checker import DuplicationError + class IFoo(Interface): + bar = Attribute('Bar') + bam = Attribute('Bam') + other_perm = object() + self.assertRaises(DuplicationError, + self._callFUT, IFoo, bar=other_perm) + + +class Test_MultiChecker(unittest.TestCase): + + def _callFUT(self, specs): + from zope.security.checker import MultiChecker + return MultiChecker(specs) + + def test_empty(self): + from zope.interface.verify import verifyObject + from zope.security.interfaces import IChecker + checker = self._callFUT([]) + verifyObject(IChecker, checker) + self.assertIsNone(checker.permission_id('nonesuch')) + + def test_w_spec_as_names(self): + from zope.security.checker import CheckerPublic + checker = self._callFUT([(('foo', 'bar', 'baz'), CheckerPublic)]) + self.assertIs(checker.permission_id('foo'), CheckerPublic) + self.assertIs(checker.permission_id('bar'), CheckerPublic) + self.assertIs(checker.permission_id('baz'), CheckerPublic) + self.assertIsNone(checker.permission_id('nonesuch')) + + def 
test_w_spec_as_iface(self): + from zope.interface import Attribute + from zope.interface import Interface + class IFoo(Interface): + bar = Attribute('Bar') + other_perm = object() + checker = self._callFUT([(IFoo, other_perm)]) + self.assertIs(checker.permission_id('bar'), other_perm) + self.assertIsNone(checker.permission_id('nonesuch')) + + def test_w_spec_as_names_and_iface(self): + from zope.interface import Attribute + from zope.interface import Interface + from zope.security.checker import CheckerPublic + class IFoo(Interface): + bar = Attribute('Bar') + other_perm = object() + checker = self._callFUT([(IFoo, other_perm), + (('foo', 'baz'), CheckerPublic)]) + self.assertIs(checker.permission_id('foo'), CheckerPublic) + self.assertIs(checker.permission_id('bar'), other_perm) + self.assertIs(checker.permission_id('baz'), CheckerPublic) + self.assertIsNone(checker.permission_id('nonesuch')) + + def test_w_spec_as_names_and_iface_clash(self): + from zope.interface import Attribute + from zope.interface import Interface + from zope.security.checker import CheckerPublic + from zope.security.checker import DuplicationError + class IFoo(Interface): + bar = Attribute('Bar') + other_perm = object() + self.assertRaises(DuplicationError, + self._callFUT, [(IFoo, other_perm), + (('foo', 'bar'), CheckerPublic)]) + + def test_w_spec_as_mapping(self): + from zope.security.checker import CheckerPublic + other_perm = object() + spec = {'foo': CheckerPublic, + 'bar': other_perm, + } + checker = self._callFUT([spec]) + self.assertIs(checker.permission_id('foo'), CheckerPublic) + self.assertIs(checker.permission_id('bar'), other_perm) + self.assertIsNone(checker.permission_id('nonesuch')) + + def test_w_spec_as_names_and_mapping_clash(self): + from zope.security.checker import CheckerPublic + from zope.security.checker import DuplicationError + other_perm = object() + spec = { + 'foo': other_perm, + } + self.assertRaises(DuplicationError, + self._callFUT, + [(('foo', 'bar'), 
CheckerPublic), spec]) + + + +class _SelectCheckerBase(object): + # pylint:disable=no-member + + def _callFUT(self, obj): + raise NotImplementedError("Subclass responsibility") + + def setUp(self): + from zope.security.checker import _clear + _clear() + + def tearDown(self): + from zope.security.checker import _clear + _clear() + + def test_w_basic_types_NoProxy(self): + import datetime + from zope.i18nmessageid import Message + msg = Message('msg') + for obj in [object(), + 42, + 3.14, + None, + u'text', + b'binary', + msg, + True, + datetime.timedelta(1), + datetime.datetime.now(), + datetime.date.today(), + datetime.datetime.now().time(), + datetime.tzinfo(), + ]: + self.assertIsNone(self._callFUT(obj)) + + def test_w_checker_inst(self): + from zope.security.checker import Checker + from zope.security.checker import _checkers + class Foo(object): + pass + checker = _checkers[Foo] = Checker({}) + self.assertIs(self._callFUT(Foo()), checker) + + def test_w_factory_returning_checker(self): + from zope.security.checker import Checker + from zope.security.checker import _checkers + class Foo(object): + pass + checker = Checker({}) + def _factory(obj): + return checker + _checkers[Foo] = _factory + self.assertIs(self._callFUT(Foo()), checker) + + def test_w_factory_returning_NoProxy(self): + from zope.security.checker import NoProxy + from zope.security.checker import _checkers + class Foo(object): + pass + def _factory(obj): + return NoProxy + _checkers[Foo] = _factory + self.assertIsNone(self._callFUT(Foo())) + + def test_w_factory_returning_None(self): + from zope.security.checker import _checkers + class Foo(object): + pass + def _factory(obj): + pass + _checkers[Foo] = _factory + self.assertIsNone(self._callFUT(Foo())) + + def test_w_factory_factory(self): + from zope.security.checker import Checker + from zope.security.checker import _checkers + class Foo(object): + pass + checker = Checker({}) + def _factory(obj): + return checker + def _factory_factory(obj): + 
return _factory + _checkers[Foo] = _factory_factory + self.assertIs(self._callFUT(Foo()), checker) + + def test_itertools_checkers(self): + from zope.security.checker import _iteratorChecker + import sys + import itertools + pred = lambda x: x + iterable = (1, 2, 3) + pred_iterable = (pred, iterable) + missing_in_py3 = {'ifilter', 'ifilterfalse', 'imap', + 'izip', 'izip_longest'} + missing_in_py2 = {'zip_longest', 'accumulate', 'compress', + 'combinations', 'combinations_with_replacement'} + missing = missing_in_py3 if sys.version_info[0] >= 3 else missing_in_py2 + for func, args in ( + ('count', ()), + ('cycle', ((),)), + ('dropwhile', pred_iterable), + ('ifilter', pred_iterable), + ('ifilterfalse', pred_iterable), + ('imap', pred_iterable), + ('islice', (iterable, 2)), + ('izip', (iterable,)), + ('izip_longest', (iterable,)), + ('permutations', (iterable,)), + ('product', (iterable,)), + ('repeat', (1, 2)), + ('starmap', pred_iterable), + ('takewhile', pred_iterable), + ('tee', (iterable,)), + # Python 3 additions + ('zip_longest', (iterable,)), + ('accumulate', (iterable,)), + ('compress', (iterable, ())), + ('combinations', (iterable, 1)), + ('combinations_with_replacement', (iterable, 1)), + ): + try: + func = getattr(itertools, func) + except AttributeError: + if func in missing: + continue + self.fail("Attribute error raised", func) + __traceback_info__ = func + result = func(*args) + if func == itertools.tee: + result = result[0] + + self.assertIs(self._callFUT(result), _iteratorChecker) + + +class Test_selectCheckerPy(_SelectCheckerBase, unittest.TestCase): + + def _callFUT(self, obj): + from zope.security.checker import selectCheckerPy + return selectCheckerPy(obj) + + + +@unittest.skipIf(sec_checker.selectChecker is sec_checker.selectCheckerPy, + "Pure Python") +class Test_selectChecker(_SelectCheckerBase, unittest.TestCase): + + def _callFUT(self, obj): + from zope.security.checker import selectChecker + return selectChecker(obj) + + +class 
Test_getCheckerForInstancesOf(unittest.TestCase): + + def setUp(self): + from zope.security.checker import _clear + _clear() + + def tearDown(self): + from zope.security.checker import _clear + _clear() + + def _callFUT(self, obj): + from zope.security.checker import getCheckerForInstancesOf + return getCheckerForInstancesOf(obj) + + def test_miss(self): + class Unknown(object): + pass + self.assertIsNone(self._callFUT(Unknown)) + + def test_hit(self): + from zope.security.checker import _checkers + class Foo(object): + pass + checker = _checkers[Foo] = object() + self.assertIs(self._callFUT(Foo), checker) + + +class Test_defineChecker(unittest.TestCase): + + def setUp(self): + from zope.security.checker import _clear + _clear() + + def tearDown(self): + from zope.security.checker import _clear + _clear() + + def _callFUT(self, type_, checker): + from zope.security.checker import defineChecker + return defineChecker(type_, checker) + + def test_w_wrong_type(self): + checker = object() + for obj in [object(), + 42, + 3.14, + None, + u'text', + b'binary', + True, + ]: + self.assertRaises(TypeError, self._callFUT, obj, checker) + + def test_w_duplicate(self): + from zope.exceptions import DuplicationError + from zope.security.checker import _checkers + class Foo(object): + pass + checker1, checker2 = object(), object() + _checkers[Foo] = checker1 + self.assertRaises(DuplicationError, self._callFUT, Foo, checker2) + + def test_w_newstyle_class(self): + from zope.security.checker import _checkers + checker = object() + class Foo(object): + pass + self._callFUT(Foo, checker) + self.assertIs(_checkers[Foo], checker) + + def test_w_module(self): + import zope.interface + from zope.security.checker import _checkers + checker = object() + self._callFUT(zope.interface, checker) + self.assertIs(_checkers[zope.interface], checker) + + def test_w_oldstyle_class(self): + from zope.security.checker import _checkers + checker = object() + class Foo: + pass + self._callFUT(Foo, 
checker) + self.assertIs(_checkers[Foo], checker) + + +class Test_undefineChecker(unittest.TestCase): + + def setUp(self): + from zope.security.checker import _clear + _clear() + + def tearDown(self): + from zope.security.checker import _clear + _clear() + + def _callFUT(self, type_): + from zope.security.checker import undefineChecker + return undefineChecker(type_) + + def test_miss(self): + class Foo(object): + pass + self.assertRaises(KeyError, self._callFUT, Foo) + + def test_hit(self): + from zope.security.checker import _checkers + class Foo(object): + pass + _checkers[Foo] = object() + self._callFUT(Foo) + self.assertFalse(Foo in _checkers) + + +class TestCombinedChecker(QuietWatchingChecker, + unittest.TestCase): + + def _getTargetClass(self): + from zope.security.checker import CombinedChecker + return CombinedChecker + + def _makeOne(self, checker1=None, checker2=None): + if checker1 is None: + checker1 = self._makeOther() + if checker2 is None: + checker1 = self._makeOther() + return self._getTargetClass()(checker1, checker2) + + def _makeOther(self, get_permissions=None, set_permissions=None): + from zope.security.checker import Checker + if get_permissions is None: + get_permissions = {} + if set_permissions is None: + set_permissions = {} + return Checker(get_permissions, set_permissions) + + def test_class_conforms_to_IChecker(self): + from zope.interface.verify import verifyClass + from zope.security.interfaces import IChecker + verifyClass(IChecker, self._getTargetClass()) + + def test_instance_conforms_to_IChecker(self): + from zope.interface.verify import verifyObject + from zope.security.interfaces import IChecker + verifyObject(IChecker, self._makeOne()) + + def test_check_lhs_ok_rhs_not_called(self): + from zope.security.checker import CheckerPublic + from zope.security.checker import Checker + class _NeverCalled(Checker): + def check(self, object, name): # pylint:disable=redefined-builtin + raise AssertionError("Never called") + + lhs = 
self._makeOther({'name': CheckerPublic}) + rhs = _NeverCalled({}) + combined = self._makeOne(lhs, rhs) + combined.check(object(), 'name') # no raise + + def test_check_lhs_unauth_rhs_ok(self): + from zope.security.checker import CheckerPublic + from zope.security._definitions import thread_local + class _Interaction(object): + def checkPermission(self, obj, perm): + return False + + lhs = self._makeOther({'name': 'view'}) # unauth + rhs = self._makeOther({'name': CheckerPublic}) + combined = self._makeOne(lhs, rhs) + thread_local.interaction = _Interaction() + try: + combined.check(object(), 'name') #no raise + finally: + del thread_local.interaction + + def test_check_lhs_unauth_rhs_forbidden(self): + from zope.security.interfaces import Unauthorized + from zope.security._definitions import thread_local + class _Interaction(object): + def checkPermission(self, obj, perm): + return False + + lhs = self._makeOther({'name': 'view'}) # unauth + rhs = self._makeOther() # forbidden + combined = self._makeOne(lhs, rhs) + thread_local.interaction = _Interaction() + try: + self.assertRaises(Unauthorized, + combined.check, object(), 'name') + finally: + del thread_local.interaction + + def test_check_lhs_unauth_rhs_unauth(self): + from zope.security.interfaces import Unauthorized + from zope.security._definitions import thread_local + class _Interaction(object): + def checkPermission(self, obj, perm): + return False + + lhs = self._makeOther({'name': 'view'}) # unauth + rhs = self._makeOther({'name': 'inspect'}) + combined = self._makeOne(lhs, rhs) + thread_local.interaction = _Interaction() + try: + self.assertRaises(Unauthorized, + combined.check, object(), 'name') + finally: + del thread_local.interaction + + def test_check_lhs_forbidden_rhs_ok(self): + from zope.security.checker import CheckerPublic + + lhs = self._makeOther() # forbidden + rhs = self._makeOther({'name': CheckerPublic}) + combined = self._makeOne(lhs, rhs) + combined.check(object(), 'name') # no raise + 
+ def test_check_lhs_forbidden_rhs_forbidden(self): + from zope.security.interfaces import Forbidden + + lhs = self._makeOther() # forbidden + rhs = self._makeOther() # forbidden + combined = self._makeOne(lhs, rhs) + self.assertRaises(Forbidden, + combined.check, object(), 'name') + + def test_check_lhs_forbidden_rhs_unauth(self): + from zope.security.interfaces import Unauthorized + from zope.security._definitions import thread_local + class _Interaction(object): + def checkPermission(self, obj, perm): + return False + + lhs = self._makeOther() # Forbidden + rhs = self._makeOther({'name': 'inspect'}) + combined = self._makeOne(lhs, rhs) + thread_local.interaction = _Interaction() + try: + self.assertRaises(Unauthorized, + combined.check, object(), 'name') + finally: + del thread_local.interaction + + def test_check_setattr_lhs_ok_rhs_not_called(self): + from zope.security.checker import CheckerPublic + from zope.security.checker import Checker + class _NeverCalled(Checker): + def check_setattr(self, object, name): # pylint:disable=redefined-builtin + raise AssertionError("Never called") + + lhs = self._makeOther(set_permissions={'name': CheckerPublic}) + rhs = _NeverCalled({}) + combined = self._makeOne(lhs, rhs) + combined.check_setattr(object(), 'name') # no raise + + def test_check_setattr_lhs_unauth_rhs_ok(self): + from zope.security.checker import CheckerPublic + from zope.security._definitions import thread_local + class _Interaction(object): + def checkPermission(self, obj, perm): + return False + + lhs = self._makeOther(set_permissions={'name': 'update'}) # unauth + rhs = self._makeOther(set_permissions={'name': CheckerPublic}) + combined = self._makeOne(lhs, rhs) + thread_local.interaction = _Interaction() + try: + combined.check_setattr(object(), 'name') # no raise + finally: + del thread_local.interaction + + def test_check_setattr_lhs_unauth_rhs_forbidden(self): + from zope.security.interfaces import Unauthorized + from zope.security._definitions 
import thread_local + class _Interaction(object): + def checkPermission(self, obj, perm): + return False + + lhs = self._makeOther(set_permissions={'name': 'view'}) # unauth + rhs = self._makeOther() # forbidden + combined = self._makeOne(lhs, rhs) + thread_local.interaction = _Interaction() + try: + self.assertRaises(Unauthorized, + combined.check_setattr, object(), 'name') + finally: + del thread_local.interaction + + def test_check_setattr_lhs_unauth_rhs_unauth(self): + from zope.security.interfaces import Unauthorized + from zope.security._definitions import thread_local + class _Interaction(object): + def checkPermission(self, obj, perm): + return False + + lhs = self._makeOther(set_permissions={'name': 'view'}) # unauth + rhs = self._makeOther(set_permissions={'name': 'inspect'}) # unauth + combined = self._makeOne(lhs, rhs) + thread_local.interaction = _Interaction() + try: + self.assertRaises(Unauthorized, + combined.check_setattr, object(), 'name') + finally: + del thread_local.interaction + + def test_check_setattr_lhs_forbidden_rhs_ok(self): + from zope.security.checker import CheckerPublic + + lhs = self._makeOther() # forbidden + rhs = self._makeOther(set_permissions={'name': CheckerPublic}) + combined = self._makeOne(lhs, rhs) + combined.check_setattr(object(), 'name') # no raise + + def test_check_setattr_lhs_forbidden_rhs_forbidden(self): + from zope.security.interfaces import Forbidden + + lhs = self._makeOther() # forbidden + rhs = self._makeOther() # forbidden + combined = self._makeOne(lhs, rhs) + self.assertRaises(Forbidden, + combined.check_setattr, object(), 'name') + + def test_check_setattr_lhs_forbidden_rhs_unauth(self): + from zope.security.interfaces import Unauthorized + from zope.security._definitions import thread_local + class _Interaction(object): + def checkPermission(self, obj, perm): + return False + + lhs = self._makeOther() # forbidden + rhs = self._makeOther(set_permissions={'name': 'inspect'}) # unauth + combined = 
self._makeOne(lhs, rhs) + thread_local.interaction = _Interaction() + try: + self.assertRaises(Unauthorized, + combined.check_setattr, object(), 'name') + finally: + del thread_local.interaction + +@unittest.skipIf(sec_checker.WatchingCombinedChecker is sec_checker.CombinedChecker, + "WatchingCombinedChecker is the default") +class TestWatchingCombinedChecker(TestCombinedChecker): + + def _getTargetClass(self): + return sec_checker.WatchingCombinedChecker + +class TestCheckerLoggingMixin(unittest.TestCase): + + def _getTargetClass(self): + from zope.security.checker import CheckerLoggingMixin + return CheckerLoggingMixin + + def _makeOne(self, raising=None): + class _Checker(object): + def __init__(self, raising, stream): + self._file = stream + self._raising = raising + def check(self, obj, name): + if self._raising: + raise self._raising + check_getattr = check_setattr = check + class _Derived(self._getTargetClass(), _Checker): + pass + return _Derived(raising, self._makeStream()) + + def _makeStream(self): + class _Stream(list): + def write(self, msg): + self.append(msg) + return _Stream() + + def _makeObject(self): + class _Object(object): + def __repr__(self): + return 'TESTING' + return _Object() + + def test_check_ok_normal_verbosity(self): + checker = self._makeOne() + checker.check(self._makeObject(), 'name') + self.assertEqual(len(checker._file), 0) + + def test_check_ok_raised_verbosity_available_by_default(self): + checker = self._makeOne() + checker.verbosity = 2 + checker.check(self._makeObject(), '__name__') + self.assertEqual(len(checker._file), 1) + self.assertEqual(checker._file[0], + '[CHK] + Always available: __name__ on TESTING\n') + + def test_check_ok_raised_verbosity_normal_name(self): + checker = self._makeOne() + checker.verbosity = 2 + checker.check(self._makeObject(), 'name') + self.assertEqual(len(checker._file), 1) + self.assertEqual(checker._file[0], + '[CHK] + Granted: name on TESTING\n') + + def test_check_unauthorized(self): + from 
zope.security.interfaces import Unauthorized + checker = self._makeOne(Unauthorized) + self.assertRaises(Unauthorized, + checker.check, self._makeObject(), 'name') + self.assertEqual(len(checker._file), 1) + self.assertEqual(checker._file[0], + '[CHK] - Unauthorized: name on TESTING\n') + + def test_check_unauthorized_raised_verbosity(self): + from zope.security.interfaces import Unauthorized + checker = self._makeOne(Unauthorized) + checker.verbosity = 2 + self.assertRaises(Unauthorized, + checker.check, self._makeObject(), 'name') + self.assertEqual(len(checker._file), 1) + self.assertEqual(checker._file[0], + '[CHK] - Unauthorized: name on TESTING\n') + + def test_check_forbidden_attribute(self): + from zope.security.interfaces import ForbiddenAttribute + checker = self._makeOne(ForbiddenAttribute) + self.assertRaises(ForbiddenAttribute, + checker.check, self._makeObject(), 'name') + self.assertEqual(len(checker._file), 1) + self.assertEqual(checker._file[0], + '[CHK] - Forbidden: name on TESTING\n') + + def test_check_getattr_ok_normal_verbosity(self): + checker = self._makeOne() + checker.check(self._makeObject(), 'name') + self.assertEqual(len(checker._file), 0) + + def test_check_getattr_ok_raised_verbosity_available_by_default(self): + checker = self._makeOne() + checker.verbosity = 2 + checker.check_getattr(self._makeObject(), '__name__') + self.assertEqual(len(checker._file), 1) + self.assertEqual(checker._file[0], + '[CHK] + Always available getattr: ' + '__name__ on TESTING\n') + + def test_check_getattr_ok_raised_verbosity_normal_name(self): + checker = self._makeOne() + checker.verbosity = 2 + checker.check_getattr(self._makeObject(), 'name') + self.assertEqual(len(checker._file), 1) + self.assertEqual(checker._file[0], + '[CHK] + Granted getattr: name on TESTING\n') + + def test_check_getattr_unauthorized(self): + from zope.security.interfaces import Unauthorized + checker = self._makeOne(Unauthorized) + self.assertRaises(Unauthorized, + 
checker.check_getattr, self._makeObject(), 'name') + self.assertEqual(len(checker._file), 1) + self.assertEqual(checker._file[0], + '[CHK] - Unauthorized getattr: name on TESTING\n') + + def test_check_getattr_forbidden_attribute(self): + from zope.security.interfaces import ForbiddenAttribute + checker = self._makeOne(ForbiddenAttribute) + self.assertRaises(ForbiddenAttribute, + checker.check_getattr, self._makeObject(), 'name') + self.assertEqual(len(checker._file), 1) + self.assertEqual(checker._file[0], + '[CHK] - Forbidden getattr: name on TESTING\n') + + def test_check_setattr_ok_normal_verbosity(self): + checker = self._makeOne() + checker.check_setattr(self._makeObject(), 'name') + self.assertEqual(len(checker._file), 0) + + def test_check_setattr_ok_raised_verbosity_normal_name(self): + checker = self._makeOne() + checker.verbosity = 2 + checker.check_setattr(self._makeObject(), 'name') + self.assertEqual(len(checker._file), 1) + self.assertEqual(checker._file[0], + '[CHK] + Granted setattr: name on TESTING\n') + + def test_check_setattr_unauthorized(self): + from zope.security.interfaces import Unauthorized + checker = self._makeOne(Unauthorized) + self.assertRaises(Unauthorized, + checker.check_setattr, self._makeObject(), 'name') + self.assertEqual(len(checker._file), 1) + self.assertEqual(checker._file[0], + '[CHK] - Unauthorized setattr: name on TESTING\n') + + def test_check_setattr_forbidden_attribute(self): + from zope.security.interfaces import ForbiddenAttribute + checker = self._makeOne(ForbiddenAttribute) + self.assertRaises(ForbiddenAttribute, + checker.check_setattr, self._makeObject(), 'name') + self.assertEqual(len(checker._file), 1) + self.assertEqual(checker._file[0], + '[CHK] - Forbidden setattr: name on TESTING\n') + + def test_check_setitem_unauthorized(self): + # __setitem__ is an alias for check_getattr, used for speed reasons + # (AFAIU calling tp_setitem from C is much faster than calling a + # method by name). 
+ from zope.security.interfaces import Unauthorized + checker = self._makeOne(Unauthorized) + self.assertRaises(Unauthorized, + checker.__setitem__, self._makeObject(), 'name') + self.assertEqual(len(checker._file), 1) + self.assertEqual(checker._file[0], + '[CHK] - Unauthorized getattr: name on TESTING\n') + + +class Test__instanceChecker(unittest.TestCase): + + def setUp(self): + from zope.security.checker import _clear + _clear() + + def tearDown(self): + from zope.security.checker import _clear + _clear() + + def _callFUT(self, type_): + from zope.security.checker import _instanceChecker + return _instanceChecker(type_) + + def test_miss(self): + from zope.security.checker import _defaultChecker + class Foo(object): + pass + self.assertIs(self._callFUT(Foo()), _defaultChecker) + + def test_hit(self): + from zope.security.checker import _checkers + class Foo(object): + pass + checker = _checkers[Foo] = object() + self.assertIs(self._callFUT(Foo()), checker) + + +class Test_moduleChecker(unittest.TestCase): + + def setUp(self): + from zope.security.checker import _clear + _clear() + + def tearDown(self): + from zope.security.checker import _clear + _clear() + + def _callFUT(self, type_): + from zope.security.checker import moduleChecker + return moduleChecker(type_) + + def test_miss(self): + from zope.interface import verify + self.assertIsNone(self._callFUT(verify)) + + def test_hit(self): + from zope.interface import verify + from zope.security.checker import _checkers + checker = _checkers[verify] = object() + self.assertIs(self._callFUT(verify), checker) + + + + +# Pre-geddon tests start here + +class TestSecurityPolicy(QuietWatchingChecker, + unittest.TestCase): + + def setUp(self): + super(TestSecurityPolicy, self).setUp() + + from zope.security.management import newInteraction + from zope.security.management import setSecurityPolicy + sec_checker._clear() + self.__oldpolicy = setSecurityPolicy(self._makeSecurityPolicy()) + newInteraction() + + def 
tearDown(self): + super(TestSecurityPolicy, self).tearDown() + + from zope.security.management import endInteraction + from zope.security.management import setSecurityPolicy + endInteraction() + setSecurityPolicy(self.__oldpolicy) + sec_checker._clear() + + def _get_old_class_type(self): + # Py3 has no ClassType and no old-style classes + import types + old_type = getattr(types, 'ClassType', type) + self.assertTrue((PY2 and old_type is not type) + or (PY3 and old_type is type)) + return old_type + + def _makeSecurityPolicy(self): + from zope.interface import implementer + from zope.security.interfaces import ISecurityPolicy + @implementer(ISecurityPolicy) + class SecurityPolicy(object): + def checkPermission(self, permission, _object): + return permission == 'test_allowed' + return SecurityPolicy + + def test_defineChecker_oldstyle_class(self): + from zope.security.checker import defineChecker + from zope.security.checker import NamesChecker + old_type = self._get_old_class_type() + class ClassicClass: + __metaclass__ = old_type + self.assertIsInstance(ClassicClass, old_type) + + defineChecker(ClassicClass, NamesChecker()) + + def test_defineChecker_newstyle_class(self): + from zope.security.checker import defineChecker + from zope.security.checker import NamesChecker + class NewStyleClass(object): + pass + self.assertIsInstance(NewStyleClass, type) + defineChecker(NewStyleClass, NamesChecker()) + + def test_defineChecker_module(self): + import zope.security + from zope.security.checker import defineChecker + from zope.security.checker import NamesChecker + defineChecker(zope.security, NamesChecker()) + + def test_defineChecker_error(self): + from zope.security.checker import defineChecker + from zope.security.checker import NamesChecker + not_a_type = object() + self.assertRaises(TypeError, + defineChecker, not_a_type, NamesChecker()) + + def _makeClasses(self): + old_type = self._get_old_class_type() + class OldInst: + a = 1 + def b(self): + raise 
AssertionError("Never called") + c = 2 + def gete(self): + raise AssertionError("Never called") + e = property(gete) + def __getitem__(self, x): + raise AssertionError("Never called") + def __setitem__(self, x, v): + raise AssertionError("Never called") + + self.assertIsInstance(OldInst, old_type) + + class NewInst(OldInst, object): + # This is not needed, but left in to show the change of metaclass + # __metaclass__ = type + def gete(self): + raise AssertionError("Never called") + def sete(self, v): + raise AssertionError("Never called") + e = property(gete, sete) + + self.assertIsInstance(NewInst, type) + return OldInst, NewInst + + # check_getattr cases: + # + # - no attribute there + # - method + # - allow and disallow by permission + def test_check_getattr(self): + # pylint:disable=attribute-defined-outside-init + from zope.security.interfaces import Forbidden + from zope.security.interfaces import Unauthorized + from zope.security.checker import NamesChecker + from zope.security.checker import CheckerPublic + + OldInst, NewInst = self._makeClasses() + + oldinst = OldInst() + oldinst.d = OldInst() + + newinst = NewInst() + newinst.d = NewInst() + + for inst in oldinst, newinst: + checker = NamesChecker(['a', 'b', 'c', '__getitem__'], 'perm') + + self.assertRaises(Unauthorized, checker.check_getattr, inst, 'a') + self.assertRaises(Unauthorized, checker.check_getattr, inst, 'b') + self.assertRaises(Unauthorized, checker.check_getattr, inst, 'c') + self.assertRaises(Unauthorized, checker.check, inst, '__getitem__') + self.assertRaises(Forbidden, checker.check, inst, '__setitem__') + self.assertRaises(Forbidden, checker.check_getattr, inst, 'd') + self.assertRaises(Forbidden, checker.check_getattr, inst, 'e') + self.assertRaises(Forbidden, checker.check_getattr, inst, 'f') + + checker = NamesChecker(['a', 'b', 'c', '__getitem__'], + 'test_allowed') + + checker.check_getattr(inst, 'a') + checker.check_getattr(inst, 'b') + checker.check_getattr(inst, 'c') + 
checker.check(inst, '__getitem__') + self.assertRaises(Forbidden, checker.check, inst, '__setitem__') + self.assertRaises(Forbidden, checker.check_getattr, inst, 'd') + self.assertRaises(Forbidden, checker.check_getattr, inst, 'e') + self.assertRaises(Forbidden, checker.check_getattr, inst, 'f') + + checker = NamesChecker(['a', 'b', 'c', '__getitem__'], + CheckerPublic) + + checker.check_getattr(inst, 'a') + checker.check_getattr(inst, 'b') + checker.check_getattr(inst, 'c') + checker.check(inst, '__getitem__') + self.assertRaises(Forbidden, checker.check, inst, '__setitem__') + self.assertRaises(Forbidden, checker.check_getattr, inst, 'd') + self.assertRaises(Forbidden, checker.check_getattr, inst, 'e') + self.assertRaises(Forbidden, checker.check_getattr, inst, 'f') + + def test_check_setattr(self): + # pylint:disable=attribute-defined-outside-init + from zope.security.interfaces import Forbidden + from zope.security.interfaces import Unauthorized + from zope.security.checker import Checker + from zope.security.checker import CheckerPublic + + OldInst, NewInst = self._makeClasses() + + oldinst = OldInst() + oldinst.d = OldInst() + + newinst = NewInst() + newinst.d = NewInst() + + for inst in oldinst, newinst: + checker = Checker({}, {'a': 'perm', 'z': 'perm'}) + + self.assertRaises(Unauthorized, checker.check_setattr, inst, 'a') + self.assertRaises(Unauthorized, checker.check_setattr, inst, 'z') + self.assertRaises(Forbidden, checker.check_setattr, inst, 'c') + self.assertRaises(Forbidden, checker.check_setattr, inst, 'd') + self.assertRaises(Forbidden, checker.check_setattr, inst, 'e') + self.assertRaises(Forbidden, checker.check_setattr, inst, 'f') + + checker = Checker({}, {'a': 'test_allowed', 'z': 'test_allowed'}) + + checker.check_setattr(inst, 'a') + checker.check_setattr(inst, 'z') + self.assertRaises(Forbidden, checker.check_setattr, inst, 'd') + self.assertRaises(Forbidden, checker.check_setattr, inst, 'e') + self.assertRaises(Forbidden, 
checker.check_setattr, inst, 'f') + + checker = Checker({}, {'a': CheckerPublic, 'z': CheckerPublic}) + + checker.check_setattr(inst, 'a') + checker.check_setattr(inst, 'z') + self.assertRaises(Forbidden, checker.check_setattr, inst, 'd') + self.assertRaises(Forbidden, checker.check_setattr, inst, 'e') + self.assertRaises(Forbidden, checker.check_setattr, inst, 'f') + + def test_proxy(self): + from zope.security.proxy import getChecker + from zope.security.proxy import removeSecurityProxy + from zope.security.checker import BasicTypes_examples + from zope.security.checker import CheckerPublic + from zope.security.checker import NamesChecker + + OldInst, NewInst = self._makeClasses() + + checker = NamesChecker(()) + + rocks = tuple(BasicTypes_examples.values()) + for rock in rocks: + proxy = checker.proxy(rock) + self.assertIs(proxy, rock, (rock, type(proxy))) + + for class_ in OldInst, NewInst: + inst = class_() + + for ob in inst, class_: + proxy = checker.proxy(ob) + self.assertIs(removeSecurityProxy(proxy), ob) + checker = getChecker(proxy) + if ob is inst: + self.assertEqual(checker.permission_id('__str__'), + None) + else: + self.assertEqual(checker.permission_id('__str__'), + CheckerPublic) + + # No longer doing anything special for transparent proxies. + # A proxy needs to provide its own security checker. + + def test_iteration(self): + from zope.security.checker import ProxyFactory + from zope.security.checker import selectChecker + + for i in ((1,), [1]): + _iter = iter(i) + proxy = ProxyFactory(_iter, selectChecker(_iter)) + self.assertEqual(next(proxy), 1) + + def testLayeredProxies(self): + #Test that a Proxy will not be re-proxied. 
+ from zope.security.proxy import Proxy, getObject + from zope.security.checker import Checker + from zope.security.checker import NamesChecker + class Base: + __Security_checker__ = NamesChecker(['__Security_checker__']) + base = Base() + checker = Checker({}) + + # base is not proxied, so we expect a proxy + proxy1 = checker.proxy(base) + self.assertIs(type(proxy1), Proxy) + self.assertIs(getObject(proxy1), base) + + # proxy is a proxy, so we don't expect to get another + proxy2 = checker.proxy(proxy1) + self.assertIs(proxy2, proxy1) + self.assertIs(getObject(proxy2), base) + + + def testMultiChecker(self): + from zope.interface import Interface + + class I1(Interface): + def f1(): + "f1" + def f2(): + "f2" + + class I2(I1): + def f3(): + "f3" + def f4(): + "f4" + + class I3(Interface): + def g(): + "g" + + from zope.exceptions import DuplicationError + + from zope.security.checker import MultiChecker + + self.assertRaises(DuplicationError, + MultiChecker, + [(I1, 'p1'), (I2, 'p2')]) + + self.assertRaises(DuplicationError, + MultiChecker, + [(I1, 'p1'), {'f2': 'p2'}]) + + MultiChecker([(I1, 'p1'), (I2, 'p1')]) + + checker = MultiChecker([ + (I2, 'p1'), + {'a': 'p3'}, + (I3, 'p2'), + (('x', 'y', 'z'), 'p4'), + ]) + + self.assertEqual(checker.permission_id('f1'), 'p1') + self.assertEqual(checker.permission_id('f2'), 'p1') + self.assertEqual(checker.permission_id('f3'), 'p1') + self.assertEqual(checker.permission_id('f4'), 'p1') + self.assertEqual(checker.permission_id('g'), 'p2') + self.assertEqual(checker.permission_id('a'), 'p3') + self.assertEqual(checker.permission_id('x'), 'p4') + self.assertEqual(checker.permission_id('y'), 'p4') + self.assertEqual(checker.permission_id('z'), 'p4') + self.assertEqual(checker.permission_id('zzz'), None) + + def testAlwaysAvailable(self): + from zope.security.checker import NamesChecker + checker = NamesChecker(()) + class C(object): + pass + self.assertEqual(checker.check(C, '__hash__'), None) + 
self.assertEqual(checker.check(C, '__nonzero__'), None) + self.assertEqual(checker.check(C, '__class__'), None) + self.assertEqual(checker.check(C, '__implements__'), None) + self.assertEqual(checker.check(C, '__lt__'), None) + self.assertEqual(checker.check(C, '__le__'), None) + self.assertEqual(checker.check(C, '__gt__'), None) + self.assertEqual(checker.check(C, '__ge__'), None) + self.assertEqual(checker.check(C, '__eq__'), None) + self.assertEqual(checker.check(C, '__ne__'), None) + self.assertEqual(checker.check(C, '__name__'), None) + self.assertEqual(checker.check(C, '__parent__'), None) + + def test_setattr(self): + from zope.security.interfaces import Forbidden + from zope.security.checker import NamesChecker + + OldInst, NewInst = self._makeClasses() + + checker = NamesChecker(['a', 'b', 'c', '__getitem__'], + 'test_allowed') + + for inst in NewInst(), OldInst(): + self.assertRaises(Forbidden, checker.check_setattr, inst, 'a') + self.assertRaises(Forbidden, checker.check_setattr, inst, 'z') + + # TODO: write a test to see that + # Checker.check/check_setattr handle permission + # values that evaluate to False + + def test_ProxyFactory(self): + # pylint:disable=attribute-defined-outside-init + from zope.security.checker import _defaultChecker + from zope.security.checker import defineChecker + from zope.security.checker import NamesChecker + from zope.security.checker import ProxyFactory + from zope.security.proxy import getChecker + from zope.security.proxy import Proxy + class SomeClass(object): + pass + checker = NamesChecker() + specific_checker = NamesChecker() + checker_as_magic_attr = NamesChecker() + + obj = SomeClass() + + proxy = ProxyFactory(obj) + self.assertIs(type(proxy), Proxy) + self.assertIs(getChecker(proxy), _defaultChecker) + + defineChecker(SomeClass, checker) + + proxy = ProxyFactory(obj) + self.assertIs(type(proxy), Proxy) + self.assertIs(getChecker(proxy), checker) + + obj.__Security_checker__ = checker_as_magic_attr + + proxy = 
ProxyFactory(obj) + self.assertIs(type(proxy), Proxy) + self.assertIs(getChecker(proxy), checker_as_magic_attr) + + proxy = ProxyFactory(obj, specific_checker) + self.assertIs(type(proxy), Proxy) + self.assertIs(getChecker(proxy), specific_checker) + + def test_define_and_undefineChecker(self): + from zope.security.checker import defineChecker + from zope.security.checker import NamesChecker + from zope.security.checker import undefineChecker + class SomeClass(object): + pass + obj = SomeClass() + + checker = NamesChecker() + from zope.security.checker import _defaultChecker, selectChecker + self.assertIs(selectChecker(obj), _defaultChecker) + defineChecker(SomeClass, checker) + self.assertIs(selectChecker(obj), checker) + undefineChecker(SomeClass) + self.assertIs(selectChecker(obj), _defaultChecker) + + def test_ProxyFactory_using_proxy(self): + from zope.security.checker import ProxyFactory + from zope.security.checker import NamesChecker + class SomeClass(object): + pass + obj = SomeClass() + checker = NamesChecker() + proxy1 = ProxyFactory(obj) + + proxy2 = ProxyFactory(proxy1) + self.assertIs(proxy1, proxy2) + + # Trying to change the checker on a proxy. + self.assertRaises(TypeError, ProxyFactory, proxy1, checker) + + # Setting exactly the same checker as the proxy already has. + proxy1 = ProxyFactory(obj, checker) + proxy2 = ProxyFactory(proxy1, checker) + self.assertIs(proxy1, proxy2) + + def test_canWrite_canAccess(self): + # the canWrite and canAccess functions are conveniences. Often code + # wants to check if a certain option is open to a user before + # presenting it. If the code relies on a certain permission, the + # Zope 3 goal of keeping knowledge of security assertions out of the + # code and only in the zcml assertions is broken. Instead, ask if the + # current user canAccess or canWrite some pertinent aspect of the + # object. canAccess is used for both read access on an attribute + # and call access to methods. 
+ + # For example, consider this humble pair of class and object. + from zope.security.interfaces import Forbidden + from zope.security.checker import Checker + from zope.security.checker import canAccess + from zope.security.checker import canWrite + from zope.security.checker import defineChecker + class SomeClass(object): + pass + obj = SomeClass() + + # We will establish a checker for the class. This is the standard + # name-based checker, and works by specifying two dicts, one for read + # and one for write. Each item in the dictionary should be an + # attribute name and the permission required to read or write it. + + # For these tests, the SecurityPolicy defined at the top of this file + # is in place. It is a stub. Normally, the security policy would + # have knowledge of interactions and participants, and would determine + # on the basis of the particpants and the object if a certain permission + # were authorized. This stub simply says that the 'test_allowed' + # permission is authorized and nothing else is, for any object you pass + # it. + + # Therefore, according to the checker created here, the current + # 'interaction' (as stubbed out in the security policy) will be allowed + # to access and write foo, and access bar. The interaction is + # unauthorized for accessing baz and writing bar. Any other access or + # write is not merely unauthorized but forbidden--including write access + # for baz. + checker = Checker( + {'foo':'test_allowed', # these are the read settings + 'bar':'test_allowed', + 'baz':'you_will_not_have_this_permission'}, + {'foo':'test_allowed', # these are the write settings + 'bar':'you_will_not_have_this_permission', + 'bing':'you_will_not_have_this_permission'}) + defineChecker(SomeClass, checker) + + # so, our hapless interaction may write and access foo... + self.assertTrue(canWrite(obj, 'foo')) + self.assertTrue(canAccess(obj, 'foo')) + + # ...may access, but not write, bar... 
+ self.assertTrue(not canWrite(obj, 'bar')) + self.assertTrue(canAccess(obj, 'bar')) + + # ...and may access baz. + self.assertTrue(not canAccess(obj, 'baz')) + + # there are no security assertions for writing or reading shazam, so + # checking these actually raises Forbidden. The rationale behind + # exposing the Forbidden exception is primarily that it is usually + # indicative of programming or configuration errors. + self.assertRaises(Forbidden, canAccess, obj, 'shazam') + self.assertRaises(Forbidden, canWrite, obj, 'shazam') + + # However, we special-case canWrite when an attribute has a Read + # setting but no Write setting. Consider the 'baz' attribute from the + # checker above: it is readonly. All users are forbidden to write + # it. This is a very reasonable configuration. Therefore, canWrite + # will hide the Forbidden exception if and only if there is a + # setting for accessing the attribute. + self.assertTrue(not canWrite(obj, 'baz')) + + # The reverse is not true at the moment: an unusal case like the + # write-only 'bing' attribute will return a boolean for canWrite, + # but canRead will simply raise a Forbidden exception, without checking + # write settings. 
+ self.assertTrue(not canWrite(obj, 'bing')) + self.assertRaises(Forbidden, canAccess, obj, 'bing') + +class TestCheckerPublic(unittest.TestCase): + + def test_that_pickling_CheckerPublic_retains_identity(self): + import pickle + from zope.security.checker import CheckerPublic + self.assertIs(pickle.loads(pickle.dumps(CheckerPublic)), + CheckerPublic) + + def test_that_CheckerPublic_identity_works_even_when_proxied(self): + from zope.security.checker import ProxyFactory + from zope.security.checker import CheckerPublic + self.assertIs(ProxyFactory(CheckerPublic), CheckerPublic) + + +class TestMixinDecoratedChecker(unittest.TestCase): + + policy = None + _oldpolicy = None + interaction = None + obj = None + + def decoratedSetUp(self): + from zope.security.management import getInteraction + from zope.security.management import newInteraction + from zope.security.management import setSecurityPolicy + self.policy = self._makeSecurityPolicy() + self._oldpolicy = setSecurityPolicy(self.policy) + newInteraction() + self.interaction = getInteraction() + self.obj = object() + + def decoratedTearDown(self): + from zope.security.management import endInteraction + from zope.security.management import setSecurityPolicy + endInteraction() + setSecurityPolicy(self._oldpolicy) + del self.policy + del self._oldpolicy + del self.obj + del self.interaction + + def _makeSecurityPolicy(self): + from zope.interface import implementer + from zope.security.interfaces import ISecurityPolicy + @implementer(ISecurityPolicy) + class RecordedSecurityPolicy(object): + def __init__(self): + self._checked = [] + self.permissions = {} + def checkPermission(self, permission, _obj): + self._checked.append(permission) + return self.permissions.get(permission, True) + def checkChecked(self, checked): + res = self._checked == checked + self._checked = [] + return res + return RecordedSecurityPolicy + + def check_checking_impl(self, checker): + from zope.security.interfaces import ForbiddenAttribute + o 
= self.obj + checker.check_getattr(o, 'both_get_set') + self.assertTrue(self.interaction.checkChecked(['dc_get_permission'])) + checker.check_getattr(o, 'c_only') + self.assertTrue(self.interaction.checkChecked(['get_permission'])) + checker.check_getattr(o, 'd_only') + self.assertTrue(self.interaction.checkChecked(['dc_get_permission'])) + self.assertRaises(ForbiddenAttribute, + checker.check_getattr, o, + 'completely_different_attr') + self.assertTrue(self.interaction.checkChecked([])) + checker.check(o, '__str__') + self.assertTrue(self.interaction.checkChecked(['get_permission'])) + + checker.check_setattr(o, 'both_get_set') + self.assertTrue(self.interaction.checkChecked(['dc_set_permission'])) + self.assertRaises(ForbiddenAttribute, + checker.check_setattr, o, 'c_only') + self.assertTrue(self.interaction.checkChecked([])) + self.assertRaises(ForbiddenAttribute, + checker.check_setattr, o, 'd_only') + self.assertTrue(self.interaction.checkChecked([])) + + @property + def originalChecker(self): + from zope.security.checker import NamesChecker + return NamesChecker(['both_get_set', 'c_only', '__str__'], + 'get_permission') + + decorationSetMap = {'both_get_set': 'dc_set_permission'} + + decorationGetMap = {'both_get_set': 'dc_get_permission', + 'd_only': 'dc_get_permission'} + + @property + def overridingChecker(self): + from zope.security.checker import Checker + return Checker(self.decorationGetMap, self.decorationSetMap) + +class TestCombinedCheckerMixin(QuietWatchingChecker, + TestMixinDecoratedChecker, + unittest.TestCase): + + def setUp(self): + super(TestCombinedCheckerMixin, self).setUp() + self.decoratedSetUp() + + def tearDown(self): + self.decoratedTearDown() + super(TestCombinedCheckerMixin, self).tearDown() + + def test_checking(self): + from zope.security.interfaces import Unauthorized + from zope.security.checker import CombinedChecker + cc = CombinedChecker(self.overridingChecker, self.originalChecker) + self.check_checking_impl(cc) + + # When a 
permission is not authorized by the security policy, + # the policy is queried twice per check_getattr -- once for each + # checker. + self.interaction.permissions['dc_get_permission'] = False + cc.check_getattr(self.obj, 'both_get_set') + self.assertTrue( + self.interaction.checkChecked(['dc_get_permission', + 'get_permission']) + ) + + # This should raise Unauthorized instead of ForbiddenAttribute, since + # access can be granted if you e.g. login with different credentials. + self.assertRaises(Unauthorized, cc.check_getattr, self.obj, 'd_only') + self.assertRaises(Unauthorized, cc.check, self.obj, 'd_only') + + def test_interface(self): + from zope.interface.verify import verifyObject + from zope.security.checker import CombinedChecker + from zope.security.interfaces import IChecker + dc = CombinedChecker(self.overridingChecker, self.originalChecker) + verifyObject(IChecker, dc) + + +class TestBasicTypes(unittest.TestCase): + + def setUp(self): + from zope.security.checker import _clear + _clear() + + def tearDown(self): + from zope.security.checker import _clear + _clear() + + def test___setitem__(self): + from zope.security.checker import BasicTypes + from zope.security.checker import _checkers + class Foo(object): + pass + checker = object() + BasicTypes[Foo] = checker + self.assertIs(BasicTypes[Foo], checker) + self.assertIs(_checkers[Foo], checker) + + def test___delitem__(self): + from zope.security.checker import BasicTypes + from zope.security.checker import _checkers + class Foo(object): + pass + checker = object() + BasicTypes[Foo] = checker + del BasicTypes[Foo] + self.assertFalse(Foo in BasicTypes) + self.assertFalse(Foo in _checkers) + + def test_clear(self): + from zope.security.checker import BasicTypes + self.assertRaises(NotImplementedError, BasicTypes.clear) + + def test_update(self): + from zope.security.checker import BasicTypes + from zope.security.checker import _checkers + class Foo(object): + pass + checker = object() + 
BasicTypes.update({Foo: checker}) + self.assertIs(BasicTypes[Foo], checker) + self.assertIs(_checkers[Foo], checker) + + + def test(self): + from zope.security.checker import BasicTypes + from zope.security.checker import NoProxy + from zope.security.checker import _checkers + from zope.security.checker import _clear + class MyType(object): + pass + class MyType2(object): + pass + + # When an item is added to the basic types, it should also be added to + # the list of checkers. + BasicTypes[MyType] = NoProxy + self.assertIn(MyType, _checkers) + + # If we clear the checkers, the type should still be there + _clear() + self.assertIn(MyType, BasicTypes) + self.assertIn(MyType, _checkers) + + # Now delete the type from the dictionary, will also delete it from + # the checkers + del BasicTypes[MyType] + self.assertNotIn(MyType, BasicTypes) + self.assertNotIn(MyType, _checkers) + + # The quick way of adding new types is using update + BasicTypes.update({MyType: NoProxy, MyType2: NoProxy}) + self.assertIn(MyType, BasicTypes) + self.assertIn(MyType2, BasicTypes) + self.assertIn(MyType, _checkers) + self.assertIn(MyType2, _checkers) + + # Let's remove the two new types + del BasicTypes[MyType] + del BasicTypes[MyType2] + + # Of course, BasicTypes is a full dictionary. This dictionary is by + # default filled with several entries: + keys = BasicTypes.keys() + self.assertIn(bool, keys) + self.assertIn(int, keys) + self.assertIn(float, keys) + self.assertIn(str, keys) + try: + unicode + except NameError: # pragma: no cover Py3k + pass + else: # pragma: no cover Python2 + self.assertIn(unicode, keys) + self.assertIn(object, keys) + # ... + + # Finally, the ``clear()`` method has been deactivated to avoid + # unwanted deletions. 
+ self.assertRaises(NotImplementedError, BasicTypes.clear) + +def test_suite(): + return unittest.defaultTestLoader.loadTestsFromName(__name__) diff --git a/thesisenv/lib/python3.6/site-packages/zope/security/tests/test_decorator.py b/thesisenv/lib/python3.6/site-packages/zope/security/tests/test_decorator.py new file mode 100644 index 0000000..650380d --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/security/tests/test_decorator.py @@ -0,0 +1,176 @@ +############################################################################## +# +# Copyright (c) 2003 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. 
+# +############################################################################## +"""Test zope.security.decorator +""" +import unittest +from zope.security.tests import QuietWatchingChecker + + +class DecoratedSecurityCheckerDescriptorTests(QuietWatchingChecker, + unittest.TestCase): + + def setUp(self): + super(DecoratedSecurityCheckerDescriptorTests, self).setUp() + from zope.security.checker import _clear + _clear() + + def tearDown(self): + super(DecoratedSecurityCheckerDescriptorTests, self).tearDown() + from zope.security.checker import _clear + _clear() + + def _getTargetClass(self): + from zope.security.decorator import DecoratedSecurityCheckerDescriptor + return DecoratedSecurityCheckerDescriptor + + def _makeOne(self): + return self._getTargetClass()() + + def test_neither_wrapper_nor_object_has_checker(self): + from zope.proxy import ProxyBase + from zope.security.checker import NoProxy + from zope.security.checker import defineChecker + + class Foo(object): + a = 'a' + defineChecker(Foo, NoProxy) + foo = Foo() + + class Wrapper(ProxyBase): + b = 'b' + __Security_checker__ = self._makeOne() + defineChecker(Wrapper, NoProxy) + wrapper = Wrapper(foo) + self.assertRaises(AttributeError, + getattr, wrapper, '__Security_checker__') + + def test_both_wrapper_and_object_have_checkers_not_security_proxied(self): + from zope.proxy import ProxyBase + from zope.security.checker import CombinedChecker + from zope.security.checker import NamesChecker + from zope.security.checker import defineChecker + from zope.security.interfaces import ForbiddenAttribute + + class Foo(object): + a = 'a' + fooChecker = NamesChecker(['a']) # a is public + defineChecker(Foo, fooChecker) + foo = Foo() + fooChecker.check(foo, 'a') # no raise + self.assertRaises(ForbiddenAttribute, + fooChecker.check, foo, 'b') + + class Wrapper(ProxyBase): + b = 'b' + __Security_checker__ = self._makeOne() + wrapperChecker = NamesChecker(['b']) # b is public + defineChecker(Wrapper, wrapperChecker) + 
wrapper = Wrapper(foo) + self.assertRaises(ForbiddenAttribute, + wrapperChecker.check, foo, 'a') + wrapperChecker.check(foo, 'b') # no raise + + checker = wrapper.__Security_checker__ + self.assertTrue(isinstance(checker, CombinedChecker)) + checker.check(wrapper, 'a') # no raise + checker.check(wrapper, 'b') # no raise + + def test_only_wrapper_has_checker(self): + from zope.proxy import ProxyBase + from zope.security.checker import NamesChecker + from zope.security.checker import NoProxy + from zope.security.checker import defineChecker + + class Foo(object): + a = 'a' + foo = Foo() + defineChecker(Foo, NoProxy) + + class Wrapper(ProxyBase): + b = 'b' + __Security_checker__ = self._makeOne() + wrapperChecker = NamesChecker(['b']) # b is public + defineChecker(Wrapper, wrapperChecker) + wrapper = Wrapper(foo) + self.assertTrue(wrapper.__Security_checker__ is wrapperChecker) + + def test_only_object_has_checker(self): + from zope.proxy import ProxyBase + from zope.security.checker import NamesChecker + from zope.security.checker import NoProxy + from zope.security.checker import defineChecker + + class Foo(object): + a = 'a' + fooChecker = NamesChecker(['a']) # a is public + defineChecker(Foo, fooChecker) + foo = Foo() + + class Wrapper(ProxyBase): + b = 'b' + __Security_checker__ = self._makeOne() + defineChecker(Wrapper, NoProxy) + wrapper = Wrapper(foo) + self.assertTrue(wrapper.__Security_checker__ is fooChecker) + + + def test_both_wrapper_and_object_have_checkers_security_proxied(self): + from zope.proxy import ProxyBase + from zope.security.checker import CombinedChecker + from zope.security.checker import NamesChecker + from zope.security.checker import defineChecker + from zope.security.proxy import ProxyFactory + + class Foo(object): + a = 'a' + fooChecker = NamesChecker(['a']) # a is public + defineChecker(Foo, fooChecker) + foo = Foo() + f_sec = ProxyFactory(foo) + + class Wrapper(ProxyBase): + b = 'b' + __Security_checker__ = self._makeOne() + 
wrapperChecker = NamesChecker(['b']) # b is public + defineChecker(Wrapper, wrapperChecker) + w_sec = Wrapper(f_sec) + + checker = w_sec.__Security_checker__ + self.assertTrue(isinstance(checker, CombinedChecker)) + checker.check(w_sec, 'a') # no raise + checker.check(w_sec, 'b') # no raise + + def test_cannot_overwrite(self): + from zope.proxy import ProxyBase + from zope.security.checker import NoProxy + from zope.security.checker import defineChecker + + class Foo(object): + a = 'a' + defineChecker(Foo, NoProxy) + foo = Foo() + + class Wrapper(ProxyBase): + b = 'b' + __Security_checker__ = self._makeOne() + wrapper = Wrapper(foo) + + def _try(): + wrapper.__Security_checker__ = None + + self.assertRaises(TypeError, _try) + + +def test_suite(): + return unittest.defaultTestLoader.loadTestsFromName(__name__) diff --git a/thesisenv/lib/python3.6/site-packages/zope/security/tests/test_location.py b/thesisenv/lib/python3.6/site-packages/zope/security/tests/test_location.py new file mode 100644 index 0000000..bc5fb03 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/security/tests/test_location.py @@ -0,0 +1,45 @@ +############################################################################## +# +# Copyright (c) 2003 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. 
+# +############################################################################## +"""Test location support +""" +import unittest + +from zope.location.location import LocationProxy +from zope.security.tests import QuietWatchingChecker + + +class LocationSecurityProxyTests(QuietWatchingChecker, + unittest.TestCase): + + def test_locationproxy_security(self): + from zope.security.checker import defineChecker + from zope.security.checker import NamesChecker + from zope.security.proxy import ProxyFactory + class Unlocated(object): + a = 'a' + unlocated = Unlocated() + located = LocationProxy(unlocated) + + # define a checker for the unlocated object, which will also be + # used by the security proxy as the LocationProxy defines + # __Security_checker__: + unlocatedChecker = NamesChecker(['a']) + defineChecker(Unlocated, unlocatedChecker) + + secure_located = ProxyFactory(located) + self.assertEqual(secure_located.a, 'a') + + +def test_suite(): + return unittest.defaultTestLoader.loadTestsFromName(__name__) diff --git a/thesisenv/lib/python3.6/site-packages/zope/security/tests/test_management.py b/thesisenv/lib/python3.6/site-packages/zope/security/tests/test_management.py new file mode 100644 index 0000000..93ce19d --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/security/tests/test_management.py @@ -0,0 +1,194 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. 
+# +############################################################################## +""" Unit tests for zope.security.management +""" +import unittest + + +class Test(unittest.TestCase): + + def setUp(self): + self._cleanUp() + + def tearDown(self): + self._cleanUp() + + def _cleanUp(self): + from zope.security.management import _clear + from zope.security.management import endInteraction + _clear() + endInteraction() + + def test_import(self): + from zope.interface.verify import verifyObject + from zope.security import management + from zope.security.interfaces import ISecurityManagement + from zope.security.interfaces import IInteractionManagement + + verifyObject(ISecurityManagement, management) + verifyObject(IInteractionManagement, management) + + def test_securityPolicy(self): + from zope.security.management import setSecurityPolicy + from zope.security.management import getSecurityPolicy + from zope.security.simplepolicies import PermissiveSecurityPolicy + + policy = PermissiveSecurityPolicy + setSecurityPolicy(policy) + self.assertTrue(getSecurityPolicy() is policy) + + def test_getInteraction_none_present(self): + from zope.security.interfaces import NoInteraction + from zope.security.management import getInteraction + self.assertRaises(NoInteraction, getInteraction) + + def test_queryInteraction_none_present(self): + from zope.security.management import queryInteraction + self.assertEqual(queryInteraction(), None) + + def test_newInteraction(self): + from zope.security.management import newInteraction + from zope.security.management import queryInteraction + newInteraction() + interaction = queryInteraction() + self.assertTrue(interaction is not None) + + def test_newInteraction_repeated_without_end(self): + from zope.security.management import ExistingInteraction + from zope.security.management import newInteraction + newInteraction() + self.assertRaises(ExistingInteraction, newInteraction) + + def test_endInteraction(self): + from 
zope.security.management import endInteraction + from zope.security.management import newInteraction + from zope.security.management import queryInteraction + newInteraction() + endInteraction() + self.assertEqual(queryInteraction(), None) + + def test_endInteraction_repeated(self): + from zope.security.management import endInteraction + from zope.security.management import newInteraction + from zope.security.management import queryInteraction + newInteraction() + interaction = queryInteraction() + endInteraction() + self.assertEqual(queryInteraction(), None) + endInteraction() + self.assertEqual(queryInteraction(), None) + + def test_restoreInteraction_after_end(self): + from zope.security.management import endInteraction + from zope.security.management import newInteraction + from zope.security.management import queryInteraction + from zope.security.management import restoreInteraction + newInteraction() + interaction = queryInteraction() + endInteraction() + restoreInteraction() + self.assertTrue(interaction is queryInteraction()) + + def test_restoreInteraction_after_new(self): + from zope.security.management import newInteraction + from zope.security.management import queryInteraction + from zope.security.management import restoreInteraction + newInteraction() + self.assertTrue(queryInteraction() is not None) + restoreInteraction() # restore to no interaction + self.assertTrue(queryInteraction() is None) + + def test_restoreInteraction_after_neither(self): + from zope.security.management import queryInteraction + from zope.security.management import restoreInteraction + from zope.security._definitions import thread_local + try: + del thread_local.interaction + except AttributeError: + pass + try: + del thread_local.previous_interaction + except AttributeError: + pass + restoreInteraction() + self.assertTrue(queryInteraction() is None) + + def test_checkPermission_w_no_interaction(self): + from zope.security.management import checkPermission + from 
zope.security.interfaces import NoInteraction + permission = 'zope.Test' + obj = object() + self.assertRaises(NoInteraction, checkPermission, permission, obj) + + def test_checkPermission_w_interaction(self): + from zope.security.management import checkPermission + from zope.security.management import setSecurityPolicy + from zope.security.management import queryInteraction + from zope.security.management import newInteraction + + permission = 'zope.Test' + obj = object() + + class PolicyStub(object): + def checkPermission(s, p, o,): + self.assertTrue(p is permission) + self.assertTrue(o is obj) + self.assertTrue(s is queryInteraction() or s is interaction) + return s is interaction + + setSecurityPolicy(PolicyStub) + newInteraction() + interaction = queryInteraction() + self.assertEqual(checkPermission(permission, obj), True) + + def test_checkPermission_forbidden_policy(self): + from zope.security import checkPermission + from zope.security.checker import CheckerPublic + from zope.security.management import setSecurityPolicy + from zope.security.management import newInteraction + + obj = object() + + class ForbiddenPolicyStub(object): + def checkPermission(s, p, o): + return False + + setSecurityPolicy(ForbiddenPolicyStub) + newInteraction() + self.assertEqual(checkPermission('zope.Test', obj), False) + self.assertEqual(checkPermission(None, obj), True) + self.assertEqual(checkPermission(CheckerPublic, obj), True) + + + def test_system_user(self): + from zope.interface.verify import verifyObject + from zope.security.interfaces import IPrincipal + from zope.security.interfaces import ISystemPrincipal + from zope.security.management import system_user + + self.assertEqual(system_user.id, + u'zope.security.management.system_user') + + self.assertEqual(system_user.title, u'System') + + for name in 'id', 'title', 'description': + self.assertIsInstance(getattr(system_user, name), + type(u'')) + + verifyObject(IPrincipal, system_user) + verifyObject(ISystemPrincipal, 
system_user) + + +def test_suite(): + return unittest.defaultTestLoader.loadTestsFromName(__name__) diff --git a/thesisenv/lib/python3.6/site-packages/zope/security/tests/test_metaconfigure.py b/thesisenv/lib/python3.6/site-packages/zope/security/tests/test_metaconfigure.py new file mode 100644 index 0000000..20477c0 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/security/tests/test_metaconfigure.py @@ -0,0 +1,646 @@ +############################################################################## +# +# Copyright (c) 2012 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Test ZCML directives +""" +import unittest +from zope.security.interfaces import PUBLIC_PERMISSION_NAME as zope_Public + +class Test_dottedName(unittest.TestCase): + + def _callFUT(self, obj): + from zope.security.metaconfigure import dottedName + return dottedName(obj) + + def test_dottted_name_w_None(self): + self.assertEqual(self._callFUT(None), 'None') + + def test_dottted_name_w_class(self): + self.assertEqual(self._callFUT(Test_dottedName), + 'zope.security.tests.test_metaconfigure.' 
+ + 'Test_dottedName') + + +class ClassDirectiveTests(unittest.TestCase): + + def _getTargetClass(self): + from zope.security.metaconfigure import ClassDirective + return ClassDirective + + def _makeOne(self, _context, class_): + return self._getTargetClass()(_context, class_) + + #def test_ctor_non_class(self): TODO needs better guard in __init__ + + def test_implements_empty(self): + context = DummyZCMLContext() + directive = self._makeOne(context, Foo) + directive.implements(context, []) + self.assertEqual(len(context._actions), 0) + + def test_implements_single_interface(self): + from zope.component.interface import provideInterface + from zope.interface import Interface + from zope.interface import classImplements + class IFoo(Interface): + pass + context = DummyZCMLContext() + directive = self._makeOne(context, Foo) + directive.implements(context, [IFoo]) + self.assertEqual(len(context._actions), 2) + self.assertEqual(context._actions[0]['discriminator'][:2], + ('ContentDirective', Foo, )) #3rd is object() + self.assertTrue(context._actions[0]['callable'] is classImplements) + self.assertEqual(context._actions[0]['args'], (Foo, IFoo)) + self.assertTrue(context._actions[1]['discriminator'] is None) + self.assertTrue(context._actions[1]['callable'] is provideInterface) + self.assertEqual(context._actions[1]['args'], + ('zope.security.tests.test_metaconfigure.IFoo', IFoo)) + + def test_implements_multiple_interfaces(self): + from zope.component.interface import provideInterface + from zope.interface import Interface + from zope.interface import classImplements + class IFoo(Interface): + pass + class IBar(Interface): + pass + context = DummyZCMLContext() + directive = self._makeOne(context, Foo) + directive.implements(context, [IFoo, IBar]) + self.assertEqual(len(context._actions), 4) + self.assertEqual(context._actions[0]['discriminator'][:2], + ('ContentDirective', Foo, )) #3rd is object() + self.assertTrue(context._actions[0]['callable'] is classImplements) + 
self.assertEqual(context._actions[0]['args'], (Foo, IFoo)) + self.assertTrue(context._actions[1]['discriminator'] is None) + self.assertTrue(context._actions[1]['callable'] is provideInterface) + self.assertEqual(context._actions[1]['args'], + ('zope.security.tests.test_metaconfigure.IFoo', IFoo)) + self.assertEqual(context._actions[2]['discriminator'][:2], + ('ContentDirective', Foo, )) #3rd is object() + self.assertTrue(context._actions[2]['callable'] is classImplements) + self.assertEqual(context._actions[2]['args'], (Foo, IBar)) + self.assertTrue(context._actions[3]['discriminator'] is None) + self.assertTrue(context._actions[3]['callable'] is provideInterface) + self.assertEqual(context._actions[3]['args'], + ('zope.security.tests.test_metaconfigure.IBar', IBar)) + + def test_require_only_like_class(self): + from zope.security.protectclass import protectLikeUnto + class Bar(object): + pass + context = DummyZCMLContext() + directive = self._makeOne(context, Foo) + directive.require(context, like_class=Bar) + self.assertEqual(len(context._actions), 1) + self.assertEqual(context._actions[0]['discriminator'][:2], + ('mimic', Foo, )) #3rd is object() + self.assertTrue(context._actions[0]['callable'] is protectLikeUnto) + self.assertEqual(context._actions[0]['args'], (Foo, Bar)) + + def test_require_only_permission(self): + from zope.configuration.exceptions import ConfigurationError + context = DummyZCMLContext() + directive = self._makeOne(context, Foo) + self.assertRaises(ConfigurationError, + directive.require, context, permission='testing') + + def test_require_no_like_class_wo_permission(self): + from zope.configuration.exceptions import ConfigurationError + context = DummyZCMLContext() + directive = self._makeOne(context, Foo) + self.assertRaises(ConfigurationError, + directive.require, context, attributes=('foo', 'bar')) + + def test_require_w_single_interface(self): + from zope.component.interface import provideInterface + from zope.interface import 
Attribute + from zope.interface import Interface + from zope.security.protectclass import protectName + class IFoo(Interface): + bar = Attribute("Bar") + baz = Attribute("Baz") + context = DummyZCMLContext() + directive = self._makeOne(context, Foo) + directive.require(context, permission='testing', interface=[IFoo]) + self.assertEqual(len(context._actions), 3) + self.assertEqual(context._actions[0]['discriminator'], + ('protectName', Foo, 'bar')) + self.assertTrue(context._actions[0]['callable'] is protectName) + self.assertEqual(context._actions[0]['args'], (Foo, 'bar', 'testing')) + self.assertEqual(context._actions[1]['discriminator'], + ('protectName', Foo, 'baz')) + self.assertTrue(context._actions[1]['callable'] is protectName) + self.assertEqual(context._actions[1]['args'], (Foo, 'baz', 'testing')) + self.assertTrue(context._actions[2]['discriminator'] is None) + self.assertTrue(context._actions[2]['callable'] is provideInterface) + self.assertEqual(context._actions[2]['args'], + ('zope.security.tests.test_metaconfigure.IFoo', IFoo)) + + def test_require_w_multiple_interfaces(self): + from zope.component.interface import provideInterface + from zope.interface import Attribute + from zope.interface import Interface + from zope.security.protectclass import protectName + class IFoo(Interface): + bar = Attribute("Bar") + class IBar(Interface): + baz = Attribute("Baz") + context = DummyZCMLContext() + directive = self._makeOne(context, Foo) + directive.require(context, permission='testing', interface=[IFoo, IBar]) + self.assertEqual(len(context._actions), 4) + self.assertEqual(context._actions[0]['discriminator'], + ('protectName', Foo, 'bar')) + self.assertTrue(context._actions[0]['callable'] is protectName) + self.assertEqual(context._actions[0]['args'], (Foo, 'bar', 'testing')) + self.assertTrue(context._actions[1]['discriminator'] is None) + self.assertTrue(context._actions[1]['callable'] is provideInterface) + self.assertEqual(context._actions[1]['args'], + 
('zope.security.tests.test_metaconfigure.IFoo', IFoo)) + self.assertEqual(context._actions[2]['discriminator'], + ('protectName', Foo, 'baz')) + self.assertTrue(context._actions[2]['callable'] is protectName) + self.assertEqual(context._actions[2]['args'], (Foo, 'baz', 'testing')) + self.assertTrue(context._actions[3]['discriminator'] is None) + self.assertTrue(context._actions[3]['callable'] is provideInterface) + self.assertEqual(context._actions[3]['args'], + ('zope.security.tests.test_metaconfigure.IBar', IBar)) + + def test_require_w_attributes(self): + from zope.security.protectclass import protectName + context = DummyZCMLContext() + directive = self._makeOne(context, Foo) + directive.require(context, permission='testing', + attributes=['bar', 'baz']) + self.assertEqual(len(context._actions), 2) + self.assertEqual(context._actions[0]['discriminator'], + ('protectName', Foo, 'bar')) + self.assertTrue(context._actions[0]['callable'] is protectName) + self.assertEqual(context._actions[0]['args'], (Foo, 'bar', 'testing')) + self.assertEqual(context._actions[1]['discriminator'], + ('protectName', Foo, 'baz')) + self.assertTrue(context._actions[1]['callable'] is protectName) + self.assertEqual(context._actions[1]['args'], (Foo, 'baz', 'testing')) + + def test_require_w_set_attributes(self): + from zope.security.protectclass import protectSetAttribute + context = DummyZCMLContext() + directive = self._makeOne(context, Foo) + directive.require(context, permission='testing', + set_attributes=['bar', 'baz']) + self.assertEqual(len(context._actions), 2) + self.assertEqual(context._actions[0]['discriminator'], + ('protectSetAttribute', Foo, 'bar')) + self.assertTrue(context._actions[0]['callable'] is protectSetAttribute) + self.assertEqual(context._actions[0]['args'], (Foo, 'bar', 'testing')) + self.assertEqual(context._actions[1]['discriminator'], + ('protectSetAttribute', Foo, 'baz')) + self.assertTrue(context._actions[1]['callable'] is protectSetAttribute) + 
self.assertEqual(context._actions[1]['args'], (Foo, 'baz', 'testing')) + + def test_require_w_set_schema_normal_fields(self): + from zope.component.interface import provideInterface + from zope.schema import Field + from zope.interface import Interface + from zope.security.protectclass import protectSetAttribute + class IFoo(Interface): + bar = Field(u"Bar") + baz = Field(u"Baz") + context = DummyZCMLContext() + directive = self._makeOne(context, Foo) + directive.require(context, permission='testing', set_schema=[IFoo]) + self.assertEqual(len(context._actions), 3) + self.assertEqual(context._actions[0]['discriminator'], + ('protectSetAttribute', Foo, 'bar')) + self.assertTrue(context._actions[0]['callable'] is protectSetAttribute) + self.assertEqual(context._actions[0]['args'], (Foo, 'bar', 'testing')) + self.assertEqual(context._actions[1]['discriminator'], + ('protectSetAttribute', Foo, 'baz')) + self.assertTrue(context._actions[1]['callable'] is protectSetAttribute) + self.assertEqual(context._actions[1]['args'], (Foo, 'baz', 'testing')) + self.assertTrue(context._actions[2]['discriminator'] is None) + self.assertTrue(context._actions[2]['callable'] is provideInterface) + self.assertEqual(context._actions[2]['args'], + ('zope.security.tests.test_metaconfigure.IFoo', IFoo)) + + def test_require_w_set_schema_ignores_non_fields(self): + from zope.component.interface import provideInterface + from zope.interface import Attribute + from zope.interface import Interface + class IFoo(Interface): + bar = Attribute("Bar") + context = DummyZCMLContext() + directive = self._makeOne(context, Foo) + directive.require(context, permission='testing', set_schema=[IFoo]) + self.assertEqual(len(context._actions), 1) + self.assertTrue(context._actions[0]['discriminator'] is None) + self.assertTrue(context._actions[0]['callable'] is provideInterface) + self.assertEqual(context._actions[0]['args'], + ('zope.security.tests.test_metaconfigure.IFoo', IFoo)) + + def 
test_require_w_set_schema_ignores_readonly_fields(self): + from zope.component.interface import provideInterface + from zope.schema import Field + from zope.interface import Interface + class IFoo(Interface): + bar = Field(u"Bar", readonly=True) + context = DummyZCMLContext() + directive = self._makeOne(context, Foo) + directive.require(context, permission='testing', set_schema=[IFoo]) + self.assertEqual(len(context._actions), 1) + self.assertTrue(context._actions[0]['discriminator'] is None) + self.assertTrue(context._actions[0]['callable'] is provideInterface) + self.assertEqual(context._actions[0]['args'], + ('zope.security.tests.test_metaconfigure.IFoo', IFoo)) + + def test_allow_no_attributes_or_interface(self): + from zope.configuration.exceptions import ConfigurationError + context = DummyZCMLContext() + directive = self._makeOne(context, Foo) + self.assertRaises(ConfigurationError, directive.allow, context) + + def test_allow_w_single_interface(self): + from zope.component.interface import provideInterface + from zope.interface import Attribute + from zope.interface import Interface + from zope.security.protectclass import protectName + class IFoo(Interface): + bar = Attribute("Bar") + baz = Attribute("Baz") + context = DummyZCMLContext() + directive = self._makeOne(context, Foo) + directive.allow(context, interface=[IFoo]) + self.assertEqual(len(context._actions), 3) + self.assertEqual(context._actions[0]['discriminator'], + ('protectName', Foo, 'bar')) + self.assertTrue(context._actions[0]['callable'] is protectName) + self.assertEqual(context._actions[0]['args'], + (Foo, 'bar', zope_Public)) + self.assertEqual(context._actions[1]['discriminator'], + ('protectName', Foo, 'baz')) + self.assertTrue(context._actions[1]['callable'] is protectName) + self.assertEqual(context._actions[1]['args'], + (Foo, 'baz', zope_Public)) + self.assertTrue(context._actions[2]['discriminator'] is None) + self.assertTrue(context._actions[2]['callable'] is provideInterface) + 
self.assertEqual(context._actions[2]['args'], + ('zope.security.tests.test_metaconfigure.IFoo', IFoo)) + + def test_allow_w_multiple_interfaces(self): + from zope.component.interface import provideInterface + from zope.interface import Attribute + from zope.interface import Interface + from zope.security.protectclass import protectName + class IFoo(Interface): + bar = Attribute("Bar") + class IBar(Interface): + baz = Attribute("Baz") + context = DummyZCMLContext() + directive = self._makeOne(context, Foo) + directive.allow(context, interface=[IFoo, IBar]) + self.assertEqual(len(context._actions), 4) + self.assertEqual(context._actions[0]['discriminator'], + ('protectName', Foo, 'bar')) + self.assertTrue(context._actions[0]['callable'] is protectName) + self.assertEqual(context._actions[0]['args'], + (Foo, 'bar', zope_Public)) + self.assertTrue(context._actions[1]['discriminator'] is None) + self.assertTrue(context._actions[1]['callable'] is provideInterface) + self.assertEqual(context._actions[1]['args'], + ('zope.security.tests.test_metaconfigure.IFoo', IFoo)) + self.assertEqual(context._actions[2]['discriminator'], + ('protectName', Foo, 'baz')) + self.assertTrue(context._actions[2]['callable'] is protectName) + self.assertEqual(context._actions[2]['args'], + (Foo, 'baz', zope_Public)) + self.assertTrue(context._actions[3]['discriminator'] is None) + self.assertTrue(context._actions[3]['callable'] is provideInterface) + self.assertEqual(context._actions[3]['args'], + ('zope.security.tests.test_metaconfigure.IBar', IBar)) + + def test_allow_w_attributes(self): + from zope.security.protectclass import protectName + context = DummyZCMLContext() + directive = self._makeOne(context, Foo) + directive.allow(context, attributes=['bar', 'baz']) + self.assertEqual(len(context._actions), 2) + self.assertEqual(context._actions[0]['discriminator'], + ('protectName', Foo, 'bar')) + self.assertTrue(context._actions[0]['callable'] is protectName) + 
self.assertEqual(context._actions[0]['args'], + (Foo, 'bar', zope_Public)) + self.assertEqual(context._actions[1]['discriminator'], + ('protectName', Foo, 'baz')) + self.assertTrue(context._actions[1]['callable'] is protectName) + self.assertEqual(context._actions[1]['args'], + (Foo, 'baz', zope_Public)) + + def test___call__(self): + context = DummyZCMLContext() + directive = self._makeOne(context, Foo) + self.assertEqual(directive(), ()) + + def test_factory_wo_explicit_id(self): + from zope.component.interfaces import IFactory + from zope.component.interface import provideInterface + from zope.component.zcml import handler + context = DummyZCMLContext() + context.info = 'INFO' + directive = self._makeOne(context, Foo) + directive.factory(context, title='TITLE', description='DESCRIPTION') + self.assertEqual(len(context._actions), 2) + self.assertEqual(context._actions[0]['discriminator'], + ('utility', IFactory, + 'zope.security.tests.test_metaconfigure.Foo')) + self.assertTrue(context._actions[0]['callable'] is handler) + args = context._actions[0]['args'] + self.assertEqual(args[0], 'registerUtility') + factory = args[1] + self.assertEqual(factory._callable, Foo) + self.assertEqual(factory.title, 'TITLE') + self.assertEqual(factory.description, 'DESCRIPTION') + self.assertEqual(args[2], IFactory) + self.assertEqual(args[3], 'zope.security.tests.test_metaconfigure.Foo') + self.assertEqual(args[4], 'INFO') + self.assertTrue(context._actions[1]['discriminator'] is None) + self.assertTrue(context._actions[1]['callable'] is provideInterface) + self.assertEqual(context._actions[1]['args'], ('', IFactory)) + + def test_factory_w_explicit_id(self): + from zope.component.interfaces import IFactory + from zope.component.interface import provideInterface + from zope.component.zcml import handler + context = DummyZCMLContext() + context.info = 'INFO' + directive = self._makeOne(context, Foo) + directive.factory(context, id='test_id') + 
self.assertEqual(len(context._actions), 2) + self.assertEqual(context._actions[0]['discriminator'], + ('utility', IFactory, 'test_id')) + self.assertTrue(context._actions[0]['callable'] is handler) + args = context._actions[0]['args'] + self.assertEqual(args[0], 'registerUtility') + factory = args[1] + self.assertEqual(factory._callable, Foo) + self.assertEqual(args[2], IFactory) + self.assertEqual(args[3], 'test_id') + self.assertEqual(args[4], 'INFO') + self.assertTrue(context._actions[1]['discriminator'] is None) + self.assertTrue(context._actions[1]['callable'] is provideInterface) + self.assertEqual(context._actions[1]['args'], ('', IFactory)) + + +class Foo(object): + pass + + +class Test_protectModule(unittest.TestCase): + + def setUp(self): + from zope.security.checker import _clear + _clear() + + def tearDown(self): + from zope.security.checker import _clear + _clear() + + def _callFUT(self, module, name, permission): + from zope.security.metaconfigure import protectModule + return protectModule(module, name, permission) + + def test_check_wo_existing_module_checker(self): + from zope.security import tests as module + from zope.security.checker import _checkers + perm = object() + self._callFUT(module, 'name', perm) + checker = _checkers[module] + self.assertTrue(checker.get_permissions['name'] is perm) + + def test_check_w_existing_module_checker_zope_Public(self): + from zope.security import tests as module + from zope.security.checker import Checker + from zope.security.checker import CheckerPublic + from zope.security.checker import _checkers + before = _checkers[module] = Checker({'other': CheckerPublic}) + self._callFUT(module, 'name', zope_Public) + checker = _checkers[module] + self.assertTrue(checker is before) + self.assertTrue(checker.get_permissions['name'] is CheckerPublic) + + +class Test_allow(unittest.TestCase): + + def setUp(self): + from zope.security.checker import _clear + _clear() + + def tearDown(self): + from zope.security.checker 
import _clear + _clear() + + def _callFUT(self, context, attributes=None, interface=None): + from zope.security.metaconfigure import allow + if interface is None: + if attributes is None: + return allow(context) + return allow(context, attributes) + if attributes is None: + return allow(context, interface=interface) + return allow(context, attributes, interface) + + def test_empty(self): + context = DummyZCMLContext() + self._callFUT(context) + self.assertEqual(len(context._actions), 0) + + def test_w_attributes(self): + from zope.security.metaconfigure import protectModule + ATTRS = ['foo', 'bar'] + context = DummyZCMLContext() + context.module = 'testing' + self._callFUT(context, ATTRS) + self.assertEqual(len(context._actions), len(ATTRS)) + self.assertEqual(context._actions[0]['discriminator'], + ('http://namespaces.zope.org/zope:module', + 'testing', 'foo')) + self.assertTrue(context._actions[0]['callable'] is protectModule) + self.assertEqual(context._actions[0]['args'], + ('testing', 'foo', zope_Public)) + self.assertEqual(context._actions[1]['discriminator'], + ('http://namespaces.zope.org/zope:module', + 'testing', 'bar')) + self.assertTrue(context._actions[1]['callable'] is protectModule) + self.assertEqual(context._actions[1]['args'], + ('testing', 'bar', zope_Public)) + + def test_w_interface(self): + from zope.interface import Attribute + from zope.interface import Interface + from zope.security.metaconfigure import protectModule + class IFoo(Interface): + bar = Attribute('Bar') + context = DummyZCMLContext() + context.module = 'testing' + self._callFUT(context, interface=[IFoo]) + self.assertEqual(len(context._actions), 1) + self.assertEqual(context._actions[0]['discriminator'], + ('http://namespaces.zope.org/zope:module', + 'testing', 'bar')) + self.assertTrue(context._actions[0]['callable'] is protectModule) + self.assertEqual(context._actions[0]['args'], + ('testing', 'bar', zope_Public)) + + def test_w_both(self): + from zope.interface import 
Attribute + from zope.interface import Interface + from zope.security.metaconfigure import protectModule + class IFoo(Interface): + bar = Attribute('Bar') + baz = Attribute('Baz') + ATTRS = ['foo', 'bar'] + context = DummyZCMLContext() + context.module = 'testing' + self._callFUT(context, ATTRS, [IFoo]) + self.assertEqual(len(context._actions), 3) + self.assertEqual(context._actions[0]['discriminator'], + ('http://namespaces.zope.org/zope:module', + 'testing', 'foo')) + self.assertTrue(context._actions[0]['callable'] is protectModule) + self.assertEqual(context._actions[0]['args'], + ('testing', 'foo', zope_Public)) + self.assertEqual(context._actions[1]['discriminator'], + ('http://namespaces.zope.org/zope:module', + 'testing', 'bar')) + self.assertTrue(context._actions[1]['callable'] is protectModule) + self.assertEqual(context._actions[1]['args'], + ('testing', 'bar', zope_Public)) + self.assertEqual(context._actions[2]['discriminator'], + ('http://namespaces.zope.org/zope:module', + 'testing', 'baz')) + self.assertTrue(context._actions[2]['callable'] is protectModule) + self.assertEqual(context._actions[2]['args'], + ('testing', 'baz', zope_Public)) + + +class Test_requre(unittest.TestCase): + + def setUp(self): + from zope.security.checker import _clear + _clear() + + def tearDown(self): + from zope.security.checker import _clear + _clear() + + def _callFUT(self, context, permission, attributes=None, interface=None): + from zope.security.metaconfigure import require + if interface is None: + if attributes is None: + return require(context, permission) + return require(context, permission, attributes) + if attributes is None: + return require(context, permission, interface=interface) + return require(context, permission, attributes, interface) + + def test_empty(self): + context = DummyZCMLContext() + context.module = 'testing' + perm = object() + self._callFUT(context, perm) + self.assertEqual(len(context._actions), 0) + + def test_w_attributes(self): + from 
zope.security.metaconfigure import protectModule + ATTRS = ['foo', 'bar'] + context = DummyZCMLContext() + context.module = 'testing' + perm = object() + self._callFUT(context, perm, ATTRS) + self.assertEqual(len(context._actions), len(ATTRS)) + self.assertEqual(context._actions[0]['discriminator'], + ('http://namespaces.zope.org/zope:module', + 'testing', 'foo')) + self.assertTrue(context._actions[0]['callable'] is protectModule) + self.assertEqual(context._actions[0]['args'], + ('testing', 'foo', perm)) + self.assertEqual(context._actions[1]['discriminator'], + ('http://namespaces.zope.org/zope:module', + 'testing', 'bar')) + self.assertTrue(context._actions[1]['callable'] is protectModule) + self.assertEqual(context._actions[1]['args'], + ('testing', 'bar', perm)) + + def test_w_interface(self): + from zope.interface import Attribute + from zope.interface import Interface + from zope.security.metaconfigure import protectModule + class IFoo(Interface): + bar = Attribute('Bar') + context = DummyZCMLContext() + context.module = 'testing' + perm = object() + self._callFUT(context, perm, interface=[IFoo]) + self.assertEqual(len(context._actions), 1) + self.assertEqual(context._actions[0]['discriminator'], + ('http://namespaces.zope.org/zope:module', + 'testing', 'bar')) + self.assertTrue(context._actions[0]['callable'] is protectModule) + self.assertEqual(context._actions[0]['args'], + ('testing', 'bar', perm)) + + def test_w_both(self): + from zope.interface import Attribute + from zope.interface import Interface + from zope.security.metaconfigure import protectModule + class IFoo(Interface): + bar = Attribute('Bar') + baz = Attribute('Baz') + ATTRS = ['foo', 'bar'] + context = DummyZCMLContext() + context.module = 'testing' + perm = object() + self._callFUT(context, perm, ATTRS, [IFoo]) + self.assertEqual(len(context._actions), 3) + self.assertEqual(context._actions[0]['discriminator'], + ('http://namespaces.zope.org/zope:module', + 'testing', 'foo')) + 
self.assertTrue(context._actions[0]['callable'] is protectModule) + self.assertEqual(context._actions[0]['args'], + ('testing', 'foo', perm)) + self.assertEqual(context._actions[1]['discriminator'], + ('http://namespaces.zope.org/zope:module', + 'testing', 'bar')) + self.assertTrue(context._actions[1]['callable'] is protectModule) + self.assertEqual(context._actions[1]['args'], + ('testing', 'bar', perm)) + self.assertEqual(context._actions[2]['discriminator'], + ('http://namespaces.zope.org/zope:module', + 'testing', 'baz')) + self.assertTrue(context._actions[2]['callable'] is protectModule) + self.assertEqual(context._actions[2]['args'], + ('testing', 'baz', perm)) + + +class DummyZCMLContext(object): + + def __init__(self): + self._actions = [] + + def action(self, **kw): + self._actions.append(kw.copy()) + + +def test_suite(): + return unittest.defaultTestLoader.loadTestsFromName(__name__) diff --git a/thesisenv/lib/python3.6/site-packages/zope/security/tests/test_permission.py b/thesisenv/lib/python3.6/site-packages/zope/security/tests/test_permission.py new file mode 100644 index 0000000..82ddea2 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/security/tests/test_permission.py @@ -0,0 +1,196 @@ +############################################################################## +# +# Copyright (c) 2004 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. 
+# +############################################################################## +"""Test permissions +""" +import unittest +from zope.component.testing import PlacelessSetup +from zope.security.interfaces import PUBLIC_PERMISSION_NAME as zope_Public + +class PermissionTests(unittest.TestCase): + + def _getTargetClass(self): + from zope.security.permission import Permission + return Permission + + def _makeOne(self, id, *args): + klass = self._getTargetClass() + return klass(id, *args) + + def test_class_conforms_to_IPermission(self): + from zope.interface.verify import verifyClass + from zope.security.interfaces import IPermission + verifyClass(IPermission, self._getTargetClass()) + + def test_instance_conforms_to_IPermission(self): + from zope.interface.verify import verifyObject + from zope.security.interfaces import IPermission + from zope.schema import getValidationErrors + verifyObject(IPermission, self._makeOne('testing')) + self.assertEqual([], + getValidationErrors(IPermission, + self._makeOne('testing'))) + + def test_ctor_only_id(self): + permission = self._makeOne('testing') + self.assertEqual(permission.id, u'testing') + self.assertEqual(permission.title, u'') + self.assertEqual(permission.description, u'') + + def test_ctor_w_title_and_description(self): + permission = self._makeOne('testing', u'TITLE', u'DESCRIPTION') + self.assertEqual(permission.id, 'testing') + self.assertEqual(permission.title, u'TITLE') + self.assertEqual(permission.description, u'DESCRIPTION') + + +class Test_checkPermission(PlacelessSetup, unittest.TestCase): + + def _callFUT(self, context, permission_id): + from zope.security.permission import checkPermission + return checkPermission(context, permission_id) + + def test_w_CheckerPublic(self): + from zope.security.checker import CheckerPublic + self._callFUT(None, CheckerPublic) # no raise + + def test_miss(self): + self.assertRaises(ValueError, self._callFUT, None, 'nonesuch') + + def test_hit(self): + from zope.component 
import provideUtility + from zope.security.interfaces import IPermission + permission = object() + provideUtility(permission, IPermission, 'testing') + self._callFUT(None, 'testing') # no raise + + +class Test_allPermissions(PlacelessSetup, unittest.TestCase): + + def _callFUT(self): + from zope.security.permission import allPermissions + return allPermissions() + + def test_empty(self): + self.assertEqual(list(self._callFUT()), []) + + def test_w_registration(self): + self.assertEqual(list(self._callFUT()), []) + from zope.component import provideUtility + from zope.security.interfaces import IPermission + permission = object() + provideUtility(permission, IPermission, 'testing') + self.assertEqual(list(self._callFUT()), ['testing']) + + def test_skips_zope_Public(self): + self.assertEqual(list(self._callFUT()), []) + from zope.component import provideUtility + from zope.security.checker import CheckerPublic + from zope.security.interfaces import IPermission + permission = object() + provideUtility(permission, IPermission, 'testing') + provideUtility(CheckerPublic, IPermission, zope_Public) + self.assertEqual(list(self._callFUT()), ['testing']) + + +class Test_PermissionsVocabulary(PlacelessSetup, unittest.TestCase): + + def _callFUT(self): + from zope.security.permission import PermissionsVocabulary + return PermissionsVocabulary() + + def test_empty(self): + from zope.schema.vocabulary import SimpleVocabulary + vocabulary = self._callFUT() + self.assertTrue(isinstance(vocabulary, SimpleVocabulary)) + self.assertEqual(list(vocabulary), []) + + def test_w_registration(self): + self.assertEqual(list(self._callFUT()), []) + from zope.component import provideUtility + from zope.security.interfaces import IPermission + permission = object() + provideUtility(permission, IPermission, 'testing') + vocabulary = self._callFUT() + self.assertEqual([x.token for x in vocabulary], ['testing']) + + def test_includes_zope_Public(self): + self.assertEqual(list(self._callFUT()), 
[]) + from zope.component import provideUtility + from zope.security.checker import CheckerPublic + from zope.security.interfaces import IPermission + permission = object() + provideUtility(permission, IPermission, 'testing') + provideUtility(CheckerPublic, IPermission, zope_Public) + vocabulary = self._callFUT() + self.assertEqual(sorted([x.token for x in vocabulary]), + ['testing', zope_Public]) + + def test_zcml_valid(self): + from zope.configuration import xmlconfig + import zope.security + from zope.interface.verify import verifyObject + from zope.security.interfaces import IPermission + from zope.schema import getValidationErrors + + + xmlconfig.file('configure.zcml', zope.security) + vocabulary = self._callFUT() + vocabulary = sorted(vocabulary, key=lambda term: term.token) + self.assertEqual(6, len(vocabulary)) + + for term in vocabulary: + p = term.value + __traceback_info__ = term.token, p + verifyObject(IPermission, p) + self.assertEqual([], getValidationErrors(IPermission, p)) + +class Test_PermissionIdsVocabulary(PlacelessSetup, unittest.TestCase): + + def _callFUT(self): + from zope.security.permission import PermissionIdsVocabulary + return PermissionIdsVocabulary() + + def test_empty(self): + from zope.schema.vocabulary import SimpleVocabulary + vocabulary = self._callFUT() + self.assertTrue(isinstance(vocabulary, SimpleVocabulary)) + self.assertEqual(list(vocabulary), []) + + def test_w_registration(self): + self.assertEqual(list(self._callFUT()), []) + from zope.component import provideUtility + from zope.security.interfaces import IPermission + permission = object() + provideUtility(permission, IPermission, 'testing') + vocabulary = self._callFUT() + self.assertEqual([x.value for x in vocabulary], ['testing']) + self.assertEqual([x.token for x in vocabulary], ['testing']) + + def test_includes_zope_Public(self): + self.assertEqual(list(self._callFUT()), []) + from zope.component import provideUtility + from zope.security.checker import 
CheckerPublic + from zope.security.interfaces import IPermission + permission = object() + provideUtility(permission, IPermission, 'testing') + provideUtility(CheckerPublic, IPermission, zope_Public) + vocabulary = self._callFUT() + self.assertEqual([x.value for x in vocabulary], + [CheckerPublic, 'testing']) + self.assertEqual([x.token for x in vocabulary], + [zope_Public, 'testing']) + + +def test_suite(): + return unittest.defaultTestLoader.loadTestsFromName(__name__) diff --git a/thesisenv/lib/python3.6/site-packages/zope/security/tests/test_protectclass.py b/thesisenv/lib/python3.6/site-packages/zope/security/tests/test_protectclass.py new file mode 100644 index 0000000..759f151 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/security/tests/test_protectclass.py @@ -0,0 +1,146 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. 
+# +############################################################################## +"""Test handler for 'protectClass' directive +""" +import unittest +from zope.security.interfaces import PUBLIC_PERMISSION_NAME as zope_Public + +class Test_protectName(unittest.TestCase): + + def setUp(self): + from zope.security.checker import _clear + _clear() + + def tearDown(self): + from zope.security.checker import _clear + _clear() + + def _callFUT(self, class_, name, permission): + from zope.security.protectclass import protectName + return protectName(class_, name, permission) + + def test_wo_existing_checker_w_zope_Public(self): + from zope.security.checker import CheckerPublic + from zope.security.checker import _checkers + self._callFUT(Foo, 'bar', zope_Public) + self.assertTrue(_checkers[Foo].get_permissions['bar'] is CheckerPublic) + + def test_w_existing_checker(self): + from zope.security.checker import Checker + from zope.security.checker import _checkers + checker = _checkers[Foo] = Checker({}) + permission = object() + self._callFUT(Foo, 'bar', permission) + self.assertTrue(_checkers[Foo] is checker) + self.assertTrue(checker.get_permissions['bar'] is permission) + + +class Test_protectSetAttribute(unittest.TestCase): + + def setUp(self): + from zope.security.checker import _clear + _clear() + + def tearDown(self): + from zope.security.checker import _clear + _clear() + + def _callFUT(self, class_, name, permission): + from zope.security.protectclass import protectSetAttribute + return protectSetAttribute(class_, name, permission) + + def test_wo_existing_checker_w_zope_Public(self): + from zope.security.checker import CheckerPublic + from zope.security.checker import _checkers + self._callFUT(Foo, 'bar', zope_Public) + self.assertTrue(_checkers[Foo].set_permissions['bar'] is CheckerPublic) + + def test_w_existing_checker(self): + from zope.security.checker import Checker + from zope.security.checker import _checkers + checker = _checkers[Foo] = Checker({}) + 
permission = object() + self._callFUT(Foo, 'bar', permission) + self.assertTrue(_checkers[Foo] is checker) + self.assertTrue(checker.set_permissions['bar'] is permission) + + +class Test_protectLikeUnto(unittest.TestCase): + + def setUp(self): + from zope.security.checker import _clear + _clear() + + def tearDown(self): + from zope.security.checker import _clear + _clear() + + def _callFUT(self, class_, like_unto): + from zope.security.protectclass import protectLikeUnto + return protectLikeUnto(class_, like_unto) + + def test_wo_existing_like_unto_checker(self): + from zope.security.checker import _checkers + self.assertFalse(Foo in _checkers) + self._callFUT(Bar, Foo) + self.assertFalse(Foo in _checkers) + self.assertFalse(Bar in _checkers) + + def test_w_existing_like_unto_checker_wo_existing_checker(self): + from zope.security.checker import Checker + from zope.security.checker import CheckerPublic + from zope.security.checker import defineChecker + from zope.security.checker import _checkers + permission = object() + foo_checker = Checker({'bar': CheckerPublic}, {'bar': permission}) + defineChecker(Foo, foo_checker) + self._callFUT(Bar, Foo) + bar_checker = _checkers[Bar] + self.assertEqual(bar_checker.get_permissions, + foo_checker.get_permissions) + self.assertEqual(bar_checker.set_permissions, + foo_checker.set_permissions) + + def test_w_existing_like_unto_checker_w_existing_checker(self): + from zope.security.checker import Checker + from zope.security.checker import CheckerPublic + from zope.security.checker import defineChecker + from zope.security.checker import _checkers + permission1, permission2 = object(), object() + foo_checker = Checker({'bar': CheckerPublic}, {'bar': permission2}) + defineChecker(Foo, foo_checker) + bar_checker = Checker({'bar': permission1, 'baz': CheckerPublic}, {}) + defineChecker(Bar, bar_checker) + self._callFUT(Bar, Foo) + bar_checker = _checkers[Bar] + self.assertEqual(bar_checker.get_permissions, + {'bar': CheckerPublic, 
'baz': CheckerPublic}) + self.assertEqual(bar_checker.set_permissions, + foo_checker.set_permissions) + + +class Foo(object): + bar = 'Bar' + + +class Bar(Foo): + baz = 'Baz' + + +def test_suite(): + return unittest.TestSuite(( + unittest.makeSuite(Test_protectName), + unittest.makeSuite(Test_protectSetAttribute), + unittest.makeSuite(Test_protectLikeUnto), + )) diff --git a/thesisenv/lib/python3.6/site-packages/zope/security/tests/test_proxy.py b/thesisenv/lib/python3.6/site-packages/zope/security/tests/test_proxy.py new file mode 100644 index 0000000..16bc98a --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/security/tests/test_proxy.py @@ -0,0 +1,2200 @@ +############################################################################## +# +# Copyright (c) 2003 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. 
+# +############################################################################## +"""Security proxy tests +""" +import unittest + +from zope.security._compat import PYTHON2, PURE_PYTHON + +def _skip_if_not_Py2(testfunc): + return unittest.skipUnless(PYTHON2, "Only on Py2")(testfunc) + +# pylint:disable=protected-access,eval-used,too-many-lines,too-many-public-methods + +if not PYTHON2: # pragma: no cover (Python 3) + def coerce(*args): + raise NotImplementedError("Not on Python 3") + cmp = coerce + long = int + +class AbstractProxyTestBase(object): + + # pylint:disable=no-member,blacklisted-name + + # The names of attributes that are spelled different on Py2 + # vs Py3 + itruediv = '__itruediv__' if not PYTHON2 else '__idiv__' + idiv = itruediv + div = '__truediv__' if not PYTHON2 else '__div__' + truediv = div + getslice = '__getitem__' if not PYTHON2 else '__getslice__' + setslice = '__setitem__' if not PYTHON2 else '__setslice__' + + def _getTargetClass(self): + raise NotImplementedError("Subclass responsibility") + + def _makeOne(self, obj, checker): + return self._getTargetClass()(obj, checker) + + def test_ctor_w_checker_None(self): + self.assertRaises(ValueError, self._makeOne, object(), None) + + def test___getattr___w_checker_ok(self): + class Foo(object): + bar = 'Bar' + target = Foo() + checker = DummyChecker() + proxy = self._makeOne(target, checker) + self.assertEqual(proxy.bar, 'Bar') + self.assertEqual(getattr(proxy, 'bar'), 'Bar') + self.assertEqual(checker._checked, 'bar') + self.assertEqual(checker._proxied, 'Bar') + + def test___getattr___w_checker_unauthorized(self): + from zope.security.interfaces import Unauthorized + class Foo(object): + bar = 'Bar' + target = Foo() + checker = DummyChecker(Unauthorized) + proxy = self._makeOne(target, checker) + self.assertRaises(Unauthorized, getattr, proxy, 'bar') + self.assertEqual(checker._checked, 'bar') + + def test___getattr___w_checker_forbidden_attribute(self): + from zope.security.interfaces 
import ForbiddenAttribute + class Foo(object): + bar = 'Bar' + target = Foo() + checker = DummyChecker(ForbiddenAttribute) + proxy = self._makeOne(target, checker) + + with self.assertRaises(ForbiddenAttribute): + getattr(proxy, 'bar') + self.assertEqual(checker._checked, 'bar') + + def test__getattr__w_checker_ok_dynamic_attribute_called_once(self): + class Dynamic(object): + count = 0 + def __getattr__(self, name): + self.count += 1 + if self.count == 1: + # Called from __getattribute__ + raise AttributeError(name) + raise AssertionError("We should not be called more than once") + + target = Dynamic() + checker = DummyChecker() + proxy = self._makeOne(target, checker) + + with self.assertRaisesRegexp(AttributeError, "name"): + getattr(proxy, 'name') + self.assertEqual(1, target.count) + + def test___setattr___w_checker_ok(self): + class Foo(object): + bar = 'Bar' + target = Foo() + checker = DummyChecker() + proxy = self._makeOne(target, checker) + proxy.bar = 'Baz' + self.assertEqual(target.bar, 'Baz') + self.assertEqual(checker._checked, 'bar') + self.assertEqual(checker._proxied, None) + + def test___setattr___w_checker_unauthorized(self): + from zope.security.interfaces import Unauthorized + class Foo(object): + bar = 'Bar' + target = Foo() + checker = DummyChecker(Unauthorized) + proxy = self._makeOne(target, checker) + self.assertRaises(Unauthorized, setattr, proxy, 'bar', 'Baz') + self.assertEqual(checker._checked, 'bar') + + def test___setattr___w_checker_forbidden_attribute(self): + from zope.security.interfaces import ForbiddenAttribute + class Foo(object): + bar = 'Bar' + target = Foo() + checker = DummyChecker(ForbiddenAttribute) + proxy = self._makeOne(target, checker) + self.assertRaises(ForbiddenAttribute, setattr, proxy, 'bar', 'Baz') + self.assertEqual(checker._checked, 'bar') + + def test___delattr___w_checker_ok(self): + class Foo(object): + bar = None + target = Foo() + target.bar = 'Bar' + checker = DummyChecker() + proxy = 
self._makeOne(target, checker) + del proxy.bar + self.assertEqual(target.bar, None) + self.assertEqual(checker._checked, 'bar') + self.assertEqual(checker._proxied, None) + + def test___delattr___w_checker_unauthorized(self): + from zope.security.interfaces import Unauthorized + class Foo(object): + def __init__(self): + self.bar = 'Bar' + target = Foo() + checker = DummyChecker(Unauthorized) + proxy = self._makeOne(target, checker) + self.assertRaises(Unauthorized, delattr, proxy, 'bar') + self.assertEqual(target.bar, 'Bar') + self.assertEqual(checker._checked, 'bar') + + def test___delattr___w_checker_forbidden_attribute(self): + from zope.security.interfaces import ForbiddenAttribute + class Foo(object): + def __init__(self): + self.bar = 'Bar' + target = Foo() + checker = DummyChecker(ForbiddenAttribute) + proxy = self._makeOne(target, checker) + self.assertRaises(ForbiddenAttribute, delattr, proxy, 'bar') + self.assertEqual(target.bar, 'Bar') + self.assertEqual(checker._checked, 'bar') + + def test___str___checker_allows_str(self): + target = object() + checker = DummyChecker() + proxy = self._makeOne(target, checker) + self.assertEqual(str(proxy), str(target)) + + def test___str___checker_forbids_str(self): + from zope.security.interfaces import ForbiddenAttribute + from zope.security._compat import _BUILTINS + from zope.security.proxy import _fmt_address + target = object() + checker = DummyChecker(ForbiddenAttribute) + proxy = self._makeOne(target, checker) + address = _fmt_address(target) + self.assertEqual(str(proxy), + '' % (_BUILTINS, address)) + + def test___str___fails_return(self): + from zope.security.interfaces import ForbiddenAttribute + class CustomStr(object): + def __str__(self): + "" # Docstring, not a return + + target = CustomStr() + checker = DummyChecker(ForbiddenAttribute, allowed=('__str__')) + proxy = self._makeOne(target, checker) + with self.assertRaises(TypeError): + str(target) + with self.assertRaises(TypeError): + str(proxy) + + 
def test___repr___checker_allows_str(self): + target = object() + checker = DummyChecker() + proxy = self._makeOne(target, checker) + self.assertEqual(repr(proxy), repr(target)) + + def test___repr___checker_forbids_str(self): + from zope.security.interfaces import ForbiddenAttribute + from zope.security._compat import _BUILTINS + from zope.security.proxy import _fmt_address + target = object() + checker = DummyChecker(ForbiddenAttribute) + proxy = self._makeOne(target, checker) + address = _fmt_address(target) + self.assertEqual(repr(proxy), + '' % (_BUILTINS, address)) + + def test___str___falls_through_to_repr_when_both_allowed(self): + from zope.security.interfaces import ForbiddenAttribute + class CustomRepr(object): + def __repr__(self): + return "" + + target = CustomRepr() + checker = DummyChecker(ForbiddenAttribute, allowed=("__str__", '__repr__')) + proxy = self._makeOne(target, checker) + self.assertEqual(repr(proxy), "") + self.assertEqual(str(target), "") + self.assertEqual(str(proxy), str(target)) + + def test___str___doesnot_fall_through_to_repr_when_str_not_allowed(self): + from zope.security.interfaces import ForbiddenAttribute + class CustomRepr(object): + def __repr__(self): + return "" + + target = CustomRepr() + checker = DummyChecker(ForbiddenAttribute, allowed=('__repr__')) + proxy = self._makeOne(target, checker) + self.assertEqual(repr(proxy), "") + self.assertEqual(str(target), "") + self.assertIn("" + + target = CustomRepr() + checker = DummyChecker(ForbiddenAttribute, allowed=('__str__')) + proxy = self._makeOne(target, checker) + self.assertEqual(str(target), "") + self.assertEqual(str(proxy), str(target)) + self.assertIn("" # Docstring, not a return + + target = CustomRepr() + checker = DummyChecker(ForbiddenAttribute, allowed=('__repr__')) + proxy = self._makeOne(target, checker) + with self.assertRaises(TypeError): + repr(target) + with self.assertRaises(TypeError): + repr(proxy) + + @_skip_if_not_Py2 + def test___cmp___w_self(self): 
+ target = object() + checker = object() # checker not consulted + proxy = self._makeOne(target, checker) + self.assertEqual(cmp(proxy, proxy), 0) + + @_skip_if_not_Py2 + def test___cmp___w_target(self): + target = object() + checker = object() # checker not consulted + proxy = self._makeOne(target, checker) + self.assertEqual(cmp(proxy, target), 0) + + @_skip_if_not_Py2 + def test___cmp___w_other(self): + target = object() + other = object() + checker = object() # checker not consulted + proxy = self._makeOne(target, checker) + self.assertNotEqual(cmp(proxy, other), 0) + + @_skip_if_not_Py2 + def test___cmp___w_other_proxy(self): + target = object() + checker = object() # checker not consulted + proxy = self._makeOne(target, checker) + o_proxy = self._makeOne(target, checker) + self.assertEqual(cmp(proxy, o_proxy), 0) + + def test__le__(self): + target = 1 + checker = object() # checker not consulted + proxy = self._makeOne(target, checker) + self.assertTrue(proxy <= 1) + + def test__ne__(self): + target = 1 + checker = object() # checker not consulted + proxy = self._makeOne(target, checker) + self.assertFalse(proxy != 1) + + def test__ge__(self): + target = 1 + checker = object() # checker not consulted + proxy = self._makeOne(target, checker) + self.assertTrue(proxy >= 1) + + def test__gt__(self): + target = 1 + checker = object() # checker not consulted + proxy = self._makeOne(target, checker) + self.assertTrue(proxy > 0) + + def test___hash___w_self(self): + target = object() + checker = object() # checker not consulted + proxy = self._makeOne(target, checker) + self.assertEqual(hash(proxy), hash(target)) + + def test___call___w_checker_ok(self): + class Foo(object): + def __call__(self): + return 'Bar' + target = Foo() + checker = DummyChecker() + proxy = self._makeOne(target, checker) + self.assertEqual(proxy(), 'Bar') + self.assertEqual(checker._checked, '__call__') + self.assertEqual(checker._proxied, 'Bar') + + def 
test___call___w_checker_unauthorized(self): + from zope.security.interfaces import Unauthorized + class Foo(object): + def __call__(self): + raise AssertionError("Never called") + target = Foo() + checker = DummyChecker(Unauthorized, ['__name__', '__str__']) + proxy = self._makeOne(target, checker) + self.assertRaises(Unauthorized, proxy) + self.assertEqual(checker._checked, '__call__') + + def test___call___w_checker_forbidden_attribute(self): + from zope.security.interfaces import ForbiddenAttribute + class Foo(object): + def __call__(self): + raise AssertionError("Never called") + target = Foo() + checker = DummyChecker(ForbiddenAttribute, ['__str__']) + proxy = self._makeOne(target, checker) + self.assertRaises(ForbiddenAttribute, proxy) + self.assertEqual(checker._checked, '__call__') + + def test___int___w_checker_allows(self): + target = 3.0 + checker = DummyChecker() + proxy = self._makeOne(target, checker) + self.assertEqual(int(proxy), int(target)) + self.assertEqual(checker._checked, '__int__') + + def test___int___w_checker_forbids(self): + from zope.security.interfaces import ForbiddenAttribute + target = 3.0 + checker = DummyChecker(ForbiddenAttribute) + proxy = self._makeOne(target, checker) + self.assertRaises(ForbiddenAttribute, int, proxy) + self.assertEqual(checker._checked, '__int__') + + def test___float___w_checker_allows(self): + target = 3 + checker = DummyChecker() + proxy = self._makeOne(target, checker) + self.assertEqual(float(proxy), float(target)) + self.assertEqual(checker._checked, '__float__') + + def test___float___w_checker_forbids(self): + from zope.security.interfaces import ForbiddenAttribute + target = 3 + checker = DummyChecker(ForbiddenAttribute) + proxy = self._makeOne(target, checker) + self.assertRaises(ForbiddenAttribute, float, proxy) + self.assertEqual(checker._checked, '__float__') + + @_skip_if_not_Py2 + def test___long___w_checker_allows(self): + target = 3.0 + checker = DummyChecker() + proxy = 
self._makeOne(target, checker) + self.assertEqual(long(proxy), long(target)) + self.assertEqual(checker._checked, '__long__') + + @_skip_if_not_Py2 + def test___long___w_checker_forbids(self): + from zope.security.interfaces import ForbiddenAttribute + target = 3 + checker = DummyChecker(ForbiddenAttribute) + proxy = self._makeOne(target, checker) + self.assertRaises(ForbiddenAttribute, long, proxy) + self.assertEqual(checker._checked, '__long__') + + @_skip_if_not_Py2 + def test___oct___w_checker_allows(self): + target = 3 + checker = DummyChecker() + proxy = self._makeOne(target, checker) + self.assertEqual(oct(proxy), oct(target)) + self.assertEqual(checker._checked, '__oct__') + + @_skip_if_not_Py2 + def test___oct___w_checker_forbids(self): + from zope.security.interfaces import ForbiddenAttribute + target = 3 + checker = DummyChecker(ForbiddenAttribute) + proxy = self._makeOne(target, checker) + self.assertRaises(ForbiddenAttribute, oct, proxy) + self.assertEqual(checker._checked, '__oct__') + + @_skip_if_not_Py2 + def test___hex___w_checker_allows(self): + target = 3 + checker = DummyChecker() + proxy = self._makeOne(target, checker) + self.assertEqual(hex(proxy), hex(target)) + self.assertEqual(checker._checked, '__hex__') + + @_skip_if_not_Py2 + def test___hex___w_checker_forbids(self): + from zope.security.interfaces import ForbiddenAttribute + target = 3 + checker = DummyChecker(ForbiddenAttribute) + proxy = self._makeOne(target, checker) + self.assertRaises(ForbiddenAttribute, hex, proxy) + self.assertEqual(checker._checked, '__hex__') + + def test___add___w_checker_allows(self): + target = 3 + checker = DummyChecker() + proxy = self._makeOne(target, checker) + self.assertEqual(proxy + 2, target + 2) + self.assertEqual(checker._checked, '__add__') + + def test___add___w_checker_forbids(self): + from zope.security.interfaces import ForbiddenAttribute + target = 3 + checker = DummyChecker(ForbiddenAttribute) + proxy = self._makeOne(target, checker) + 
self.assertRaises(ForbiddenAttribute, lambda: proxy + 2) + self.assertEqual(checker._checked, '__add__') + + def test___sub___w_checker_allows(self): + target = 3 + checker = DummyChecker() + proxy = self._makeOne(target, checker) + self.assertEqual(proxy - 2, target - 2) + self.assertEqual(checker._checked, '__sub__') + + def test___sub___w_checker_forbids(self): + from zope.security.interfaces import ForbiddenAttribute + target = 3 + checker = DummyChecker(ForbiddenAttribute) + proxy = self._makeOne(target, checker) + self.assertRaises(ForbiddenAttribute, lambda: proxy - 2) + self.assertEqual(checker._checked, '__sub__') + + def test___mul___w_checker_allows(self): + target = 3 + checker = DummyChecker() + proxy = self._makeOne(target, checker) + self.assertEqual(proxy * 2, target * 2) + self.assertEqual(checker._checked, '__mul__') + + def test___mul___w_checker_forbids(self): + from zope.security.interfaces import ForbiddenAttribute + target = 3 + checker = DummyChecker(ForbiddenAttribute) + proxy = self._makeOne(target, checker) + self.assertRaises(ForbiddenAttribute, lambda: proxy * 2) + self.assertEqual(checker._checked, '__mul__') + + def test___div___w_checker_allows(self): + target = 3 + checker = DummyChecker() + proxy = self._makeOne(target, checker) + self.assertEqual(proxy / 2, target / 2) + self.assertEqual(checker._checked, self.div) + + def test___div___w_checker_forbids(self): + from zope.security.interfaces import ForbiddenAttribute + target = 3 + checker = DummyChecker(ForbiddenAttribute) + proxy = self._makeOne(target, checker) + self.assertRaises(ForbiddenAttribute, lambda: proxy / 2) + self.assertEqual(checker._checked, self.div) + + def test___truediv___w_checker_allows(self): + target = 3.0 + checker = DummyChecker() + proxy = self._makeOne(target, checker) + self.assertEqual(proxy / 2, target / 2) + self.assertEqual(checker._checked, self.truediv) + + def test___truediv___w_checker_forbids(self): + from zope.security.interfaces import 
ForbiddenAttribute + target = 3.0 + checker = DummyChecker(ForbiddenAttribute) + proxy = self._makeOne(target, checker) + self.assertRaises(ForbiddenAttribute, lambda: proxy / 2) + self.assertEqual(checker._checked, self.truediv) + + def test___floordiv___w_checker_allows(self): + target = 3 + checker = DummyChecker() + proxy = self._makeOne(target, checker) + self.assertEqual(proxy // 2, target // 2) + self.assertEqual(checker._checked, '__floordiv__') + + def test___floordiv___w_checker_forbids(self): + from zope.security.interfaces import ForbiddenAttribute + target = 3 + checker = DummyChecker(ForbiddenAttribute) + proxy = self._makeOne(target, checker) + self.assertRaises(ForbiddenAttribute, lambda: proxy // 2) + self.assertEqual(checker._checked, '__floordiv__') + + def test___mod___w_checker_allows(self): + target = 3 + checker = DummyChecker() + proxy = self._makeOne(target, checker) + self.assertEqual(proxy % 2, target % 2) + self.assertEqual(checker._checked, '__mod__') + + def test___mod___w_checker_forbids(self): + from zope.security.interfaces import ForbiddenAttribute + target = 3 + checker = DummyChecker(ForbiddenAttribute) + proxy = self._makeOne(target, checker) + self.assertRaises(ForbiddenAttribute, lambda: proxy % 2) + self.assertEqual(checker._checked, '__mod__') + + def test___divmod___w_checker_allows(self): + target = 3 + checker = DummyChecker() + proxy = self._makeOne(target, checker) + self.assertEqual(divmod(proxy, 2), divmod(target, 2)) + self.assertEqual(checker._checked, '__divmod__') + + def test___divmod___w_checker_forbids(self): + from zope.security.interfaces import ForbiddenAttribute + target = 3 + checker = DummyChecker(ForbiddenAttribute) + proxy = self._makeOne(target, checker) + self.assertRaises(ForbiddenAttribute, lambda: divmod(proxy, 2)) + self.assertEqual(checker._checked, '__divmod__') + + def test___pow___w_x_proxied_allowed(self): + x, y, z = 3, 4, 7 + checker = DummyChecker() + proxy = self._makeOne(x, checker) + 
self.assertEqual(pow(proxy, y, z), pow(x, y, z)) + self.assertEqual(checker._checked, '__pow__') + + def test___pow___w_x_proxied_forbidden(self): + from zope.security.interfaces import ForbiddenAttribute + y, z = 4, 7 + checker = DummyChecker(ForbiddenAttribute) + proxy = self._makeOne(y, checker) + self.assertRaises(ForbiddenAttribute, lambda: pow(proxy, y, z)) + self.assertEqual(checker._checked, '__pow__') + + def test___pow___w_y_proxied_allowed(self): + x, y = 3, 4 + checker = DummyChecker() + proxy = self._makeOne(y, checker) + self.assertEqual(pow(x, proxy), pow(x, y)) + self.assertEqual(checker._checked, '__rpow__') + + def test___pow___w_y_proxied_forbidden(self): + from zope.security.interfaces import ForbiddenAttribute + x, y = 3, 4 + checker = DummyChecker(ForbiddenAttribute) + proxy = self._makeOne(y, checker) + self.assertRaises(ForbiddenAttribute, lambda: pow(x, proxy)) + self.assertEqual(checker._checked, '__rpow__') + + def test___pow___w_z_proxied_allowed(self): + x, y, z = 3, 4, 7 + checker = DummyChecker() + proxy = self._makeOne(z, checker) + self.assertEqual(pow(x, y, proxy), pow(x, y, z)) + self.assertEqual(checker._checked, '__3pow__') + + def test___pow___w_z_proxied_forbidden(self): + from zope.security.interfaces import ForbiddenAttribute + x, y, z = 3, 4, 7 + checker = DummyChecker(ForbiddenAttribute) + proxy = self._makeOne(z, checker) + self.assertRaises(ForbiddenAttribute, lambda: pow(x, y, proxy)) + self.assertEqual(checker._checked, '__3pow__') + + def test___neg___w_checker_allows(self): + target = 3 + checker = DummyChecker() + proxy = self._makeOne(target, checker) + self.assertEqual(-proxy, -target) + self.assertEqual(checker._checked, '__neg__') + + def test___neg___w_checker_forbids(self): + from zope.security.interfaces import ForbiddenAttribute + target = 3 + checker = DummyChecker(ForbiddenAttribute) + proxy = self._makeOne(target, checker) + self.assertRaises(ForbiddenAttribute, lambda: -proxy) + 
self.assertEqual(checker._checked, '__neg__') + + def test___pos___w_checker_allows(self): + target = -3 + checker = DummyChecker() + proxy = self._makeOne(target, checker) + self.assertEqual(+proxy, +target) + self.assertEqual(checker._checked, '__pos__') + + def test___pos___w_checker_forbids(self): + from zope.security.interfaces import ForbiddenAttribute + target = -3 + checker = DummyChecker(ForbiddenAttribute) + proxy = self._makeOne(target, checker) + self.assertRaises(ForbiddenAttribute, lambda: +proxy) + self.assertEqual(checker._checked, '__pos__') + + def test___abs___w_checker_allows(self): + target = -3 + checker = DummyChecker() + proxy = self._makeOne(target, checker) + self.assertEqual(abs(proxy), abs(target)) + self.assertEqual(checker._checked, '__abs__') + + def test___abs___w_checker_forbids(self): + from zope.security.interfaces import ForbiddenAttribute + target = -3 + checker = DummyChecker(ForbiddenAttribute) + proxy = self._makeOne(target, checker) + self.assertRaises(ForbiddenAttribute, abs, proxy) + self.assertEqual(checker._checked, '__abs__') + + def test___bool___(self): + target = 12 + checker = object() + proxy = self._makeOne(target, checker) + self.assertEqual(bool(proxy), bool(target)) + + def test___invert___w_checker_allows(self): + target = 47 + checker = DummyChecker() + proxy = self._makeOne(target, checker) + self.assertEqual(~proxy, ~target) + self.assertEqual(checker._checked, '__invert__') + + def test___invert___w_checker_forbids(self): + from zope.security.interfaces import ForbiddenAttribute + target = 47 + checker = DummyChecker(ForbiddenAttribute) + proxy = self._makeOne(target, checker) + self.assertRaises(ForbiddenAttribute, lambda: ~proxy) + self.assertEqual(checker._checked, '__invert__') + + def test___lshift___w_checker_allows(self): + target = 3 + checker = DummyChecker() + proxy = self._makeOne(target, checker) + self.assertEqual(proxy << 2, target << 2) + self.assertEqual(checker._checked, '__lshift__') + + 
def test___lshift___w_checker_forbids(self): + from zope.security.interfaces import ForbiddenAttribute + target = 3 + checker = DummyChecker(ForbiddenAttribute) + proxy = self._makeOne(target, checker) + self.assertRaises(ForbiddenAttribute, lambda: proxy << 2) + self.assertEqual(checker._checked, '__lshift__') + + def test___rshift___w_checker_allows(self): + target = 3 + checker = DummyChecker() + proxy = self._makeOne(target, checker) + self.assertEqual(proxy >> 2, target >> 2) + self.assertEqual(checker._checked, '__rshift__') + + def test___rshift___w_checker_forbids(self): + from zope.security.interfaces import ForbiddenAttribute + target = 3 + checker = DummyChecker(ForbiddenAttribute) + proxy = self._makeOne(target, checker) + self.assertRaises(ForbiddenAttribute, lambda: proxy >> 2) + self.assertEqual(checker._checked, '__rshift__') + + def test___and___w_checker_allows(self): + target = 3 + checker = DummyChecker() + proxy = self._makeOne(target, checker) + self.assertEqual(proxy & 2, target & 2) + self.assertEqual(checker._checked, '__and__') + + def test___and___w_checker_forbids(self): + from zope.security.interfaces import ForbiddenAttribute + target = 3 + checker = DummyChecker(ForbiddenAttribute) + proxy = self._makeOne(target, checker) + self.assertRaises(ForbiddenAttribute, lambda: proxy & 2) + self.assertEqual(checker._checked, '__and__') + + def test___xor___w_checker_allows(self): + target = 3 + checker = DummyChecker() + proxy = self._makeOne(target, checker) + self.assertEqual(proxy ^ 2, target ^ 2) + self.assertEqual(checker._checked, '__xor__') + + def test___xor___w_checker_forbids(self): + from zope.security.interfaces import ForbiddenAttribute + target = 3 + checker = DummyChecker(ForbiddenAttribute) + proxy = self._makeOne(target, checker) + self.assertRaises(ForbiddenAttribute, lambda: proxy ^ 2) + self.assertEqual(checker._checked, '__xor__') + + def test___or___w_checker_allows(self): + target = 3 + checker = DummyChecker() + proxy = 
self._makeOne(target, checker) + self.assertEqual(proxy | 2, target | 2) + self.assertEqual(checker._checked, '__or__') + + def test___or___w_checker_forbids(self): + from zope.security.interfaces import ForbiddenAttribute + target = 3 + checker = DummyChecker(ForbiddenAttribute) + proxy = self._makeOne(target, checker) + self.assertRaises(ForbiddenAttribute, lambda: proxy | 2) + self.assertEqual(checker._checked, '__or__') + + @_skip_if_not_Py2 + def test___coerce___w_checker_allows(self): + target = 3 + checker = DummyChecker() + proxy = self._makeOne(target, checker) + self.assertEqual(coerce(proxy, 4.0), coerce(target, 4.0)) + self.assertEqual(checker._checked, '__coerce__') + + @_skip_if_not_Py2 + def test___coerce___w_checker_forbids(self): + from zope.security.interfaces import ForbiddenAttribute + target = 3 + checker = DummyChecker(ForbiddenAttribute) + proxy = self._makeOne(target, checker) + self.assertRaises(ForbiddenAttribute, coerce, proxy, 4.0) + self.assertEqual(checker._checked, '__coerce__') + + def test___iadd___not_inplace_checker_allows(self): + target = 3 + checker = DummyChecker() + proxy = before = self._makeOne(target, checker) + proxy += 3 + self.assertIsNot(proxy, before) + self.assertEqual(proxy, 6) + self.assertEqual(checker._checked, '__iadd__') + + def test___iadd___inplace_checker_allows(self): + class Foo(object): + def __init__(self, value): + self.value = value + def __iadd__(self, rhs): + self.value += rhs + return self + target = Foo(3) + checker = DummyChecker() + proxy = before = self._makeOne(target, checker) + proxy += 3 + self.assertIs(proxy, before) + self.assertEqual(target.value, 6) + self.assertEqual(checker._checked, '__iadd__') + + def test___iadd___w_checker_forbids(self): + from zope.security.interfaces import ForbiddenAttribute + target = 3 + checker = DummyChecker(ForbiddenAttribute) + proxy = self._makeOne(target, checker) + with self.assertRaises(ForbiddenAttribute): + proxy += 3 + 
self.assertEqual(checker._checked, '__iadd__') + + def test___isub___not_inplace_checker_allows(self): + target = 3 + checker = DummyChecker() + proxy = before = self._makeOne(target, checker) + proxy -= 3 + self.assertIsNot(proxy, before) + self.assertEqual(proxy, 0) + self.assertEqual(checker._checked, '__isub__') + + def test___isub___inplace_checker_allows(self): + class Foo(object): + def __init__(self, value): + self.value = value + def __isub__(self, rhs): + self.value -= rhs + return self + target = Foo(3) + checker = DummyChecker() + proxy = before = self._makeOne(target, checker) + proxy -= 3 + self.assertIs(proxy, before) + self.assertEqual(target.value, 0) + self.assertEqual(checker._checked, '__isub__') + + def test___isub___w_checker_forbids(self): + from zope.security.interfaces import ForbiddenAttribute + target = 3 + checker = DummyChecker(ForbiddenAttribute) + proxy = self._makeOne(target, checker) + with self.assertRaises(ForbiddenAttribute): + proxy -= 3 + self.assertEqual(checker._checked, '__isub__') + + def test___imul___not_inplace_checker_allows(self): + target = 3 + checker = DummyChecker() + proxy = before = self._makeOne(target, checker) + proxy *= 3 + self.assertIsNot(proxy, before) + self.assertEqual(proxy, 9) + self.assertEqual(checker._checked, '__imul__') + + def test___imul___inplace_checker_allows(self): + class Foo(object): + def __init__(self, value): + self.value = value + def __imul__(self, rhs): + self.value *= rhs + return self + target = Foo(3) + checker = DummyChecker() + proxy = before = self._makeOne(target, checker) + proxy *= 3 + self.assertIs(proxy, before) + self.assertEqual(target.value, 9) + self.assertEqual(checker._checked, '__imul__') + + def test___imul___w_checker_forbids(self): + from zope.security.interfaces import ForbiddenAttribute + target = 3 + checker = DummyChecker(ForbiddenAttribute) + proxy = self._makeOne(target, checker) + with self.assertRaises(ForbiddenAttribute): + proxy *= 3 + 
self.assertEqual(checker._checked, '__imul__') + + def test___idiv___not_inplace_checker_allows(self): + target = 6 + checker = DummyChecker() + proxy = before = self._makeOne(target, checker) + proxy /= 3 + self.assertIsNot(proxy, before) + self.assertEqual(proxy, 2) + self.assertEqual(checker._checked, self.idiv) + + def test___idiv___inplace_checker_allows(self): + class Foo(object): + def __init__(self, value): + self.value = value + def __idiv__(self, rhs): + self.value /= rhs + return self + __itruediv__ = __idiv__ + target = Foo(6) + checker = DummyChecker() + proxy = before = self._makeOne(target, checker) + proxy /= 3 + self.assertIs(proxy, before) + self.assertEqual(target.value, 2) + self.assertEqual(checker._checked, self.idiv) + + def test___idiv___w_checker_forbids(self): + from zope.security.interfaces import ForbiddenAttribute + target = 6 + checker = DummyChecker(ForbiddenAttribute) + proxy = self._makeOne(target, checker) + with self.assertRaises(ForbiddenAttribute): + proxy /= 3 + self.assertEqual(checker._checked, self.idiv) + + def test___itruediv___not_inplace_checker_allows(self): + target = 6 + checker = DummyChecker() + proxy = before = self._makeOne(target, checker) + proxy /= 3 + self.assertIsNot(proxy, before) + self.assertEqual(proxy, 2) + self.assertEqual(checker._checked, self.itruediv) + + def test___itruediv___inplace_checker_allows(self): + class Foo(object): + def __init__(self, value): + self.value = value + def __itruediv__(self, rhs): + self.value /= rhs + return self + __idiv__ = __itruediv__ + target = Foo(6) + checker = DummyChecker() + proxy = before = self._makeOne(target, checker) + proxy /= 3 + self.assertIs(proxy, before) + self.assertEqual(target.value, 2) + self.assertEqual(checker._checked, self.itruediv) + + def test___itruediv___w_checker_forbids(self): + from zope.security.interfaces import ForbiddenAttribute + target = 6 + checker = DummyChecker(ForbiddenAttribute) + proxy = self._makeOne(target, checker) + with 
self.assertRaises(ForbiddenAttribute): + proxy /= 3 + self.assertEqual(checker._checked, self.itruediv) + + def test___ifloordiv___not_inplace_checker_allows(self): + target = 6 + checker = DummyChecker() + proxy = before = self._makeOne(target, checker) + proxy //= 3 + self.assertIsNot(proxy, before) + self.assertEqual(proxy, 2) + self.assertEqual(checker._checked, '__ifloordiv__') + + def test___ifloordiv___inplace_checker_allows(self): + class Foo(object): + def __init__(self, value): + self.value = value + def __ifloordiv__(self, rhs): + self.value //= rhs + return self + target = Foo(6) + checker = DummyChecker() + proxy = before = self._makeOne(target, checker) + proxy //= 3 + self.assertIs(proxy, before) + self.assertEqual(target.value, 2) + self.assertEqual(checker._checked, '__ifloordiv__') + + def test___ifloordiv___w_checker_forbids(self): + from zope.security.interfaces import ForbiddenAttribute + target = 6 + checker = DummyChecker(ForbiddenAttribute) + proxy = self._makeOne(target, checker) + with self.assertRaises(ForbiddenAttribute): + proxy //= 3 + self.assertEqual(checker._checked, '__ifloordiv__') + + def test___imod___not_inplace_checker_allows(self): + target = 6 + checker = DummyChecker() + proxy = before = self._makeOne(target, checker) + proxy %= 3 + self.assertIsNot(proxy, before) + self.assertEqual(proxy, 0) + self.assertEqual(checker._checked, '__imod__') + + def test___imod___inplace_checker_allows(self): + class Foo(object): + def __init__(self, value): + self.value = value + def __imod__(self, rhs): + self.value %= rhs + return self + target = Foo(6) + checker = DummyChecker() + proxy = before = self._makeOne(target, checker) + proxy %= 3 + self.assertIs(proxy, before) + self.assertEqual(target.value, 0) + self.assertEqual(checker._checked, '__imod__') + + def test___imod___w_checker_forbids(self): + from zope.security.interfaces import ForbiddenAttribute + target = 6 + checker = DummyChecker(ForbiddenAttribute) + proxy = 
self._makeOne(target, checker) + with self.assertRaises(ForbiddenAttribute): + proxy %= 3 + self.assertEqual(checker._checked, '__imod__') + + def test___ipow___not_inplace_checker_allows(self): + target = 2 + checker = DummyChecker() + proxy = before = self._makeOne(target, checker) + proxy **= 3 + self.assertIsNot(proxy, before) + self.assertEqual(proxy, 8) + self.assertEqual(checker._checked, '__ipow__') + + def test___ipow___inplace_checker_allows(self): + class Foo(object): + def __init__(self, value): + self.value = value + def __ipow__(self, rhs): + self.value **= rhs + return self + target = Foo(2) + checker = DummyChecker() + proxy = before = self._makeOne(target, checker) + proxy **= 3 + self.assertIs(proxy, before) + self.assertEqual(target.value, 8) + self.assertEqual(checker._checked, '__ipow__') + + def test___ipow___w_checker_forbids(self): + from zope.security.interfaces import ForbiddenAttribute + target = 2 + checker = DummyChecker(ForbiddenAttribute) + proxy = self._makeOne(target, checker) + with self.assertRaises(ForbiddenAttribute): + proxy **= 3 + self.assertEqual(checker._checked, '__ipow__') + + def test___ilshift___not_inplace_checker_allows(self): + target = 2 + checker = DummyChecker() + proxy = before = self._makeOne(target, checker) + proxy <<= 3 + self.assertIsNot(proxy, before) + self.assertEqual(proxy, 16) + self.assertEqual(checker._checked, '__ilshift__') + + def test___ilshift___inplace_checker_allows(self): + class Foo(object): + def __init__(self, value): + self.value = value + def __ilshift__(self, rhs): + self.value <<= rhs + return self + target = Foo(2) + checker = DummyChecker() + proxy = before = self._makeOne(target, checker) + proxy <<= 3 + self.assertIs(proxy, before) + self.assertEqual(target.value, 16) + self.assertEqual(checker._checked, '__ilshift__') + + def test___ilshift___w_checker_forbids(self): + from zope.security.interfaces import ForbiddenAttribute + target = 2 + checker = DummyChecker(ForbiddenAttribute) 
+ proxy = self._makeOne(target, checker) + with self.assertRaises(ForbiddenAttribute): + proxy <<= 3 + self.assertEqual(checker._checked, '__ilshift__') + + def test___irshift___not_inplace_checker_allows(self): + target = 16 + checker = DummyChecker() + proxy = before = self._makeOne(target, checker) + proxy >>= 3 + self.assertIsNot(proxy, before) + self.assertEqual(proxy, 2) + self.assertEqual(checker._checked, '__irshift__') + + def test___irshift___inplace_checker_allows(self): + class Foo(object): + def __init__(self, value): + self.value = value + def __irshift__(self, rhs): + self.value >>= rhs + return self + target = Foo(16) + checker = DummyChecker() + proxy = before = self._makeOne(target, checker) + proxy >>= 3 + self.assertIs(proxy, before) + self.assertEqual(target.value, 2) + self.assertEqual(checker._checked, '__irshift__') + + def test___irshift___w_checker_forbids(self): + from zope.security.interfaces import ForbiddenAttribute + target = 16 + checker = DummyChecker(ForbiddenAttribute) + proxy = self._makeOne(target, checker) + with self.assertRaises(ForbiddenAttribute): + proxy >>= 3 + self.assertEqual(checker._checked, '__irshift__') + + def test___iand___not_inplace_checker_allows(self): + target = 7 + checker = DummyChecker() + proxy = before = self._makeOne(target, checker) + proxy &= 3 + self.assertIsNot(proxy, before) + self.assertEqual(proxy, 3) + self.assertEqual(checker._checked, '__iand__') + + def test___iand___inplace_checker_allows(self): + class Foo(object): + def __init__(self, value): + self.value = value + def __iand__(self, rhs): + self.value &= rhs + return self + target = Foo(7) + checker = DummyChecker() + proxy = before = self._makeOne(target, checker) + proxy &= 3 + self.assertIs(proxy, before) + self.assertEqual(target.value, 3) + self.assertEqual(checker._checked, '__iand__') + + def test___iand___w_checker_forbids(self): + from zope.security.interfaces import ForbiddenAttribute + target = 7 + checker = 
DummyChecker(ForbiddenAttribute) + proxy = self._makeOne(target, checker) + with self.assertRaises(ForbiddenAttribute): + proxy &= 3 + self.assertEqual(checker._checked, '__iand__') + + def test___ixor___not_inplace_checker_allows(self): + target = 7 + checker = DummyChecker() + proxy = before = self._makeOne(target, checker) + proxy ^= 3 + self.assertIsNot(proxy, before) + self.assertEqual(checker._checked, '__ixor__') + self.assertEqual(proxy, 4) + + def test___ixor___inplace_checker_allows(self): + class Foo(object): + def __init__(self, value): + self.value = value + def __ixor__(self, rhs): + self.value ^= rhs + return self + target = Foo(7) + checker = DummyChecker() + proxy = before = self._makeOne(target, checker) + proxy ^= 3 + self.assertIs(proxy, before) + self.assertEqual(target.value, 4) + self.assertEqual(checker._checked, '__ixor__') + + def test___ixor___w_checker_forbids(self): + from zope.security.interfaces import ForbiddenAttribute + target = 7 + checker = DummyChecker(ForbiddenAttribute) + proxy = self._makeOne(target, checker) + + with self.assertRaises(ForbiddenAttribute): + proxy ^= 3 + + self.assertEqual(checker._checked, '__ixor__') + + def test___ior___not_inplace_checker_allows(self): + target = 6 + checker = DummyChecker() + proxy = before = self._makeOne(target, checker) + proxy |= 3 + self.assertIsNot(proxy, before) + self.assertEqual(proxy, 7) + self.assertEqual(checker._checked, '__ior__') + + def test___ior___inplace_checker_allows(self): + class Foo(object): + def __init__(self, value): + self.value = value + def __ior__(self, rhs): + self.value |= rhs + return self + target = Foo(6) + checker = DummyChecker() + proxy = before = self._makeOne(target, checker) + proxy |= 3 + self.assertIs(proxy, before) + self.assertEqual(target.value, 7) + self.assertEqual(checker._checked, '__ior__') + + def test___ior___w_checker_forbids(self): + from zope.security.interfaces import ForbiddenAttribute + target = 6 + checker = 
DummyChecker(ForbiddenAttribute) + proxy = self._makeOne(target, checker) + with self.assertRaises(ForbiddenAttribute): + proxy |= 3 + self.assertEqual(checker._checked, '__ior__') + + def test___len___w_checker_allows(self): + target = [0, 1, 2] + checker = DummyChecker() + proxy = self._makeOne(target, checker) + self.assertEqual(len(proxy), len(target)) + self.assertEqual(checker._checked, '__len__') + + def test___len___w_checker_forbids(self): + from zope.security.interfaces import ForbiddenAttribute + target = [0, 1, 2] + checker = DummyChecker(ForbiddenAttribute) + proxy = self._makeOne(target, checker) + self.assertRaises(ForbiddenAttribute, len, proxy) + self.assertEqual(checker._checked, '__len__') + + def test__length_hint_w_checker_allows(self): + target = iter([0, 1, 2]) + checker = DummyChecker() + proxy = self._makeOne(target, checker) + hint = object.__getattribute__(proxy, '__length_hint__') + self.assertEqual(3, hint()) + + def test__length_hint_dne(self): + target = object() + checker = DummyChecker() + proxy = self._makeOne(target, checker) + hint = object.__getattribute__(proxy, '__length_hint__') + self.assertEqual(NotImplemented, hint()) + + def test___contains___hit_w_checker_allows(self): + target = [0, 1, 2] + checker = DummyChecker() + proxy = self._makeOne(target, checker) + self.assertTrue(1 in proxy) + self.assertEqual(checker._checked, '__contains__') + + def test___contains___miss_w_checker_allows(self): + target = [0, 1, 2] + checker = DummyChecker() + proxy = self._makeOne(target, checker) + self.assertFalse(4 in proxy) + self.assertEqual(checker._checked, '__contains__') + + def test___contains___w_checker_forbids(self): + from zope.security.interfaces import ForbiddenAttribute + target = [0, 1, 2] + checker = DummyChecker(ForbiddenAttribute) + proxy = self._makeOne(target, checker) + self.assertRaises(ForbiddenAttribute, lambda: 0 in proxy) + self.assertEqual(checker._checked, '__contains__') + + def 
test___getitem___sequence_hit_w_checker_allows(self): + target = [0, 1, 2] + checker = DummyChecker() + proxy = self._makeOne(target, checker) + self.assertEqual(proxy[1], 1) + self.assertEqual(checker._checked, '__getitem__') + + def test___getitem___sequence_miss_w_checker_allows(self): + target = [0, 1, 2] + checker = DummyChecker() + proxy = self._makeOne(target, checker) + self.assertRaises(IndexError, lambda: proxy[4]) + self.assertEqual(checker._checked, '__getitem__') + + def test___getitem___sequence_w_checker_forbids(self): + from zope.security.interfaces import ForbiddenAttribute + target = [0, 1, 2] + checker = DummyChecker(ForbiddenAttribute) + proxy = self._makeOne(target, checker) + self.assertRaises(ForbiddenAttribute, lambda: proxy[0]) + self.assertEqual(checker._checked, '__getitem__') + + def test___setitem___sequence_hit_w_checker_allows(self): + target = [0, 1, 2] + checker = DummyChecker() + proxy = self._makeOne(target, checker) + proxy[1] = 7 + self.assertEqual(target[1], 7) + self.assertEqual(checker._checked, '__setitem__') + + def test___setitem___sequence_miss_w_checker_allows(self): + target = [0, 1, 2] + checker = DummyChecker() + proxy = self._makeOne(target, checker) + def _try(): + proxy[4] = 7 + self.assertRaises(IndexError, _try) + self.assertEqual(checker._checked, '__setitem__') + + def test___setitem___sequence_w_checker_forbids(self): + from zope.security.interfaces import ForbiddenAttribute + target = [0, 1, 2] + checker = DummyChecker(ForbiddenAttribute) + proxy = self._makeOne(target, checker) + def _try(): + proxy[4] = 7 + self.assertRaises(ForbiddenAttribute, _try) + self.assertEqual(checker._checked, '__setitem__') + + @_skip_if_not_Py2 + def test___getslice___w_checker_allows(self): + target = [0, 1, 2] + checker = DummyChecker() + proxy = self._makeOne(target, checker) + self.assertEqual(proxy[1:3], [1, 2]) + self.assertEqual(checker._checked, self.getslice) + + def test___getslice___error_propagates(self): + class 
Missing(Exception): + pass + class Get(object): + def __getitem__(self, x): + raise Missing('__getitem__') # pragma: no cover (only py3) + def __getslice__(self, start, stop): + raise Missing("__getslice__") + target = Get() + checker = DummyChecker() + proxy = self._makeOne(target, checker) + with self.assertRaisesRegexp(Missing, + self.getslice): + proxy[1:2] + + self.assertEqual(checker._checked, self.getslice) + + def test___getslice___dne_uses_getitem(self): + class Missing(Exception): + pass + class Get(object): + def __getitem__(self, x): + raise Missing('__getitem__') + + target = Get() + checker = DummyChecker() + proxy = self._makeOne(target, checker) + with self.assertRaisesRegexp(Missing, + '__getitem__'): + proxy[1:2] + + self.assertEqual(checker._checked, self.getslice) + + @_skip_if_not_Py2 + def test___getslice___w_checker_forbids(self): + from zope.security.interfaces import ForbiddenAttribute + target = [0, 1, 2] + checker = DummyChecker(ForbiddenAttribute) + proxy = self._makeOne(target, checker) + self.assertRaises(ForbiddenAttribute, lambda: proxy[0:2]) + self.assertEqual(checker._checked, '__getslice__') + + @_skip_if_not_Py2 + def test___setslice___w_checker_allows(self): + target = [0, 1, 2] + checker = DummyChecker() + proxy = self._makeOne(target, checker) + proxy[1:3] = [3, 4] + self.assertEqual(target, [0, 3, 4]) + self.assertEqual(checker._checked, '__setslice__') + + @_skip_if_not_Py2 + def test___setslice___w_checker_forbids(self): + from zope.security.interfaces import ForbiddenAttribute + target = [0, 1, 2] + checker = DummyChecker(ForbiddenAttribute) + proxy = self._makeOne(target, checker) + def _try(): + proxy[1:3] = [3, 4] + self.assertRaises(ForbiddenAttribute, _try) + self.assertEqual(checker._checked, '__setslice__') + + def test___setslice___error_propagates(self): + class Missing(Exception): + pass + class Set(object): + def __setitem__(self, k, v): + raise Missing('__setitem__') # pragma: no cover (only py3) + def 
__setslice__(self, start, stop, value): + raise Missing("__setslice__") + target = Set() + checker = DummyChecker() + proxy = self._makeOne(target, checker) + with self.assertRaisesRegexp(Missing, + self.setslice): + proxy[1:2] = 1 + + self.assertEqual(checker._checked, self.setslice) + + def test___setslice___dne_uses_setitem(self): + class Missing(Exception): + pass + class Set(object): + def __setitem__(self, k, v): + raise Missing('__setitem__') + + target = Set() + checker = DummyChecker() + proxy = self._makeOne(target, checker) + with self.assertRaisesRegexp(Missing, + '__setitem__'): + proxy[1:2] = 1 + + self.assertEqual(checker._checked, self.setslice) + + def test___getitem___mapping_hit_w_checker_allows(self): + target = {'a': 0, 'b': 1, 'c': 2} + checker = DummyChecker() + proxy = self._makeOne(target, checker) + self.assertEqual(proxy['b'], 1) + self.assertEqual(checker._checked, '__getitem__') + + def test___getitem___mapping_miss_w_checker_allows(self): + target = {'a': 0, 'b': 1, 'c': 2} + checker = DummyChecker() + proxy = self._makeOne(target, checker) + self.assertRaises(KeyError, lambda: proxy['d']) + self.assertEqual(checker._checked, '__getitem__') + + def test___getitem___mapping_w_checker_forbids(self): + from zope.security.interfaces import ForbiddenAttribute + target = {'a': 0, 'b': 1, 'c': 2} + checker = DummyChecker(ForbiddenAttribute) + proxy = self._makeOne(target, checker) + self.assertRaises(ForbiddenAttribute, lambda: proxy['b']) + self.assertEqual(checker._checked, '__getitem__') + + def test___setitem___mapping_hit_w_checker_allows(self): + target = {'a': 0, 'b': 1, 'c': 2} + checker = DummyChecker() + proxy = self._makeOne(target, checker) + proxy['a'] = 7 + self.assertEqual(target['a'], 7) + self.assertEqual(checker._checked, '__setitem__') + + def test___setitem___mapping_w_checker_forbids(self): + from zope.security.interfaces import ForbiddenAttribute + target = {'a': 0, 'b': 1, 'c': 2} + checker = 
DummyChecker(ForbiddenAttribute) + proxy = self._makeOne(target, checker) + def _try(): + proxy['a'] = 7 + self.assertRaises(ForbiddenAttribute, _try) + self.assertEqual(checker._checked, '__setitem__') + + binops = [ + "x+y", "x-y", "x*y", "x/y", "divmod(x, y)", "x**y", "x//y", + "x<>y", "x&y", "x|y", "x^y", + ] + + def test_binops(self): + from zope.security.proxy import removeSecurityProxy + checker = DummyChecker() + for expr in self.binops: + first = 1 + for x in [1, self._makeOne(1, checker)]: + for y in [2, self._makeOne(2, checker)]: + if first: + z = eval(expr) + first = 0 + else: + self.assertEqual(removeSecurityProxy(eval(expr)), z, + "x=%r; y=%r; expr=%r" % (x, y, expr)) + + + @_skip_if_not_Py2 + def test___unicode___allowed_by_default(self): + # https://github.com/zopefoundation/zope.security/issues/10 + class Foo(object): + def __unicode__(self): + return u'I am unicode' + + checker = object() # checker not consulted + target = Foo() + proxy = self._makeOne(target, checker) + self.assertEqual(unicode(target), u'I am unicode') + self.assertEqual(unicode(target), unicode(proxy)) + + @_skip_if_not_Py2 + def test___unicode___falls_through_to_str_by_default(self): + # https://github.com/zopefoundation/zope.security/issues/10 + class Foo(object): + def __str__(self): + return 'I am str' + + checker = object() # checker not consulted + target = Foo() + proxy = self._makeOne(target, checker) + self.assertEqual(unicode(target), u'I am str') + self.assertIsInstance(unicode(target), unicode) + self.assertEqual(unicode(target), unicode(proxy)) + self.assertIsInstance(unicode(proxy), unicode) + + @_skip_if_not_Py2 + def test___unicode___falls_through_to_str_even_if_str_not_allowed(self): + # https://github.com/zopefoundation/zope.security/issues/10 + # Note that this is inconsistent with str() and probably not a good + # idea overall, so this test is strictly a regression test. 
+ from zope.security.interfaces import ForbiddenAttribute + class Foo(object): + def __str__(self): + return 'I am str' + + target = Foo() + checker = DummyChecker(ForbiddenAttribute) + proxy = self._makeOne(target, checker) + self.assertEqual(unicode(target), u'I am str') + self.assertIsInstance(unicode(target), unicode) + + # Asking for the unicode of the proxy silently falls through + # to the str without any checks + self.assertEqual(unicode(target), unicode(proxy)) + + # And set str itself is checked and proxied + self.assertIn(">y", "x&y", "x|y", "x^y", + ] + + def test_binops(self): + from zope.security.proxy import removeSecurityProxy + P = self.c.proxy + for expr in self.binops: + first = 1 + for x in [1, P(1)]: + for y in [2, P(2)]: + if first: + z = eval(expr) + first = 0 + else: + self.assertEqual(removeSecurityProxy(eval(expr)), z, + "x=%r; y=%r; expr=%r" % (x, y, expr)) + self.shouldFail(lambda x, y: eval(expr), x, y) + + def test_inplace(self): + # TODO: should test all inplace operators... 
+ from zope.security.proxy import removeSecurityProxy + P = self.c.proxy + + pa = P(1) + pa += 2 + self.assertEqual(removeSecurityProxy(pa), 3) + + a = [1, 2, 3] + pa = qa = P(a) + pa += [4, 5, 6] + self.assertIs(pa, qa) + self.assertEqual(a, [1, 2, 3, 4, 5, 6]) + + def doit(): + pa = P(1) + pa += 2 + self.shouldFail(doit) + + pa = P(2) + pa **= 2 + self.assertEqual(removeSecurityProxy(pa), 4) + + def doit2(): + pa = P(2) + pa **= 2 + self.shouldFail(doit2) + + @_skip_if_not_Py2 + def test_coerce(self): + from zope.security.proxy import removeSecurityProxy + P = self.c.proxy + + x = P(1) + y = P(2) + a, b = coerce(x, y) + self.assertIs(a, x) + self.assertIs(b, y) + + x = P(1) + y = P(2.1) + a, b = coerce(x, y) + self.assertEqual(removeSecurityProxy(a), 1.0) + self.assertIs(b, y) + self.assertIs(type(removeSecurityProxy(a)), float) + self.assertIs(b, y) + + x = P(1.1) + y = P(2) + a, b = coerce(x, y) + self.assertIs(a, x) + self.assertEqual(removeSecurityProxy(b), 2.0) + self.assertIs(a, x) + self.assertIs(type(removeSecurityProxy(b)), float) + + x = P(1) + y = 2 + a, b = coerce(x, y) + self.assertIs(a, x) + self.assertIs(b, y) + + x = P(1) + y = 2.1 + a, b = coerce(x, y) + self.assertIs(type(removeSecurityProxy(a)), float) + self.assertIs(b, y) + + x = P(1.1) + y = 2 + a, b = coerce(x, y) + self.assertIs(a, x) + self.assertIs(type(removeSecurityProxy(b)), float) + + x = 1 + y = P(2) + a, b = coerce(x, y) + self.assertIs(a, x) + self.assertIs(b, y) + + x = 1.1 + y = P(2) + a, b = coerce(x, y) + self.assertIs(a, x) + self.assertIs(type(removeSecurityProxy(b)), float) + + x = 1 + y = P(2.1) + a, b = coerce(x, y) + self.assertIs(type(removeSecurityProxy(a)), float) + self.assertIs(b, y) + + +def test_using_mapping_slots_hack(): + """The security proxy will use mapping slots, on the checker to go faster + + If a checker implements normally, a checkers's check and + check_getattr methods are used to check operator and attribute + access: + + >>> from zope.security.proxy 
import ProxyFactory + >>> log = [] + >>> def dump(): + ... out = '\\n'.join(log) + ... del log[:] + ... return out + >>> class Checker(object): + ... def check(self, object, name): + ... log.append(('check %s' % name)) + ... def check_getattr(self, object, name): + ... log.append(('check_getattr %s' % name)) + ... def proxy(self, object): + ... return 1 + >>> def f(): + ... pass + >>> p = ProxyFactory(f, Checker()) + >>> p.__name__ + 1 + >>> dump() + 'check_getattr __name__' + >>> p() + 1 + >>> dump() + 'check __call__' + + But, if the checker has a __setitem__ method: + + >>> def __setitem__(self, object, name): + ... log.append(('__setitem__ %s' % name)) + >>> Checker.__setitem__ = __setitem__ + + It will be used rather than either check or check_getattr: + + >>> p.__name__ + 1 + >>> dump() + '__setitem__ __name__' + >>> p() + 1 + >>> dump() + '__setitem__ __call__' + + If a checker has a __getitem__ method: + + >>> def __getitem__(self, object): + ... return 2 + >>> Checker.__getitem__ = __getitem__ + + It will be used rather than it's proxy method: + + >>> p.__name__ + 2 + >>> dump() + '__setitem__ __name__' + >>> p() + 2 + >>> dump() + '__setitem__ __call__' + + """ + + +class LocationProxySecurityCheckerTests(unittest.TestCase): + + def test_LocationProxy_gets_a_security_checker_when_importing_z_security( + self): + # Regression test for a problem introduced in 3.8.1 and fixed in + # 3.8.3. For details see change log. 
+ import sys + from zope.location.location import LocationProxy + import zope.security + try: + from importlib import reload as _reload + except ImportError: + _reload = reload # Python 2 + + # This attribute is set when zope.security.decorator is imported, to + # show that it will be set too, if zope.security.proxy is imported + # we set it to a different value at first: + del LocationProxy.__Security_checker__ + self.assertFalse( + hasattr(LocationProxy, '__Security_checker__')) + # After deleting zope.security.decorator and reloading + # zope.security.proxy the attribute is set again: + del sys.modules["zope.security.decorator"] + _reload(zope.security) + self.assertTrue( + hasattr(LocationProxy, '__Security_checker__')) + + +def test_suite(): + return unittest.defaultTestLoader.loadTestsFromName(__name__) diff --git a/thesisenv/lib/python3.6/site-packages/zope/security/tests/test_simpleinteraction.py b/thesisenv/lib/python3.6/site-packages/zope/security/tests/test_simpleinteraction.py new file mode 100644 index 0000000..d5031e4 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/security/tests/test_simpleinteraction.py @@ -0,0 +1,80 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Unit tests for zope.security.simpleinteraction. 
+""" +import unittest + + +class RequestStub(object): + + def __init__(self, principal=None): + self.principal = principal + self.interaction = None + + +class TestInteraction(unittest.TestCase): + + def test(self): + from zope.interface.verify import verifyObject + from zope.security.interfaces import IInteraction + from zope.security.simplepolicies import ParanoidSecurityPolicy + interaction = ParanoidSecurityPolicy() + verifyObject(IInteraction, interaction) + + def test_add(self): + from zope.security.simplepolicies import ParanoidSecurityPolicy + rq = RequestStub() + interaction = ParanoidSecurityPolicy() + interaction.add(rq) + self.assertTrue(rq in interaction.participations) + self.assertTrue(rq.interaction is interaction) + + # rq already added + self.assertRaises(ValueError, interaction.add, rq) + + interaction2 = ParanoidSecurityPolicy() + self.assertRaises(ValueError, interaction2.add, rq) + + def test_remove(self): + from zope.security.simplepolicies import ParanoidSecurityPolicy + rq = RequestStub() + interaction = ParanoidSecurityPolicy() + + self.assertRaises(ValueError, interaction.remove, rq) + + interaction.add(rq) + + interaction.remove(rq) + self.assertTrue(rq not in interaction.participations) + self.assertTrue(rq.interaction is None) + + def testCreateInteraction(self): + from zope.interface.verify import verifyObject + from zope.security.interfaces import IInteraction + from zope.security.simplepolicies import ParanoidSecurityPolicy + i1 = ParanoidSecurityPolicy() + verifyObject(IInteraction, i1) + self.assertEqual(list(i1.participations), []) + + user = object() + request = RequestStub(user) + i2 = ParanoidSecurityPolicy(request) + verifyObject(IInteraction, i2) + self.assertEqual(list(i2.participations), [request]) + + +def test_suite(): + return unittest.TestSuite(( + unittest.makeSuite(TestInteraction), + )) diff --git a/thesisenv/lib/python3.6/site-packages/zope/security/tests/test_simplepolicies.py 
b/thesisenv/lib/python3.6/site-packages/zope/security/tests/test_simplepolicies.py new file mode 100644 index 0000000..ce5c483 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/security/tests/test_simplepolicies.py @@ -0,0 +1,129 @@ +############################################################################## +# +# Copyright (c) 2013 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +import unittest + + +class ConformsToIInteraction(object): + + def _getTargetClass(self): + raise NotImplementedError("Subclass responsibility") + + def _makeOne(self, *participations): + return self._getTargetClass()(*participations) + + def test_class_conforms_to_IInteraction(self): + from zope.interface.verify import verifyClass + from zope.security.interfaces import IInteraction + verifyClass(IInteraction, self._getTargetClass()) + + def test_instance_conforms_to_IInteraction(self): + from zope.interface.verify import verifyObject + from zope.security.interfaces import IInteraction + verifyObject(IInteraction, self._makeOne()) + + +class ParanoidSecurityPolicyTests(unittest.TestCase, + ConformsToIInteraction, + ): + + def _getTargetClass(self): + from zope.security.simplepolicies import ParanoidSecurityPolicy + return ParanoidSecurityPolicy + + def test_ctor_no_participations(self): + policy = self._makeOne() + self.assertEqual(policy.participations, []) + + def test_ctor_w_participations(self): + class Participation(object): + interaction = None + p1, p2, p3 = Participation(), 
Participation(), Participation() + policy = self._makeOne(p1, p2, p3) + self.assertEqual(policy.participations, [p1, p2, p3]) + self.assertTrue(p1.interaction is policy) + self.assertTrue(p2.interaction is policy) + self.assertTrue(p3.interaction is policy) + + def test_add_w_foreign_participation(self): + class Participation(object): + interaction = object() + policy = self._makeOne() + self.assertRaises(ValueError, policy.add, Participation()) + + def test_remove_w_foreign_participation(self): + class Participation(object): + interaction = object() + policy = self._makeOne() + self.assertRaises(ValueError, policy.remove, Participation()) + + def test_remove(self): + class Participation(object): + interaction = None + p1, p2, p3 = Participation(), Participation(), Participation() + policy = self._makeOne(p1, p2, p3) + policy.remove(p2) + + self.assertEqual(policy.participations, [p1, p3]) + self.assertTrue(p1.interaction is policy) + self.assertTrue(p2.interaction is None) + self.assertTrue(p3.interaction is policy) + + def test_checkPermission_w_public(self): + from zope.security.checker import CheckerPublic + policy = self._makeOne() + target = object() + self.assertTrue(policy.checkPermission(CheckerPublic, target)) + + def test_checkPermission_w_non_public_only_system_user(self): + from zope.security._definitions import system_user + class Participation(object): + interaction = None + principal = system_user + policy = self._makeOne(Participation()) + permission = object() + target = object() + self.assertTrue(policy.checkPermission(permission, target)) + + def test_checkPermission_w_non_public_other_user(self): + class Participation(object): + interaction = None + principal = object() + policy = self._makeOne(Participation()) + permission = object() + target = object() + self.assertFalse(policy.checkPermission(permission, target)) + + def test_checkPermission_w_no_participations(self): + # The permission and object don't matter: if there are no + # 
participations, access is allowed. + policy = self._makeOne() + self.assertTrue(policy.checkPermission(None, None)) + self.assertTrue(policy.checkPermission(self, self)) + +class PermissiveSecurityPolicyTests(unittest.TestCase, + ConformsToIInteraction): + + def _getTargetClass(self): + from zope.security.simplepolicies import PermissiveSecurityPolicy + return PermissiveSecurityPolicy + + def test_checkPermission_w_public(self): + policy = self._makeOne() + permission = object() + target = object() + self.assertTrue(policy.checkPermission(permission, target)) + + +def test_suite(): + return unittest.defaultTestLoader.loadTestsFromName(__name__) diff --git a/thesisenv/lib/python3.6/site-packages/zope/security/tests/test_testing.py b/thesisenv/lib/python3.6/site-packages/zope/security/tests/test_testing.py new file mode 100644 index 0000000..42eebd0 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/security/tests/test_testing.py @@ -0,0 +1,75 @@ +############################################################################# +# +# Copyright (c) 2011 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. 
+# +############################################################################## +import unittest + +from zope.testing.cleanup import CleanUp + +from zope.security import testing +from zope.security.interfaces import PUBLIC_PERMISSION_NAME as zope_Public + +class TestTestingFunctions(CleanUp, + unittest.TestCase): + + def test_create_interaction_should_return_principal(self): + from zope.security.management import getInteraction + + principal = testing.create_interaction( + 'foo', groups=['bar'], description='desc') + ix = getInteraction() + participation = ix.participations[0] + self.assertEqual('foo', participation.principal.id) + self.assertEqual(principal.groups, participation.principal.groups) + self.assertEqual('desc', participation.principal.description) + + def test_usable_as_contextmanager(self): + from zope.security.management import getInteraction + from zope.security.management import queryInteraction + + with testing.interaction('foo'): + ix = getInteraction() + participation = ix.participations[0] + self.assertEqual('foo', participation.principal.id) + # Nesting doesn't change anything + with testing.interaction('baz'): + ix = getInteraction() + participation = ix.participations[0] + self.assertEqual('foo', participation.principal.id) + + self.assertFalse(queryInteraction()) + + def test_contextmanager_ends_interaction_on_exception(self): + from zope.security.management import queryInteraction + class MyError(Exception): + pass + + with self.assertRaises(MyError): + with testing.interaction('foo'): + raise MyError() + + self.assertFalse(queryInteraction()) + + + def test_addCheckerPublic(self): + from zope import component + from zope.security.interfaces import IPermission + + perm = testing.addCheckerPublic() + utility = component.getUtility(IPermission, name=zope_Public) + self.assertIs(perm, utility) + + + + +def test_suite(): + return unittest.defaultTestLoader.loadTestsFromName(__name__) diff --git 
a/thesisenv/lib/python3.6/site-packages/zope/security/tests/test_zcml.py b/thesisenv/lib/python3.6/site-packages/zope/security/tests/test_zcml.py new file mode 100644 index 0000000..9fb1cc4 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/security/tests/test_zcml.py @@ -0,0 +1,199 @@ +############################################################################## +# +# Copyright (c) 2013 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +import unittest +from zope.security.interfaces import PUBLIC_PERMISSION_NAME as zope_Public + +class ConformsToIFromUnicode(object): + + def test_class_conforms_to_IFromUnicode(self): + from zope.interface.verify import verifyClass + from zope.schema.interfaces import IFromUnicode + verifyClass(IFromUnicode, self._getTargetClass()) + + def test_instance_conforms_to_IFromUnicode(self): + from zope.interface.verify import verifyObject + from zope.schema.interfaces import IFromUnicode + verifyObject(IFromUnicode, self._makeOne()) + + +class PermissionTests(unittest.TestCase, + ConformsToIFromUnicode, + ): + + def _getTargetClass(self): + from zope.security.zcml import Permission + return Permission + + def _makeOne(self, context=None): + if context is None: + context = DummyZCMLContext() + permission = self._getTargetClass()() + permission.context = context + return permission + + def test_fromUnicode_miss(self): + permission = self._makeOne() + self.assertEqual(permission.fromUnicode('nonesuch.permission'), + 'nonesuch.permission') + + 
def test_fromUnicode_hit(self): + permission = self._makeOne() + p_obj = object() + permission.context.permission_mapping = {'extant.permission': p_obj} + self.assertTrue(permission.fromUnicode('extant.permission') is p_obj) + + def test__validate_w_public(self): + context = DummyZCMLContext() + permission = self._makeOne(context) + permission._validate(zope_Public) + self.assertEqual(len(context._actions), 0) + + def test__validate_w_non_public(self): + from zope.security.permission import checkPermission + context = DummyZCMLContext() + permission = self._makeOne(context) + permission._validate('a.permission') + self.assertEqual(len(context._actions), 1) + self.assertEqual(context._actions[0]['discriminator'], None) + self.assertEqual(context._actions[0]['callable'], checkPermission) + self.assertEqual(context._actions[0]['args'], + (None, 'a.permission')) + + +class Test_securityPolicy(unittest.TestCase): + + def _callFUT(self, _context, component): + from zope.security.zcml import securityPolicy + return securityPolicy(_context, component) + + def test_it(self): + from zope.security.management import setSecurityPolicy + context = DummyZCMLContext() + component = object() + self._callFUT(context, component) + self.assertEqual(len(context._actions), 1) + self.assertEqual(context._actions[0]['discriminator'], 'defaultPolicy') + self.assertEqual(context._actions[0]['callable'], setSecurityPolicy) + self.assertEqual(context._actions[0]['args'], (component,)) + + +class Test_permission(unittest.TestCase): + + def _callFUT(self, _context, id, title, description=None): + from zope.security.zcml import permission + if description is None: + return permission(_context, id, title) + return permission(_context, id, title, description) + + def test_wo_description(self): + from zope.component.interface import provideInterface + from zope.component.zcml import handler + from zope.security.interfaces import IPermission + context = DummyZCMLContext() + context.info = 'INFO' + 
self._callFUT(context, 'a.permission', 'TITLE') + self.assertEqual(len(context._actions), 2) + self.assertEqual(context._actions[0]['discriminator'], + ('utility', IPermission, 'a.permission')) + self.assertEqual(context._actions[0]['callable'], handler) + args = context._actions[0]['args'] + self.assertEqual(args[0], 'registerUtility') + permission = args[1] + self.assertEqual(permission.id, 'a.permission') + self.assertEqual(permission.title, 'TITLE') + self.assertEqual(permission.description, '') + self.assertTrue(context._actions[1]['discriminator'] is None) + self.assertTrue(context._actions[1]['callable'] is provideInterface) + self.assertEqual(context._actions[1]['args'], ('', IPermission)) + + def test_w_description(self): + from zope.component.interface import provideInterface + from zope.component.zcml import handler + from zope.security.interfaces import IPermission + context = DummyZCMLContext() + context.info = 'INFO' + self._callFUT(context, 'a.permission', 'TITLE', 'DESCRIPTION') + self.assertEqual(len(context._actions), 2) + self.assertEqual(context._actions[0]['discriminator'], + ('utility', IPermission, 'a.permission')) + self.assertEqual(context._actions[0]['callable'], handler) + args = context._actions[0]['args'] + self.assertEqual(args[0], 'registerUtility') + permission = args[1] + self.assertEqual(permission.id, 'a.permission') + self.assertEqual(permission.title, 'TITLE') + self.assertEqual(permission.description, 'DESCRIPTION') + self.assertTrue(context._actions[1]['discriminator'] is None) + self.assertTrue(context._actions[1]['callable'] is provideInterface) + self.assertEqual(context._actions[1]['args'], ('', IPermission)) + + +class Test_redefinePermission(unittest.TestCase): + + def _callFUT(self, _context, from_, to): + from zope.security.zcml import redefinePermission + return redefinePermission(_context, from_, to) + + def test_wo_existing_mapping(self): + z_context = DummyZCMLContext() + class Context(object): + pass + context = 
z_context.context = Context() + after = object() + self._callFUT(z_context, 'before.permission', after) + self.assertTrue(context.permission_mapping['before.permission'] + is after) + + def test_w_existing_mapping_wo_existing_key(self): + z_context = DummyZCMLContext() + class Context(object): + pass + context = z_context.context = Context() + mapping = context.permission_mapping = {} + after = object() + self._callFUT(z_context, 'before.permission', after) + self.assertTrue(context.permission_mapping is mapping) + self.assertTrue(context.permission_mapping['before.permission'] + is after) + + def test_w_existing_mapping_w_existing_key(self): + z_context = DummyZCMLContext() + class Context(object): + pass + context = z_context.context = Context() + mapping = context.permission_mapping = {} + before = mapping['before.permission'] = object() + after = object() + self._callFUT(z_context, 'before.permission', after) + self.assertTrue(context.permission_mapping is mapping) + self.assertTrue(context.permission_mapping['before.permission'] + is after) + + +class DummyZCMLContext(object): + + def __init__(self): + self._actions = [] + + def action(self, **kw): + self._actions.append(kw) + + +def test_suite(): + return unittest.TestSuite(( + unittest.makeSuite(PermissionTests), + unittest.makeSuite(Test_securityPolicy), + unittest.makeSuite(Test_permission), + unittest.makeSuite(Test_redefinePermission), + )) diff --git a/thesisenv/lib/python3.6/site-packages/zope/security/tests/test_zcml_functest.py b/thesisenv/lib/python3.6/site-packages/zope/security/tests/test_zcml_functest.py new file mode 100644 index 0000000..eee6d57 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/security/tests/test_zcml_functest.py @@ -0,0 +1,608 @@ +############################################################################## +# +# Copyright (c) 2001, 2002, 2003, 2012 Zope Foundation and Contributors. +# All Rights Reserved. 
+# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Directives Tests +""" +import unittest +import io + + +def configfile(s): + return io.StringIO(u""" + %s + + """ % s) + + +class TestClassDirective(unittest.TestCase): + + def setUp(self): + from zope.security.tests.exampleclass import ExampleClass + try: + del ExampleClass.__implements__ + except AttributeError: + pass + from zope.component.testing import setUp + setUp() + + def tearDown(self): + from zope.security.tests.exampleclass import ExampleClass + try: + del ExampleClass.__implements__ + except AttributeError: + pass + + from zope.component.testing import tearDown + tearDown() + + def _meta(self): + from zope.configuration.xmlconfig import XMLConfig + import zope.security + XMLConfig('meta.zcml', zope.security)() + + + def testEmptyDirective(self): + from zope.configuration.xmlconfig import xmlconfig + self._meta() + f = configfile(""" + + + """) + xmlconfig(f) + + + def testImplements(self): + from zope.component.interface import queryInterface + from zope.configuration.xmlconfig import xmlconfig + from zope.security.tests.exampleclass import ExampleClass + from zope.security.tests.exampleclass import IExample + self._meta() + self.assertEqual(queryInterface( + "zope.security.tests.exampleclass.IExample"), None) + + f = configfile(""" + + + + """) + xmlconfig(f) + self.assertTrue(IExample.implementedBy(ExampleClass)) + + self.assertEqual(queryInterface( + "zope.security.tests.exampleclass.IExample"), IExample) + + + def testMulImplements(self): + from zope.component.interface 
import queryInterface + from zope.configuration.xmlconfig import xmlconfig + from zope.security.tests.exampleclass import ExampleClass + from zope.security.tests.exampleclass import IExample + from zope.security.tests.exampleclass import IExample2 + self._meta() + self.assertEqual(queryInterface( + "zope.security.tests.exampleclass.IExample"), None) + self.assertEqual(queryInterface( + "zope.security.tests.exampleclass.IExample2"), None) + + f = configfile(""" + + + + """) + xmlconfig(f) + self.assertTrue(IExample.implementedBy(ExampleClass)) + self.assertTrue(IExample2.implementedBy(ExampleClass)) + + self.assertEqual(queryInterface( + "zope.security.tests.exampleclass.IExample"), IExample) + self.assertEqual(queryInterface( + "zope.security.tests.exampleclass.IExample2"), + IExample2) + + def testRequire(self): + from zope.configuration.xmlconfig import xmlconfig + self._meta() + f = configfile(""" + + + + + """) + xmlconfig(f) + + def testAllow(self): + from zope.configuration.xmlconfig import xmlconfig + self._meta() + f = configfile(""" + + + + """) + xmlconfig(f) + + def testMimic(self): + from zope.configuration.xmlconfig import xmlconfig + self._meta() + f = configfile(""" + + + + """) + xmlconfig(f) + + +class TestFactorySubdirective(unittest.TestCase): + + def setUp(self): + from zope.component.testing import setUp + setUp() + + def tearDown(self): + from zope.component.testing import tearDown + tearDown() + + def _meta(self): + from zope.configuration.xmlconfig import XMLConfig + import zope.security + XMLConfig('meta.zcml', zope.security)() + + + def testFactory(self): + from zope.component import getUtility + from zope.component.interfaces import IFactory + from zope.configuration.xmlconfig import xmlconfig + self._meta() + f = configfile(""" + + + + + + """) + xmlconfig(f) + factory = getUtility(IFactory, 'test.Example') + self.assertEqual(factory.title, "Example content") + self.assertEqual(factory.description, "Example description") + + def 
testFactoryNoId(self): + from zope.component import getUtility + from zope.component.interfaces import IFactory + from zope.component.interfaces import ComponentLookupError + from zope.configuration.xmlconfig import xmlconfig + self._meta() + f = configfile(""" + + + + + + """) + xmlconfig(f) + self.assertRaises(ComponentLookupError, getUtility, + IFactory, 'Example') + factory = getUtility( + IFactory, 'zope.security.tests.exampleclass.ExampleClass') + self.assertEqual(factory.title, "Example content") + self.assertEqual(factory.description, "Example description") + + + def testFactoryPublicPermission(self): + from zope.component import getUtility + from zope.component.interfaces import IFactory + from zope.configuration.xmlconfig import xmlconfig + self._meta() + f = configfile(""" + + + + """) + xmlconfig(f) + factory = getUtility(IFactory, 'test.Example') + self.assertTrue(hasattr(factory, '__Security_checker__')) + + +template = """ + %s + """ + + +class TestFactoryDirective(unittest.TestCase): + + def setUp(self): + from zope.component.testing import setUp + setUp() + + def tearDown(self): + from zope.component.testing import tearDown + tearDown() + + def meta(self): + import zope.security + from zope.configuration.xmlconfig import XMLConfig + XMLConfig('meta.zcml', zope.security)() + + + def testFactory(self): + from zope.component import createObject + from zope.configuration.xmlconfig import xmlconfig + from zope.security import proxy + from zope.security.tests import exampleclass + self.meta() + f = configfile(''' + + + +''') + xmlconfig(f) + obj = createObject('test.Example') + self.assertTrue(proxy.isinstance(obj, exampleclass.ExampleClass)) + + + +def _pfx(name): + from zope.security.tests import module + return module.__name__ + '.' 
+ name + +def defineDirectives(): + from zope.configuration.xmlconfig import XMLConfig + from zope.configuration.xmlconfig import xmlconfig + import zope.security + XMLConfig('meta.zcml', zope.security)() + xmlconfig(io.StringIO(u""" + + + """)) + +NOTSET = () + +P1 = "zope.Extravagant" +P2 = "zope.Paltry" + +class TestRequireDirective(unittest.TestCase): + + def setUp(self): + from zope.interface import implementer + from zope.security.tests import module + from zope.component.testing import setUp + setUp() + defineDirectives() + + class B(object): + def m1(self): + raise AssertionError("Never called") + def m2(self): + raise AssertionError("Never called") + + @implementer(module.I) + class C(B): + def m3(self): + raise AssertionError("Never called") + def m4(self): + raise AssertionError("Never called") + + module.test_base = B + module.test_class = C + module.test_instance = C() + self.assertState() + + def tearDown(self): + from zope.security.tests import module + module.test_class = None + from zope.component.testing import tearDown + tearDown() + + def assertState(self, m1P=NOTSET, m2P=NOTSET, m3P=NOTSET): + #Verify that class, instance, and methods have expected permissions + from zope.security.checker import selectChecker + from zope.security.tests import module + checker = selectChecker(module.test_instance) + self.assertEqual(checker.permission_id('m1'), (m1P or None)) + self.assertEqual(checker.permission_id('m2'), (m2P or None)) + self.assertEqual(checker.permission_id('m3'), (m3P or None)) + + def assertDeclaration(self, declaration, **state): + from zope.security.tests import module + apply_declaration(module.template_bracket % declaration) + self.assertState(**state) + + # "testSimple*" exercises tags that do NOT have children. This mode + # inherently sets the instances as well as the class attributes. 
+ + def test_wo_any_attributes(self): + from zope.configuration.exceptions import ConfigurationError + from zope.security.tests import module + declaration = (''' + + ''' + % (_pfx("test_class"), P1)) + self.assertRaises(ConfigurationError, + apply_declaration, + module.template_bracket % declaration) + + # "testSimple*" exercises tags that do NOT have children. This mode + # inherently sets the instances as well as the class attributes. + + def testSimpleMethodsPlural(self): + declaration = (''' + + ''' + % (_pfx("test_class"), P1)) + self.assertDeclaration(declaration, m1P=P1, m3P=P1) + + def test_set_attributes(self): + from zope.security.checker import selectChecker + from zope.security.tests import module + declaration = (''' + + ''' + % (_pfx("test_class"), P1)) + apply_declaration(module.template_bracket % declaration) + checker = selectChecker(module.test_instance) + self.assertEqual(checker.setattr_permission_id('m1'), P1) + self.assertEqual(checker.setattr_permission_id('m2'), None) + self.assertEqual(checker.setattr_permission_id('m3'), P1) + + def test_set_schema(self): + from zope.component.interface import queryInterface + from zope.security.checker import selectChecker + from zope.security.tests import module + self.assertEqual(queryInterface(_pfx("S")), None) + + declaration = (''' + + ''' + % (_pfx("test_class"), P1, _pfx("S"))) + apply_declaration(module.template_bracket % declaration) + + self.assertEqual(queryInterface(_pfx("S")), module.S) + + + checker = selectChecker(module.test_instance) + self.assertEqual(checker.setattr_permission_id('m1'), None) + self.assertEqual(checker.setattr_permission_id('m2'), None) + self.assertEqual(checker.setattr_permission_id('m3'), None) + self.assertEqual(checker.setattr_permission_id('foo'), P1) + self.assertEqual(checker.setattr_permission_id('bar'), P1) + self.assertEqual(checker.setattr_permission_id('baro'), None) + + def test_multiple_set_schema(self): + from zope.component.interface import 
queryInterface + from zope.security.checker import selectChecker + from zope.security.tests import module + self.assertEqual(queryInterface(_pfx("S")), None) + self.assertEqual(queryInterface(_pfx("S2")), None) + + declaration = (''' + + ''' + % (_pfx("test_class"), P1, _pfx("S"), _pfx("S2"))) + apply_declaration(module.template_bracket % declaration) + + self.assertEqual(queryInterface(_pfx("S")), module.S) + self.assertEqual(queryInterface(_pfx("S2")), module.S2) + + + checker = selectChecker(module.test_instance) + self.assertEqual(checker.setattr_permission_id('m1'), None) + self.assertEqual(checker.setattr_permission_id('m2'), None) + self.assertEqual(checker.setattr_permission_id('m3'), None) + self.assertEqual(checker.setattr_permission_id('foo'), P1) + self.assertEqual(checker.setattr_permission_id('bar'), P1) + self.assertEqual(checker.setattr_permission_id('foo2'), P1) + self.assertEqual(checker.setattr_permission_id('bar2'), P1) + self.assertEqual(checker.setattr_permission_id('baro'), None) + + def testSimpleInterface(self): + from zope.component.interface import queryInterface + from zope.security.tests import module + self.assertEqual(queryInterface(_pfx("I")), None) + + declaration = (''' + + ''' + % (_pfx("test_class"), P1, _pfx("I"))) + # m1 and m2 are in the interface, so should be set, and m3 should not: + self.assertDeclaration(declaration, m1P=P1, m2P=P1) + + # Make sure we know about the interfaces + self.assertEqual(queryInterface(_pfx("I")), module.I) + + + def testMultipleInterface(self): + from zope.component.interface import queryInterface + from zope.security.tests import module + self.assertEqual(queryInterface(_pfx("I3")), None) + self.assertEqual(queryInterface(_pfx("I4")), None) + + declaration = (''' + + ''' + % (_pfx("test_class"), P1, _pfx("I3"), _pfx("I4"))) + self.assertDeclaration(declaration, m3P=P1, m2P=P1) + + # Make sure we know about the interfaces + self.assertEqual(queryInterface(_pfx("I3")), module.I3) + 
self.assertEqual(queryInterface(_pfx("I4")), module.I4) + + # "testComposite*" exercises tags that DO have children. + # "testComposite*TopPerm" exercises tags with permission in containing tag. + # "testComposite*ElementPerm" exercises tags w/permission in children. + + + def testCompositeNoPerm(self): + # Establish rejection of declarations lacking a permission spec. + from zope.configuration.xmlconfig import ZopeXMLConfigurationError + declaration = (''' + + ''' + % (_pfx("test_class"))) + self.assertRaises(ZopeXMLConfigurationError, + self.assertDeclaration, + declaration) + + + def testCompositeMethodsPluralElementPerm(self): + declaration = (''' + + ''' + % (_pfx("test_class"), P1)) + self.assertDeclaration(declaration, + m1P=P1, m3P=P1) + + + def testCompositeInterfaceTopPerm(self): + declaration = (''' + + ''' + % (_pfx("test_class"), P1, _pfx("I"))) + self.assertDeclaration(declaration, + m1P=P1, m2P=P1) + + + def testSubInterfaces(self): + declaration = (''' + + ''' + % (_pfx("test_class"), P1, _pfx("I2"))) + # m1 and m2 are in the interface, so should be set, and m3 should not: + self.assertDeclaration(declaration, m1P=P1, m2P=P1) + + + def testMimicOnly(self): + declaration = (''' + + + + + + ''' % (_pfx("test_base"), P1, + _pfx("test_class"), _pfx("test_base"))) + # m1 and m2 are in the interface, so should be set, and m3 should not: + self.assertDeclaration(declaration, + m1P=P1, m2P=P1) + + + def testMimicAsDefault(self): + declaration = (''' + + + + + + + ''' % (_pfx("test_base"), P1, + _pfx("test_class"), _pfx("test_base"), P2)) + + # m1 and m2 are in the interface, so should be set, and m3 should not: + self.assertDeclaration(declaration, + m1P=P1, m2P=P2, m3P=P2) + + +def apply_declaration(declaration): + '''Apply the xmlconfig machinery.''' + from zope.configuration.xmlconfig import xmlconfig + if isinstance(declaration, bytes): + declaration = declaration.decode("utf-8") + return xmlconfig(io.StringIO(declaration)) + + + +def make_dummy(): + 
from zope.interface import Interface + import zope.security.zcml + global IDummy + class IDummy(Interface): + perm = zope.security.zcml.Permission(title=u'') + + +perms = [] + +def dummy(context_, perm): + global perms + perms.append(perm) + + +class DirectivesTest(unittest.TestCase): + + def setUp(self): + from zope.component.testing import setUp + setUp() + + def tearDown(self): + del perms[:] + from zope.component.testing import tearDown + tearDown() + + def testRedefinePermission(self): + from zope.configuration import xmlconfig + from zope.security import tests + make_dummy() + xmlconfig.file("redefineperms.zcml", tests) + self.assertEqual(perms, ['zope.Security']) + + +def test_suite(): + return unittest.defaultTestLoader.loadTestsFromName(__name__) diff --git a/thesisenv/lib/python3.6/site-packages/zope/security/zcml.py b/thesisenv/lib/python3.6/site-packages/zope/security/zcml.py new file mode 100644 index 0000000..52bcf91 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/security/zcml.py @@ -0,0 +1,119 @@ +############################################################################## +# +# Copyright (c) 2004 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Security related configuration fields. 
+""" +__docformat__ = 'restructuredtext' + +from zope.configuration.fields import GlobalObject +from zope.configuration.fields import MessageID +from zope.interface import Interface +from zope.interface import implementer +from zope.schema import Id +from zope.schema.interfaces import IFromUnicode + +from zope.security.permission import checkPermission +from zope.security.management import setSecurityPolicy +from zope.security.interfaces import PUBLIC_PERMISSION_NAME as zope_Public + +@implementer(IFromUnicode) +class Permission(Id): + r"""This field describes a permission. + """ + + def fromUnicode(self, u): + u = super(Permission, self).fromUnicode(u) + + map = getattr(self.context, 'permission_mapping', {}) + return map.get(u, u) + + def _validate(self, value): + super(Permission, self)._validate(value) + + if value != zope_Public: + self.context.action( + discriminator=None, + callable=checkPermission, + args=(None, value), + + # Delay execution till end. This is an + # optimization. We don't want to intersperse utility + # lookup, done when checking permissions, with utility + # definitions. Utility lookup is expensive after + # utility definition, as extensive caches have to be + # rebuilt. 
+ order=9999999, + ) + + +class ISecurityPolicyDirective(Interface): + """Defines the security policy that will be used for Zope.""" + + component = GlobalObject( + title=u"Component", + description=u"Pointer to the object that will handle the security.", + required=True) + +def securityPolicy(_context, component): + _context.action( + discriminator='defaultPolicy', + callable=setSecurityPolicy, + args=(component,) + ) + +class IPermissionDirective(Interface): + """Define a new security object.""" + + id = Id( + title=u"ID", + description=u"ID as which this object will be known and used.", + required=True) + + title = MessageID( + title=u"Title", + description=u"Provides a title for the object.", + required=True) + + description = MessageID( + title=u"Description", + description=u"Provides a description for the object.", + required=False) + +def permission(_context, id, title, description=u''): + from zope.security.interfaces import IPermission + from zope.security.permission import Permission + from zope.component.zcml import utility + permission = Permission(id, title, description) + utility(_context, IPermission, permission, name=id) + +class IRedefinePermission(Interface): + """Define a permission to replace another permission.""" + + from_ = Permission( + title=u"Original permission", + description=u"Original permission ID to redefine.", + required=True) + + to = Permission( + title=u"Substituted permission", + description=u"Substituted permission ID.", + required=True) + +def redefinePermission(_context, from_, to): + _context = _context.context + + # check if context has any permission mappings yet + if not hasattr(_context, 'permission_mapping'): + _context.permission_mapping = {} + + _context.permission_mapping[from_] = to diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/__init__.py b/thesisenv/lib/python3.6/site-packages/zope/tal/__init__.py new file mode 100644 index 0000000..b711d36 --- /dev/null +++ 
b/thesisenv/lib/python3.6/site-packages/zope/tal/__init__.py @@ -0,0 +1,2 @@ +# +# This file is necessary to make this directory a package. diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/benchmark/__init__.py b/thesisenv/lib/python3.6/site-packages/zope/tal/benchmark/__init__.py new file mode 100644 index 0000000..b711d36 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/benchmark/__init__.py @@ -0,0 +1,2 @@ +# +# This file is necessary to make this directory a package. diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/benchmark/dtml01.html b/thesisenv/lib/python3.6/site-packages/zope/tal/benchmark/dtml01.html new file mode 100644 index 0000000..180b47c --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/benchmark/dtml01.html @@ -0,0 +1 @@ +baseline diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/benchmark/dtml02.html b/thesisenv/lib/python3.6/site-packages/zope/tal/benchmark/dtml02.html new file mode 100644 index 0000000..33d978d --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/benchmark/dtml02.html @@ -0,0 +1,100 @@ + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. 
+ A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. 
+ A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. 
diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/benchmark/dtml03.html b/thesisenv/lib/python3.6/site-packages/zope/tal/benchmark/dtml03.html new file mode 100644 index 0000000..aea01aa --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/benchmark/dtml03.html @@ -0,0 +1,8 @@ + &dtml-x0; + &dtml-x1; + &dtml-x2; + &dtml-x3; + &dtml-x4; + &dtml-x5; + &dtml-x6; + &dtml-x7; diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/benchmark/dtml04.html b/thesisenv/lib/python3.6/site-packages/zope/tal/benchmark/dtml04.html new file mode 100644 index 0000000..1a3214f --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/benchmark/dtml04.html @@ -0,0 +1,6 @@ + + &dtml-x0; + &dtml-x1; + &dtml-x2; + &dtml-x3; + diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/benchmark/dtml05.html b/thesisenv/lib/python3.6/site-packages/zope/tal/benchmark/dtml05.html new file mode 100644 index 0000000..70b53cb --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/benchmark/dtml05.html @@ -0,0 +1,10 @@ + + &dtml-x0; + &dtml-x1; + &dtml-x2; + &dtml-x3; + &dtml-x4; + &dtml-x5; + &dtml-x6; + &dtml-x7; + diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/benchmark/dtml06.html b/thesisenv/lib/python3.6/site-packages/zope/tal/benchmark/dtml06.html new file mode 100644 index 0000000..11e5cf2 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/benchmark/dtml06.html @@ -0,0 +1,14 @@ + + + + &dtml-x0; + &dtml-x1; + &dtml-x2; + &dtml-x3; + &dtml-x4; + &dtml-x5; + &dtml-x6; + &dtml-x7; + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/benchmark/dtml07.html b/thesisenv/lib/python3.6/site-packages/zope/tal/benchmark/dtml07.html new file mode 100644 index 0000000..48f50c7 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/benchmark/dtml07.html @@ -0,0 +1,73 @@ + &dtml-x0; + &dtml-x1; + &dtml-x2; + &dtml-x3; + &dtml-x4; + &dtml-x5; + &dtml-x6; + &dtml-x7; + + &dtml-x0; + &dtml-x1; + 
&dtml-x2; + &dtml-x3; + &dtml-x4; + &dtml-x5; + &dtml-x6; + &dtml-x7; + + &dtml-x0; + &dtml-x1; + &dtml-x2; + &dtml-x3; + &dtml-x4; + &dtml-x5; + &dtml-x6; + &dtml-x7; + + &dtml-x0; + &dtml-x1; + &dtml-x2; + &dtml-x3; + &dtml-x4; + &dtml-x5; + &dtml-x6; + &dtml-x7; + + &dtml-x0; + &dtml-x1; + &dtml-x2; + &dtml-x3; + &dtml-x4; + &dtml-x5; + &dtml-x6; + &dtml-x7; + + &dtml-x0; + &dtml-x1; + &dtml-x2; + &dtml-x3; + &dtml-x4; + &dtml-x5; + &dtml-x6; + &dtml-x7; + + &dtml-x0; + &dtml-x1; + &dtml-x2; + &dtml-x3; + &dtml-x4; + &dtml-x5; + &dtml-x6; + &dtml-x7; + + &dtml-x0; + &dtml-x1; + &dtml-x2; + &dtml-x3; + &dtml-x4; + &dtml-x5; + &dtml-x6; + &dtml-x7; + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/benchmark/dtml08.html b/thesisenv/lib/python3.6/site-packages/zope/tal/benchmark/dtml08.html new file mode 100644 index 0000000..48f50c7 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/benchmark/dtml08.html @@ -0,0 +1,73 @@ + &dtml-x0; + &dtml-x1; + &dtml-x2; + &dtml-x3; + &dtml-x4; + &dtml-x5; + &dtml-x6; + &dtml-x7; + + &dtml-x0; + &dtml-x1; + &dtml-x2; + &dtml-x3; + &dtml-x4; + &dtml-x5; + &dtml-x6; + &dtml-x7; + + &dtml-x0; + &dtml-x1; + &dtml-x2; + &dtml-x3; + &dtml-x4; + &dtml-x5; + &dtml-x6; + &dtml-x7; + + &dtml-x0; + &dtml-x1; + &dtml-x2; + &dtml-x3; + &dtml-x4; + &dtml-x5; + &dtml-x6; + &dtml-x7; + + &dtml-x0; + &dtml-x1; + &dtml-x2; + &dtml-x3; + &dtml-x4; + &dtml-x5; + &dtml-x6; + &dtml-x7; + + &dtml-x0; + &dtml-x1; + &dtml-x2; + &dtml-x3; + &dtml-x4; + &dtml-x5; + &dtml-x6; + &dtml-x7; + + &dtml-x0; + &dtml-x1; + &dtml-x2; + &dtml-x3; + &dtml-x4; + &dtml-x5; + &dtml-x6; + &dtml-x7; + + &dtml-x0; + &dtml-x1; + &dtml-x2; + &dtml-x3; + &dtml-x4; + &dtml-x5; + &dtml-x6; + &dtml-x7; + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/benchmark/dtml09.html b/thesisenv/lib/python3.6/site-packages/zope/tal/benchmark/dtml09.html new file mode 100644 index 0000000..ce8e43e --- /dev/null +++ 
b/thesisenv/lib/python3.6/site-packages/zope/tal/benchmark/dtml09.html @@ -0,0 +1,10 @@ + + &dtml-x0; + &dtml-x1; + &dtml-x2; + &dtml-x3; + &dtml-x4; + &dtml-x5; + &dtml-x6; + &dtml-x7; + diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/benchmark/dtml10.html b/thesisenv/lib/python3.6/site-packages/zope/tal/benchmark/dtml10.html new file mode 100644 index 0000000..3115f7c --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/benchmark/dtml10.html @@ -0,0 +1,102 @@ + + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. 
+ A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. 
+ A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/benchmark/dtml11.html b/thesisenv/lib/python3.6/site-packages/zope/tal/benchmark/dtml11.html new file mode 100644 index 0000000..b0f71bd --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/benchmark/dtml11.html @@ -0,0 +1,103 @@ + + &dtml-x0; + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. 
+ A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. 
+ A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/benchmark/dtml12.html b/thesisenv/lib/python3.6/site-packages/zope/tal/benchmark/dtml12.html new file mode 100644 index 0000000..df2dab1 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/benchmark/dtml12.html @@ -0,0 +1,12 @@ + + + &dtml-y0; + &dtml-y1; + &dtml-y2; + &dtml-y3; + &dtml-y4; + &dtml-y5; + &dtml-y6; + &dtml-y7; + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/benchmark/tal01.html b/thesisenv/lib/python3.6/site-packages/zope/tal/benchmark/tal01.html new file mode 100644 index 0000000..180b47c --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/benchmark/tal01.html @@ -0,0 +1 @@ +baseline diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/benchmark/tal02.html b/thesisenv/lib/python3.6/site-packages/zope/tal/benchmark/tal02.html new file mode 100644 index 0000000..33d978d --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/benchmark/tal02.html @@ -0,0 +1,100 @@ + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. 
+ A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. 
+ A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. 
diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/benchmark/tal03.html b/thesisenv/lib/python3.6/site-packages/zope/tal/benchmark/tal03.html new file mode 100644 index 0000000..b63a737 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/benchmark/tal03.html @@ -0,0 +1,8 @@ + + + + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/benchmark/tal04.html b/thesisenv/lib/python3.6/site-packages/zope/tal/benchmark/tal04.html new file mode 100644 index 0000000..42af6e8 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/benchmark/tal04.html @@ -0,0 +1,6 @@ + + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/benchmark/tal05.html b/thesisenv/lib/python3.6/site-packages/zope/tal/benchmark/tal05.html new file mode 100644 index 0000000..6e2d626 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/benchmark/tal05.html @@ -0,0 +1,10 @@ + + + + + + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/benchmark/tal06.html b/thesisenv/lib/python3.6/site-packages/zope/tal/benchmark/tal06.html new file mode 100644 index 0000000..6f40872 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/benchmark/tal06.html @@ -0,0 +1,14 @@ + + + + + + + + + + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/benchmark/tal07.html b/thesisenv/lib/python3.6/site-packages/zope/tal/benchmark/tal07.html new file mode 100644 index 0000000..f331f05 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/benchmark/tal07.html @@ -0,0 +1,73 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/benchmark/tal08.html b/thesisenv/lib/python3.6/site-packages/zope/tal/benchmark/tal08.html new file mode 100644 index 0000000..f577fed --- /dev/null +++ 
b/thesisenv/lib/python3.6/site-packages/zope/tal/benchmark/tal08.html @@ -0,0 +1,73 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/benchmark/tal09.html b/thesisenv/lib/python3.6/site-packages/zope/tal/benchmark/tal09.html new file mode 100644 index 0000000..ef81c58 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/benchmark/tal09.html @@ -0,0 +1,10 @@ + + + + + + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/benchmark/tal10.html b/thesisenv/lib/python3.6/site-packages/zope/tal/benchmark/tal10.html new file mode 100644 index 0000000..8026df7 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/benchmark/tal10.html @@ -0,0 +1,102 @@ + + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. 
+ A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. 
+ A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/benchmark/tal11.html b/thesisenv/lib/python3.6/site-packages/zope/tal/benchmark/tal11.html new file mode 100644 index 0000000..d4a2440 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/benchmark/tal11.html @@ -0,0 +1,103 @@ + + + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. 
+ A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. 
+ A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + A large chunk of text to be repeated. + diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/benchmark/tal12.html b/thesisenv/lib/python3.6/site-packages/zope/tal/benchmark/tal12.html new file mode 100644 index 0000000..dcd2c30 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/benchmark/tal12.html @@ -0,0 +1,12 @@ + + + + + + + + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/driver.py b/thesisenv/lib/python3.6/site-packages/zope/tal/driver.py new file mode 100644 index 0000000..e139bb2 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/driver.py @@ -0,0 +1,179 @@ +#!/usr/bin/env python +############################################################################## +# +# Copyright (c) 2001, 2002, 2013 Zope Foundation and Contributors. +# All Rights Reserved. 
+# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Driver program to test METAL and TAL implementation: +interprets a file, prints results to stdout. +""" + +from __future__ import print_function + +import os +import optparse +import sys + +# Import local classes +import zope.tal.taldefs +from zope.tal.dummyengine import DummyEngine +from zope.tal.dummyengine import DummyTranslationDomain + + +class TestTranslations(DummyTranslationDomain): + def translate(self, msgid, mapping=None, context=None, + target_language=None, default=None): + if msgid == 'timefmt': + return '%(minutes)s minutes after %(hours)s %(ampm)s' % mapping + elif msgid == 'jobnum': + return '%(jobnum)s is the JOB NUMBER' % mapping + elif msgid == 'verify': + s = 'Your contact email address is recorded as %(email)s' + return s % mapping + elif msgid == 'mailto:${request/submitter}': + return 'mailto:bperson@dom.ain' + elif msgid == 'origin': + return '%(name)s was born in %(country)s' % mapping + return DummyTranslationDomain.translate( + self, msgid, mapping, context, + target_language, default=default) + + +class TestEngine(DummyEngine): + def __init__(self, macros=None): + DummyEngine.__init__(self, macros) + self.translationDomain = TestTranslations() + + def evaluatePathOrVar(self, expr): + if expr == 'here/currentTime': + return {'hours' : 6, + 'minutes': 59, + 'ampm' : 'PM', + } + elif expr == 'context/@@object_name': + return '7' + elif expr == 'request/submitter': + return 'aperson@dom.ain' + return DummyEngine.evaluatePathOrVar(self, expr) + + +# This is a 
disgusting hack so that we can use engines that actually know +# something about certain object paths. +ENGINES = {'test23.html': TestEngine, + 'test24.html': TestEngine, + 'test26.html': TestEngine, + 'test27.html': TestEngine, + 'test28.html': TestEngine, + 'test29.html': TestEngine, + 'test30.html': TestEngine, + 'test31.html': TestEngine, + 'test32.html': TestEngine, + } + + +OPTIONS = [ + optparse.make_option('-H', '--html', + action='store_const', const='html', dest='mode', + help='explicitly choose HTML input (default: use file extension)'), + optparse.make_option('-x', '--xml', + action='store_const', const='xml', dest='mode', + help='explicitly choose XML input (default: use file extension)'), + optparse.make_option('-l', '--lenient', action='store_true', + help='lenient structure insertion'), + # aka don't validate HTML/XML inserted by + # tal:content="structure expr" + optparse.make_option('-m', '--macro-only', action='store_true', + help='macro expansion only'), + optparse.make_option('-s', '--show-code', action='store_true', + help='print intermediate opcodes only'), + optparse.make_option('-t', '--show-tal', action='store_true', + help='leave TAL/METAL attributes in output'), + optparse.make_option('-i', '--show-i18n', action='store_true', + help='leave I18N substitution string un-interpolated'), + optparse.make_option('-a', '--annotate', action='store_true', + help='enable source annotations'), +] + +def main(values=None): + parser = optparse.OptionParser('usage: %prog [options] testfile', + description=__doc__, + option_list=OPTIONS) + opts, args = parser.parse_args(values=values) + if not args: + parser.print_help() + sys.exit(1) + if len(args) > 1: + parser.error('Too many arguments') + file = args[0] + + it = compilefile(file, opts.mode) + if opts.show_code: + showit(it) + else: + # See if we need a special engine for this test + engine = None + engineClass = ENGINES.get(os.path.basename(file)) + if engineClass is not None: + engine = 
engineClass(opts.macro_only) + interpretit(it, engine=engine, + tal=not opts.macro_only, + showtal=1 if opts.show_tal else -1, + strictinsert=not opts.lenient, + i18nInterpolate=not opts.show_i18n, + sourceAnnotations=opts.annotate) + +def interpretit(it, engine=None, stream=None, tal=1, showtal=-1, + strictinsert=1, i18nInterpolate=1, sourceAnnotations=0): + from zope.tal.talinterpreter import TALInterpreter + program, macros = it + assert zope.tal.taldefs.isCurrentVersion(program) + if engine is None: + engine = DummyEngine(macros) + TALInterpreter(program, macros, engine, stream, wrap=0, + tal=tal, showtal=showtal, strictinsert=strictinsert, + i18nInterpolate=i18nInterpolate, + sourceAnnotations=sourceAnnotations)() + +def compilefile(file, mode=None): + assert mode in ("html", "xml", None) + if mode is None: + ext = os.path.splitext(file)[1] + if ext.lower() in (".html", ".htm"): + mode = "html" + else: + mode = "xml" + # make sure we can find the file + prefix = os.path.dirname(os.path.abspath(__file__)) + os.path.sep + if (not os.path.exists(file) + and os.path.exists(os.path.join(prefix, file))): + file = os.path.join(prefix, file) + # normalize filenames for test output + filename = os.path.abspath(file) + if filename.startswith(prefix): + filename = filename[len(prefix):] + filename = filename.replace(os.sep, '/') # test files expect slashes + # parse + from zope.tal.talgenerator import TALGenerator + if mode == "html": + from zope.tal.htmltalparser import HTMLTALParser + p = HTMLTALParser(gen=TALGenerator(source_file=filename, xml=0)) + else: + from zope.tal.talparser import TALParser + p = TALParser(gen=TALGenerator(source_file=filename)) + p.parseFile(file) + return p.getCode() + +def showit(it): + from pprint import pprint + pprint(it) + +if __name__ == "__main__": + main() diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/dummyengine.py b/thesisenv/lib/python3.6/site-packages/zope/tal/dummyengine.py new file mode 100644 index 
0000000..2987c2d --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/dummyengine.py @@ -0,0 +1,344 @@ +############################################################################## +# +# Copyright (c) 2001, 2002, 2003 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Dummy TAL expression engine so that I can test out the TAL implementation. +""" +import re + +try: + # Python 2.x + from StringIO import StringIO +except ImportError: + # Python 3.x + from io import StringIO + +from zope.interface import implementer +from zope.tal.taldefs import NAME_RE, TALExpressionError, ErrorInfo +from zope.tal.interfaces import ITALExpressionCompiler, ITALExpressionEngine +from zope.i18nmessageid import Message + + +try: + unicode +except NameError: + unicode = str # Python 3.x + + +Default = object() + +name_match = re.compile(r"(?s)(%s):(.*)\Z" % NAME_RE).match + +class CompilerError(Exception): + pass + + +@implementer(ITALExpressionCompiler, ITALExpressionEngine) +class DummyEngine(object): + + position = None + source_file = None + + + def __init__(self, macros=None): + if macros is None: + macros = {} + self.macros = macros + dict = {'nothing': None, 'default': Default} + self.locals = self.globals = dict + self.stack = [dict] + self.translationDomain = DummyTranslationDomain() + self.useEngineAttrDicts = False + + # zope.tal.interfaces.ITALExpressionCompiler + + def getCompilerError(self): + return CompilerError + + def compile(self, expr): + return "$%s$" % expr + + # 
zope.tal.interfaces.ITALExpressionEngine + + def setSourceFile(self, source_file): + self.source_file = source_file + + def setPosition(self, position): + self.position = position + + def beginScope(self): + self.stack.append(self.locals) + + def endScope(self): + assert len(self.stack) > 1, "more endScope() than beginScope() calls" + self.locals = self.stack.pop() + + def setLocal(self, name, value): + if self.locals is self.stack[-1]: + # Unmerge this scope's locals from previous scope of first set + self.locals = self.locals.copy() + self.locals[name] = value + + def setGlobal(self, name, value): + self.globals[name] = value + + def getValue(self, name, default=None): + value = self.globals.get(name, default) + if value is default: + value = self.locals.get(name, default) + return value + + def evaluate(self, expression): + assert expression.startswith("$") and expression.endswith("$"), \ + expression + expression = expression[1:-1] + m = name_match(expression) + if m: + type, expr = m.group(1, 2) + else: + type = "path" + expr = expression + + if type in ("string", "str"): + return expr + if type in ("path", "var", "global", "local"): + return self.evaluatePathOrVar(expr) + if type == "not": + return not self.evaluate(expr) + if type == "exists": + return expr in self.locals or expr in self.globals + if type == "python": + try: + return eval(expr, self.globals, self.locals) + except: + raise TALExpressionError("evaluation error in %s" % repr(expr)) + if type == "position": + # Insert the current source file name, line number, + # and column offset. 
+ if self.position: + lineno, offset = self.position + else: + lineno, offset = None, None + return '%s (%s,%s)' % (self.source_file, lineno, offset) + raise TALExpressionError("unrecognized expression: " + repr(expression)) + + # implementation; can be overridden + def evaluatePathOrVar(self, expr): + expr = expr.strip() + if expr in self.locals: + return self.locals[expr] + elif expr in self.globals: + return self.globals[expr] + else: + raise TALExpressionError("unknown variable: %s" % repr(expr)) + + def evaluateValue(self, expr): + return self.evaluate(expr) + + def evaluateBoolean(self, expr): + return self.evaluate(expr) + + def evaluateText(self, expr): + text = self.evaluate(expr) + if isinstance(text, (str, unicode, Message)): + return text + if text is not None and text is not Default: + text = str(text) + return text + + def evaluateStructure(self, expr): + # TODO Should return None or a DOM tree + return self.evaluate(expr) + + # implementation; can be overridden + def evaluateSequence(self, expr): + # TODO: Should return a sequence + return self.evaluate(expr) + + def evaluateMacro(self, macroName): + assert macroName.startswith("$") and macroName.endswith("$"), \ + macroName + macroName = macroName[1:-1] + file, localName = self.findMacroFile(macroName) + if not file: + # Local macro + macro = self.macros[localName] + else: + # External macro + from . 
import driver + program, macros = driver.compilefile(file) + macro = macros.get(localName) + if not macro: + raise TALExpressionError("macro %s not found in file %s" % + (localName, file)) + return macro + + # internal + def findMacroFile(self, macroName): + if not macroName: + raise TALExpressionError("empty macro name") + i = macroName.rfind('/') + if i < 0: + # No slash -- must be a locally defined macro + return None, macroName + else: + # Up to last slash is the filename + fileName = macroName[:i] + localName = macroName[i+1:] + return fileName, localName + + def setRepeat(self, name, expr): + seq = self.evaluateSequence(expr) + return Iterator(name, seq, self) + + def createErrorInfo(self, err, position): + return ErrorInfo(err, position) + + def getDefault(self): + return Default + + def translate(self, msgid, domain=None, mapping=None, default=None): + self.translationDomain.domain = domain + return self.translationDomain.translate( + msgid, mapping, default=default) + + def evaluateCode(self, lang, code): + # We probably implement too much, but I use the dummy engine to test + # some of the issues that we will have. + + # For testing purposes only + locals = {} + globals = {} + if self.useEngineAttrDicts: + globals = self.globals.copy() + locals = self.locals.copy() + + assert lang == 'text/server-python' + import sys + + # Removing probable comments + if code.strip().startswith(''): + code = code.strip()[4:-3] + + # Prepare code. 
+ lines = code.split('\n') + lines = [l for l in lines if l.strip() != ''] + code = '\n'.join(lines) + # This saves us from all indentation issues :) + if code.startswith(' ') or code.startswith('\t'): + code = 'if 1 == 1:\n' + code + '\n' + tmp = sys.stdout + sys.stdout = StringIO() + try: + exec(code, globals, locals) + finally: + result = sys.stdout + sys.stdout = tmp + + # For testing purposes only + self.codeLocals = locals + self.codeGlobals = globals + + self.locals.update(locals) + self.globals.update(globals) + + return result.getvalue() + +class Iterator(object): + + def __init__(self, name, seq, engine): + self.name = name + self.seq = seq + self.engine = engine + self.nextIndex = 0 + + def __next__(self): + i = self.nextIndex + try: + item = self.seq[i] + except IndexError: + return 0 + self.nextIndex = i+1 + self.engine.setLocal(self.name, item) + return 1 + next = __next__ # Python 2 compatibility + + +class DummyTranslationDomain(object): + + domain = '' + + msgids = {} + + def appendMsgid(self, domain, data): + if domain not in self.msgids: + self.msgids[domain] = [] + self.msgids[domain].append(data) + + def getMsgids(self, domain): + return self.msgids[domain] + + def clearMsgids(self): + self.msgids = {} + + def translate(self, msgid, mapping=None, context=None, + target_language=None, default=None): + + domain = self.domain + # This is a fake translation service which simply uppercases non + # ${name} placeholder text in the message id. + # + # First, transform a string with ${name} placeholders into a list of + # substrings. Then upcase everything but the placeholders, then glue + # things back together. + + # If the domain is a string method, then transform the string + # by calling that method. 
+ + # MessageID attributes override arguments + if isinstance(msgid, Message): + domain = msgid.domain + mapping = msgid.mapping + default = msgid.default + if default is None: # Message doesn't substitute itself for + default = msgid # missing default + + # simulate an unknown msgid by returning None + if msgid == "don't translate me": + text = default + elif domain and hasattr('', domain): + text = getattr(msgid, domain)() + else: + domain = 'default' + text = msgid.upper() + + self.appendMsgid(domain, (msgid, mapping)) + + def repl(m): + return unicode(mapping[m.group(m.lastindex).lower()]) + cre = re.compile(r'\$(?:([_A-Za-z][-\w]*)|\{([_A-Za-z][-\w]*)\})') + return cre.sub(repl, text) + +class MultipleDomainsDummyEngine(DummyEngine): + + def translate(self, msgid, domain=None, mapping=None, default=None): + + if isinstance(msgid, Message): + domain = msgid.domain + + if domain == 'a_very_explicit_domain_setup_by_template_developer_that_wont_be_taken_into_account_by_the_ZPT_engine': + domain = 'lower' + + self.translationDomain.domain = domain + return self.translationDomain.translate( + msgid, mapping, default=default) + diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/htmltalparser.py b/thesisenv/lib/python3.6/site-packages/zope/tal/htmltalparser.py new file mode 100644 index 0000000..1761bc7 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/htmltalparser.py @@ -0,0 +1,369 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +""" +Parse HTML and compile to :class:`~.TALInterpreter` intermediate code, using +a :class:`~.TALGenerator`. +""" + +# When Python 3 becomes mainstream please swap the try and except parts. +try: + # Python 2.x + from HTMLParser import HTMLParser, HTMLParseError +except ImportError: + # Python 3.x + from html.parser import HTMLParser + try: + from html.parser import HTMLParseError + except ImportError: + # Python 3.5 removed it, but we need it as a base class + # so here's a copy taken from Python 3.4: + class HTMLParseError(Exception): + def __init__(self, msg, position=(None, None)): + Exception.__init__(self) + assert msg + self.msg = msg + self.lineno = position[0] + self.offset = position[1] + + def __str__(self): + result = self.msg + if self.lineno is not None: + result = result + ", at line %d" % self.lineno + if self.offset is not None: + result = result + ", column %d" % (self.offset + 1) + return result + +from zope.tal.taldefs import (ZOPE_METAL_NS, ZOPE_TAL_NS, ZOPE_I18N_NS, + METALError, TALError, I18NError) +from zope.tal.talgenerator import TALGenerator + + +_html_parser_extras = {} +if 'convert_charrefs' in HTMLParser.__init__.__code__.co_names: + _html_parser_extras['convert_charrefs'] = False # pragma: NO COVER py34 + +#: List of Boolean attributes in HTML that may be given in +#: minimized form (e.g. 
```` rather than ````) +#: From http://www.w3.org/TR/xhtml1/#guidelines (C.10) +BOOLEAN_HTML_ATTRS = frozenset([ + "compact", "nowrap", "ismap", "declare", "noshade", "checked", + "disabled", "readonly", "multiple", "selected", "noresize", + "defer" + ]) + +#: List of HTML tags with an empty content model; these are +#: rendered in minimized form, e.g. ````. +#: From http://www.w3.org/TR/xhtml1/#dtds +EMPTY_HTML_TAGS = frozenset([ + "base", "meta", "link", "hr", "br", "param", "img", "area", + "input", "col", "basefont", "isindex", "frame", + ]) + +#: List of HTML elements that close open paragraph-level elements +#: and are themselves paragraph-level. +PARA_LEVEL_HTML_TAGS = frozenset([ + "h1", "h2", "h3", "h4", "h5", "h6", "p", + ]) + +#: Tags that automatically close other tags. +BLOCK_CLOSING_TAG_MAP = { + "tr": frozenset(["tr", "td", "th"]), + "td": frozenset(["td", "th"]), + "th": frozenset(["td", "th"]), + "li": frozenset(["li"]), + "dd": frozenset(["dd", "dt"]), + "dt": frozenset(["dd", "dt"]), + } + +#: List of HTML tags that denote larger sections than paragraphs. 
+BLOCK_LEVEL_HTML_TAGS = frozenset([ + "blockquote", "table", "tr", "th", "td", "thead", "tfoot", "tbody", + "noframe", "ul", "ol", "li", "dl", "dt", "dd", "div", + ]) + +#: Section level HTML tags +SECTION_LEVEL_HTML_TAGS = PARA_LEVEL_HTML_TAGS.union(BLOCK_LEVEL_HTML_TAGS) + +TIGHTEN_IMPLICIT_CLOSE_TAGS = PARA_LEVEL_HTML_TAGS.union(BLOCK_CLOSING_TAG_MAP) + + +class NestingError(HTMLParseError): + """Exception raised when elements aren't properly nested.""" + + def __init__(self, tagstack, endtag, position=(None, None)): + self.endtag = endtag + if tagstack: + if len(tagstack) == 1: + msg = ('Open tag <%s> does not match close tag ' + % (tagstack[0], endtag)) + else: + msg = ('Open tags <%s> do not match close tag ' + % ('>, <'.join(tagstack), endtag)) + else: + msg = 'No tags are open to match ' % endtag + HTMLParseError.__init__(self, msg, position) + +class EmptyTagError(NestingError): + """Exception raised when empty elements have an end tag.""" + + def __init__(self, tag, position=(None, None)): + self.tag = tag + msg = 'Close tag should be removed' % tag + HTMLParseError.__init__(self, msg, position) + +class OpenTagError(NestingError): + """Exception raised when a tag is not allowed in another tag.""" + + def __init__(self, tagstack, tag, position=(None, None)): + self.tag = tag + msg = 'Tag <%s> is not allowed in <%s>' % (tag, tagstack[-1]) + HTMLParseError.__init__(self, msg, position) + +class HTMLTALParser(HTMLParser): + """ + Parser for HTML. + + After you call either :meth:`parseFile` and :meth:`parseString` + you can retrieve the compiled program using :meth:`getCode`. + """ + + # External API + + def __init__(self, gen=None): + """ + :keyword TALGenerator gen: The configured (with an expression compiler) + code generator to use. If one is not given, a default will be used. 
+ """ + HTMLParser.__init__(self, **_html_parser_extras) + if gen is None: + gen = TALGenerator(xml=0) + self.gen = gen + self.tagstack = [] + self.nsstack = [] + self.nsdict = { + 'tal': ZOPE_TAL_NS, + 'metal': ZOPE_METAL_NS, + 'i18n': ZOPE_I18N_NS, + } + + def parseFile(self, file): + """Parse data in the given file.""" + with open(file) as f: + data = f.read() + + try: + self.parseString(data) + except TALError as e: + e.setFile(file) + raise + + def parseString(self, data): + """Parse data in the given string.""" + self.feed(data) + self.close() + while self.tagstack: + self.implied_endtag(self.tagstack[-1], 2) + assert self.nsstack == [], self.nsstack + + def getCode(self): + """ + After parsing, this returns ``(program, macros)``. + """ + return self.gen.getCode() + + # Overriding HTMLParser methods + + def handle_starttag(self, tag, attrs): + self.close_para_tags(tag) + self.scan_xmlns(attrs) + tag, attrlist, taldict, metaldict, i18ndict \ + = self.process_ns(tag, attrs) + if tag in EMPTY_HTML_TAGS and "content" in taldict: + raise TALError( + "empty HTML tags cannot use tal:content: %s" % repr(tag), + self.getpos()) + # Support for inline Python code. 
+ if tag == 'script': + type_attr = [a for a in attrlist if a[0] == "type"] + if type_attr and type_attr[0][1].startswith('text/server-'): + attrlist.remove(type_attr[0]) + taldict = {'script': type_attr[0][1], 'omit-tag': ''} + self.tagstack.append(tag) + self.gen.emitStartElement(tag, attrlist, taldict, metaldict, i18ndict, + self.getpos()) + if tag in EMPTY_HTML_TAGS: + self.implied_endtag(tag, -1) + + def handle_startendtag(self, tag, attrs): + self.close_para_tags(tag) + self.scan_xmlns(attrs) + tag, attrlist, taldict, metaldict, i18ndict \ + = self.process_ns(tag, attrs) + if "content" in taldict: + if tag in EMPTY_HTML_TAGS: + raise TALError( + "empty HTML tags cannot use tal:content: %s" % repr(tag), + self.getpos()) + self.gen.emitStartElement(tag, attrlist, taldict, metaldict, + i18ndict, self.getpos()) + self.gen.emitEndElement(tag, implied=-1, position=self.getpos()) + else: + self.gen.emitStartElement(tag, attrlist, taldict, metaldict, + i18ndict, self.getpos(), isend=1) + self.pop_xmlns() + + def handle_endtag(self, tag): + if self.tagstack and self.tagstack[-1] == 'script' and tag != 'script': + self.handle_data('' % tag) + return + if tag in EMPTY_HTML_TAGS: + # etc. 
in the source is an error + raise EmptyTagError(tag, self.getpos()) + self.close_enclosed_tags(tag) + self.gen.emitEndElement(tag, position=self.getpos()) + self.pop_xmlns() + self.tagstack.pop() + + def close_para_tags(self, tag): + if tag in EMPTY_HTML_TAGS: + return + close_to = -1 + if tag in BLOCK_CLOSING_TAG_MAP: + blocks_to_close = BLOCK_CLOSING_TAG_MAP[tag] + for i, t in enumerate(self.tagstack): + if t in blocks_to_close: + if close_to == -1: + close_to = i + elif t in BLOCK_LEVEL_HTML_TAGS: + close_to = -1 + elif tag in SECTION_LEVEL_HTML_TAGS: + for i in range(len(self.tagstack) - 1, -1, -1): + closetag = self.tagstack[i] + if closetag in BLOCK_LEVEL_HTML_TAGS: + break + elif closetag in PARA_LEVEL_HTML_TAGS: + if closetag != "p": + raise OpenTagError(self.tagstack, tag, self.getpos()) + close_to = i + if close_to >= 0: + while len(self.tagstack) > close_to: + self.implied_endtag(self.tagstack[-1], 1) + + def close_enclosed_tags(self, tag): + if tag not in self.tagstack: + raise NestingError(self.tagstack, tag, self.getpos()) + while tag != self.tagstack[-1]: + self.implied_endtag(self.tagstack[-1], 1) + assert self.tagstack[-1] == tag + + def implied_endtag(self, tag, implied): + assert tag == self.tagstack[-1] + assert implied in (-1, 1, 2) + isend = (implied < 0) + if tag in TIGHTEN_IMPLICIT_CLOSE_TAGS: + # Pick out trailing whitespace from the program, and + # insert the close tag before the whitespace. 
+ white = self.gen.unEmitWhitespace() + else: + white = None + self.gen.emitEndElement(tag, isend=isend, implied=implied, + position=self.getpos()) + if white: + self.gen.emitRawText(white) + self.tagstack.pop() + self.pop_xmlns() + + def handle_charref(self, name): + self.gen.emitRawText("&#%s;" % name) + + def handle_entityref(self, name): + self.gen.emitRawText("&%s;" % name) + + def handle_data(self, data): + self.gen.emitRawText(data) + + def handle_comment(self, data): + self.gen.emitRawText("" % data) + + def handle_decl(self, data): + self.gen.emitRawText("" % data) + + def handle_pi(self, data): + self.gen.emitRawText("" % data) + + # Internal thingies + + def scan_xmlns(self, attrs): + nsnew = {} + for key, value in attrs: + if key.startswith("xmlns:"): + nsnew[key[6:]] = value + self.nsstack.append(self.nsdict) + if nsnew: + self.nsdict = self.nsdict.copy() + self.nsdict.update(nsnew) + + def pop_xmlns(self): + self.nsdict = self.nsstack.pop() + + _namespaces = { + ZOPE_TAL_NS: "tal", + ZOPE_METAL_NS: "metal", + ZOPE_I18N_NS: "i18n", + } + + def fixname(self, name): + if ':' in name: + prefix, suffix = name.split(':', 1) + if prefix == 'xmlns': + nsuri = self.nsdict.get(suffix) + if nsuri in self._namespaces: + return name, name, prefix + else: + nsuri = self.nsdict.get(prefix) + if nsuri in self._namespaces: + return name, suffix, self._namespaces[nsuri] + return name, name, 0 + + def process_ns(self, name, attrs): + attrlist = [] + taldict = {} + metaldict = {} + i18ndict = {} + name, namebase, namens = self.fixname(name) + for item in attrs: + key, value = item + key, keybase, keyns = self.fixname(key) + ns = keyns or namens # default to tag namespace + if ns and ns != 'unknown': + item = (key, value, ns) + if ns == 'tal': + if keybase in taldict: + raise TALError("duplicate TAL attribute " + + repr(keybase), self.getpos()) + taldict[keybase] = value + elif ns == 'metal': + if keybase in metaldict: + raise METALError("duplicate METAL attribute " + + 
repr(keybase), self.getpos()) + metaldict[keybase] = value + elif ns == 'i18n': + if keybase in i18ndict: + raise I18NError("duplicate i18n attribute " + + repr(keybase), self.getpos()) + i18ndict[keybase] = value + attrlist.append(item) + if namens in ('metal', 'tal', 'i18n'): + taldict['tal tag'] = namens + return name, attrlist, taldict, metaldict, i18ndict diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/interfaces.py b/thesisenv/lib/python3.6/site-packages/zope/tal/interfaces.py new file mode 100644 index 0000000..f6c1aec --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/interfaces.py @@ -0,0 +1,218 @@ +############################################################################## +# +# Copyright (c) 2003 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +""" +Interface that a TAL expression implementation provides to the +METAL/TAL implementation. + +This package does not provide an implementation of +:class:`ITALExpressionCompiler`, :class:`ITALExpressionEngine` or +:class:`ITALIterator`. An external package must provide those. The +most commonly used are :class:`zope.tales.tales.ExpressionEngine`, +:class:`zope.tales.tales.Context`, and +:class:`zope.tales.tales.Iterator`, respectively. +""" +from zope.interface import Attribute, Interface + + +class ITALExpressionCompiler(Interface): + """Compile-time interface provided by a TAL expression implementation. 
+ + The TAL compiler needs an instance of this interface to support + compilation of TAL expressions embedded in documents containing + TAL and METAL constructs. + """ + + def getCompilerError(): + """Return the exception class raised for compilation errors. + """ + + def compile(expression): + """Return a compiled form of *expression* for later evaluation. + + *expression* is the source text of the expression. + + The return value may be passed to the various ``evaluate*()`` + methods of the :class:`ITALExpressionEngine` interface. No compatibility is + required for the values of the compiled expression between + different :class:`ITALExpressionEngine` implementations. + """ + + def getContext(namespace): + """Create an expression execution context + + The given *namespace* provides the initial top-level names. + """ + +class ITALExpressionEngine(Interface): + """Render-time interface provided by a TAL expression implementation. + + The TAL interpreter uses this interface to TAL expression to support + evaluation of the compiled expressions returned by + :meth:`ITALExpressionCompiler.compile`. + """ + + def getDefault(): + """Return the value of the ``default`` TAL expression. + + Checking a value for a match with ``default`` should be done + using the ``is`` operator in Python. + """ + + def setPosition(position): + """Inform the engine of the current position in the source file. + + *position* is a tuple (lineno, offset). + + This is used to allow the evaluation engine to report + execution errors so that site developers can more easily + locate the offending expression. + """ + + def setSourceFile(filename): + """Inform the engine of the name of the current source file. + + This is used to allow the evaluation engine to report + execution errors so that site developers can more easily + locate the offending expression. + """ + + def beginScope(): + """Push a new scope onto the stack of open scopes. 
+ """ + + def endScope(): + """Pop one scope from the stack of open scopes. + """ + + def evaluate(compiled_expression): + """Evaluate an arbitrary expression. + + No constraints are imposed on the return value. + """ + + def evaluateBoolean(compiled_expression): + """Evaluate an expression that must return a Boolean value. + """ + + def evaluateMacro(compiled_expression): + """Evaluate an expression that must return a macro program. + """ + + def evaluateStructure(compiled_expression): + """Evaluate an expression that must return a structured + document fragment. + + The result of evaluating *compiled_expression* must be a + string containing a parsable HTML or XML fragment. Any TAL + markup contained in the result string will be interpreted. + """ + + def evaluateText(compiled_expression): + """Evaluate an expression that must return text. + + The returned text should be suitable for direct inclusion in + the output: any HTML or XML escaping or quoting is the + responsibility of the expression itself. + + If the expression evaluates to None, then that is returned. It + represents ``nothing`` in TALES. + If the expression evaluates to what :meth:`getDefault()` + returns, by comparison using ``is``, then that is returned. It + represents ``default`` in TALES. + """ + + def evaluateValue(compiled_expression): + """Evaluate an arbitrary expression. + + No constraints are imposed on the return value. + """ + + def createErrorInfo(exception, position): + """Returns an :class:`ITALExpressionErrorInfo` object. + + *position* is a tuple (lineno, offset). + + The returned object is used to provide information about the + error condition for the on-error handler. + """ + + def setGlobal(name, value): + """Set a global variable. + + The variable will be named *name* and have the value *value*. + """ + + def setLocal(name, value): + """Set a local variable in the current scope. + + The variable will be named *name* and have the value *value*. 
+ """ + + def getValue(name, default=None): + """Get a variable by name. + + If the variable does not exist, return default. + """ + + def setRepeat(name, compiled_expression): + """Start a repetition, returning an :class:`ITALIterator`. + + The engine is expected to add the a value (typically the + returned iterator) for the name to the variable namespace. + """ + + def translate(msgid, domain=None, mapping=None, default=None): + """See zope.i18n.interfaces.ITranslationDomain.translate""" + + # NB: This differs from the Zope 2 equivalent in the order of + # the arguments. This will be a (hopefully minor) issue when + # creating a unified TAL implementation. + + def evaluateCode(lang, code): + """Evaluates code of the given language. + + Returns whatever the code outputs. This can be defined on a + per-language basis. In Python this usually everything the print + statement will return. + """ + + +class ITALIterator(Interface): + """A TAL iterator + + Not to be confused with a Python iterator. + """ + + def next(): + """Advance to the next value in the iteration, if possible + + Return a true value if it was possible to advance and return + a false value otherwise. + """ + + +class ITALExpressionErrorInfo(Interface): + """Information about an error.""" + + type = Attribute("type", + "The exception class.") + + value = Attribute("value", + "The exception instance.") + + lineno = Attribute("lineno", + "The line number the error occurred on in the source.") + + offset = Attribute("offset", + "The character offset at which the error occurred.") diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/runtest.py b/thesisenv/lib/python3.6/site-packages/zope/tal/runtest.py new file mode 100644 index 0000000..518a1b0 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/runtest.py @@ -0,0 +1,174 @@ +#! 
/usr/bin/env python +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Driver program to run METAL and TAL regression tests: +compares interpeted test files with expected output files in a sibling +directory. +""" + +from __future__ import print_function + +import glob +import os +import sys +import traceback +import difflib +import copy +import optparse + +try: + # Python 2.x + from cStringIO import StringIO +except ImportError: + # Python 3.x + from io import StringIO + +import zope.tal.driver +import zope.tal.tests.utils + +def showdiff(a, b, out): + print(''.join(difflib.ndiff(a, b)), file=out) + +def main(argv=None, out=sys.stdout): + parser = optparse.OptionParser('usage: %prog [options] [testfile ...]', + description=__doc__) + parser.add_option('-q', '--quiet', action='store_true', + help="less verbose output") + internal_options = optparse.OptionGroup(parser, 'Internal options') + internal_options.add_option('-Q', '--very-quiet', + action='store_true', dest='unittesting', + help="no output on success, only diff/traceback on failure") + internal_options.add_option('-N', '--normalize-newlines', + action='store_true', dest='normalize_newlines', + help="ignore differences between CRLF and LF") + parser.add_option_group(internal_options) + driver_options = optparse.OptionGroup(parser, 'Driver options', + "(for debugging only; supplying these *will* cause test 
failures)") + for option in zope.tal.driver.OPTIONS: + driver_options.add_option(option) + parser.add_option_group(driver_options) + opts, args = parser.parse_args(argv) + + if not args: + here = os.path.dirname(__file__) + prefix = os.path.join(here, "tests", "input", "test*.") + if zope.tal.tests.utils.skipxml: + xmlargs = [] + else: + xmlargs = glob.glob(prefix + "xml") + xmlargs.sort() + htmlargs = glob.glob(prefix + "html") + htmlargs.sort() + args = xmlargs + htmlargs + if not args: + sys.stderr.write("No tests found -- please supply filenames\n") + sys.exit(1) + errors = 0 + for arg in args: + locopts = [] + if "metal" in arg and not opts.macro_only: + locopts.append("-m") + if "_sa" in arg and not opts.annotate: + locopts.append("-a") + if not opts.unittesting: + print(arg, end=' ', file=out) + sys.stdout.flush() + if zope.tal.tests.utils.skipxml and arg.endswith(".xml"): + print("SKIPPED (XML parser not available)", file=out) + continue + save = sys.stdout, sys.argv + try: + try: + sys.stdout = stdout = StringIO() + sys.argv = ["driver.py"] + locopts + [arg] + zope.tal.driver.main(copy.copy(opts)) + finally: + sys.stdout, sys.argv = save + except SystemExit: + raise + except: + errors = 1 + if opts.quiet: + print(sys.exc_info()[0].__name__, file=out) + sys.stdout.flush() + else: + if opts.unittesting: + print('', file=out) + else: + print("Failed:", file=out) + sys.stdout.flush() + traceback.print_exc() + continue + head, tail = os.path.split(arg) + outfile = os.path.join( + head.replace("input", "output"), + tail) + try: + f = open(outfile) + except IOError: + expected = None + print("(missing file %s)" % outfile, end=' ', file=out) + else: + expected = f.readlines() + f.close() + stdout.seek(0) + if hasattr(stdout, "readlines"): + actual = stdout.readlines() + else: + actual = readlines(stdout) + if opts.normalize_newlines or "_sa" in arg or arg.endswith('.xml'): + # EOL normalization makes the tests pass: + # - XML files, on Windows, have \r\n line 
endings. Because + # expat insists on byte streams on Python 3, we end up with + # those \r\n's going through the entire TAL engine and + # showing up in the actual output. Expected output, on the + # other hand, has just \n's, since we read the file as text. + # - Source annotation tests: when a developer converts all the + # input and output files to \r\n line endings and runs + # tests on Linux (because they're trying to debug Windows + # problems but can't be forced to use an inferior OS), we + # also have \r\n's going through the TAL engine and showing + # up both in actual and expected lists. Except for source + # annotation lines added by TAL, which always use just \n. + actual = [l.replace('\r\n', '\n') for l in actual] + if expected is not None: + expected = [l.replace('\r\n', '\n') for l in expected] + if actual == expected: + if not opts.unittesting: + print("OK", file=out) + else: + if opts.unittesting: + print('', file=out) + else: + print("not OK", file=out) + errors = 1 + if not opts.quiet and expected is not None: + showdiff(expected, actual, out) + if errors: + if opts.unittesting: + return 1 + else: + sys.exit(1) + +def readlines(f): + L = [] + while 1: + line = f.readline() + if not line: + break + L.append(line) + return L + +if __name__ == "__main__": + sys.exit(main(sys.argv[1:])) diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/taldefs.py b/thesisenv/lib/python3.6/site-packages/zope/tal/taldefs.py new file mode 100644 index 0000000..539e541 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/taldefs.py @@ -0,0 +1,218 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Common definitions used by TAL and METAL compilation and transformation. +""" +import re +from zope.tal.interfaces import ITALExpressionErrorInfo +from zope.interface import implementer + +#: Version of the specification we implement. +TAL_VERSION = "1.6" + +#: URI for XML namespace +XML_NS = "http://www.w3.org/XML/1998/namespace" +#: URI for XML NS declarations +XMLNS_NS = "http://www.w3.org/2000/xmlns/" + +#: TAL namespace URI +ZOPE_TAL_NS = "http://xml.zope.org/namespaces/tal" +#: METAL namespace URI +ZOPE_METAL_NS = "http://xml.zope.org/namespaces/metal" +#: I18N namespace URI +ZOPE_I18N_NS = "http://xml.zope.org/namespaces/i18n" + +# This RE must exactly match the expression of the same name in the +# zope.i18n.simpletranslationservice module: +NAME_RE = "[a-zA-Z_][-a-zA-Z0-9_]*" + +#: Known METAL attributes +KNOWN_METAL_ATTRIBUTES = frozenset([ + "define-macro", + "extend-macro", + "use-macro", + "define-slot", + "fill-slot", + ]) + +#: Known TAL attributes +KNOWN_TAL_ATTRIBUTES = frozenset([ + "define", + "condition", + "content", + "replace", + "repeat", + "attributes", + "on-error", + "omit-tag", + "script", + "tal tag", # a pseudo attribute that holds the namespace of elements + # like , , + ]) + +#: Known I18N attributes +KNOWN_I18N_ATTRIBUTES = frozenset([ + "translate", + "domain", + "target", + "source", + "attributes", + "data", + "name", + "ignore", + "ignore-attributes", + ]) + +class TALError(Exception): + """ + A base exception for errors raised by this implementation. 
+ """ + + def __init__(self, msg, position=(None, None)): + Exception.__init__(self) + assert msg != "" + self.msg = msg + self.lineno = position[0] + self.offset = position[1] + self.filename = None + + def setFile(self, filename): + self.filename = filename + + def __str__(self): + result = self.msg + if self.lineno is not None: + result = result + ", at line %d" % self.lineno + if self.offset is not None: + result = result + ", column %d" % (self.offset + 1) + if self.filename is not None: + result = result + ', in file %s' % self.filename + return result + +class METALError(TALError): + """An error parsing on running METAL macros.""" + +class TALExpressionError(TALError): + """An error parsing or running a TAL expression.""" + +class I18NError(TALError): + """An error parsing a I18N expression.""" + + +@implementer(ITALExpressionErrorInfo) +class ErrorInfo(object): + """ + Default implementation of :class:`zope.tal.interfaces.ITALExpressionErrorInfo`. + """ + + def __init__(self, err, position=(None, None)): + if isinstance(err, Exception): + self.type = err.__class__ + self.value = err + else: + self.type = err + self.value = None + self.lineno = position[0] + self.offset = position[1] + + +_attr_re = re.compile(r"\s*([^\s]+)\s+([^\s].*)\Z", re.S) +_subst_re = re.compile(r"\s*(?:(text|structure)\s+)?(.*)\Z", re.S) + +def parseAttributeReplacements(arg, xml): + attr_dict = {} + for part in splitParts(arg): + m = _attr_re.match(part) + if not m: + raise TALError("Bad syntax in attributes: %r" % part) + name, expr = m.groups() + if not xml: + name = name.lower() + if name in attr_dict: + raise TALError("Duplicate attribute name in attributes: %r" % part) + attr_dict[name] = expr + return attr_dict + +def parseSubstitution(arg, position=(None, None)): + m = _subst_re.match(arg) + if not m: + raise TALError("Bad syntax in substitution text: %r" % arg, position) + key, expr = m.groups() + if not key: + key = "text" + return key, expr + +def splitParts(arg): + # 
Break in pieces at undoubled semicolons and + # change double semicolons to singles: + arg = arg.replace(";;", "\0") + parts = arg.split(';') + parts = [p.replace("\0", ";") for p in parts] + if len(parts) > 1 and not parts[-1].strip(): + del parts[-1] # It ended in a semicolon + return parts + +def isCurrentVersion(program): + version = getProgramVersion(program) + return version == TAL_VERSION + +def isinstance_(ob, kind): + # Proxy-friendly and faster isinstance_ check for new-style objects + try: + return kind in ob.__class__.__mro__ + except AttributeError: + return False + + +def getProgramMode(program): + version = getProgramVersion(program) + if (version == TAL_VERSION and isinstance_(program[1], tuple) + and len(program[1]) == 2): + opcode, mode = program[1] + if opcode == "mode": + return mode + return None + +def getProgramVersion(program): + if (len(program) >= 2 + and isinstance_(program[0], tuple) and len(program[0]) == 2): + opcode, version = program[0] + if opcode == "version": + return version + return None + +_ent1_re = re.compile('&(?![A-Z#])', re.I) +_entch_re = re.compile('&([A-Z][A-Z0-9]*)(?![A-Z0-9;])', re.I) +_entn1_re = re.compile('&#(?![0-9X])', re.I) +_entnx_re = re.compile('&(#X[A-F0-9]*)(?![A-F0-9;])', re.I) +_entnd_re = re.compile('&(#[0-9][0-9]*)(?![0-9;])') + +def attrEscape(s): + """Replace special characters '&<>' by character entities, + except when '&' already begins a syntactically valid entity.""" + s = _ent1_re.sub('&', s) + s = _entch_re.sub(r'&\1', s) + s = _entn1_re.sub('&#', s) + s = _entnx_re.sub(r'&\1', s) + s = _entnd_re.sub(r'&\1', s) + s = s.replace('<', '<') + s = s.replace('>', '>') + s = s.replace('"', '"') + return s + +def quote(s): + s = s.replace("&", "&") # Must be done first! 
+ s = s.replace("<", "<") + s = s.replace(">", ">") + s = s.replace('"', """) + return '"%s"' % s diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/talgenerator.py b/thesisenv/lib/python3.6/site-packages/zope/tal/talgenerator.py new file mode 100644 index 0000000..ce7470c --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/talgenerator.py @@ -0,0 +1,874 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +""" +Code generator for :class:`~.TALInterpreter` intermediate code. +""" +import re + +try: + # Python 3.x + from html import escape +except ImportError: + # Python 2.x + from cgi import escape + +from zope.tal import taldefs +from zope.tal.taldefs import NAME_RE, TAL_VERSION +from zope.tal.taldefs import I18NError, METALError, TALError +from zope.tal.taldefs import parseSubstitution +from zope.tal.translationcontext import TranslationContext, DEFAULT_DOMAIN + +try: + xrange +except NameError: + xrange = range # Python 3.x + + +_name_rx = re.compile(NAME_RE) + +class TALGenerator(object): + """ + Generate intermediate code. + """ + + inMacroUse = 0 + inMacroDef = 0 + source_file = None + + def __init__(self, expressionCompiler=None, xml=1, source_file=None): + """ + :keyword expressionCompiler: The implementation of + :class:`zope.tal.interfaces.ITALExpressionCompiler` to use. + If not given, we'll use a simple, undocumented, compiler. 
+ """ + if not expressionCompiler: + from zope.tal.dummyengine import DummyEngine + expressionCompiler = DummyEngine() + self.expressionCompiler = expressionCompiler + self.CompilerError = expressionCompiler.getCompilerError() + # This holds the emitted opcodes representing the input + self.program = [] + # The program stack for when we need to do some sub-evaluation for an + # intermediate result. E.g. in an i18n:name tag for which the + # contents describe the ${name} value. + self.stack = [] + # Another stack of postponed actions. Elements on this stack are a + # dictionary; key/values contain useful information that + # emitEndElement needs to finish its calculations + self.todoStack = [] + self.macros = {} + # {slot-name --> default content program} + self.slots = {} + self.slotStack = [] + self.xml = xml # true --> XML, false --> HTML + self.emit("version", TAL_VERSION) + self.emit("mode", xml and "xml" or "html") + if source_file is not None: + self.source_file = source_file + self.emit("setSourceFile", source_file) + self.i18nContext = TranslationContext() + self.i18nLevel = 0 + + def getCode(self): + assert not self.stack + assert not self.todoStack + return self.optimize(self.program), self.macros + + def optimize(self, program): + output = [] + collect = [] + cursor = 0 + for cursor in xrange(len(program)+1): + try: + item = program[cursor] + except IndexError: + item = (None, None) + opcode = item[0] + if opcode == "rawtext": + collect.append(item[1]) + continue + if opcode == "endTag": + collect.append("" % item[1]) + continue + if opcode == "startTag": + if self.optimizeStartTag(collect, item[1], item[2], ">"): + continue + if opcode == "startEndTag": + endsep = "/>" if self.xml else " />" + if self.optimizeStartTag(collect, item[1], item[2], endsep): + continue + if opcode in ("beginScope", "endScope"): + # Push *Scope instructions in front of any text instructions; + # this allows text instructions separated only by *Scope + # instructions to be 
joined together. + output.append(self.optimizeArgsList(item)) + continue + if opcode == 'noop': + # This is a spacer for end tags in the face of i18n:name + # attributes. We can't let the optimizer collect immediately + # following end tags into the same rawtextOffset. + opcode = None + pass + text = "".join(collect) + if text: + i = text.rfind("\n") + if i >= 0: + i = len(text) - (i + 1) + output.append(("rawtextColumn", (text, i))) + else: + output.append(("rawtextOffset", (text, len(text)))) + if opcode != None: + output.append(self.optimizeArgsList(item)) + collect = [] + return self.optimizeCommonTriple(output) + + def optimizeArgsList(self, item): + if len(item) == 2: + return item + else: + return item[0], tuple(item[1:]) + + # These codes are used to indicate what sort of special actions + # are needed for each special attribute. (Simple attributes don't + # get action codes.) + # + # The special actions (which are modal) are handled by + # TALInterpreter.attrAction() and .attrAction_tal(). 
+ # + # Each attribute is represented by a tuple: + # + # (name, value) -- a simple name/value pair, with + # no special processing + # + # (name, value, action, *extra) -- attribute with special + # processing needs, action is a + # code that indicates which + # branch to take, and *extra + # contains additional, + # action-specific information + # needed by the processing + # + def optimizeStartTag(self, collect, name, attrlist, end): + # return true if the tag can be converted to plain text + if not attrlist: + collect.append("<%s%s" % (name, end)) + return 1 + opt = 1 + new = ["<" + name] + for i in range(len(attrlist)): + item = attrlist[i] + if len(item) > 2: + opt = 0 + name, value, action = item[:3] + attrlist[i] = (name, value, action) + item[3:] + else: + if item[1] is None: + s = item[0] + else: + s = '%s="%s"' % (item[0], taldefs.attrEscape(item[1])) + attrlist[i] = item[0], s + new.append(" " + s) + # if no non-optimizable attributes were found, convert to plain text + if opt: + new.append(end) + collect.extend(new) + return opt + + def optimizeCommonTriple(self, program): + if len(program) < 3: + return program + output = program[:2] + prev2, prev1 = output + for item in program[2:]: + if (item[0] == "beginScope" + and prev1[0] == "setPosition" + and prev2[0] == "rawtextColumn"): + position = output.pop()[1] + text, column = output.pop()[1] + prev1 = None, None + closeprev = 0 + if output and output[-1][0] == "endScope": + closeprev = 1 + output.pop() + item = ("rawtextBeginScope", + (text, column, position, closeprev, item[1])) + output.append(item) + prev2 = prev1 + prev1 = item + return output + + def todoPush(self, todo): + self.todoStack.append(todo) + + def todoPop(self): + return self.todoStack.pop() + + def compileExpression(self, expr): + try: + return self.expressionCompiler.compile(expr) + except self.CompilerError as err: + raise TALError('%s in expression %s' % (err.args[0], repr(expr)), + self.position) + + def pushProgram(self): + 
self.stack.append(self.program) + self.program = [] + + def popProgram(self): + program = self.program + self.program = self.stack.pop() + return self.optimize(program) + + def pushSlots(self): + self.slotStack.append(self.slots) + self.slots = {} + + def popSlots(self): + slots = self.slots + self.slots = self.slotStack.pop() + return slots + + def emit(self, *instruction): + self.program.append(instruction) + + def emitStartTag(self, name, attrlist, isend=0): + if isend: + opcode = "startEndTag" + else: + opcode = "startTag" + self.emit(opcode, name, attrlist) + + def emitEndTag(self, name): + if self.xml and self.program and self.program[-1][0] == "startTag": + # Minimize empty element + self.program[-1] = ("startEndTag",) + self.program[-1][1:] + else: + self.emit("endTag", name) + + def emitOptTag(self, name, optTag, isend): + program = self.popProgram() #block + start = self.popProgram() #start tag + if (isend or not program) and self.xml: + # Minimize empty element + start[-1] = ("startEndTag",) + start[-1][1:] + isend = 1 + cexpr = optTag[0] + if cexpr: + cexpr = self.compileExpression(optTag[0]) + self.emit("optTag", name, cexpr, optTag[1], isend, start, program) + + def emitRawText(self, text): + self.emit("rawtext", text) + + def emitText(self, text): + self.emitRawText(escape(text, False)) + + def emitDefines(self, defines): + for part in taldefs.splitParts(defines): + m = re.match( + r"(?s)\s*(?:(global|local)\s+)?(%s)\s+(.*)\Z" % NAME_RE, part) + if not m: + raise TALError("invalid define syntax: " + repr(part), + self.position) + scope, name, expr = m.group(1, 2, 3) + scope = scope or "local" + cexpr = self.compileExpression(expr) + if scope == "local": + self.emit("setLocal", name, cexpr) + else: + self.emit("setGlobal", name, cexpr) + + def emitOnError(self, name, onError, TALtag, isend): + block = self.popProgram() + key, expr = parseSubstitution(onError) + cexpr = self.compileExpression(expr) + if key == "text": + self.emit("insertText", cexpr, 
[]) + else: + assert key == "structure" + self.emit("insertStructure", cexpr, {}, []) + if TALtag: + self.emitOptTag(name, (None, 1), isend) + else: + self.emitEndTag(name) + handler = self.popProgram() + self.emit("onError", block, handler) + + def emitCondition(self, expr): + cexpr = self.compileExpression(expr) + program = self.popProgram() + self.emit("condition", cexpr, program) + + def emitRepeat(self, arg): + m = re.match(r"(?s)\s*(%s)\s+(.*)\Z" % NAME_RE, arg) + if not m: + raise TALError("invalid repeat syntax: " + repr(arg), + self.position) + name, expr = m.group(1, 2) + cexpr = self.compileExpression(expr) + program = self.popProgram() + self.emit("loop", name, cexpr, program) + + def emitSubstitution(self, arg, attrDict={}): + key, expr = parseSubstitution(arg) + cexpr = self.compileExpression(expr) + program = self.popProgram() + if key == "text": + self.emit("insertText", cexpr, program) + else: + assert key == "structure" + self.emit("insertStructure", cexpr, attrDict, program) + + def emitI18nSubstitution(self, arg, attrDict={}): + # TODO: Code duplication is BAD, we need to fix it later + key, expr = parseSubstitution(arg) + cexpr = self.compileExpression(expr) + program = self.popProgram() + if key == "text": + self.emit("insertI18nText", cexpr, program) + else: + assert key == "structure" + self.emit("insertI18nStructure", cexpr, attrDict, program) + + def emitEvaluateCode(self, lang): + program = self.popProgram() + self.emit('evaluateCode', lang, program) + + def emitI18nVariable(self, varname): + # Used for i18n:name attributes. 
+ m = _name_rx.match(varname) + if m is None or m.group() != varname: + raise TALError("illegal i18n:name: %r" % varname, self.position) + program = self.popProgram() + self.emit('i18nVariable', varname, program, None, False) + + def emitTranslation(self, msgid, i18ndata): + program = self.popProgram() + if i18ndata is None: + self.emit('insertTranslation', msgid, program) + else: + key, expr = parseSubstitution(i18ndata) + cexpr = self.compileExpression(expr) + assert key == 'text' + self.emit('insertTranslation', msgid, program, cexpr) + + def emitDefineMacro(self, macroName): + program = self.popProgram() + macroName = macroName.strip() + if macroName in self.macros: + raise METALError("duplicate macro definition: %s" % repr(macroName), + self.position) + if not re.match('%s$' % NAME_RE, macroName): + raise METALError("invalid macro name: %s" % repr(macroName), + self.position) + self.macros[macroName] = program + self.inMacroDef = self.inMacroDef - 1 + self.emit("defineMacro", macroName, program) + + def emitUseMacro(self, expr): + cexpr = self.compileExpression(expr) + program = self.popProgram() + self.inMacroUse = 0 + self.emit("useMacro", expr, cexpr, self.popSlots(), program) + + def emitExtendMacro(self, defineName, useExpr): + cexpr = self.compileExpression(useExpr) + program = self.popProgram() + self.inMacroUse = 0 + self.emit("extendMacro", useExpr, cexpr, self.popSlots(), program, + defineName) + self.emitDefineMacro(defineName) + + def emitDefineSlot(self, slotName): + program = self.popProgram() + slotName = slotName.strip() + if not re.match('%s$' % NAME_RE, slotName): + raise METALError("invalid slot name: %s" % repr(slotName), + self.position) + self.emit("defineSlot", slotName, program) + + def emitFillSlot(self, slotName): + program = self.popProgram() + slotName = slotName.strip() + if slotName in self.slots: + raise METALError("duplicate fill-slot name: %s" % repr(slotName), + self.position) + if not re.match('%s$' % NAME_RE, slotName): + 
raise METALError("invalid slot name: %s" % repr(slotName), + self.position) + self.slots[slotName] = program + self.inMacroUse = 1 + self.emit("fillSlot", slotName, program) + + def unEmitWhitespace(self): + collect = [] + i = len(self.program) - 1 + while i >= 0: + item = self.program[i] + if item[0] != "rawtext": + break + text = item[1] + if not re.match(r"\A\s*\Z", text): + break + collect.append(text) + i = i-1 + del self.program[i+1:] + if i >= 0 and self.program[i][0] == "rawtext": + text = self.program[i][1] + m = re.search(r"\s+\Z", text) + if m: + self.program[i] = ("rawtext", text[:m.start()]) + collect.append(m.group()) + collect.reverse() + return "".join(collect) + + def unEmitNewlineWhitespace(self): + collect = [] + i = len(self.program) + while i > 0: + i = i-1 + item = self.program[i] + if item[0] != "rawtext": + break + text = item[1] + if re.match(r"\A[ \t]*\Z", text): + collect.append(text) + continue + m = re.match(r"(?s)^(.*?)(\r?\n[ \t]*)\Z", text) + if not m: + break + text, rest = m.group(1, 2) + collect.reverse() + rest = rest + "".join(collect) + del self.program[i:] + if text: + self.emit("rawtext", text) + return rest + return None + + def replaceAttrs(self, attrlist, repldict): + # Each entry in attrlist starts like (name, value). Result is + # (name, value, action, expr, xlat, msgid) if there is a + # tal:attributes entry for that attribute. Additional attrs + # defined only by tal:attributes are added here. 
+ # + # (name, value, action, expr, xlat, msgid) + if not repldict: + return attrlist + newlist = [] + for item in attrlist: + key = item[0] + if key in repldict: + expr, xlat, msgid = repldict[key] + item = item[:2] + ("replace", expr, xlat, msgid) + del repldict[key] + newlist.append(item) + # Add dynamic-only attributes + for key, (expr, xlat, msgid) in sorted(repldict.items()): + newlist.append((key, None, "insert", expr, xlat, msgid)) + return newlist + + def emitStartElement(self, name, attrlist, taldict, metaldict, i18ndict, + position=(None, None), isend=0): + if not taldict and not metaldict and not i18ndict: + # Handle the simple, common case + self.emitStartTag(name, attrlist, isend) + self.todoPush({}) + if isend: + self.emitEndElement(name, isend) + return + self.position = position + + # TODO: Ugly hack to work around tal:replace and i18n:translate issue. + # I (DV) need to cleanup the code later. + replaced = False + if "replace" in taldict: + if "content" in taldict: + raise TALError( + "tal:content and tal:replace are mutually exclusive", + position) + taldict["omit-tag"] = taldict.get("omit-tag", "") + taldict["content"] = taldict.pop("replace") + replaced = True + + for key, value in taldict.items(): + if key not in taldefs.KNOWN_TAL_ATTRIBUTES: + raise TALError("bad TAL attribute: " + repr(key), position) + if not (value or key == 'omit-tag'): + raise TALError("missing value for TAL attribute: " + + repr(key), position) + for key, value in metaldict.items(): + if key not in taldefs.KNOWN_METAL_ATTRIBUTES: + raise METALError("bad METAL attribute: " + repr(key), + position) + if not value: + raise TALError("missing value for METAL attribute: " + + repr(key), position) + for key, value in i18ndict.items(): + if key not in taldefs.KNOWN_I18N_ATTRIBUTES: + raise I18NError("bad i18n attribute: " + repr(key), position) + if not value and key in ("attributes", "data", "id"): + raise I18NError("missing value for i18n attribute: " + + repr(key), position) 
+ + todo = {} + defineMacro = metaldict.get("define-macro") + extendMacro = metaldict.get("extend-macro") + useMacro = metaldict.get("use-macro") + defineSlot = metaldict.get("define-slot") + fillSlot = metaldict.get("fill-slot") + define = taldict.get("define") + condition = taldict.get("condition") + repeat = taldict.get("repeat") + content = taldict.get("content") + script = taldict.get("script") + attrsubst = taldict.get("attributes") + onError = taldict.get("on-error") + omitTag = taldict.get("omit-tag") + TALtag = taldict.get("tal tag") + i18nattrs = i18ndict.get("attributes") + # Preserve empty string if implicit msgids are used. We'll generate + # code with the msgid='' and calculate the right implicit msgid during + # interpretation phase. + msgid = i18ndict.get("translate") + varname = i18ndict.get('name') + i18ndata = i18ndict.get('data') + + if varname and not self.i18nLevel: + raise I18NError( + "i18n:name can only occur inside a translation unit", + position) + + if i18ndata and not msgid: + raise I18NError("i18n:data must be accompanied by i18n:translate", + position) + + if extendMacro: + if useMacro: + raise METALError( + "extend-macro cannot be used with use-macro", position) + if not defineMacro: + raise METALError( + "extend-macro must be used with define-macro", position) + + if defineMacro or extendMacro or useMacro: + if fillSlot or defineSlot: + raise METALError( + "define-slot and fill-slot cannot be used with " + "define-macro, extend-macro, or use-macro", position) + if defineMacro and useMacro: + raise METALError( + "define-macro may not be used with use-macro", position) + + useMacro = useMacro or extendMacro + + if content and msgid: + raise I18NError( + "explicit message id and tal:content can't be used together", + position) + + repeatWhitespace = None + if repeat: + # Hack to include preceding whitespace in the loop program + repeatWhitespace = self.unEmitNewlineWhitespace() + if position != (None, None): + # TODO: at some point we 
should insist on a non-trivial position + self.emit("setPosition", position) + if self.inMacroUse: + if fillSlot: + self.pushProgram() + # generate a source annotation at the beginning of fill-slot + if self.source_file is not None: + if position != (None, None): + self.emit("setPosition", position) + self.emit("setSourceFile", self.source_file) + todo["fillSlot"] = fillSlot + self.inMacroUse = 0 + else: + if fillSlot: + raise METALError("fill-slot must be within a use-macro", + position) + if not self.inMacroUse: + if defineMacro: + self.pushProgram() + self.emit("version", TAL_VERSION) + self.emit("mode", self.xml and "xml" or "html") + # generate a source annotation at the beginning of the macro + if self.source_file is not None: + if position != (None, None): + self.emit("setPosition", position) + self.emit("setSourceFile", self.source_file) + todo["defineMacro"] = defineMacro + self.inMacroDef = self.inMacroDef + 1 + if useMacro: + self.pushSlots() + self.pushProgram() + todo["useMacro"] = useMacro + self.inMacroUse = 1 + if defineSlot: + if not self.inMacroDef: + raise METALError( + "define-slot must be within a define-macro", + position) + self.pushProgram() + todo["defineSlot"] = defineSlot + + if defineSlot or i18ndict: + + domain = i18ndict.get("domain") or self.i18nContext.domain + source = i18ndict.get("source") or self.i18nContext.source + target = i18ndict.get("target") or self.i18nContext.target + if ( domain != DEFAULT_DOMAIN + or source is not None + or target is not None): + self.i18nContext = TranslationContext(self.i18nContext, + domain=domain, + source=source, + target=target) + self.emit("beginI18nContext", + {"domain": domain, "source": source, + "target": target}) + todo["i18ncontext"] = 1 + if taldict or i18ndict: + dict = {} + for item in attrlist: + key, value = item[:2] + dict[key] = value + self.emit("beginScope", dict) + todo["scope"] = 1 + if onError: + self.pushProgram() # handler + if TALtag: + self.pushProgram() # start + 
self.emitStartTag(name, list(attrlist)) # Must copy attrlist! + if TALtag: + self.pushProgram() # start + self.pushProgram() # block + todo["onError"] = onError + if define: + self.emitDefines(define) + todo["define"] = define + if condition: + self.pushProgram() + todo["condition"] = condition + if repeat: + todo["repeat"] = repeat + self.pushProgram() + if repeatWhitespace: + self.emitText(repeatWhitespace) + if content: + if varname: + todo['i18nvar'] = varname + todo["content"] = content + self.pushProgram() + else: + todo["content"] = content + # i18n:name w/o tal:replace uses the content as the interpolation + # dictionary values + elif varname: + todo['i18nvar'] = varname + self.pushProgram() + if msgid is not None: + self.i18nLevel += 1 + todo['msgid'] = msgid + if i18ndata: + todo['i18ndata'] = i18ndata + optTag = omitTag is not None or TALtag + if optTag: + todo["optional tag"] = omitTag, TALtag + self.pushProgram() + if attrsubst or i18nattrs: + if attrsubst: + repldict = taldefs.parseAttributeReplacements(attrsubst, + self.xml) + else: + repldict = {} + if i18nattrs: + i18nattrs = _parseI18nAttributes(i18nattrs, self.position, + self.xml) + else: + i18nattrs = {} + # Convert repldict's name-->expr mapping to a + # name-->(compiled_expr, translate) mapping + for key, value in sorted(repldict.items()): + if i18nattrs.get(key, None): + raise I18NError( + "attribute [%s] cannot both be part of tal:attributes" + " and have a msgid in i18n:attributes" % key, + position) + ce = self.compileExpression(value) + repldict[key] = ce, key in i18nattrs, i18nattrs.get(key) + for key in sorted(i18nattrs): + if key not in repldict: + repldict[key] = None, 1, i18nattrs.get(key) + else: + repldict = {} + if replaced: + todo["repldict"] = repldict + repldict = {} + if script: + todo["script"] = script + self.emitStartTag(name, self.replaceAttrs(attrlist, repldict), isend) + if optTag: + self.pushProgram() + if content and not varname: + self.pushProgram() + if not content 
and msgid is not None: + self.pushProgram() + if content and varname: + self.pushProgram() + if script: + self.pushProgram() + if todo and position != (None, None): + todo["position"] = position + self.todoPush(todo) + if isend: + self.emitEndElement(name, isend, position=position) + + def emitEndElement(self, name, isend=0, implied=0, position=(None, None)): + todo = self.todoPop() + if not todo: + # Shortcut + if not isend: + self.emitEndTag(name) + return + + self.position = todo.get("position", (None, None)) + defineMacro = todo.get("defineMacro") + useMacro = todo.get("useMacro") + defineSlot = todo.get("defineSlot") + fillSlot = todo.get("fillSlot") + repeat = todo.get("repeat") + content = todo.get("content") + script = todo.get("script") + condition = todo.get("condition") + onError = todo.get("onError") + repldict = todo.get("repldict", {}) + scope = todo.get("scope") + optTag = todo.get("optional tag") + msgid = todo.get('msgid') + i18ncontext = todo.get("i18ncontext") + varname = todo.get('i18nvar') + i18ndata = todo.get('i18ndata') + + if implied > 0: + if defineMacro or useMacro or defineSlot or fillSlot: + exc = METALError + what = "METAL" + else: + exc = TALError + what = "TAL" + raise exc("%s attributes on <%s> require explicit " % + (what, name, name), self.position) + + if script: + self.emitEvaluateCode(script) + # If there's no tal:content or tal:replace in the tag with the + # i18n:name, tal:replace is the default. + if content: + if msgid is not None: + self.emitI18nSubstitution(content, repldict) + else: + self.emitSubstitution(content, repldict) + # If we're looking at an implicit msgid, emit the insertTranslation + # opcode now, so that the end tag doesn't become part of the implicit + # msgid. If we're looking at an explicit msgid, it's better to emit + # the opcode after the i18nVariable opcode so we can better handle + # tags with both of them in them (and in the latter case, the contents + # would be thrown away for msgid purposes). 
+ # + # Still, we should emit insertTranslation opcode before i18nVariable + # in case tal:content, i18n:translate and i18n:name in the same tag + if not content and msgid is not None: + self.emitTranslation(msgid, i18ndata) + self.i18nLevel -= 1 + if optTag: + self.emitOptTag(name, optTag, isend) + elif not isend: + # If we're processing the end tag for a tag that contained + # i18n:name, we need to make sure that optimize() won't collect + # immediately following end tags into the same rawtextOffset, so + # put a spacer here that the optimizer will recognize. + if varname: + self.emit('noop') + self.emitEndTag(name) + if varname: + self.emitI18nVariable(varname) + if repeat: + self.emitRepeat(repeat) + if condition: + self.emitCondition(condition) + if onError: + self.emitOnError(name, onError, optTag and optTag[1], isend) + if scope: + self.emit("endScope") + if i18ncontext: + self.emit("endI18nContext") + assert self.i18nContext.parent is not None + self.i18nContext = self.i18nContext.parent + if defineSlot: + self.emitDefineSlot(defineSlot) + if fillSlot: + self.emitFillSlot(fillSlot) + if useMacro or defineMacro: + if useMacro and defineMacro: + self.emitExtendMacro(defineMacro, useMacro) + elif useMacro: + self.emitUseMacro(useMacro) + elif defineMacro: + self.emitDefineMacro(defineMacro) + if useMacro or defineSlot: + # generate a source annotation after define-slot or use-macro + # because the source file might have changed + if self.source_file is not None: + if position != (None, None): + self.emit("setPosition", position) + self.emit("setSourceFile", self.source_file) + + +def _parseI18nAttributes(i18nattrs, position, xml): + d = {} + # Filter out empty items, eg: + # i18n:attributes="value msgid; name msgid2;" + # would result in 3 items where the last one is empty + attrs = [spec for spec in i18nattrs.split(";") if spec] + for spec in attrs: + parts = spec.split() + if len(parts) == 2: + attr, msgid = parts + elif len(parts) == 1: + attr = parts[0] + 
msgid = None + else: + raise TALError("illegal i18n:attributes specification: %r" % spec, + position) + if not xml: + attr = attr.lower() + if attr in d: + raise TALError( + "attribute may only be specified once in i18n:attributes: %r" + % attr, + position) + d[attr] = msgid + return d + +def test(): + t = TALGenerator() + t.pushProgram() + t.emit("bar") + p = t.popProgram() + t.emit("foo", p) + +if __name__ == "__main__": + test() diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/talgettext.py b/thesisenv/lib/python3.6/site-packages/zope/tal/talgettext.py new file mode 100644 index 0000000..a4e3917 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/talgettext.py @@ -0,0 +1,341 @@ +#!/usr/bin/env python +############################################################################## +# +# Copyright (c) 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Program to extract internationalization markup from Page Templates. + +Once you have marked up a Page Template file with i18n: namespace tags, use +this program to extract GNU gettext .po file entries. + +Usage: talgettext.py [options] files +Options: + -h / --help + Print this message and exit. + -o / --output + Output the translation .po file to . + -u / --update + Update the existing translation with any new translation strings + found. 
+""" + +from __future__ import print_function + +import sys +import time +import getopt +import traceback +import warnings + +from zope.interface import implementer +from zope.tal.htmltalparser import HTMLTALParser +from zope.tal.talinterpreter import TALInterpreter, normalize +from zope.tal.dummyengine import DummyEngine +from zope.tal.interfaces import ITALExpressionEngine +from zope.tal.taldefs import TALExpressionError +from zope.i18nmessageid import Message + +PY3 = sys.version_info > (3,) +if PY3: + unicode = str + +pot_header = '''\ +# SOME DESCRIPTIVE TITLE. +# Copyright (C) YEAR ORGANIZATION +# FIRST AUTHOR , YEAR. +# +msgid "" +msgstr "" +"Project-Id-Version: PACKAGE VERSION\\n" +"POT-Creation-Date: %(time)s\\n" +"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\\n" +"Last-Translator: FULL NAME \\n" +"Language-Team: LANGUAGE \\n" +"MIME-Version: 1.0\\n" +"Content-Type: text/plain; charset=CHARSET\\n" +"Content-Transfer-Encoding: ENCODING\\n" +"Generated-By: talgettext.py %(version)s\\n" +''' + +NLSTR = '"\n"' + + +def usage(code, msg=''): + # Python 2.1 required + print(__doc__, file=sys.stderr) + if msg: + print(msg, file=sys.stderr) + sys.exit(code) + + +class POTALInterpreter(TALInterpreter): + def translate(self, msgid, default=None, i18ndict=None, obj=None): + if default is None: + default = getattr(msgid, 'default', unicode(msgid)) + # If no i18n dict exists yet, create one. + if i18ndict is None: + i18ndict = {} + if obj: + i18ndict.update(obj) + # Mmmh, it seems that sometimes the msgid is None; is that really + # possible? 
+ if msgid is None: + return None + # TODO: We need to pass in one of context or target_language + return self.engine.translate(msgid, self.i18nContext.domain, i18ndict, + default=default, position=self.position) + + +@implementer(ITALExpressionEngine) +class POEngine(DummyEngine): + + def __init__(self, macros=None): + self.catalog = {} + DummyEngine.__init__(self, macros) + + def evaluate(*args): + # If the result of evaluate ever gets into a message ID, we want + # to notice the fact in the .pot file. + return '${DYNAMIC_CONTENT}' + + def evaluatePathOrVar(*args): + # Actually this method is never called. + return 'XXX' + + def evaluateSequence(self, expr): + return (0,) # dummy + + def evaluateBoolean(self, expr): + return True # dummy + + def translate(self, msgid, domain=None, mapping=None, default=None, + # Position is not part of the ITALExpressionEngine + # interface + position=None): + + if default is not None: + default = normalize(default) + if msgid == default: + default = None + msgid = Message(msgid, default=default) + + if domain not in self.catalog: + self.catalog[domain] = {} + domain = self.catalog[domain] + + if msgid not in domain: + domain[msgid] = [] + else: + msgids = list(domain) + idx = msgids.index(msgid) + existing_msgid = msgids[idx] + if msgid.default != existing_msgid.default: + references = '\n'.join([location[0]+':'+str(location[1]) + for location in domain[msgid]]) + # Note: a lot of encode calls here are needed so + # Python 3 does not break. 
+ warnings.warn( + "Warning: msgid '%s' in %s already exists " + "with a different default (bad: %s, should be: %s)\n" + "The references for the existent value are:\n%s\n" % + (msgid.encode('utf-8'), + self.file.encode('utf-8') + ':'.encode('utf-8') + + str(position).encode('utf-8'), + msgid.default.encode('utf-8'), + existing_msgid.default.encode('utf-8'), + references.encode('utf-8'))) + domain[msgid].append((self.file, position)) + return 'x' + + +class UpdatePOEngine(POEngine): + """A slightly-less braindead POEngine which supports loading an existing + .po file first.""" + + def __init__ (self, macros=None, filename=None): + POEngine.__init__(self, macros) + + self._filename = filename + self._loadFile() + self.base = self.catalog + self.catalog = {} + + def __add(self, id, s, fuzzy): + "Add a non-fuzzy translation to the dictionary." + if not fuzzy and str: + # check for multi-line values and munge them appropriately + if '\n' in s: + lines = s.rstrip().split('\n') + s = NLSTR.join(lines) + self.catalog[id] = s + + def _loadFile(self): + # shamelessly cribbed from Python's Tools/i18n/msgfmt.py + # 25-Mar-2003 Nathan R. 
Yergler (nathan@zope.org) + # 14-Apr-2003 Hacked by Barry Warsaw (barry@zope.com) + + ID = 1 + STR = 2 + + try: + lines = open(self._filename).readlines() + except IOError as msg: + print(msg, file=sys.stderr) + sys.exit(1) + + section = None + fuzzy = False + + # Parse the catalog + lno = 0 + for l in lines: + lno += True + # If we get a comment line after a msgstr, this is a new entry + if l[0] == '#' and section == STR: + self.__add(msgid, msgstr, fuzzy) + section = None + fuzzy = False + # Record a fuzzy mark + if l[:2] == '#,' and l.find('fuzzy'): + fuzzy = True + # Skip comments + if l[0] == '#': + continue + # Now we are in a msgid section, output previous section + if l.startswith('msgid'): + if section == STR: + self.__add(msgid, msgstr, fuzzy) + section = ID + l = l[5:] + msgid = msgstr = '' + # Now we are in a msgstr section + elif l.startswith('msgstr'): + section = STR + l = l[6:] + # Skip empty lines + if not l.strip(): + continue + # TODO: Does this always follow Python escape semantics? 
+ l = eval(l) + if section == ID: + msgid += l + elif section == STR: + msgstr += '%s\n' % l + else: + print('Syntax error on %s:%d' % (infile, lno), + 'before:', file=sys.stderr) + print(l, file=sys.stderr) + sys.exit(1) + # Add last entry + if section == STR: + self.__add(msgid, msgstr, fuzzy) + + def evaluate(self, expression): + try: + return POEngine.evaluate(self, expression) + except TALExpressionError: + pass + + def evaluatePathOrVar(self, expr): + return 'who cares' + + def translate(self, msgid, domain=None, mapping=None, default=None, + position=None): + if msgid not in self.base: + POEngine.translate(self, msgid, domain, mapping, default, position) + return 'x' + + +def main(): + try: + opts, args = getopt.getopt( + sys.argv[1:], + 'ho:u:', + ['help', 'output=', 'update=']) + except getopt.error as msg: + usage(1, msg) + + outfile = None + engine = None + update_mode = False + for opt, arg in opts: + if opt in ('-h', '--help'): + usage(0) + elif opt in ('-o', '--output'): + outfile = arg + elif opt in ('-u', '--update'): + update_mode = True + if outfile is None: + outfile = arg + engine = UpdatePOEngine(filename=arg) + + if not args: + print('nothing to do') + return + + # We don't care about the rendered output of the .pt file + class Devnull(object): + def write(self, s): + pass + + # check if we've already instantiated an engine; + # if not, use the stupidest one available + if not engine: + engine = POEngine() + + # process each file specified + for filename in args: + try: + engine.file = filename + p = HTMLTALParser() + p.parseFile(filename) + program, macros = p.getCode() + POTALInterpreter(program, macros, engine, stream=Devnull(), + metal=False)() + except: # Hee hee, I love bare excepts! + print('There was an error processing', filename) + traceback.print_exc() + + # Now output the keys in the engine. Write them to a file if --output or + # --update was specified; otherwise use standard out. 
+ if (outfile is None): + outfile = sys.stdout + else: + outfile = file(outfile, update_mode and "a" or "w") + + catalog = {} + for domain in engine.catalog: + catalog.update(engine.catalog[domain]) + + messages = catalog.copy() + try: + messages.update(engine.base) + except AttributeError: + pass + if '' not in messages: + print(pot_header % {'time': time.ctime(), 'version': __version__}, + file=outfile) + + # TODO: You should not sort by msgid, but by filename and position. (SR) + msgids = sorted(catalog) + for msgid in msgids: + positions = engine.catalog[msgid] + for filename, position in positions: + outfile.write('#: %s:%s\n' % (filename, position[0])) + + outfile.write('msgid "%s"\n' % msgid) + outfile.write('msgstr ""\n') + outfile.write('\n') + + +if __name__ == '__main__': + main() diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/talinterpreter.py b/thesisenv/lib/python3.6/site-packages/zope/tal/talinterpreter.py new file mode 100644 index 0000000..82154db --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/talinterpreter.py @@ -0,0 +1,1033 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Interpreter for a pre-compiled TAL program. 
+""" +import cgi +import operator +import sys + +from zope.i18nmessageid import Message +from zope.tal.taldefs import quote, TAL_VERSION, METALError +from zope.tal.taldefs import isCurrentVersion +from zope.tal.taldefs import getProgramVersion, getProgramMode +from zope.tal.talgenerator import TALGenerator +from zope.tal.translationcontext import TranslationContext + +try: + unicode +except NameError: + unicode = str # Python 3.x +_BLANK = u'' + + +# Avoid constructing this tuple over and over +I18nMessageTypes = (Message,) + +TypesToTranslate = I18nMessageTypes + (str, unicode) + +BOOLEAN_HTML_ATTRS = frozenset([ + # List of Boolean attributes in HTML that should be rendered in + # minimized form (e.g. rather than ) + # From http://www.w3.org/TR/xhtml1/#guidelines (C.10) + # TODO: The problem with this is that this is not valid XML and + # can't be parsed back! + # XXX: This is an exact duplicate of htmltalparser.BOOLEAN_HTML_ATTRS. Why? + "compact", "nowrap", "ismap", "declare", "noshade", "checked", + "disabled", "readonly", "multiple", "selected", "noresize", + "defer" +]) + +_nulljoin = ''.join +_spacejoin = ' '.join + +def normalize(text): + # Now we need to normalize the whitespace in implicit message ids and + # implicit $name substitution values by stripping leading and trailing + # whitespace, and folding all internal whitespace to a single space. 
+ return _spacejoin(text.split()) + + +class AltTALGenerator(TALGenerator): + + def __init__(self, repldict, expressionCompiler=None, xml=0): + self.repldict = repldict + self.enabled = 1 + TALGenerator.__init__(self, expressionCompiler, xml) + + def enable(self, enabled): + self.enabled = enabled + + def emit(self, *args): + if self.enabled: + TALGenerator.emit(self, *args) + + def emitStartElement(self, name, attrlist, taldict, metaldict, i18ndict, + position=(None, None), isend=0): + metaldict = {} + taldict = {} + i18ndict = {} + if self.enabled and self.repldict: + taldict["attributes"] = "x x" + TALGenerator.emitStartElement(self, name, attrlist, + taldict, metaldict, i18ndict, + position, isend) + + def replaceAttrs(self, attrlist, repldict): + if self.enabled and self.repldict: + repldict = self.repldict + self.repldict = None + return TALGenerator.replaceAttrs(self, attrlist, repldict) + + + +class MacroStackItem(list): + # This is a `list` subclass for backward compability. + """Stack entry for the TALInterpreter.macroStack. + + This offers convenience attributes for more readable access. + + """ + __slots__ = () + + # These would be nicer using @syntax, but that would require + # Python 2.4.x; this will do for now. + + macroName = property(lambda self: self[0]) + slots = property(lambda self: self[1]) + definingName = property(lambda self: self[2]) + extending = property(lambda self: self[3]) + entering = property(lambda self: self[4], + lambda self, value: operator.setitem(self, 4, value)) + i18nContext = property(lambda self: self[5]) + + +class TALInterpreter(object): + """TAL interpreter. + + Some notes on source annotations. They are HTML/XML comments added to the + output whenever ``sourceFile`` is changed by a ``setSourceFile`` bytecode. Source + annotations are disabled by default, but you can turn them on by passing a + ``sourceAnnotations`` argument to the constructor. 
You can change the format + of the annotations by overriding formatSourceAnnotation in a subclass. + + The output of the annotation is delayed until some actual text is output + for two reasons: + + 1. ``setPosition`` bytecode follows ``setSourceFile``, and we need position + information to output the line number. + 2. Comments are not allowed in XML documents before the ```` + declaration. + + For performance reasons (TODO: premature optimization?) instead of checking + the value of ``_pending_source_annotation`` on every write to the output + stream, the ``_stream_write`` attribute is changed to point to + ``_annotated_stream_write`` method whenever ``_pending_source_annotation`` is + set to True, and to _stream.write when it is False. The following + invariant always holds:: + + if self._pending_source_annotation: + assert self._stream_write is self._annotated_stream_write + else: + assert self._stream_write is self.stream.write + + """ + + def __init__(self, program, macros, engine, stream=None, + debug=0, wrap=1023, metal=1, tal=1, showtal=-1, + strictinsert=1, stackLimit=100, i18nInterpolate=1, + sourceAnnotations=0): + """Create a TAL interpreter. + + :param program: A compiled program, as generated + by :class:`zope.tal.talgenerator.TALGenerator` + :param macros: Namespace of macros, usually also from + :class:`~.TALGenerator` + + Optional arguments: + + :keyword stream: output stream (defaults to sys.stdout). + :keyword bool debug: enable debugging output to sys.stderr (off by default). + :keyword int wrap: try to wrap attributes on opening tags to this number of + column (default: 1023). + :keyword bool metal: enable METAL macro processing (on by default). + :keyword bool tal: enable TAL processing (on by default). + :keyword int showtal: do not strip away TAL directives. A special value of + -1 (which is the default setting) enables showtal when TAL + processing is disabled, and disables showtal when TAL processing is + enabled. 
Note that you must use 0, 1, or -1; true boolean values + are not supported (for historical reasons). + :keyword bool strictinsert: enable TAL processing and stricter HTML/XML + checking on text produced by structure inserts (on by default). + Note that Zope turns this value off by default. + :keyword int stackLimit: set macro nesting limit (default: 100). + :keyword bool i18nInterpolate: enable i18n translations (default: on). + :keyword bool sourceAnnotations: enable source annotations with HTML comments + (default: off). + """ + self.program = program + self.macros = macros + self.engine = engine # Execution engine (aka context) + self.Default = engine.getDefault() + self._pending_source_annotation = False + self._currentTag = "" + self._stream_stack = [stream or sys.stdout] + self.popStream() + self.debug = debug + self.wrap = wrap + self.metal = metal + self.tal = tal + if tal: + self.dispatch = self.bytecode_handlers_tal + else: + self.dispatch = self.bytecode_handlers + assert showtal in (-1, 0, 1) + if showtal == -1: + showtal = (not tal) + self.showtal = showtal + self.strictinsert = strictinsert + self.stackLimit = stackLimit + self.html = 0 + self.endsep = "/>" + self.endlen = len(self.endsep) + # macroStack entries are MacroStackItem instances; + # the entries are mutated while on the stack + self.macroStack = [] + # `inUseDirective` is set iff we're handling either a + # metal:use-macro or a metal:extend-macro + self.inUseDirective = False + self.position = None, None # (lineno, offset) + self.col = 0 + self.level = 0 + self.scopeLevel = 0 + self.sourceFile = None + self.i18nStack = [] + self.i18nInterpolate = i18nInterpolate + self.i18nContext = TranslationContext() + self.sourceAnnotations = sourceAnnotations + + def StringIO(self): + # Third-party products wishing to provide a full Unicode-aware + # StringIO can do so by monkey-patching this method. 
+ return FasterStringIO() + + def saveState(self): + return (self.position, self.col, self.stream, self._stream_stack, + self.scopeLevel, self.level, self.i18nContext) + + def restoreState(self, state): + (self.position, self.col, self.stream, + self._stream_stack, scopeLevel, level, i18n) = state + if self._pending_source_annotation: + self._stream_write = self._annotated_stream_write + else: + self._stream_write = self.stream.write + assert self.level == level + while self.scopeLevel > scopeLevel: + self.engine.endScope() + self.scopeLevel = self.scopeLevel - 1 + self.engine.setPosition(self.position) + self.i18nContext = i18n + + def restoreOutputState(self, state): + (dummy, self.col, self.stream, + self._stream_stack, scopeLevel, level, i18n) = state + if self._pending_source_annotation: + self._stream_write = self._annotated_stream_write + else: + self._stream_write = self.stream.write + assert self.level == level + assert self.scopeLevel == scopeLevel + + def pushMacro(self, macroName, slots, definingName, extending): + if len(self.macroStack) >= self.stackLimit: + raise METALError("macro nesting limit (%d) exceeded " + "by %s" % (self.stackLimit, repr(macroName))) + self.macroStack.append( + MacroStackItem((macroName, slots, definingName, extending, + True, self.i18nContext))) + + def popMacro(self): + return self.macroStack.pop() + + def __call__(self): + """ + Interpret the current program. + + :return: Nothing. 
+ """ + assert self.level == 0 + assert self.scopeLevel == 0 + assert self.i18nContext.parent is None + self.interpret(self.program) + assert self.level == 0 + assert self.scopeLevel == 0 + assert self.i18nContext.parent is None + + def pushStream(self, newstream): + self._stream_stack.append(self.stream) + self.stream = newstream + if self._pending_source_annotation: + self._stream_write = self._annotated_stream_write + else: + self._stream_write = self.stream.write + + def popStream(self): + self.stream = self._stream_stack.pop() + if self._pending_source_annotation: + self._stream_write = self._annotated_stream_write + else: + self._stream_write = self.stream.write + + def _annotated_stream_write(self, s): + idx = s.find('= 0 or s.isspace(): + # Do not preprend comments in front of the declaration. + end_of_doctype = s.find('?>', idx) + if end_of_doctype > idx: + self.stream.write(s[:end_of_doctype+2]) + s = s[end_of_doctype+2:] + # continue + else: + self.stream.write(s) + return + self._pending_source_annotation = False + self._stream_write = self.stream.write + self._stream_write(self.formatSourceAnnotation()) + self._stream_write(s) + + def formatSourceAnnotation(self): + lineno = self.position[0] + if lineno is None: + location = self.sourceFile + else: + location = '%s (line %s)' % (self.sourceFile, lineno) + sep = '=' * 78 + return '' % (sep, location, sep) + + def stream_write(self, s, + len=len): + self._stream_write(s) + i = s.rfind('\n') + if i < 0: + self.col = self.col + len(s) + else: + self.col = len(s) - (i + 1) + + bytecode_handlers = {} + + def interpret(self, program): + oldlevel = self.level + self.level = oldlevel + 1 + handlers = self.dispatch + try: + if self.debug: + for (opcode, args) in program: + s = "%sdo_%s(%s)\n" % (" "*self.level, opcode, + repr(args)) + if len(s) > 80: + s = s[:76] + "...\n" + sys.stderr.write(s) + handlers[opcode](self, args) + else: + for (opcode, args) in program: + handlers[opcode](self, args) + finally: + 
self.level = oldlevel + + def do_version(self, version): + assert version == TAL_VERSION + bytecode_handlers["version"] = do_version + + def do_mode(self, mode): + assert mode in ("html", "xml") + self.html = (mode == "html") + if self.html: + self.endsep = " />" + else: + self.endsep = "/>" + self.endlen = len(self.endsep) + bytecode_handlers["mode"] = do_mode + + def do_setSourceFile(self, source_file): + self.sourceFile = source_file + self.engine.setSourceFile(source_file) + if self.sourceAnnotations: + self._pending_source_annotation = True + self._stream_write = self._annotated_stream_write + + bytecode_handlers["setSourceFile"] = do_setSourceFile + + def do_setPosition(self, position): + self.position = position + self.engine.setPosition(position) + bytecode_handlers["setPosition"] = do_setPosition + + def do_startEndTag(self, stuff): + self.do_startTag(stuff, self.endsep, self.endlen) + bytecode_handlers["startEndTag"] = do_startEndTag + + def do_startTag(self, stuff, end=">", endlen=1, _len=len): + # The bytecode generator does not cause calls to this method + # for start tags with no attributes; those are optimized down + # to rawtext events. Hence, there is no special "fast path" + # for that case. 
+ (name, attrList) = stuff + self._currentTag = name + L = ["<", name] + append = L.append + col = self.col + _len(name) + 1 + wrap = self.wrap + align = col + 1 + if align >= wrap/2: + align = 4 # Avoid a narrow column far to the right + attrAction = self.dispatch[""] + try: + for item in attrList: + if _len(item) == 2: + rendered = item[1:] + else: + # item[2] is the 'action' field: + if item[2] in ('metal', 'tal', 'xmlns', 'i18n'): + if not self.showtal: + continue + rendered = self.attrAction(item) + else: + rendered = attrAction(self, item) + if not rendered: + continue + for s in rendered: + slen = _len(s) + if (wrap and + col >= align and + col + 1 + slen > wrap): + append("\n") + append(" "*align) + col = align + slen + else: + append(" ") + col = col + 1 + slen + append(s) + append(end) + col = col + endlen + finally: + self._stream_write(_nulljoin(L)) + self.col = col + bytecode_handlers["startTag"] = do_startTag + + def attrAction(self, item): + name, value, action = item[:3] + if action == 'insert': + return () + macs = self.macroStack + if action == 'metal' and self.metal and macs: + # Drop all METAL attributes at a use-depth beyond the first + # use-macro and its extensions + if len(macs) > 1: + for macro in macs[1:]: + if not macro.extending: + return () + if not macs[-1].entering: + return () + macs[-1].entering = False + # Convert or drop depth-one METAL attributes. + i = name.rfind(":") + 1 + prefix, suffix = name[:i], name[i:] + if suffix == "define-macro": + # Convert define-macro as we enter depth one. 
+ useName = macs[0].macroName + defName = macs[0].definingName + res = [] + if defName: + res.append('%sdefine-macro=%s' % (prefix, quote(defName))) + if useName: + res.append('%suse-macro=%s' % (prefix, quote(useName))) + return res + elif suffix == "define-slot": + name = prefix + "fill-slot" + elif suffix == "fill-slot": + pass + else: + return () + + if value is None: + value = name + else: + value = "%s=%s" % (name, quote(value)) + return [value] + + def attrAction_tal(self, item): + name, value, action = item[:3] + ok = 1 + expr, xlat, msgid = item[3:] + if self.html and name.lower() in BOOLEAN_HTML_ATTRS: + evalue = self.engine.evaluateBoolean(item[3]) + if evalue is self.Default: + if action == 'insert': # Cancelled insert + ok = 0 + elif evalue: + value = None + else: + ok = 0 + elif expr is not None: + evalue = self.engine.evaluateText(item[3]) + if evalue is self.Default: + if action == 'insert': # Cancelled insert + ok = 0 + else: + if evalue is None: + ok = 0 + value = evalue + + if ok: + if xlat: + translated = self.translate(msgid or value, value) + if translated is not None: + value = translated + elif isinstance(value, I18nMessageTypes): + translated = self.translate(value) + if translated is not None: + value = translated + if value is None: + value = name + return ["%s=%s" % (name, quote(value))] + else: + return () + bytecode_handlers[""] = attrAction + + def no_tag(self, start, program): + state = self.saveState() + self.stream = stream = self.StringIO() + self._stream_write = stream.write + self.interpret(start) + self.restoreOutputState(state) + self.interpret(program) + + def do_optTag(self, stuff, omit=0): + (name, cexpr, tag_ns, isend, start, program) = stuff + if tag_ns and not self.showtal: + return self.no_tag(start, program) + + self.interpret(start) + if not isend: + self.interpret(program) + s = '' % name + self._stream_write(s) + self.col = self.col + len(s) + + def do_optTag_tal(self, stuff): + cexpr = stuff[1] + if cexpr is not 
None and (cexpr == '' or + self.engine.evaluateBoolean(cexpr)): + self.no_tag(stuff[-2], stuff[-1]) + else: + self.do_optTag(stuff) + bytecode_handlers["optTag"] = do_optTag + + def do_rawtextBeginScope(self, stuff): + (s, col, position, closeprev, dict) = stuff + self._stream_write(s) + self.col = col + self.do_setPosition(position) + if closeprev: + engine = self.engine + engine.endScope() + engine.beginScope() + else: + self.engine.beginScope() + self.scopeLevel = self.scopeLevel + 1 + + def do_rawtextBeginScope_tal(self, stuff): + (s, col, position, closeprev, dict) = stuff + self._stream_write(s) + self.col = col + engine = self.engine + self.position = position + engine.setPosition(position) + if closeprev: + engine.endScope() + engine.beginScope() + else: + engine.beginScope() + self.scopeLevel = self.scopeLevel + 1 + engine.setLocal("attrs", dict) + bytecode_handlers["rawtextBeginScope"] = do_rawtextBeginScope + + def do_beginScope(self, dict): + self.engine.beginScope() + self.scopeLevel = self.scopeLevel + 1 + + def do_beginScope_tal(self, dict): + engine = self.engine + engine.beginScope() + engine.setLocal("attrs", dict) + self.scopeLevel = self.scopeLevel + 1 + bytecode_handlers["beginScope"] = do_beginScope + + def do_endScope(self, notused=None): + self.engine.endScope() + self.scopeLevel = self.scopeLevel - 1 + bytecode_handlers["endScope"] = do_endScope + + def do_setLocal(self, notused): + pass + + def do_setLocal_tal(self, stuff): + (name, expr) = stuff + self.engine.setLocal(name, self.engine.evaluateValue(expr)) + bytecode_handlers["setLocal"] = do_setLocal + + def do_setGlobal_tal(self, stuff): + (name, expr) = stuff + self.engine.setGlobal(name, self.engine.evaluateValue(expr)) + bytecode_handlers["setGlobal"] = do_setLocal + + def do_beginI18nContext(self, settings): + get = settings.get + self.i18nContext = TranslationContext(self.i18nContext, + domain=get("domain"), + source=get("source"), + target=get("target")) + 
bytecode_handlers["beginI18nContext"] = do_beginI18nContext + + def do_endI18nContext(self, notused=None): + self.i18nContext = self.i18nContext.parent + assert self.i18nContext is not None + bytecode_handlers["endI18nContext"] = do_endI18nContext + + def do_insertText(self, stuff): + self.interpret(stuff[1]) + bytecode_handlers["insertText"] = do_insertText + bytecode_handlers["insertI18nText"] = do_insertText + + def _writeText(self, text): + # '&' must be done first! + s = text.replace( + "&", "&").replace("<", "<").replace(">", ">") + self._stream_write(s) + i = s.rfind('\n') + if i < 0: + self.col += len(s) + else: + self.col = len(s) - (i + 1) + + def do_insertText_tal(self, stuff): + text = self.engine.evaluateText(stuff[0]) + if text is None: + return + if text is self.Default: + self.interpret(stuff[1]) + return + if isinstance(text, I18nMessageTypes): + # Translate this now. + text = self.translate(text) + self._writeText(text) + + def do_insertI18nText_tal(self, stuff): + # TODO: Code duplication is BAD, we need to fix it later + text = self.engine.evaluateText(stuff[0]) + if text is not None: + if text is self.Default: + self.interpret(stuff[1]) + else: + if isinstance(text, TypesToTranslate): + text = self.translate(text) + self._writeText(text) + + def do_i18nVariable(self, stuff): + varname, program, expression, structure = stuff + if expression is None: + # The value is implicitly the contents of this tag, so we have to + # evaluate the mini-program to get the value of the variable. 
+ state = self.saveState() + try: + tmpstream = self.StringIO() + self.pushStream(tmpstream) + try: + self.interpret(program) + finally: + self.popStream() + if self.html and self._currentTag == "pre": + value = tmpstream.getvalue() + else: + value = normalize(tmpstream.getvalue()) + finally: + self.restoreState(state) + else: + # TODO: Seems like this branch not used anymore, we + # need to remove it + + # Evaluate the value to be associated with the variable in the + # i18n interpolation dictionary. + if structure: + value = self.engine.evaluateStructure(expression) + else: + value = self.engine.evaluate(expression) + + # evaluate() does not do any I18n, so we do it here. + if isinstance(value, I18nMessageTypes): + # Translate this now. + value = self.translate(value) + + if not structure: + value = cgi.escape(unicode(value)) + + # Either the i18n:name tag is nested inside an i18n:translate in which + # case the last item on the stack has the i18n dictionary and string + # representation, or the i18n:name and i18n:translate attributes are + # in the same tag, in which case the i18nStack will be empty. In that + # case we can just output the ${name} to the stream + i18ndict, srepr = self.i18nStack[-1] + i18ndict[varname] = value + placeholder = '${%s}' % varname + srepr.append(placeholder) + self._stream_write(placeholder) + bytecode_handlers['i18nVariable'] = do_i18nVariable + + def do_insertTranslation(self, stuff): + i18ndict = {} + srepr = [] + obj = None + self.i18nStack.append((i18ndict, srepr)) + msgid = stuff[0] + # We need to evaluate the content of the tag because that will give us + # several useful pieces of information. First, the contents will + # include an implicit message id, if no explicit one was given. + # Second, it will evaluate any i18nVariable definitions in the body of + # the translation (necessary for $varname substitutions). 
+ # + # Use a temporary stream to capture the interpretation of the + # subnodes, which should /not/ go to the output stream. + currentTag = self._currentTag + tmpstream = self.StringIO() + self.pushStream(tmpstream) + try: + self.interpret(stuff[1]) + finally: + self.popStream() + # We only care about the evaluated contents if we need an implicit + # message id. All other useful information will be in the i18ndict on + # the top of the i18nStack. + default = tmpstream.getvalue() + if not msgid: + if self.html and currentTag == "pre": + msgid = default + else: + msgid = normalize(default) + self.i18nStack.pop() + # See if there is was an i18n:data for msgid + if len(stuff) > 2: + obj = self.engine.evaluate(stuff[2]) + xlated_msgid = self.translate(msgid, default, i18ndict, obj) + # TODO: I can't decide whether we want to cgi escape the translated + # string or not. OTOH not doing this could introduce a cross-site + # scripting vector by allowing translators to sneak JavaScript into + # translations. OTOH, for implicit interpolation values, we don't + # want to escape stuff like ${name} <= "Timmy". 
+ assert xlated_msgid is not None + self._stream_write(xlated_msgid) + bytecode_handlers['insertTranslation'] = do_insertTranslation + + def do_insertStructure(self, stuff): + self.interpret(stuff[2]) + bytecode_handlers["insertStructure"] = do_insertStructure + bytecode_handlers["insertI18nStructure"] = do_insertStructure + + def do_insertStructure_tal(self, stuff): + (expr, repldict, block) = stuff + structure = self.engine.evaluateStructure(expr) + if structure is None: + return + if structure is self.Default: + self.interpret(block) + return + if isinstance(structure, I18nMessageTypes): + text = self.translate(structure) + else: + text = unicode(structure) + if not (repldict or self.strictinsert): + # Take a shortcut, no error checking + self.stream_write(text) + return + if self.html: + self.insertHTMLStructure(text, repldict) + else: + self.insertXMLStructure(text, repldict) + + def do_insertI18nStructure_tal(self, stuff): + # TODO: Code duplication is BAD, we need to fix it later + (expr, repldict, block) = stuff + structure = self.engine.evaluateStructure(expr) + if structure is not None: + if structure is self.Default: + self.interpret(block) + else: + if not isinstance(structure, TypesToTranslate): + structure = unicode(structure) + text = self.translate(structure) + if not (repldict or self.strictinsert): + # Take a shortcut, no error checking + self.stream_write(text) + elif self.html: + self.insertHTMLStructure(text, repldict) + else: + self.insertXMLStructure(text, repldict) + + def insertHTMLStructure(self, text, repldict): + from zope.tal.htmltalparser import HTMLTALParser + gen = AltTALGenerator(repldict, self.engine, 0) + p = HTMLTALParser(gen) # Raises an exception if text is invalid + p.parseString(text) + program, macros = p.getCode() + self.interpret(program) + + def insertXMLStructure(self, text, repldict): + from zope.tal.talparser import TALParser + gen = AltTALGenerator(repldict, self.engine, 0) + p = TALParser(gen) + gen.enable(0) + 
p.parseFragment('') + gen.enable(1) + p.parseFragment(text) # Raises an exception if text is invalid + gen.enable(0) + p.parseFragment('', 1) + program, macros = gen.getCode() + self.interpret(program) + + def do_evaluateCode(self, stuff): + lang, program = stuff + # Use a temporary stream to capture the interpretation of the + # subnodes, which should /not/ go to the output stream. + tmpstream = self.StringIO() + self.pushStream(tmpstream) + try: + self.interpret(program) + finally: + self.popStream() + code = tmpstream.getvalue() + output = self.engine.evaluateCode(lang, code) + self._stream_write(output) + bytecode_handlers["evaluateCode"] = do_evaluateCode + + def do_loop(self, stuff): + (name, expr, block) = stuff + self.interpret(block) + + def do_loop_tal(self, stuff): + (name, expr, block) = stuff + iterator = self.engine.setRepeat(name, expr) + while next(iterator): + self.interpret(block) + bytecode_handlers["loop"] = do_loop + + def translate(self, msgid, default=None, i18ndict=None, + obj=None, domain=None): + if default is None: + default = getattr(msgid, 'default', unicode(msgid)) + if i18ndict is None: + i18ndict = {} + if domain is None: + domain = getattr(msgid, 'domain', self.i18nContext.domain) + if obj: + i18ndict.update(obj) + if not self.i18nInterpolate: + return msgid + # TODO: We need to pass in one of context or target_language + return self.engine.translate(msgid, self.i18nContext.domain, + i18ndict, default=default) + + def do_rawtextColumn(self, stuff): + (s, col) = stuff + self._stream_write(s) + self.col = col + bytecode_handlers["rawtextColumn"] = do_rawtextColumn + + def do_rawtextOffset(self, stuff): + (s, offset) = stuff + self._stream_write(s) + self.col = self.col + offset + bytecode_handlers["rawtextOffset"] = do_rawtextOffset + + def do_condition(self, stuff): + (condition, block) = stuff + if not self.tal or self.engine.evaluateBoolean(condition): + self.interpret(block) + bytecode_handlers["condition"] = do_condition + + def 
do_defineMacro(self, stuff): + (macroName, macro) = stuff + wasInUse = self.inUseDirective + self.inUseDirective = False + self.interpret(macro) + self.inUseDirective = wasInUse + bytecode_handlers["defineMacro"] = do_defineMacro + + def do_useMacro(self, stuff, + definingName=None, extending=False): + (macroName, macroExpr, compiledSlots, block) = stuff + if not self.metal: + self.interpret(block) + return + macro = self.engine.evaluateMacro(macroExpr) + if macro is self.Default: + macro = block + else: + if not isCurrentVersion(macro): + raise METALError("macro %s has incompatible version %s" % + (repr(macroName), repr(getProgramVersion(macro))), + self.position) + mode = getProgramMode(macro) + if mode != (self.html and "html" or "xml"): + raise METALError("macro %s has incompatible mode %s" % + (repr(macroName), repr(mode)), self.position) + self.pushMacro(macroName, compiledSlots, definingName, extending) + + # We want 'macroname' name to be always available as a variable + outer = self.engine.getValue('macroname') + self.engine.setLocal('macroname', macroName.rsplit('/', 1)[-1]) + + prev_source = self.sourceFile + wasInUse = self.inUseDirective + self.inUseDirective = True + self.interpret(macro) + self.inUseDirective = wasInUse + + if self.sourceFile != prev_source: + self.engine.setSourceFile(prev_source) + self.sourceFile = prev_source + self.popMacro() + # Push the outer macroname again. + self.engine.setLocal('macroname', outer) + bytecode_handlers["useMacro"] = do_useMacro + + def do_extendMacro(self, stuff): + # extendMacro results from a combination of define-macro and + # use-macro. definingName has the value of the + # metal:define-macro attribute. 
+ (macroName, macroExpr, compiledSlots, block, definingName) = stuff + extending = self.metal and self.inUseDirective + self.do_useMacro((macroName, macroExpr, compiledSlots, block), + definingName, extending) + bytecode_handlers["extendMacro"] = do_extendMacro + + def do_fillSlot(self, stuff): + # This is only executed if the enclosing 'use-macro' evaluates + # to 'default'. + (slotName, block) = stuff + self.interpret(block) + bytecode_handlers["fillSlot"] = do_fillSlot + + def do_defineSlot(self, stuff): + (slotName, block) = stuff + if not self.metal: + self.interpret(block) + return + macs = self.macroStack + if macs: + len_macs = len(macs) + # Measure the extension depth of this use-macro + depth = 1 + while depth < len_macs: + if macs[-depth].extending: + depth += 1 + else: + break + # Search for a slot filler from the most specific to the + # most general macro. The most general is at the top of + # the stack. + slot = None + i = len_macs - 1 + while i >= (len_macs - depth): + slot = macs[i].slots.get(slotName) + if slot is not None: + break + i -= 1 + if slot is not None: + # Found a slot filler. Temporarily chop the macro + # stack starting at the macro that filled the slot and + # render the slot filler. + chopped = macs[i:] + del macs[i:] + try: + self.interpret(slot) + finally: + # Restore the stack entries. + for mac in chopped: + mac.entering = False # Not entering + macs.extend(chopped) + return + # Falling out of the 'if' allows the macro to be interpreted. + self.interpret(block) + bytecode_handlers["defineSlot"] = do_defineSlot + + def do_onError(self, stuff): + (block, handler) = stuff + self.interpret(block) + + def do_onError_tal(self, stuff): + (block, handler) = stuff + state = self.saveState() + self.stream = stream = self.StringIO() + self._stream_write = stream.write + try: + self.interpret(block) + # TODO: this should not catch ZODB.POSException.ConflictError. 
+ # The ITALExpressionEngine interface should provide a way of + # getting the set of exception types that should not be + # handled. + except: + exc = sys.exc_info()[1] + self.restoreState(state) + engine = self.engine + engine.beginScope() + error = engine.createErrorInfo(exc, self.position) + engine.setLocal('error', error) + try: + self.interpret(handler) + finally: + engine.endScope() + else: + self.restoreOutputState(state) + self.stream_write(stream.getvalue()) + bytecode_handlers["onError"] = do_onError + + bytecode_handlers_tal = bytecode_handlers.copy() + bytecode_handlers_tal["rawtextBeginScope"] = do_rawtextBeginScope_tal + bytecode_handlers_tal["beginScope"] = do_beginScope_tal + bytecode_handlers_tal["setLocal"] = do_setLocal_tal + bytecode_handlers_tal["setGlobal"] = do_setGlobal_tal + bytecode_handlers_tal["insertStructure"] = do_insertStructure_tal + bytecode_handlers_tal["insertI18nStructure"] = do_insertI18nStructure_tal + bytecode_handlers_tal["insertText"] = do_insertText_tal + bytecode_handlers_tal["insertI18nText"] = do_insertI18nText_tal + bytecode_handlers_tal["loop"] = do_loop_tal + bytecode_handlers_tal["onError"] = do_onError_tal + bytecode_handlers_tal[""] = attrAction_tal + bytecode_handlers_tal["optTag"] = do_optTag_tal + + +class FasterStringIO(list): + # Unicode-aware append-only version of StringIO. 
+ write = list.append + + def __init__(self, value=None): + list.__init__(self) + if value is not None: + self.append(value) + + def getvalue(self): + return _BLANK.join(self) + + +def _write_ValueError(s): + raise ValueError("I/O operation on closed file") diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/talparser.py b/thesisenv/lib/python3.6/site-packages/zope/tal/talparser.py new file mode 100644 index 0000000..d99fc9f --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/talparser.py @@ -0,0 +1,154 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +""" +Parse XML and compile to :class:`~.TALInterpreter` intermediate code, +using a :class:`~.TALGenerator`. +""" +from zope.tal.taldefs import XML_NS, ZOPE_I18N_NS, ZOPE_METAL_NS, ZOPE_TAL_NS +from zope.tal.talgenerator import TALGenerator +from zope.tal.xmlparser import XMLParser + + +class TALParser(XMLParser): + """ + Parser for XML. + + After parsing with :meth:`~.XMLParser.parseFile`, + :meth:`~.XMLParser.parseString`, :meth:`~.XMLParser.parseURL` or + :meth:`~.XMLParser.parseStream`, you can call :meth:`getCode` to + retrieve the parsed program and macros. + """ + + ordered_attributes = 1 + + def __init__(self, gen=None, encoding=None): # Override + """ + :keyword TALGenerator gen: The configured (with an expression compiler) + code generator to use. 
If one is not given, a default will be used. + """ + XMLParser.__init__(self, encoding) + if gen is None: + gen = TALGenerator() + self.gen = gen + self.nsStack = [] + self.nsDict = {XML_NS: 'xml'} + self.nsNew = [] + + def getCode(self): + """Return the compiled program and macros after parsing.""" + return self.gen.getCode() + + def StartNamespaceDeclHandler(self, prefix, uri): + self.nsStack.append(self.nsDict.copy()) + self.nsDict[uri] = prefix + self.nsNew.append((prefix, uri)) + + def EndNamespaceDeclHandler(self, prefix): + self.nsDict = self.nsStack.pop() + + def StartElementHandler(self, name, attrs): + if self.ordered_attributes: + # attrs is a list of alternating names and values + attrlist = [] + for i in range(0, len(attrs), 2): + key = attrs[i] + value = attrs[i + 1] + attrlist.append((key, value)) + else: + # attrs is a dict of {name: value} + attrlist = sorted(attrs.items()) # sort for definiteness + name, attrlist, taldict, metaldict, i18ndict \ + = self.process_ns(name, attrlist) + attrlist = self.xmlnsattrs() + attrlist + self.gen.emitStartElement(name, attrlist, taldict, metaldict, i18ndict, + self.getpos()) + + def process_ns(self, name, attrlist): + taldict = {} + metaldict = {} + i18ndict = {} + fixedattrlist = [] + name, namebase, namens = self.fixname(name) + for key, value in attrlist: + key, keybase, keyns = self.fixname(key) + ns = keyns or namens # default to tag namespace + item = key, value + if ns == 'metal': + metaldict[keybase] = value + item = item + ("metal",) + elif ns == 'tal': + taldict[keybase] = value + item = item + ("tal",) + elif ns == 'i18n': + i18ndict[keybase] = value + item = item + ('i18n',) + fixedattrlist.append(item) + if namens in ('metal', 'tal', 'i18n'): + taldict['tal tag'] = namens + return name, fixedattrlist, taldict, metaldict, i18ndict + + _namespaces = { + ZOPE_TAL_NS: "tal", + ZOPE_METAL_NS: "metal", + ZOPE_I18N_NS: "i18n", + } + + def xmlnsattrs(self): + newlist = [] + for prefix, uri in self.nsNew: + 
if prefix: + key = "xmlns:" + prefix + else: + key = "xmlns" + if uri in self._namespaces: + item = (key, uri, "xmlns") + else: + item = (key, uri) + newlist.append(item) + self.nsNew = [] + return newlist + + def fixname(self, name): + if ' ' in name: + uri, name = name.split(' ', 1) + prefix = self.nsDict[uri] + prefixed = name + if prefix: + prefixed = "%s:%s" % (prefix, name) + ns = self._namespaces.get(uri, "x") + return (prefixed, name, ns) + return (name, name, None) + + def EndElementHandler(self, name): + name = self.fixname(name)[0] + self.gen.emitEndElement(name, position=self.getpos()) + + def DefaultHandler(self, text): + self.gen.emitRawText(text) + +def test(): + import sys + p = TALParser() + file = "tests/input/test01.xml" + if sys.argv[1:]: + file = sys.argv[1] + p.parseFile(file) + program, macros = p.getCode() + from zope.tal.talinterpreter import TALInterpreter + from zope.tal.dummyengine import DummyEngine + engine = DummyEngine(macros) + TALInterpreter(program, macros, engine, sys.stdout, wrap=0)() + +if __name__ == "__main__": + test() diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/__init__.py b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/__init__.py new file mode 100644 index 0000000..05f0007 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/__init__.py @@ -0,0 +1,13 @@ +############################################################################## +# +# Copyright (c) 2015 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/__init__.py b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/__init__.py new file mode 100644 index 0000000..b711d36 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/__init__.py @@ -0,0 +1,2 @@ +# +# This file is necessary to make this directory a package. diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/acme_template.pt b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/acme_template.pt new file mode 100644 index 0000000..0af01ba --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/acme_template.pt @@ -0,0 +1,15 @@ + + + +ACME Look and Feel + + +

      +Copyright 2004 Acme Inc. +
      +Standard disclaimers apply. +
      +
      + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/document_list.pt b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/document_list.pt new file mode 100644 index 0000000..8226be1 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/document_list.pt @@ -0,0 +1,21 @@ + + + +Acme Document List + + + +
      +

      Documents

      +
        +
      • Rocket Science for Dummies
      • +
      • Birds for the Gourmet Chef
      • +
      +
      +
      +This document list is classified. +
      + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/pnome_template.pt b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/pnome_template.pt new file mode 100644 index 0000000..f4d1c66 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/pnome_template.pt @@ -0,0 +1,23 @@ + + + +Title here + + + + +
      +
      + "The early bird gets the worm, but the second mouse gets the cheese." +
      + Preferences... +
      +
      + Content here +
      +
      + page footer +
      + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test01.html b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test01.html new file mode 100644 index 0000000..e2ae0c4 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test01.html @@ -0,0 +1,56 @@ + + + + dadada + + +

      This title is not displayed

      +

      Title

      + + +  &HarryPotter; + + + + + +

      + +

      + +

      + +

      foo bar

      + + + +
        + +
      • Car Name
      • +
        +
      + + + + python + python + + + + + + + + + + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test01.xml b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test01.xml new file mode 100644 index 0000000..82038e9 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test01.xml @@ -0,0 +1,57 @@ + + + + + dadada + + +

      This title is not displayed

      +

      Title

      + + +  &HarryPotter; + + + + + +

      + +

      + +

      + +

      foo bar

      + + + +
        + +
      • Car Name
      • +
        +
      + + + + python + python + + + + + + + + + + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test02.html b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test02.html new file mode 100644 index 0000000..df2fb18 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test02.html @@ -0,0 +1,118 @@ + + + + + + sample1 + a simple invoice + + + + + + + + + +
      + 01786 + 2000-03-17 + 55377 + 2000-03-15 + GJ03405 + DAVE 1 + 2000-03-17 + K5211(34) + 23 + 23 +
      + + SHIPWRIGHT RESTAURANTS LIMITED + 125 NORTH SERVICE ROAD W + WESTLAKE ACCESS + NORTH BAY + L8B1O5 + ONTARIO + CANADA + + + + ATTN: PAULINE DEGRASSI + + + + + + + + 1 + CS + DM 5309 + #1013 12 OZ.MUNICH STEIN + 37.72 + 37.72 + + + 6 + DZ + ON 6420 + PROVINCIAL DINNER FORK + 17.98 + 107.88 + + + 72 + EA + JR20643 + PLASTIC HANDLED STEAK KNIFE + .81 + 58.32 + + + 6 + DZ + ON 6410 + PROVINCIAL TEASPOONS + 12.16 + 72.96 + + + 0 + DZ + ON 6411 + PROVINCIAL RD BOWL SPOON + 6 + 17.98 + 0.00 + + + 1 + EA + DO 3218 + 34 OZ DUAL DIAL SCALE AM3218 + 70.00 + 5.0 + 66.50 + + + 1 + CS + DM 195 + 20 OZ.BEER PUB GLASS + 55.90 + 55.90 + + + + 399.28 + 3.50 + 23.75 + 29.61 + 33.84 + 33.84 + 486.48 + +
      + + +
      diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test02.xml b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test02.xml new file mode 100644 index 0000000..69567ea --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test02.xml @@ -0,0 +1,119 @@ + + + + + + + sample1 + a simple invoice + + + + + + + + + +
      + 01786 + 2000-03-17 + 55377 + 2000-03-15 + GJ03405 + DAVE 1 + 2000-03-17 + K5211(34) + 23 + 23 +
      + + SHIPWRIGHT RESTAURANTS LIMITED + 125 NORTH SERVICE ROAD W + WESTLAKE ACCESS + NORTH BAY + L8B1O5 + ONTARIO + CANADA + + + + ATTN: PAULINE DEGRASSI + + + + + + + + 1 + CS + DM 5309 + #1013 12 OZ.MUNICH STEIN + 37.72 + 37.72 + + + 6 + DZ + ON 6420 + PROVINCIAL DINNER FORK + 17.98 + 107.88 + + + 72 + EA + JR20643 + PLASTIC HANDLED STEAK KNIFE + .81 + 58.32 + + + 6 + DZ + ON 6410 + PROVINCIAL TEASPOONS + 12.16 + 72.96 + + + 0 + DZ + ON 6411 + PROVINCIAL RD BOWL SPOON + 6 + 17.98 + 0.00 + + + 1 + EA + DO 3218 + 34 OZ DUAL DIAL SCALE AM3218 + 70.00 + 5.0 + 66.50 + + + 1 + CS + DM 195 + 20 OZ.BEER PUB GLASS + 55.90 + 55.90 + + + + 399.28 + 3.50 + 23.75 + 29.61 + 33.84 + 33.84 + 486.48 + +
      + + +
      diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test03.html b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test03.html new file mode 100644 index 0000000..a0230e1 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test03.html @@ -0,0 +1,9 @@ +

      + + outer variable x, first appearance + + inner variable x + + outer variable x, second appearance + +

      diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test03.xml b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test03.xml new file mode 100644 index 0000000..830149d --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test03.xml @@ -0,0 +1,10 @@ + +

      + + outer variable x, first appearance + + inner variable x + + outer variable x, second appearance + +

      diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test04.html b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test04.html new file mode 100644 index 0000000..bdaad39 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test04.html @@ -0,0 +1,26 @@ + + + + +
        +
      • + 1 + +
      • +
      + + + +

      use-macro + fill-slot +

      + + + +

      use-macro

      + +

      define-slot

      + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test04.xml b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test04.xml new file mode 100644 index 0000000..bde6cef --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test04.xml @@ -0,0 +1,27 @@ + + + + + +
        +
      • + 1 + +
      • +
      + + + +

      use-macro + fill-slot +

      + + + +

      use-macro

      + +

      define-slot

      + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test05.html b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test05.html new file mode 100644 index 0000000..21f6b68 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test05.html @@ -0,0 +1,9 @@ + + + + +

      This is the body of test5

      + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test05.xml b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test05.xml new file mode 100644 index 0000000..fcaaf6b --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test05.xml @@ -0,0 +1,10 @@ + + + + + +

      This is the body of test5

      + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test06.html b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test06.html new file mode 100644 index 0000000..ac1264d --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test06.html @@ -0,0 +1,6 @@ + + + dummy body in test6 + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test06.xml b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test06.xml new file mode 100644 index 0000000..b32bd0f --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test06.xml @@ -0,0 +1,7 @@ + + + + dummy body in test6 + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test07.html b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test07.html new file mode 100644 index 0000000..bff98f0 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test07.html @@ -0,0 +1,11 @@ + + + + + + + + + + +
      Top LeftTop Right
      Bottom leftBottom Right
      diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test07.xml b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test07.xml new file mode 100644 index 0000000..e5c520a --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test07.xml @@ -0,0 +1,12 @@ + + + + + + + + + + + +
      Top LeftTop Right
      Bottom leftBottom Right
      diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test08.html b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test08.html new file mode 100644 index 0000000..1e4915b --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test08.html @@ -0,0 +1,44 @@ + + + + + +
      + +

      Some headline

      +

      This is the real contents of the bottom right slot.

      +

      It is supposed to contain a lot of text. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb.

      +

      It is supposed to contain a lot of text. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb.

      +

      It is supposed to contain a lot of text. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb.

      +
      +
      diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test08.xml b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test08.xml new file mode 100644 index 0000000..b0360fa --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test08.xml @@ -0,0 +1,45 @@ + + + + + + +
      + +

      Some headline

      +

      This is the real contents of the bottom right slot.

      +

      It is supposed to contain a lot of text. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb.

      +

      It is supposed to contain a lot of text. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb.

      +

      It is supposed to contain a lot of text. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb.

      +
      +
      diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test09.html b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test09.html new file mode 100644 index 0000000..35f481a --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test09.html @@ -0,0 +1,30 @@ + + +

      + Just a bunch of text. +

      more text... +

        +
      • first item +
      • second item + +
          +
        1. second list, first item +
        2. second list, second item +
          +
          term 1 +
          term 2 +
          definition +
          +
        + +
      • Now let's have a paragraph... +

        My Paragraph +

      • + +
      • And a table in a list item: + +
        +
      + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test09.xml b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test09.xml new file mode 100644 index 0000000..c3d10d7 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test09.xml @@ -0,0 +1,30 @@ + + +

      + Just a bunch of text.

      +

      more text...

      +
        +
      • first item
      • +
      • second item + +
          +
        1. second list, first item
        2. +
        3. second list, second item +
          +
          term 1
          +
          term 2
          +
          definition
          +
        4. +
      • + +
      • Now let's have a paragraph... +

        My Paragraph

        +
      • + +
      • And a table in a list item: + +
      • +
      + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test10.html b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test10.html new file mode 100644 index 0000000..6ecca4c --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test10.html @@ -0,0 +1,48 @@ + + + + + + +
      + +

      Some headline

      +

      This is the real contents of the bottom right slot.

      +
      +

      It is supposed to contain a lot of text. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb.

      +

      It is supposed to contain a lot of text. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb.

      +

      It is supposed to contain a lot of text. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb.

      +

      +
      +
      + diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test11.html b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test11.html new file mode 100644 index 0000000..89f7563 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test11.html @@ -0,0 +1,19 @@ + +

      dummy text

      +

      + + + + + +

      +

      + +

      + + +

      p

      +
      +
      rule
      + diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test11.xml b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test11.xml new file mode 100644 index 0000000..435f95c --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test11.xml @@ -0,0 +1,14 @@ + +

      dummy text

      +

      + + + + + +

      +

      + +

      + diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test12.html b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test12.html new file mode 100644 index 0000000..94d9a66 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test12.html @@ -0,0 +1,24 @@ + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test13.html b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test13.html new file mode 100644 index 0000000..d68e0ce --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test13.html @@ -0,0 +1,7 @@ +Here's a stray greater than: > + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test14.html b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test14.html new file mode 100644 index 0000000..0aaa751 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test14.html @@ -0,0 +1,10 @@ + + + + +
      +
      + +

      + +

      diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test14.xml b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test14.xml new file mode 100644 index 0000000..c596135 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test14.xml @@ -0,0 +1,15 @@ + + + + + + + +
      +
      + +

      + +

      + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test15.html b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test15.html new file mode 100644 index 0000000..0cd456e --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test15.html @@ -0,0 +1,26 @@ + + INNERSLOT + + + + inner-argument + + +
      +
      + + OUTERSLOT + +
      +
      + +
      + + +
      outer-argument
      +
      +
      +
      + +
      +
      diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test16.html b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test16.html new file mode 100644 index 0000000..1414f45 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test16.html @@ -0,0 +1,2 @@ +blah, blah diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test16.xml b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test16.xml new file mode 100644 index 0000000..2efb2ab --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test16.xml @@ -0,0 +1,7 @@ + + + +bar + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test17.html b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test17.html new file mode 100644 index 0000000..5a5ebb3 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test17.html @@ -0,0 +1,6 @@ +No +No +Yes + +No +Yes diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test17.xml b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test17.xml new file mode 100644 index 0000000..ecb617a --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test17.xml @@ -0,0 +1,10 @@ + + +No +No +Yes + +No +Yes + diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test18.html b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test18.html new file mode 100644 index 0000000..c3a5c26 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test18.html @@ -0,0 +1,16 @@ +

      Content

      +

      + + +

      Content

      +

      + + +

      Content

      +

      + + +

      No

      +

      No

      +

      Yes

      +

      Yes

      diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test18.xml b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test18.xml new file mode 100644 index 0000000..5a0cca4 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test18.xml @@ -0,0 +1,20 @@ + + +

      Content

      +

      + + +

      Content

      +

      + + +

      Content

      +

      + + +

      No

      +

      No

      +

      Yes

      +

      Yes

      + diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test19.html b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test19.html new file mode 100644 index 0000000..a56632a --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test19.html @@ -0,0 +1,5 @@ +Replace this +This is a +translated string +And another +translated string diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test19.xml b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test19.xml new file mode 100644 index 0000000..fe4bf79 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test19.xml @@ -0,0 +1,8 @@ + + +Replace this +This is a +translated string +And another +translated string + diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test20.html b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test20.html new file mode 100644 index 0000000..f302213 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test20.html @@ -0,0 +1 @@ +replaceable

      content

      diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test20.xml b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test20.xml new file mode 100644 index 0000000..5050883 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test20.xml @@ -0,0 +1,6 @@ + + +replaceable

      content

      + diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test21.html b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test21.html new file mode 100644 index 0000000..95f925e --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test21.html @@ -0,0 +1,4 @@ + + was born in + . + diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test21.xml b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test21.xml new file mode 100644 index 0000000..eea370b --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test21.xml @@ -0,0 +1,9 @@ + + + + was born in + . + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test22.html b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test22.html new file mode 100644 index 0000000..a4a7e93 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test22.html @@ -0,0 +1,4 @@ + + Jim was born in + the USA. 
+ diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test22.xml b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test22.xml new file mode 100644 index 0000000..54b57d8 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test22.xml @@ -0,0 +1,6 @@ + + + content + omit + replace + diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test23.html b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test23.html new file mode 100644 index 0000000..bfe6665 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test23.html @@ -0,0 +1,2 @@ +2:32 pm diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test24.html b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test24.html new file mode 100644 index 0000000..6d53984 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test24.html @@ -0,0 +1,12 @@ + + + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test25.html b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test25.html new file mode 100644 index 0000000..25a99cf --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test25.html @@ -0,0 +1 @@ + diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test26.html b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test26.html new file mode 100644 index 0000000..fa5a99d --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test26.html @@ -0,0 +1,3 @@ + + Job #NN diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test27.html b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test27.html new file mode 100644 index 0000000..b9c16cb --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test27.html @@ -0,0 +1,5 @@ +

      Your contact email address is recorded as + user@host.com +

      diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test28.html b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test28.html new file mode 100644 index 0000000..0364663 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test28.html @@ -0,0 +1,5 @@ +

      Your contact email address is recorded as + + user@host.com +

      diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test29.html b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test29.html new file mode 100644 index 0000000..e2f1e82 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test29.html @@ -0,0 +1,4 @@ +
      At the tone the time will be +2:32 pm... beep!
      diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test30.html b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test30.html new file mode 100644 index 0000000..6f8c6ef --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test30.html @@ -0,0 +1,6 @@ +

      Your contact email address is recorded as +user@host.com +

      diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test31.html b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test31.html new file mode 100644 index 0000000..c927f42 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test31.html @@ -0,0 +1,7 @@ +

      Your contact email address is recorded as + + + user@host.com +

      diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test32.html b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test32.html new file mode 100644 index 0000000..3b09bad --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test32.html @@ -0,0 +1,4 @@ + + was born in + . + diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test33.html b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test33.html new file mode 100644 index 0000000..f5dcf58 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test33.html @@ -0,0 +1 @@ +don't translate me diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test34.html b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test34.html new file mode 100644 index 0000000..4cd6ff0 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test34.html @@ -0,0 +1,11 @@ + + stuff + + more stuff + + + + stuff + + more stuff + diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test35.html b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test35.html new file mode 100644 index 0000000..7964e9f --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test35.html @@ -0,0 +1,7 @@ + +

      + + + +

      name

      +
      \ No newline at end of file diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test36.html b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test36.html new file mode 100644 index 0000000..bf4932a --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test36.html @@ -0,0 +1,6 @@ + + + + + some text + diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test37.html b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test37.html new file mode 100644 index 0000000..601f7af --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test37.html @@ -0,0 +1,3 @@ + + Test + diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test38.html b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test38.html new file mode 100644 index 0000000..4bc3d1e --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test38.html @@ -0,0 +1,8 @@ + + Test + + Python is a programming language. + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test_domain.html b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test_domain.html new file mode 100644 index 0000000..95d40a2 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test_domain.html @@ -0,0 +1,7 @@ +
      +Replace this +This is a +translated string +And another +translated string +
      diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test_failed_attr_translation.html b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test_failed_attr_translation.html new file mode 100644 index 0000000..1c395c7 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test_failed_attr_translation.html @@ -0,0 +1,2 @@ + diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test_metal1.html b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test_metal1.html new file mode 100644 index 0000000..a5371ce --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test_metal1.html @@ -0,0 +1,61 @@ + + AAA + INNER + BBB + + + + + + + + + + AAA + + INNER + + BBB + + + + + + + + + + OUTERSLOT + + + + AAA + + INNER + INNERSLOT + + + BBB + + + + + + + OUTERSLOT + + + + + + + INNERSLOT + + + + + INSLOT + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test_metal2.html b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test_metal2.html new file mode 100644 index 0000000..425508a --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test_metal2.html @@ -0,0 +1,7 @@ +
      + OUTER + INNER + OUTER +
      + +
      diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test_metal3.html b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test_metal3.html new file mode 100644 index 0000000..b0af907 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test_metal3.html @@ -0,0 +1 @@ +Should not get attr in metal diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test_metal4.html b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test_metal4.html new file mode 100644 index 0000000..dc774d3 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test_metal4.html @@ -0,0 +1,4 @@ + + + Z3 UI + diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test_metal5.html b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test_metal5.html new file mode 100644 index 0000000..8bae3d8 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test_metal5.html @@ -0,0 +1,4 @@ + + + Z3 UI + diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test_metal6.html b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test_metal6.html new file mode 100644 index 0000000..ce243f2 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test_metal6.html @@ -0,0 +1,5 @@ + + + Z3 UI + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test_metal7.html b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test_metal7.html new file mode 100644 index 0000000..75ec511 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test_metal7.html @@ -0,0 +1,6 @@ + + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test_metal8.html b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test_metal8.html new file mode 100644 index 0000000..40d8a43 --- /dev/null +++ 
b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test_metal8.html @@ -0,0 +1,15 @@ + + +
      +
      +Default body +
      +
      + + + + +
      +Filled-in body +
      + diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test_metal9.html b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test_metal9.html new file mode 100644 index 0000000..46b1b45 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test_metal9.html @@ -0,0 +1,23 @@ +
      + +Default for macro1 + +
      + +
      + +Macro 2's slot 1 decoration + +Default for macro2 + + +
      + +
      +
      + +
      + +Custom slot1 + +
      diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test_sa1.html b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test_sa1.html new file mode 100644 index 0000000..8879865 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test_sa1.html @@ -0,0 +1,6 @@ + +Simple test of source annotations + +

      Foo!

      + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test_sa1.xml b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test_sa1.xml new file mode 100644 index 0000000..d00a46d --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test_sa1.xml @@ -0,0 +1,7 @@ + + +Simple test of source annotations + +

      Foo!

      + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test_sa2.html b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test_sa2.html new file mode 100644 index 0000000..1c4e06b --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test_sa2.html @@ -0,0 +1,9 @@ + + +Simple test of source annotations + +

      Foo!

      + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test_sa2.xml b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test_sa2.xml new file mode 100644 index 0000000..b54d6a1 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test_sa2.xml @@ -0,0 +1,10 @@ + + + +Simple test of source annotations + +

      Foo!

      + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test_sa3.html b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test_sa3.html new file mode 100644 index 0000000..675805d --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test_sa3.html @@ -0,0 +1,15 @@ + + +
      This is macro1 on sa3 line 3. + This is slot1 on sa3 line 4. + This is the end of macro1 on sa3 line 5. +
      +

      Some text on sa3 line 7.

      +

      + This text on sa3 line 9 will disappear. + Text from sa3 line 10 is filled into slot1. + This text on sa3 line 11 will disappear. +

      +

      This is some text on sa3 line 13.

      + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test_sa3.xml b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test_sa3.xml new file mode 100644 index 0000000..79e3251 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test_sa3.xml @@ -0,0 +1,16 @@ + + + +
      This is macro1 on sa3 line 4. + This is slot1 on sa3 line 5. + This is the end of macro1 on sa3 line 6. +
      +

      Some text on sa3 line 8.

      +

      + This text on sa3 line 10 will disappear. + Text from sa3 line 11 is filled into slot1. + This text on sa3 line 12 will disappear. +

      +

      This is some text on sa3 line 14.

      + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test_sa4.html b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test_sa4.html new file mode 100644 index 0000000..97596f6 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/input/test_sa4.html @@ -0,0 +1,11 @@ + + +

      Some text on sa4 line 3.

      +

      + This text on sa4 line 5 will disappear. + Text from sa4 line 6 is filled into slot1. + This text on sa4 line 7 will disappear. +

      +

      This is some text on sa4 line 9.

      + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/markbench.py b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/markbench.py new file mode 100644 index 0000000..3122cb2 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/markbench.py @@ -0,0 +1,188 @@ +#! /usr/bin/env python +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Run benchmarks of TAL vs. 
DTML +""" + +from __future__ import print_function + +import warnings +warnings.filterwarnings("ignore", category=DeprecationWarning) + +import os +os.environ['NO_SECURITY'] = 'true' + +import getopt +import sys +import time +import errno + +from cStringIO import StringIO + +#from zope.documenttemplate.dt_html import HTMLFile + +from zope.tal.htmltalparser import HTMLTALParser +from zope.tal.talinterpreter import TALInterpreter +from zope.tal.dummyengine import DummyEngine + + +def time_apply(f, args, kwargs, count): + r = [None] * count + for i in range(4): + f(*args, **kwargs) + t0 = time.clock() + for i in r: + pass + t1 = time.clock() + for i in r: + f(*args, **kwargs) + t = time.clock() - t1 - (t1 - t0) + return t / count + +def time_zpt(fn, count): + from zope.pagetemplate.pagetemplate import PageTemplate + pt = PageTemplate() + pt.write(open(fn).read()) + return time_apply(pt.pt_render, (data,), {}, count) + +def time_tal(fn, count): + p = HTMLTALParser() + p.parseFile(fn) + program, macros = p.getCode() + engine = DummyEngine(macros) + engine.globals = data + tal = TALInterpreter(program, macros, engine, StringIO(), wrap=0, + tal=1, strictinsert=0) + return time_apply(tal, (), {}, count) + +def time_dtml(fn, count): + html = HTMLFile(fn) + return time_apply(html, (), data, count) + +def profile_zpt(fn, count, profiler): + from zope.pagetemplate.pagetemplate import PageTemplate + pt = PageTemplate() + pt.write(open(fn).read()) + for i in range(4): + pt.pt_render(extra_context=data) + r = [None] * count + for i in r: + profiler.runcall(pt.pt_render, 0, data) + +def profile_tal(fn, count, profiler): + p = HTMLTALParser() + p.parseFile(fn) + program, macros = p.getCode() + engine = DummyEngine(macros) + engine.globals = data + tal = TALInterpreter(program, macros, engine, StringIO(), wrap=0, + tal=1, strictinsert=0) + for i in range(4): + tal() + r = [None] * count + for i in r: + profiler.runcall(tal) + +# Figure out where the benchmark files are: +try: + 
fname = __file__ +except NameError: + fname = sys.argv[0] +taldir = os.path.dirname(os.path.dirname(os.path.abspath(fname))) +benchdir = os.path.join(taldir, 'benchmark') + +# Construct templates for the filenames: +tal_fn = os.path.join(benchdir, 'tal%.2d.html') +dtml_fn = os.path.join(benchdir, 'dtml%.2d.html') + +def compare(n, count, profiler=None, verbose=1): + if verbose: + t1 = int(time_zpt(tal_fn % n, count) * 1000 + 0.5) + t2 = int(time_tal(tal_fn % n, count) * 1000 + 0.5) + t3 = 'n/a' # int(time_dtml(dtml_fn % n, count) * 1000 + 0.5) + print('%.2d: %10s %10s %10s' % (n, t1, t2, t3)) + if profiler: + profile_tal(tal_fn % n, count, profiler) + +def main(count, profiler=None, verbose=1): + n = 1 + if verbose: + print('##: %10s %10s %10s' % ('ZPT', 'TAL', 'DTML')) + while os.path.isfile(tal_fn % n) and os.path.isfile(dtml_fn % n): + compare(n, count, profiler, verbose) + n = n + 1 + +def get_signal_name(sig): + import signal + for name in dir(signal): + if getattr(signal, name) == sig: + return name + return None + +data = {'x':'X', 'r2': [0, 1], 'r8': list(range(8)), 'r64': list(range(64))} +for i in range(10): + data['x%s' % i] = 'X%s' % i + +if __name__ == "__main__": + filename = "markbench.prof" + profiler = None + runtests = False + verbose = True + + opts, args = getopt.getopt(sys.argv[1:], "pqt") + for opt, arg in opts: + if opt == "-p": + import profile + profiler = profile.Profile() + elif opt == "-q": + verbose = False + elif opt == "-t": + runtests = True + + if runtests: + srcdir = os.path.dirname(os.path.dirname(taldir)) + topdir = os.path.dirname(srcdir) + pwd = os.getcwd() + os.chdir(topdir) + rc = os.spawnl(os.P_WAIT, sys.executable, + sys.executable, "test.py", "zope.tal.tests") + if rc > 0: + # TODO: Failing tests don't cause test.py to report an + # error; not sure why. ;-( + sys.exit(rc) + elif rc < 0: + sig = -rc + print(( + "Process exited, signal %d (%s)." 
+ % (sig, get_signal_name(sig) or "")), file=sys.stderr) + sys.exit(1) + os.chdir(pwd) + + if len(args) >= 1: + for arg in args: + compare(int(arg), 25, profiler, verbose) + else: + main(25, profiler, verbose) + + if profiler is not None: + profiler.dump_stats(filename) + import pstats + p = pstats.Stats(filename) + p.strip_dirs() + p.sort_stats('time', 'calls') + try: + p.print_stats(20) + except IOError as e: + if e.errno != errno.EPIPE: + raise diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/__init__.py b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/__init__.py new file mode 100644 index 0000000..b711d36 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/__init__.py @@ -0,0 +1,2 @@ +# +# This file is necessary to make this directory a package. diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/acme_template.html b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/acme_template.html new file mode 100644 index 0000000..3d37355 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/acme_template.html @@ -0,0 +1,26 @@ + + + +ACME Look and Feel + + + + +
      +
      + "The early bird gets the worm, but the second mouse gets the cheese." +
      + Preferences... +
      +
      + Content here +
      +
      +Copyright 2004 Acme Inc. +
      +Standard disclaimers apply. +
      +
      + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/acme_template_source.html b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/acme_template_source.html new file mode 100644 index 0000000..1bbda0b --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/acme_template_source.html @@ -0,0 +1,26 @@ + + + +ACME Look and Feel + + + + +
      +
      + "The early bird gets the worm, but the second mouse gets the cheese." +
      + Preferences... +
      +
      + Content here +
      +
      +Copyright 2004 Acme Inc. +
      +Standard disclaimers apply. +
      +
      + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/document_list.html b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/document_list.html new file mode 100644 index 0000000..9e0ea10 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/document_list.html @@ -0,0 +1,30 @@ + + + +Acme Document List + + + +
      +
      + "The early bird gets the worm, but the second mouse gets the cheese." +
      + Preferences... +
      +
      +

      Documents

      +
        +
      • Rocket Science for Dummies
      • +
      • Birds for the Gourmet Chef
      • +
      +
      +
      +Copyright 2004 Acme Inc. +
      +This document list is classified. +
      +
      + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/document_list_source.html b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/document_list_source.html new file mode 100644 index 0000000..69600e0 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/document_list_source.html @@ -0,0 +1,30 @@ + + + +Acme Document List + + + +
      +
      + "The early bird gets the worm, but the second mouse gets the cheese." +
      + Preferences... +
      +
      +

      Documents

      +
        +
      • Rocket Science for Dummies
      • +
      • Birds for the Gourmet Chef
      • +
      +
      +
      +Copyright 2004 Acme Inc. +
      +This document list is classified. +
      +
      + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test01.html b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test01.html new file mode 100644 index 0000000..7064db0 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test01.html @@ -0,0 +1,68 @@ + + + + dadada + + + +

      This +Is +The +Replaced +Title

      + + +  &HarryPotter; + + + + here/id + +

      5

      + +

      + honda +

      +

      + subaru +

      +

      + acura +

      + +

      foo bar

      + + + +
        + +
      • honda
      • +
        + +
      • subaru
      • +
        + +
      • acura
      • +
        +
      + + + + python + python + + + + + + + + +  

      Header Level 3

      +  

      Header Level 3

      + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test01.xml b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test01.xml new file mode 100644 index 0000000..91e9851 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test01.xml @@ -0,0 +1,65 @@ + + + + + dadada + + + +

      This Is The Replaced Title

      + + +  &HarryPotter; + + + + here/id + +

      5

      + +

      + honda +

      +

      + subaru +

      +

      + acura +

      + +

      foo bar

      + + + +
        + +
      • honda
      • +
        + +
      • subaru
      • +
        + +
      • acura
      • +
        +
      + + + + python + python + + + + + + + + +  

      Header Level 3

      +  

      Header Level 3

      + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test02.html b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test02.html new file mode 100644 index 0000000..8d081fc --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test02.html @@ -0,0 +1,118 @@ + + + + + + sample1 + a simple invoice + + + + + + + + + +
      + 01786 + 2000-03-17 + 55377 + 2000-03-15 + GJ03405 + DAVE 1 + 2000-03-17 + K5211(34) + 23 + 23 +
      + + SHIPWRIGHT RESTAURANTS LIMITED + 125 NORTH SERVICE ROAD W + WESTLAKE ACCESS + NORTH BAY + L8B1O5 + ONTARIO + CANADA + + + + ATTN: PAULINE DEGRASSI + + + + + + + + 1 + CS + DM 5309 + #1013 12 OZ.MUNICH STEIN + 37.72 + 37.72 + + + 6 + DZ + ON 6420 + PROVINCIAL DINNER FORK + 17.98 + 107.88 + + + 72 + EA + JR20643 + PLASTIC HANDLED STEAK KNIFE + .81 + 58.32 + + + 6 + DZ + ON 6410 + PROVINCIAL TEASPOONS + 12.16 + 72.96 + + + 0 + DZ + ON 6411 + PROVINCIAL RD BOWL SPOON + 6 + 17.98 + 0.00 + + + 1 + EA + DO 3218 + 34 OZ DUAL DIAL SCALE AM3218 + 70.00 + 5.0 + 66.50 + + + 1 + CS + DM 195 + 20 OZ.BEER PUB GLASS + 55.90 + 55.90 + + + + 399.28 + 3.50 + 23.75 + 29.61 + 33.84 + 33.84 + 486.48 + +
      + + +
      diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test02.xml b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test02.xml new file mode 100644 index 0000000..71ff075 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test02.xml @@ -0,0 +1,119 @@ + + + + + + + sample1 + a simple invoice + + + + + + + + + +
      + 01786 + 2000-03-17 + 55377 + 2000-03-15 + GJ03405 + DAVE 1 + 2000-03-17 + K5211(34) + 23 + 23 +
      + + SHIPWRIGHT RESTAURANTS LIMITED + 125 NORTH SERVICE ROAD W + WESTLAKE ACCESS + NORTH BAY + L8B1O5 + ONTARIO + CANADA + + + + ATTN: PAULINE DEGRASSI + + + + + + + + 1 + CS + DM 5309 + #1013 12 OZ.MUNICH STEIN + 37.72 + 37.72 + + + 6 + DZ + ON 6420 + PROVINCIAL DINNER FORK + 17.98 + 107.88 + + + 72 + EA + JR20643 + PLASTIC HANDLED STEAK KNIFE + .81 + 58.32 + + + 6 + DZ + ON 6410 + PROVINCIAL TEASPOONS + 12.16 + 72.96 + + + 0 + DZ + ON 6411 + PROVINCIAL RD BOWL SPOON + 6 + 17.98 + 0.00 + + + 1 + EA + DO 3218 + 34 OZ DUAL DIAL SCALE AM3218 + 70.00 + 5.0 + 66.50 + + + 1 + CS + DM 195 + 20 OZ.BEER PUB GLASS + 55.90 + 55.90 + + + + 399.28 + 3.50 + 23.75 + 29.61 + 33.84 + 33.84 + 486.48 + +
      + + +
      diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test03.html b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test03.html new file mode 100644 index 0000000..7fb5156 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test03.html @@ -0,0 +1,9 @@ +

      + + hello brave new world + + goodbye cruel world + + hello brave new world + +

      diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test03.xml b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test03.xml new file mode 100644 index 0000000..24be638 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test03.xml @@ -0,0 +1,10 @@ + +

      + + hello brave new world + + goodbye cruel world + + hello brave new world + +

      diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test04.html b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test04.html new file mode 100644 index 0000000..f0666da --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test04.html @@ -0,0 +1,38 @@ + + + + +
        +
      + + + +
        +
      • + 0 + hello world +
      • +
      • + 1 + hello world +
      • +
      + + + +
        +
      • + 0 + goodbye cruel world +
      • +
      • + 1 + goodbye cruel world +
      • +
      + +

      define-slot

      + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test04.xml b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test04.xml new file mode 100644 index 0000000..8b73d02 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test04.xml @@ -0,0 +1,39 @@ + + + + + +
        +
      + + + +
        +
      • + 0 + hello world +
      • +
      • + 1 + hello world +
      • +
      + + + +
        +
      • + 0 + goodbye cruel world +
      • +
      • + 1 + goodbye cruel world +
      • +
      + +

      define-slot

      + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test05.html b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test05.html new file mode 100644 index 0000000..006851a --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test05.html @@ -0,0 +1,9 @@ + + + + +

      This is the body of test5

      + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test05.xml b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test05.xml new file mode 100644 index 0000000..0bc2691 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test05.xml @@ -0,0 +1,10 @@ + + + + + +

      This is the body of test5

      + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test06.html b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test06.html new file mode 100644 index 0000000..d3f58d9 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test06.html @@ -0,0 +1,7 @@ + + + +

      This is the body of test5

      + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test06.xml b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test06.xml new file mode 100644 index 0000000..b9ad4ac --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test06.xml @@ -0,0 +1,8 @@ + + + + +

      This is the body of test5

      + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test07.html b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test07.html new file mode 100644 index 0000000..e0b3d88 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test07.html @@ -0,0 +1,11 @@ + + + + + + + + + + +
      Top LeftTop Right
      Bottom leftBottom Right
      diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test07.xml b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test07.xml new file mode 100644 index 0000000..8884d97 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test07.xml @@ -0,0 +1,12 @@ + + + + + + + + + + + +
      Top LeftTop Right
      Bottom leftBottom Right
      diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test08.html b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test08.html new file mode 100644 index 0000000..06e01b2 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test08.html @@ -0,0 +1,47 @@ + + + + + + + + + + +
      Top LeftTop Right
      Bottom left +

      Some headline

      +

      This is the real contents of the bottom right slot.

      +

      It is supposed to contain a lot of text. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb.

      +

      It is supposed to contain a lot of text. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb.

      +

      It is supposed to contain a lot of text. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb.

      +
      diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test08.xml b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test08.xml new file mode 100644 index 0000000..51a969c --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test08.xml @@ -0,0 +1,48 @@ + + + + + + + + + + + +
      Top LeftTop Right
      Bottom left +

      Some headline

      +

      This is the real contents of the bottom right slot.

      +

      It is supposed to contain a lot of text. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb.

      +

      It is supposed to contain a lot of text. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb.

      +

      It is supposed to contain a lot of text. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb.

      +
      diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test09.html b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test09.html new file mode 100644 index 0000000..844c1a9 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test09.html @@ -0,0 +1,30 @@ + + +

      + Just a bunch of text.

      +

      more text...

      +
        +
      • first item
      • +
      • second item + +
          +
        1. second list, first item
        2. +
        3. second list, second item +
          +
          term 1
          +
          term 2
          +
          definition
          +
        4. +
      • + +
      • Now let's have a paragraph... +

        My Paragraph

        +
      • + +
      • And a table in a list item: + +
      • +
      + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test09.xml b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test09.xml new file mode 100644 index 0000000..c3d10d7 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test09.xml @@ -0,0 +1,30 @@ + + +

      + Just a bunch of text.

      +

      more text...

      +
        +
      • first item
      • +
      • second item + +
          +
        1. second list, first item
        2. +
        3. second list, second item +
          +
          term 1
          +
          term 2
          +
          definition
          +
        4. +
      • + +
      • Now let's have a paragraph... +

        My Paragraph

        +
      • + +
      • And a table in a list item: + +
      • +
      + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test10.html b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test10.html new file mode 100644 index 0000000..d9cc7ed --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test10.html @@ -0,0 +1,51 @@ + + + + + + + + + + + +
      Top LeftTop Right
      Bottom left +

      Some headline

      +

      This is the real contents of the bottom right slot.

      +
      +

      It is supposed to contain a lot of text. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb.

      +

      It is supposed to contain a lot of text. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb.

      +

      It is supposed to contain a lot of text. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb. Blah, blah, blab. + Blabber, blabber, blah. Baah, baah, barb.

      +

      +
      + diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test11.html b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test11.html new file mode 100644 index 0000000..9e2223c --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test11.html @@ -0,0 +1,8 @@ + + bar +

      bad boy!

      +

      x undefined

      + x undefined + x undefined +
      + diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test11.xml b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test11.xml new file mode 100644 index 0000000..caba039 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test11.xml @@ -0,0 +1,5 @@ + + bar +

      bad boy!

      +

      x undefined

      + diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test12.html b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test12.html new file mode 100644 index 0000000..9533b42 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test12.html @@ -0,0 +1,24 @@ + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test13.html b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test13.html new file mode 100644 index 0000000..d68e0ce --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test13.html @@ -0,0 +1,7 @@ +Here's a stray greater than: > + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test14.html b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test14.html new file mode 100644 index 0000000..b9bf468 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test14.html @@ -0,0 +1,13 @@ + + + + + + +
      carbikebroomstick
      + +

      + Harry + Ron + Hermione +

      diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test14.xml b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test14.xml new file mode 100644 index 0000000..67c0c37 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test14.xml @@ -0,0 +1,18 @@ + + + + + + + + + +
      carbikebroomstick
      + +

      + Harry + Ron + Hermione +

      + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test15.html b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test15.html new file mode 100644 index 0000000..314fd43 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test15.html @@ -0,0 +1,29 @@ + + INNERSLOT + + + + inner-argument + + +
      + + + OUTERSLOT + + +
      + +
      + +
      outer-argument
      +
      +
      + +
      + + + OUTERSLOT + + +
      diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test16.html b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test16.html new file mode 100644 index 0000000..d3ea228 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test16.html @@ -0,0 +1 @@ +blah, blah diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test16.xml b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test16.xml new file mode 100644 index 0000000..cb96d01 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test16.xml @@ -0,0 +1,6 @@ + + + +bar + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test17.html b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test17.html new file mode 100644 index 0000000..e50997d --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test17.html @@ -0,0 +1,6 @@ +Yes +Yes +Yes + +Yes +Yes diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test17.xml b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test17.xml new file mode 100644 index 0000000..7a54cdb --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test17.xml @@ -0,0 +1,9 @@ + + +Yes +Yes +Yes + +Yes +Yes + diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test18.html b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test18.html new file mode 100644 index 0000000..f49e29e --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test18.html @@ -0,0 +1,16 @@ +Content + + + +Content + + + +

      Content

      +

      + + +Yes +Yes +Yes +Yes diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test18.xml b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test18.xml new file mode 100644 index 0000000..77eba02 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test18.xml @@ -0,0 +1,19 @@ + + +Content + + + +Content + + + +

      Content

      +

      + + +Yes +Yes +Yes +Yes + diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test19.html b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test19.html new file mode 100644 index 0000000..2341a4a --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test19.html @@ -0,0 +1,3 @@ +REPLACE THIS +MSGID +AND ANOTHER TRANSLATED STRING diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test19.xml b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test19.xml new file mode 100644 index 0000000..4460acd --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test19.xml @@ -0,0 +1,6 @@ + + +REPLACE THIS +MSGID +AND ANOTHER TRANSLATED STRING + diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test20.html b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test20.html new file mode 100644 index 0000000..606b989 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test20.html @@ -0,0 +1 @@ +REPLACEABLE HERE diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test20.xml b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test20.xml new file mode 100644 index 0000000..ed1f9fe --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test20.xml @@ -0,0 +1,4 @@ + + +REPLACEABLE HERE + diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test21.html b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test21.html new file mode 100644 index 0000000..95b3b08 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test21.html @@ -0,0 +1 @@ +Lomax WAS BORN IN Antarctica. 
diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test21.xml b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test21.xml new file mode 100644 index 0000000..c373d52 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test21.xml @@ -0,0 +1,4 @@ + + +Lomax WAS BORN IN Antarctica. + diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test22.html b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test22.html new file mode 100644 index 0000000..6c1b6de --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test22.html @@ -0,0 +1 @@ +Jim WAS BORN IN the USA. diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test22.xml b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test22.xml new file mode 100644 index 0000000..c2e79c5 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test22.xml @@ -0,0 +1,6 @@ + + + content + omit + replace + diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test23.html b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test23.html new file mode 100644 index 0000000..0ea1654 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test23.html @@ -0,0 +1 @@ +59 minutes after 6 PM diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test24.html b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test24.html new file mode 100644 index 0000000..7f72c59 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test24.html @@ -0,0 +1,7 @@ + + + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test25.html b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test25.html new file mode 100644 index 0000000..6b80bd3 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test25.html @@ -0,0 +1 @@ + diff --git 
a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test26.html b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test26.html new file mode 100644 index 0000000..9d179a6 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test26.html @@ -0,0 +1 @@ +7 is the JOB NUMBER diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test27.html b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test27.html new file mode 100644 index 0000000..96229e4 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test27.html @@ -0,0 +1 @@ +

      Your contact email address is recorded as aperson@dom.ain

      diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test28.html b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test28.html new file mode 100644 index 0000000..96229e4 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test28.html @@ -0,0 +1 @@ +

      Your contact email address is recorded as aperson@dom.ain

      diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test29.html b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test29.html new file mode 100644 index 0000000..886137e --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test29.html @@ -0,0 +1 @@ +
      AT THE TONE THE TIME WILL BE 59 minutes after 6 PM... BEEP!
      diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test30.html b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test30.html new file mode 100644 index 0000000..964b772 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test30.html @@ -0,0 +1 @@ +

      Your contact email address is recorded as aperson@dom.ain

      diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test31.html b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test31.html new file mode 100644 index 0000000..964b772 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test31.html @@ -0,0 +1 @@ +

      Your contact email address is recorded as aperson@dom.ain

      diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test32.html b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test32.html new file mode 100644 index 0000000..f39bd97 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test32.html @@ -0,0 +1 @@ +Lomax was born in Antarctica diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test33.html b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test33.html new file mode 100644 index 0000000..4472f21 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test33.html @@ -0,0 +1 @@ +don't translate me diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test34.html b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test34.html new file mode 100644 index 0000000..1d7b5f2 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test34.html @@ -0,0 +1,7 @@ + + stuff + foobar + more stuff + + +STUFF foobar MORE STUFF diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test35.html b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test35.html new file mode 100644 index 0000000..b1a9d2e --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test35.html @@ -0,0 +1,6 @@ + + + + + +

      page

      diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test36.html b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test36.html new file mode 100644 index 0000000..2a563c1 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test36.html @@ -0,0 +1,2 @@ +<foo> +<foo> some text diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test37.html b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test37.html new file mode 100644 index 0000000..5c68ed0 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test37.html @@ -0,0 +1,3 @@ + + TEST + diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test38.html b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test38.html new file mode 100644 index 0000000..8efb617 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test38.html @@ -0,0 +1,6 @@ + + Test + + Python is a programming language. + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test_domain.html b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test_domain.html new file mode 100644 index 0000000..6a282ac --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test_domain.html @@ -0,0 +1,5 @@ +
      +replace this +msgid +and another translated string +
      diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test_failed_attr_translation.html b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test_failed_attr_translation.html new file mode 100644 index 0000000..cd34b1f --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test_failed_attr_translation.html @@ -0,0 +1 @@ + diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test_metal1.html b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test_metal1.html new file mode 100644 index 0000000..c8cc346 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test_metal1.html @@ -0,0 +1,79 @@ + + AAA + INNER + BBB + + + + AAA + INNER + BBB + + +INNER + + + AAA + + INNER + + BBB + + + + AAA + + INNER + + BBB + + +INNER + + + AAA + OUTERSLOT + BBB + + + + AAA + + INNER + INNERSLOT + + + BBB + + + + AAA + + INNER + INNERSLOT + + + BBB + + + + AAA + OUTERSLOT + BBB + + +INNER + INNERSLOT + + +INNER + INNERSLOT + + +INNER + + INSLOT + + + +INSLOT diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test_metal2.html b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test_metal2.html new file mode 100644 index 0000000..7e56c0c --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test_metal2.html @@ -0,0 +1,11 @@ +
      + OUTER + INNER + OUTER +
      + +
      + OUTER + INNER + OUTER +
      diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test_metal3.html b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test_metal3.html new file mode 100644 index 0000000..b0af907 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test_metal3.html @@ -0,0 +1 @@ +Should not get attr in metal diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test_metal4.html b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test_metal4.html new file mode 100644 index 0000000..dc774d3 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test_metal4.html @@ -0,0 +1,4 @@ + + + Z3 UI + diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test_metal5.html b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test_metal5.html new file mode 100644 index 0000000..8bae3d8 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test_metal5.html @@ -0,0 +1,4 @@ + + + Z3 UI + diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test_metal6.html b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test_metal6.html new file mode 100644 index 0000000..ce243f2 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test_metal6.html @@ -0,0 +1,5 @@ + + + Z3 UI + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test_metal7.html b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test_metal7.html new file mode 100644 index 0000000..cc449ed --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test_metal7.html @@ -0,0 +1,6 @@ + + + + + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test_metal8.html b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test_metal8.html new file mode 100644 index 0000000..d56adab --- /dev/null +++ 
b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test_metal8.html @@ -0,0 +1,19 @@ + + +
      +
      +Default body +
      +
      + + + + + +
      +
      +Filled-in body +
      +
      + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test_metal9.html b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test_metal9.html new file mode 100644 index 0000000..4cbc637 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test_metal9.html @@ -0,0 +1,32 @@ +
      + +Default for macro1 + +
      + +
      + +Macro 2's slot 1 decoration + +Default for macro2 + + +
      + +
      + +Macro 2's slot 1 decoration + +Default for macro2 + + +
      + +
      + +Macro 2's slot 1 decoration + +Custom slot1 + + +
      diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test_sa1.html b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test_sa1.html new file mode 100644 index 0000000..a37b9e9 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test_sa1.html @@ -0,0 +1,10 @@ + +Simple test of source annotations + +

      Foo!

      + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test_sa1.xml b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test_sa1.xml new file mode 100644 index 0000000..8e1f4cc --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test_sa1.xml @@ -0,0 +1,11 @@ + + +Simple test of source annotations + +

      Foo!

      + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test_sa2.html b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test_sa2.html new file mode 100644 index 0000000..4709b49 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test_sa2.html @@ -0,0 +1,13 @@ + + +Simple test of source annotations + +

      Foo!

      + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test_sa2.xml b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test_sa2.xml new file mode 100644 index 0000000..30b5699 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test_sa2.xml @@ -0,0 +1,14 @@ + + + +Simple test of source annotations + +

      Foo!

      + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test_sa3.html b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test_sa3.html new file mode 100644 index 0000000..8431438 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test_sa3.html @@ -0,0 +1,42 @@ + + +
      This is macro1 on sa3 line 3. + This is slot1 on sa3 line 4. + This is the end of macro1 on sa3 line 5. +
      +

      Some text on sa3 line 7.

      +
      This is macro1 on sa3 line 3. + Text from sa3 line 10 is filled into slot1. + This is the end of macro1 on sa3 line 5. +
      +

      This is some text on sa3 line 13.

      + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test_sa3.xml b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test_sa3.xml new file mode 100644 index 0000000..bd20f83 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test_sa3.xml @@ -0,0 +1,43 @@ + + + +
      This is macro1 on sa3 line 4. + This is slot1 on sa3 line 5. + This is the end of macro1 on sa3 line 6. +
      +

      Some text on sa3 line 8.

      +
      This is macro1 on sa3 line 4. + Text from sa3 line 11 is filled into slot1. + This is the end of macro1 on sa3 line 6. +
      +

      This is some text on sa3 line 14.

      + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test_sa4.html b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test_sa4.html new file mode 100644 index 0000000..4aca908 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/output/test_sa4.html @@ -0,0 +1,30 @@ + + +

      Some text on sa4 line 3.

      +
      This is macro1 on sa3 line 3. + Text from sa4 line 6 is filled into slot1. + This is the end of macro1 on sa3 line 5. +
      +

      This is some text on sa4 line 9.

      + + diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/run.py b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/run.py new file mode 100644 index 0000000..6de4a50 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/run.py @@ -0,0 +1,43 @@ +#! /usr/bin/env python +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Run all tests. +""" +import sys +import unittest + +from zope.tal.tests import utils +from zope.tal.tests import test_htmltalparser +from zope.tal.tests import test_talinterpreter +from zope.tal.tests import test_files +from zope.tal.tests import test_sourcepos + +# TODO this code isn't picked up by the Zope 3 test framework.. +def test_suite(): + suite = unittest.TestSuite() + suite.addTest(test_htmltalparser.test_suite()) + if not utils.skipxml: + from . 
import test_xmlparser + suite.addTest(test_xmlparser.test_suite()) + suite.addTest(test_talinterpreter.test_suite()) + suite.addTest(test_files.test_suite()) + suite.addTest(test_sourcepos.test_suite()) + return suite + +def main(): + return utils.run_suite(test_suite()) + +if __name__ == "__main__": + errs = main() + sys.exit(errs and 1 or 0) diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/test_files.py b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/test_files.py new file mode 100644 index 0000000..eca051c --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/test_files.py @@ -0,0 +1,90 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Tests that run driver.py over input files comparing to output files. 
+""" + +import glob +import os +import sys +import unittest + +try: + # Python 2.x + from cStringIO import StringIO +except ImportError: + # Python 3.x + from io import StringIO + +import zope.tal.runtest + +from zope.tal.tests import utils + +HERE = os.path.abspath(os.path.dirname(__file__)) +PARENTDIR = os.path.dirname(HERE) +PREFIX = os.path.join(HERE, "input", "test*.") + + +def _factory(filename, dirname): + + pwd = os.getcwd() + short_path = os.path.relpath(filename, os.path.dirname(__file__)) + + def setUp(): + os.chdir(dirname) + + def tearDown(): + os.chdir(pwd) + + def runTest(): + buf = StringIO() + basename = os.path.basename(filename) + if basename.startswith('test_sa'): + argv = ["-Q", "-a", filename] + elif basename.startswith('test_metal'): + argv = ["-Q", "-m", filename] + else: + argv = ["-Q", filename] + try: + failed = zope.tal.runtest.main(argv, buf) + finally: + captured_stdout = buf.getvalue() + if failed: + raise AssertionError("output for %s didn't match:\n%s" + % (filename, captured_stdout)) + + return unittest.FunctionTestCase(runTest, setUp, tearDown, short_path) + + +def _find_files(): + if utils.skipxml: + xmlargs = [] + else: + xmlargs = sorted(glob.glob(PREFIX + "xml")) + htmlargs = sorted(glob.glob(PREFIX + "html")) + + args = xmlargs + htmlargs + if not args: + sys.stderr.write("Warning: no test input files found!!!\n") + return args + +# Nose doesn't handle 'test_suite' in the same was as zope.testrunner, +# so we'll use its generator-as-test-factory feature. 
See: +# https://nose.readthedocs.org/en/latest/writing_tests.html#test-generators +def test_for_nose_discovery(): + for arg in _find_files(): + yield _factory(arg, PARENTDIR) + +def test_suite(): + return unittest.TestSuite( + [_factory(arg, PARENTDIR) for arg in _find_files()]) diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/test_htmltalparser.py b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/test_htmltalparser.py new file mode 100644 index 0000000..126b707 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/test_htmltalparser.py @@ -0,0 +1,1021 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Tests for the HTMLTALParser code generator. 
+""" +import pprint +import sys +import unittest + +from zope.tal import htmltalparser, taldefs +from zope.tal.tests import utils + + +class TestCaseBase(unittest.TestCase): + + prologue = "" + epilogue = "" + initial_program = [('version', taldefs.TAL_VERSION), ('mode', 'html')] + final_program = [] + + def _merge(self, p1, p2): + if p1 and p2: + op1, args1 = p1[-1] + op2, args2 = p2[0] + if op1.startswith('rawtext') and op2.startswith('rawtext'): + return (p1[:-1] + + [rawtext(args1[0] + args2[0])] + + p2[1:]) + return p1+p2 + + def _run_check(self, source, program, macros={}): + parser = htmltalparser.HTMLTALParser() + parser.parseString(self.prologue + source + self.epilogue) + got_program, got_macros = parser.getCode() + program = self._merge(self.initial_program, program) + program = self._merge(program, self.final_program) + self.assertEqual(got_program, program, + "Program:\n" + pprint.pformat(got_program) + + "\nExpected:\n" + pprint.pformat(program)) + self.assertEqual(got_macros, macros, + "Macros:\n" + pprint.pformat(got_macros) + + "\nExpected:\n" + pprint.pformat(macros)) + + def _should_error(self, source, exc=taldefs.TALError): + def parse(self=self, source=source): + parser = htmltalparser.HTMLTALParser() + parser.parseString(self.prologue + source + self.epilogue) + self.assertRaises(exc, parse) + + +def rawtext(s): + """Compile raw text to the appropriate instruction.""" + if "\n" in s: + return ("rawtextColumn", (s, len(s) - (s.rfind("\n") + 1))) + else: + return ("rawtextOffset", (s, len(s))) + + +class HTMLTALParserTestCases(TestCaseBase): + + def test_code_simple_identity(self): + self._run_check("""My Title</html>""", [ + rawtext('<html a="b" b="c" c="d">' + '<title>My Title'), + ]) + + def test_code_implied_list_closings(self): + self._run_check("""
      """, [ + rawtext('
      '), + ]) + self._run_check("""
      """, [ + rawtext('
      ' + '
      '), + ]) + + def test_code_implied_table_closings(self): + self._run_check("""

      text
      head\t
      cell\t""" + """""", [ + rawtext('

      text

      cell \n \t \n
      ' + '\t
      head
      cell\t' + ' \n \t \n
      cell
      '), + ]) + self._run_check("""
      cell """ + """
      cell
      """, [ + rawtext('
      cell ' + '
      cell
      '), + ]) + + def test_code_bad_nesting(self): + def check(self=self): + self._run_check("", []) + self.assertRaises(htmltalparser.NestingError, check) + + def test_code_attr_syntax(self): + output = [ + rawtext(''), + ] + self._run_check("""""", output) + self._run_check("""""", output) + self._run_check("""""", output) + self._run_check("""""", output) + + def test_code_attr_values(self): + self._run_check( + """""", [ + rawtext('')]) + self._run_check("""""", [ + rawtext(''), + ]) + + def test_code_attr_entity_replacement(self): + # we expect entities *not* to be replaced by HTLMParser! + self._run_check("""""", [ + rawtext(''), + ]) + self._run_check("""""", [ + rawtext(''), + ]) + self._run_check("""""", [ + rawtext(''), + ]) + self._run_check("""""", [ + rawtext(''), + ]) + + def test_code_attr_funky_names(self): + self._run_check("""""", [ + rawtext(''), + ]) + + def test_code_pcdata_entityref(self): + self._run_check(""" """, [ + rawtext(' '), + ]) + + def test_code_short_endtags(self): + self._run_check("""""", [ + rawtext(''), + ]) + + +class METALGeneratorTestCases(TestCaseBase): + + def test_null(self): + self._run_check("", []) + + def test_define_macro(self): + macro = self.initial_program + [ + ('startTag', ('p', [('metal:define-macro', 'M', 'metal')])), + rawtext('booh

      '), + ] + program = [ + ('setPosition', (1, 0)), + ('defineMacro', ('M', macro)), + ] + macros = {'M': macro} + self._run_check('

      booh

      ', program, macros) + + def test_use_macro(self): + self._run_check('

      booh

      ', [ + ('setPosition', (1, 0)), + ('useMacro', + ('M', '$M$', {}, + [('startTag', ('p', [('metal:use-macro', 'M', 'metal')])), + rawtext('booh

      ')])), + ]) + + def test_define_slot(self): + macro = self.initial_program + [ + ('startTag', ('p', [('metal:define-macro', 'M', 'metal')])), + rawtext('foo'), + ('setPosition', (1, 29)), + ('defineSlot', ('S', + [('startTag', ('span', [('metal:define-slot', 'S', 'metal')])), + rawtext('spam')])), + rawtext('bar

      '), + ] + program = [('setPosition', (1, 0)), + ('defineMacro', ('M', macro))] + macros = {'M': macro} + self._run_check('

      foo' + 'spambar

      ', + program, macros) + + def test_fill_slot(self): + self._run_check('

      foo' + 'spambar

      ', [ + ('setPosition', (1, 0)), + ('useMacro', + ('M', '$M$', + {'S': [('startTag', ('span', + [('metal:fill-slot', 'S', 'metal')])), + rawtext('spam')]}, + [('startTag', ('p', [('metal:use-macro', 'M', 'metal')])), + rawtext('foo'), + ('setPosition', (1, 26)), + ('fillSlot', ('S', + [('startTag', ('span', [('metal:fill-slot', 'S', 'metal')])), + rawtext('spam')])), + rawtext('bar

      ')])), + ]) + + +class TALGeneratorTestCases(TestCaseBase): + + def test_null(self): + self._run_check("", []) + + def test_define_1(self): + self._run_check("

      ", [ + ('setPosition', (1, 0)), + ('beginScope', {'tal:define': 'xyzzy string:spam'}), + ('setLocal', ('xyzzy', '$string:spam$')), + ('startTag', ('p', [('tal:define', 'xyzzy string:spam', 'tal')])), + ('endScope', ()), + rawtext('

      '), + ]) + + def test_define_2(self): + self._run_check("

      ", [ + ('setPosition', (1, 0)), + ('beginScope', {'tal:define': 'local xyzzy string:spam'}), + ('setLocal', ('xyzzy', '$string:spam$')), + ('startTag', ('p', + [('tal:define', 'local xyzzy string:spam', 'tal')])), + ('endScope', ()), + rawtext('

      '), + ]) + + def test_define_3(self): + self._run_check("

      ", [ + ('setPosition', (1, 0)), + ('beginScope', {'tal:define': 'global xyzzy string:spam'}), + ('setGlobal', ('xyzzy', '$string:spam$')), + ('startTag', ('p', + [('tal:define', 'global xyzzy string:spam', 'tal')])), + ('endScope', ()), + rawtext('

      '), + ]) + + def test_define_4(self): + self._run_check("

      ", [ + ('setPosition', (1, 0)), + ('beginScope', {'tal:define': 'x string:spam; y x'}), + ('setLocal', ('x', '$string:spam$')), + ('setLocal', ('y', '$x$')), + ('startTag', ('p', [('tal:define', 'x string:spam; y x', 'tal')])), + ('endScope', ()), + rawtext('

      '), + ]) + + def test_define_5(self): + self._run_check("

      ", [ + ('setPosition', (1, 0)), + ('beginScope', {'tal:define': 'x string:;;;;; y x'}), + ('setLocal', ('x', '$string:;;$')), + ('setLocal', ('y', '$x$')), + ('startTag', ('p', [('tal:define', 'x string:;;;;; y x', 'tal')])), + ('endScope', ()), + rawtext('

      '), + ]) + + def test_define_6(self): + self._run_check( + "

      ", [ + ('setPosition', (1, 0)), + ('beginScope', + {'tal:define': 'x string:spam; global y x; local z y'}), + ('setLocal', ('x', '$string:spam$')), + ('setGlobal', ('y', '$x$')), + ('setLocal', ('z', '$y$')), + ('startTag', ('p', + [('tal:define', 'x string:spam; global y x; local z y', 'tal')])), + ('endScope', ()), + rawtext('

      '), + ]) + + def test_condition(self): + self._run_check( + "

      foo

      ", [ + rawtext('

      '), + ('setPosition', (1, 3)), + ('beginScope', {'tal:condition': 'python:1'}), + ('condition', ('$python:1$', + [('startTag', ('span', [('tal:condition', 'python:1', 'tal')])), + rawtext('foo')])), + ('endScope', ()), + rawtext('

      '), + ]) + + def test_content_1(self): + self._run_check("

      bar

      ", [ + ('setPosition', (1, 0)), + ('beginScope', {'tal:content': 'string:foo'}), + ('startTag', ('p', [('tal:content', 'string:foo', 'tal')])), + ('insertText', ('$string:foo$', [rawtext('bar')])), + ('endScope', ()), + rawtext('

      '), + ]) + + def test_content_2(self): + self._run_check("

      bar

      ", [ + ('setPosition', (1, 0)), + ('beginScope', {'tal:content': 'text string:foo'}), + ('startTag', ('p', [('tal:content', 'text string:foo', 'tal')])), + ('insertText', ('$string:foo$', [rawtext('bar')])), + ('endScope', ()), + rawtext('

      '), + ]) + + def test_content_3(self): + self._run_check("

      bar

      ", [ + ('setPosition', (1, 0)), + ('beginScope', {'tal:content': 'structure string:
      '}), + ('startTag', ('p', + [('tal:content', 'structure string:
      ', 'tal')])), + ('insertStructure', + ('$string:
      $', {}, [rawtext('bar')])), + ('endScope', ()), + rawtext('

      '), + ]) + + def test_replace_1(self): + self._run_check("

      bar

      ", [ + ('setPosition', (1, 0)), + ('beginScope', {'tal:replace': 'string:foo'}), + ('optTag', + ('p', + '', + None, + 0, + [('startTag', ('p', [('tal:replace', 'string:foo', 'tal')]))], + [('insertText', ('$string:foo$', [('rawtextOffset', ('bar', 3))]))])), + ('endScope', ()), + ]) + + def test_replace_2(self): + self._run_check("

      bar

      ", [ + ('setPosition', (1, 0)), + ('beginScope', {'tal:replace': 'text string:foo'}), + ('optTag', + ('p', + '', + None, + 0, + [('startTag', ('p', [('tal:replace', 'text string:foo', 'tal')]))], + [('insertText', ('$string:foo$', [('rawtextOffset', ('bar', 3))]))])), + ('endScope', ()), + ]) + + def test_replace_3(self): + self._run_check("

      bar

      ", [ + ('setPosition', (1, 0)), + ('beginScope', {'tal:replace': 'structure string:
      '}), + ('optTag', + ('p', + '', + None, + 0, + [('startTag', ('p', [('tal:replace', 'structure string:
      ', 'tal')]))], + [('insertStructure', + ('$string:
      $', {}, [('rawtextOffset', ('bar', 3))]))])), + ('endScope', ()), + ]) + + def test_repeat(self): + self._run_check("

      " + "dummy

      ", [ + ('setPosition', (1, 0)), + ('beginScope', {'tal:repeat': 'x python:(1,2,3)'}), + ('loop', ('x', '$python:(1,2,3)$', + [('startTag', ('p', + [('tal:repeat', 'x python:(1,2,3)', 'tal')])), + ('setPosition', (1, 33)), + ('beginScope', {'tal:replace': 'x'}), + ('optTag', + ('span', + '', + None, + 0, + [('startTag', ('span', [('tal:replace', 'x', 'tal')]))], + [('insertText', ('$x$', [('rawtextOffset', ('dummy', 5))]))])), + ('endScope', ()), + rawtext('

      ')])), + ('endScope', ()), + ]) + + def test_script_1(self): + self._run_check('

      code

      ', [ + ('setPosition', (1, 0)), + ('beginScope', {'tal:script': 'text/server-python'}), + ('startTag', ('p', + [('tal:script', 'text/server-python', 'tal')])), + ('evaluateCode', ('text/server-python', + [('rawtextOffset', ('code', 4))])), + ('endScope', ()), + rawtext('

      '), + ]) + + def test_script_2(self): + self._run_check('' + 'code' + '', [ + ('setPosition', (1, 0)), + ('beginScope', {'script': 'text/server-python'}), + ('optTag', + ('tal:block', + None, + 'tal', + 0, + [('startTag', ('tal:block', + [('script', 'text/server-python', 'tal')]))], + [('evaluateCode', + ('text/server-python', + [('rawtextOffset', ('code', 4))]))])), + ('endScope', ()) + ]) + + def test_script_3(self): + self._run_check('', [ + ('setPosition', (1, 0)), + ('beginScope', {}), + ('optTag', + ('script', + '', + None, + 0, + [('rawtextOffset', ('', [ + ('rawtextOffset', + ('', 44)) + ]) + + def test_script_5(self): + self._run_check("""""", [ + ('rawtextOffset', + ("""""", 64)) + ]) + + def test_attributes_1(self): + self._run_check("" + "link", [ + ('setPosition', (1, 0)), + ('beginScope', + {'tal:attributes': 'href string:http://www.zope.org; x string:y', + 'name': 'bar', 'href': 'foo'}), + ('startTag', ('a', + [('href', 'foo', 'replace', '$string:http://www.zope.org$', 0, None), + ('name', 'name="bar"'), + ('tal:attributes', + 'href string:http://www.zope.org; x string:y', 'tal'), + ('x', None, 'insert', '$string:y$', 0, None)])), + ('endScope', ()), + rawtext('link'), + ]) + + def test_attributes_2(self): + self._run_check("

      duh

      ", [ + ('setPosition', (1, 0)), + ('beginScope', + {'tal:attributes': 'src string:foo.png', + 'tal:replace': 'structure string:'}), + ('optTag', + ('p', + '', + None, + 0, + [('startTag', + ('p', + [('tal:replace', 'structure string:', 'tal'), + ('tal:attributes', 'src string:foo.png', 'tal')]))], + [('insertStructure', + ('$string:$', + {'src': ('$string:foo.png$', False, None)}, + [('rawtextOffset', ('duh', 3))]))])), + ('endScope', ())]) + + def test_on_error_1(self): + self._run_check("

      okay

      ", [ + ('setPosition', (1, 0)), + ('beginScope', + {'tal:content': 'notHere', 'tal:on-error': 'string:error'}), + ('onError', + ([('startTag', ('p', + [('tal:on-error', 'string:error', 'tal'), + ('tal:content', 'notHere', 'tal')])), + ('insertText', ('$notHere$', [rawtext('okay')])), + rawtext('

      ')], + [('startTag', ('p', + [('tal:on-error', 'string:error', 'tal'), + ('tal:content', 'notHere', 'tal')])), + ('insertText', ('$string:error$', [])), + rawtext('

      ')])), + ('endScope', ()), + ]) + + def test_on_error_2(self): + self._run_check("

      okay

      ", [ + ('setPosition', (1, 0)), + ('beginScope', + {'tal:replace': 'notHere', 'tal:on-error': 'string:error'}), + ('onError', + ([('optTag', + ('p', + '', + None, + 0, + [('startTag', + ('p', + [('tal:on-error', 'string:error', 'tal'), + ('tal:replace', 'notHere', 'tal')]))], + [('insertText', ('$notHere$', [('rawtextOffset', ('okay', 4))]))]))], + [('startTag', + ('p', + [('tal:on-error', 'string:error', 'tal'), + ('tal:replace', 'notHere', 'tal')])), + ('insertText', ('$string:error$', [])), + ('rawtextOffset', ('

      ', 4))])), + ('endScope', ()), + ]) + + def test_dup_attr(self): + self._should_error("") + self._should_error("", taldefs.METALError) + + def test_tal_errors(self): + self._should_error("

      ") + self._should_error("

      ") + self._should_error("

      ") + self._should_error("

      ") + self._should_error("

      ") + for tag in htmltalparser.EMPTY_HTML_TAGS: + self._should_error("<%s tal:content='string:foo'>" % tag) + + def test_metal_errors(self): + exc = taldefs.METALError + self._should_error(2*"

      xxx

      ", exc) + self._should_error("" + + 2*"

      " + "", exc) + self._should_error("

      ", exc) + self._should_error("

      ", exc) + + def test_extend_macro_errors(self): + exc = taldefs.METALError + # extend-macro requires define-macro: + self._should_error("

      xxx

      ", exc) + # extend-macro prevents use-macro: + self._should_error("

      xxx

      ", exc) + # use-macro doesn't co-exist with define-macro: + self._should_error("

      xxx

      ", exc) + + # + # I18N test cases + # + + def test_i18n_attributes(self): + self._run_check("foo", [ + ('setPosition', (1, 0)), + ('beginScope', {'alt': 'foo', 'i18n:attributes': 'alt'}), + ('startTag', ('img', + [('alt', 'foo', 'replace', None, 1, None), + ('i18n:attributes', 'alt', 'i18n')])), + ('endScope', ()), + ]) + self._run_check("foo", [ + ('setPosition', (1, 0)), + ('beginScope', {'alt': 'foo', 'i18n:attributes': 'alt foo ; bar'}), + ('startTag', ('img', + [('alt', 'foo', 'replace', None, 1, 'foo'), + ('i18n:attributes', 'alt foo ; bar', 'i18n'), + ('bar', None, 'insert', None, 1, None)])), + ('endScope', ()), + ]) + + def test_i18n_name_bad_name(self): + self._should_error("") + self._should_error("") + + def test_i18n_attributes_repeated_attr(self): + self._should_error("") + self._should_error("") + + def test_i18n_translate(self): + # input/test19.html + self._run_check('''\ +Replace this +This is a +translated string +And another +translated string +''', [ + ('setPosition', (1, 0)), + ('beginScope', {'i18n:translate': ''}), + ('startTag', ('span', [('i18n:translate', '', 'i18n')])), + ('insertTranslation', ('', [('rawtextOffset', ('Replace this', 12))])), + ('rawtextBeginScope', + ('\n', 0, (2, 0), 1, {'i18n:translate': 'msgid'})), + ('startTag', ('span', [('i18n:translate', 'msgid', 'i18n')])), + ('insertTranslation', + ('msgid', [('rawtextColumn', ('This is a\ntranslated string', 17))])), + ('rawtextBeginScope', ('\n', 0, (4, 0), 1, {'i18n:translate': ''})), + ('startTag', ('span', [('i18n:translate', '', 'i18n')])), + ('insertTranslation', + ('', [('rawtextColumn', ('And another\ntranslated string', 17))])), + ('endScope', ()), + ('rawtextColumn', ('\n', 0))]) + + def test_i18n_translate_with_nested_tal(self): + self._run_check('''\ +replaceable

      content

      +''', [ + ('setPosition', (1, 0)), + ('beginScope', {'i18n:translate': ''}), + ('startTag', ('span', [('i18n:translate', '', 'i18n')])), + ('insertTranslation', + ('', + [('rawtextOffset', ('replaceable ', 12)), + ('setPosition', (1, 36)), + ('beginScope', {'tal:replace': 'str:here'}), + ('optTag', + ('p', + '', + None, + 0, + [('startTag', ('p', [('tal:replace', 'str:here', 'tal')]))], + [('insertText', + ('$str:here$', [('rawtextOffset', ('content', 7))]))])), + ('endScope', ())])), + ('endScope', ()), + ('rawtextColumn', ('\n', 0)) + ]) + + def test_i18n_name(self): + # input/test21.html + self._run_check('''\ + + was born in + . + +''', [ + ('setPosition', (1, 0)), + ('beginScope', {'i18n:translate': ''}), + ('startTag', ('span', [('i18n:translate', '', 'i18n')])), + ('insertTranslation', + ('', + [('rawtextBeginScope', + ('\n ', + 2, + (2, 2), + 0, + {'i18n:name': 'name', 'tal:replace': 'str:Lomax'})), + ('i18nVariable', + ('name', + [('optTag', + ('span', + '', + None, + 1, + [('startEndTag', + ('span', + [('tal:replace', 'str:Lomax', 'tal'), + ('i18n:name', 'name', 'i18n')]))], + [('insertText', ('$str:Lomax$', []))]))], + None, + 0)), + ('rawtextBeginScope', + (' was born in\n ', + 2, + (3, 2), + 1, + {'i18n:name': 'country', 'tal:replace': 'str:Antarctica'})), + ('i18nVariable', + ('country', + [('optTag', + ('span', + '', + None, + 1, + [('startEndTag', + ('span', + [('tal:replace', 'str:Antarctica', 'tal'), + ('i18n:name', 'country', 'i18n')]))], + [('insertText', ('$str:Antarctica$', []))]))], + None, + 0)), + ('endScope', ()), + ('rawtextColumn', ('.\n', 0))])), + ('endScope', ()), + ('rawtextColumn', ('\n', 0)) + ]) + + def test_i18n_name_with_content(self): + self._run_check('
      This is text for ' + '.' + '
      ', [ +('setPosition', (1, 0)), +('beginScope', {'i18n:translate': ''}), +('startTag', ('div', [('i18n:translate', '', 'i18n')])), +('insertTranslation', + ('', + [('rawtextOffset', ('This is text for ', 17)), + ('setPosition', (1, 40)), + ('beginScope', + {'tal:content': 'bar', 'i18n:name': 'bar_name', 'i18n:translate': ''}), + ('i18nVariable', + ('bar_name', + [('startTag', + ('span', + [('i18n:translate', '', 'i18n'), + ('tal:content', 'bar', 'tal'), + ('i18n:name', 'bar_name', 'i18n')])), + ('insertI18nText', ('$bar$', [])), + ('rawtextOffset', ('
      ', 7))], + None, + 0)), + ('endScope', ()), + ('rawtextOffset', ('.', 1))])), +('endScope', ()), +('rawtextOffset', ('', 6)) + ]) + + def test_i18n_name_implicit_value(self): + # input/test22.html + self._run_check('''\ + + Jim was born in + the USA. + +''', [('setPosition', (1, 0)), + ('beginScope', {'i18n:translate': ''}), + ('startTag', ('span', [('i18n:translate', '', 'i18n')])), + ('insertTranslation', + ('', + [('rawtextBeginScope', + ('\n ', 2, (2, 2), 0, {'i18n:name': 'name', 'tal:omit-tag': ''})), + ('i18nVariable', + ('name', + [('optTag', + ('span', + '', + None, + 0, + [('startTag', + ('span', + [('tal:omit-tag', '', 'tal'), + ('i18n:name', 'name', 'i18n')]))], + [('rawtextOffset', ('Jim', 10))]))], + None, + 0)), + ('rawtextBeginScope', + (' was born in\n ', + 2, + (3, 2), + 1, + {'i18n:name': 'country', 'tal:omit-tag': ''})), + ('i18nVariable', + ('country', + [('optTag', + ('span', + '', + None, + 0, + [('startTag', + ('span', + [('tal:omit-tag', '', 'tal'), + ('i18n:name', 'country', 'i18n')]))], + [('rawtextOffset', ('the USA', 7))]))], + None, + 0)), + ('endScope', ()), + ('rawtextColumn', ('.\n', 0))])), + ('endScope', ()), + ('rawtextColumn', ('\n', 0)) + ]) + + def test_i18n_context_domain(self): + self._run_check("", [ + ('setPosition', (1, 0)), + ('beginI18nContext', {'domain': 'mydomain', + 'source': None, 'target': None}), + ('beginScope', {'i18n:domain': 'mydomain'}), + ('startEndTag', ('span', [('i18n:domain', 'mydomain', 'i18n')])), + ('endScope', ()), + ('endI18nContext', ()), + ]) + + def test_i18n_context_source(self): + self._run_check("", [ + ('setPosition', (1, 0)), + ('beginI18nContext', {'source': 'en', + 'domain': 'default', 'target': None}), + ('beginScope', {'i18n:source': 'en'}), + ('startEndTag', ('span', [('i18n:source', 'en', 'i18n')])), + ('endScope', ()), + ('endI18nContext', ()), + ]) + + def test_i18n_context_source_target(self): + self._run_check("", [ + ('setPosition', (1, 0)), + ('beginI18nContext', {'source': 
'en', 'target': 'ru', + 'domain': 'default'}), + ('beginScope', {'i18n:source': 'en', 'i18n:target': 'ru'}), + ('startEndTag', ('span', [('i18n:source', 'en', 'i18n'), + ('i18n:target', 'ru', 'i18n')])), + ('endScope', ()), + ('endI18nContext', ()), + ]) + + def test_i18n_context_in_define_slot(self): + text = ("
      " + "
      spam
      " + "
      ") + self._run_check(text, [ + ('setPosition', (1, 0)), + ('useMacro', + ('M', '$M$', + {'S': [('startTag', ('div', + [('metal:fill-slot', 'S', 'metal')])), + rawtext('spam')]}, + [('beginI18nContext', {'domain': 'mydomain', + 'source': None, 'target': None}), + ('beginScope', + {'i18n:domain': 'mydomain', 'metal:use-macro': 'M'}), + ('startTag', ('div', [('metal:use-macro', 'M', 'metal'), + ('i18n:domain', 'mydomain', 'i18n')])), + ('setPosition', (1, 48)), + ('fillSlot', ('S', + [('startTag', + ('div', [('metal:fill-slot', 'S', 'metal')])), + rawtext('spam')])), + ('endScope', ()), + rawtext(''), + ('endI18nContext', ())])), + ]) + + def test_i18n_data(self): + # input/test23.html + self._run_check('''\ +2:32 pm +''', [ + ('setPosition', (1, 0)), + ('beginScope', + {'i18n:translate': 'timefmt', 'i18n:data': 'here/currentTime'}), + ('startTag', + ('span', + [('i18n:data', 'here/currentTime', 'i18n'), + ('i18n:translate', 'timefmt', 'i18n')])), + ('insertTranslation', + ('timefmt', [('rawtextOffset', ('2:32 pm', 7))], '$here/currentTime$')), + ('endScope', ()), + ('rawtextColumn', ('
      \n', 0)) + ]) + + def test_i18n_data_with_name(self): + # input/test29.html + self._run_check('''\ +
      At the tone the time will be +2:32 pm... beep!
      +''', [('setPosition', (1, 0)), + ('beginScope', {'i18n:translate': ''}), + ('startTag', ('div', [('i18n:translate', '', 'i18n')])), + ('insertTranslation', + ('', + [('rawtextBeginScope', + ('At the tone the time will be\n', + 0, + (2, 0), + 0, + {'i18n:data': 'here/currentTime', + 'i18n:name': 'time', + 'i18n:translate': 'timefmt'})), + ('i18nVariable', + ('time', + [('startTag', + ('span', + [('i18n:data', 'here/currentTime', 'i18n'), + ('i18n:translate', 'timefmt', 'i18n'), + ('i18n:name', 'time', 'i18n')])), + ('insertTranslation', + ('timefmt', + [('rawtextOffset', ('2:32 pm', 7))], + '$here/currentTime$')), + ('rawtextOffset', ('
      ', 7))], + None, + 0)), + ('endScope', ()), + ('rawtextOffset', ('... beep!', 9))])), + ('endScope', ()), + ('rawtextColumn', ('\n', 0)) + ]) + + def test_i18n_name_around_tal_content(self): + # input/test28.html + self._run_check('''\ +

      Your contact email address is recorded as + + user@host.com +

      +''', [('setPosition', (1, 0)), + ('beginScope', {'i18n:translate': 'verify'}), + ('startTag', ('p', [('i18n:translate', 'verify', 'i18n')])), + ('insertTranslation', + ('verify', + [('rawtextBeginScope', + ('Your contact email address is recorded as\n ', + 4, + (2, 4), + 0, + {'i18n:name': 'email', 'tal:omit-tag': ''})), + ('i18nVariable', + ('email', + [('optTag', + ('span', + '', + None, + 0, + [('startTag', + ('span', + [('tal:omit-tag', '', 'tal'), + ('i18n:name', 'email', 'i18n')]))], + [('rawtextBeginScope', + ('\n ', + 4, + (3, 4), + 0, + {'href': 'mailto:user@example.com', + 'tal:content': 'request/submitter'})), + ('startTag', + ('a', + [('href', 'href="mailto:user@example.com"'), + ('tal:content', 'request/submitter', 'tal')])), + ('insertText', + ('$request/submitter$', + [('rawtextOffset', ('user@host.com', 13))])), + ('endScope', ()), + ('rawtextOffset', ('', 4))]))], + None, + 0)), + ('endScope', ()), + ('rawtextColumn', ('\n', 0))])), + ('endScope', ()), + ('rawtextColumn', ('

      \n', 0)) + ]) + + def test_i18n_name_with_tal_content(self): + # input/test27.html + self._run_check('''\ +

      Your contact email address is recorded as + user@host.com +

      +''', [ + ('setPosition', (1, 0)), + ('beginScope', {'i18n:translate': 'verify'}), + ('startTag', ('p', [('i18n:translate', 'verify', 'i18n')])), + ('insertTranslation', + ('verify', + [('rawtextBeginScope', + ('Your contact email address is recorded as\n ', + 4, + (2, 4), + 0, + {'href': 'mailto:user@example.com', + 'i18n:name': 'email', + 'tal:content': 'request/submitter'})), + ('i18nVariable', + ('email', + [('startTag', + ('a', + [('href', 'href="mailto:user@example.com"'), + ('tal:content', 'request/submitter', 'tal'), + ('i18n:name', 'email', 'i18n')])), + ('insertText', + ('$request/submitter$', + [('rawtextOffset', ('user@host.com', 13))])), + ('rawtextOffset', ('', 4))], + None, + 0)), + ('endScope', ()), + ('rawtextColumn', ('\n', 0))])), + ('endScope', ()), + ('rawtextColumn', ('

      \n', 0)) + ]) + + +def test_suite(): + return unittest.TestSuite(( + unittest.makeSuite(HTMLTALParserTestCases), + unittest.makeSuite(METALGeneratorTestCases), + unittest.makeSuite(TALGeneratorTestCases), + )) diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/test_sourcepos.py b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/test_sourcepos.py new file mode 100644 index 0000000..c397194 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/test_sourcepos.py @@ -0,0 +1,93 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Tests for TALInterpreter. +""" +import unittest + +try: + # Python 2.x + from StringIO import StringIO +except ImportError: + # Python 3.x + from io import StringIO + +from zope.tal.htmltalparser import HTMLTALParser +from zope.tal.talinterpreter import TALInterpreter +from zope.tal.talgenerator import TALGenerator +from zope.tal.dummyengine import DummyEngine + + +page1 = ''' +
      +page1= +
      +''' + +main_template = ''' +main_template= +
      +main_template= +
      +main_template= +''' + +footer = '''
      +footer= +
      ''' + +expected = ''' +main_template=main_template (2,14) +
      +page1=page1 (3,6) +
      +main_template=main_template (4,14) +
      +footer=footer (2,7) +
      +main_template=main_template (6,14) +''' + + + +class SourcePosTestCase(unittest.TestCase): + + def parse(self, eng, s, fn): + gen = TALGenerator(expressionCompiler=eng, xml=0, source_file=fn) + parser = HTMLTALParser(gen) + parser.parseString(s) + program, macros = parser.getCode() + return program, macros + + def test_source_positions(self): + # Ensure source file and position are set correctly by TAL + macros = {} + eng = DummyEngine(macros) + page1_program, page1_macros = self.parse(eng, page1, 'page1') + main_template_program, main_template_macros = self.parse( + eng, main_template, 'main_template') + footer_program, footer_macros = self.parse(eng, footer, 'footer') + + macros['main'] = main_template_macros['main'] + macros['foot'] = footer_macros['foot'] + + stream = StringIO() + interp = TALInterpreter(page1_program, macros, eng, stream) + interp() + self.assertEqual(stream.getvalue().strip(), expected.strip(), + "Got result:\n%s\nExpected:\n%s" + % (stream.getvalue(), expected)) + + +def test_suite(): + return unittest.makeSuite(SourcePosTestCase) diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/test_talgettext.py b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/test_talgettext.py new file mode 100644 index 0000000..1e3b3f9 --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/test_talgettext.py @@ -0,0 +1,141 @@ +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. 
+# +############################################################################## +"""Tests for the talgettext utility. +""" + +from __future__ import print_function + +import tempfile +import unittest +import warnings + +try: + # Python 2.x + from StringIO import StringIO +except ImportError: + # Python 3.x + from io import StringIO + +from zope.tal.htmltalparser import HTMLTALParser +from zope.tal.talgettext import POTALInterpreter +from zope.tal.talgettext import POEngine + +class test_POEngine(unittest.TestCase): + """Test the PO engine functionality, which simply adds items to a catalog + as .translate is called + """ + + def test_translate(self): + test_keys = ['foo', 'bar', 'blarf', 'washington'] + + engine = POEngine() + engine.file = 'foo.pt' + for key in test_keys: + engine.translate(key, 'domain') + + for key in test_keys: + self.assertIn( + key, engine.catalog['domain'], + "POEngine catalog does not properly store message ids" + ) + + def test_translate_existing(self): + engine = POEngine() + # This tries to reproduce a big surfacing in a template of + # PloneSoftwareCenter when using the i18ndude package to + # extract translatable strings, which uses zope.tal. The + # relevant html snippet is this: + # + # + # Read more… + # + # + # Due to the different ways that i18n:attributes and + # i18n:translate are handled, the attribute gets passed to the + # translate method with the html entity interpreted as a + # unicode, and the i18n:translate gets passed as a simple + # string with the html entity intact. That may need a fix + # elsewhere, but at the moment it gives a warning. The very + # least we can do is make sure that this does not give a + # UnicodeDecodeError, which is what we test here. + engine.file = 'psc_release_listing.pt' + # position is position in file. + engine.translate('foo', 'domain', + default=u'Read more\u2026', position=7) + # Adding the same key with the same default is fine. 
+ engine.translate('foo', 'domain', + default=u'Read more\u2026', position=13) + # Adding the same key with a different default is bad and + # triggers a warning. + with warnings.catch_warnings(record=True) as log: + warnings.simplefilter("always") + engine.translate('foo', 'domain', + default='Read still more…', position=42) + self.assertEqual(len(log), 1) + message = log[0].message + with tempfile.TemporaryFile('w+') as printfile: + print(message, file=printfile) + printfile.seek(0) + self.assertTrue("already exists with a different default" + in printfile.read()) + + def test_dynamic_msgids(self): + sample_source = """ +

      + Some + dynamic + text. +

      +

      + A link. +

      + """ + p = HTMLTALParser() + p.parseString(sample_source) + program, macros = p.getCode() + engine = POEngine() + engine.file = 'sample_source' + POTALInterpreter(program, macros, engine, stream=StringIO(), + metal=False)() + msgids = [] + for domain in engine.catalog.values(): + msgids += list(domain) + msgids.sort() + self.assertEqual(msgids, + ['A link.', + 'Some ${DYNAMIC_CONTENT} text.']) + + def test_potalinterpreter_translate_default(self): + sample_source = '

      text

      ' + p = HTMLTALParser() + p.parseString(sample_source) + program, macros = p.getCode() + engine = POEngine() + engine.file = 'sample_source' + interpreter = POTALInterpreter( + program, macros, engine, stream=StringIO(), metal=False) + # We simply call this, to make sure we don't get a NameError + # for 'unicode' in python 3. + # The return value (strangely: 'x') is not interesting here. + interpreter.translate('text') + msgids = [] + for domain in engine.catalog.values(): + msgids += list(domain) + self.assertIn('text', msgids) + + +def test_suite(): + return unittest.defaultTestLoader.loadTestsFromName(__name__) diff --git a/thesisenv/lib/python3.6/site-packages/zope/tal/tests/test_talinterpreter.py b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/test_talinterpreter.py new file mode 100644 index 0000000..dd6b4df --- /dev/null +++ b/thesisenv/lib/python3.6/site-packages/zope/tal/tests/test_talinterpreter.py @@ -0,0 +1,859 @@ +# -*- coding: utf-8 -*- +############################################################################## +# +# Copyright (c) 2001, 2002 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## +"""Tests for TALInterpreter. 
+""" +import os + +import sys +import unittest + +try: + # Python 2.x + from StringIO import StringIO +except ImportError: + # Python 3.x + from io import StringIO + + +from zope.tal.taldefs import METALError, I18NError, TAL_VERSION +from zope.tal.taldefs import TALExpressionError +from zope.tal.htmltalparser import HTMLTALParser +from zope.tal.talparser import TALParser +from zope.tal.talinterpreter import TALInterpreter +from zope.tal.talgenerator import TALGenerator +from zope.tal.dummyengine import DummyEngine +from zope.tal.dummyengine import MultipleDomainsDummyEngine +from zope.i18nmessageid import Message + + +class TestCaseBase(unittest.TestCase): + + def _compile(self, source, source_file=None): + generator = TALGenerator(xml=0, source_file=source_file) + parser = HTMLTALParser(generator) + parser.parseString(source) + program, macros = parser.getCode() + return program, macros + + +class MacroErrorsTestCase(TestCaseBase): + + def setUp(self): + dummy, macros = self._compile('

      Booh

      ') + self.macro = macros['M'] + self.engine = DummyEngine(macros) + program, dummy = self._compile('

      Bah

      ') + self.interpreter = TALInterpreter(program, {}, self.engine) + + def tearDown(self): + with self.assertRaises(METALError): + self.interpreter() + + def test_mode_error(self): + self.macro[1] = ("mode", "duh") + + def test_version_error(self): + self.macro[0] = ("version", "duh") + + +class TestMacroFunkyError(TestCaseBase): + + @unittest.expectedFailure + def test_div_in_p_using_macro(self): + # We have not found a solution for this + # and it is a deep and undocumented HTML parser issue. + # Fred is looking into this. + dummy, macros = self._compile('

      Booh

      ') + engine = DummyEngine(macros) + program, dummy = self._compile( + '

      foo

      ') + interpreter = TALInterpreter(program, {}, engine) + + output = interpreter() + self.assertEqual(output, '

      foo

      ') + + +class MacroExtendTestCase(TestCaseBase): + + def setUp(self): + s = self._read(('input', 'pnome_template.pt')) + self.pnome_program, pnome_macros = self._compile(s) + s = self._read(('input', 'acme_template.pt')) + self.acme_program, acme_macros = self._compile(s) + s = self._read(('input', 'document_list.pt')) + self.doclist_program, _doclist_macros = self._compile(s) + macros = { + 'pnome_macros_page': pnome_macros['page'], + 'acme_macros_page': acme_macros['page'], + } + self.engine = DummyEngine(macros) + + def _read(self, path): + dir = os.path.dirname(__file__) + fn = os.path.join(dir, *path) + with open(fn) as f: + data = f.read() + return data + + def test_preview_acme_template(self): + # An ACME designer is previewing the ACME design. For the + # purposes of this use case, extending a macro should act the + # same as using a macro. + result = StringIO() + interpreter = TALInterpreter( + self.acme_program, {}, self.engine, stream=result) + interpreter() + actual = result.getvalue().strip() + expected = self._read(('output', 'acme_template.html')).strip() + self.assertEqual(actual, expected) + + def test_preview_acme_template_source(self): + # Render METAL attributes in acme_template + result = StringIO() + interpreter = TALInterpreter( + self.acme_program, {}, self.engine, stream=result, tal=False) + interpreter() + actual = result.getvalue().strip() + expected = self._read(('output', 'acme_template_source.html')).strip() + self.assertEqual(actual, expected) + + +class I18NCornerTestCaseMessage(TestCaseBase): + + interpreter = None + + def factory(self, msgid, default=None, mapping=None, domain=None): + return Message(msgid, domain=domain, default=default, mapping=mapping or {}) + + def setUp(self): + self.engine = DummyEngine() + # Make sure we'll translate the msgid not its unicode representation + self.engine.setLocal('foo', + self.factory('FoOvAlUe${empty}', 'default', {'empty': ''})) + self.engine.setLocal('bar', 'BaRvAlUe') + + def 
_check(self, program, expected): + result = StringIO() + self.interpreter = TALInterpreter(program, {}, self.engine, + stream=result) + self.interpreter() + self.assertEqual(expected, result.getvalue()) + + def test_simple_messageid_translate(self): + # This test is mainly here to make sure our DummyEngine works + # correctly. + program, _macros = self._compile( + '') + self._check(program, 'FOOVALUE') + + program, _macros = self._compile( + '') + self._check(program, 'FOOVALUE') + + # i18n messages defined in Python are translated automatically + # (no i18n:translate necessary) + program, _macros = self._compile( + '') + self._check(program, 'FOOVALUE') + + program, _macros = self._compile( + '') + self._check(program, 'FOOVALUE') + + def test_attributes_translation(self): + program, _macros = self._compile( + '') + self._check(program, '') + + program, _macros = self._compile( + '') + self._check(program, '') + + program, _macros = self._compile( + '') + self._check(program, '') + + # i18n messages defined in Python are translated automatically + # (no i18n:attributes necessary) + program, _macros = self._compile( + '') + self._check(program, '') + + def test_text_variable_translate(self): + program, _macros = self._compile( + '') + self._check(program, 'BaRvAlUe') + + program, _macros = self._compile( + '') + self._check(program, 'BARVALUE') + + program, _macros = self._compile( + '') + self._check(program, 'BARVALUE') + + def test_text_translate(self): + program, _macros = self._compile( + '') + self._check(program, 'BaR') + + program, _macros = self._compile( + '') + self._check(program, 'BAR') + + program, _macros = self._compile( + '') + self._check(program, 'BAR') + + def test_structure_text_variable_translate(self): + program, _macros = self._compile( + '') + self._check(program, 'BaRvAlUe') + + program, _macros = self._compile( + '') + self._check(program, 'BARVALUE') + + program, _macros = self._compile( + '') + self._check(program, 'BARVALUE') + + # 
i18n messages defined in Python are translated automatically + # (no i18n:translate necessary) + program, _macros = self._compile( + '') + self._check(program, 'FOOVALUE') + + program, _macros = self._compile( + '') + self._check(program, 'FOOVALUE') + + def test_structure_text_translate(self): + program, _macros = self._compile( + '') + self._check(program, 'BaR') + + program, _macros = self._compile( + '') + self._check(program, 'BAR') + + program, _macros = self._compile( + '') + self._check(program, 'BAR') + + def test_replace_with_messageid_and_i18nname(self): + program, _macros = self._compile( + '
      ' + '' + '
      ') + self._check(program, '
      FOOVALUE
      ') + + def test_pythonexpr_replace_with_messageid_and_i18nname(self): + program, _macros = self._compile( + '
      ' + '' + '
      ') + self._check(program, '
      FOOVALUE
      ') + + def test_structure_replace_with_messageid_and_i18nname(self): + program, _macros = self._compile( + '
      ' + '' + '
      ') + self._check(program, '
      FOOVALUE
      ') + + def test_complex_replace_with_messageid_and_i18nname(self): + program, _macros = self._compile( + '
      ' + '' + '' + '' + '
      ') + self._check(program, '
      FOOVALUE
      ') + + def test_content_with_messageid_and_i18nname(self): + program, _macros = self._compile( + '
      ' + '' + '
      ') + self._check(program, '
      FOOVALUE
      ') + + def test_content_with_messageid_and_i18nname_and_i18ntranslate(self): + # Let's tell the user this is incredibly silly! + self.assertRaises( + I18NError, self._compile, + '') + + def test_content_with_explicit_messageid(self): + # Let's tell the user this is incredibly silly! + self.assertRaises( + I18NError, self._compile, + '') + + def test_content_with_plaintext_and_i18nname_and_i18ntranslate(self): + # Let's tell the user this is incredibly silly! + self.assertRaises( + I18NError, self._compile, + 'green') + + def test_translate_static_text_as_dynamic(self): + program, _macros = self._compile( + '
      This is text for ' + '.' + '
      ') + self._check(program, + '
      THIS IS TEXT FOR BaRvAlUe.
      ') + program, _macros = self._compile( + '
      This is text for ' + '.' + '
      ') + self._check(program, + '
      THIS IS TEXT FOR BARVALUE.
      ') + + def test_translate_static_text_as_dynamic_from_bytecode(self): + program = [ + ('version', TAL_VERSION), + ('mode', 'html'), + ('setPosition', (1, 0)), + ('beginScope', {'i18n:translate': ''}), + ('startTag', ('div', [('i18n:translate', '', 'i18n')])), + ('insertTranslation', + ('', + [('rawtextOffset', ('This is text for ', 17)), + ('setPosition', (1, 40)), + ('beginScope', + {'tal:content': 'bar', 'i18n:name': 'bar_name', 'i18n:translate': ''}), + ('i18nVariable', + ('bar_name', + [('startTag', + ('span', + [('i18n:translate', '', 'i18n'), + ('tal:content', 'bar', 'tal'), + ('i18n:name', 'bar_name', 'i18n')])), + ('insertTranslation', + ('', + [('insertText', ('$bar$', []))])), + ('rawtextOffset', ('
      ', 7))], + None, + 0)), + ('endScope', ()), + ('rawtextOffset', ('.', 1))])), + ('endScope', ()), + ('rawtextOffset', ('
      ', 6)) + ] + self._check(program, + '
      THIS IS TEXT FOR BARVALUE.
      ') + + def test_for_correct_msgids(self): + self.engine.translationDomain.clearMsgids() + result = StringIO() + #GChapelle: + #I have the feeling the i18n:translate with the i18n:name is wrong + # + #program, macros = self._compile( + # '
      This is text for ' + # '.
      ') + program, _macros = self._compile( + '
      This is text for ' + '.
      ') + self.interpreter = TALInterpreter(program, {}, self.engine, + stream=result) + self.interpreter() + msgids = self.engine.translationDomain.getMsgids('default') + msgids.sort() + self.assertEqual(1, len(msgids)) + self.assertEqual('This is text for ${bar_name}.', msgids[0][0]) + self.assertEqual({'bar_name': 'BaRvAlUe'}, msgids[0][1]) + self.assertEqual( + '
      THIS IS TEXT FOR BaRvAlUe.
      ', + result.getvalue()) + + def test_for_correct_msgids_translate_name(self): + self.engine.translationDomain.clearMsgids() + result = StringIO() + program, _macros = self._compile( + '
      This is text for ' + '.
      ') + self.interpreter = TALInterpreter(program, {}, self.engine, + stream=result) + self.interpreter() + msgids = self.engine.translationDomain.getMsgids('default') + msgids.sort() + self.assertEqual(2, len(msgids)) + self.assertEqual('This is text for ${bar_name}.', msgids[1][0]) + self.assertEqual({'bar_name': 'BARVALUE'}, msgids[1][1]) + self.assertEqual( + '
      THIS IS TEXT FOR BARVALUE.
      ', + result.getvalue()) + + def test_i18ntranslate_i18nname_and_attributes(self): + # Test for Issue 301: Bug with i18n:name and i18n:translate + # on the same element + self.engine.translationDomain.clearMsgids() + result = StringIO() + program, _macros = self._compile( + '

      ' + 'Some static text and a link text.

      ') + self.interpreter = TALInterpreter(program, {}, self.engine, + stream=result) + self.interpreter() + msgids = self.engine.translationDomain.getMsgids('default') + msgids.sort() + self.assertEqual(2, len(msgids)) + self.assertEqual('Some static text and a ${link}.', msgids[0][0]) + self.assertEqual({'link': 'LINK TEXT'}, msgids[0][1]) + self.assertEqual('link text', msgids[1][0]) + self.assertEqual( + '

      SOME STATIC TEXT AND A LINK TEXT.

      ', + result.getvalue()) + + def test_for_raw_msgids(self): + # Test for Issue 314: i18n:translate removes line breaks from + #
      ...
      contents + # HTML mode + self.engine.translationDomain.clearMsgids() + result = StringIO() + program, _macros = self._compile( + '
      This is text\n' + ' \tfor\n div.
      ' + '
       This is text\n'
      +            ' \tfor\n pre. 
      ') + self.interpreter = TALInterpreter(program, {}, self.engine, + stream=result) + self.interpreter() + msgids = self.engine.translationDomain.getMsgids('default') + msgids.sort() + self.assertEqual(2, len(msgids)) + self.assertEqual(' This is text\n \tfor\n pre. ', msgids[0][0]) + self.assertEqual('This is text for div.', msgids[1][0]) + self.assertEqual( + '
      THIS IS TEXT FOR DIV.
      ' + '
       THIS IS TEXT\n \tFOR\n PRE. 
      ', + result.getvalue()) + + # XML mode + self.engine.translationDomain.clearMsgids() + result = StringIO() + parser = TALParser() + parser.parseString( + '\n' + '
       This is text\n'
      +            ' \tfor\n barvalue. 
      ') + program, _macros = parser.getCode() + self.interpreter = TALInterpreter(program, {}, self.engine, + stream=result) + self.interpreter() + msgids = self.engine.translationDomain.getMsgids('default') + msgids.sort() + self.assertEqual(1, len(msgids)) + self.assertEqual('This is text for barvalue.', msgids[0][0]) + self.assertEqual( + '\n' + '
      THIS IS TEXT  FOR BARVALUE.
      ', + result.getvalue()) + + def test_raw_msgids_and_i18ntranslate_i18nname(self): + self.engine.translationDomain.clearMsgids() + result = StringIO() + program, _macros = self._compile( + '
      This is text\n \tfor\n' + '
       \tbar\n 
      .
      ') + self.interpreter = TALInterpreter(program, {}, self.engine, + stream=result) + self.interpreter() + msgids = self.engine.translationDomain.getMsgids('default') + msgids.sort() + self.assertEqual(2, len(msgids)) + self.assertEqual(' \tbar\n ', msgids[0][0]) + self.assertEqual('This is text for ${bar}.', msgids[1][0]) + self.assertEqual({'bar': '
       \tBAR\n 
      '}, msgids[1][1]) + self.assertEqual( + (u'
      THIS IS TEXT FOR
       \tBAR\n 
      .
      '), + result.getvalue()) + + def test_for_handling_unicode_vars(self): + # Make sure that non-ASCII Unicode is substituted correctly. + # http://collector.zope.org/Zope3-dev/264 + program, _macros = self._compile( + r'''
      ''' + r'''Foo
      ''') + self._check(program, (u"
      FOO \u00C0
      ")) + + +class UnusedExplicitDomainTestCase(I18NCornerTestCaseMessage): + + def setUp(self): + # MultipleDomainsDummyEngine is a Engine + # where default domain transforms to uppercase + self.engine = MultipleDomainsDummyEngine() + self.engine.setLocal('foo', + self.factory('FoOvAlUe${empty}', 'default', {'empty': ''})) + self.engine.setLocal('bar', 'BaRvAlUe') + self.engine.setLocal('baz', + self.factory('BaZvAlUe', 'default', {})) + # Message ids with different domains + self.engine.setLocal('toupper', + self.factory('ToUpper', 'default', {})) + self.engine.setLocal('tolower', + self.factory('ToLower', 'default', {}, domain='lower')) + + def test_multiple_domains(self): + program, _macros = self._compile( + '
      ') + self._check(program, '
      TOUPPER
      ') + program, _macros = self._compile( + '
      ') + self._check(program, '
      tolower
      ') + program, _macros = self._compile( + '
      ') + self._check(program, '
      TOUPPER
      ') + program, _macros = self._compile( + '
      ') + self._check(program, '
      tolower
      ') + program, _macros = self._compile( + '
      ') + self._check(program, '
      TOUPPER
      ') + program, _macros = self._compile( + '
      ') + self._check(program, '
      tolower
      ') + + def test_unused_explicit_domain(self): + #a_very_explicit_domain_setup_by_template_developer_that_wont_be_taken_into_account_by_the_ZPT_engine + #is a domain that transforms to lowercase + self.engine.setLocal( + 'othertolower', + self.factory('OtherToLower', + 'a_very_explicit_domain_setup_by_template_developer_that_wont_be_taken_into_account_by_the_ZPT_engine', + {}, + domain='lower')) + program, _macros = self._compile( + '
      ') + self._check(program, '
      othertolower
      ') + #takes domain into account for strings + program, _macros = self._compile( + '
      ') + self._check(program, '
      tolower
      ') + #but not for messageids + program, _macros = self._compile( + '
      ') + self._check(program, '
      BAZVALUE
      ') + +class ScriptTestCase(TestCaseBase): + + interpreter = None + + def setUp(self): + self.engine = DummyEngine() + + def _check(self, program, expected): + result = StringIO() + self.interpreter = TALInterpreter(program, {}, self.engine, + stream=result) + self.interpreter() + self.assertEqual(expected, result.getvalue()) + + def test_simple(self): + program, _macros = self._compile( + '

      print("hello")

      ') + self._check(program, '

      hello\n

      ') + + def test_script_and_tal_block(self): + program, _macros = self._compile( + '\n' + ' global x\n' + ' x = 1\n' + '\n' + '') + self._check(program, '\n1') + self.assertEqual(self.engine.codeGlobals['x'], 1) + + def test_script_and_tal_block_having_inside_print(self): + program, _macros = self._compile( + '\n' + ' print("hello")' + '') + self._check(program, 'hello\n') + + def test_script_and_omittag(self): + program, _macros = self._compile( + '

      \n' + ' print("hello")' + '

      ') + self._check(program, 'hello\n') + + def test_script_and_inside_tags(self): + program, _macros = self._compile( + '

      \n' + ' print("hello")' + '

      ') + self._check(program, 'hello\n') + + def test_script_and_inside_tags_with_tal(self): + program, _macros = self._compile( + '

      ') + self._check(program, 'hello\n') + + def test_html_script(self): + program, _macros = self._compile( + '') + self._check(program, 'Hello world!\n') + + def test_html_script_and_javascript(self): + program, _macros = self._compile( + '') + self._check(program, + '